@article{agliotiActionAnticipationMotor2008,
title = {Action Anticipation and Motor Resonance in Elite Basketball Players},
author = {Aglioti, Salvatore M and Cesari, Paola and Romani, Michela and Urgesi, Cosimo},
date = {2008-09},
journaltitle = {Nature Neuroscience},
shortjournal = {Nat Neurosci},
volume = {11},
number = {9},
pages = {1109--1116},
issn = {1097-6256, 1546-1726},
doi = {10.1038/nn.2182},
url = {http://www.nature.com/articles/nn.2182},
urldate = {2022-12-22},
langid = {english},
keywords = {extracted,linus}
}
@book{almeidaDevelopmentWearableSensor2012,
title = {Development of a Wearable Sensor System for Real-Time Control of Knee Prostheses},
author = {de Almeida, Eduardo Carlos Venancio},
date = {2012},
url = {http://urn.kb.se/resolve?urn=urn:nbn:se:liu:diva-81244},
urldate = {2022-09-21},
langid = {english},
keywords = {latency},
file = {C\:\\Users\\webma\\Zotero\\storage\\WCDV26XV\\Almeida - 2012 - Development of a wearable sensor system for real-t.pdf;C\:\\Users\\webma\\Zotero\\storage\\KIPBEW4J\\record.html}
}
@inproceedings{benedictSurveyMaterialsCoatings2016,
title = {Survey of Materials and Coatings Suitable for Controlling Stray Light from the Near-{{UV}} to the Near-{{IR}}},
author = {Benedict, Tom and Barrick, Gregory A. and Pazder, John},
editor = {Evans, Christopher J. and Simard, Luc and Takami, Hideki},
date = {2016-08-09},
pages = {99083T},
location = {{Edinburgh, United Kingdom}},
doi = {10.1117/12.2231348},
url = {http://proceedings.spiedigitallibrary.org/proceeding.aspx?doi=10.1117/12.2231348},
urldate = {2023-02-05},
eventtitle = {{{SPIE Astronomical Telescopes}} + {{Instrumentation}}},
file = {C\:\\Users\\webma\\Zotero\\storage\\PVRJC2CC\\Benedict et al_2016_Survey of materials and coatings suitable for controlling stray light from the.pdf}
}
@article{bevilacquaSensoriMotorLearningMovement2016,
title = {Sensori-{{Motor Learning}} with {{Movement Sonification}}: {{Perspectives}} from {{Recent Interdisciplinary Studies}}},
shorttitle = {Sensori-{{Motor Learning}} with {{Movement Sonification}}},
author = {Bevilacqua, Frédéric and Boyer, Eric O. and Françoise, Jules and Houix, Olivier and Susini, Patrick and Roby-Brami, Agnès and Hanneton, Sylvain},
date = {2016-08-25},
journaltitle = {Frontiers in Neuroscience},
shortjournal = {Front. Neurosci.},
volume = {10},
issn = {1662-453X},
doi = {10.3389/fnins.2016.00385},
url = {http://journal.frontiersin.org/Article/10.3389/fnins.2016.00385/abstract},
urldate = {2023-02-13},
file = {C\:\\Users\\webma\\Zotero\\storage\\V48R697X\\Bevilacqua et al. - 2016 - Sensori-Motor Learning with Movement Sonification.pdf}
}
@article{bidelmanNeuralCorrelatesConsonance2009,
title = {Neural {{Correlates}} of {{Consonance}}, {{Dissonance}}, and the {{Hierarchy}} of {{Musical Pitch}} in the {{Human Brainstem}}},
author = {Bidelman, G. M. and Krishnan, A.},
date = {2009-10-21},
journaltitle = {Journal of Neuroscience},
shortjournal = {Journal of Neuroscience},
volume = {29},
number = {42},
pages = {13165--13171},
issn = {0270-6474, 1529-2401},
doi = {10.1523/JNEUROSCI.3900-09.2009},
url = {https://www.jneurosci.org/lookup/doi/10.1523/JNEUROSCI.3900-09.2009},
urldate = {2023-02-14},
langid = {english},
file = {C\:\\Users\\webma\\Zotero\\storage\\DTCGC8Y7\\Bidelman_Krishnan_2009_Neural Correlates of Consonance, Dissonance, and the Hierarchy of Musical Pitch.pdf}
}
@article{bokerWindowedCrosscorrelationPeak2002,
title = {Windowed Cross-Correlation and Peak Picking for the Analysis of Variability in the Association between Behavioral Time Series.},
author = {Boker, Steven M. and Rotondo, Jennifer L. and Xu, Minquan and King, Kadijah},
date = {2002},
journaltitle = {Psychological Methods},
shortjournal = {Psychological Methods},
volume = {7},
number = {3},
pages = {338--355},
issn = {1939-1463, 1082-989X},
doi = {10.1037/1082-989X.7.3.338},
url = {http://doi.apa.org/getdoi.cfm?doi=10.1037/1082-989X.7.3.338},
urldate = {2023-01-22},
langid = {english},
file = {C\:\\Users\\webma\\Zotero\\storage\\QAX9GYWC\\Boker et al_2002_Windowed cross-correlation and peak picking for the analysis of variability in.pdf}
}
@article{bokerWindowedCrosscorrelationPeak2002a,
title = {Windowed Cross-Correlation and Peak Picking for the Analysis of Variability in the Association between Behavioral Time Series},
author = {Boker, Steven M. and Rotondo, Jennifer L. and Xu, Minquan and King, Kadijah},
date = {2002},
journaltitle = {Psychological Methods},
volume = {7},
pages = {338--355},
publisher = {{American Psychological Association}},
location = {{US}},
issn = {1939-1463},
doi = {10.1037/1082-989X.7.3.338},
abstract = {Cross-correlation and most other longitudinal analyses assume that the association between 2 variables is stationary. Thus, a sample of occasions of measurement is expected to be representative of the association between variables regardless of the time of onset or number of occasions in the sample. The authors propose a method to analyze the association between 2 variables when the assumption of stationarity may not be warranted. The method results in estimates of both the strength of peak association and the time lag when the peak association occurred for a range of starting values of elapsed time from the beginning of an experiment.},
keywords = {Analysis of Variance,Response Variability,Time,Time Series},
file = {C\:\\Users\\webma\\Zotero\\storage\\6TWBAUAQ\\2002-18342-006.html}
}
@article{boltSensoryAttenuationAuditory2021,
title = {Sensory {{Attenuation}} of the {{Auditory P2 Differentiates Self-}} from {{Partner-Produced Sounds}} during {{Joint Action}}},
author = {Bolt, Nicole K. and Loehr, Janeen D.},
date = {2021-10-01},
journaltitle = {Journal of Cognitive Neuroscience},
volume = {33},
number = {11},
pages = {2297--2310},
issn = {0898-929X, 1530-8898},
doi = {10.1162/jocn_a_01760},
url = {https://direct.mit.edu/jocn/article/33/11/2297/102999/Sensory-Attenuation-of-the-Auditory-P2},
urldate = {2022-12-02},
abstract = {Successful human interaction relies on people's ability to differentiate between the sensory consequences of their own and others' actions. Research in solo action contexts has identified sensory attenuation, that is, the selective perceptual or neural dampening of the sensory consequences of self-produced actions, as a potential marker of the distinction between self- and externally produced sensory consequences. However, very little research has examined whether sensory attenuation distinguishes self- from partner-produced sensory consequences in joint action contexts. The current study examined whether sensory attenuation of the auditory N1 or P2 ERPs distinguishes self- from partner-produced tones when pairs of people coordinate their actions to produce tone sequences that match a metronome pace. We did not find evidence of auditory N1 attenuation for either self- or partner-produced tones. Instead, the auditory P2 was attenuated for self-produced tones compared to partner-produced tones within the joint action. These findings indicate that self-specific attenuation of the auditory P2 differentiates the sensory consequences of one's own from others' actions during joint action. These findings also corroborate recent evidence that N1 attenuation may be driven by general rather than action-specific processes and support a recently proposed functional dissociation between auditory N1 and P2 attenuation.},
langid = {english},
keywords = {extracted,introduction,linus,main-paper},
file = {C\:\\Users\\webma\\Zotero\\storage\\I32PJGPZ\\Bolt and Loehr - 2021 - Sensory Attenuation of the Auditory P2 Differentia.pdf}
}
@article{botvinickConflictMonitoringCognitive2001,
title = {Conflict Monitoring and Cognitive Control.},
author = {Botvinick, Matthew M. and Braver, Todd S. and Barch, Deanna M. and Carter, Cameron S. and Cohen, Jonathan D.},
date = {2001},
journaltitle = {Psychological Review},
shortjournal = {Psychological Review},
volume = {108},
number = {3},
pages = {624--652},
issn = {1939-1471, 0033-295X},
doi = {10.1037/0033-295X.108.3.624},
url = {http://doi.apa.org/getdoi.cfm?doi=10.1037/0033-295X.108.3.624},
urldate = {2022-12-19},
langid = {english},
keywords = {extracted}
}
@article{brockIfMotionSounds2012,
title = {If Motion Sounds: {{Movement}} Sonification Based on Inertial Sensor Data},
shorttitle = {If Motion Sounds},
author = {Brock, Heike and Schmitz, Gerd and Baumann, Jan and Effenberg, Alfred O.},
date = {2012},
journaltitle = {Procedia Engineering},
shortjournal = {Procedia Engineering},
volume = {34},
pages = {556--561},
issn = {18777058},
doi = {10.1016/j.proeng.2012.04.095},
url = {https://linkinghub.elsevier.com/retrieve/pii/S1877705812017080},
urldate = {2022-09-01},
abstract = {Within last years, movement sonification turned out to be an appropriate support for motor perception and motor control that can display physical motion in a very rich and direct way. But how should movement sonification be configured to support motor learning? The appropriate selection of movement parameters and their transformation into characteristic motion features is essential for an auditory display to become effective. In this paper, we introduce a real-time sonification framework for all common MIDI environments based on acceleration and orientation data from inertial sensors. Fundamental processing steps to transform motion information into meaningful sound will be discussed. The proposed framework of inertial motion capturing, kinematic parameter selection and possible kinematic acoustic mapping provides a basis for mobile real-time movement sonification which is a prospective powerful training tool for rehabilitation and sports and offers a broad variety of application possibilities.},
langid = {english},
keywords = {linus,luke,methods},
file = {C\:\\Users\\webma\\Zotero\\storage\\HYCVDNZ8\\Brock et al. - 2012 - If motion sounds Movement sonification based on i.pdf}
}
@article{calvo-merinoActionObservationAcquired2005,
title = {Action {{Observation}} and {{Acquired Motor Skills}}: {{An fMRI Study}} with {{Expert Dancers}}},
shorttitle = {Action {{Observation}} and {{Acquired Motor Skills}}},
author = {Calvo-Merino, B. and Glaser, D.E. and Grèzes, J. and Passingham, R.E. and Haggard, P.},
date = {2005-08-01},
journaltitle = {Cerebral Cortex},
volume = {15},
number = {8},
pages = {1243--1249},
issn = {1460-2199, 1047-3211},
doi = {10.1093/cercor/bhi007},
url = {http://academic.oup.com/cercor/article/15/8/1243/304707/Action-Observation-and-Acquired-Motor-Skills-An},
urldate = {2022-12-22},
langid = {english},
keywords = {extracted,linus},
file = {C\:\\Users\\webma\\Zotero\\storage\\HF8XSMRU\\Calvo-Merino et al. - 2005 - Action Observation and Acquired Motor Skills An f.pdf}
}
@article{carlilePerceptionAuditoryMotion2016,
title = {The {{Perception}} of {{Auditory Motion}}},
author = {Carlile, Simon and Leung, Johahn},
date = {2016-01-01},
journaltitle = {Trends in Hearing},
shortjournal = {Trends in Hearing},
volume = {20},
pages = {2331216516644254},
issn = {2331-2165, 2331-2165},
doi = {10.1177/2331216516644254},
url = {http://journals.sagepub.com/doi/10.1177/2331216516644254},
urldate = {2023-02-14},
abstract = {The growing availability of efficient and relatively inexpensive virtual auditory display technology has provided new research platforms to explore the perception of auditory motion. At the same time, deployment of these technologies in command and control as well as in entertainment roles is generating an increasing need to better understand the complex processes underlying auditory motion perception. This is a particularly challenging processing feat because it involves the rapid deconvolution of the relative change in the locations of sound sources produced by rotational and translations of the head in space (self-motion) to enable the perception of actual source motion. The fact that we perceive our auditory world to be stable despite almost continual movement of the head demonstrates the efficiency and effectiveness of this process. This review examines the acoustical basis of auditory motion perception and a wide range of psychophysical, electrophysiological, and cortical imaging studies that have probed the limits and possible mechanisms underlying this perception.},
langid = {english},
file = {C\:\\Users\\webma\\Zotero\\storage\\GULJUGQW\\Carlile_Leung_2016_The Perception of Auditory Motion.pdf}
}
@article{cheongFourWaysQuantify2022,
title = {Four Ways to Quantify Synchrony between Time Series Data},
author = {Cheong, Jin Hyun},
date = {2022-08-30},
publisher = {{Open Science Framework}},
doi = {10.17605/OSF.IO/BA3NY},
url = {https://osf.io/ba3ny/},
urldate = {2023-01-24},
abstract = {This project provides a sample dataset with detailed code on how to quantify synchrony between time series data using a Pearson correlation, time-lagged cross correlations, Dynamic Time Warping, and instantaneous phase synchrony. Rendered tutorial is available at http://jinhyuncheong.com/jekyll/update/2019/05/16/Four\_ways\_to\_qunatify\_synchrony.html},
editora = {Open Science Framework},
editoratype = {collaborator}
}
@article{clarkCoordinatingEachOther2005,
title = {Coordinating with Each Other in a Material World},
author = {Clark, Herbert H.},
date = {2005-10},
journaltitle = {Discourse Studies},
shortjournal = {Discourse Studies},
volume = {7},
number = {4-5},
pages = {507--525},
issn = {1461-4456, 1461-7080},
doi = {10.1177/1461445605054404},
url = {http://journals.sagepub.com/doi/10.1177/1461445605054404},
urldate = {2023-02-12},
abstract = {In everyday joint activities, people coordinate with each other by means not only of linguistic signals, but also of material signals – signals in which they indicate things by deploying material objects, locations, or actions around them. Material signals fall into two main classes: directing-to and placing-for. In directing-to, people request addressees to direct their attention to objects, events, or themselves. In placing-for, people place objects, actions, or themselves in special sites for addressees to interpret. Both classes have many subtypes. Features of material signals were examined in pairs of people who were videotaped as they assembled TV stands, built Lego models, planned furnishings for a house, played piano duets, or bought coffee at Starbucks. In these activities, the pointing and placements were often sustained, creating three phases of signals – initiation, maintenance, and termination – each with its own interpretation.},
langid = {english}
}
@article{debashiSonificationNetworkTraffic2018,
title = {Sonification of Network Traffic Flow for Monitoring and Situational Awareness},
author = {Debashi, Mohamed and Vickers, Paul},
editor = {Mankin, Richard},
date = {2018-04-19},
journaltitle = {PLOS ONE},
shortjournal = {PLoS ONE},
volume = {13},
number = {4},
pages = {e0195948},
issn = {1932-6203},
doi = {10.1371/journal.pone.0195948},
url = {https://dx.plos.org/10.1371/journal.pone.0195948},
urldate = {2023-02-11},
langid = {english},
file = {C\:\\Users\\webma\\Zotero\\storage\\VNPCFP54\\Debashi and Vickers - 2018 - Sonification of network traffic flow for monitorin.pdf}
}
@article{demosRockingBeatEffects2012,
title = {Rocking to the Beat: {{Effects}} of Music and Partner's Movements on Spontaneous Interpersonal Coordination.},
shorttitle = {Rocking to the Beat},
author = {Demos, Alexander P. and Chaffin, Roger and Begosh, Kristen T. and Daniels, Jennifer R. and Marsh, Kerry L.},
date = {2012-02},
journaltitle = {Journal of Experimental Psychology: General},
shortjournal = {Journal of Experimental Psychology: General},
volume = {141},
number = {1},
pages = {49--53},
issn = {1939-2222, 0096-3445},
doi = {10.1037/a0023843},
url = {http://doi.apa.org/getdoi.cfm?doi=10.1037/a0023843},
urldate = {2022-10-15},
langid = {english},
keywords = {introduction,linus},
file = {C\:\\Users\\webma\\Zotero\\storage\\HN8UYBSS\\Demos et al. - 2012 - Rocking to the beat Effects of music and partner'.pdf}
}
@incollection{dixLatencyCyberPhysicalSystems2022,
title = {Latency in {{Cyber-Physical Systems}}: {{The Role}} of {{Visual Feedback Delays}} on {{Manual Skill Learning}}},
shorttitle = {Latency in {{Cyber-Physical Systems}}},
author = {Dix, Annika and Helmert, Jens and Pannasch, Sebastian},
date = {2022-01-01},
pages = {1138--1146},
doi = {10.1007/978-3-030-85540-6_146},
abstract = {To inform, guide and optimize the design of cyber-physical systems (CPS), this study examined whether and how delayed visual feedback affects human fine-motor skills. Two experiments are presented, in which participants performed a complex motor task with their hands. During the task, visual feedback was provided on a display with varying delay lengths. Further, to investigate effects of adaptation and transfer, some participants were first exposed to a fixed delay before performing the task with varying delay lengths. Results show that independent of earlier delay exposure feedback delays had detrimental effects on performance, particularly when performance information was lacking and delay length variable. Hand kinematic indicate the use of a strategy geared to a slowing of the movement process. Implications and future research ideas like the application of augmented feedback on movement kinematic and the examination of different settings to promote adaptation effects are discussed.},
isbn = {978-3-030-85539-0},
keywords = {latency}
}
@misc{dobsonDetailsPublicHealth1963,
title = {Details - {{Public Health Image Library}} ({{PHIL}})},
author = {Dobson, Warren},
date = {1963},
publisher = {{CDC}},
url = {https://phil.cdc.gov/details.aspx?pid=12020},
urldate = {2023-02-14},
file = {C\:\\Users\\webma\\Zotero\\storage\\RDQ7SZGM\\details.html}
}
@article{dotovEntrainingChaoticDynamics2018,
title = {Entraining Chaotic Dynamics: {{A}} Novel Movement Sonification Paradigm Could Promote Generalization},
shorttitle = {Entraining Chaotic Dynamics},
author = {Dotov, Dobromir and Froese, Tom},
date = {2018-10},
journaltitle = {Human Movement Science},
shortjournal = {Human Movement Science},
volume = {61},
pages = {27--41},
issn = {01679457},
doi = {10.1016/j.humov.2018.06.016},
url = {https://linkinghub.elsevier.com/retrieve/pii/S0167945717308941},
urldate = {2023-02-14},
langid = {english},
file = {C\:\\Users\\webma\\Zotero\\storage\\IZ7P295W\\Dotov_Froese_2018_Entraining chaotic dynamics.pdf}
}
@article{dubusEvaluationFourModels2012,
title = {Evaluation of Four Models for the Sonification of Elite Rowing},
author = {Dubus, Gaël},
date = {2012-05},
journaltitle = {Journal on Multimodal User Interfaces},
shortjournal = {J Multimodal User Interfaces},
volume = {5},
number = {3-4},
pages = {143--156},
issn = {1783-7677, 1783-8738},
doi = {10.1007/s12193-011-0085-1},
url = {http://link.springer.com/10.1007/s12193-011-0085-1},
urldate = {2023-02-11},
langid = {english},
file = {C\:\\Users\\webma\\Zotero\\storage\\ISNSZYIP\\Dubus - 2012 - Evaluation of four models for the sonification of .pdf}
}
@thesis{dubusInteractiveSonificationMotion2013,
title = {Interactive Sonification of Motion: {{Design}}, Implementation and Control of Expressive Auditory Feedback with Mobile Devices},
shorttitle = {Interactive Sonification of Motion},
author = {Dubus, Gaël},
date = {2013},
institution = {{KTH Royal Institute of Technology}},
url = {http://urn.kb.se/resolve?urn=urn:nbn:se:kth:diva-127944},
urldate = {2022-09-05},
langid = {english},
file = {C\:\\Users\\webma\\Zotero\\storage\\V4JITEWG\\Dubus - 2013 - Interactive sonification of motion Design, imple.pdf;C\:\\Users\\webma\\Zotero\\storage\\94UMW8VZ\\record.html}
}
@inproceedings{dubusSonificationPhysicalQuantities2011,
title = {Sonification of {{Physical Quantities Throughout History}}: {{A Meta-Study}} of {{Previous Mapping Strategies}}},
author = {Dubus, Gael and Bresin, Roberto},
date = {2011-06},
booktitle = {International Conference on Auditory Display, 2011},
pagetotal = {8},
abstract = {We introduce a meta-study of previous sonification designs taking physical quantities as input data. The aim is to build a solid foundation for future sonification works so that auditory display researchers would be able to take benefit from former studies, avoiding to start from scratch when beginning new sonification projects. This work is at an early stage and the objective of this paper is rather to introduce the methodology than to come to definitive conclusions. After a historical introduction, we explain how to collect a large amount of articles and extract useful information about mapping strategies. Then, we present the physical quantities grouped according to conceptual dimensions, as well as the sound parameters used in sonification designs and we summarize the current state of the study by listing the couplings extracted from the article database. A total of 54 articles have been examined for the present article. Finally, a preliminary analysis of the results is performed.},
langid = {english},
keywords = {introduction,linus,luke,useful},
file = {C\:\\Users\\webma\\Zotero\\storage\\R5PXJFRK\\Dubus and Bresin - 2011 - SONIFICATION OF PHYSICAL QUANTITIES THROUGHOUT HIS.pdf}
}
@article{dubusSystematicReviewMapping2013,
title = {A {{Systematic Review}} of {{Mapping Strategies}} for the {{Sonification}} of {{Physical Quantities}}},
author = {Dubus, Gaël and Bresin, Roberto},
date = {2013-12-17},
journaltitle = {PLOS ONE},
shortjournal = {PLOS ONE},
volume = {8},
number = {12},
pages = {e82491},
publisher = {{Public Library of Science}},
issn = {1932-6203},
doi = {10.1371/journal.pone.0082491},
url = {https://journals.plos.org/plosone/article?id=10.1371/journal.pone.0082491},
urldate = {2022-09-05},
abstract = {The field of sonification has progressed greatly over the past twenty years and currently constitutes an established area of research. This article aims at exploiting and organizing the knowledge accumulated in previous experimental studies to build a foundation for future sonification works. A systematic review of these studies may reveal trends in sonification design, and therefore support the development of design guidelines. To this end, we have reviewed and analyzed 179 scientific publications related to sonification of physical quantities. Using a bottom-up approach, we set up a list of conceptual dimensions belonging to both physical and auditory domains. Mappings used in the reviewed works were identified, forming a database of 495 entries. Frequency of use was analyzed among these conceptual dimensions as well as higher-level categories. Results confirm two hypotheses formulated in a preliminary study: pitch is by far the most used auditory dimension in sonification applications, and spatial auditory dimensions are almost exclusively used to sonify kinematic quantities. To detect successful as well as unsuccessful sonification strategies, assessment of mapping efficiency conducted in the reviewed works was considered. Results show that a proper evaluation of sonification mappings is performed only in a marginal proportion of publications. Additional aspects of the publication database were investigated: historical distribution of sonification works is presented, projects are classified according to their primary function, and the sonic material used in the auditory display is discussed. Finally, a mapping-based approach for characterizing sonification is proposed.},
langid = {english},
keywords = {Drug synthesis,Kinematics,Physical mapping,Pitch perception,Sensory perception,Signal filtering,Sonification,Systematic reviews},
file = {C\:\\Users\\webma\\Zotero\\storage\\ILLLBKVQ\\Dubus and Bresin - 2013 - A Systematic Review of Mapping Strategies for the .pdf;C\:\\Users\\webma\\Zotero\\storage\\3DTNDWBP\\article.html}
}
@article{effenbergAccelerationDecelerationConstant2018,
title = {Acceleration and Deceleration at Constant Speed: Systematic Modulation of Motion Perception by Kinematic Sonification},
shorttitle = {Acceleration and Deceleration at Constant Speed},
author = {Effenberg, Alfred O. and Schmitz, Gerd},
date = {2018-08},
journaltitle = {Annals of the New York Academy of Sciences},
shortjournal = {Ann. N.Y. Acad. Sci.},
volume = {1425},
number = {1},
pages = {52--69},
issn = {0077-8923, 1749-6632},
doi = {10.1111/nyas.13693},
url = {https://onlinelibrary.wiley.com/doi/10.1111/nyas.13693},
urldate = {2022-09-01},
langid = {english},
file = {C\:\\Users\\webma\\Zotero\\storage\\QCZDNVFK\\Effenberg and Schmitz - 2018 - Acceleration and deceleration at constant speed s.pdf}
}
@article{effenbergMovementSonificationAudiovisual2011,
title = {Movement {{Sonification}}: {{Audiovisual}} Benefits on Motor Learning},
shorttitle = {Movement {{Sonification}}},
author = {Effenberg, Alfred O. and Fehse, Ursula and Weber, Andreas},
date = {2011},
journaltitle = {BIO Web of Conferences},
shortjournal = {BIO Web of Conferences},
volume = {1},
pages = {00022},
issn = {2117-4458},
doi = {10.1051/bioconf/20110100022},
url = {http://www.bio-conferences.org/10.1051/bioconf/20110100022},
urldate = {2023-02-10},
file = {C\:\\Users\\webma\\Zotero\\storage\\4MFF2998\\Effenberg et al. - 2011 - Movement Sonification Audiovisual benefits on mot.pdf}
}
@article{effenbergMovementSonificationEffects2005,
title = {Movement {{Sonification}}: {{Effects}} on {{Perception}} and {{Action}}},
shorttitle = {Movement {{Sonification}}},
author = {Effenberg, Alfred O.},
date = {2005-04},
journaltitle = {IEEE Multimedia},
shortjournal = {IEEE Multimedia},
volume = {12},
number = {2},
pages = {53--59},
issn = {1070-986X},
doi = {10.1109/MMUL.2005.31},
url = {http://ieeexplore.ieee.org/document/1423934/},
urldate = {2022-12-30},
langid = {english},
file = {C\:\\Users\\webma\\Zotero\\storage\\7U59MJ5B\\Effenberg - 2005 - Movement Sonification Effects on Perception and A.pdf}
}
@article{ferrari-tonioloTwoBrainsAction2019,
title = {Two Brains in Action: {{Joint-action}} Coding in the Primate Frontal Cortex},
shorttitle = {Two Brains in Action},
author = {Ferrari-Toniolo, S. and Visco-Comandini, F. and Battaglia-Mayer, A.},
date = {2019-02-25},
journaltitle = {The Journal of Neuroscience},
shortjournal = {J. Neurosci.},
pages = {1512--18},
issn = {0270-6474, 1529-2401},
doi = {10.1523/JNEUROSCI.1512-18.2019},
url = {https://www.jneurosci.org/lookup/doi/10.1523/JNEUROSCI.1512-18.2019},
urldate = {2023-02-14},
langid = {english},
file = {C\:\\Users\\webma\\Zotero\\storage\\5QIHYL6C\\Ferrari-Toniolo et al_2019_Two brains in action.pdf}
}
@article{galantucciExperimentalSemioticsNew2009,
title = {Experimental {{Semiotics}}: {{A New Approach}} for {{Studying Communication}} as a {{Form}} of {{Joint Action}}},
shorttitle = {Experimental {{Semiotics}}},
author = {Galantucci, Bruno},
date = {2009-04},
journaltitle = {Topics in Cognitive Science},
volume = {1},
number = {2},
pages = {393--410},
issn = {17568757, 17568765},
doi = {10.1111/j.1756-8765.2009.01027.x},
url = {https://onlinelibrary.wiley.com/doi/10.1111/j.1756-8765.2009.01027.x},
urldate = {2023-02-12},
langid = {english},
file = {C\:\\Users\\webma\\Zotero\\storage\\G9GJK8IS\\Galantucci - 2009 - Experimental Semiotics A New Approach for Studyin.pdf}
}
@inproceedings{gelerDynamicTimeWarping2019,
title = {Dynamic {{Time Warping}}: {{Itakura}} vs {{Sakoe-Chiba}}},
shorttitle = {Dynamic {{Time Warping}}},
booktitle = {2019 {{IEEE International Symposium}} on {{INnovations}} in {{Intelligent SysTems}} and {{Applications}} ({{INISTA}})},
author = {Geler, Zoltan and Kurbalija, Vladimir and Ivanovic, Mirjana and Radovanovic, Milos and Dai, Weihui},
date = {2019-07},
pages = {1--6},
publisher = {{IEEE}},
location = {{Sofia, Bulgaria}},
doi = {10.1109/INISTA.2019.8778300},
url = {https://ieeexplore.ieee.org/document/8778300/},
urldate = {2023-02-14},
eventtitle = {2019 {{IEEE International Symposium}} on {{INnovations}} in {{Intelligent SysTems}} and {{Applications}} ({{INISTA}})},
isbn = {978-1-72811-862-8}
}
@incollection{gilkeyAuditoryMotionPerception1997,
title = {Auditory {{Motion Perception}}: {{Snapshots Revisited}}},
booktitle = {Binaural and {{Spatial Hearing}} in {{Real}} and {{Virtual Environments}}},
editor = {Gilkey, Robert and Anderson, Timothy R.},
date = {1997},
pages = {295--313},
publisher = {{Psychology Press}},
doi = {10.4324/9781315806341},
url = {https://www.taylorfrancis.com/books/9781317780267},
urldate = {2023-02-14},
isbn = {978-1-317-78026-7},
langid = {english}
}
@incollection{hildebrandtShortPaperEnhancing2014,
title = {Short {{Paper}}: {{Towards Enhancing Business Process Monitoring}} with {{Sonification}}},
shorttitle = {Short {{Paper}}},
booktitle = {Business {{Process Management Workshops}}},
author = {Hildebrandt, Tobias},
editor = {Lohmann, Niels and Song, Minseok and Wohed, Petia},
date = {2014},
series = {Lecture {{Notes}} in {{Business Information Processing}}},
volume = {171},
pages = {529--536},
publisher = {{Springer International Publishing}},
location = {{Cham}},
doi = {10.1007/978-3-319-06257-0_42},
url = {https://link.springer.com/10.1007/978-3-319-06257-0_42},
urldate = {2023-02-11},
isbn = {978-3-319-06256-3 978-3-319-06257-0},
langid = {english}
}
@article{hochDancingTogetherInfant2021,
title = {“{{Dancing}}” {{Together}}: {{Infant}}–{{Mother Locomotor Synchrony}}},
shorttitle = {“{{Dancing}}” {{Together}}},
author = {Hoch, Justine E. and Ossmy, Ori and Cole, Whitney G. and Hasan, Shohan and Adolph, Karen E.},
date = {2021-07},
journaltitle = {Child Development},
shortjournal = {Child Dev},
volume = {92},
number = {4},
pages = {1337--1353},
issn = {0009-3920, 1467-8624},
doi = {10.1111/cdev.13513},
url = {https://onlinelibrary.wiley.com/doi/10.1111/cdev.13513},
urldate = {2023-02-14},
langid = {english},
file = {C\:\\Users\\webma\\Zotero\\storage\\FSUT6D7Y\\Hoch et al_2021_“Dancing” Together.pdf}
}
@article{hwangEffectPerformanceBasedAuditory2018,
title = {Effect- and {{Performance-Based Auditory Feedback}} on {{Interpersonal Coordination}}},
author = {Hwang, Tong-Hun and Schmitz, Gerd and Klemmt, Kevin and Brinkop, Lukas and Ghai, Shashank and Stoica, Mircea and Maye, Alexander and Blume, Holger and Effenberg, Alfred O.},
date = {2018},
journaltitle = {Frontiers in Psychology},
volume = {9},
issn = {1664-1078},
doi = {10.3389/fpsyg.2018.00404},
url = {https://www.frontiersin.org/articles/10.3389/fpsyg.2018.00404},
urldate = {2022-09-01},
abstract = {When two individuals interact in a collaborative task, such as carrying a sofa or a table, usually spatiotemporal coordination of individual motor behavior will emerge. In many cases, interpersonal coordination can arise independently of verbal communication, based on the observation of the partners' movements and/or the object's movements. In this study, we investigate how social coupling between two individuals can emerge in a collaborative task under different modes of perceptual information. A visual reference condition was compared with three different conditions with new types of additional auditory feedback provided in real time: effect-based auditory feedback, performance-based auditory feedback, and combined effect/performance-based auditory feedback. We have developed a new paradigm in which the actions of both participants continuously result in a seamlessly merged effect on an object simulated by a tablet computer application. Here, participants should temporally synchronize their movements with a 90° phase difference and precisely adjust the finger dynamics in order to keep the object (a ball) accurately rotating on a given circular trajectory on the tablet. Results demonstrate that interpersonal coordination in a joint task can be altered by different kinds of additional auditory information in various ways.},
keywords = {useful},
file = {C\:\\Users\\webma\\Zotero\\storage\\IQ3HXJFR\\Hwang et al. - 2018 - Effect- and Performance-Based Auditory Feedback on.pdf}
}
@article{jooMetricsDescribingDyadic2018,
title = {Metrics for Describing Dyadic Movement: A Review},
shorttitle = {Metrics for Describing Dyadic Movement},
author = {Joo, Rocio and Etienne, Marie-Pierre and Bez, Nicolas and Mahévas, Stéphanie},
date = {2018-12},
journaltitle = {Movement Ecology},
shortjournal = {Mov Ecol},
volume = {6},
number = {1},
pages = {26},
issn = {2051-3933},
doi = {10.1186/s40462-018-0144-2},
url = {https://movementecologyjournal.biomedcentral.com/articles/10.1186/s40462-018-0144-2},
urldate = {2023-02-08},
langid = {english},
file = {C\:\\Users\\webma\\Zotero\\storage\\ICDN2LSN\\Joo et al_2018_Metrics for describing dyadic movement.pdf}
}
@incollection{keetels2012perception,
title = {Perception of Synchrony between the Senses},
booktitle = {The Neural Bases of Multisensory Processes},
author = {Keetels, Mirjam and Vroomen, Jean},
editor = {Murray, M. M. and Wallace, M. T.},
date = {2012},
publisher = {{CRC Press/Taylor \& Francis}},
location = {{Boca Raton (FL)}},
url = {https://www.ncbi.nlm.nih.gov/books/NBK92837/}
}
@article{kellerMusicalMeterAttention2005,
title = {Musical {{Meter}} in {{Attention}} to {{Multipart Rhythm}}},
author = {Keller, Peter E. and Burnham, Denis K.},
date = {2005-04-01},
journaltitle = {Music Perception},
volume = {22},
number = {4},
pages = {629--661},
issn = {0730-7829, 1533-8312},
doi = {10.1525/mp.2005.22.4.629},
url = {https://online.ucpress.edu/mp/article/22/4/629/62193/Musical-Meter-in-Attention-to-Multipart-Rhythm},
urldate = {2023-02-12},
abstract = {Performing in musical ensembles can be viewed as a dual task that requires simultaneous attention to a high priority “target” auditory pattern (e.g., a performer's own part) and either (a) another part in the ensemble or (b) the aggregate texture that results when all parts are integrated. The current study tested the hypothesis that metric frameworks (rhythmic schemas) promote the efficient allocation of attentional resources in such multipart musical contexts. Experiment 1 employed a recognition memory paradigm to investigate the effects of attending to metrical versus nonmetrical target patterns upon the perception of aggregate patterns in which they were embedded. Experiment 2 required metrical and nonmetrical target patterns to be reproduced while memorizing different, concurrently presented metrical patterns that were also subsequently reproduced. Both experiments included conditions in which the different patterns within the multipart structure were matched or mismatched in terms of best-fitting meter. Results indicate that dual-task performance was best in matched-metrical conditions, intermediate in mismatched-metrical conditions, and worst in nonmetrical conditions. This suggests that metric frameworks may facilitate complex musical interactions by enabling efficient allocation of attentional resources.},
langid = {english}
}
@article{kellerPianistsDuetBetter2007,
title = {Pianists Duet Better When They Play with Themselves: {{On}} the Possible Role of Action Simulation in Synchronization},
shorttitle = {Pianists Duet Better When They Play with Themselves},
author = {Keller, Peter E. and Knoblich, Günther and Repp, Bruno H.},
date = {2007-03},
journaltitle = {Consciousness and Cognition},
shortjournal = {Consciousness and Cognition},
volume = {16},
number = {1},
pages = {102--111},
issn = {10538100},
doi = {10.1016/j.concog.2005.12.004},
url = {https://linkinghub.elsevier.com/retrieve/pii/S1053810005001613},
urldate = {2022-12-22},
langid = {english},
keywords = {extracted,linus},
file = {C\:\\Users\\webma\\Zotero\\storage\\9N9QBGQ4\\Keller et al. - 2007 - Pianists duet better when they play with themselve.pdf}
}
@article{kellerRhythmJointAction2014,
title = {Rhythm in Joint Action: Psychological and Neurophysiological Mechanisms for Real-Time Interpersonal Coordination},
shorttitle = {Rhythm in Joint Action},
author = {Keller, Peter E. and Novembre, Giacomo and Hove, Michael J.},
date = {2014-12-19},
journaltitle = {Philosophical Transactions of the Royal Society B: Biological Sciences},
volume = {369},
number = {1658},
pages = {20130394},
publisher = {{Royal Society}},
doi = {10.1098/rstb.2013.0394},
url = {https://royalsocietypublishing.org/doi/full/10.1098/rstb.2013.0394},
urldate = {2022-12-14},
abstract = {Human interaction often requires simultaneous precision and flexibility in the coordination of rhythmic behaviour between individuals engaged in joint activity, for example, playing a musical duet or dancing with a partner. This review article addresses the psychological processes and brain mechanisms that enable such rhythmic interpersonal coordination. First, an overview is given of research on the cognitive-motor processes that enable individuals to represent joint action goals and to anticipate, attend and adapt to other's actions in real time. Second, the neurophysiological mechanisms that underpin rhythmic interpersonal coordination are sought in studies of sensorimotor and cognitive processes that play a role in the representation and integration of self- and other-related actions within and between individuals' brains. Finally, relationships between social–psychological factors and rhythmic interpersonal coordination are considered from two perspectives, one concerning how social-cognitive tendencies (e.g. empathy) affect coordination, and the other concerning how coordination affects interpersonal affiliation, trust and prosocial behaviour. Our review highlights musical ensemble performance as an ecologically valid yet readily controlled domain for investigating rhythm in joint action.},
keywords = {interpersonal coordination,joint action,linus,musical ensembles,rhythm,sensorimotor synchronization,social neuroscience},
file = {C\:\\Users\\webma\\Zotero\\storage\\PALZG7J6\\Keller et al_2014_Rhythm in joint action.pdf}
}
@inproceedings{kimotoDesignImplementationStetho2002,
title = {Design and Implementation of Stetho: {{Network}} Sonification System},
booktitle = {{{ICMC}}},
author = {Kimoto, Masahiko and Ohno, Hiroyuki},
date = {2002}
}
@article{knoblichActionCoordinationGroups2003,
title = {Action Coordination in Groups and Individuals: {{Learning}} Anticipatory Control.},
shorttitle = {Action Coordination in Groups and Individuals},
author = {Knoblich, Günther and Jordan, Jerome Scott},
date = {2003},
journaltitle = {Journal of Experimental Psychology: Learning, Memory, and Cognition},
shortjournal = {Journal of Experimental Psychology: Learning, Memory, and Cognition},
volume = {29},
number = {5},
pages = {1006--1016},
issn = {1939-1285, 0278-7393},
doi = {10.1037/0278-7393.29.5.1006},
url = {http://doi.apa.org/getdoi.cfm?doi=10.1037/0278-7393.29.5.1006},
urldate = {2022-12-22},
langid = {english},
keywords = {extracted,linus},
file = {C\:\\Users\\webma\\Zotero\\storage\\NDC9GIFC\\Knoblich and Jordan - 2003 - Action coordination in groups and individuals Lea.pdf}
}
@incollection{knoblichPsychologicalResearchJoint2011,
title = {Psychological {{Research}} on {{Joint Action}}},
booktitle = {Psychology of {{Learning}} and {{Motivation}}},
author = {Knoblich, Günther and Butterfill, Stephen and Sebanz, Natalie},
date = {2011},
volume = {54},
pages = {59--101},
publisher = {{Elsevier}},
doi = {10.1016/B978-0-12-385527-5.00003-6},
url = {https://linkinghub.elsevier.com/retrieve/pii/B9780123855275000036},
urldate = {2022-12-18},
isbn = {978-0-12-385527-5},
langid = {english},
keywords = {extracted,linus},
file = {C\:\\Users\\webma\\Zotero\\storage\\LEBQDHQ2\\Knoblich et al. - 2011 - Psychological Research on Joint Action.pdf}
}
@inproceedings{kosBiofeedbackSportChallenges2015,
title = {Biofeedback in Sport: {{Challenges}} in Real-Time Motion Tracking and Processing},
shorttitle = {Biofeedback in Sport},
booktitle = {2015 {{IEEE}} 15th {{International Conference}} on {{Bioinformatics}} and {{Bioengineering}} ({{BIBE}})},
author = {Kos, Anton and Umek, Anton and Tomazic, Saso},
date = {2015-11},
pages = {1--4},
doi = {10.1109/BIBE.2015.7367681},
abstract = {Science and technology are ever more frequently used in sports for achieving the competitive advantage. Motion tracking systems, in connection to the biomechanical biofeedback, help in accelerating motor learning. Requirements about various parameters important in real-time biofeedback applications are discussed. Special focus is given on feedback loop delays and its real-time operation. Optical tracking and inertial sensor tracking systems are presented and compared. Real-time sensor signal acquisitions and real-time processing challenges, in connection to biomechanical biofeedback, are presented. This paper can serve as a starting point for determining the adequate combination of technical equipment and its specifications that work favorably for the operation of the planned real-time biofeedback application.},
eventtitle = {2015 {{IEEE}} 15th {{International Conference}} on {{Bioinformatics}} and {{Bioengineering}} ({{BIBE}})},
keywords = {Biological control systems,Biomedical optical imaging,Delays,Feedback loop,Gyroscopes,introduction,latency,Real-time systems,Tracking},
file = {C\:\\Users\\webma\\Zotero\\storage\\PYVTBGBM\\Kos et al. - 2015 - Biofeedback in sport Challenges in real-time moti.pdf;C\:\\Users\\webma\\Zotero\\storage\\TVQCQ34F\\7367681.html}
}
@article{kosinskiLiteratureReviewReaction2008,
title = {A {{Literature Review}} on {{Reaction Time}}},
author = {Kosinski, Robert J.},
date = {2008},
journaltitle = {Clemson University},
volume = {10},
number = {1},
pages = {337--344}
}
@article{kourtisAttentionAllocationTask2014,
title = {Attention {{Allocation}} and {{Task Representation}} during {{Joint Action Planning}}},
author = {Kourtis, Dimitrios and Knoblich, Günther and Woźniak, Mateusz and Sebanz, Natalie},
date = {2014-10-01},
journaltitle = {Journal of Cognitive Neuroscience},
volume = {26},
number = {10},
pages = {2275--2286},
issn = {0898-929X, 1530-8898},
doi = {10.1162/jocn_a_00634},
url = {https://direct.mit.edu/jocn/article/26/10/2275/28187/Attention-Allocation-and-Task-Representation},
urldate = {2022-12-14},
abstract = {We investigated whether people take into account an interaction partner's attentional focus and whether they represent in advance their partner's part of the task when planning to engage in a synchronous joint action. The experiment involved two participants planning and performing joint actions (i.e., synchronously lifting and clinking glasses), unimanual individual actions (i.e., lifting and moving a glass as if clinking with another person), and bimanual individual actions. EEG was recorded from one of the participants. We employed a choice reaction paradigm where a visual cue indicated the type of action to be planned, followed 1.5 sec later by a visual go stimulus, prompting the participants to act. We studied attention allocation processes by examining two lateralized EEG components, namely the anterior directing attention negativity and the late directing attention positivity. Action planning processes were examined using the late contingent negative variation and the movement-related potential. The results show that early stages of joint action planning involve dividing attention between locations in space relevant for one's own part of the joint action and locations relevant for one's partner's part of the joint action. At later stages of joint action planning, participants represented in advance their partner's upcoming action in addition to their own action, although not at an effector-specific level. Our study provides electrophysiological evidence supporting the operation of attention sharing processes and predictive self/other action representation during the planning phase of a synchronous joint task.},
langid = {english},
file = {C\:\\Users\\webma\\Zotero\\storage\\LEAUPDEW\\Kourtis et al. - 2014 - Attention Allocation and Task Representation durin.pdf}
}
@article{kourtisPredictiveRepresentationOther2012,
title = {Predictive Representation of Other People's Actions in Joint Action Planning: {{An EEG}} Study},
shorttitle = {Predictive Representation of Other People's Actions in Joint Action Planning},
author = {Kourtis, Dimitrios and Sebanz, N and Knoblich, G},
date = {2012-06-06},
journaltitle = {Social Neuroscience},
shortjournal = {Social Neuroscience},
volume = {8},
doi = {10.1080/17470919.2012.694823},
abstract = {It has been postulated that when people engage in joint actions they form internal representations not only of their part of the joint task but of their co-actors' parts of the task as well. However, empirical evidence for this claim is scarce. By means of high-density electroencephalography, this study investigated whether one represents and simulates the action of an interaction partner when planning to perform a joint action. The results showed that joint action planning compared with individual action planning resulted in amplitude modulations of the frontal P3a and parietal P3b event-related potentials, which are associated with stimulus classification, updating of representations, and decision-making. Moreover, there was evidence for anticipatory motor simulation of the partner's action in the amplitude and peak latency of the late, motor part of the Contingent Negative Variation, which was correlated with joint action performance. Our results provide evidence that when people engage in joint tasks, they represent in advance each other's actions in order to facilitate coordination.},
keywords = {extracted,introduction,linus},
file = {C\:\\Users\\webma\\Zotero\\storage\\7JHZ2WQC\\Kourtis et al. - 2012 - Predictive representation of other people's action.pdf}
}
@misc{kramerSonificationReportStatus1999,
title = {Sonification {{Report}}: {{Status}} of the {{Field}} and {{Research Agenda}}},
author = {Kramer, Gregory and Walker, Bruce and Bonebright, Terri and Cook, Perry},
date = {1999},
publisher = {{International Community for Auditory Display}},
keywords = {highlighted,introduction,linus},
file = {C\:\\Users\\webma\\Zotero\\storage\\RDTBT4TR\\Kramer et al. - 1999 - Sonification Report Status of the Field and Resea.pdf}
}
@inproceedings{krascekWebBasedElearning2015,
title = {Web {{Based E-learning Tool}} for {{Visualization}} and {{Analysis}} of {{3D Motion Capture Data}}},
author = {Krašček, Andraž and Stojmenova, Kristina and Tomažič, Sašo and Sodnik, Jaka},
date = {2015},
booktitle = {ACHI 2015},
pages = {143},
abstract = {In this paper, we propose an e-learning tool for visualization and manipulation of 3D data on a web platform. The data is streamed in real time from an optical motion capture system Qualisys consisting of eight infrared cameras and Qualisys Track Manager (QTM) software. A WebSocket protocol and WebGL application programming interface (API) are used to visualize and to interact with the data in a browser. The tool represents a web-based extension of QTM software providing also additional features and new possibilities to manipulate and analyze the data. We report also on a user study in which we evaluated the web based application and compared it with the original desktop-based application. The proposed application proved to be fast, effective and intuitive and can be used as an e-learning tool for demonstrating and teaching techniques for visualization and analysis of motion capture data.},
keywords = {latency},
file = {C\:\\Users\\webma\\Zotero\\storage\\4JMEH4XF\\Krašček et al. - Web Based E-learning Tool for Visualization and An.pdf;C\:\\Users\\webma\\Zotero\\storage\\5QD5TBKB\\download.html}
}
@article{loehrMonitoringIndividualJoint2013,
title = {Monitoring {{Individual}} and {{Joint Action Outcomes}} in {{Duet Music Performance}}},
author = {Loehr, Janeen D. and Kourtis, Dimitrios and Vesper, Cordula and Sebanz, Natalie and Knoblich, Günther},
date = {2013-07-01},
journaltitle = {Journal of Cognitive Neuroscience},
volume = {25},
number = {7},
pages = {1049--1061},
issn = {0898-929X, 1530-8898},
doi = {10.1162/jocn_a_00388},
url = {https://direct.mit.edu/jocn/article/25/7/1049/27943/Monitoring-Individual-and-Joint-Action-Outcomes-in},
urldate = {2022-12-02},
abstract = {We investigated whether people monitor the outcomes of their own and their partners' individual actions as well as the outcome of their combined actions when performing joint actions together. Pairs of pianists memorized both parts of a piano duet. Each pianist then performed one part while their partner performed the other; EEG was recorded from both. Auditory outcomes (pitches) associated with keystrokes produced by the pianists were occasionally altered in a way that either did or did not affect the joint auditory outcome (i.e., the harmony of a chord produced by the two pianists' combined pitches). Altered auditory outcomes elicited a feedback-related negativity whether they occurred in the pianist's own part or the partner's part, and whether they affected individual or joint action outcomes. Altered auditory outcomes also elicited a P300 whose amplitude was larger when the alteration affected the joint outcome compared with individual outcomes and when the alteration affected the pianist's own part compared with the partner's part. Thus, musicians engaged in joint actions monitor their own and their partner's actions as well as their combined action outcomes, while at the same time maintaining a distinction between their own and others' actions and between individual and joint outcomes.},
langid = {english},
keywords = {extracted,introduction,linus},
file = {C\:\\Users\\webma\\Zotero\\storage\\8KJ3IZXM\\Loehr et al. - 2013 - Monitoring Individual and Joint Action Outcomes in.pdf}
}
@article{loehrSoundYouMe2016,
title = {The Sound of You and Me: {{Novices}} Represent Shared Goals in Joint Action},
shorttitle = {The Sound of You and Me},
author = {Loehr, Janeen D. and Vesper, Cordula},
date = {2016-03},
journaltitle = {Quarterly Journal of Experimental Psychology},
shortjournal = {Quarterly Journal of Experimental Psychology},
volume = {69},
number = {3},
pages = {535--547},
issn = {1747-0218, 1747-0226},
doi = {10.1080/17470218.2015.1061029},
url = {http://journals.sagepub.com/doi/10.1080/17470218.2015.1061029},
urldate = {2022-12-18},
abstract = {People performing joint actions coordinate their individual actions with each other to achieve a shared goal. The current study investigated the mental representations that are formed when people learn a new skill as part of a joint action. In a musical transfer-of-learning paradigm, piano novices first learned to perform simple melodies in the joint action context of coordinating with an accompanist to produce musical duets. Participants then performed their previously learned actions with two types of auditory feedback: while hearing either their individual action goal (the melody) or the shared action goal (the duet). As predicted, participants made more performance errors in the individual goal condition than in the shared goal condition. Further experimental manipulations indicated that this difference was not due to different coordination requirements in the two conditions or perceptual dissimilarities between learning and test. Together, these findings indicate that people form representations of shared goals in contexts that promote minimal representations, such as when learning a new action together with another person.},
langid = {english},
keywords = {extracted,linus},
file = {C\:\\Users\\webma\\Zotero\\storage\\UMASTSSV\\Loehr and Vesper - 2016 - The sound of you and me Novices represent shared .pdf}
}
@article{loehrTemporalCoordinationPerforming2011,
title = {Temporal {{Coordination}} between {{Performing Musicians}}},
author = {Loehr, Janeen D. and Palmer, Caroline},
date = {2011-11},
journaltitle = {Quarterly Journal of Experimental Psychology},
shortjournal = {Quarterly Journal of Experimental Psychology},
volume = {64},
number = {11},
pages = {2153--2167},
issn = {1747-0218, 1747-0226},
doi = {10.1080/17470218.2011.603427},
url = {http://journals.sagepub.com/doi/10.1080/17470218.2011.603427},
urldate = {2023-02-12},
abstract = {Many common behaviours require people to coordinate the timing of their actions with the timing of others' actions. We examined whether representations of musicians' actions are activated in coperformers with whom they must coordinate their actions in time and whether coperformers simulate each other's actions using their own motor systems during temporal coordination. Pianists performed right-hand melodies along with simple or complex left-hand accompaniments produced by themselves or by another pianist. Individual performers' preferred performance rates were measured in solo performance of the right-hand melody. The complexity of the left-hand accompaniment influenced the temporal grouping structure of the right-hand melody in the same way when it was performed by the self or by the duet partner, providing some support for the action corepresentation hypothesis. In contrast, accompaniment complexity had little influence on temporal coordination measures (asynchronies and cross-correlations between parts). Temporal coordination measures were influenced by a priori similarities between partners' preferred rates; partners who had similar preferred rates in solo performance were better synchronized and showed mutual adaptation to each other's timing during duet performances. These findings extend previous findings of action corepresentation and action simulation to a task that requires precise temporal coordination of independent yet simultaneous actions.},
langid = {english}
}
@article{lottoPsychologyAuditoryPerception2011,
title = {Psychology of Auditory Perception},
author = {Lotto, Andrew and Holt, Lori},
date = {2011-09},
journaltitle = {WIREs Cognitive Science},
shortjournal = {WIREs Cogn Sci},
volume = {2},
number = {5},
pages = {479--489},
issn = {1939-5078, 1939-5086},
doi = {10.1002/wcs.123},
url = {https://onlinelibrary.wiley.com/doi/10.1002/wcs.123},
urldate = {2023-02-09},
langid = {english}
}
@article{marshSocialConnectionJoint2009,
title = {Social {{Connection Through Joint Action}} and {{Interpersonal Coordination}}},
author = {Marsh, Kerry L. and Richardson, Michael J. and Schmidt, R. C.},
date = {2009-04},
journaltitle = {Topics in Cognitive Science},
volume = {1},
number = {2},
pages = {320--339},
issn = {1756-8757, 1756-8765},
doi = {10.1111/j.1756-8765.2009.01022.x},
url = {https://onlinelibrary.wiley.com/doi/10.1111/j.1756-8765.2009.01022.x},
urldate = {2022-11-02},
langid = {english}
}
@article{mcdermottMusicalIntervalsRelative2010,
title = {Musical Intervals and Relative Pitch: {{Frequency}} Resolution, Not Interval Resolution, Is Special},
shorttitle = {Musical Intervals and Relative Pitch},
author = {McDermott, Josh H. and Keebler, Michael V. and Micheyl, Christophe and Oxenham, Andrew J.},
date = {2010-10},
journaltitle = {The Journal of the Acoustical Society of America},
shortjournal = {J. Acoust. Soc. Am.},
volume = {128},
number = {4},
pages = {1943--1951},
issn = {0001-4966},
doi = {10.1121/1.3478785},
url = {http://scitation.aip.org/content/asa/journal/jasa/128/4/10.1121/1.3478785},
urldate = {2023-02-07},
langid = {english}
}
@article{mcellinSynchronicitiesThatShape2020,
title = {Synchronicities That Shape the Perception of Joint Action},
author = {McEllin, Luke and Knoblich, Günther and Sebanz, Natalie},
date = {2020-09-23},
journaltitle = {Scientific Reports},
shortjournal = {Sci Rep},
volume = {10},
number = {1},
pages = {15554},
issn = {2045-2322},
doi = {10.1038/s41598-020-72729-6},
url = {https://www.nature.com/articles/s41598-020-72729-6},
urldate = {2022-10-09},
abstract = {In joint performances spanning from jazz improvisation to soccer, expert performers synchronize their movements in ways that novices cannot. Particularly, experts can align the velocity profiles of their movements in order to achieve synchrony on a fine-grained time scale, compared to novices who can only synchronize the duration of their movement intervals. This study investigated how experts’ ability to engage in velocity-based synchrony affects observers’ perception of coordination and their aesthetic experience of joint performances. Participants observed two moving dots on a screen and were told that these reflect the hand movements of two performers engaging in joint improvisation. The dots were animated to reflect the velocity-based synchrony characteristic of expert performance (in terms of jitter of the velocity profile: Experiment 1, or through aligning sharpness of the velocity profile: Experiment 2) or contained only interval-based synchrony. Performances containing velocity-based synchrony were judged as more coordinated with performers rated as liking each other more, and were rated as more beautiful, providing observers with a stronger aesthetic experience. These findings demonstrate that subtle timing cues fundamentally shape the experience of watching joint actions, directly influencing how beautiful and enjoyable we find these interactions, as well as our perception of the relationship between co-actors.},
langid = {english},
keywords = {Neuroscience,Psychology}
}
@inproceedings{mcphersonActionSoundLatencyAre2016,
title = {Action-{{Sound Latency}}: {{Are Our Tools Fast Enough}}?},
shorttitle = {Action-{{Sound Latency}}},
author = {McPherson, A. P. and Jack, R. H. and Moro, G.},
eventtitle = {International {{Conference}} on {{New Interfaces}} for {{Musical Expression}} ({{NIME}})},
location = {{Brisbane}},
date = {2016-07-11},
publisher = {{Griffith University}},
url = {https://qmro.qmul.ac.uk/xmlui/handle/123456789/12479},
urldate = {2022-10-31},
abstract = {The importance of low and consistent latency in interactive music systems is well-established. So how do commonly-used tools for creating digital musical instruments and other tangible interfaces perform in terms of latency from user action to sound output? This paper examines several common configurations where a microcontroller (e.g. Arduino) or wireless device communicates with computer-based sound generator (e.g. Max/MSP, Pd). We find that, perhaps surprisingly, almost none of the tested configurations meet generally-accepted guidelines for latency and jitter. To address this limitation, the paper presents a new embedded platform, Bela, which is capable of complex audio and sensor processing at submillisecond latency.},
langid = {english}
}
@inproceedings{mcphersonEnvironmentSubmillisecondLatencyAudio2015,
title = {An {{Environment}} for {{Submillisecond-Latency Audio}} and {{Sensor Processing}} on {{BeagleBone Black}}},
author = {McPherson, Andrew and Zappi, Victor},
date = {2015-05-06},
publisher = {{Audio Engineering Society}},
url = {https://www.aes.org/e-lib/browse.cfm?elib=17755},
urldate = {2022-10-31},
abstract = {This paper presents a new environment for ultra-low-latency processing of audio and sensor data on embedded hardware. The platform, which is targeted at digital musical instruments and audio effects, is based on the low-cost BeagleBone Black single-board computer. A custom expansion board features stereo audio and 8 channels each of 16-bit ADC and 16-bit DAC for sensors and actuators. In contrast to typical embedded Linux approaches, the platform uses the Xenomai real-time kernel extensions to...},
eventtitle = {Audio {{Engineering Society Convention}} 138},
langid = {english}
}
@inbook{mullerDynamicTimeWarping2007,
title = {Dynamic {{Time Warping}}},
booktitle = {Information {{Retrieval}} for {{Music}} and {{Motion}}},
date = {2007},
pages = {69--84},
publisher = {{Springer Berlin Heidelberg}},
location = {{Berlin, Heidelberg}},
doi = {10.1007/978-3-540-74048-3_4},
url = {http://link.springer.com/10.1007/978-3-540-74048-3_4},
urldate = {2023-02-14},
bookauthor = {Müller, Meinard},
isbn = {978-3-540-74047-6 978-3-540-74048-3},
langid = {english}
}
@article{mullerHowOrchestrateSoccer2022,
title = {How to Orchestrate a Soccer Team: {{Generalized}} Synchronization Promoted by Rhythmic Acoustic Stimuli},
shorttitle = {How to Orchestrate a Soccer Team},
author = {Müller, Manfred A. and Martínez-Guerrero, Antonieta and Corsi-Cabrera, Maria and Effenberg, Alfred O. and Friedrich, Armin and Garcia-Madrid, Ignacio and Hornschuh, Matthias and Schmitz, Gerd and Müller, Markus F.},
date = {2022},
journaltitle = {Frontiers in Human Neuroscience},
volume = {16},
issn = {1662-5161},
doi = {10.3389/fnhum.2022.909939},
url = {https://www.frontiersin.org/articles/10.3389/fnhum.2022.909939},
urldate = {2022-09-01},
abstract = {Interpersonal coordination requires precise actions concerted in space and time in a self-organized manner. We found, using soccer teams as a testing ground, that a common timeframe provided by adequate acoustic stimuli improves the interplay between teammates. We provide quantitative evidence that the connectivity between teammates and the scoring rate of male soccer teams improve significantly when playing under the influence of an appropriate acoustic environment. Unexpectedly, female teams do not show any improvement under the same experimental conditions. We show by follow-up experiments that the acoustic rhythm modulates the attention level of the participants with a pronounced tempo preference and a marked gender difference in the preferred tempo. These results lead to a consistent explanation in terms of the dynamical system theory, nonlinear resonances, and dynamic attention theory, which may illuminate generic mechanisms of the brain dynamics and may have an impact on the design of novel training strategies in team sports.},
}
@article{noyBeingZonePhysiological2015,
title = {Being in the Zone: Physiological Markers of Togetherness in Joint Improvisation},
shorttitle = {Being in the Zone},
author = {Noy, Lior and Levit-Binun, Nava and Golland, Yulia},
date = {2015-05-05},
journaltitle = {Frontiers in Human Neuroscience},
shortjournal = {Front. Hum. Neurosci.},
volume = {9},
issn = {1662-5161},
doi = {10.3389/fnhum.2015.00187},
url = {http://www.frontiersin.org/Human_Neuroscience/10.3389/fnhum.2015.00187/abstract},
urldate = {2022-10-09}
}
@article{noyMirrorGameParadigm2011,
title = {The Mirror Game as a Paradigm for Studying the Dynamics of Two People Improvising Motion Together},
author = {Noy, Lior and Dekel, Erez and Alon, Uri},
date = {2011-12-27},
journaltitle = {Proceedings of the National Academy of Sciences},
shortjournal = {Proc. Natl. Acad. Sci. U.S.A.},
volume = {108},
number = {52},
pages = {20947--20952},
issn = {0027-8424, 1091-6490},
doi = {10.1073/pnas.1108155108},
url = {https://pnas.org/doi/full/10.1073/pnas.1108155108},
urldate = {2022-10-09},
abstract = {Joint improvisation is the creative action of two or more people without a script or designated leader. Examples include improvisational theater and music, and day-to-day activities such as conversations. In joint improvisation, novel action is created, emerging from the interaction between people. Although central to creative processes and social interaction, joint improvisation remains largely unexplored due to the lack of experimental paradigms. Here we introduce a paradigm based on a theater practice called the mirror game. We measured the hand motions of two people mirroring each other at high temporal and spatial resolution. We focused on expert actors and musicians skilled in joint improvisation. We found that players can jointly create novel complex motion without a designated leader, synchronized to less than 40 ms. In contrast, we found that designating one player as leader deteriorated performance: The follower showed 2–3 Hz oscillation around the leader's smooth trajectory, decreasing synchrony and reducing the range of velocities reached. A mathematical model suggests a mechanism for these observations based on mutual agreement on future motion in mirrored reactive–predictive controllers. This is a step toward understanding the human ability to create novelty by improvising together.},
langid = {english}
}
@article{pedersenRelationshipInstantaneousPhase2018,
title = {On the Relationship between Instantaneous Phase Synchrony and Correlation-Based Sliding Windows for Time-Resolved {{fMRI}} Connectivity Analysis},
author = {Pedersen, Mangor and Omidvarnia, Amir and Zalesky, Andrew and Jackson, Graeme D.},
date = {2018-11-01},
journaltitle = {NeuroImage},
shortjournal = {NeuroImage},
volume = {181},
pages = {85--94},
issn = {1053-8119},
doi = {10.1016/j.neuroimage.2018.06.020},
url = {https://www.sciencedirect.com/science/article/pii/S1053811918305275},
urldate = {2023-01-24},
abstract = {Correlation-based sliding window analysis (CSWA) is the most commonly used method to estimate time-resolved functional MRI (fMRI) connectivity. However, instantaneous phase synchrony analysis (IPSA) is gaining popularity mainly because it offers single time-point resolution of time-resolved fMRI connectivity. We aim to provide a systematic comparison between these two approaches, on temporal, topological and anatomical levels. For this purpose, we used resting-state fMRI data from two separate cohorts with different temporal resolutions (45 healthy subjects from Human Connectome Project fMRI data with repetition time of 0.72\,s and 25 healthy subjects from a separate validation fMRI dataset with a repetition time of 3\,s). For time-resolved functional connectivity analysis, we calculated tapered CSWA over a wide range of different window lengths that were compared to IPSA. We found a strong association in connectivity dynamics between IPSA and CSWA when considering the absolute values of CSWA. The association between CSWA and IPSA was stronger for a window length of ∼20\,s (shorter than filtered fMRI wavelength) than ∼100\,s (longer than filtered fMRI wavelength), irrespective of the sampling rate of the underlying fMRI data. Narrow-band filtering of fMRI data (0.03–0.07\,Hz) yielded a stronger relationship between IPSA and CSWA than wider-band (0.01–0.1\,Hz). On a topological level, time-averaged IPSA and CSWA nodes were non-linearly correlated for both short (∼20\,s) and long (∼100\,s) windows, mainly because nodes with strong negative correlations (CSWA) displayed high phase synchrony (IPSA). IPSA and CSWA were anatomically similar in the default mode network, sensory cortex, insula and cerebellum. Our results suggest that IPSA and CSWA provide comparable characterizations of time-resolved fMRI connectivity for appropriately chosen window lengths. Although IPSA requires narrow-band fMRI filtering, it does not mandate a (semi-)arbitrary choice of window length and window overlap. A code for calculating IPSA is provided.},
langid = {english},
keywords = {fMRI,Instantaneous phase synchrony,Sliding-windows,Time-resolved functional connectivity}
}
@online{phdFourWaysQuantify2021,
title = {Four Ways to Quantify Synchrony between Time Series Data},
author = {Cheong, Jin},
date = {2021-12-09T05:28:42},
url = {https://towardsdatascience.com/four-ways-to-quantify-synchrony-between-time-series-data-b99136c4a9c9},
urldate = {2023-01-24},
abstract = {Sample code and data to compute synchrony metrics including Pearson correlation, time-lagged cross correlations, dynamic time warping, and…},
langid = {english},
organization = {{Medium}}
}
@article{ramenzoniScalingPerceptionAction2014,
title = {Scaling up Perception–Action Links: {{Evidence}} from Synchronization with Individual and Joint Action},
shorttitle = {Scaling up Perception–Action Links},
author = {Ramenzoni, Verónica C. and Sebanz, Natalie and Knoblich, Guenther},
date = {2014},
journaltitle = {Journal of Experimental Psychology: Human Perception and Performance},
shortjournal = {Journal of Experimental Psychology: Human Perception and Performance},
volume = {40},
number = {4},
pages = {1551--1565},
issn = {1939-1277, 0096-1523},
doi = {10.1037/a0036925},
url = {http://doi.apa.org/getdoi.cfm?doi=10.1037/a0036925},
urldate = {2023-02-08},
langid = {english}
}
@article{rizzolattiMIRRORNEURONSYSTEM2004,
title = {The {{Mirror-Neuron System}}},
author = {Rizzolatti, Giacomo and Craighero, Laila},
date = {2004-07-21},
journaltitle = {Annual Review of Neuroscience},
shortjournal = {Annu. Rev. Neurosci.},