@misc{GlasauerShi2018, author = {Glasauer, Stefan and Shi, Zhuanghua}, title = {150 years of Vierordt's law: The role of experimental protocol}, series = {Society for Neuroscience, 2018}, journal = {Society for Neuroscience, 2018}, address = {San Diego, CA}, language = {en} }

@misc{CostalagoMerueloFleurietBakstetal., author = {Costalago Meruelo, Alicia and Fleuriet, J{\'e}r{\^o}me and Bakst, Leah and Mustari, Michael J. and Glasauer, Stefan}, title = {Modeling and Prediction of Sinusoidal Smooth Pursuit using Artificial Neural Networks}, series = {40th Annual International Conference of the IEEE Engineering in Medicine and Biology Society, 2018}, journal = {40th Annual International Conference of the IEEE Engineering in Medicine and Biology Society, 2018}, publisher = {IEEE EMBC Hawaii}, pages = {1}, language = {en} }

@misc{Glasauer, author = {Glasauer, Stefan}, title = {Sensorimotor control of smooth pursuit}, series = {Neuroscience 2018}, journal = {Neuroscience 2018}, abstract = {Interaction with the visual world critically depends on when and how we move our eyes. Eye movements, in turn, alter our experience of the world by selecting objects to foveate and tracking objects over time. The interaction between vision and eye movements continues indefinitely, one process modulating the strength and timing of the other. Despite this tight linkage, vision is often studied outside the context of eye movements, during stable fixation. Likewise, eye movements are often studied with sparse visual stimuli and under ideal viewing conditions. A more accurate understanding of vision requires studying perception in the context of eye movements. Different types of eye movements reveal different aspects of how the brain works. The continuous nature of tracking smooth pursuit eye movements, combined with its ubiquitous presence in natural vision, makes it an excellent system in which to analyze perception at fine spatial and temporal resolutions. For example, how does vision change during the transition from fixation to pursuit, and when does the brain allocate resources to account for these changes? In contrast, the ballistic nature of saccades requires analysis divided into discrete epochs, including well before the saccade during the planning stage, during saccade execution, and after the movement. At the neuronal level, how do heterogeneous populations of visual- and saccade-related cells come to represent a single saccade vector during these different epochs? What are the effects on visual perception when saccades and pursuit interact? This symposium will reveal how research incorporating both types of eye movements and visual perception can provide a more ecologically valid account of visual-motor integration and the intervening cognitive processes. This symposium will review innovative approaches and recent advances in understanding the interplay of eye movements and vision. We will take a multifaceted approach to understanding the effects of both exploratory (saccades) and tracking (smooth pursuit) eye movements on vision by using a range of methodologies and animal models (data-driven modeling, clinical populations, and simultaneous recordings of neuronal populations; humans, macaques, and marmosets). This symposium will appeal to vision scientists interested in selective attention, motion processing, neuronal modeling, motor control, neural circuits, and related topics.
Our collective goal is to uncover the dynamics of visual perception and the candidate neuronal mechanisms that support eye movements.}, language = {en} }

@misc{KnorrGravotGlasaueretal., author = {Knorr, Alexander G. and Gravot, C{\'e}line M. and Glasauer, Stefan and Straka, Hans}, title = {Image motion with color contrast suffices to elicit an optokinetic reflex in Xenopus laevis tadpoles}, series = {Scientific Reports}, volume = {11}, journal = {Scientific Reports}, issn = {2045-2322}, doi = {10.1038/s41598-021-87835-2}, abstract = {The optokinetic reflex is a closed-loop gaze-stabilizing ocular motor reaction that minimizes residual retinal image slip during vestibulo-ocular reflexes. In experimental isolation, the reflex is usually activated by motion of an achromatic large-field visual background with strong influence of radiance contrast on visual motion estimation and behavioral performance. The presence of color in natural environments, however, suggests that chromatic cues of visual scenes provide additional parameters for image motion detection. Here, we employed Xenopus laevis tadpoles to study the influence of color cues on the performance of the optokinetic reflex and multi-unit optic nerve discharge during motion of a large-field visual scene. Even though the amplitude of the optokinetic reflex decreases with smaller radiance contrast, considerable residual eye movements persist at the 'point of equiluminance' of the colored stimuli. Given the color motion preferences of individual optic nerve fibers, the underlying computation potentially originates in retinal circuits. Differential retinal ganglion cell projections and associated ocular motor signal transformation might further reinforce the color dependency in conceptual correspondence with head/body optomotor signaling. Optokinetic reflex performance under natural light conditions is accordingly influenced by radiance contrast as well as by the color composition of the moving visual scene.}, language = {en} }

@misc{SchroederWerderRamaiolietal., author = {Schr{\"o}der, Lena and von Werder, Dina and Ramaioli, Cecilia and Wachtler, Thomas and Henningsen, Peter and Glasauer, Stefan and Lehnen, Nadine}, title = {Unstable Gaze in Functional Dizziness: A Contribution to Understanding the Pathophysiology of Functional Disorders}, series = {Frontiers in Neuroscience}, volume = {15}, journal = {Frontiers in Neuroscience}, issn = {1662-453X}, doi = {10.3389/fnins.2021.685590}, abstract = {Objective: We are still lacking a pathophysiological mechanism for functional disorders explaining the emergence and manifestation of characteristic, severely impairing bodily symptoms like chest pain or dizziness. A recent hypothesis based on the predictive coding theory of brain function suggests that in functional disorders, internal expectations do not match the actual sensory body states, leading to perceptual dysregulation and symptom perception. To test this hypothesis, we investigated the account of internal expectations and sensory input on gaze stabilization, a physiologically relevant parameter of gaze shifts, in functional dizziness.
Methods: We assessed gaze stabilization in eight functional dizziness patients and 11 healthy controls during two distinct epochs of large gaze shifts: during a counter-rotation epoch (CR epoch), where the brain can use internal models, motor planning, and resulting internal expectations to achieve internally driven gaze stabilization; and during an oscillation epoch (OSC epoch), where, due to terminated motor planning, no movement expectations are present, and gaze is stabilized by sensory input alone. Results: Gaze stabilization differed between functional patients and healthy controls only when internal movement expectations were involved [F(1,17) = 14.63, p = 0.001, and partial η² = 0.463]: functional dizziness patients showed reduced gaze stabilization during the CR (p = 0.036) but not the OSC epoch (p = 0.26). Conclusion: While sensory-driven gaze stabilization is intact, there are marked, well-measurable deficits in internally driven gaze stabilization in functional dizziness, pointing at internal expectations that do not match actual body states. This experimental evidence supports the perceptual dysregulation hypothesis of functional disorders and is an important step toward understanding the underlying pathophysiology.}, language = {en} }

@misc{GlasauerShi2021a, author = {Glasauer, Stefan and Shi, Zhuanghua}, title = {The origin of Vierordt's law: The experimental protocol matters}, series = {PsyCH Journal}, volume = {10}, journal = {PsyCH Journal}, number = {5}, issn = {2046-0260}, doi = {10.1002/pchj.464}, pages = {732--741}, abstract = {In 1868, Karl Vierordt discovered one type of error in time perception—an overestimation of short durations and an underestimation of long durations, known as Vierordt's law. Here we reviewed the original study in its historical context and asked whether Vierordt's law is a result of an unnatural experimental randomization protocol. Using iterative Bayesian updating, we simulated the original results with high accuracy. Importantly, the model also predicted that a slowly changing random-walk sequence produces less central tendency than a random sequence with the same durations. This was validated by a duration reproduction experiment using two sequences (random and random walk) with the same sampled distribution. The results showed that trial-wise variation influenced the magnitude of Vierordt's law.
We concluded that Vierordt's law is caused by an unnatural yet widely used experimental protocol.}, language = {en} }

@misc{GlasauerShi2021b, author = {Glasauer, Stefan and Shi, Zhuanghua}, title = {Differences in beliefs about stimulus generation explain individual perceptual biases}, series = {Bernstein Conference 2021, September 21-23, 2021}, volume = {2021}, journal = {Bernstein Conference 2021, September 21-23, 2021}, doi = {10.12751/nncn.bc2021.p052}, language = {en} }

@misc{ShirzhiyanGlasauer, author = {Shirzhiyan, Zahra and Glasauer, Stefan}, title = {Late CNV-P2 amplitude as neural index of time interval perception}, series = {Bernstein Conference 2021, September 21-23, 2021}, volume = {2021}, journal = {Bernstein Conference 2021, September 21-23, 2021}, doi = {10.12751/nncn.bc2021.p153}, language = {en} }

@misc{KostorzFlanaginGlasauer, author = {Kostorz, Kathrin and Flanagin, Virginia and Glasauer, Stefan}, title = {Intersubject synchrony of viewers during naturalistic observational learning of a complex bimanual task}, series = {Neuroimage Reports}, volume = {2}, journal = {Neuroimage Reports}, number = {2}, issn = {2666-9560}, doi = {10.1016/j.ynirp.2022.100084}, pages = {18}, abstract = {Watching an instructional video has become a common way to learn a new task. However, we have but a sparse understanding of the neural processes involved during observational learning in naturalistic settings. Recently developed data-driven methods for analyzing brain activity provide an opportunity for further investigation. Here, we evaluate intersubject synchrony during fMRI to understand common brain processes during naturalistic observational learning. Participants solitarily watched an instructional video and learned how to fold a paper figure. Three learning runs were sufficient to successfully solve the task. To assess interbrain synchrony, we extended previous principal component analysis (PCA)-based methods to an intersubject PCA, which offers multiple measures for additional insights into the nature of the synchrony. Using the different metrics of this method, we show a robust synchronous involvement of the action observation execution network (AOEN) in observational learning, between subjects as well as within subjects, regardless of the task or video content. Importantly, additional areas such as the cerebellum, primary motor cortex, control, and sensory integration areas also showed robust synchrony in observational learning. Complementary to this robust general synchrony, individual regions of the AOEN exhibited task-related differences. Synchrony decreased during the learning process, likely reflecting task state and individual learning strategies. To test the stimulus as a possible source of synchrony, we quantified the temporal structure as the optic flow of the instructional video. Optic flow was strongly related to common activation of the somatomotor areas of the AOEN well beyond visual areas, but could not completely explain synchrony.
Thus, although visual motion provides a proxy for meaningful hand actions, our results suggest that intersubject synchrony reflects common cognitive processing during observational learning beyond sensory input.}, language = {en} }

@misc{GlasauerShi2022, author = {Glasauer, Stefan and Shi, Zhuanghua}, title = {Individual beliefs about temporal continuity explain variation of perceptual biases}, series = {Scientific Reports}, volume = {12}, journal = {Scientific Reports}, issn = {2045-2322}, doi = {10.1038/s41598-022-14939-8}, abstract = {Perception of magnitudes such as duration or distance is often found to be systematically biased. The biases, which result from incorporating prior knowledge in the perceptual process, can vary considerably between individuals. The variations are commonly attributed to differences in sensory precision and reliance on priors. However, another factor not considered so far is the implicit belief about how successive sensory stimuli are generated: independently from each other or with a certain temporal continuity. The main types of explanatory models proposed so far—static or iterative—mirror this distinction but cannot adequately explain individual biases. Here we propose a new unifying model that explains individual variation as a combination of sensory precision and beliefs about temporal continuity and predicts the experimentally found changes in biases when altering temporal continuity. Thus, according to the model, individual differences in perception depend on beliefs about how stimuli are generated in the world.}, language = {en} }