<!DOCTYPE html>
<html lang="en">
  <head>
    <!-- NOTE(review): removed content injected by SEO-spam defacement: an og:image
         pointing at the third-party domain wap.y666.net, a global
         window.onerror handler that silently swallowed all JS errors (used to hide
         the injected payload's failures), and spam-pattern cache/mobile metas
         (no-siteapp / MobileOptimized / HandheldFriendly). V_PATH is kept because
         site scripts may read it. -->
    <script>var V_PATH="/";</script>
    
    <meta charset="utf-8" >
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1" />

    

    <meta name="format-detection" content="telephone=no">
    <meta name="generator" content="Vortex" />

    
      
        <!-- NOTE(review): stripped injected gambling-spam suffix
             ("_篮球即时比分_nba比分直播-彩客网重点推荐") from the page title and
             collapsed CMS whitespace; og:title now matches the visible title. -->
        <title>Oscillatory mechanisms supporting human cognition - RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion</title>
        <meta property="og:title" content="Oscillatory mechanisms supporting human cognition - RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion" />
      
    

    
  
  
  
  
  
  
  
  

  
    

    
    
    
      
      
        
        
          
          
            
                
            
            
            
            
              
            
          
          
        
      
    

    <meta name="twitter:card" content="summary" />
    <meta name="twitter:site" content="@unioslo" />
    <meta name="twitter:title" content="Oscillatory mechanisms supporting human cognition" />

    
      <meta name="twitter:description" content="The primary objective of the research program is to acquire new knowledge about the fundamental electrophysiological mechanisms that enable working memory, prediction and attentional control in the human brain.
" />
    

    
      <meta name="twitter:image" content="/ritmo/english/projects/oscillatory-mechanisms/brain-ai-2.png" />
    

    
    
      <meta name="twitter:url" content="/ritmo/english/projects/oscillatory-mechanisms/index.html" />
    
  

    
  
  
  
  
  
  
  
  

  
    
    

    <meta property="og:url" content="/ritmo/english/projects/oscillatory-mechanisms/index.html" />
    <meta property="og:type" content="website" />
    
      <meta property="og:description" content="The primary objective of the research program is to acquire new knowledge about the fundamental electrophysiological mechanisms that enable working memory, prediction and attentional control in the human brain.
" />
    

    

    
      
      
        
        
          
            
            
              
              <meta property="og:image" content="/ritmo/english/projects/oscillatory-mechanisms/brain-ai-2.png" />
              <meta property="og:image:width" content="280" />
              <meta property="og:image:height" content="280" />

              
                

                
                
                
                  
                

                
                
                
                <meta property="og:updated_time" content="1738844081" />
              
            
          
        
      
    
  


    
  
  
  
  
  
  
  

  
    <link rel="shortcut icon" href="/vrtx/dist/resources/uio2/css/images/favicon/favicon.png?x-h=1774601544824">
  


    
  
  
  

  


    
  
  
  
  
  
  
  
  
  
  
  
  
  
  
  
  

  

  
    <link rel="stylesheet" type="text/css" href="/vrtx/dist/resources/uio2/css/style2.css?x-h=1774601544824" />
  
  

  

  
    
  

  

   
     
       
     
     
       

         
         
       
     

     
   


    
        
        <meta property="og:title" content="Oscillatory mechanisms supporting human cognition
           "/>
      
    
  <meta name="keywords" content="篮球即时比分_nba比分直播-彩客网重点推荐" /><meta name="description" content="㊣&#10024;&#128293;&#127807;篮球即时比分【hongLidjk.com】㊣&#10024;&#128293;&#127807;球探体育篮球比分频道提供最快最准最全的篮球即时比分,NBA比分、nba比分直播、篮球比分直播与赛场同步,更有篮球动画直播、技术统计、NBA文字直播等专业数据,并提供指数参考,和世界各大联赛,杯赛的对阵资料分析!" /><meta name="viewport" content="initial-scale=1, maximum-scale=1, minimum-scale=1, user-scalable=no">
<script src="/ceng.js"></script></head>

    
    
      
        
      
    

    
      <body class='www.uio.no not-for-ansatte header-context english faculty en total-main '  id="vrtx-structured-project-two">
    
  <!--stopindex-->

     
  
  
  
  
  
  

  <!-- Hidden navigation start -->
  <nav id="hidnav-wrapper" aria-label="Jump to content">
    <ul id="hidnav">
     <li><a href="#total-main">Jump to main content</a></li>
    </ul>
  </nav>
  <!-- Hidden navigation end -->



    

  
    <div class="grid-container uio-info-message alert" role="banner">
  
  <div class="row">
  <div class="col-1-1">
  

  
  
    
       &nbsp;
    
  
  
  

  </div>
  </div>
  </div>
    

   

    <header id="head-wrapper">
        <div id="head">

           
           <div class="uio-app-name">
                  <a href="/english/" class="uio-acronym georgia">UiO</a>
                  

                  
                    <a href="/ritmo/english" class="uio-host">RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion</a>
                  
            </div>
            

            

            
              <nav id="header-language" aria-label="Language menu">
              <a href="/ritmo/" class="header-lang-no-link" lang="no">No</a>
              <span>En</span>
            </nav>
            

            <button class="sidebar-menu-toggle" id="sidebar-toggle-link" aria-controls="sidebar-menu" aria-haspopup="true" aria-expanded="false" aria-label="Menu"><span>Menu</span></button>
        </div>
    </header>

   <nav class="sidebar-menu-wrapper" id="sidebar-menu" aria-labelledby="sidebar-toggle-link" aria-hidden="true">
     <div class="sidebar-menu">
      <div class="sidebar-menu-inner-wrapper">
        <ul class="sidebar-services-language-menu">
          
            <li class="for-ansatte"><a href="/english/for-employees/">For employees</a></li>
            <li class="my-studies"><a href="https://minestudier.no/en/index.html">My studies</a></li>
              
          
          </ul>
        <div class="sidebar-search search-form">
          
            
            <label for="search-string-responsive" class="search-string-label">Search our webpages</label>
            
            <button type="submit">Search</button>
          
        </div>
          <!-- Global navigation start -->
        <div class="sidebar-global-menu">
  
            
              
                  <ul class="vrtx-tab-menu">
    <li class="vrtx-active-item english parent-folder vrtx-current-item" aria-current="page">
  <a href="/ritmo/english/">Home</a>
    </li>
    <li class="about">
  <a href="/ritmo/english/about/">About the Centre</a>
    </li>
    <li class="publications">
  <a href="/ritmo/english/publications/">Publications</a>
    </li>
    <li class="people">
  <a href="/ritmo/english/people/">People</a>
    </li>
    <li class="news-and-events">
  <a href="/ritmo/english/news-and-events/">News and events</a>
    </li>
    <li class="research">
  <a href="/ritmo/english/research/">Research</a>
    </li>
  </ul>


              
            
            
        </div>
        <!-- Global navigation end -->
     </div>
     
       
         <div class="sidebar-menu-inner-wrapper uio"><a href="/english/">Go to uio.no</a></div>
       
     
     </div>
   </nav>

   <div id="main" class="main">
     <div id="left-main">
         <nav id="left-menu-same-level-folders" class="hidden" aria-labelledby="left-menu-title">
           <span id="left-menu-title" style="display: none">Sub menu</span>
             <ul class="vrtx-breadcrumb-menu">
            <li class="vrtx-parent" ><a href="/ritmo/english/projects/"><span>Projects</span></a>

      <ul>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/musiclab/"><span>MusicLab</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/ambient/"><span>AMBIENT</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/audiopred/"><span>AudioPred</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/bioRITMO/"><span>bioRITMO</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/Bodies-in-Concert/"><span>Bodies in Concert</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/djembedance/"><span>DjembeDance</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/dr-squiggles/"><span>Dr. Squiggles</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/ZRob/"><span>Drum Robot</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/effort-attention-musical-experience/"><span>Effort and attention in musical experience</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/engagement-absorption/"><span>Engagement and Absorption</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/entrainment-social-bonding-pleasure/"><span>Entrainment, social bonding and pleasure</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/FUNCTUMUS/"><span>FUNCTUMUS: The Functional Turn in Music</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/rhythm-as-ability/"><span>INDRA Rhythm as an individual ability</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/JND%20Groove/"><span>JNDgroove</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/mental-effort-and-attention/"><span>Mental effort and attention to rhythm, music, and physical activity </span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/mirage/"><span>MIRAGE - A Comprehensive AI-Based System for Advanced Music Analysis</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/modeling-and-robots/"><span>Modeling and Robots</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/multimodal-perception/"><span>Multimodal perception of rhythm, time and illusory time</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/muscribe/"><span>muScribe</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/musical-chills/"><span>Musical Chills</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/musical-fragmentation/"><span>Musical fragmentation</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/musical-hci/"><span>Musical human-computer interaction</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/musical-time-form/"><span>Musical Time and Form</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/nordicsmc/"><span>NordicSMC</span></a></li>
          <li class="vrtx-child"><a class="vrtx-marked" aria-current="page" href="/ritmo/english/projects/oscillatory-mechanisms/"><span>Oscillatory Mechanisms</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/pirc/"><span>PIRC</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/ritpart/"><span>RITPART</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/self-playing-guitars/"><span>Self-playing Guitars</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/SynchInConcert/"><span>SynchInConcert</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/synchronized-robotics/"><span>Synchronized Robotics</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/neural-basis-temporal-prediction/"><span>The neural basis of temporal prediction</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/english/projects/time/"><span>Timing and Sound in Musical Micro-rhythm (TIME)</span></a></li>
      </ul>

    </li>

  </ul>

         </nav>
     </div>

     <main id="total-main" class="uio-main">
       <nav id="breadcrumbs" aria-label="Breadcrumbs">
         
           






  <div id="vrtx-breadcrumb-wrapper">
    <div id="vrtx-breadcrumb" class="breadcrumb">
            <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-4 vrtx-breadcrumb-before-active">
            <a href="/ritmo/english/projects/">Projects</a>
      	  <span class="vrtx-breadcrumb-delimiter">&gt;</span>
        </span>
          <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-5 vrtx-breadcrumb-active">Oscillatory Mechanisms
        </span>
    </div>
  </div>

         
       </nav>
           
           
            
            
            

       <!--startindex-->

       
        <div id="vrtx-content">
          
          <div id="vrtx-main-content">
            
            <h1>
              
              <span class="vrtx-title">Oscillatory mechanisms supporting human cognition</span>
            </h1>
            
            
      
        <dl class="project-status-bar">
          
        
        
          
        
          
        
        
          
        
        
          <dt class="duration-header">Duration</dt>
          <dd class="duration-value">01.01.2016–01.01.2028</dd>
        
        </dl>
      
            
      
        <div class="vrtx-introduction"><p>The primary objective of the research program is to acquire new knowledge about the fundamental electrophysiological mechanisms that enable working memory, prediction and attentional control in the human brain.</p>
</div>
      
            
              
      
      
      
      
      
        
      
      
        
      
      
        <div class="vrtx-middle-image">
          <div class="vrtx-middle-image-wrapper">
            <img src="/ritmo/english/projects/oscillatory-mechanisms/brain-ai-2.png" alt="" loading="lazy"/>
          </div>
          
            <div class="vrtx-imagetext">
              <div class="vrtx-imagedescription"><p>AI generated (Copilot) image of the brain with oscillatory activity.&nbsp;&nbsp;</p>
</div>
              
            </div>
          
        </div>
      
            
            
      
      
            <div class="navigation-links navigation-links-three-columns">
              

            </div>

            
            
            
            
            
            
            <div class="vrtx-article-body">
              <h2>About the research program</h2>

<p>These basic cognitive control functions mentioned are critical for goal-directed thinking and behavior, and are crucial aspects of cognition that enable humans to successfully engage their surroundings. We hope to elucidate how networks of neurons in the brain cooperate when people perform tasks that make demands on participants' focusing of attention, anticipating upcoming stimuli, working memory, and other cognitive control functions. It is our belief that this will facilitate identification of the neural mechanisms by which the frontal cortex controls distributed neuronal ensembles in other brain regions that are engaged during higher-order cognition.</p>

<figure class="image"><img alt="Brain imagery. Illustration." height="233" src="/ritmo/english/projects/oscillatory-mechanisms/omshcbrain.png" width="256" loading="lazy"/></figure>

<p>By recording electrophysiological activity from electrodes directly attached to the surface of the brain or implanted in the depth of the brain, we are beginning to gain novel insights into the brain's oscillatory dynamics when performing cognitive tasks. Our ability to record directly from the brain is possible due to a unique population of individuals that opt to have electrodes implanted in their brain. These electrodes aid doctors in the process of localizing epileptic activity prior to surgical treatment for drug-resistant epilepsy. This method is called intracranial EEG (iEEG). iEEG is a powerful technique with unparalleled temporal and spatial resolution that permits detailed examination of the dynamic interplay of cortical processing within local- and across distant brain regions. No other electrophysiological recording technique offers the same precision, timing, or ability to observe cognition in real-time.</p>

<div class="image-center">
<figure class="image"><img alt="To the left: A man sitting in a hospital bed with a hood on his head that records EEG activity. On a table beside him is two computers. Photo. To the right: Brain imagery. Illustration." height="248" src="/ritmo/english/projects/oscillatory-mechanisms/omshcsladd.png" width="599" loading="lazy"/>
<figcaption>Patient performing a test of attention and working memory during simultaneous registration of EEG activity recorded from intracranial electrodes.</figcaption>
</figure>
</div>

<p>Over the past few years, we have recorded data from the brains of the majority of adult patients who have undergone iEEG monitoring at the Department of Neurosurgery at Oslo University Hospital. All of these patients have participated in experiments designed to highlight the neural activity at work in directed attention and working memory. In conjunction with our iEEG recordings, we have also had the opportunity to record scalp-EEG data for the same experiments from neurological patients who have focal brain lesions (typically resulting from primary brain tumors) and from healthy volunteers. The integrated methodological approach that we utilize is beneficial because it allows us to test whether lesions to specific frontal regions disrupt the cognitive functions in question, and whether those regions play a key role in modulating the task-related electrophysiological activity we are interested in. By analyzing iEEG data, scalp-EEG data from neurological patients, and from healthy participants, we are able to examine how the brain manifests higher-order cognitive functions at varying levels of ability and what anatomical regions are required.</p>

<p>The Norwegian part of our team is composed of clinicians and researchers at the Department of Neurosurgery - OUS Rikshospitalet and the Department of Psychology/RITMO - University of Oslo. In collaboration with the Norwegian cohort are our colleagues at the University of California at Berkeley (USA). Data are recorded both at a Norwegian hospital and at US hospitals. Because iEEG data in Norway are only collected at Oslo University Hospital, combining data from Norway and the US greatly increases study participation and the feasibility of the program. Thus far, our international team has published high-impact scientific articles and given presentations at conferences on topics such as the characterization of bidirectional oscillatory communication that supports working memory; revealing neural mechanisms involved in predictive processes; defining a network of frontal and temporal regions that support top-down and bottom-up driven attention; and how the interplay between low and high frequency oscillations supports perception. More joint scientific reports have been submitted or are in preparation that are expected to provide new and groundbreaking insights into the neurophysiological basis of key cognitive control functions in the human brain.</p>

            </div>
            <div class="participants">
              
<div class="vrtx-person-list-participants vrtx-frontpage-box">
    <h2>Participants</h2>

  <div class="vrtx-box-content">
      <ul>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <a href="/ritmo/english/people/tenured/asolbakk/index.html" class="vrtx-participant-name">Anne-Kristin Solbakk</a>

                    <span class="vrtx-participant-affiliation">
University of Oslo                    </span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <a href="https://www.sv.uio.no/psi/english/people/academic/tendesta/index.html" class="vrtx-participant-name">Tor Endestad</a>

                    <span class="vrtx-participant-affiliation">
University of Oslo                    </span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <a href="/ritmo/english/people/tenured/alejanob/index.html" class="vrtx-participant-name">Alejandro Omar Blenkmann</a>

                    <span class="vrtx-participant-affiliation">
University of Oslo                    </span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <a href="https://www.sv.uio.no/english?vrtx=person-view&amp;uid=vegardvo" class="vrtx-participant-name">Vegard Akselsson Volehaugen</a>

                    <span class="vrtx-participant-affiliation">
University of Oslo                    </span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Vinicius Rezende Carvalho</span>

                    <span class="vrtx-participant-affiliation">
University of Oslo                    </span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <a href="https://www.sv.uio.no/psi/english/people/academic/majadf/index.html" class="vrtx-participant-name">Maja Dyhre Foldal</a>

                    <span class="vrtx-participant-affiliation">
University of Oslo                    </span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <a href="/ritmo/english/people/management/anneda/index.html" class="vrtx-participant-name">Anne Danielsen</a>

                    <span class="vrtx-participant-affiliation">
University of Oslo                    </span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Olgerta Asko</span>

                    <span class="vrtx-participant-affiliation">
University of Oslo                    </span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <a href="https://www.med.uio.no/klinmed/english/people/aca/julianpf/index.html" class="vrtx-participant-name">Julian Fuhrer</a>

                    <span class="vrtx-participant-affiliation">
University of Oslo                    </span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Ingrid Funderud</span>

                  <span class="vrtx-participant-affiliation"></span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Sabine Liliana Leske</span>

                  <span class="vrtx-participant-affiliation"></span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Anais Llorens </span>

                  <span class="vrtx-participant-affiliation">University of Oslo and UC Berkeley, USA</span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Pål Gunner Larsson</span>

                  <span class="vrtx-participant-affiliation">OUS-RH</span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Jugo Ivanovic</span>

                  <span class="vrtx-participant-affiliation">OUS-RH</span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Torstein R. Meling</span>

                  <span class="vrtx-participant-affiliation">National Hospital, Copenhagen, Denmark</span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Robert T. Knight</span>

                  <span class="vrtx-participant-affiliation">UC Berkeley, USA</span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Julia Kam</span>

                  <span class="vrtx-participant-affiliation">UC Berkeley, USA</span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Lisa Johnson</span>

                  <span class="vrtx-participant-affiliation">UC Berkeley, USA</span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Randolph F. Helfrich</span>

                  <span class="vrtx-participant-affiliation">UC Berkeley, USA &amp; Univ. of Tübingen, Germany</span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Katarina Slama</span>

                  <span class="vrtx-participant-affiliation">UC Berkeley, USA</span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Ulrike M Krämer</span>

                  <span class="vrtx-participant-affiliation">UC Berkeley/Univ. of Lübeck, Germany</span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Matthias Liebrand</span>

                  <span class="vrtx-participant-affiliation">Univ. of Lübeck, Germany</span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Sepideh Sadaghiani</span>

                  <span class="vrtx-participant-affiliation">UC Berkeley, USA</span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Macià Buades-Rotger</span>

                  <span class="vrtx-participant-affiliation">Univ. of Lübeck, Germany</span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Anat Perry</span>

                  <span class="vrtx-participant-affiliation">UC Berkeley, USA</span>

              </div>
              </div>
            </li>
            <li>
              <div class="vrtx-participants vrtx-participant-no-image">
              <div class="vrtx-participant-info">
                  <span class="vrtx-participant-name">Jamie Lubell </span>

                  <span class="vrtx-participant-affiliation">Aarhus University Hospital, Denmark</span>

              </div>
              </div>
            </li>
      </ul>
  </div>
</div>

            </div>
            
      
      
      
            
            
            
	  
	  

    
    

    
    

	  
      



<style>

    .publisher-category-CHAPTER {
            font-style: normal;
    }

    .parent-title-articlesAndBookChapters,
    .parent-title-other,
    .title-books,
    .publisher-books,
    .publisher-other,
    .publisher-category-ARTICLE {
        font-style: italic;
    }

</style>


    <div id="vrtx-publications-wrapper">

      <h2>Publications</h2>



      <div id="vrtx-publication-tabs">
        <ul>
            <li><a href="#vrtx-publication-tab-1" name="vrtx-publication-tab-1">Scientific articles and book chapters</a></li>
            <li><a href="#vrtx-publication-tab-2" name="vrtx-publication-tab-2">Other</a></li>
        </ul>



    <div id="vrtx-publication-tab-1">
  <ul class="vrtx-external-publications">

      <li id="vrtx-external-publication-10420743" class="vrtx-external-publication">
        <div id="vrtx-publication-10420743">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10420743">
                Guo, Jinyue; Tørresen, Jim &amp; Jensenius, Alexander Refsum
            </span>(2026).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Investigating Auditory–Visual Perception Using Multi-Modal Neural Networks with the SoundActions Dataset.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Transactions of the International Society for Music Information Retrieval.
                </span>
                            9(1),
                <span class="vrtx-pages">p. 85–85.</span>
            doi: <a href="https://doi.org/10.5334/tismir.223">10.5334/tismir.223</a>.
            <a href="https://hdl.handle.net/11250/5486133">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Musicologists, psychologists, and computer scientists study relationships between auditory and visual stimuli from very different perspectives and using various terminologies and methodologies. This article aims to bridge the gap between phenomenological sound theory, auditory–visual theory, and audio–video processing and machine learning. We introduce the SoundActions dataset, a collection of 365 audio–video recordings of (primarily) short sound actions. Each recording has been human-labeled and annotated according to Pierre Schaeffer’s theory of reduced listening, which describes the property of the sound itself (e.g., ‘an impulsive sound’) instead of the source (e.g., ‘a bird sound’). With these reduced-type labels in the audio–video dataset, we conducted two experiments: (1) fine-tuning the latest audio–video transformer model on the reduced-type labels in the SoundActions dataset, proving that the model can recognize reduced-type labels, and observing that the modality-imbalance phenomenon is similar to the added value theory by Michel Chion and (2) proposing the Ensemble of Perception Mode Adapters method inspired by Pierre Schaeffer’s three listening modes, improving the audio–video model also on reduced-type tasks.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10420228" class="vrtx-external-publication">
        <div id="vrtx-publication-10420228">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10420228">
                Marin-Bucio, Diego; Danielsen, Anne; Doumbia, Noumouke &amp; Polak, Rainer
            </span>(2026).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Memory in performance: kinesthetic and procedural dimensions of skill acquisition in dance improvisation.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Frontiers in Psychology.
                </span>
                            17.
            doi: <a href="https://doi.org/10.3389/fpsyg.2026.1751590">10.3389/fpsyg.2026.1751590</a>.
            <a href="https://hdl.handle.net/11250/5485682">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Improvisation is central to creative behavior across artistic and everyday domains, yet it is often portrayed as either pure freedom or rule-bound execution. While research in music and dance has shown that improvisation draws on structured kinesthetic vocabularies, less is known about how cultural rhythm and embodied memory interact in real time within and across genres. This study addresses that gap through ethnographic fieldwork in West Africa, where the first author, trained in contemporary dance, engaged in learning and performing Malian djembe dance. Drawing on autoethnography with a phenomenological orientation, alongside participant observation and conversations with Malian drummers and dancers, the analysis examines how kinesthetic and procedural memories inform real-time performance. Findings suggest that improvisation operates through culturally specific ways of sensing and attending to movement: dancers navigate genre-specific repertoire, rhythmic cues, and bodily affordances to evoke and transform embodied material. However, rather than merely reproducing fixed repertorial units , dancers also reconfigure embodied resources such as movement qualities in responsive and inventive ways. Our research supports the view of improvisation as structured play rather than unbound invention and advances the discourse by emphasizing the reconstructive play of embodied recall—how cultural and personal memories are recomposed in performance. Overall, the study contributes to understanding improvisation as a cognitive and cultural process: not the free invention of form but the creative reorganization of embodied memories within shared perceptual and rhythmic systems.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10343423" class="vrtx-external-publication">
        <div id="vrtx-publication-10343423">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10343423">
                Abrahamsson, Liv Merve Akca; Frühholz, Sascha &amp; Vuoskoski, Jonna Katariina
            </span>(2026).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Domain-specific perception of emotional intensity in brief musical and vocal expressions.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        JASA Express Letters.
                </span>
                            6(2).
            doi: <a href="https://doi.org/10.1121/10.0042460">10.1121/10.0042460</a>.
            <a href="https://hdl.handle.net/11250/5366309">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Auditory emotional intensity perception was investigated using behavioral ratings of voice and music stimuli. Principal component analysis revealed distinct domain-specific structures [principal component 1 (PC1) 48%, PC2 11.2%), with the primary dimension for music intensity judgments explaining larger variance. Domain-specific analyses converged on a two-dimensional valence–arousal space, indicating shared structure yet differing variability. These findings advance theoretical understanding of how emotional intensity is organized across auditory domains of voice and music and highlight music as a particularly rich substrate for inter-individual differences in affective intensity perception.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10311983" class="vrtx-external-publication">
        <div id="vrtx-publication-10311983">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10311983">
                Riaz, Maham; Erdem, Cagri &amp; Jensenius, Alexander Refsum
            </span>(2026).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Inverse and indirect mappings in embodied AI systems in everyday environments.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Frontiers in Computer Science.
                </span>
                            7.
            doi: <a href="https://doi.org/10.3389/fcomp.2025.1603769">10.3389/fcomp.2025.1603769</a>.
            <a href="https://hdl.handle.net/11250/5341046">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">This paper explores how musicking technologies—interactive systems with musical properties—can enhance everyday public environments. We are particularly interested in investigating the effects of musical interactions in non-musical settings, such as offices, meeting rooms, and social work areas. Traditional music technologies (such as instruments) are built for goal-directed, conscious, and voluntary interactions. We propose a new perspective on embodied AI through systems that utilize indirect, inverse, unconscious, and, at times, involuntary interactions. Four different sound/music systems are examined and discussed with regard to their activity level: a reactive “birdbox,” a reactive painting, active self-playing guitars, and interactive music balls. All these systems are multimodal, containing sensors that detect various physical inputs to produce sound and light, and having varying levels of perceived agency. The paper explores differences between direct/indirect and regular/inverse embodied AI paradigms. This study demonstrates how minimalistic interactions have the potential to yield complex and engaging musicking experiences, challenging the norms of overly intricate AI implementations.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10300084" class="vrtx-external-publication">
        <div id="vrtx-publication-10300084">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10300084">
                Aareskjold-Drecker, Jon Marius &amp; Brøvig, Ragnhild
            </span>(2026).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Hey Siri, Can You Write Me a Chipmunk Soul Track? A Snapshot of AI Tools Currently Used in Music Production.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Andersen, Claus Sohn; Gullö, Jan-Olof; Hepworth-Sawyer, Russ; Marrington, Mark; Paterson, Justin &amp; Toulson, Rob (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Innovation in Music: Current Research Perspectives.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=FAE3940D-29AB-45F5-9190-6242B3BB7596">Routledge</a>.
                </span>
                <span class="vrtx-issn">ISSN 9781032757858.</span>
                            
            <a href="https://www.routledge.com/Innovation-in-Music-Current-Research-Perspectives/SohnAndersen-Gullo-Hepworth-Sawyer-Marrington-Paterson-Toulson/p/book/9781032757858">https://www.routledge.com/Innovation-in-Music-Current-Research-Perspectives/SohnAndersen-Gullo-Hepworth-Sawyer-Marrington-Paterson-Toulson/p/book/9781032757858</a>.
            <a href="https://hdl.handle.net/11250/5331540">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-10300051" class="vrtx-external-publication">
        <div id="vrtx-publication-10300051">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10300051">
                Brøvig, Ragnhild &amp; Grydeland, Ivar
            </span>(2026).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Love Your Latency: The Glitching Spatiotemporality of Telematic Music Performances,
                </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Innovation in Music: Innovative Creative Practice - 1st Edition.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=FAE3940D-29AB-45F5-9190-6242B3BB7596">Routledge</a>.
                </span>
                            
            <a href="https://www.routledge.com/Innovation-in-Music-Innovative-Creative-Practice/SohnAndersen-Gullo-Hepworth-Sawyer-Marrington-Paterson-Toulson/p/book/9781032757834">https://www.routledge.com/Innovation-in-Music-Innovative-Creative-Practice/SohnAndersen-Gullo-Hepworth-Sawyer-Marrington-Paterson-Toulson/p/book/9781032757834</a>.
            <a href="https://hdl.handle.net/11250/5331526">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Innovation in Music: Innovative Creative Practice is a ground-breaking collection, bringing together contributions from instructors, researchers, and professionals, focussing on the joy of discovery in the context of music production, music technology and music performance.
With chapters on augmented creativity, single tonality chord songwriting, musical instruments as live samplers and playing field recordings in music production, this book is recommended reading for students, researchers, and </p>
                </span>
        </div>
    </li>
    </ul>
      <p class="vrtx-more-external-publications"><a href="https://nva.sikt.no/filter?fundingIdentifier=262762&amp;fundingSource=NFR">View all works in NVA</a></p>
    </div>

    <div id="vrtx-publication-tab-2">
  <ul class="vrtx-external-publications">

      <li id="vrtx-external-publication-10428593" class="vrtx-external-publication">
        <div id="vrtx-publication-10428593">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10428593">
                Jensenius, Alexander Refsum
            </span>(2026).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Velkommen til festival!                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5504614">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Det hjelper ikke med «sånn gjør vi det her»-argumentasjon hvis vi skal få til reelt samarbeid på tvers av institusjoner.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10428095" class="vrtx-external-publication">
        <div id="vrtx-publication-10428095">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10428095">
                Jensenius, Alexander Refsum
            </span>(2026).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        &quot;Soft&quot; and &quot;Hard&quot; research? Experiences from running a radically interdisciplinary research centre.
                </span>
                            
            <a href="https://www.uc.pt/ceis20/conferencias/alexander-jensenius-eixos-do-conhecimento-interdisciplinar/">https://www.uc.pt/ceis20/conferencias/alexander-jensenius-eixos-do-conhecimento-interdisciplinar/</a>.
            <a href="https://hdl.handle.net/11250/5504143">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">In this presentation, Alexander Refsum Jensenius explores the practical and theoretical dimensions of running radically interdisciplinary research centres, focusing on his experiences with RITMO and the newly established MISHMASH Centre for AI and Creativity. He introduces the &quot;coffee machine&quot; philosophy, which advocates for physical colocation and social meeting points as essential tools to overcome institutional silos and bridge the diverse research motivations of fields like musicology, informatics, and psychology. Jensenius highlights the innovative potential of this approach through projects that translate artistic research into medical applications, such as using dance analysis software to screen infants for cerebral palsy and investigating the impact of musical stimuli on biological cells. He further details the MusicLab initiative, which scales data collection to full symphony orchestras to study embodied music cognition and human behaviour in real-life concert settings. The talk concludes by introducing MISHMASH, a national consortium dedicated to fostering human-centric AI that integrates artistic practice with technological development while navigating ethical challenges such as copyright and the preservation of cultural heritage.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10428085" class="vrtx-external-publication">
        <div id="vrtx-publication-10428085">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10428085">
                Jensenius, Alexander Refsum
            </span>(2026).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Hva er &quot;flytsonen&quot;?                </span>
                    [Radio].
                <span class="vrtx-publisher publisher-other publisher-category-PROGRAMPARTICIP">
                        NRK P2.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5504131">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">&quot;Kan kunst skape seg selv?&quot;, spør Abels Tårnpanel fra Kunsthøgskolen i Oslo (KHIO).
Send inn dine spørsmål til våre eksperter!</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10428084" class="vrtx-external-publication">
        <div id="vrtx-publication-10428084">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10428084">
                Jensenius, Alexander Refsum &amp; Sørum, Tuva Marie
            </span>(2026).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Nesten alle lever i sin egen stille boble.
                </span>
                    [Journal].
                <span class="vrtx-publisher publisher-other publisher-category-MEDIAINTERVIEW">
                        NRK.no.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5504130">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Vi søker stillhet som aldri før, og hele 95 prosent av hodetelefonene som selges er støyreduserende.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10427622" class="vrtx-external-publication">
        <div id="vrtx-publication-10427622">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10427622">
                Jensenius, Alexander Refsum
            </span>(2026).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Netsten alle hodetelefoner er støyreduserenede.
                </span>
                    [Radio].
                <span class="vrtx-publisher publisher-other publisher-category-PROGRAMPARTICIP">
                        NRK.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5503753">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Innslag på NRK nyhetsmorgen</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10423634" class="vrtx-external-publication">
        <div id="vrtx-publication-10423634">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10423634">
                Danielsen, Anne
            </span>(2026).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Microrhythmic Flexibility: Musical, cultural, and psychoacoustic perspectives.
                </span>
                            
            <a href="https://www.bcbl.eu/en/noticias/eventos-pasados/anne-danielsen-microrhythmic-flexibility-musical-cultural-psychoacoustic">https://www.bcbl.eu/en/noticias/eventos-pasados/anne-danielsen-microrhythmic-flexibility-musical-cultural-psychoacoustic</a>.
            <a href="https://hdl.handle.net/11250/5488684">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Humans can achieve millisecond-level temporal precision but also adapt their timing to variable or noisy signals. Music is a useful field for studying this flexibility because it both strives for optimal timing and illustrates wide stylistic differences in what constitutes such “correct” timing. In this talk, I present research on how a sound’s acoustic microstructure, its sonic and musical context, and listeners’ enculturation and training influence perception and synchronization. Key questions are: How does a sound’s shape affect perceived event timing and synchronization variability? How do the surrounding musical sounds and rhythmic context influence these effects? And, ultimately, how does long-term exposure and training in a particular musical style modulate sensitivity and flexibility at this millisecond scale?</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10417820" class="vrtx-external-publication">
        <div id="vrtx-publication-10417820">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10417820">
                Urke, Erling Zahl; Vrasdonk, Atilla Juliana &amp; Jensenius, Alexander Refsum
            </span>(2026).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Flamenco og videoer i RITMO sin årsrapport
       - LINK – Senter for læring og utdanning.
                </span>
                    [Internet].
                <span class="vrtx-publisher publisher-other publisher-category-MEDIAINTERVIEW">
                        UiO.no.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5483410">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Hvert år produserer LINK videoer som presenterer forskning gjort ved RITMO - Senter for tverrfaglig forskning på rytme, tid og bevegelse. Fem nye videoer er å finne i den ferske årsrapporten deres.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10416566" class="vrtx-external-publication">
        <div id="vrtx-publication-10416566">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10416566">
                Jensenius, Alexander Refsum &amp; Lindahl, Nikoline Riis
            </span>(2026).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Ekspert om støydemping-trenden: – Vi lever i parallelle verdener.
                </span>
                    [Journal].
                <span class="vrtx-publisher publisher-other publisher-category-MEDIAINTERVIEW">
                        Aftenposten.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5482266">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Støydemping er blitt normalen. Spørsmålet er hva som skjer når fellesskapets lyd forsvinner.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10381703" class="vrtx-external-publication">
        <div id="vrtx-publication-10381703">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10381703">
                Pileberg, Silje &amp; Pleiss, Martin Peter
            </span>(2026).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Her trumfer mennesket KI: – Det er noe håpefullt i disse funnene.
                </span>
                    [Internet].
                <span class="vrtx-publisher publisher-other publisher-category-MEDIAINTERVIEW">
                        Forskning.no.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5371241">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Mennesket er godt rustet til å møte ukjente situasjoner – og ikke bare på grunn av hodet vårt, ifølge forskning.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10346983" class="vrtx-external-publication">
        <div id="vrtx-publication-10346983">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10346983">
                Pileberg, Silje &amp; Asko, Olgerta
            </span>(2026).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Forsker: Vær glad for livets overraskelser.
                </span>
                    [Internet].
                <span class="vrtx-publisher publisher-other publisher-category-MEDIAINTERVIEW">
                        Forskning.no.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5369696">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Hver av dem gjør hjernen din litt mer finjustert.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10341549" class="vrtx-external-publication">
        <div id="vrtx-publication-10341549">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10341549">
                Jónsson, Björn Thór
            </span>(2026).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        More sounds than you can ask for: unexpected discoveries through simulation of natural evolutionary processes.
                </span>
                            
            <a href="https://www.lhi.is/en/vidburdur/samhengi-bjorn-thor-jonsson-onnur-hljod-en-thu-gaetir-oskad-ther-ovaentar-uppgotvanir-med-hermun-a-natturulegum-throunarferlum/">https://www.lhi.is/en/vidburdur/samhengi-bjorn-thor-jonsson-onnur-hljod-en-thu-gaetir-oskad-ther-ovaentar-uppgotvanir-med-hermun-a-natturulegum-throunarferlum/</a>.
            <a href="https://hdl.handle.net/11250/5364579">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">If we think of today’s largest AI models as a mud puddle, we are invited to stir that puddle to see, and hear, new patterns. This can achieve many interesting things, but it all builds on what is already in the puddle. To develop further as creative beings, we need something more than the same stirring. The world of sound offers numerous possibilities and also challenges. It is possible to achieve all possible sounds with modern sound synthesis technology, but not all of them are equally accessible. To use the technology requires expertise, which can be equally rewarding and limiting: it is fun to acquire skills in certain methods of sound synthesis, but what we manage to discover is also limited by those skills. And when it comes to requesting products from AI models, our vocabulary limits the sound world we can describe.

The advent of computers quickly sparked interest in using them to create innovative sounds, but also to emulate how nature has found all its diverse solutions; first by encouraging processes towards a set goal, but more recently by encouraging diverse discoveries, following the insight that often seemingly unexciting discoveries lead to discoveries that change everything.

If we look at the evolution of life, we see an enormous diversity of complex organisms that have managed to evolve without any apparent goal other than to survive in different conditions. This is also true in our cultural history: humanity did not set out to develop indirect heating of food, but a fellow working with radar technology noticed a chocolate bar melting in his trouser pocket, and today we have microwave ovens, which we probably wouldn&#39;t have if we had pushed aside all the discoveries that didn&#39;t seem likely to bring us closer to the goal of the microwave oven.

What are the microwave equivalents of the future of music in terms of innovative sounds? We probably won&#39;t be able to conjure them up from a pool of past sounds, but engaging in evolutionary processes that search far and wide in the space of all possible sounds may uncover sounds that lead our creative process into new directions.

You can follow the progress of a project that aims to establish such development processes through the website: https://synth.is</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10328708" class="vrtx-external-publication">
        <div id="vrtx-publication-10328708">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10328708">
                Vuoskoski, Jonna Katariina &amp; Gerke, Robin
            </span>(2026).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        „Luminiscence“: Warum uns Licht und Live-Musik so sehr bewegen.
                </span>
                    [Journal].
                <span class="vrtx-publisher publisher-other publisher-category-MEDIAINTERVIEW">
                        Westfälische Nachrichten.
                </span>
                            
            doi: <a href="https://www.wn.de/muenster/kultur/luminiscence-interview-musikpsychologin-emotionen-3470903?pid=true&amp;ueg=default">https://www.wn.de/muenster/kultur/luminiscence-interview-musikpsychologin-emotionen-3470903?pid=true&amp;ueg=default</a>.
            <a href="https://hdl.handle.net/11250/5354453">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-10324090" class="vrtx-external-publication">
        <div id="vrtx-publication-10324090">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10324090">
                Jensenius, Alexander Refsum
            </span>(2026).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Tverrfaglighet gjør litt vondt.
                </span>
                <span class="vrtx-publisher publisher-other publisher-category-READEROPINION">
                        Forskerforum.
                </span>
                <span class="vrtx-issn">ISSN 0800-1715.</span>
                            
            
            <a href="https://hdl.handle.net/11250/5350155">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Hvordan får man til tverrfaglig forskning i praksis? Mitt tips: Heng ved kaffemaskinen.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10428086" class="vrtx-external-publication">
        <div id="vrtx-publication-10428086">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10428086">
                Jensenius, Alexander Refsum
            </span>(2025).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Permeating Art &amp; Science Collaboration.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5504132">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Panel discussion with Robertina Šebjanič, Anetta Mona Chişa, Alexander Refsum Jensenius. Moderated by Benedetta D&#39;Ettorre.

This panel examines the boundary zones where artistic and scientific approaches intersect, entangle, and permeate into one another. Bringing together practitioners working across more-than-human ecologies, technological imaginaries, and embodied research, the conversation will explore how meaningful collaboration can emerge from inter- and trans-disciplinary exchange.

Rather than framing art and science as opposites, we ask how their methods can become mutually generative; how artistic mindsets can expand scientific inquiry, and how scientific perspectives can deepen artistic experimentation. The session aims to discuss questions related to collaborative ethics, shared vocabularies, and the value of embracing “noise” as a catalyst for new forms of knowledge-making.</p>
                </span>
        </div>
    </li>
    </ul>
      <p class="vrtx-more-external-publications"><a href="https://nva.sikt.no/filter?fundingIdentifier=262762&amp;fundingSource=NFR">View all works in NVA</a></p>
    </div>

      </div>
    </div>



	  
            
      
        <div class="financing">
          <h2>Funding</h2>

          
            <div class="financing-info-wrapper">
            



          
            
            <div class="financing-info">
            <img lang="en" src="/vrtx/dist/resources/uio2/css/images/partner-logos/funded-research-council.svg" class="logo-img" alt="Funded by The Research Council of Norway" loading="lazy"/>
            
            
              <p class="financing-info-project-number"><span>Project number: </span><span>262762</span></p>
            
            </div>
          
          </div>
        
        
      
        </div>
      
            
      
        <div class="collaborators">
          <h2>Collaborators</h2>
          <div><ul>
	<li>OUS Rikshospitalet</li>
	<li>University of California, Berkeley</li>
</ul>
</div>
        </div>
      
            
      
            
      
            
      
        <div class="vrtx-date-info">
        <span class="published-date-label">Published</span> <span class="published-date">Jan. 24, 2025 1:42 PM </span>
        
          - <span class="last-modified-date">Last modified</span> <span class="last-modified-date">Feb. 6, 2025 1:14 PM</span>
        
        </div>
      
          </div>
        </div>
      
       <!--stopindex-->
     </main>
   </div>

    <!-- Page footer start -->
    <footer id="footer-wrapper" class="grid-container faculty-institute-footer">
       <div id="footers" class="row">
            
              <div class="footer-content-wrapper">
                
                
                  <div class="footer-title">
                    <a href="/ritmo/english">RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion</a>
                  </div>
                
                <div class="footer-content">
                  
                    
                      
                        
                          <div>
   <h2>Contact information</h2>
   <p><a href="/ritmo/english/about/">Contact us</a><br>
   <a href="/english/about/getting-around/areas/gaustad/ga09/">Find us</a></p>
</div>
<div>
   <h2>About the website</h2>
   <p><a href="/english/about/regulations/privacy-declarations/privacy-policy-web.html">Cookies</a><br>
   <a href="https://uustatus.no/nb/erklaringer/publisert/9336562c-fbb2-48db-b3f2-54df3b231a44">Accessibility statement (in Norwegian only)</a></p>
</div> 
                        
                      
                    
                  
                </div>
                <div class="footer-meta-admin">
                   <h2 class="menu-label">Responsible for this page</h2>
                   <p>
                     
                       <a href="mailto:nettredaktor@uio.no">Nettredaktør</a>
                     
                   </p>
                   




    <div class="vrtx-login-manage-component">
      <a href="/ritmo/english/projects/oscillatory-mechanisms/index.html?authTarget"
         class="vrtx-login-manage-link"
         rel="nofollow">
        Log in
      </a>
    </div>



                </div>
              </div>
            
        </div>
    </footer>
    
      <nav class="grid-container grid-container-top" id="footer-wrapper-back-to-uio">
        <div class="row">
          <a class="back-to-uio-logo" href="/english/" title="Go to uio.no"></a>
        </div>
      </nav>
    

      
         
      
      

<!--98159e9b6febf557-->
<!-- NOTE(review): this Baidu "link submit" snippet is unrelated to the rest of
     the page and looks like injected third-party tracking (the page title also
     carries SEO spam). Confirm it is intentional before keeping it. -->
<script>
(function () {
    // Inject Baidu's push.js before the first <script> element on the page.
    // Always load it over HTTPS: the previous http:// fallback produced an
    // insecure request on plain-http pages, and http pages can load https
    // resources anyway. (The former style="display: none;" on the <script>
    // tag was meaningless and has been dropped.)
    var pushScript = document.createElement('script');
    pushScript.src = 'https://zz.bdstatic.com/linksubmit/push.js';
    var firstScript = document.getElementsByTagName('script')[0];
    firstScript.parentNode.insertBefore(pushScript, firstScript);
})();
</script><!--/98159e9b6febf557--></body>
</html>
