<?xml version="1.0" encoding="UTF-8"?>
<oai_dc:dc xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/oai_dc/ http://www.openarchives.org/OAI/2.0/oai_dc.xsd">
  <dc:format>application/pdf</dc:format>
  <dc:creator>Enge, Kajetan (St. Pölten University of Applied Sciences)</dc:creator>
  <dc:creator>Rind, Alexander (St. Pölten University of Applied Sciences)</dc:creator>
  <dc:creator>Iber, Michael (St. Pölten University of Applied Sciences)</dc:creator>
  <dc:creator>Höldrich, Robert (University of Music and Performing Arts Graz)</dc:creator>
  <dc:creator>Aigner, Wolfgang (St. Pölten University of Applied Sciences)</dc:creator>
  <dc:language>eng</dc:language>
  <dc:type xml:lang="eng">Text</dc:type>
  <dc:date>2022</dc:date>
  <dc:source>EuroVisShort2022 (2022)</dc:source>
  <dc:description xml:lang="eng">The metaphor of auscultating with a stethoscope can be an inspiration to combine visualization and sonification for exploratory data analysis. This paper presents SoniScope, a multimodal approach and its prototypical implementation based on this metaphor. It combines a scatterplot with an interactive parameter mapping sonification, thereby conveying additional information about items that were selected with a visual lens. SoniScope explores several design options for the shape of its lens and the sorting of the selected items for subsequent sonification. Furthermore, the open-source prototype serves as a blueprint framework for how to combine D3.js visualization and SuperCollider sonification in the Jupyter notebook environment.</dc:description>
  <dc:identifier>ISBN: 978-3-03868-184-7</dc:identifier>
  <dc:identifier>HTTP/WWW: https://diglib.eg.org:443/handle/10.2312/evs20221095</dc:identifier>
  <dc:identifier>HTTP/WWW: https://doi.org/10.2312/evs.20221095</dc:identifier>
  <dc:identifier>doi:10.2312/evs.20221095</dc:identifier>
  <dc:identifier>https://phaidra.ustp.at/o:4883</dc:identifier>
  <dc:title xml:lang="eng">Towards Multimodal Exploratory Data Analysis: SoniScope as a Prototypical Implementation</dc:title>
  <dc:rights xml:lang="eng">© 2022 The Author(s)
Eurographics Proceedings © 2022 The Eurographics Association.</dc:rights>
  <dc:rights>CC BY 4.0 International</dc:rights>
  <dc:rights>http://creativecommons.org/licenses/by/4.0/</dc:rights>
  <dc:subject xml:lang="eng">Sound-based input / output</dc:subject>
  <dc:subject xml:lang="eng">Auditory feedback</dc:subject>
  <dc:subject xml:lang="eng">Visualization systems and tools</dc:subject>
  <dc:subject xml:lang="eng">Human-centered computing</dc:subject>
  <dc:publisher>The Eurographics Association</dc:publisher>
</oai_dc:dc>