<?xml version="1.0" encoding="UTF-8"?>
<!-- OpenAIRE v4 (repo-lit) metadata record for the EuroVis 2022 short paper
     "Towards Multimodal Exploratory Data Analysis: SoniScope ..." (o:4883). -->
<resource xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xmlns:dc="http://purl.org/dc/elements/1.1/"
          xmlns:dcterms="http://purl.org/dc/terms/"
          xmlns:datacite="http://datacite.org/schema/kernel-4"
          xmlns="http://namespace.openaire.eu/schema/oaire/"
          xsi:schemaLocation="http://namespace.openaire.eu/schema/oaire/ https://www.openaire.eu/schema/repo-lit/4.0/openaire.xsd">
  <datacite:identifier identifierType="URL">https://phaidra.ustp.at/o:4883</datacite:identifier>
  <datacite:titles>
    <datacite:title xml:lang="en">Towards Multimodal Exploratory Data Analysis: SoniScope as a Prototypical Implementation</datacite:title>
  </datacite:titles>
  <datacite:creators>
    <datacite:creator>
      <datacite:creatorName nameType="Personal">Enge, Kajetan</datacite:creatorName>
      <datacite:givenName>Kajetan</datacite:givenName>
      <datacite:familyName>Enge</datacite:familyName>
      <datacite:nameIdentifier nameIdentifierScheme="ORCID" schemeURI="https://orcid.org/">0000-0002-5456-1140</datacite:nameIdentifier>
      <datacite:affiliation>St. Pölten University of Applied Sciences</datacite:affiliation>
    </datacite:creator>
    <datacite:creator>
      <datacite:creatorName nameType="Personal">Rind, Alexander</datacite:creatorName>
      <datacite:givenName>Alexander</datacite:givenName>
      <datacite:familyName>Rind</datacite:familyName>
      <datacite:nameIdentifier nameIdentifierScheme="ORCID" schemeURI="https://orcid.org/">0000-0001-8788-4600</datacite:nameIdentifier>
      <datacite:affiliation>St. Pölten University of Applied Sciences</datacite:affiliation>
    </datacite:creator>
    <datacite:creator>
      <datacite:creatorName nameType="Personal">Iber, Michael</datacite:creatorName>
      <datacite:givenName>Michael</datacite:givenName>
      <datacite:familyName>Iber</datacite:familyName>
      <datacite:nameIdentifier nameIdentifierScheme="ORCID" schemeURI="https://orcid.org/">0000-0002-5929-8716</datacite:nameIdentifier>
      <datacite:affiliation>St. Pölten University of Applied Sciences</datacite:affiliation>
    </datacite:creator>
    <datacite:creator>
      <datacite:creatorName nameType="Personal">Höldrich, Robert</datacite:creatorName>
      <datacite:givenName>Robert</datacite:givenName>
      <datacite:familyName>Höldrich</datacite:familyName>
      <datacite:nameIdentifier nameIdentifierScheme="ORCID" schemeURI="https://orcid.org/">0000-0002-6887-6637</datacite:nameIdentifier>
      <datacite:affiliation>University of Music and Performing Arts Graz</datacite:affiliation>
    </datacite:creator>
    <datacite:creator>
      <datacite:creatorName nameType="Personal">Aigner, Wolfgang</datacite:creatorName>
      <datacite:givenName>Wolfgang</datacite:givenName>
      <datacite:familyName>Aigner</datacite:familyName>
      <datacite:nameIdentifier nameIdentifierScheme="ORCID" schemeURI="https://orcid.org/">0000-0001-5762-1869</datacite:nameIdentifier>
      <datacite:affiliation>St. Pölten University of Applied Sciences</datacite:affiliation>
    </datacite:creator>
  </datacite:creators>
  <dc:publisher>The Eurographics Association</dc:publisher>
  <resourceType resourceTypeGeneral="literature" uri="http://purl.org/coar/resource_type/c_18cf">text</resourceType>
  <dc:language>eng</dc:language>
  <dc:description xml:lang="en">The metaphor of auscultating with a stethoscope can be an inspiration to combine visualization and sonification for exploratory data analysis. This paper presents SoniScope, a multimodal approach and its prototypical implementation based on this metaphor. It combines a scatterplot with an interactive parameter mapping sonification, thereby conveying additional information about items that were selected with a visual lens. SoniScope explores several design options for the shape of its lens and the sorting of the selected items for subsequent sonification. Furthermore, the open-source prototype serves as a blueprint framework for how to combine D3.js visualization and SuperCollider sonification in the Jupyter notebook environment.</dc:description>
  <datacite:subjects>
    <!-- NOTE(review): the source held the truncated value "based input / output";
         restored to the ACM CCS concept "Sound-based input / output", which matches
         the sibling CCS terms below — confirm against the published paper. -->
    <datacite:subject xml:lang="en">Sound-based input / output</datacite:subject>
    <datacite:subject xml:lang="en">Sound</datacite:subject>
    <datacite:subject xml:lang="en">Auditory feedback</datacite:subject>
    <datacite:subject xml:lang="en">Visualization systems and tools</datacite:subject>
    <datacite:subject xml:lang="en">Human centered computing</datacite:subject>
  </datacite:subjects>
  <licenseCondition uri="http://creativecommons.org/licenses/by/4.0/">http://creativecommons.org/licenses/by/4.0/</licenseCondition>
  <file mimeType="application/pdf" objectType="fulltext">https://phaidra.ustp.at/api/object/o:4883/download</file>
  <datacite:alternateIdentifiers>
    <datacite:alternateIdentifier alternateIdentifierType="ISBN">978-3-03868-184-7</datacite:alternateIdentifier>
    <datacite:alternateIdentifier alternateIdentifierType="DOI">10.2312/evs.20221095</datacite:alternateIdentifier>
  </datacite:alternateIdentifiers>
  <datacite:relatedIdentifiers>
    <datacite:relatedIdentifier relatedIdentifierType="URL" relationType="IsPartOf">https://phaidra.ustp.at/o:4776</datacite:relatedIdentifier>
    <datacite:relatedIdentifier relatedIdentifierType="URL" relationType="IsPartOf">https://phaidra.ustp.at/o:7146</datacite:relatedIdentifier>
  </datacite:relatedIdentifiers>
  <dc:format>application/pdf</dc:format>
  <dc:source>EuroVisShort2022 (2022)</dc:source>
  <citationTitle>EuroVisShort2022</citationTitle>
  <datacite:sizes>
    <datacite:size>1.38 MB</datacite:size>
  </datacite:sizes>
  <datacite:dates>
    <datacite:date dateType="Issued">2022</datacite:date>
  </datacite:dates>
</resource>