<?xml version="1.0" encoding="UTF-8"?>
<collection xmlns="http://www.loc.gov/MARC21/slim">
 <record>
  <leader>     caa a22        4500</leader>
  <controlfield tag="001">477133002</controlfield>
  <controlfield tag="003">CHVBK</controlfield>
  <controlfield tag="005">20180405111709.0</controlfield>
  <controlfield tag="007">cr unu---uuuuu</controlfield>
  <controlfield tag="008">170330e19970301xx      s     000 0 eng  </controlfield>
  <datafield tag="024" ind1="7" ind2="0">
   <subfield code="a">10.1007/s005300050044</subfield>
   <subfield code="2">doi</subfield>
  </datafield>
  <datafield tag="035" ind1=" " ind2=" ">
   <subfield code="a">(NATIONALLICENCE)springer-10.1007/s005300050044</subfield>
  </datafield>
  <datafield tag="245" ind1="0" ind2="0">
   <subfield code="a">Usage of multisensory information in scientific data sensualization</subfield>
   <subfield code="h">[Elektronische Daten]</subfield>
   <subfield code="c">[Tetsuro Ogi, Michitaka Hirose]</subfield>
  </datafield>
  <datafield tag="520" ind1="3" ind2=" ">
   <subfield code="a">Abstract: Multisensory scientific data sensualization methods that utilize virtual reality technology permit the use of several human sensations, such as visual, acoustic, and tactile sensation to display numerical data. The purposes of multisensory data sensualization can be classified as follows: (a) representing the relationships between different kinds of data; (b) displaying data utilizing sensory integration; and (c) representing conditions using the compound image. By using multisensory information, computers increase the ability to express data. However, these methods lead us to the question of which sensation should be used to display data most effectively. In this study, a multisensory data sensualization environment was developed in which color, loudness, sound frequency, and air flow pressure could be used to display scientific data. In particular, a wind sensation display prototype using air flow pressure was developed to generate tactile sensation. A basic experiment was conducted on sensory interference when subjects used two kinds of sensations simultaneously. From these results, guidelines for the usage of multisensory information for each purpose are proposed.</subfield>
  </datafield>
  <datafield tag="540" ind1=" " ind2=" ">
   <subfield code="a">Springer-Verlag Berlin Heidelberg, 1997</subfield>
  </datafield>
  <datafield tag="690" ind1=" " ind2="7">
    <subfield code="a">Key words: Scientific visualization - Virtual environments - Multisensory information - Wind sensation display - Data perception</subfield>
   <subfield code="2">nationallicence</subfield>
  </datafield>
  <datafield tag="700" ind1="1" ind2=" ">
   <subfield code="a">Ogi</subfield>
   <subfield code="D">Tetsuro</subfield>
   <subfield code="u">Intelligent Modeling Laboratory, University of Tokyo 2-11-16 Yayoi, Bunkyo-ku, Tokyo 113, Japan, e-mail: tetsu@ihl.t.u-tokyo.ac.jp, JP</subfield>
   <subfield code="4">aut</subfield>
  </datafield>
  <datafield tag="700" ind1="1" ind2=" ">
   <subfield code="a">Hirose</subfield>
   <subfield code="D">Michitaka</subfield>
   <subfield code="u">Faculty of Engineering, University of Tokyo 7-3-1 Hongo, Bunkyo-ku, Tokyo 113, Japan, e-mail: hirose@ihl.t.u-tokyo.ac.jp, JP</subfield>
   <subfield code="4">aut</subfield>
  </datafield>
  <datafield tag="856" ind1="4" ind2="0">
   <subfield code="u">https://doi.org/10.1007/s005300050044</subfield>
   <subfield code="q">text/html</subfield>
   <subfield code="z">Onlinezugriff via DOI</subfield>
  </datafield>
  <datafield tag="908" ind1=" " ind2=" ">
   <subfield code="D">1</subfield>
   <subfield code="a">research-article</subfield>
   <subfield code="2">jats</subfield>
  </datafield>
  <datafield tag="950" ind1=" " ind2=" ">
   <subfield code="B">NATIONALLICENCE</subfield>
   <subfield code="P">856</subfield>
   <subfield code="E">40</subfield>
   <subfield code="u">https://doi.org/10.1007/s005300050044</subfield>
   <subfield code="q">text/html</subfield>
   <subfield code="z">Onlinezugriff via DOI</subfield>
  </datafield>
  <datafield tag="950" ind1=" " ind2=" ">
   <subfield code="B">NATIONALLICENCE</subfield>
   <subfield code="P">700</subfield>
   <subfield code="E">1-</subfield>
   <subfield code="a">Ogi</subfield>
   <subfield code="D">Tetsuro</subfield>
   <subfield code="u">Intelligent Modeling Laboratory, University of Tokyo 2-11-16 Yayoi, Bunkyo-ku, Tokyo 113, Japan, e-mail: tetsu@ihl.t.u-tokyo.ac.jp, JP</subfield>
   <subfield code="4">aut</subfield>
  </datafield>
  <datafield tag="950" ind1=" " ind2=" ">
   <subfield code="B">NATIONALLICENCE</subfield>
   <subfield code="P">700</subfield>
   <subfield code="E">1-</subfield>
   <subfield code="a">Hirose</subfield>
   <subfield code="D">Michitaka</subfield>
   <subfield code="u">Faculty of Engineering, University of Tokyo 7-3-1 Hongo, Bunkyo-ku, Tokyo 113, Japan, e-mail: hirose@ihl.t.u-tokyo.ac.jp, JP</subfield>
   <subfield code="4">aut</subfield>
  </datafield>
  <datafield tag="900" ind1=" " ind2="7">
   <subfield code="a">Metadata rights reserved</subfield>
   <subfield code="b">Springer special CC-BY-NC licence</subfield>
   <subfield code="2">nationallicence</subfield>
  </datafield>
  <datafield tag="898" ind1=" " ind2=" ">
   <subfield code="a">BK010053</subfield>
   <subfield code="b">XK010053</subfield>
   <subfield code="c">XK010000</subfield>
  </datafield>
  <datafield tag="949" ind1=" " ind2=" ">
   <subfield code="B">NATIONALLICENCE</subfield>
   <subfield code="F">NATIONALLICENCE</subfield>
   <subfield code="b">NL-springer</subfield>
  </datafield>
 </record>
</collection>
