<?xml version="1.0" encoding="UTF-8"?>
<!-- MARCXML (MARC21slim) bibliographic record for a journal article.
     Source system appears to be a Swiss "national licence" aggregator
     (035 prefix NATIONALLICENCE, 003 CHVBK) - NOTE(review): assumption
     based on field values, confirm against the producing system. -->
<collection xmlns="http://www.loc.gov/MARC21/slim">
 <record>
  <!-- Leader: 24-char fixed field; record length (pos 0-4) left blank by the exporter. -->
  <leader>     naa a22        4500</leader>
  <!-- Control fields: 001 record id, 003 control number agency, 005 last-transaction
       timestamp (yyyymmddhhmmss.f), 007 physical description, 008 fixed-length data. -->
  <controlfield tag="001">510776922</controlfield>
  <controlfield tag="003">CHVBK</controlfield>
  <controlfield tag="005">20180411083233.0</controlfield>
  <controlfield tag="007">cr unu---uuuuu</controlfield>
  <controlfield tag="008">180411e20130901xx      s     000 0 eng  </controlfield>
  <!-- 024: standard identifier; $2 says the scheme is a DOI. -->
  <datafield tag="024" ind1="7" ind2="0">
   <subfield code="a">10.3758/s13428-012-0286-x</subfield>
   <subfield code="2">doi</subfield>
  </datafield>
  <!-- 035: system control number (aggregator-prefixed DOI). -->
  <datafield tag="035" ind1=" " ind2=" ">
   <subfield code="a">(NATIONALLICENCE)springer-10.3758/s13428-012-0286-x</subfield>
  </datafield>
  <!-- 100: main-entry personal name. Uppercase $D (forename) is non-standard MARC;
       it is used consistently here and mirrored in the 950 copy below, so it looks
       like a deliberate local convention - NOTE(review): confirm before normalizing. -->
  <datafield tag="100" ind1="1" ind2=" ">
   <subfield code="a">Sogo</subfield>
   <subfield code="D">Hiroyuki</subfield>
   <subfield code="u">Ehime University, 3 Bunkyo-cho, 790-8577, Matsuyama, Ehime, Japan</subfield>
   <subfield code="4">aut</subfield>
  </datafield>
  <!-- 245: title statement ($a title, $h medium, $c statement of responsibility). -->
  <datafield tag="245" ind1="1" ind2="0">
   <subfield code="a">GazeParser: an open-source and multiplatform library for low-cost eye tracking and analysis</subfield>
   <subfield code="h">[Elektronische Daten]</subfield>
   <subfield code="c">[Hiroyuki Sogo]</subfield>
  </datafield>
  <!-- 520 (ind1=3): abstract. Text node is data; do not re-wrap or reformat. -->
  <datafield tag="520" ind1="3" ind2=" ">
   <subfield code="a">Eye movement analysis is an effective method for research on visual perception and cognition. However, recordings of eye movements present practical difficulties related to the cost of the recording devices and the programming of device controls for use in experiments. GazeParser is an open-source library for low-cost eye tracking and data analysis; it consists of a video-based eyetracker and libraries for data recording and analysis. The libraries are written in Python and can be used in conjunction with PsychoPy and VisionEgg experimental control libraries. Three eye movement experiments are reported on performance tests of GazeParser. These showed that the means and standard deviations for errors in sampling intervals were less than 1ms. Spatial accuracy ranged from 0.7° to 1.2°, depending on participant. In gap/overlap tasks and antisaccade tasks, the latency and amplitude of the saccades detected by GazeParser agreed with those detected by a commercial eyetracker. These results showed that the GazeParser demonstrates adequate performance for use in psychological experiments.</subfield>
  </datafield>
  <!-- 540: terms governing use (rights statement). -->
  <datafield tag="540" ind1=" " ind2=" ">
   <subfield code="a">The Author(s), 2012</subfield>
  </datafield>
  <!-- 690 (local-use tag): uncontrolled subject terms; $2 names the local source. -->
  <datafield tag="690" ind1=" " ind2="7">
   <subfield code="a">Eye tracking</subfield>
   <subfield code="2">nationallicence</subfield>
  </datafield>
  <datafield tag="690" ind1=" " ind2="7">
   <subfield code="a">Open source</subfield>
   <subfield code="2">nationallicence</subfield>
  </datafield>
  <datafield tag="690" ind1=" " ind2="7">
   <subfield code="a">Python</subfield>
   <subfield code="2">nationallicence</subfield>
  </datafield>
  <!-- 773: host item entry - the journal this article appears in
       ($t title, $g volume/issue(date), pages; numeric subfields are local usage). -->
  <datafield tag="773" ind1="0" ind2=" ">
   <subfield code="t">Behavior Research Methods</subfield>
   <subfield code="d">Springer US; http://www.springer-ny.com</subfield>
   <subfield code="g">45/3(2013-09-01), 684-695</subfield>
   <subfield code="q">45:3&lt;684</subfield>
   <subfield code="1">2013</subfield>
   <subfield code="2">45</subfield>
   <subfield code="o">13428</subfield>
  </datafield>
  <!-- 856 (ind 4,0): electronic location - resolvable DOI link to the resource itself. -->
  <datafield tag="856" ind1="4" ind2="0">
   <subfield code="u">https://doi.org/10.3758/s13428-012-0286-x</subfield>
   <subfield code="q">text/html</subfield>
   <subfield code="z">Onlinezugriff via DOI</subfield>
  </datafield>
  <!-- 9xx fields below are local/system-defined (not standard MARC 21).
       The 950 fields mirror 856/100/773 with provenance markers ($B, $P = source tag). -->
  <datafield tag="908" ind1=" " ind2=" ">
   <subfield code="D">1</subfield>
   <subfield code="a">research-article</subfield>
   <subfield code="2">jats</subfield>
  </datafield>
  <datafield tag="950" ind1=" " ind2=" ">
   <subfield code="B">NATIONALLICENCE</subfield>
   <subfield code="P">856</subfield>
   <subfield code="E">40</subfield>
   <subfield code="u">https://doi.org/10.3758/s13428-012-0286-x</subfield>
   <subfield code="q">text/html</subfield>
   <subfield code="z">Onlinezugriff via DOI</subfield>
  </datafield>
  <datafield tag="950" ind1=" " ind2=" ">
   <subfield code="B">NATIONALLICENCE</subfield>
   <subfield code="P">100</subfield>
   <subfield code="E">1-</subfield>
   <subfield code="a">Sogo</subfield>
   <subfield code="D">Hiroyuki</subfield>
   <subfield code="u">Ehime University, 3 Bunkyo-cho, 790-8577, Matsuyama, Ehime, Japan</subfield>
   <subfield code="4">aut</subfield>
  </datafield>
  <datafield tag="950" ind1=" " ind2=" ">
   <subfield code="B">NATIONALLICENCE</subfield>
   <subfield code="P">773</subfield>
   <subfield code="E">0-</subfield>
   <subfield code="t">Behavior Research Methods</subfield>
   <subfield code="d">Springer US; http://www.springer-ny.com</subfield>
   <subfield code="g">45/3(2013-09-01), 684-695</subfield>
   <subfield code="q">45:3&lt;684</subfield>
   <subfield code="1">2013</subfield>
   <subfield code="2">45</subfield>
   <subfield code="o">13428</subfield>
  </datafield>
  <!-- 900: metadata rights statement (local). -->
  <datafield tag="900" ind1=" " ind2="7">
   <subfield code="a">Metadata rights reserved</subfield>
   <subfield code="b">Springer special CC-BY-NC licence</subfield>
   <subfield code="2">nationallicence</subfield>
  </datafield>
  <!-- 898 / 949: local collection and licence grouping codes. -->
  <datafield tag="898" ind1=" " ind2=" ">
   <subfield code="a">BK010053</subfield>
   <subfield code="b">XK010053</subfield>
   <subfield code="c">XK010000</subfield>
  </datafield>
  <datafield tag="949" ind1=" " ind2=" ">
   <subfield code="B">NATIONALLICENCE</subfield>
   <subfield code="F">NATIONALLICENCE</subfield>
   <subfield code="b">NL-springer</subfield>
  </datafield>
 </record>
</collection>
