<?xml version="1.0" encoding="UTF-8"?>
<collection xmlns="http://www.loc.gov/MARC21/slim">
 <record>
  <leader>     caa a22        4500</leader>
  <controlfield tag="001">605448051</controlfield>
  <controlfield tag="003">CHVBK</controlfield>
  <controlfield tag="005">20210128100135.0</controlfield>
  <controlfield tag="007">cr unu---uuuuu</controlfield>
  <controlfield tag="008">210128e20150801xx      s     000 0 eng  </controlfield>
  <datafield tag="024" ind1="7" ind2="0">
   <subfield code="a">10.1007/s11042-014-1906-5</subfield>
   <subfield code="2">doi</subfield>
  </datafield>
  <datafield tag="035" ind1=" " ind2=" ">
   <subfield code="a">(NATIONALLICENCE)springer-10.1007/s11042-014-1906-5</subfield>
  </datafield>
  <datafield tag="245" ind1="0" ind2="2">
   <subfield code="a">A fast algorithm for YCbCr to perception color model conversion based on fixed-point DSP</subfield>
   <subfield code="h">[Elektronische Daten]</subfield>
   <subfield code="c">[Yifang Liu, Yunfeng Zhang, Caiming Zhang]</subfield>
  </datafield>
  <datafield tag="520" ind1="3" ind2=" ">
   <subfield code="a">Real-time video processing heavily relies on the color space conversion. Due to the real-time requirement, the traditional conversion methods often suffer from the moderate conversion speed, inaccuracy and low video quality. In order to meet the efficiency requirement of color space conversion in real-time video systems, we present a fast algorithm for color space YCbCr to perception color model conversion, which is based on the simplified shift and look-up table (SSLUT). The approach can be divided into two steps. At first, the simplified fixed-point shift method is used to convert YCbCr to RGB. Then look-up table assists with the YCbCr to perception color model conversion, such as HSV, HSI and HSL. To validate the proposed fast algorithm, the conversion speed and accuracy are compared with the traditional methods based on Code Composer Studio (CCS) test platform. Moreover, we make the video quality evaluation by using the peak signal-to-ratio (PSNR) and the structural similarity (SSIM). Experimental results illustrate the real-time, robustness and accuracy of the fast algorithm.</subfield>
  </datafield>
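  <!--
   Editor's note: the abstract above outlines a two-step conversion, a fixed-point
   shift from YCbCr to RGB followed by a look-up table for the RGB to HSV/HSI/HSL
   step. The C sketch below (kept in an XML comment so the record stays well-formed)
   is a minimal illustration of that idea, not the paper's implementation: the
   BT.601 full-range coefficients, the Q8 scaling, and the reciprocal-table layout
   are all assumptions, and the actual SSLUT in the article may differ.

   #include <stdint.h>

   /* Precomputed reciprocal tables so the per-pixel HSV divisions
      become one multiply and one shift (Q8 fixed point). */
   static uint16_t sat_lut[256];   /* sat_lut[v] = (255 << 8) / v */
   static uint16_t hue_lut[256];   /* hue_lut[d] = (43 << 8) / d  */

   static void sslut_init(void) {
       int i;
       sat_lut[0] = hue_lut[0] = 0;
       for (i = 1; i < 256; i++) {
           sat_lut[i] = (uint16_t)((255 << 8) / i);
           hue_lut[i] = (uint16_t)((43 << 8) / i);
       }
   }

   static uint8_t clamp_u8(int v) {
       return (uint8_t)(v < 0 ? 0 : (v > 255 ? 255 : v));
   }

   /* Step 1: fixed-point YCbCr to RGB, BT.601 full-range coefficients
      rounded to Q8 so each multiply pairs with a single right shift. */
   static void ycbcr_to_rgb(uint8_t y, uint8_t cb, uint8_t cr,
                            uint8_t *r, uint8_t *g, uint8_t *b) {
       int c = (int)y;
       int d = (int)cb - 128;
       int e = (int)cr - 128;
       *r = clamp_u8(c + ((359 * e) >> 8));           /* 1.402 ~ 359/256 */
       *g = clamp_u8(c - ((88 * d + 183 * e) >> 8));  /* 0.344, 0.714    */
       *b = clamp_u8(c + ((454 * d) >> 8));           /* 1.772 ~ 454/256 */
   }

   /* Step 2: RGB to HSV with the divisions served from the tables.
      H is on a 0..255 scale (43 units per 60-degree sector), wrapping
      modulo 256 when the sector offset goes negative. */
   static void rgb_to_hsv(uint8_t r, uint8_t g, uint8_t b,
                          uint8_t *h, uint8_t *s, uint8_t *v) {
       uint8_t mx = r > g ? (r > b ? r : b) : (g > b ? g : b);
       uint8_t mn = r < g ? (r < b ? r : b) : (g < b ? g : b);
       int delta = mx - mn;
       *v = mx;
       *s = (uint8_t)((delta * sat_lut[mx]) >> 8);    /* S = 255*delta/V */
       if (delta == 0) { *h = 0; return; }
       if (mx == r)
           *h = (uint8_t)((((int)g - b) * hue_lut[delta]) >> 8);
       else if (mx == g)
           *h = (uint8_t)(85 + ((((int)b - r) * hue_lut[delta]) >> 8));
       else
           *h = (uint8_t)(171 + ((((int)r - g) * hue_lut[delta]) >> 8));
   }
  -->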
  <datafield tag="540" ind1=" " ind2=" ">
   <subfield code="a">Springer Science+Business Media New York, 2014</subfield>
  </datafield>
  <datafield tag="690" ind1=" " ind2="7">
   <subfield code="a">Real-time video system</subfield>
   <subfield code="2">nationallicence</subfield>
  </datafield>
  <datafield tag="690" ind1=" " ind2="7">
   <subfield code="a">Color space conversion</subfield>
   <subfield code="2">nationallicence</subfield>
  </datafield>
  <datafield tag="690" ind1=" " ind2="7">
   <subfield code="a">Perception color model</subfield>
   <subfield code="2">nationallicence</subfield>
  </datafield>
  <datafield tag="690" ind1=" " ind2="7">
   <subfield code="a">Video quality assessment</subfield>
   <subfield code="2">nationallicence</subfield>
  </datafield>
  <datafield tag="700" ind1="1" ind2=" ">
   <subfield code="a">Liu</subfield>
   <subfield code="D">Yifang</subfield>
   <subfield code="u">School of Computer Science &amp; Technology, Shandong University of Finance and Economics, 250014, Jinan, China</subfield>
   <subfield code="4">aut</subfield>
  </datafield>
  <datafield tag="700" ind1="1" ind2=" ">
   <subfield code="a">Zhang</subfield>
   <subfield code="D">Yunfeng</subfield>
   <subfield code="u">School of Computer Science &amp; Technology, Shandong University of Finance and Economics, 250014, Jinan, China</subfield>
   <subfield code="4">aut</subfield>
  </datafield>
  <datafield tag="700" ind1="1" ind2=" ">
   <subfield code="a">Zhang</subfield>
   <subfield code="D">Caiming</subfield>
   <subfield code="u">School of Computer Science &amp; Technology, Shandong University of Finance and Economics, 250014, Jinan, China</subfield>
   <subfield code="4">aut</subfield>
  </datafield>
  <datafield tag="773" ind1="0" ind2=" ">
   <subfield code="t">Multimedia Tools and Applications</subfield>
   <subfield code="d">Springer US; http://www.springer-ny.com</subfield>
   <subfield code="g">74/15(2015-08-01), 6041-6067</subfield>
   <subfield code="x">1380-7501</subfield>
   <subfield code="q">74:15&lt;6041</subfield>
   <subfield code="1">2015</subfield>
   <subfield code="2">74</subfield>
   <subfield code="o">11042</subfield>
  </datafield>
  <datafield tag="856" ind1="4" ind2="0">
   <subfield code="u">https://doi.org/10.1007/s11042-014-1906-5</subfield>
   <subfield code="q">text/html</subfield>
   <subfield code="z">Onlinezugriff via DOI</subfield>
  </datafield>
  <datafield tag="898" ind1=" " ind2=" ">
   <subfield code="a">BK010053</subfield>
   <subfield code="b">XK010053</subfield>
   <subfield code="c">XK010000</subfield>
  </datafield>
  <datafield tag="900" ind1=" " ind2="7">
   <subfield code="a">Metadata rights reserved</subfield>
   <subfield code="b">Springer special CC-BY-NC licence</subfield>
   <subfield code="2">nationallicence</subfield>
  </datafield>
  <datafield tag="908" ind1=" " ind2=" ">
   <subfield code="D">1</subfield>
   <subfield code="a">research-article</subfield>
   <subfield code="2">jats</subfield>
  </datafield>
  <datafield tag="949" ind1=" " ind2=" ">
   <subfield code="B">NATIONALLICENCE</subfield>
   <subfield code="F">NATIONALLICENCE</subfield>
   <subfield code="b">NL-springer</subfield>
  </datafield>
  <datafield tag="950" ind1=" " ind2=" ">
   <subfield code="B">NATIONALLICENCE</subfield>
   <subfield code="P">856</subfield>
   <subfield code="E">40</subfield>
   <subfield code="u">https://doi.org/10.1007/s11042-014-1906-5</subfield>
   <subfield code="q">text/html</subfield>
   <subfield code="z">Onlinezugriff via DOI</subfield>
  </datafield>
  <datafield tag="950" ind1=" " ind2=" ">
   <subfield code="B">NATIONALLICENCE</subfield>
   <subfield code="P">700</subfield>
   <subfield code="E">1-</subfield>
   <subfield code="a">Liu</subfield>
   <subfield code="D">Yifang</subfield>
   <subfield code="u">School of Computer Science &amp; Technology, Shandong University of Finance and Economics, 250014, Jinan, China</subfield>
   <subfield code="4">aut</subfield>
  </datafield>
  <datafield tag="950" ind1=" " ind2=" ">
   <subfield code="B">NATIONALLICENCE</subfield>
   <subfield code="P">700</subfield>
   <subfield code="E">1-</subfield>
   <subfield code="a">Zhang</subfield>
   <subfield code="D">Yunfeng</subfield>
   <subfield code="u">School of Computer Science &amp; Technology, Shandong University of Finance and Economics, 250014, Jinan, China</subfield>
   <subfield code="4">aut</subfield>
  </datafield>
  <datafield tag="950" ind1=" " ind2=" ">
   <subfield code="B">NATIONALLICENCE</subfield>
   <subfield code="P">700</subfield>
   <subfield code="E">1-</subfield>
   <subfield code="a">Zhang</subfield>
   <subfield code="D">Caiming</subfield>
   <subfield code="u">School of Computer Science &amp; Technology, Shandong University of Finance and Economics, 250014, Jinan, China</subfield>
   <subfield code="4">aut</subfield>
  </datafield>
  <datafield tag="950" ind1=" " ind2=" ">
   <subfield code="B">NATIONALLICENCE</subfield>
   <subfield code="P">773</subfield>
   <subfield code="E">0-</subfield>
   <subfield code="t">Multimedia Tools and Applications</subfield>
   <subfield code="d">Springer US; http://www.springer-ny.com</subfield>
   <subfield code="g">74/15(2015-08-01), 6041-6067</subfield>
   <subfield code="x">1380-7501</subfield>
   <subfield code="q">74:15&lt;6041</subfield>
   <subfield code="1">2015</subfield>
   <subfield code="2">74</subfield>
   <subfield code="o">11042</subfield>
  </datafield>
 </record>
</collection>
