/*
 * VideoDevice.cc — CVideoDevice implementation (camstream package).
 */

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>

#undef TRACE_VIDEODEV_READ
#define TRACE_VIDEODEV_MEM

#include "ccvt.h"
#include "VideoDevice.h"

#ifdef HAVE_PWCIOCTL_H
#include "pwc-ioctl.h"
#endif

/**
  \class CVideoDevice
  \brief A device wrapper for Video4Linux devices
  
  This class wraps itself around a Video4Linux device; at the moment it
  is primarely oriented at webcams, but grabber cards are supported
  as well. This device can be 'opened' multiple times, so more than one
  class can use this device (of course they all get the same picture).
  
  The class can return video images in RGB, YUV or both formats; it also
  has an optional buffer scheme for the images; this can prevent the
  otherwise deep copy that is needed when, for example, image diffs
  need to be calculated.

  The class will use mmap() when it is supported by the device driver. Also
  some device support select(); if not, a QTimer is used. Use the Framerate
  functions to set the framerate or timer.
*/

/**
  \fn CVideoDevice::CVideoDevice(const QString &node_name)
  \param node_name The /dev device name of the video device
  
  The constructor will do some basic checks to see if this is a valid device.
  If yes, \ref IsValid() will return TRUE, otherwise FALSE.
 */  

00052 CVideoDevice::CVideoDevice(const QString &node_name)
{
   struct video_capability vcap;
   int i;

   Opened = 0;
   Buffers = 0;
   FrameRate = 10;
   PalRGB = 0;
   PalYUV = 0;
   Capturing = 0;
   rgb_vid_buffer = yuv_vid_buffer = vid_io_buffer = NULL;
   vid_io_buffer_size = vid_io_image_size = 0;
   NodeName = node_name;
   pImageSocket = NULL;
   pImageTimer = NULL;
   validated = FALSE;
   CurrentVideoInput = -1;
   CurrentAudioInput = -1;
   memset(&VMBuf, 0, sizeof(VMBuf));
   max_w = 0;
   max_h = 0;
   min_w = 0;
   min_h = 0;

   VideoInputs.setAutoDelete(TRUE);
   AudioInputs.setAutoDelete(TRUE);
   
   RGB.setAutoDelete(TRUE);
   Y.setAutoDelete(TRUE);
   U.setAutoDelete(TRUE);
   V.setAutoDelete(TRUE);
   
   for (i = 0; i < 256; i++)
      GrayScale[i] = qRgb(i, i, i);
   pNullImage = new QImage();
   
   // Do a small test
   CamFD = ::open((const char *)node_name, O_RDONLY);
   if (CamFD >= 0) {
     if (ioctl(CamFD, VIDIOCGCAP, &vcap) < 0) {
       qDebug("CVideoDevice::CVideoDevice() could not query capabilities; is this really a video device?");
     }
     else {
       validated = TRUE;
       max_w = vcap.maxwidth;
       max_h = vcap.maxheight;
       min_w = vcap.minwidth;
       min_h = vcap.minheight;
       vcap.name[31] = '\0';
       IntfName = vcap.name;

       /* Query video inputs... */
       VideoInputs.clear();
       if (vcap.channels > 0) {
         VideoInputs.resize(vcap.channels);
         for (i = 0; i < vcap.channels; i++)
            VideoInputs.insert(i, new CVideoDeviceInput(this, i));
       }
       /* ...and audio inputs */
       AudioInputs.clear();
       if (vcap.audios > 0) {
         AudioInputs.resize(vcap.audios);
         for (i = 0; i < vcap.audios; i++)
            AudioInputs.insert(i, new CVideoAudioInput(this, i));
       }
     }
     ::close(CamFD);
   }
   else 
     if (errno == EBUSY)
       validated = TRUE;
   CamFD = -1;
}

/**
  \fn CVideoDevice::~CVideoDevice()
  
  Destroys the object; also closes the associated file descriptor
 */
00132 CVideoDevice::~CVideoDevice()
{
   if (Opened > 1)
     qWarning("Warning: CVideoDevice `%s' was destroyed when it was in use more than once.", (const char *)NodeName);
   CleanUp();
   if (CamFD >= 0) {
     close(CamFD);
     emit Closed();
   }

   ResetImagesRGB();
   ResetImagesYUV();
   delete pNullImage;
}

// private


/** 
  \brief Gather data from video device, like name, size, etc. Also initializes buffers.
  
  This function must be called after every first open.
*/
00155 void CVideoDevice::Init()
{
   struct video_window vwin;
   int i;

qDebug("CVideoDevice::Init()");
   if (CamFD < 0) {
     qDebug("CVideoDevice::Init: device not openend.");
     return;
   }
   if (Opened > 1) {
     qDebug("CVideoDevice::Init: device already opened.");
     return;
   }
   
   if (ioctl(CamFD, VIDIOCGPICT, &VPic) < 0) {
     qDebug("CVideoDevice::Init: could not get picture parameters. (duh?)");
     return;
   }
   Palette = VPic.palette; // To start with

   HasFramerate = FALSE;
   memset(&vwin, 0, sizeof(struct video_window));
   if (ioctl(CamFD, VIDIOCGWIN, &vwin) == 0) {
#if defined(PWC_FPS_SHIFT)
     if ((vwin.flags & PWC_FPS_FRMASK) >> PWC_FPS_SHIFT) {
       HasFramerate = TRUE;
//qDebug("VideoDevice supports framerate setting [%x].", vwin.flags);
     }
#endif
   }
   CurBuffer = 0;

   GetSize(); // Get current image size

   /* See if device has mmap(); */
   VMBuf.size = 0;
   VMBuf.frames = 0;
   vid_io_buffer_size = 0;
   if (ioctl(CamFD, VIDIOCGMBUF, &VMBuf) == 0) {
qDebug("Using mmap(), VMBuf.size = %d", VMBuf.size);
     vid_io_buffer_size = VMBuf.size;
     if (Buffers > 0) {
       // User specified a number of buffers; see if we can match that.
       if (VMBuf.frames < Buffers) {
         qWarning("CVideoDevice::Init(): there are more buffers requested than MBUF can provide. Limiting buffers.\n");
         Buffers = VMBuf.frames;
       }
     }
     else // We grab everything we can.
       Buffers = VMBuf.frames;
   }
   else {
     VMBuf.size = 0; // Just to be sure....
     VMBuf.frames = 0;
   }
   
   /* See if we can actually mmap() the memory */
   if (VMBuf.size > 0) {
     vid_io_buffer = (uchar *)mmap(NULL, vid_io_buffer_size, PROT_READ, MAP_SHARED, CamFD, 0);
     if (vid_io_buffer == (uchar *)-1) {
       qWarning("CVideoDevice::Init(): mmap() failed (%d). Falling back to non-mmap()ed mode.", errno);
       VMBuf.size = 0;
       vid_io_buffer = NULL;
     }
     else {
       vid_io_offsets.resize(Buffers);
       for (i = 0; i < Buffers; i++)
          vid_io_offsets[i] = VMBuf.offsets[i];
     }
   }

   if (VMBuf.size == 0) { // No mmap or failed: allocate a buffer
qDebug("Allocating own buffer.");
     if (Buffers < 0)
       Buffers = 4; // In case the user didn't specify a buffer size, we make one.
     vid_io_buffer_size = Buffers * max_w * max_h * 4;
     vid_io_buffer = new uchar[vid_io_buffer_size];
     vid_io_offsets.resize(Buffers);
     for (i = 0; i < Buffers; i++)
        vid_io_offsets[i] = i * max_w * max_h * 4;
   }
   if (vid_io_buffer == NULL) {
     qWarning("CVideoDevice::Init() Failed to mmap/allocate memory!");
   }
}

/** 
  \brief Cleans up mmap()ed stuff and buffers

  This function must be called after the last Close()
*/
00247 void CVideoDevice::CleanUp()
{
   if (VMBuf.size > 0) {
     MSync(); // Just to make sure
     munmap(vid_io_buffer, vid_io_buffer_size);
     VMBuf.size = 0;
   }
   else
     delete [] vid_io_buffer;
   vid_io_buffer = NULL;
   vid_io_buffer_size = 0;
}

/**
  \fn bool CVideoDevice::TryPalette(int pal, int depth)
  \brief Tries to set a VIDEO_PALETTE_* palette.
  \param pal One of the VIDEO_PALETTE_* palettes.
  \param depth visual depth (?) [Okay, CPIA driver, have it your way *GRRR*]
  \return \b TRUE on success, \b FALSE on failure
 */
00267 bool CVideoDevice::TryPalette(int pal, int depth)
{
   VPic.palette = pal;
   VPic.depth = depth;
   if (ioctl(CamFD, VIDIOCSPICT, &VPic) < 0)
     return FALSE;
   /* Sigh. It was to be expected. The OV511 and IBMCam don't pay attention to the palette field */
   if (ioctl(CamFD, VIDIOCGPICT, &VPic) < 0)
     return FALSE;
   if (VPic.palette == pal) {
     Palette = pal;
     return TRUE;
   }
   return FALSE;
}

/*
  Pick the most preferable device palette, given whether YUV and/or RGB
  output was requested (YUV wins when both are wanted), then recompute
  the per-frame I/O size. (Fix: the palette-name table now binds string
  literals to const char* — binding them to non-const char* is a
  deprecated/ill-formed conversion in C++.)
*/
void CVideoDevice::SetPalette()
{
   /* Determine most preferable palette... With more and more
      color conversion going into the apps, we must try quite a few
      palettes before we hit one that we like.
      In case we want both YUV and RGB output, we prefer the YUV output.
    */
   static const char *pal_names[17] = { "", "grey", "hi240", "rgb565" ,"rgb24", "rgb32",
                                        "rgb555", "yuv422", "yuyv" , "uyvy", "yuv420",
                                        "yuv411", "raw", "yuv422p", "yuv411p", "yuv420p",
                                        "yuv410p" };
   Palette = 0;
   if (CamFD < 0)
     return;

   // Short-circuit chains: stop at the first palette the driver confirms.
   if (PalYUV) {
     TryPalette(VIDEO_PALETTE_YUV420P, 16) ||
     TryPalette(VIDEO_PALETTE_YUYV, 16)    ||
     TryPalette(VIDEO_PALETTE_YUV422, 16)  ||
     TryPalette(VIDEO_PALETTE_RGB32, 32)   ||
     TryPalette(VIDEO_PALETTE_RGB24, 24)   ||
     TryPalette(VIDEO_PALETTE_GREY, 8);
   }
   else if (PalRGB) {
     TryPalette(VIDEO_PALETTE_RGB32, 32)   ||
     TryPalette(VIDEO_PALETTE_RGB24, 24)   ||
     TryPalette(VIDEO_PALETTE_YUV422, 16)  ||
     TryPalette(VIDEO_PALETTE_YUYV, 16)    ||
     TryPalette(VIDEO_PALETTE_YUV420P, 16) ||
     TryPalette(VIDEO_PALETTE_GREY, 8);
   }
   qDebug("CVideoDevice::SetPalette picked palette %d [%s]", Palette, pal_names[Palette]);
   CalcVidIoSize();
}


/*
  Recompute vid_io_image_size: the number of bytes one frame occupies in
  the I/O buffer for the currently selected palette (0 for unknown ones).
*/
void CVideoDevice::CalcVidIoSize()
{
   const int pixels = image_w * image_h;

   switch (Palette) {
     case VIDEO_PALETTE_GREY:
       vid_io_image_size = pixels;            // 1 byte per pixel
       break;
     case VIDEO_PALETTE_YUV420P:
       vid_io_image_size = pixels * 3 / 2;    // planar 4:2:0, 1.5 bytes per pixel
       break;
     case VIDEO_PALETTE_YUV422:
     case VIDEO_PALETTE_YUYV:
       vid_io_image_size = pixels * 2;        // packed 4:2:2
       break;
     case VIDEO_PALETTE_RGB24:
       vid_io_image_size = pixels * 3;
       break;
     case VIDEO_PALETTE_RGB32:
       vid_io_image_size = pixels * 4;
       break;
     default:
       vid_io_image_size = 0;                 // palette not supported here
       break;
   }
}

/** 
  \brief Do ioctl(MCAPTURE), start grabbing frame in buffer
  \param buf The mmap buffer (will be set modulo total buffers)
  
  This function will start the capture of an image into buffer \b buf
  with the current width & height
 */
00339 int CVideoDevice::MCapture(int buf)
{
   struct video_mmap vm;
   if (VMBuf.size == 0)
     return 0; // ignore call

   ASSERT(buf >= 0 && buf <= Buffers);
   vm.frame = buf;
   vm.format = Palette;
   vm.width = image_w;
   vm.height = image_h;
#ifdef TRACE_VIDEODEV_READ
   qDebug("CVideoDevice::MCapture(): buffer %d, format %d, size (%dx%d)", buf, Palette, image_w, image_h);
#endif  
   if (ioctl(CamFD, VIDIOCMCAPTURE, &vm) < 0) {
     perror("CVideoDevice::MCapture() ioctl");
     return -errno;
   }
   return 0;
}

/**
  \brief Do ioctl(SYNC), releasing buffer
*/
00363 int CVideoDevice::MSync()
{
   if (VMBuf.size == 0)
     return 0; // ignore call

#ifdef TRACE_VIDEODEV_READ
   qDebug("CVideoDevice::MSync()   : buffer %d", CurBuffer);
#endif  
   if (ioctl(CamFD, VIDIOCSYNC, &CurBuffer) < 0) {
     perror("CVideoDevice::MSync() ioctl");
     return -errno;
   }
   return 0;
}

/*
  Build the per-buffer RGB QImage wrappers. When the device already
  delivers RGB32, the images alias the device I/O buffer directly
  (zero-copy); otherwise a private RGB32 conversion buffer is allocated
  and the images point into that. ResetImagesRGB() is the counterpart
  that tears this down.
*/
void CVideoDevice::CreateImagesRGB()
{
#ifdef TRACE_VIDEODEV_MEM
qDebug("CVideoDevice::CreateImagesRGB()");
#endif
   /* If we have picked RGB32 as our input format, we map directly into
      our image buffer.
    */
   //rgb_image_size = image_w * image_h * 4;
   if (Palette == VIDEO_PALETTE_RGB32) {
#ifdef TRACE_VIDEODEV_MEM
qDebug(" using pre-allocated memory");
#endif
     // Alias: rgb_vid_buffer == vid_io_buffer; ResetImagesRGB() checks for
     // this and will not free it.
     rgb_vid_buffer = vid_io_buffer;
     rgb_vid_offsets = vid_io_offsets;
     //vid_image_size = rgb_image_size;
   }
   else {
#ifdef TRACE_VIDEODEV_MEM
qDebug(" allocating space for RGB");   
#endif
     // Private buffer: Buffers frames of 4 bytes/pixel RGB32.
     rgb_vid_buffer = new uchar[Buffers * image_w * image_h * 4];
     rgb_vid_offsets.resize(Buffers);
     for (int i = 0; i < Buffers; i++) 
        rgb_vid_offsets[i] = i * image_w * image_h * 4;
   }

   // QImages share the buffer memory (no deep copy): one 32-bpp image per buffer.
   RGB.resize((uint)Buffers);
   for (int i = 0; i < Buffers; i++)
      RGB.insert(i, new QImage(rgb_vid_buffer + rgb_vid_offsets[i], image_w, image_h, 32, NULL, 0, QImage::LittleEndian));
}

/*
  Tear down the RGB QImage wrappers built by CreateImagesRGB(). The
  conversion buffer is only freed when it was privately allocated; when
  the images aliased the device I/O buffer that memory is left alone.
*/
void CVideoDevice::ResetImagesRGB()
{
#ifdef TRACE_VIDEODEV_MEM
qDebug("CVideoDevice::ResetImagesRGB()");
#endif
   RGB.clear();
   rgb_vid_offsets.resize(0);

   const bool privately_allocated =
      (rgb_vid_buffer != 0) && (rgb_vid_buffer != vid_io_buffer);
   if (privately_allocated) {
#ifdef TRACE_VIDEODEV_MEM
qDebug(" freeing memory");
#endif
     delete [] rgb_vid_buffer;
   }
   rgb_vid_buffer = NULL;
}

/*
  Build the per-buffer Y/U/V plane QImage wrappers for planar YUV 4:2:0
  data. When the device delivers YUV420P natively the planes alias the
  device I/O buffer directly; otherwise a private conversion buffer
  (1.5 bytes/pixel) is allocated. Each frame is laid out as a full-size
  Y plane followed by quarter-size U and V planes; the three 8-bit
  QImages per frame point at those offsets using the grayscale palette.
  ResetImagesYUV() is the counterpart that tears this down.
*/
void CVideoDevice::CreateImagesYUV()
{
   int i, m;

#ifdef TRACE_VIDEODEV_MEM
qDebug("CVideoDevice::CreateImagesYUV()");
#endif
//   yuv_image_size = image_w * image_h * 3 / 2;
   if (Palette == VIDEO_PALETTE_YUV420P) {
#ifdef TRACE_VIDEODEV_MEM
qDebug(" using pre-allocated memory");
#endif
     // Alias: yuv_vid_buffer == vid_io_buffer; ResetImagesYUV() checks for
     // this and will not free it.
     yuv_vid_buffer = vid_io_buffer;
     yuv_vid_offsets = vid_io_offsets;
     //vid_image_size = yuv_image_size;
   }
   else {
#ifdef TRACE_VIDEODEV_MEM
qDebug(" allocating space for YUV");   
#endif
     yuv_vid_buffer = new uchar[Buffers * image_w * image_h * 3 / 2];
     yuv_vid_offsets.resize(Buffers);
     // NOTE: this inner `int i' shadows the `i' declared at function scope.
     for (int i = 0; i < Buffers; i++) 
        yuv_vid_offsets[i] = i * image_w * image_h * 3 / 2;
   }

   Y.resize((uint)Buffers);
   U.resize((uint)Buffers);
   V.resize((uint)Buffers);
   m = image_w * image_h; // size of the Y plane; U and V planes are m/4 each
   for (i = 0; i < Buffers; i++) {
      Y.insert(i, new QImage(yuv_vid_buffer + yuv_vid_offsets[i],                image_w,      image_h     , 8, GrayScale, 256, QImage::IgnoreEndian));
      U.insert(i, new QImage(yuv_vid_buffer + yuv_vid_offsets[i] + m,            image_w >> 1, image_h >> 1, 8, GrayScale, 256, QImage::IgnoreEndian));
      V.insert(i, new QImage(yuv_vid_buffer + yuv_vid_offsets[i] + m + (m >> 2), image_w >> 1, image_h >> 1, 8, GrayScale, 256, QImage::IgnoreEndian));
   }
}

/*
  Tear down the Y/U/V plane QImage wrappers built by CreateImagesYUV().
  The conversion buffer is only freed when it was privately allocated;
  when the planes aliased the device I/O buffer that memory is left alone.
*/
void CVideoDevice::ResetImagesYUV()
{
#ifdef TRACE_VIDEODEV_MEM
qDebug("CVideoDevice::ResetImagesYUV()");
#endif
   Y.clear();
   U.clear();
   V.clear();
   yuv_vid_offsets.resize(0);

   const bool privately_allocated =
      (yuv_vid_buffer != 0) && (yuv_vid_buffer != vid_io_buffer);
   if (privately_allocated)
     delete [] yuv_vid_buffer;
   yuv_vid_buffer = NULL;
}


/*
  Reference-counted capture start. The first call hooks image delivery
  into the Qt event loop — a QSocketNotifier on the device fd when the
  driver supports select(), otherwise a QTimer firing at FrameRate — and
  kicks off the first mmap capture. Subsequent calls only bump the count;
  StopCapture() must be called the same number of times.
*/
void CVideoDevice::StartCapture()
{
   if (Capturing == 0) {
qDebug("CVideoDevice::StartCapture() go!");
     if (UseSelect) {
       /* Tie into the Qt framework. Neat, huh? */
       pImageSocket = new QSocketNotifier(CamFD, QSocketNotifier::Read, this);
       connect(pImageSocket, SIGNAL(activated(int)), this, SLOT(LoadImage()));
     }
     else {
        /* For devices without select() we use a timer. */
       pImageTimer = new QTimer(this);
       connect(pImageTimer, SIGNAL(timeout()), this, SLOT(LoadImage()));
       pImageTimer->start(1000 / FrameRate);
     }
     
     MCapture(CurBuffer); // Start capture for mmap()
   }
   Capturing++;
}

/*
  Reference-counted counterpart of StartCapture(). Only when the count
  drops back to zero are the pending mmap frame released and the
  notifier/timer destroyed.
*/
void CVideoDevice::StopCapture()
{
   if (Capturing == 0) {
     qDebug("Duh? StopCapture while not capturing?");
     return;
   }
   if (--Capturing > 0)
     return; // other users still capturing

qDebug("CVideoDevice::StopCapture() halt!");
   MSync();
   delete pImageTimer;
   pImageTimer = NULL;
   delete pImageSocket;
   pImageSocket = NULL;
}


// private slots

/**
  \brief stub for automated loading
  
  This function tries to load an image; if that fails, it will emit 
  an \ref Error
*/
00525 void CVideoDevice::LoadImage()
{
   int e;

   e = ReadImage();
   if (e < 0) {
     qDebug("CVideoDevice::LoadImage() Error loading image; errorcode=%d\n", e);
     if (pImageTimer)
       pImageTimer->stop();
     if (pImageSocket)
       pImageSocket->setEnabled(FALSE);
     emit Error(e);
   }
} 

// protected

// protected slots

// public

bool CVideoDevice::IsValid()
{
   return validated;
}

/**
  \brief Open the device, even multiple times
  \param bufs Number of image buffers; -1 use device maximum
  \return 0 upon success, otherwise \b -errno .
  
  This function increments the usage counter of the device; the first time
  this function is called the device is really opened and initialized; all
  subsequent calls will return 0. You will need as many calls to Close() as
  you placed to Open() to get the device really closed.
  
  Open() will also start the automatic loading of images. In case the device
  supports select() a QSocketNotifier is installed, otherwise a timer
  is used.
  
  For double buffering purposes, use a \b buf value > 1. The \b buf parameter
  can only be set at the first Open(); subsequent calls will have no
  effect on the number of buffers. When bufs < 0, the device will be 
  queried for the maximum number of buffers and that amount will be used.
*/
00570 int CVideoDevice::Open(int bufs)
{
   if (Opened++) {
#ifdef TRACE_VIDEODEV_OPEN   
     qDebug("CVideoDevice::Open() again (count = %d).", Opened);
#endif     
     return 0;
   }
     
   if (CamFD >= 0) {
     qWarning("Warning: VideoDevice already opened ?!?!");
     return 0;
   }
   CamFD = ::open(NodeName, O_RDONLY);
   if (CamFD < 0) {
     Opened = 0;
     return -errno;
   }

   Buffers = bufs;
   Init();

   /* Determine if we can use select() on this cam; still only the 
      Philips cams seem to have this.
    */
   UseSelect = FALSE;
   if (IntfName.find("Philips") == 0) {
     qDebug("Using select() call.");
     UseSelect = TRUE;
   }
   return 0;
}

/**
  \fn void CVideoDevice::Close()
  \brief Closes the device provisionally
  
  This function decrements the usage counter of the VideoDevice. If the
  counter reaches 0, the device is really closed. See also \ref Open().
 */
00610 void CVideoDevice::Close()
{
   if (Opened) {
     if (Opened == 1) {
#ifdef TRACE_VIDEODEV_OPEN
       printf("CVideoDevice::Close(): last close.\n");
#endif       
       delete pImageTimer;
       pImageTimer = NULL;
       delete pImageSocket;
       pImageSocket = NULL;
       CleanUp();
       close(CamFD);
       CamFD = -1;
       emit Closed();
     }
     Opened--;
   }
}


/** 
  \fn int CVideoDevice::GetDescriptor() const
  \return file descriptor.
  
  This functions returns the file descriptor for this device. If the device
  is not opened, -1 is returned.
 */
00638 int CVideoDevice::GetDescriptor() const
{
   return CamFD;
}



/** 
  \fn void CVideoDevice::EnableRGB(bool isOn)
  \brief Enable/disable retrieval of RGB image(s)
  
  This tells the object if RGB images are desired. This will help in
  selecting the proper PALETTE for the device. Both RGB and YUV images
  may be enabled. See also \ref EnableYUV.
  
  Multiple calls to EnableRGB(TRUE) will require the same amount of calls
  to EnableRGB(FALSE) to turn RGB retrieval completely off.
 */
00656 void CVideoDevice::EnableRGB(bool isOn)
{
qDebug("EnableRGB: %c", isOn ? '+' : '-');
   if (isOn) 
     PalRGB++;
   else
     PalRGB--;
   if (PalRGB < 0)
     qWarning("Warning: VideoDevice PalRGB is negative?\n");
   if (PalRGB == 0)
     ResetImagesRGB();
   SetPalette();
   if (isOn && PalRGB == 1)
     CreateImagesRGB();
   if (isOn)
     StartCapture();
   else
     StopCapture();
}

/** 
  \fn void CVideoDevice::EnableYUV(bool isOn)
  \brief Enable/disable retrieval of YUV image(s)
  
  This tells the object if YUV (planar) images are desired. This will help in
  selecting the proper PALETTE for the device. Both YUV and RGB images
  may be enabled. See also \ref EnableRGB.

  Multiple calls to EnableYUV(TRUE) will require the same amount of calls
  to EnableYUV(FALSE) to turn YUV retrieval completely off.
 */
00687 void CVideoDevice::EnableYUV(bool isOn)
{
qDebug("EnableYUV: %c", isOn ? '+' : '-');
   if (isOn)
     PalYUV++;
   else
     PalYUV--;
     
   if (PalYUV < 0)
     printf("Warning: VideoDevice PalYUV is negative?\n");
   if (PalYUV == 0)
     ResetImagesYUV();
   SetPalette();
   if (isOn && PalYUV == 1)
     CreateImagesYUV();
   if (isOn)
     StartCapture();
   else
     StopCapture();
}

/** 
  \brief Returns device inode name
  
  This function returns the name of the device inode, like /dev/video0,
  /dev/video1, etc.
*/
00714 QString CVideoDevice::GetNodeName() const
{
   return NodeName;
}

/**
  \brief Returns internal name of device.
  
  This function returns the name of the device through the V4L interface.
*/
00724 QString CVideoDevice::GetIntfName() const
{
   return IntfName;
}
   

/**
  \fn QSize CVideoDevice::GetMinSize() const
  \brief Return the minimum image size this device supports.
  \return an Object of type QSize
  
  With this function the minium image size in pixels is retrieved. 
*/
00737 QSize CVideoDevice::GetMinSize() const
{
   return QSize(min_w, min_h);
}

/**
  \fn QSize CVideoDevice::GetSize()
  \return An object of type QSize.
  \brief Return current size from the driver.

  Returns the current image size as reported by the device. Returns a 
  size of (0, 0) when the device is closed or an error occured.
 */  
00750 QSize CVideoDevice::GetSize()
{
   struct video_window vwin;

   image_w = 0;
   image_h = 0;
   if (CamFD >= 0 && ioctl(CamFD, VIDIOCGWIN, &vwin) == 0) {
     image_w = vwin.width;
     image_h = vwin.height;
   }

//qDebug("CVideoDevice::GetSize() returns %dx%d", image_w, image_h);
//   yuv_image_size = image_w * image_h * 2; // for yuyv; yuv420p takes 1.5
//   rgb_image_size = image_w * image_h * 4;
   return QSize(image_w, image_h);
}

/**
  \fn QSize CVideoDevice::GetMaxSize() const
  \brief Return the maximum image size this device supports.
  \return an Object of type QSize
  
  With this function the maximum image size in pixels is retrieved. See
  also \ref GetMinSize and \ref SetSize
  
  Not all sizes between the minimum and maximum size may be allowed by the
  device; however, there is currently no way to retrieve a list of possible
  sizes. It's safest to stick to CIF (352x288) and SIF (320x240) formats
  and subsizes hereof, and VGA. Also it's wise to use to multiples of
  8 in both directions. \ref SetSize will return FALSE when the size
  was rejected by the driver.
*/
00782 QSize CVideoDevice::GetMaxSize() const
{
   return QSize(max_w, max_h);
}

/**
  \fn bool CVideoDevice::SetSize(int width, int height)
  \brief Set a new image size.
  \return FALSE when the call failed, TRUE when the new size was accepted by the device.

  This function will attempt to set a new image size; not all sizes between
  the minimum and maximum size may be allowed by the device; however, there
  is currently no way to retrieve a list of possible sizes. It is safest to
  stick to CIF (352x288) and SIF (320x240) formats and subsizes hereof, and
  VGA. Also it's wise to use to multiples of 8 in both directions. SetSize
  will return FALSE when the size was rejected by the driver.
 */  
00799 bool CVideoDevice::SetSize(int width, int height)
{
   struct video_window vwin;

printf("CVideoDevice::SetSize(%d, %d)\n", width, height);
   if (CamFD < 0 || width > max_w || height > max_h)
     return FALSE;
   if (ioctl(CamFD, VIDIOCGWIN, &vwin) < 0) {
     perror("GWIN: ");
     return FALSE;
   }
   MSync(); // Drop current frame if we're mmap()ing

   memset(&vwin, 0, sizeof(vwin));
   vwin.width = width;
   vwin.height = height;
   vwin.clipcount = 0;
   if (ioctl(CamFD, VIDIOCSWIN, &vwin) < 0) {
     perror("SWIN");
   }
   else {
     /* Read back; it might be the hardware decides a bit differently 
        (for example, multiples of 8)
      */
     GetSize();
     if (image_w == 0 && image_h == 0) /* woops */
       return FALSE;
   }

   CalcVidIoSize();
   // Reset images
   if (PalRGB) {
     ResetImagesRGB();
     CreateImagesRGB();
   }
   if (PalYUV) {
     ResetImagesYUV();
     CreateImagesYUV();
   }
   MCapture(CurBuffer);

   emit Resized(QSize(image_w, image_h));
   return TRUE;
}

/**
  \override
*/
00847 bool CVideoDevice::SetSize(const QSize &new_size)
{
   return SetSize(new_size.width(), new_size.height());
}


/**
  \brief Returns the current framerate.
  \return The framerate in frames per second.
  
  This applies to some webcams that allow setting of a framerate. In 
  case of a device that does not support select() we use the framerate
  to set the timer. By default the framerate is set to 10.
  
  Returns -1 in case of error.
 */
00863 int CVideoDevice::GetFramerate() const
{
   struct video_window vwin;

   if (CamFD < 0)
     return -1;
   if (HasFramerate) {
     if (ioctl(CamFD, VIDIOCGWIN, &vwin) < 0)
       return -1;
#if defined(PWC_FPS_SHIFT)       
     return (vwin.flags & PWC_FPS_FRMASK) >> PWC_FPS_SHIFT;
#endif     
   }
   return FrameRate;
}

/**
  \brief Try to set a framerate
  \param fps The framerate, in frames per second.
  \return TRUE if the framerate was accepted, FALSE otherwise
  
  Some webcams allow their framerate to be set; this functions tries to do
  just that; in general, the camera will use the framerate closest to what it
  supports. In case a device does not support framerates or only a fixed
  framerate (grabber cards!) we use the framerate to set the timer.
 */
00889 bool CVideoDevice::SetFramerate(int fps)
{
   struct video_window vwin;
qDebug("CVideoDevice::SetFramerate(%d)", fps);
   if (CamFD < 0 || ioctl(CamFD, VIDIOCGWIN, &vwin) < 0)
     return FALSE;

   if (HasFramerate) {
//     MSync(); // Drop current frame
     FrameRate = fps;
     if (FrameRate <= 0)
       FrameRate = 0;
     if (FrameRate > 63)
       FrameRate = 63;
#if defined(PWC_FPS_SHIFT)
     vwin.flags = (vwin.flags & ~PWC_FPS_MASK) | (FrameRate << PWC_FPS_SHIFT);
//qDebug("Setting framerate -> 0x%x\n", vwin.flags);
#endif     
     if (ioctl(CamFD, VIDIOCSWIN, &vwin) < 0)
       return FALSE;
//     MCapture(CurBuffer); // Try to grab new frame
   }
   else {
     FrameRate = fps;
     if (FrameRate <= 0) 
       FrameRate = 1;
     if (FrameRate > 60)
       FrameRate = 60;
     if (pImageTimer)
       pImageTimer->start(1000 / FrameRate);
   }
   emit FramerateChanged(FrameRate);
   return TRUE;
}

/**
  \brief Return number of mmaped() buffers.
  \return 0 if no mmap() support exists.
  \condition The device must be opened.

  In case the device supports mmap(), this returns the number of buffers
  that are available and mapped.
*/
00932 int CVideoDevice::GetMBuffers() const
{
   if (VMBuf.size == 0)
     return 0;
   return VMBuf.frames;
}  

/**
  \brief Return available buffers
  \return The number of image buffers used internally.
  \condition The device must be opened.

  Returns the numbers of buffers in use for this device, regardless if it
  supports mmap() or not.
*/
00947 int CVideoDevice::GetBuffers() const
{
   return Buffers;
}  



/**
  \brief Return number of input channels (sources)

  See \ref CVideoDeviceInput. A device should report at least one
  channel, but buggy device drivers may choose to return 0.
*/
00960 int CVideoDevice::GetVideoInputs() const
{
   return VideoInputs.count();
}

/**
  \brief Return current input
  \return input number, or -1 if current input is unknown
  
  This will return the current input, if known. Unfortunately, there is 
  no way to query the video device for the selected channel so until
  \ref SelectInput() is called this function returns -1.
*/
00973 int CVideoDevice::GetCurrentVideoInput() const
{
   return CurrentVideoInput;
}


/**
  \brief Returns an video input channel (source) object
  \param number The desired input channel; -1 for the current one
  \return An object of type \ref CVideoDeviceInput, or NULL if \b number is out of range
  
  This returns a pointer to a \ref CVideoDeviceInput object; when \b number
  is -1 (the default value), it returns the current input.
*/
00987 CVideoDeviceInput *CVideoDevice::GetVideoInput(int number) const
{
   if (number == -1) {
     if (CurrentVideoInput < 0)
       return NULL;
     else
       return VideoInputs.at((uint)CurrentVideoInput);
   }
   else
     return VideoInputs.at((uint) number);
}

/**
  \brief Select a new input channel.
  
  This function will program the card/chip to use the selected input channel.
  Return TRUE if the call succeeded, otherwise FALSE.
*/
01005 bool CVideoDevice::SelectVideoInput(int number)
{
   struct video_channel arg;
   bool ret;

   ret = FALSE;
   if (CamFD >= 0 && (number >= 0 && number < (int)VideoInputs.count())) {
     arg.channel = number;
     if (ioctl(CamFD, VIDIOCGCHAN, &arg) == 0) {
       if (ioctl(CamFD, VIDIOCSCHAN, &arg) == 0) {
         CurrentVideoInput = number;
         emit ChangedVideoInput(number);
         ret = TRUE;
       }
#ifdef TRACE_VIDEODEV_IOCTL  
       else
         perror("SCHAN");
#endif   
     }
#ifdef TRACE_VIDEODEV_IOCTL  
     else
       perror("GCHAN");
#endif   
   }
   return ret;
}


/**
  \brief Return number of audio channels on the device.
  
  \b Note! There is no SelectAudioInput! I suppose it's switched together
  with the video input... Also there is no way to find out to which audio
  input is associated with which video input. It looks more like this class
  is meant for manipulating the FM sound decoder on the tuner than really 
  setting anything on the input.
*/

01043 int CVideoDevice::GetAudioInputs() const
{
   return AudioInputs.count();
}

/**
  \brief Returns an audio input channel (source) object
  \param number The desired input channel; -1 for the current one
  \return An object of type \ref CVideoAudioInput, or NULL if \b number is out of range
  
  This returns a pointer to a \ref CVideoAudioInput object; when \b number
  is -1 (the default value), it returns the current input.
*/
01056 CVideoAudioInput *CVideoDevice::GetAudioInput(int number) const
{
   if (number == -1) {
     if (CurrentAudioInput < 0)
       return NULL;
     else
       return AudioInputs.at((uint)CurrentAudioInput); 
   }
   else
     return AudioInputs.at((uint) number);
}

// Switch the device to audio input channel 'number'.
// Returns TRUE on success, FALSE when the device is closed, the index is
// out of range, or one of the V4L audio ioctls fails.
bool CVideoDevice::SelectAudioInput(int number)
{
   struct video_audio va;

   if (CamFD < 0 || number < 0 || number >= (int)AudioInputs.count())
     return FALSE;

   // Read the current audio parameters for this channel...
   va.audio = number;
   if (ioctl(CamFD, VIDIOCGAUDIO, &va) != 0) {
     qDebug("VIDIOCGAUDIO failed.");
     return FALSE;
   }

   // ...then write them back, making sure the channel field is still ours.
   va.audio = number;
   if (ioctl(CamFD, VIDIOCSAUDIO, &va) != 0) {
     qDebug("VIDIOCSAUDIO failed.");
     return FALSE;
   }

   CurrentAudioInput = number;
   // FIXME emit ChangedAudioInput(number)
   return TRUE;
}




/**
  \brief Return brightness setting of device.
  \return unsigned integer in the range 0-65535. 65535 may indicate setting is not available.
 */
01100 int CVideoDevice::GetBrightness() const
{
   return VPic.brightness;
}

/**
  \fn bool CVideoDevice::SetBrightness(int val)
  \brief Set brightness in device
  \param val An integer in the range 0-65535.
  \return FALSE if the device is closed or the call failed.
  
  The value returned by GetBrightness may be slightly different from 
  what is set with SetBrightness.
*/
01114 bool CVideoDevice::SetBrightness(int val)
{
   if (CamFD < 0)
     return FALSE;

   VPic.brightness = val & 0xffff;
   if (ioctl(CamFD, VIDIOCSPICT, &VPic) < 0)
     return FALSE;
   return TRUE;
}

/**
  \fn int CVideoDevice::GetContrast() const
  \brief Return contrast setting of device.
  \return unsigned integer in the range 0-65535. 65535 may indicate setting is not available.
 */
01130 int CVideoDevice::GetContrast() const
{
   return VPic.contrast;
}

/**
  \fn bool CVideoDevice::SetContrast(int val)
  \brief Set contrast in device.
  \param val An integer in the range 0-65535.
  \return FALSE if the device is closed or the call failed.
  
  The value returned by GetContrast may be slightly different from 
  what is set with SetContrast.
*/
01144 bool CVideoDevice::SetContrast(int val)
{
   if (CamFD < 0)
     return FALSE;

   VPic.contrast = val & 0xffff;
   if (ioctl(CamFD, VIDIOCSPICT, &VPic) < 0)
     return FALSE;
   return TRUE;
}

/**
  \fn int CVideoDevice::GetHue() const
  \brief Return hue (color shift) setting of device.
  \return unsigned integer in the range 0-65535. 65535 may indicate setting is not available.
  
  Hue is a way to correct for colour deviations. 
  It is something different than \ref GetColour.
 */
01163 int CVideoDevice::GetHue() const
{
   return VPic.hue;
}

/**
  \fn bool CVideoDevice::SetHue(int val)
  \brief Set hue in device
  \param val An integer in the range 0-65535.
  \return FALSE if the device is closed or the call failed.
  
  Hue is a way to correct for colour deviations.
  The value returned by GetHue may be slightly different from 
  what is set with SetHue.
*/
01178 bool CVideoDevice::SetHue(int val)
{
   if (CamFD < 0)
     return FALSE;

   VPic.hue = val & 0xffff;
   if (ioctl(CamFD, VIDIOCSPICT, &VPic) < 0)
     return FALSE;
   return TRUE;
}

/**
  \fn int CVideoDevice::GetColour() const
  \brief Return colour saturation setting of device.
  \return unsigned integer in the range 0-65535. 65535 may indicate setting is not available.
  
  A colour saturation of 0 means no colour at all, so the returned
  images are grayscale.
 */
01197 int CVideoDevice::GetColour() const
{
   return VPic.colour;
}

/**
  \fn bool CVideoDevice::SetColour(int val)
  \brief Set colour saturation in device.
  \param val An integer in the range 0-65535.
  \return FALSE if the device is closed or the call failed.
  
  Colour saturation sets how bright colours should appear. A 
  saturation of 0 yields grayscale images.

  The value returned by GetColour may be slightly different from 
  what is set with SetColour.
*/
01214 bool CVideoDevice::SetColour(int val)
{
   if (CamFD < 0)
     return FALSE;

   VPic.colour = val & 0xffff;
   if (ioctl(CamFD, VIDIOCSPICT, &VPic) < 0)
     return FALSE;
   return TRUE;
}

/**
  \fn int CVideoDevice::GetWhiteness() const
  \brief Return gamma setting of device.
  \return unsigned integer in the range 0-65535. 65535 may indicate setting is not available.
  
  Sometimes used as a brightness contrast, but more generally this returns
  the gamma correction the device applies to the image.
 */
01233 int CVideoDevice::GetWhiteness() const
{
   return VPic.whiteness;
}

/**
  \fn bool CVideoDevice::SetWhiteness(int val)
  \brief Set gamma value in device
  \param val An integer in the range 0-65535.
  \return FALSE if the device is closed or the call failed.

  Whiteness is sometimes used as brightness, but usually this sets the
  gamma correction the device will apply to the image.
  
  The value returned by GetWhiteness may be slightly different from 
  what is set with SetWhiteness.
*/
01250 bool CVideoDevice::SetWhiteness(int val)
{
   if (CamFD < 0)
     return FALSE;

   VPic.whiteness = val & 0xffff;
   if (ioctl(CamFD, VIDIOCSPICT, &VPic) < 0)
     return FALSE;
   return TRUE;
}


/**
  \fn int CVideoDevice::ReadImage()
  \brief Read image into Buffers.
  \return 0 on success, \b -errno otherwise.
  
  This function reads the raw data from the device and transforms it into
  RGB and/or YUV images, doing all necessary conversions.
 */
01270 int CVideoDevice::ReadImage()
{
   uchar *dst = 0, *src = 0;
   int i, n;

   if (CamFD < 0)
     return -ENODEV;
   if (vid_io_buffer == NULL)
     return -ENOMEM;
     
   src = vid_io_buffer + vid_io_offsets[CurBuffer];
   if (VMBuf.size > 0) { // mmap()ed.
     i = MSync();
     if (i < 0)
       return i;
     if (Buffers > 1) // we have space, start capture immediately in next buffer. Otherwise, see end of function
       MCapture((CurBuffer + 1) % Buffers); 
   }
   else {
     if (read(CamFD, src, vid_io_image_size) < 0)
       return -errno;
   }
   
   /* At this point we have our (raw) data in the buffer; it could be we
      are finished now because we have put the data into place with
      PalRGB && VIDEO_PALETTE_RGB32 or PalYUV && VIDEO_PALETTE_YUV420P.
      But in most other cases we will have to do some conversion...
    */
   n = image_w * image_h;

   switch (Palette) {
     case VIDEO_PALETTE_YUV420P:
       /* only RGB needs conversion :-) */
       if (PalRGB) {
         dst = RGB[CurBuffer]->bits();
         ccvt_420p_bgr32(image_w, image_h, src, dst);
       }
       break;

     case VIDEO_PALETTE_RGB24:
       /* "Broken" BGR palette */
       if (PalRGB) {
         dst = RGB[CurBuffer]->bits();
         ccvt_bgr24_bgr32(image_w, image_h, src, dst);
       }
/* Does not exist yet 
       if (PalYUV)
         ccvt_bgr24_420p(image_w, image_h, vid_buffer, dy, du, dv);
*/        
       break;

     case VIDEO_PALETTE_RGB32:
       /* Quick-n-dirty palette */
       if (PalRGB) {
         dst = RGB[CurBuffer]->bits();
         memcpy(dst, src, n * 4);
       }
       if (PalYUV) {
         // convert to YUV planes
       }
       break;

#if 0
       // Planar format... easy :)
       if (PalYUV) {
         if (dy != NULL)
           memcpy(dy, src, n);
         src += n;
         n /= 4; /* UV boxes are only a quarter size */
         if (du != NULL)
           memcpy(du, src, n);
         src += n;
         if (dv != NULL)
           memcpy(dv, src, n);
       }
       else {
         dy = src;
         du = src + n;
         dv = du + (n / 4);
       }
#if 0
       /* some code to determine max/min values */
       int miny, maxy, minu, maxu, minv, maxv;
       
       miny = 255; maxy = 0;
       src = dy;
       for (i = 0; i < n; i++) {
          if (*src < miny) miny = *src;
          if (*src > maxy) maxy = *src;
          src++;
       }
       n /= 4;
       minu = 255; maxu = 0;
       src = du;
       for (i = 0; i < n; i++) {
          if (*src < minu) minu = *src;
          if (*src > maxu) maxu = *src;
          src++;
       }
       minv = 255; maxv = 0;
       src = dv;
       for (i = 0; i < n; i++) {
          if (*src < minv) minv = *src;
          if (*src > maxv) maxv = *src;
          src++;
       }
       qDebug("Y: min = %3d, max = %3d | U: min = %3d, max = %3d  | V: min = %3d, max = %3d ", miny, maxy, minu, maxu, minv, maxv);
#endif       
#endif
      
     case VIDEO_PALETTE_YUV422:
     case VIDEO_PALETTE_YUYV:
       if (PalYUV) {
         ccvt_yuyv_420p(image_w, image_h, src, Y[CurBuffer]->bits(), U[CurBuffer]->bits(), V[CurBuffer]->bits());
       }
       if (PalRGB) {
         ccvt_yuyv_bgr32(image_w, image_h, src, dst);
       }
       break;
   }

   emit Notify();

   /* Go to next buffer (including capture). In case of 1 buffer CurBuffer
      will get stuck at 0, so we do this after we processed the buffer
      content.
    */
   CurBuffer = (CurBuffer + 1) % Buffers;
   if (Buffers == 1) {
     if (MCapture(CurBuffer) < 0)
       return -errno;
   }

   return 0;
}

/**
  \fn QImage *CVideoDevice::GetRGB(int offset) const
  \param offset Offset in images buffer.
  \brief Get an RGB image.
  
  Retrieve pointer to an RGB image; note that this is a pointer, not a
  (shallow) copy. The QImage is a 32 bit deep image.
  
  When buffering is active any of the previous images in the buffer can be
  retrieved. The 'offset' parameter indicates the negative offset in the
  (circular) list of images. When offset = 0 (the default) the latest image
  is returned; when offset = 1 is the previous image, offset = 2 the image
  before that, etc. up to the number of Buffers - 1.

  If <b>offset</b> is outside the range a NULL pointer will be returned.
 */
01422 QImage *CVideoDevice::GetRGB(int offset) const
{
   if (offset < 0 || offset >= Buffers)
     return NULL;
   if (!PalRGB)
     return pNullImage;
   return RGB.at((Buffers + CurBuffer - offset) % Buffers);
}

/**
  \fn QImage *CVideoDevice::GetY(int offset) const
  \param offset Offset in circular buffer.
  \brief Get Y (luminance) component of image.
  
  Retrieve pointer to a Y image; note that this is a pointer, not a
  (shallow) copy. The QImage is an 8 bit deep image with grayscale palette.
  
  See \ref GetRGB about double buffering.
 */
01441 QImage *CVideoDevice::GetY(int offset) const
{
   if (offset < 0 || offset >= Buffers)
     return NULL;
   if (!PalYUV)
     return pNullImage;
   return Y.at((Buffers + CurBuffer - offset) % Buffers);
}

/**
  \fn QImage *CVideoDevice::GetU(int offset) const
  \param offset Offset in circular buffer.
  \brief Get U (chrominance) component of image.
  
  Retrieve pointer to a U image; note that this is a pointer, not a
  (shallow) copy. The QImage is an 8 bit deep image with grayscale palette.
  
  See \ref GetRGB about double buffering.
 */
01460 QImage *CVideoDevice::GetU(int offset) const
{
   if (offset < 0 || offset >= Buffers)
     return NULL;
   if (!PalYUV)
     return pNullImage;
   return U.at((Buffers + CurBuffer - offset) % Buffers);
}

/**
  \fn QImage *CVideoDevice::GetV(int offset) const
  \param offset Offset in circular buffer.
  \brief Get V (chrominance) component of image.
  
  Retrieve pointer to a V image; note that this is a pointer, not a
  (shallow) copy. The QImage is an 8 bit deep image with grayscale palette.
  
  See \ref GetRGB about double buffering.
 */
01479 QImage *CVideoDevice::GetV(int offset) const
{
   if (offset < 0 || offset >= Buffers)
     return NULL;
   if (!PalYUV)
     return pNullImage;
   return V.at((Buffers + CurBuffer - offset) % Buffers);
}

Generated by  Doxygen 1.6.0   Back to index