////////////////////////////////////////////////////////
//
// GEM - Graphics Environment for Multimedia
//
// zmoelnig@iem.kug.ac.at
//
// Implementation file
//
// Copyright (c) 2010-2011 IOhannes m zmölnig. forum::für::umläute. IEM. zmoelnig@iem.at
// For information on usage and redistribution, and for a DISCLAIMER OF ALL
// WARRANTIES, see the file, "LICENSE.txt"
//
/////////////////////////////////////////////////////////
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif

#include "videoAVT.h"
#include "plugins/PluginFactory.h"

#include <stdlib.h>
#include <errno.h>

#ifdef _WIN32
# include <winsock2.h>
# include <ws2tcpip.h>
#else
# include <sys/socket.h>
# include <netdb.h>
#endif

#include <iostream>

using namespace gem::plugins;

#include "Gem/RTE.h"
#include "Gem/Exception.h"

#if 0
# define debug ::post
#else
# define debug
#endif

/////////////////////////////////////////////////////////
//
// videoAVT
//
/////////////////////////////////////////////////////////
// Constructor
//
/////////////////////////////////////////////////////////
#ifdef HAVE_AVT

#define MAX_CAMERA_LIST 20

REGISTER_VIDEOFACTORY("avt", videoAVT);

/* initializes the PvAPI library exactly once and
 * uninitializes it when the process terminates */
struct PvApiInitClass {
  PvApiInitClass(void) {
    unsigned long major=0, minor=0;
    PvVersion(&major, &minor);
    // post("Prosilica AVT SDK %d.%d", major, minor);
    if(ePvErrResources==PvInitialize()) {
      throw(GemException("unable to initialize PvAPI"));
    }
  }
  virtual ~PvApiInitClass(void) {
    PvUnInitialize();
  }
};

videoAVT :: videoAVT(void) : videoBase("avt"),
                             m_grabber(NULL)
{
  m_width=0;
  m_height=0;

  static PvApiInitClass paic;

  /* pre-allocate a pool of (empty) frame structures;
   * the image buffers are attached in resizeFrames() once the frame size is known */
  int i=0;
  for(i=0; i<MAX_CAMERA_LIST; i++) {
    tPvFrame frame = tPvFrame();
    frame.Context[0]=this;   /* so that the static grabCB() can find us again */
    m_frames.push_back(frame);
  }
}

////////////////////////////////////////////////////////
// Destructor
//
/////////////////////////////////////////////////////////
videoAVT :: ~videoAVT(void)
{
  close();
}

void videoAVT :: resizeFrames(unsigned long size)
{
  /* (re)allocate the image buffer of each queued frame to 'size' bytes */
  unsigned int i=0;
  for(i=0; i<m_frames.size(); i++) {
    if(m_frames[i].ImageBuffer) {
      delete[] (unsigned char*)m_frames[i].ImageBuffer;
    }
    m_frames[i].ImageBuffer = new unsigned char[size];
    m_frames[i].ImageBufferSize = size;
  }
}

void videoAVT :: grabbedFrame(tPvFrame&pFrame)
{
  /* frame successfully grabbed -> convert it into a pixbuf */
  bool success=true;
  lock();
  m_image.image.xsize=pFrame.Width;
  m_image.image.ysize=pFrame.Height;
  m_image.image.setCsizeByFormat(GL_RGBA);
  m_image.image.reallocate();

  switch(pFrame.Format) {
  case(ePvFmtMono8) : m_image.image.fromGray((unsigned char *)pFrame.ImageBuffer); break;
  //case(ePvFmtMono16): m_image.image.fromGray((unsigned short*)pFrame.ImageBuffer); break;
  case(ePvFmtRgb24) : m_image.image.fromRGB ((unsigned char *)pFrame.ImageBuffer); break;
  case(ePvFmtBgr24) : m_image.image.fromBGR ((unsigned char *)pFrame.ImageBuffer); break;
  case(ePvFmtRgba32): m_image.image.fromRGBA((unsigned char *)pFrame.ImageBuffer); break;
  case(ePvFmtBgra32): m_image.image.fromBGRA((unsigned char *)pFrame.ImageBuffer); break;
  case(ePvFmtBayer8):
  case(ePvFmtBayer16):
    do {
      unsigned char*data=m_image.image.data;
      // PixelPadding is most likely plain wrong; need to test what it really means
      PvUtilityColorInterpolate(&pFrame,
                                &data[chRed],
                                &data[chGreen],
                                &data[chBlue],
                                2, // PixelPadding (Alpha)
                                0  // LinePadding
                                );
    } while(0);
    break;
  case(ePvFmtRgb48):
  case(ePvFmtMono12Packed):
  case(ePvFmtBayer12Packed):
  case(ePvFmtYuv411):
  case(ePvFmtYuv422):
  case(ePvFmtYuv444):
  default: // ouch
    success=false;
  }

  if(success) {
    m_image.image.upsidedown=true;
    m_image.newimage=true;
  }
  unlock();
}

void videoAVT::grabCB(tPvFrame*pFrame)
{
  videoAVT*me=(videoAVT*)pFrame->Context[0];
  if(me && ePvErrSuccess==pFrame->Status) {
    me->grabbedFrame(*pFrame);
  }

  // if the frame was completed we re-enqueue it
  if(me && pFrame->Status != ePvErrUnplugged && pFrame->Status != ePvErrCancelled) {
    PvCaptureQueueFrame(me->m_grabber, pFrame, grabCB);
  }
}

pixBlock* videoAVT::getFrame(void)
{
  if(!(m_haveVideo && m_capturing)) {
    return NULL;
  }
  /* NOTE: the lock acquired here is expected to be released again in releaseFrame() */
  lock();
  return &m_image;
}

/////////////////////////////////////////////////////////
// openDevice
//
/////////////////////////////////////////////////////////
bool videoAVT :: openDevice(gem::Properties&props)
{
  if(m_grabber)closeDevice();

  /* get the list of currently connected cameras */
  unsigned long cameraNum=PvCameraCount();
  tPvCameraInfo*cameraList=new tPvCameraInfo[cameraNum];
  cameraNum = PvCameraList(cameraList, cameraNum, NULL);
  if(m_devicenum>=0) {
    verbose(1, "AVT trying to open #%d of %lu devices", m_devicenum, cameraNum);
    if(cameraNum>m_devicenum && (cameraList[m_devicenum].PermittedAccess & ePvAccessMaster)) {
      if (PvCameraOpen(cameraList[m_devicenum].UniqueId, ePvAccessMaster, &m_grabber) != ePvErrSuccess) {
        m_grabber=NULL;
      }
    }
  } else {
    verbose(1, "AVT trying to open device '%s'", m_devicename.c_str());
    /* match the given devicename against
     *   cameraList[i].UniqueId, cameraList[i].SerialString, cameraList[i].DisplayName, IP
     */
    unsigned long i=0;

    /* 1st attempt: interpret the devicename as a numeric UniqueId */
    errno=0;
    const unsigned long uid=strtoul(m_devicename.c_str(), NULL, 0);
    if(NULL==m_grabber && 0==errno) {
      verbose(1, "checking UniqueID: 0x%08lx", uid);
      for(i=0; i<cameraNum; i++) {
        if(uid==cameraList[i].UniqueId) {
          if (PvCameraOpen(cameraList[i].UniqueId, ePvAccessMaster, &m_grabber) != ePvErrSuccess) {
            m_grabber=NULL;
          }
          break;
        }
      }
    }

    /* 2nd attempt: match the devicename against serial-number and display-name */
    if(NULL==m_grabber) {
      for(i=0; i<cameraNum; i++) {
        if(m_devicename==cameraList[i].SerialString ||
           m_devicename==cameraList[i].DisplayName) {
          if (PvCameraOpen(cameraList[i].UniqueId, ePvAccessMaster, &m_grabber) != ePvErrSuccess) {
            m_grabber=NULL;
          }
          break;
        }
      }
    }

    /* 3rd attempt: resolve the devicename as an IP address (or hostname) */
    if(NULL==m_grabber) {
      struct addrinfo*result=NULL;
      if(0==getaddrinfo(m_devicename.c_str(), NULL, NULL, &result)) {
        unsigned long OldAddr=0;
        struct addrinfo*ai=NULL;
        for(ai=result; ai!=NULL; ai=ai->ai_next) {
          if(AF_INET != ai->ai_family) {
            continue;  /* PvCameraOpenByAddr() wants an IPv4 address */
          }
          struct sockaddr_in*ipv4 = (struct sockaddr_in*)ai->ai_addr;
          unsigned long IpAddr=ipv4->sin_addr.s_addr; // network byte order, which PvCameraOpenByAddr() appears to expect
          if(OldAddr==IpAddr)continue;
          OldAddr=IpAddr;
          verbose(1, "AVT trying to connect to %3lu.%3lu.%3lu.%3lu",
                  (IpAddr & 0x0FF),
                  (IpAddr & 0x0FF00)>>8,
                  (IpAddr & 0x0FF0000)>>16,
                  (IpAddr & 0xFF000000)>>24);
          if(ePvErrSuccess == PvCameraOpenByAddr(IpAddr, ePvAccessMaster, &m_grabber)) {
            break;
          }
          m_grabber=NULL;
        }
        freeaddrinfo(result);
      }
    }
  }
  delete[]cameraList;

  if(m_grabber) {
    unsigned long FrameSize = 0;
    if(ePvErrSuccess==PvAttrUint32Get(m_grabber,"TotalBytesPerFrame",&FrameSize)) {
      resizeFrames(FrameSize);
    } else {
      close();
    }
  }

  return (NULL!=m_grabber);
}

/////////////////////////////////////////////////////////
// closeDevice
//
/////////////////////////////////////////////////////////
void videoAVT :: closeDevice()
{
  if(m_grabber)PvCameraClose(m_grabber);
  m_grabber=NULL;
}

/////////////////////////////////////////////////////////
// startTransfer
//
/////////////////////////////////////////////////////////
bool videoAVT :: startTransfer()
{
  PvCaptureStart(m_grabber);
  if(ePvErrSuccess != PvCommandRun(m_grabber,"AcquisitionStart")) {
    error("AVT::AcquisitionStart failed");
  } else {
    /* queue a first frame; grabCB() re-enqueues frames as they are completed */
    PvCaptureQueueFrame(m_grabber, &m_frames[0], grabCB);
  }
  return true;
}

/////////////////////////////////////////////////////////
// stopTransfer
//
/////////////////////////////////////////////////////////
bool videoAVT :: stopTransfer()
{
  PvCaptureQueueClear(m_grabber);
  if(ePvErrSuccess != PvCommandRun(m_grabber,"AcquisitionStop")) {
    error("AVT::AcquisitionStop failed");
  }
  PvCaptureEnd(m_grabber);
  return true;
}

std::vector<std::string> videoAVT::enumerate()
{
  std::vector<std::string> result;

  unsigned long cameraNum=PvCameraCount();
  tPvCameraInfo*cameraList=new tPvCameraInfo[cameraNum];
  cameraNum = PvCameraList(cameraList, cameraNum, NULL);

  unsigned long i = 0;
  for (i = 0; i < cameraNum; i++) {
    result.push_back(cameraList[i].DisplayName);
  }
  delete[]cameraList;

  return result;
}

bool videoAVT::enumProperties(gem::Properties&readable,
                              gem::Properties&writeable)
{
  tPvAttrListPtr listPtr;
  unsigned long listLength;
  if (PvAttrList(m_grabber, &listPtr, &listLength) == ePvErrSuccess) {
    for (int i = 0; i < listLength; i++) {
      const char* attributeName = listPtr[i];
      std::cerr << "Attribute[" << i << "]: " << attributeName << std::endl;
    }
    return true;
  }
  return false;
}

void videoAVT::setProperties(gem::Properties&props)
{
  int i;
  std::vector<std::string> keys=props.keys();
  for(i=0; i<keys.size(); i++) {
    const std::string key=keys[i];
    double d=0;

    /* query the attribute's datatype, so we know which setter to use */
    tPvAttributeInfo info;
    if(ePvErrSuccess != PvAttrInfo(m_grabber, key.c_str(), &info)) {
      continue;
    }

    switch(info.Datatype) {
    case ePvDatatypeEnum:
#if 0
      /* disabled code: 'm_camera' and 'enumSet' are not defined in this scope */
      std::vector<std::string> sv;
      int index=d;
      if(index<0)continue;
      if (PvAttrRangeEnum(m_camera, "AcquisitionMode", enumSet, sizeof(enumSet), NULL) == ePvErrSuccess) {
        char* member = strtok(enumSet, ","); // strtok isn't always thread safe!
        while (member != NULL) {
          sv.push_back(member);
          member = strtok(NULL, ",");
        }
        if(index>=sv.size()) {
          continue;
        }
        PvAttrEnumSet(m_grabber, key.c_str(), sv[index].c_str());
      }
#endif
      break;
    case ePvDatatypeUint32:
      if(props.get(key, d)) {
        tPvUint32 v=d;
        PvAttrUint32Set(m_grabber, key.c_str(), v);
      }
      break;
    case ePvDatatypeFloat32:
      if(props.get(key, d)) {
        tPvFloat32 v=d;
        PvAttrFloat32Set(m_grabber, key.c_str(), v);
      }
      break;
    case ePvDatatypeInt64:
      if(props.get(key, d)) {
        tPvInt64 v=d;
        PvAttrInt64Set(m_grabber, key.c_str(), v);
      }
      break;
    case ePvDatatypeBoolean:
      if(props.get(key, d)) {
        tPvBoolean v=d;
        PvAttrBooleanSet(m_grabber, key.c_str(), v);
      }
      break;
    }
  } // loop
}

void videoAVT::getProperties(gem::Properties&props)
{
  int i;
  std::vector<std::string> keys=props.keys();
  for(i=0; i<keys.size(); i++) {
    const std::string key=keys[i];

    /* query the attribute's datatype, so we know which getter to use */
    tPvAttributeInfo info;
    if(ePvErrSuccess != PvAttrInfo(m_grabber, key.c_str(), &info)) {
      continue;
    }

    switch(info.Datatype) {
    case ePvDatatypeUint32: {
      tPvUint32 value;
      if (ePvErrSuccess==PvAttrUint32Get(m_grabber, key.c_str(), &value)) {
        props.set(key, static_cast<double>(value));
      }
    }
    break;
    case ePvDatatypeFloat32: {
      tPvFloat32 value;
      if (ePvErrSuccess==PvAttrFloat32Get(m_grabber, key.c_str(), &value)) {
        props.set(key, static_cast<double>(value));
      }
    }
    break;
    case ePvDatatypeInt64: {
      tPvInt64 value;
      if (ePvErrSuccess==PvAttrInt64Get(m_grabber, key.c_str(), &value)) {
        props.set(key, static_cast<double>(value));
      }
    }
    break;
    case ePvDatatypeBoolean: {
      tPvBoolean value;
      if (ePvErrSuccess==PvAttrBooleanGet(m_grabber, key.c_str(), &value)) {
        props.set(key, static_cast<double>(value));
      }
    }
    break;
    }
  } // loop
}

#else
videoAVT :: videoAVT(void) : videoBase("")
{ }
videoAVT :: ~videoAVT(void)
{ }
#endif /* HAVE_AVT */