本文整理汇总了C++中openni::VideoStream类的典型用法代码示例。如果您正苦于以下问题:C++ VideoStream类的具体用法?C++ VideoStream怎么用?C++ VideoStream使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了VideoStream类的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C++代码示例。
示例1: readFrame
// Drain every pending frame from the three global streams without blocking.
// A zero timeout makes waitForAnyStream() a pure poll: it reports STATUS_OK
// only while at least one stream already has a queued frame.
void readFrame()
{
	openni::VideoStream* streams[] = {&g_depthStream, &g_colorStream, &g_irStream};

	for (;;)
	{
		int readyIndex = -1;
		if (openni::OpenNI::waitForAnyStream(streams, 3, &readyIndex, 0) != openni::STATUS_OK)
			break;  // nothing left to read right now

		// Dispatch to the matching global frame holder.
		if (readyIndex == 0)
			g_depthStream.readFrame(&g_depthFrame);
		else if (readyIndex == 1)
			g_colorStream.readFrame(&g_colorFrame);
		else if (readyIndex == 2)
			g_irStream.readFrame(&g_irFrame);
		else
			printf("Error in wait\n");
	}
}
开发者ID:Arkapravo,项目名称:OpenNI2,代码行数:26,代码来源:Device.cpp
示例2: setResolution
/*
void openni::VideoMode::setResolution()
Setter function for the resolution of this VideoMode. Application use of this function is not recommended.
Instead, use SensorInfo::getSupportedVideoModes() to obtain a list of valid video modes
-- cited from OpenNI2 help. setResolution() is not recommended.
*/
// Walks the sensor's supported video modes and applies the first one that
// matches (w, h, fps, format) exactly.
// Returns true on success; false if no mode matches or setVideoMode() fails.
bool setONI2StreamMode(openni::VideoStream& stream, int w, int h, int fps, openni::PixelFormat format){
	//std::cout << "Ask mode: " << w << "x" << h << " " << fps << " fps. format " << format << std::endl;
	// Fixed: removed unused local `found` (was never read or written).
	const openni::Array<openni::VideoMode>& modes = stream.getSensorInfo().getSupportedVideoModes();
	for(int i = 0, i_end = modes.getSize(); i < i_end; ++i){
		// std::cout << "Mode: " << modes[i].getResolutionX() << "x" << modes[i].getResolutionY() << " " << modes[i].getFps() << " fps. format " << modes[i].getPixelFormat() << std::endl;
		if(modes[i].getResolutionX() != w){
			continue;
		}
		if(modes[i].getResolutionY() != h){
			continue;
		}
		if(modes[i].getFps() != fps){
			continue;
		}
		if(modes[i].getPixelFormat() != format){
			continue;
		}
		// Exact match found — try to apply it.
		openni::Status rc = stream.setVideoMode(modes[i]);
		if(rc != openni::STATUS_OK){
			return false;
		}
		return true;
	}
	// No supported mode matched the request.
	return false;
}
开发者ID:chen0510566,项目名称:mrpt,代码行数:33,代码来源:COpenNI2Generic.cpp
示例3: initONI2Stream
// Creates `stream` on `device` for the given sensor type and switches it to
// the requested (w, h, fps, format) mode, logging the resolution before and
// after. Returns false on any failure.
bool initONI2Stream(openni::Device& device, openni::SensorType sensorType, openni::VideoStream& stream, int w, int h, int fps, openni::PixelFormat format){
	// Human-readable sensor name, used in every log line below.
	const char* strSensor = NULL;
	switch(sensorType){
	case openni::SENSOR_COLOR:
		strSensor = "openni::SENSOR_COLOR";
		break;
	case openni::SENSOR_DEPTH:
		strSensor = "openni::SENSOR_DEPTH";
		break;
	default:
		printf("%s:Unknown SensorType -> %d\n", __FUNCTION__, sensorType);
		return false;
	}

	openni::Status rc = stream.create(device, sensorType);
	if(rc != openni::STATUS_OK){
		printf("%s:Couldn't find sensor %s: %s\n", __FUNCTION__, strSensor, openni::OpenNI::getExtendedError());
		return false;
	}

	// Log the driver's default mode, then try to switch to the requested one.
	openni::VideoMode options = stream.getVideoMode();
	printf("%s:Initial resolution %s (%d, %d) FPS %d Format %d\n", __FUNCTION__, strSensor, options.getResolutionX(), options.getResolutionY(), options.getFps(), options.getPixelFormat());
	if(setONI2StreamMode(stream, w, h, fps, format) == false){
		printf("%s:Can't find desired mode in the %s\n", __FUNCTION__, strSensor);
		return false;
	}

	options = stream.getVideoMode();
	printf(" -> (%d, %d) FPS %d Format %d\n", options.getResolutionX(), options.getResolutionY(), options.getFps(), options.getPixelFormat());
	return true;
}
开发者ID:GYengera,项目名称:mrpt,代码行数:26,代码来源:test.cpp
示例4: toggleStreamState
// Flips a stream between started and stopped, lazily creating it on first
// use. `isOn` tracks the current state and is inverted only when the
// transition succeeds; `frame` is released when the stream stops.
void toggleStreamState(openni::VideoStream& stream, openni::VideoFrameRef& frame, bool& isOn, openni::SensorType type, const char* name)
{
	// Create the stream on demand the first time it is toggled.
	if (!stream.isValid())
	{
		if (stream.create(g_device, type) != openni::STATUS_OK)
		{
			displayError("Failed to create %s stream:\n%s", name, openni::OpenNI::getExtendedError());
			return;
		}
	}

	if (!isOn)
	{
		// Currently off — try to start; bail without flipping state on failure.
		if (stream.start() != openni::STATUS_OK)
		{
			displayError("Failed to start %s stream:\n%s", name, openni::OpenNI::getExtendedError());
			return;
		}
	}
	else
	{
		// Currently on — stop streaming and drop the last frame reference.
		stream.stop();
		frame.release();
	}

	isOn = !isOn;
}
开发者ID:higuchi-yuuki,项目名称:OpenNI2,代码行数:31,代码来源:Device.cpp
示例5: setONI2StreamMode
// Walks the sensor's supported video modes and applies the first one that
// matches (w, h, fps, format) exactly.
// Returns true on success; false if no mode matches or setVideoMode() fails.
bool setONI2StreamMode(openni::VideoStream& stream, int w, int h, int fps, openni::PixelFormat format){
	/*
	void openni::VideoMode::setResolution()
	Setter function for the resolution of this VideoMode. Application use of this function is not recommended.
	Instead, use SensorInfo::getSupportedVideoModes() to obtain a list of valid video modes
	-- cited from OpenNI2 help. setResolution() is not recommended.
	*/
	// Fixed: removed unused local `found` (was never read or written).
	const openni::Array<openni::VideoMode>& modes = stream.getSensorInfo().getSupportedVideoModes();
	for(int i = 0, i_end = modes.getSize(); i < i_end; ++i){
		if(modes[i].getResolutionX() != w){
			continue;
		}
		if(modes[i].getResolutionY() != h){
			continue;
		}
		// BUG FIX: the fps parameter was previously ignored, so a mode with the
		// requested size and format but a different frame rate could be selected.
		if(modes[i].getFps() != fps){
			continue;
		}
		if(modes[i].getPixelFormat() != format){
			continue;
		}
		openni::Status rc = stream.setVideoMode(modes[i]);
		if(rc != openni::STATUS_OK){
			// Fixed misleading message: failure here is setVideoMode() on an
			// already-created stream, not a missing RGB stream.
			printf("%s:Couldn't set video mode:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
			return false;
		}
		return true;
	}
	// No supported mode matched the request.
	return false;
}
开发者ID:GYengera,项目名称:mrpt,代码行数:29,代码来源:test.cpp
示例6: openStream
// Opens a sensor stream on `device` according to `openType`:
//   SENSOR_OFF      -> do nothing, report success.
//   SENSOR_TRY      -> attempt to open; missing sensor / failure is not an error.
//   SENSOR_ON       -> sensor is mandatory; any failure returns a negative code.
// Outputs: *ppSensorInfo (device sensor info, may be NULL), *pbIsStreamOn
// (true only when the stream is actually started).
// Returns 0 on success/tolerated failure, negative on mandatory failure.
int openStream(openni::Device& device, const char* name, openni::SensorType sensorType, SensorOpenType openType, openni::VideoStream& stream, const openni::SensorInfo** ppSensorInfo, bool* pbIsStreamOn)
{
	*ppSensorInfo = device.getSensorInfo(sensorType);
	*pbIsStreamOn = false;

	if (openType == SENSOR_OFF)
	{
		return 0;
	}

	// Sensor not present on this device.
	if (*ppSensorInfo == NULL)
	{
		if (openType == SENSOR_ON)
		{
			printf("No %s sensor available\n", name);
			return -1;
		}
		else
		{
			return 0;
		}
	}

	openni::Status nRetVal = stream.create(device, sensorType);
	if (nRetVal != openni::STATUS_OK)
	{
		if (openType == SENSOR_ON)
		{
			// BUG FIX: the arguments were swapped — the format string expects
			// the stream name first, then the extended error text.
			printf("Failed to create %s stream:\n%s\n", name, openni::OpenNI::getExtendedError());
			return -2;
		}
		else
		{
			return 0;
		}
	}

	nRetVal = stream.start();
	if (nRetVal != openni::STATUS_OK)
	{
		// Don't leave a created-but-unstartable stream behind.
		stream.destroy();
		if (openType == SENSOR_ON)
		{
			// BUG FIX: message hardcoded "depth" even for color/IR streams.
			printf("Failed to start %s stream:\n%s\n", name, openni::OpenNI::getExtendedError());
			return -3;
		}
		else
		{
			return 0;
		}
	}

	*pbIsStreamOn = true;
	return 0;
}
开发者ID:higuchi-yuuki,项目名称:OpenNI2,代码行数:57,代码来源:Device.cpp
示例7: onNewFrame
// Called by OpenNI on its event thread whenever the color stream has a new
// frame: reads the frame, appends it to the queue, and optionally repaints.
void ColorListener::onNewFrame(openni::VideoStream& vs)
{
openni::VideoFrameRef frame;
vs.readFrame(&frame);
// NOTE(review): `frames` grows unbounded unless a consumer drains it — confirm.
frames->push_back(frame);
// presumably `w` is the display widget to refresh — verify against the class.
if(isUpdate) w->update();
}
开发者ID:kunyue,项目名称:Ubuntu_x64_Openni2.2_NiTE2.2_FreenectDriver,代码行数:7,代码来源:colorlistener.cpp
示例8: onNewFrame
// Stream callback: under the mutex, copies the newest device frame into the
// driver's local image buffer and records pixel format, size and timestamp.
// Bails out (with a log message) on invalid frames, unknown pixel formats,
// or a size mismatch between the device frame and the local buffer.
void streamFrameListener::onNewFrame(openni::VideoStream& stream)
{
// Serialize against readers of image/frameRef/stamp on other threads.
LockGuard guard(mutex);
stream.readFrame(&frameRef);
if (!frameRef.isValid() || !frameRef.getData())
{
yInfo() << "frame lost";
return;
}
int pixC;
// Translate the OpenNI pixel format into the corresponding YARP pixel code.
pixF = stream.getVideoMode().getPixelFormat();
pixC = depthCameraDriver::pixFormatToCode(pixF);
w = frameRef.getWidth();
h = frameRef.getHeight();
dataSize = frameRef.getDataSize();
// The first successfully received frame marks this listener as ready.
if (isReady == false)
{
isReady = true;
}
if(pixC == VOCAB_PIXEL_INVALID)
{
yError() << "depthCameraDriver: Pixel Format not recognized";
return;
}
image.setPixelCode(pixC);
image.resize(w, h);
// Defensive check: memcpy below is only safe if both buffers agree in size.
if(image.getRawImageSize() != frameRef.getDataSize())
{
yError() << "depthCameraDriver:device and local copy data size doesn't match";
return;
}
memcpy((void*)image.getRawImage(), (void*)frameRef.getData(), frameRef.getDataSize());
stamp.update();
return;
}
开发者ID:barbalberto,项目名称:yarp,代码行数:43,代码来源:depthCameraDriver.cpp
示例9: irCallback
// Callback for a new IR frame: reads it from the stream, wraps it in the
// grabber's frame-wrapper abstraction, and forwards it to the registered IR
// image callback (the second argument, a user cookie, is unused here).
void pcl::io::OpenNI2Grabber::processIRFrame (openni::VideoStream& stream)
{
openni::VideoFrameRef frame;
stream.readFrame (&frame);
// frameWrapper shares ownership of the frame data beyond this call.
FrameWrapper::Ptr frameWrapper = boost::make_shared<Openni2FrameWrapper>(frame);
boost::shared_ptr<IRImage> image = boost::make_shared<IRImage> ( frameWrapper );
irCallback (image, NULL);
}
开发者ID:2php,项目名称:pcl,代码行数:11,代码来源:openni2_grabber.cpp
示例10: setStreamCropping
// Applies a cropping window to `stream` after validating that the stream is
// live and supports cropping; failures are reported via displayMessage.
void setStreamCropping(openni::VideoStream& stream, int originX, int originY, int width, int height)
{
	// Guard: stream must exist before we can crop it.
	if (!stream.isValid())
	{
		displayMessage("Stream does not exist!");
		return;
	}

	// Guard: not every sensor/driver supports cropping.
	if (!stream.isCroppingSupported())
	{
		displayMessage("Stream does not support cropping!");
		return;
	}

	const openni::Status status = stream.setCropping(originX, originY, width, height);
	if (status != openni::STATUS_OK)
	{
		displayMessage("Failed to set cropping: %s", xnGetStatusString(status));
	}
}
开发者ID:Arkapravo,项目名称:OpenNI2,代码行数:21,代码来源:Device.cpp
示例11: onNewFrame
void ColorStreamListener::onNewFrame(openni::VideoStream& steam){
//Log::i( TAG, "onNewFrame");
steam.readFrame(&(this->Frame));
if (Frame.isValid()){
if ( openni::SENSOR_COLOR == Frame.getSensorType() ){
//cv::Mat mColorMat_BGR;
this->colorMat = new cv::Mat(Frame.getHeight(),Frame.getWidth(),CV_8UC3,(void*)Frame.getData());
//cv::cvtColor(mColorMat,mColorMat_BGR,CV_RGB2BGR);
this->mColorDevice->setData(*(this->colorMat));
}/* End of if */
}/* End of if */
}/* End of onNewFrame */
开发者ID:e61983,项目名称:Kinect,代码行数:12,代码来源:ColorStreamListener.cpp
示例12: resetStreamCropping
// Clears any active cropping window on `stream` after validating that the
// stream is live and supports cropping; failures go to displayMessage.
void resetStreamCropping(openni::VideoStream& stream)
{
	// Guard: stream must exist before we can touch its cropping state.
	if (!stream.isValid())
	{
		displayMessage("Stream does not exist!");
		return;
	}

	// Guard: not every sensor/driver supports cropping.
	if (!stream.isCroppingSupported())
	{
		displayMessage("Stream does not support cropping!");
		return;
	}

	const openni::Status status = stream.resetCropping();
	if (status != openni::STATUS_OK)
	{
		displayMessage("Failed to reset cropping: %s", xnGetStatusString(status));
	}
}
开发者ID:Arkapravo,项目名称:OpenNI2,代码行数:21,代码来源:Device.cpp
示例13: initONI2RGBStream
// Creates and starts the RGB stream on `device` in the requested
// (w, h, fps, format) mode, with mirroring disabled. Logs the resolution
// before and after the mode switch. Returns false on any failure.
bool initONI2RGBStream(openni::Device& device, openni::VideoStream& rgb, int w, int h, int fps, openni::PixelFormat format){
	if(rgb.create(device, openni::SENSOR_COLOR) != openni::STATUS_OK){
		printf("%s:Couldn't find RGB stream:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
		return false;
	}

	if(rgb.setMirroringEnabled(false) != openni::STATUS_OK){
		printf("%s:setMirroringEnabled(false) failed:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
		return false;
	}

	// Log the driver's default mode, then switch to the requested one.
	openni::VideoMode mode = rgb.getVideoMode();
	printf("Initial resolution RGB (%d, %d) FPS %d Format %d\n", mode.getResolutionX(), mode.getResolutionY(), mode.getFps(), mode.getPixelFormat());
	if(setONI2StreamMode(rgb, w, h, fps, format) == false){
		printf("%s:Can't find desired rgb mode\n", __FUNCTION__ );
		return false;
	}
	mode = rgb.getVideoMode();
	printf(" -> (%d, %d) FPS %d Format %d\n", mode.getResolutionX(), mode.getResolutionY(), mode.getFps(), mode.getPixelFormat());

	if(rgb.start() != openni::STATUS_OK){
		printf("%s:Couldn't start RGB stream:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
		// Tear down the half-initialized stream before reporting failure.
		rgb.destroy();
		return false;
	}
	return true;
}
开发者ID:EduFdez,项目名称:mrpt,代码行数:28,代码来源:test.cpp
示例14: record_oni
// Records the depth and color streams to a timestamped .oni file for
// conf.capture_time milliseconds. `tmpfile` (bufsize bytes) receives the
// generated file name "rgbd_<date>_<host>.oni".
// Returns false if the recorder could not be created.
bool record_oni(char *tmpfile, int bufsize, openni::VideoStream &depth, openni::VideoStream &color, Config &conf) {
	openni::Recorder recorder;

	// Build the file name: date/time prefix, hostname, ".oni" suffix.
	time_t t = time(NULL);
	strftime(tmpfile, bufsize, "rgbd_%Y%m%d_%H-%M-%S_", localtime(&t));
	// BUG FIX: getenv() may return NULL (HOSTNAME unset), which is undefined
	// behavior when passed to strncat.
	const char *host = getenv("HOSTNAME");
	if (host == NULL)
		host = "unknown";
	// BUG FIX: strncat's third argument is the maximum number of characters
	// to APPEND, not the total buffer size; passing `bufsize` could overflow
	// tmpfile. Pass the remaining space instead.
	strncat(tmpfile, host, bufsize - strlen(tmpfile) - 1);
	strncat(tmpfile, ".oni", bufsize - strlen(tmpfile) - 1);

	printf("Starting ONI Capture.\n");
	depth.start();
	color.start();

	openni::Status rc = recorder.create(tmpfile);
	if(rc != openni::STATUS_OK) {
		printf("Error: Failed to open '%s' for writing!\n%s", tmpfile, openni::OpenNI::getExtendedError());
		// Robustness fix: don't leave the streams running on failure.
		color.stop();
		depth.stop();
		return false;
	}
	recorder.attach(color);
	recorder.attach(depth);
	recorder.start();

	// Sleep-poll until capture_time milliseconds have elapsed (monotonic clock
	// so wall-clock adjustments can't shorten/extend the recording).
	struct timespec start, tp;
	clock_gettime(CLOCK_MONOTONIC, &start);
	long tt;
	do {
		usleep(100);
		clock_gettime(CLOCK_MONOTONIC, &tp);
		tt = (tp.tv_sec-start.tv_sec)*1000+(tp.tv_nsec-start.tv_nsec)/1000000;
	} while(tt < conf.capture_time);

	recorder.stop();
	color.stop();
	depth.stop();
	recorder.destroy();
	printf("Captured ONI to '%s'\n", tmpfile);
	return true;
}
开发者ID:holtfox,项目名称:rgbdsend,代码行数:40,代码来源:rgbdsend.cpp
示例15: printMode
// Logs the requested video mode followed by every mode the sensor actually
// supports — used to diagnose mode-negotiation failures.
void OpenNI2Interface::printModes(const openni::VideoStream& stream,const openni::VideoMode& requestedMode)
{
	std::cout << "Requested mode:\n";
	printMode(requestedMode);

	std::cout << "Supported modes:\n";
	const auto& supported = stream.getSensorInfo().getSupportedVideoModes();
	for(int idx = 0; idx < supported.getSize(); ++idx)
	{
		printMode(supported[idx]);
	}
}
示例16: closecamera
// Tears down the capture pipeline: destroys both streams, closes the device,
// then shuts down the OpenNI runtime (streams must go before the device).
void KinectCamera::closecamera(void)
{
mColorStream.destroy();
mDepthStream.destroy();
mDevice.close();
openni:: OpenNI::shutdown();
}
开发者ID:zing235,项目名称:kinect-test-onlycolor-qml,代码行数:7,代码来源:kinectcamera.cpp
示例17: convertDepthPointToColor
bool convertDepthPointToColor(int depthX, int depthY, openni::DepthPixel depthZ, int* pColorX, int* pColorY)
{
if (!g_depthStream.isValid() || !g_colorStream.isValid())
return false;
return (openni::STATUS_OK == openni::CoordinateConverter::convertDepthToColor(g_depthStream, g_colorStream, depthX, depthY, depthZ, pColorX, pColorY));
}
开发者ID:Arkapravo,项目名称:OpenNI2,代码行数:7,代码来源:Device.cpp
示例18: toggleImageAutoWhiteBalance
void toggleImageAutoWhiteBalance(int)
{
if (g_colorStream.getCameraSettings() == NULL)
{
displayError("Color stream doesn't support camera settings");
return;
}
g_colorStream.getCameraSettings()->setAutoWhiteBalanceEnabled(!g_colorStream.getCameraSettings()->getAutoWhiteBalanceEnabled());
displayMessage("Auto White balance: %s", g_colorStream.getCameraSettings()->getAutoWhiteBalanceEnabled() ? "ON" : "OFF");
}
开发者ID:higuchi-yuuki,项目名称:OpenNI2,代码行数:10,代码来源:Device.cpp
示例19: startcamera
// Brings up the capture pipeline: initializes OpenNI, opens the first
// available device, then creates and starts the color and depth streams.
// NOTE(review): none of the return codes below are checked — a missing
// device or failed stream will go unnoticed here; consider verifying.
void KinectCamera::startcamera(void)
{
openni::OpenNI::initialize();// Initialize the OpenNI runtime
mDevice.open( openni::ANY_DEVICE );// Open the first available device
mColorStream.create( mDevice, openni::SENSOR_COLOR );// Create the color stream
mColorStream.start();// Start streaming color frames
mDepthStream.create( mDevice, openni::SENSOR_DEPTH );// Create the depth stream
mDepthStream.start();// Start streaming depth frames
fig=1;// presumably a "camera started" flag — confirm its consumers
}
开发者ID:zing235,项目名称:kinect-test-onlycolor-qml,代码行数:10,代码来源:kinectcamera.cpp
示例20: toggleCloseRange
// Toggles the depth sensor's "close range" mode through the PrimeSense
// XN_STREAM_PROPERTY_CLOSE_RANGE stream property and reports the new state.
void toggleCloseRange(int )
{
	// BUG FIX: initialize the flag — if getProperty() fails it leaves its
	// output untouched, and the old code then toggled an indeterminate value.
	bool bCloseRange = false;
	g_depthStream.getProperty(XN_STREAM_PROPERTY_CLOSE_RANGE, &bCloseRange);
	bCloseRange = !bCloseRange;
	g_depthStream.setProperty(XN_STREAM_PROPERTY_CLOSE_RANGE, bCloseRange);
	displayMessage ("Close range: %s", bCloseRange?"On":"Off");
}
开发者ID:Arkapravo,项目名称:OpenNI2,代码行数:11,代码来源:Device.cpp
注:本文中的openni::VideoStream类示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论