
C++ cvMatMul Function Code Examples


This article collects and summarizes typical usage examples of the C++ cvMatMul function. If you have been wondering what cvMatMul does, how to call it, or what real code that uses it looks like, the hand-picked examples below should help.



The following presents 20 cvMatMul code examples, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code samples.
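
Before the project examples, here is a minimal, self-contained sketch of the basic call. It is not taken from any of the projects below; the matrix sizes, the fill values, and the include path are illustrative assumptions that may need adjusting for your OpenCV version. cvMatMul(A, B, C) computes C = A*B with the legacy OpenCV C API; the destination matrix must be allocated beforehand with rows(A) x cols(B) and a matching element type, and it is safest to keep it distinct from both inputs rather than multiplying in place.

#include <opencv2/core/core_c.h>   /* legacy C API header; older builds use <cv.h> */
#include <stdio.h>

int main()
{
	CvMat* A = cvCreateMat( 2, 3, CV_64FC1 );
	CvMat* B = cvCreateMat( 3, 2, CV_64FC1 );
	CvMat* C = cvCreateMat( 2, 2, CV_64FC1 );   /* rows(A) x cols(B) */

	/* fill A and B with simple test values */
	for ( int i = 0; i < 2; i++ )
		for ( int j = 0; j < 3; j++ )
			cvmSet( A, i, j, i * 3 + j + 1 );           /* 1 2 3 / 4 5 6 */
	for ( int i = 0; i < 3; i++ )
		for ( int j = 0; j < 2; j++ )
			cvmSet( B, i, j, 0.5 * ( i * 2 + j + 1 ) ); /* 0.5 .. 3.0 */

	cvMatMul( A, B, C );   /* C = A * B */

	for ( int i = 0; i < 2; i++ )
		printf( "[%lf, %lf]\n", cvmGet( C, i, 0 ), cvmGet( C, i, 1 ) );

	cvReleaseMat( &A );
	cvReleaseMat( &B );
	cvReleaseMat( &C );
	return 0;
}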

Example 1: main

int main()
{
	CvMat* A = cvCreateMat( 3, 3, CV_64FC1 );
	CvMat* B = cvCreateMat( 3, 1, CV_64FC1 );
	CvMat* X = cvCreateMat( 3, 1, CV_64FC1 );
	A->data.db[0] = 11;
	A->data.db[1] = 5;
	A->data.db[2] = 15;
	A->data.db[3] = 5;
	A->data.db[4] = 3;
	A->data.db[5] = 1;
	A->data.db[6] = 15;
	A->data.db[7] = 1;
	A->data.db[8] = 31;
	B->data.db[0] = 1;
	B->data.db[1] = 2;
	B->data.db[2] = 3;
	printf("A :\n[%lf, %lf, %lf]\n[%lf, %lf, %lf]\n[%lf, %lf, %lf]\n", A->data.db[0], A->data.db[1], A->data.db[2], A->data.db[3], A->data.db[4], A->data.db[5], A->data.db[6], A->data.db[7], A->data.db[8]);
	cvCGSolve( icvMatMulOps, A, B, X );
	printf("x : [%lf, %lf, %lf]\n", X->data.db[0], X->data.db[1], X->data.db[2]);
	cvMatMul( A, X, X );
	printf("Ax : [%lf, %lf, %lf]\n", X->data.db[0], X->data.db[1], X->data.db[2]);
	cvSolve( A, B, X );
	printf("x : [%lf, %lf, %lf]\n", X->data.db[0], X->data.db[1], X->data.db[2]);
	cvMatMul( A, X, X );
	printf("Ax : [%lf, %lf, %lf]\n", X->data.db[0], X->data.db[1], X->data.db[2]);
	return 0;
}
Developer: caomw | Project: l1cs | Lines: 28 | Source: cgsolve_test.cpp


Example 2: cvTranspose

void Kalman::predict_P() {
	// P_pred = F*P*trans(F) + Q
	cvTranspose(F, F_trans);
	cvMatMul(P, F_trans, P_pred);
	cvMatMul(F, P_pred, P_pred);
	cvScaleAdd(P_pred, cvScalar(1), Q, P_pred);
}
Developer: tzwenn | Project: alvar | Lines: 7 | Source: Kalman.cpp


Example 3: cvMatMul

void KalmanSensor::update_P(CvMat *P_pred, CvMat *P) {
	//P = (I - K*H) * P_pred
	cvMatMul(K, H, P_tmp);
	cvSetIdentity(P);
	cvScaleAdd(P_tmp, cvScalar(-1), P, P);
	cvMatMul(P, P_pred, P);
}
Developer: tzwenn | Project: alvar | Lines: 7 | Source: Kalman.cpp


Example 4: sin

CvMat* AbstractCamera::computeRtMatrix(double a, double b, double g, double tX, double tY, double tZ) {

	//--- Represent 3d rotation with euler angles

	double sinG = sin(g);
	double cosG = cos(g);
	CvMat* rZg = cvCreateMat(3, 3, CV_32FC1);
	cvSetZero(rZg);
	cvmSet(rZg, 0, 0, cosG);
	cvmSet(rZg, 0, 1, -sinG);
	cvmSet(rZg, 1, 0, sinG);
	cvmSet(rZg, 1, 1, cosG);
	cvmSet(rZg, 2, 2, 1.0f);

	double sinB = sin(b);
	double cosB = cos(b);
	CvMat* rXb = cvCreateMat(3, 3, CV_32FC1);
	cvSetZero(rXb);
	cvmSet(rXb, 0, 0, 1.0f);
	cvmSet(rXb, 1, 1, cosB);
	cvmSet(rXb, 1, 2, -sinB);
	cvmSet(rXb, 2, 1, sinB);
	cvmSet(rXb, 2, 2, cosB);

	double sinA = sin(a);
	double cosA = cos(a);
	CvMat* rZa = cvCreateMat(3, 3, CV_32FC1);
	cvSetZero(rZa);
	cvmSet(rZa, 0, 0, cosA);
	cvmSet(rZa, 0, 1, -sinA);
	cvmSet(rZa, 1, 0, sinA);
	cvmSet(rZa, 1, 1, cosA);
	cvmSet(rZa, 2, 2, 1.0f);

	CvMat* rotationMatrix = cvCreateMat(3, 3, CV_32FC1);
	cvMatMul(rZg, rXb, rotationMatrix);
	cvMatMul(rotationMatrix, rZa, rotationMatrix);

	cvReleaseMat(&rZg);
	cvReleaseMat(&rXb);
	cvReleaseMat(&rZa);

	//--- [R|T] ; First camera rotation and translation matrix
	CvMat* rtMatrix = cvCreateMat(3, 4, CV_32FC1);
	for (int i = 0; i < 3; i++) {
		cvmSet(rtMatrix, i, 0, cvmGet(rotationMatrix, i, 0));
		cvmSet(rtMatrix, i, 1, cvmGet(rotationMatrix, i, 1));
		cvmSet(rtMatrix, i, 2, cvmGet(rotationMatrix, i, 2));
	}
	cvmSet(rtMatrix, 0, 3, tX);
	cvmSet(rtMatrix, 1, 3, tY);
	cvmSet(rtMatrix, 2, 3, tZ);

	cvReleaseMat(&rotationMatrix);

	return rtMatrix;
}
Developer: M-Samoht | Project: OpenMocap | Lines: 57 | Source: AbstractCamera.cpp


Example 5: projectImg

static void projectImg(IplImage *src, int64_t TRANS_X, int64_t TRANS_Y,
		IplImage *dst, CvMat *tmatrix) {
	if (tmatrix->rows == 2) {
		//translate
		CvMat* result = cvCreateMat(2, 3, CV_32FC1);
		cvSetReal2D(result, 0, 0, cvGetReal2D(tmatrix, 0, 0));
		cvSetReal2D(result, 0, 1, cvGetReal2D(tmatrix, 0, 1));
		cvSetReal2D(result, 1, 0, cvGetReal2D(tmatrix, 1, 0));
		cvSetReal2D(result, 1, 1, cvGetReal2D(tmatrix, 1, 1));
		cvSetReal2D(result, 0, 2, cvGetReal2D(tmatrix, 0, 2) + TRANS_X);
		cvSetReal2D(result, 1, 2, cvGetReal2D(tmatrix, 1, 2) + TRANS_Y);
		cvWarpAffine(src, dst, result, CV_INTER_LINEAR, cvScalarAll(0));
		cvReleaseMat(&result);
	} else if (tmatrix->rows == 3) {
		//translate matrix
		CvMat* offset = cvCreateMat(3, 3, CV_32FC1);
		cvSetReal2D(offset, 0, 0, 1);
		cvSetReal2D(offset, 0, 1, 0);
		cvSetReal2D(offset, 0, 2, TRANS_X);
		cvSetReal2D(offset, 1, 0, 0);
		cvSetReal2D(offset, 1, 1, 1);
		cvSetReal2D(offset, 1, 2, TRANS_Y);
		cvSetReal2D(offset, 2, 0, 0);
		cvSetReal2D(offset, 2, 1, 0);
		cvSetReal2D(offset, 2, 2, 1);
		//translate
		CvMat* result = cvCreateMat(3, 3, CV_32FC1);
		cvMatMul(offset, tmatrix, result);
		cvWarpPerspective(src, dst, result, CV_INTER_LINEAR, cvScalarAll(0));
		cvReleaseMat(&offset);
		cvReleaseMat(&result);
	}
}
Developer: juanbarrios | Project: multimedia_tools | Lines: 33 | Source: image_stitching.c


Example 6: cvCreateMat

/**
 * Calculate: K P R|T
 */
CvMat* MultipleViewGeomOld::calculateProjectionMatrix(CvMat *rtMat) {


    // 3 rows, 4 columns
    CvMat* kpMat = cvCreateMat(3, 4, CV_32FC1);
    cvMatMul(kMat,pMat,kpMat);

    CvMat* projMat = cvCreateMat(3, 4, CV_32FC1);
    cvMatMul(kpMat,rtMat,projMat);

    projMat = cvCloneMat(projMat);

    LOG4CPLUS_DEBUG(myLogger,"Projection K P R|T matrix" << endl << printCvMat(projMat));

    return projMat;
}
Developer: ricleal | Project: 3dac | Lines: 19 | Source: MultipleViewGeomOld.cpp


Example 7: cvCreateMat

void CModel::Draw(IplImage* img, CvMat* CamMat, CvScalar color, int thickness)
{
	int nEdges = Edges.size();
	int nVertexes = Vertexes.size();

	// Create the array of projected vertices
	vector<CvMat*> ProjectedVertex;

	ProjectedVertex.resize(Vertexes.size());
	for (int i = 0; i < nVertexes; i++)
		ProjectedVertex[i] = cvCreateMat(3,1,CV_32FC1);

	// Project the vertices
	for (int i = 0; i < nVertexes; i++)
	{
		cvMatMul(CamMat,Vertexes[i],ProjectedVertex[i]);
		CV_MAT_ELEM(*ProjectedVertex[i],float,0,0) = 
			CV_MAT_ELEM(*ProjectedVertex[i],float,0,0)/CV_MAT_ELEM(*ProjectedVertex[i],float,2,0);
		CV_MAT_ELEM(*ProjectedVertex[i],float,1,0) = 
			CV_MAT_ELEM(*ProjectedVertex[i],float,1,0)/CV_MAT_ELEM(*ProjectedVertex[i],float,2,0);
	}

	for (int i = 0; i < nEdges; i++)
	{
		cvLine(img, 
			cvPoint(CV_MAT_ELEM(*ProjectedVertex[Edges[i].idxFrm],float,0,0), CV_MAT_ELEM(*ProjectedVertex[Edges[i].idxFrm],float,1,0)),
			cvPoint(CV_MAT_ELEM(*ProjectedVertex[Edges[i].idxTo], float,0,0), CV_MAT_ELEM(*ProjectedVertex[Edges[i].idxTo], float,1,0)),
			color, thickness);
	}
	for (int i = 0; i < nVertexes; i++)
		cvReleaseMat(&ProjectedVertex[i]);
	ProjectedVertex.clear();
}
Developer: obyrnemj | Project: computational-geometry-3d-vision | Lines: 33 | Source: Model.cpp


Example 8: getOptFlow

Point2D getOptFlow(IplImage* currentFrame,Point2D p,IplImage* preFrame)
{
	Point2D temp;
	double b[2];
	b[0]=0;b[1]=0;
	
	double M11=0,M12=0,M22=0;
	for(int i = -OPTICAL_FLOW_POINT_AREA/2; i < OPTICAL_FLOW_POINT_AREA/2; i++)
	{
		for (int j = -OPTICAL_FLOW_POINT_AREA/2;j < OPTICAL_FLOW_POINT_AREA/2;j++)
		{
			temp = partial(currentFrame,Point2D(p.row+i,p.col+j));
			M11 += temp.dcol*temp.dcol;
			M12 += temp.dcol*temp.drow;
			M22 += temp.drow*temp.drow;
			b[0] += temp.dcol*(pixval8U(currentFrame,p.row+i,p.col+j)-pixval8U(preFrame,p.row+i,p.col+j));
			b[1] += temp.drow*(pixval8U(currentFrame,p.row+i,p.col+j)-pixval8U(preFrame,p.row+i,p.col+j));
		}
	}
	double a[] = {M11,M12,M12,M22};
	CvMat M=cvMat(2, 2, CV_64FC1, a);
	CvMat *Mi = cvCloneMat(&M);
	cvInvert(&M,Mi,CV_SVD);
	temp.col=0;
	temp.row=0;
	b[0] = -b[0];
	b[1] = -b[1];
	CvMat Mb = cvMat(2,1,CV_64FC1,b);
	CvMat *Mr = cvCloneMat(&Mb);
	cvMatMul( Mi, &Mb, Mr);
	double vy = (cvmGet(Mr,1,0));
	double vx = (cvmGet(Mr,0,0));
	
	return (Point2D(vy,vx));
}
Developer: Mrlonelei | Project: SIFT_tracking | Lines: 35 | Source: utils.cpp


Example 9: cvCreateMat

/** Computes the optical center of the camera from the camera projection matrix (http://en.wikipedia.org/wiki/Camera_matrix)
* @param pM : Camera projection matrix (3x4).
* @return : Optical center of camera.
*/
Point3D Utility::getOpticalCenter( CvMat* pM )
{
	CvMat *A = cvCreateMat(3, 3, CV_64FC1);
	CvMat *Ainv = cvCreateMat(3, 3, CV_64FC1);
	CvMat *b = cvCreateMat(3, 1, CV_64FC1);
	for(int i=0; i<3; ++i)
	{
		for(int j=0; j<3; ++j)
			cvmSet(A, i, j, cvmGet(pM,i,j));
		cvmSet(b, i, 0, cvmGet(pM, i,3));
	}
	cvInvert(A, Ainv);
	CvMat *oc = cvCreateMat(3, 1, CV_64FC1);
	cvMatMul(Ainv, b, oc);
	Point3D toRet;
	toRet.x = -1 * cvmGet(oc, 0, 0);				//NULL SPACE OF MATRIX pM
	toRet.y = -1 * cvmGet(oc, 1, 0);
	toRet.z = -1 * cvmGet(oc, 2, 0);

	cvReleaseMat(&A);
	cvReleaseMat(&Ainv);
	cvReleaseMat(&b);
	cvReleaseMat(&oc);
	return toRet;
}
Developer: dgrover | Project: flyspy | Lines: 29 | Source: Utility.cpp


Example 10: cvMatMul

/*! Transform the grid with the given homography and average the colors over
 * the triangles.
 */
void LightCollector::averageImage(IplImage *im, CvMat *_homography)
{
	if (avgChannels != im->nChannels) {
		if (avgChannels < im->nChannels) { 
			delete[] avg;
			avg = 0;
		}
		avgChannels = im->nChannels;
	}
	if (!avg) avg = new float[avgChannels*nbTri];
	
	// apply the homography to every mesh vertex
	if (_homography)
		cvMatMul(_homography, vertices, transformed);
	else
		cvCopy(vertices, transformed);
	CvMat r1,r2,r3;
	cvGetRow(transformed, &r1, 0);
	cvGetRow(transformed, &r2, 1);
	cvGetRow(transformed, &r3, 2);
	cvDiv(&r1,&r3,&r1);
	cvDiv(&r2,&r3,&r2);
	
	nbPix=0;
	for (int t=0; t<nbTri;t++) {
		int pts[3][2];
		for (int i=0; i<3; i++) {
			assert(triangles[t*3+i] < transformed->cols);
			pts[i][0] = cvRound(CV_MAT_ELEM(*transformed, float, 0, triangles[t*3+i]));
			pts[i][1] = cvRound(CV_MAT_ELEM(*transformed, float, 1, triangles[t*3+i]));
		}
		nbPix+=stat_triangle(im, pts, avg+t*avgChannels);
	}
}
Developer: 6301158 | Project: ofx-dev | Lines: 37 | Source: lightcollector.cpp


Example 11: calculateWidthInPixels

int calculateWidthInPixels(CvMat* P, float Y){
  float W = 0.10; //width of road 20cm ~ 0.2m 
  float w = 0.0; //width of the roads in pixels

  CvMat tmp;
  //create P_1 (row 1 of matrix P)
  CvMat *P_1 = cvCreateMat(1,4,CV_32FC1);
  cvGetRow(P,&tmp,0); //row 0
  cvCopy(&tmp,P_1,NULL);

  CvMat *P_3 = cvCreateMat(1,4,CV_32FC1);
  cvGetRow(P,&tmp,2); //row 2
  cvCopy(&tmp,P_3,NULL);

  CvMat* X_1 = cvCreateMat(4,1,CV_32FC1);
  CvMat* X_2 = cvCreateMat(4,1,CV_32FC1);
  CvMat* P_1_times_X_1 = cvCreateMat(1,1,CV_32FC1);
  CvMat* P_3_times_X_1 = cvCreateMat(1,1,CV_32FC1);
  CvMat* P_1_times_X_2 = cvCreateMat(1,1,CV_32FC1);
  CvMat* P_3_times_X_2 = cvCreateMat(1,1,CV_32FC1);

  cvmSet(X_1,0,0,W);
  cvmSet(X_1,1,0,Y);
  cvmSet(X_1,2,0,0.0);
  cvmSet(X_1,3,0,1.0);

  cvmSet(X_2,0,0,0);
  cvmSet(X_2,1,0,Y);
  cvmSet(X_2,2,0,0);
  cvmSet(X_2,3,0,1);

  cvMatMul(P_1,X_1,P_1_times_X_1);
  cvMatMul(P_3,X_1,P_3_times_X_1);	
  cvMatMul(P_1,X_2,P_1_times_X_2);
  cvMatMul(P_3,X_2,P_3_times_X_2);	

  w = ((cvmGet(P_1_times_X_1,0,0) /
        cvmGet(P_3_times_X_1,0,0)
       ) 
      -
      (cvmGet(P_1_times_X_2,0,0) /
       cvmGet(P_3_times_X_2,0,0)
      )); 


  return int(w+0.5);
}
Developer: mksun2006 | Project: road-paint-detector | Lines: 47 | Source: main.cpp


Example 12: mcvGetVanishingPoint

FLOAT_POINT2D mcvGetVanishingPoint(const CameraInfo *cameraInfo)
{
    //get the vp in world coordinates
    FLOAT_MAT_ELEM_TYPE vpp[] = {sin(cameraInfo->yaw)/cos(cameraInfo->pitch),
                                 cos(cameraInfo->yaw)/cos(cameraInfo->pitch), 0};
    CvMat vp = cvMat(3, 1, FLOAT_MAT_TYPE, vpp);

    //transform from world to camera coordinates
    //
    //rotation matrix for yaw
    FLOAT_MAT_ELEM_TYPE tyawp[] = {cos(cameraInfo->yaw), -sin(cameraInfo->yaw), 0,
                                   sin(cameraInfo->yaw), cos(cameraInfo->yaw), 0,
                                   0, 0, 1};
    CvMat tyaw = cvMat(3, 3, FLOAT_MAT_TYPE, tyawp);
    //rotation matrix for pitch
    FLOAT_MAT_ELEM_TYPE tpitchp[] = {1, 0, 0,
                                     0, -sin(cameraInfo->pitch), -cos(cameraInfo->pitch),
                                     0, cos(cameraInfo->pitch), -sin(cameraInfo->pitch)};
    CvMat transform = cvMat(3, 3, FLOAT_MAT_TYPE, tpitchp);
    //combined transform
    cvMatMul(&transform, &tyaw, &transform);

    //
    //transformation from (xc, yc) in camera coordinates
    // to (u,v) in image frame
    //
    //matrix to shift optical center and focal length
    FLOAT_MAT_ELEM_TYPE t1p[] = {
        cameraInfo->focalLength.x, 0,
        cameraInfo->opticalCenter.x,
        0, cameraInfo->focalLength.y,
        cameraInfo->opticalCenter.y,
        0, 0, 1};
    CvMat t1 = cvMat(3, 3, FLOAT_MAT_TYPE, t1p);
    //combine transform
    cvMatMul(&t1, &transform, &transform);
    //transform
    cvMatMul(&transform, &vp, &vp);

    //
    //clean and return
    //
    FLOAT_POINT2D ret;
    ret.x = cvGetReal1D(&vp, 0);
    ret.y = cvGetReal1D(&vp, 1);
    return ret;
}
Developer: ChefOtter | Project: smartlabatwork-release | Lines: 47 | Source: camera_calibration_utils.cpp


Example 13: printf

//============================================================================
void AAM_Basic::CalcGradientMatrix(const CvMat* CParams, 
								   const CvMat* vCDisps,
								   const CvMat* vPoseDisps,
								   const std::vector<AAM_Shape>& AllShapes, 
								   const std::vector<IplImage*>& AllImages)
{
	int npixels = __cam.__texture.nPixels();
	int np = __cam.nModes();

	// do model parameter experiments
    {
		printf("Calculating parameter gradient matrix...\n");
		CvMat* GParam = cvCreateMat(np, npixels, CV_64FC1);cvZero(GParam);
        CvMat* GtG = cvCreateMat(np, np, CV_64FC1);
        CvMat* GtGInv = cvCreateMat(np, np, CV_64FC1);
        
		// estimate Rc
		EstCParamGradientMatrix(GParam, CParams, AllShapes, AllImages, vCDisps);
        __Rc = cvCreateMat(np, npixels, CV_64FC1);
        cvGEMM(GParam, GParam, 1, NULL, 0, GtG, CV_GEMM_B_T);
        cvInvert(GtG, GtGInv, CV_SVD );
		cvMatMul(GtGInv, GParam, __Rc);
        
		cvReleaseMat(&GtG);
		cvReleaseMat(&GtGInv);
		cvReleaseMat(&GParam);
    }

	// do pose experiments, this is for global shape normalization
    {
		printf("Calculating pose gradient matrix...\n");
		CvMat* GtG = cvCreateMat(4, 4, CV_64FC1);
        CvMat* GtGInv = cvCreateMat(4, 4, CV_64FC1);
        CvMat* GPose = cvCreateMat(4, npixels, CV_64FC1);   cvZero(GPose);

        // estimate Rt
        EstPoseGradientMatrix(GPose, CParams, AllShapes, AllImages, vPoseDisps);
        __Rq = cvCreateMat(4, npixels, CV_64FC1);
        cvGEMM(GPose, GPose, 1, NULL, 0, GtG, CV_GEMM_B_T);
        cvInvert(GtG, GtGInv, CV_SVD);
		cvMatMul(GtGInv, GPose, __Rq);
        
		cvReleaseMat(&GtG);
		cvReleaseMat(&GtGInv);
		cvReleaseMat(&GPose);
    }
}
Developer: aodkrisda | Project: face-gesture-api | Lines: 48 | Source: AAM_Basic.cpp


Example 14: assert

CvMat* camcalib::matMul(const CvMat* A, const CvMat* B) 
{
  assert(A->cols == B->rows);

  CvMat* M = cvCreateMat(A->rows, B->cols, A->type);
  cvMatMul(A, B, M);
  return M;
}
Developer: ulyssesrr | Project: carmen_lcad | Lines: 8 | Source: camcalib.cpp


Example 15: cvCopy

	void BazARTracker::show_result(CamAugmentation &augment, IplImage *video, IplImage **dst)
	{
		if (getDebugMode()){
			if (*dst==0) *dst=cvCloneImage(video);
			else cvCopy(video, *dst);
		}

		CvMat *m  = augment.GetProjectionMatrix(0);
		// Flip (this accounts for the difference between the OpenGL origin and the camera origin)
		CvMat *coordinateTrans = cvCreateMat(3, 3, CV_64F);
		cvmSetIdentity(coordinateTrans);
		cvmSet(coordinateTrans, 1, 1, -1);
		cvmSet(coordinateTrans, 1, 2, m_cparam->cparam.ysize);
		cvMatMul(coordinateTrans, m, m);
	
		// extract intrinsic camera parameters from bazar's projection matrix..
		GetARToolKitRTfromBAZARProjMat(g_matIntrinsic, m, matCameraRT4_4);
			
		cvTranspose(matCameraRT4_4, matCameraRT4_4);
		cvReleaseMat(&coordinateTrans);

		// Debug
		if (getDebugMode()) {
			// draw the coordinate system axes
			double w =video->width/2.0;
			double h =video->height/2.0;

			// 3D coordinates of an object
			double pts[4][4] = {
				{w,h,0, 1}, // 0,0,0,1
				{w*2,h,0, 1}, // w, 0
				{w,h*2,0, 1}, // 0, h
				{w,h,-w-h, 1} // 0, 0, -
			};

			CvMat ptsMat, projectedMat;
			cvInitMatHeader(&ptsMat, 4, 4, CV_64FC1, pts);
			cvInitMatHeader(&projectedMat, 3, 4, CV_64FC1, projected);
		
			cvGEMM(m, &ptsMat, 1, 0, 0, &projectedMat, CV_GEMM_B_T );

			for (int i=0; i<4; i++) 
			{
				projected[0][i] /= projected[2][i];
				projected[1][i] /= projected[2][i];
			}

			// draw the projected lines
			cvLine(*dst, cvPoint((int)projected[0][0], (int)projected[1][0]),
				cvPoint((int)projected[0][1], (int)projected[1][1]), CV_RGB(255,0,0), 2);
			cvLine(*dst, cvPoint((int)projected[0][0], (int)projected[1][0]),
				cvPoint((int)projected[0][2], (int)projected[1][2]), CV_RGB(0,255,0), 2);
			cvLine(*dst, cvPoint((int)projected[0][0], (int)projected[1][0]),
				cvPoint((int)projected[0][3], (int)projected[1][3]), CV_RGB(0,0,255), 2);
		}
	}
Developer: soulsheng | Project: osgART | Lines: 56 | Source: BazARTracker.cpp


Example 16: printf

bool CFeatureExtraction::DoPCA(CvMat * pMat, CvMat * pResultMat, int nSize, int nExpectedSize)
{
	printf("\nCFeatureExtraction::DoPCA in\n");
	int i;	

	printf("DoPCA: Sort our data sets in a vector each\n");	
	// Sort our data sets in a vector each
	CvMat ** pDataSet = new CvMat*[m_nWidth*m_nHeight];
	float * pData = pMat->data.fl;
	for (i=0;i<m_nWidth*m_nHeight;i++)
	{
		pDataSet[i] = (CvMat*) malloc(sizeof(CvMat));
		cvInitMatHeader(pDataSet[i], 1, nSize, CV_32FC1, &pData[i*nSize]);
	}
	
	printf("DoPCA: Calc covariance matrix\n");
	// Calc covariance matrix
	CvMat* pCovMat = cvCreateMat( nSize, nSize, CV_32F );
	CvMat* pMeanVec = cvCreateMat( 1, nSize, CV_32F );
	
	cvCalcCovarMatrix( (const void **)pDataSet, m_nWidth*m_nHeight, pCovMat, pMeanVec, CV_COVAR_SCALE | CV_COVAR_NORMAL );
	
	cvReleaseMat(&pMeanVec);
	
	printf("DoPCA: Do the SVD decomposition\n");
	// Do the SVD decomposition
	CvMat* pMatW = cvCreateMat( nSize, 1, CV_32F );
	CvMat* pMatV = cvCreateMat( nSize, nSize, CV_32F );
	CvMat* pMatU = cvCreateMat( nSize, nSize, CV_32F );
	
	cvSVD(pCovMat, pMatW, pMatU, pMatV, CV_SVD_MODIFY_A+CV_SVD_V_T);
	
	cvReleaseMat(&pCovMat);
	cvReleaseMat(&pMatW);
	cvReleaseMat(&pMatV);

	printf("DoPCA: Extract the requested number of dominant eigen vectors\n");
	// Extract the requested number of dominant eigen vectors
	CvMat* pEigenVecs = cvCreateMat( nSize, nExpectedSize, CV_32F );
	for (i=0;i<nSize;i++)
		memcpy(&pEigenVecs->data.fl[i*nExpectedSize], &pMatU->data.fl[i*nSize], nExpectedSize*sizeof(float));

	printf("DoPCA: Transform to the new basis\n");
	// Transform to the new basis	
	cvMatMul(pMat, pEigenVecs, pResultMat);
	cvReleaseMat(&pMatU);
	cvReleaseMat(&pEigenVecs);
	
	for (i = 0; i < m_nHeight * m_nWidth; i++)
		free(pDataSet[i]);	// allocated with malloc above, so free() rather than delete[]
	delete [] pDataSet;

	printf("CFeatureExtraction::DoPCA out\n");
	return true;
}
Developer: altok234 | Project: cgworkshop | Lines: 55 | Source: FeatureExtraction.cpp


Example 17: main

int main (int argc, char **argv)
{
	int i, j;
	int nrow = 3;
	int ncol = 3;
	CvMat *src, *dst, *mul;
	double det;
	CvRNG rng = cvRNG (time (NULL));      /* initialize the random number generator */
	// (1) allocate the matrices
	src = cvCreateMat (nrow, ncol, CV_32FC1);
	dst = cvCreateMat (ncol, nrow, CV_32FC1);
	mul = cvCreateMat (nrow, nrow, CV_32FC1);
	// (2) fill matrix src with random values
	printf ("src\n");
	cvmSet (src, 0, 0, 1);
	for (i = 0; i < src->rows; i++)
	{
		for (j = 0; j < src->cols; j++)
		{
			cvmSet (src, i, j, cvRandReal (&rng));
			printf ("% lf\t", cvmGet (src, i, j));
		}
		printf ("\n");
	}
	// (3) compute the inverse of src and store it in dst
	det = cvInvert (src, dst, CV_SVD);
	// (4) print the determinant of src
	printf ("det(src)=%lf\n", det);
	// (5) print dst
	printf ("dst\n");
	for (i = 0; i < dst->rows; i++)
	{
		for (j = 0; j < dst->cols; j++)
		{
			printf ("% lf\t", cvmGet (dst, i, j));
		}
		printf ("\n");
	}
	// (6) compute the product of src and dst to verify the inverse
	cvMatMul (src, dst, mul);
	printf ("mul\n");
	for (i = 0; i < mul->rows; i++)
	{
		for (j = 0; j < mul->cols; j++)
		{
			printf ("% lf\t", cvmGet (mul, i, j));
		}
		printf ("\n");
	}
	// (7) release the matrices
	cvReleaseMat (&src);
	cvReleaseMat (&dst);
	cvReleaseMat (&mul);
	return 0;
}
Developer: moho110 | Project: Machine-learning | Lines: 55 | Source: matrix.cpp


Example 18: cvCGSolve

double cvCGSolve( CvMat* A, CvMat* B, CvMat* X, CvTermCriteria term_crit )
{
	cvZero( X );
	CvMat* R = cvCloneMat( B );
	double delta = cvDotProduct( R, R );
	CvMat* D = cvCloneMat( R );
	double delta0 = delta;
	double bestres = 1.;
	CvMat* BX = cvCloneMat( X );
	CvMat* Q = cvCreateMat( X->rows, X->cols, CV_MAT_TYPE(X->type) );
	for ( int i = 0; i < term_crit.max_iter; i++ )
	{
		cvMatMul( A, D, Q );
		double alpha = delta / cvDotProduct( D, Q );
		cvScaleAdd( D, cvScalar(alpha), X, X );
		if ( (i + 1) % 50 == 0 )
		{
			cvMatMul( A, X, R );
			cvSub( B, R, R );
		} else {
			cvScaleAdd( Q, cvScalar(-alpha), R, R );
		}
		double deltaold = delta;
		delta = cvDotProduct( R, R );
		double beta = delta / deltaold;
		cvScaleAdd( D, cvScalar(beta), R, D );
		double res = delta / delta0;
		if ( res < bestres )
		{
			cvCopy( X, BX );
			bestres = res;
		}
		if ( delta < delta0 * term_crit.epsilon )
			break;
	}
	cvCopy( BX, X );
	cvReleaseMat( &R );
	cvReleaseMat( &D );
	cvReleaseMat( &BX );
	cvReleaseMat( &Q );
	return sqrt( bestres );
}
Developer: caomw | Project: l1cs | Lines: 42 | Source: cvcgsolve.cpp


Example 19: yRotate

// 
// Rotate around the y axis
void yRotate(CvMat *vec, float angle, CvMat *res)
{
    angle = D2R(angle);
    float vals[] = { cos(angle), 0, -sin(angle), 0,
                     0, 1, 0, 0,
                     sin(angle), 0, cos(angle), 0,
                     0, 0, 0, 1};    
    CvMat mat;
    cvInitMatHeader( &mat, 4, 4, CV_32FC1, &vals );
    cvMatMul(&mat, vec, res);
}
Developer: mikejmills | Project: AerialImager | Lines: 13 | Source: Measurement_Function.cpp


Example 20: augment_scene

static void augment_scene(CalibModel &model, IplImage *frame, IplImage *display)
{
  cvCopy(frame, display);

  if (!model.detector.object_is_detected) 
    return;

  CvMat *m = model.augm.GetProjectionMatrix(0);
  if (!m) return;

  double pts[4][4];
  double proj[4][4];
  CvMat ptsMat, projMat;
  cvInitMatHeader(&ptsMat, 4, 4, CV_64FC1, pts);
  cvInitMatHeader(&projMat, 3, 4, CV_64FC1, proj);
  for (int i=0; i<4; i++) {
    pts[0][i] = model.corners[i].x;
    pts[1][i] = model.corners[i].y;
    pts[2][i] = 0;
    pts[3][i] = 1;
  }
  cvMatMul(m, &ptsMat, &projMat);
  cvReleaseMat(&m);

  CvPoint projPts[4];
  for (int i=0;i<4; i++) {
    projPts[i].x = cvRound(proj[0][i]/proj[2][i]);
    projPts[i].y = cvRound(proj[1][i]/proj[2][i]);
  }

  CvMat *o2w = model.augm.GetObjectToWorld();
  float normal[3];
  for (int j=0;j<3;j++)
    normal[j] = cvGet2D(o2w, j, 2).val[0];
  cvReleaseMat(&o2w);

  // we want to relight a color present on the model image
  // with an irradiance coming from the irradiance map
  CvScalar color = cvGet2D(model.image, model.image->height/2, model.image->width/2);
  CvScalar irradiance = model.map.readMap(normal);

  // the camera has some gain and bias
  const float *g = model.map.getGain(0);
  const float *b = model.map.getBias(0);

  // relight the 3 RGB channels. The bias value expects 0 = black, 1 = white,
  // but the images are stored with a white value of 255, so conversion is required.
  for (int i=0; i<3; i++) {
    color.val[i] = 255.0*(g[i]*(color.val[i]/255.0)*irradiance.val[i] + b[i]);
  }

  // draw a filled polygon with the relighted color
  cvFillConvexPoly(display, projPts, 4, color);
}
Developer: emblem | Project: aril | Lines: 54 | Source: fullcalib.cpp



Note: The cvMatMul examples in this article were compiled by 纯净天空 from source code and documentation hosted on platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their authors, and copyright remains with the original authors; please consult each project's License before redistributing or using the code. Do not reproduce without permission.

