如何将世界坐标转换为相机图像坐标?
How to Convert World Coordinates to Camera Image Coordinates?
我正在尝试模拟虚拟相机!
我想计算 3D 点在相机图像平面中的投影。
参考 this formula,我正在将 3D 世界点转换为 2D 相机坐标,就像这样。
Point2d World2CameraCoOrdinate(Point3d point3D,const Affine3d mCameraRT,
const Affine3d mCameraIntrinsics,const int width,const int height)
{
cout <<"Input World CoOrdinates 3D: " <<point3D<<endl;
Affine3d viewProjectionMatrix = mCameraIntrinsics * mCameraRT;
point3D = viewProjectionMatrix * point3D;
Point2d ptCamCoOrdinates;
float winX = (( point3D.x + 1 ) / 2.0) * width ;
float winY = (( 1 - point3D.y ) / 2.0) * height ;
ptCamCoOrdinates=Point2d(winX,winY);
cout <<"Output Camera Co Ordinates 2D: " <<ptCamCoOrdinates<<endl;
return ptCamCoOrdinates;
}
但我无法正确计算它并且我正在使用 OpenCV VTK 来验证结果!
VTK 渲染图像点 2D:408,150
估计的 2D 相机点:800053,-147570
这是我的完整源代码:
#include <iostream>
#include <fstream>
#include <string>
#include <Windows.h>
#include <gl/GL.h>
#include <gl/GLU.h>
#pragma comment( lib, "opengl32.lib" )
#pragma comment( lib, "glu32.lib" )
#include <opencv2/opencv.hpp>
#include <opencv2/plot.hpp>
#include <opencv2/viz.hpp>
using namespace std;
using namespace cv;
bool bViewFromCamera = false;
void keyboard_callback(const viz::KeyboardEvent &event, void* cookie)
{
if ( event.action == 0 &&!event.symbol.compare("s") )
bViewFromCamera = !bViewFromCamera;
}
Point2d World2CameraCoOrdinate(Point3d point3D,const Affine3d mCameraRT,
const Affine3d mCameraIntrinsics,const int width,const int height)
{
cout <<"Input World CoOrdinates 3D: " <<point3D<<endl;
Affine3d viewProjectionMatrix = mCameraIntrinsics * mCameraRT;
point3D = viewProjectionMatrix * point3D;
Point2d ptCamCoOrdinates;
float winX = (( point3D.x + 1 ) / 2.0) * width ;
float winY = (( 1 - point3D.y ) / 2.0) * height ;
ptCamCoOrdinates=Point2d(winX,winY);
cout <<"Output Camera Co Ordinates 2D: " <<ptCamCoOrdinates<<endl;
return ptCamCoOrdinates;
}
int _tmain(int argc, _TCHAR* argv[])
{
const int IMAGE_WIDTH=1176,
IMAGE_HEIGHT=482;
const double fx=1239.911,
fy=1239.911,
cx=519.909,
cy=246.656;
double camMatarray[9] = {fx, 0., cx,
0., fy, cy,
0., 0., 1.};
Matx33d CameraMatrix = Matx33d( fx, 0, cx,
0, fy, cy,
0, 0, 1);
Point3d pt3dSignPosition_Actual(-1.0,-0.50,5.0),
Pt3D_Camera;
Point2d Pt2D_CameraCoOrdinate;
/// Create 3D windows
viz::Viz3d Window_3D("World 3D Frame");
Window_3D.setWindowSize(Size(IMAGE_WIDTH,IMAGE_HEIGHT));
Window_3D.setBackgroundColor(); // black by default
Window_3D.registerKeyboardCallback(&keyboard_callback);
Mat RotationVector=Mat::zeros(1,3,CV_64F);
Mat TranslationVector=Mat::zeros(3,1,CV_64F);
while(!Window_3D.wasStopped())
{
/// Render points as 3D cubes
Affine3d point_pose(Mat::eye(3,3,CV_64F), pt3dSignPosition_Actual);
viz::WCube cube_widget(Point3f(-0.125,-0.125,0.0), Point3f(0.125,0.125,-0.001), true, viz::Color::blue());
cube_widget.setRenderingProperty(viz::LINE_WIDTH, 2.0);
Window_3D.showWidget("Cube"+string("1"), cube_widget, point_pose);
// Create a sphere widget
viz::WSphere sw(Point3d( pt3dSignPosition_Actual), 0.01f);
// Cast sphere widget to cloud widget
viz::WCloud cw = sw.cast<viz::WCloud>();
/// Modify it, and it will be modified in the window.
cw.setColor(viz::Color::red());
// Display it in a window
Window_3D.showWidget("SphereCloudWidget", cw);
Affine3d cam_pose = (Mat::eye(3,3,CV_64F), Point3d(0.0,0.0,0.0));
viz::WCameraPosition cpw(0.125); // Coordinate axes
viz::WCameraPosition cpw_frustum(CameraMatrix, 0.3,viz::Color::yellow()); // Camera frustum
if ( bViewFromCamera )
{
Window_3D.setViewerPose(cam_pose);
//For Verifying the Screen Co Ordinates
Mat mScreenCapture(Size(IMAGE_WIDTH,IMAGE_HEIGHT),CV_8UC3);
//use fast 4-byte alignment (default anyway) if possible
glPixelStorei(GL_PACK_ALIGNMENT, (mScreenCapture.step & 3) ? 1 : 4);
//set length of one complete row in destination data (doesn't need to equal mScreenCapture.cols)
glPixelStorei(GL_PACK_ROW_LENGTH, mScreenCapture.step/mScreenCapture.elemSize());
glReadPixels(0, 0, mScreenCapture.cols, mScreenCapture.rows, GL_BGR_EXT, GL_UNSIGNED_BYTE, mScreenCapture.data);
cv::flip(mScreenCapture, mScreenCapture, 0);
Pt2D_CameraCoOrdinate = World2CameraCoOrdinate(pt3dSignPosition_Actual,cam_pose,CameraMatrix,IMAGE_WIDTH,IMAGE_HEIGHT);
circle(mScreenCapture,Pt2D_CameraCoOrdinate,2,Scalar(0,255,0),2);
imshow("mScreenCapture",mScreenCapture);
}
else
{
Window_3D.showWidget("CPW", cpw, cam_pose);
Window_3D.showWidget("CPW_FRUSTUM", cpw_frustum, cam_pose);
}
Window_3D.spinOnce(100, true);
Window_3D.removeAllWidgets();
}
Window_3D.close();
waitKey();
return 0;
}
好的,明白了!
1) VTK 相机参数未初始化properly.Only 图像大小设置正确,所有其他参数(如焦距、主点)都设置为输入图像的一些默认值!
2) 生成的相机 2D 图像坐标采用归一化设备坐标 (NDC)。即标准化为 Depth/Z 轴,如下所示!
Point2d World2CameraCoOrdinate(Point3d point3D,const Affine3d mCameraRT,
const Affine3d mCameraIntrinsics,const int width,const int height)
{
Affine3d viewProjectionMatrix = mCameraIntrinsics* mCameraRT;
point3D = viewProjectionMatrix * point3D;
point3D.x/=point3D.z;
point3D.y/=point3D.z;
point3D.z/=point3D.z;
Point2d ptCamCoOrdinates(point3D.x,point3D.y);
return ptCamCoOrdinates;
}
我正在尝试模拟虚拟相机!
我想计算 3D 点在相机图像平面中的投影。 参考 this formula,我正在将 3D 世界点转换为 2D 相机坐标,就像这样。
Point2d World2CameraCoOrdinate(Point3d point3D,const Affine3d mCameraRT,
const Affine3d mCameraIntrinsics,const int width,const int height)
{
cout <<"Input World CoOrdinates 3D: " <<point3D<<endl;
Affine3d viewProjectionMatrix = mCameraIntrinsics * mCameraRT;
point3D = viewProjectionMatrix * point3D;
Point2d ptCamCoOrdinates;
float winX = (( point3D.x + 1 ) / 2.0) * width ;
float winY = (( 1 - point3D.y ) / 2.0) * height ;
ptCamCoOrdinates=Point2d(winX,winY);
cout <<"Output Camera Co Ordinates 2D: " <<ptCamCoOrdinates<<endl;
return ptCamCoOrdinates;
}
但我无法正确计算它并且我正在使用 OpenCV VTK 来验证结果!
VTK 渲染图像点 2D:408,150
估计的 2D 相机点:800053,-147570
这是我的完整源代码:
#include <iostream>
#include <fstream>
#include <string>
#include <Windows.h>
#include <gl/GL.h>
#include <gl/GLU.h>
#pragma comment( lib, "opengl32.lib" )
#pragma comment( lib, "glu32.lib" )
#include <opencv2/opencv.hpp>
#include <opencv2/plot.hpp>
#include <opencv2/viz.hpp>
using namespace std;
using namespace cv;
bool bViewFromCamera = false;
void keyboard_callback(const viz::KeyboardEvent &event, void* cookie)
{
if ( event.action == 0 &&!event.symbol.compare("s") )
bViewFromCamera = !bViewFromCamera;
}
Point2d World2CameraCoOrdinate(Point3d point3D,const Affine3d mCameraRT,
const Affine3d mCameraIntrinsics,const int width,const int height)
{
cout <<"Input World CoOrdinates 3D: " <<point3D<<endl;
Affine3d viewProjectionMatrix = mCameraIntrinsics * mCameraRT;
point3D = viewProjectionMatrix * point3D;
Point2d ptCamCoOrdinates;
float winX = (( point3D.x + 1 ) / 2.0) * width ;
float winY = (( 1 - point3D.y ) / 2.0) * height ;
ptCamCoOrdinates=Point2d(winX,winY);
cout <<"Output Camera Co Ordinates 2D: " <<ptCamCoOrdinates<<endl;
return ptCamCoOrdinates;
}
int _tmain(int argc, _TCHAR* argv[])
{
const int IMAGE_WIDTH=1176,
IMAGE_HEIGHT=482;
const double fx=1239.911,
fy=1239.911,
cx=519.909,
cy=246.656;
double camMatarray[9] = {fx, 0., cx,
0., fy, cy,
0., 0., 1.};
Matx33d CameraMatrix = Matx33d( fx, 0, cx,
0, fy, cy,
0, 0, 1);
Point3d pt3dSignPosition_Actual(-1.0,-0.50,5.0),
Pt3D_Camera;
Point2d Pt2D_CameraCoOrdinate;
/// Create 3D windows
viz::Viz3d Window_3D("World 3D Frame");
Window_3D.setWindowSize(Size(IMAGE_WIDTH,IMAGE_HEIGHT));
Window_3D.setBackgroundColor(); // black by default
Window_3D.registerKeyboardCallback(&keyboard_callback);
Mat RotationVector=Mat::zeros(1,3,CV_64F);
Mat TranslationVector=Mat::zeros(3,1,CV_64F);
while(!Window_3D.wasStopped())
{
/// Render points as 3D cubes
Affine3d point_pose(Mat::eye(3,3,CV_64F), pt3dSignPosition_Actual);
viz::WCube cube_widget(Point3f(-0.125,-0.125,0.0), Point3f(0.125,0.125,-0.001), true, viz::Color::blue());
cube_widget.setRenderingProperty(viz::LINE_WIDTH, 2.0);
Window_3D.showWidget("Cube"+string("1"), cube_widget, point_pose);
// Create a sphere widget
viz::WSphere sw(Point3d( pt3dSignPosition_Actual), 0.01f);
// Cast sphere widget to cloud widget
viz::WCloud cw = sw.cast<viz::WCloud>();
/// Modify it, and it will be modified in the window.
cw.setColor(viz::Color::red());
// Display it in a window
Window_3D.showWidget("SphereCloudWidget", cw);
Affine3d cam_pose = (Mat::eye(3,3,CV_64F), Point3d(0.0,0.0,0.0));
viz::WCameraPosition cpw(0.125); // Coordinate axes
viz::WCameraPosition cpw_frustum(CameraMatrix, 0.3,viz::Color::yellow()); // Camera frustum
if ( bViewFromCamera )
{
Window_3D.setViewerPose(cam_pose);
//For Verifying the Screen Co Ordinates
Mat mScreenCapture(Size(IMAGE_WIDTH,IMAGE_HEIGHT),CV_8UC3);
//use fast 4-byte alignment (default anyway) if possible
glPixelStorei(GL_PACK_ALIGNMENT, (mScreenCapture.step & 3) ? 1 : 4);
//set length of one complete row in destination data (doesn't need to equal mScreenCapture.cols)
glPixelStorei(GL_PACK_ROW_LENGTH, mScreenCapture.step/mScreenCapture.elemSize());
glReadPixels(0, 0, mScreenCapture.cols, mScreenCapture.rows, GL_BGR_EXT, GL_UNSIGNED_BYTE, mScreenCapture.data);
cv::flip(mScreenCapture, mScreenCapture, 0);
Pt2D_CameraCoOrdinate = World2CameraCoOrdinate(pt3dSignPosition_Actual,cam_pose,CameraMatrix,IMAGE_WIDTH,IMAGE_HEIGHT);
circle(mScreenCapture,Pt2D_CameraCoOrdinate,2,Scalar(0,255,0),2);
imshow("mScreenCapture",mScreenCapture);
}
else
{
Window_3D.showWidget("CPW", cpw, cam_pose);
Window_3D.showWidget("CPW_FRUSTUM", cpw_frustum, cam_pose);
}
Window_3D.spinOnce(100, true);
Window_3D.removeAllWidgets();
}
Window_3D.close();
waitKey();
return 0;
}
好的,明白了!
1) VTK 相机参数未初始化properly.Only 图像大小设置正确,所有其他参数(如焦距、主点)都设置为输入图像的一些默认值!
2) 生成的相机 2D 图像坐标采用归一化设备坐标 (NDC)。即标准化为 Depth/Z 轴,如下所示!
Point2d World2CameraCoOrdinate(Point3d point3D,const Affine3d mCameraRT,
const Affine3d mCameraIntrinsics,const int width,const int height)
{
Affine3d viewProjectionMatrix = mCameraIntrinsics* mCameraRT;
point3D = viewProjectionMatrix * point3D;
point3D.x/=point3D.z;
point3D.y/=point3D.z;
point3D.z/=point3D.z;
Point2d ptCamCoOrdinates(point3D.x,point3D.y);
return ptCamCoOrdinates;
}