C++ 如何使用ArCore确定点相对于相机的世界坐标?
使用下面的代码,我试图从深度图像创建 3D 点。但当着色器通过投影矩阵和视图矩阵变换这些点时,它们全部位于世界坐标的 (0,0) 附近;而当我让着色器再叠加模型矩阵(模型 × 视图 × 投影)时,则完全看不到任何点。请问如何正确地把深度图像像素反投影为相对于相机的世界坐标?
for(int i=0 ; i < image_height * image_width ; i++)
{
int r_i = i / (int)image_width;
int c_i = i % (int)image_width;
u_short* pixel = nullptr;
pixel = (u_short*)&depth_data[i * image_pixel_stride];
//~2-3 feet capture zone.
if(*pixel < 1000 || *pixel > 1500)
continue;
float x,y,z;
float d_i = (float)*pixel / (float)8191;
//calculate x-coordinate
float alpha_h = (M_PI - fovW) / 2;
float gamma_i_h = alpha_h + (float)c_i*(fovW / image_width);
x= ( d_i / tan(gamma_i_h));
//calculate y-coordinate
float alpha_v = 2 * M_PI - (fovH / 2);
float gamma_i_v = alpha_v + (float)r_i*(fovH / image_height);
y = ( d_i * tan(gamma_i_v)*-1);
//z-coordinate
vertices[vertexIndex++] = x;
vertices[vertexIndex++] = y;
vertices[vertexIndex++] = d_i;
}
// Create an anchor at the camera's current pose so the point cloud can be
// expressed in world space via the anchor's transform.
// NOTE(review): this appears to run per-frame, and the anchor is never
// released with ArAnchor_release in the visible code — confirm its
// lifetime, or ARCore will accumulate anchors.
ArAnchor* aa;
ArSession_acquireNewAnchor(ar_session, arCameraPose, &aa);
// Model matrix defaults to identity until the anchor reports a valid pose.
glm::mat4 model_mat(1.0f);
// Initialize to STOPPED so a failed query cannot leave stale "tracking" state.
ArTrackingState tracking_state = AR_TRACKING_STATE_STOPPED;
ArAnchor_getTrackingState(ar_session, aa,
&tracking_state);
if (tracking_state == AR_TRACKING_STATE_TRACKING) {
// Render object only if the tracking state is AR_TRACKING_STATE_TRACKING.
util::GetTransformMatrixFromAnchor(*aa, ar_session,
&model_mat);...