#ifndef CObservation3DRangeScan_project3D_impl_H
#define CObservation3DRangeScan_project3D_impl_H

template <class POINTMAP>
void project3DPointsFromDepthImageInto(mrpt::obs::CObservation3DRangeScan & src_obs, POINTMAP & dest_pointcloud,
	const mrpt::obs::T3DPointsProjectionParams & projectParams, const mrpt::obs::TRangeImageFilterParams & filterParams)
{
	// ...
	const size_t WH = W*H;
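The destination cloud is only accessed through mrpt::utils::PointCloudAdapter<POINTMAP> (listed among the referenced symbols at the end of this file). A hedged sketch of the minimal interface the calls in this file rely on; everything here is illustrative and inferred from those calls, not the actual MRPT declaration:

	// Interface sketch only, inferred from the pca.* calls below; NOT the real
	// mrpt::utils::PointCloudAdapter declaration.
	#include <cstddef>
	#include <cstdint>

	template <class POINTMAP>
	struct PointCloudAdapterSketch
	{
		explicit PointCloudAdapterSketch(POINTMAP &cloud);

		std::size_t size() const;                                        // number of stored points
		void getPointXYZ(std::size_t i, float &x, float &y, float &z) const;
		void setPointXYZ(std::size_t i, float x, float y, float z);
		void setInvalidPoint(std::size_t i);                             // used when MAKE_DENSE==false
		void setPointRGBu8(std::size_t i, uint8_t R, uint8_t G, uint8_t B);
	};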
	// Fill the cached per-pixel back-projection factors (LUT):
	*kys++ = (r_cx - c) * r_fx_inv;
	*kzs++ = (r_cy - r) * r_fy_inv;
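These two cached factors implement the pinhole back-projection for MRPT's +X-forward convention: a pixel (r,c) with depth D maps to X = D, Y = D*(cx - c)/fx, Z = D*(cy - r)/fy. A minimal, self-contained sketch of that per-pixel mapping (the intrinsics and function name are illustrative, not MRPT code):

	// Hedged sketch: back-project one depth pixel using the same factors the LUT caches.
	#include <cstdio>

	struct Point3Df { float x, y, z; };

	// cx, cy, fx, fy: depth-camera intrinsics (pixels); D: depth along +X (meters).
	static Point3Df backprojectDepthPixel(int r, int c, float D,
	                                      float cx, float cy, float fx, float fy)
	{
		const float Ky = (cx - c) / fx;   // same role as the *kys entries above
		const float Kz = (cy - r) / fy;   // same role as the *kzs entries above
		return Point3Df{ D, Ky * D, Kz * D };
	}

	int main()
	{
		// Example: VGA-like intrinsics, pixel (120, 400) at 1.5 m depth.
		const Point3Df p = backprojectDepthPixel(120, 400, 1.5f, 319.5f, 239.5f, 525.f, 525.f);
		std::printf("X=%.3f Y=%.3f Z=%.3f\n", p.x, p.y, p.z);
		return 0;
	}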
	// Use the SSE2-optimized path only if the row width is a multiple of 8 and the caller allows it:
	if ((W & 0x07)==0 && projectParams.USE_SSE2)
	for (int r=0; r<H; r++)
		for (int c=0; c<W; c++)
		{
			// ...
			const float Kz = (r_cy - r) * r_fy_inv;
			const float Ky = (r_cx - c) * r_fx_inv;
			// ...
		}
	for (int r=0; r<H; r++)
		for (int c=0; c<W; c++)
		{
			// ...
			const float Ky = (r_cx - c) * r_fx_inv;
			const float Kz = (r_cy - r) * r_fy_inv;
			// ... (range_is_depth==false: D holds the Euclidean range, so the
			//      depth along +X passed on as the X coordinate is:)
			D / std::sqrt(1+Ky*Ky+Kz*Kz),
			// ...
		}
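The 1/sqrt(1+Ky^2+Kz^2) factor handles the range_is_depth==false case (see the symbol list below): the stored value is then the Euclidean distance to the point, and dividing by the norm of (1, Ky, Kz) recovers the depth along +X. A small self-contained check of that identity (the pixel, intrinsics and range values are illustrative):

	#include <cmath>
	#include <cstdio>

	int main()
	{
		const float cx = 319.5f, cy = 239.5f, fx = 525.f, fy = 525.f;
		const int r = 100, c = 500;
		const float range = 2.0f;                  // Euclidean distance to the point (m)

		const float Ky = (cx - c) / fx;
		const float Kz = (cy - r) / fy;
		const float X  = range / std::sqrt(1 + Ky*Ky + Kz*Kz);  // depth along +X
		const float Y  = Ky * X;
		const float Z  = Kz * X;

		// Check: the recovered point lies at the requested Euclidean range.
		std::printf("X=%.3f Y=%.3f Z=%.3f |p|=%.3f\n", X, Y, Z, std::sqrt(X*X + Y*Y + Z*Z));
		return 0;
	}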
	// Fill the RGB color of each point from the intensity image, if present:
	if (!isDirectCorresp)
	{
		// Precompute the inverse depth->intensity camera transformation:
		// ...
		T_inv.block<3,3>(0,0) = R_inv.cast<float>();
		T_inv.block<3,1>(0,3) = t_inv.cast<float>();
	}
	// ...
	Eigen::Matrix<float,4,1> pt_wrt_color, pt_wrt_depth;
	// ...
	const size_t nPts = pca.size();
	for (size_t i=0; i<nPts; i++)
	{
		int img_idx_x, img_idx_y;
		bool pointWithinImage = false;
		if (isDirectCorresp)
		{
			// Depth and intensity cameras coincide: pixel indices map 1:1.
			// ...
			pointWithinImage = true;
		}
		else
		{
			// Project the point into the intensity (color) camera frame:
			pca.getPointXYZ(i, pt_wrt_depth[0], pt_wrt_depth[1], pt_wrt_depth[2]);
			pt_wrt_color.noalias() = T_inv * pt_wrt_depth;
			// ...
			if (pt_wrt_color[2]) {
				// ... (pinhole-project to get img_idx_x, img_idx_y)
				pointWithinImage =
					img_idx_x>=0 && img_idx_x<imgW &&
					img_idx_y>=0 && img_idx_y<imgH;
			}
		}
		// ...
		if (pointWithinImage)
		{
			if (hasColorIntensityImg) {
				// ... (RGB image: read the three channels at (img_idx_x, img_idx_y))
			}
			else {
				// ... (grayscale image: replicate the single intensity value c)
				pCol.R = pCol.G = pCol.B = c;
			}
		}
		else
		{
			// Point does not project into the intensity image: paint it white.
			pCol.R = pCol.G = pCol.B = 255;
		}
		pca.setPointRGBu8(i, pCol.R, pCol.G, pCol.B);
	}
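The elided lines above compute (img_idx_x, img_idx_y) by pinhole-projecting pt_wrt_color onto the intensity image, using cx(), cy(), fx(), fy() of cameraParamsIntensity and round() (all listed below). A hedged, self-contained sketch of that projection and bounds test; names and values are illustrative, not the MRPT implementation:

	#include <cmath>
	#include <cstdio>

	// Project a point already expressed in the intensity-camera frame (z = optical axis)
	// onto the image plane and test whether it lands inside the image.
	static bool projectToImage(float x, float y, float z,
	                           float cx, float cy, float fx, float fy,
	                           int imgW, int imgH, int &u, int &v)
	{
		if (z <= 0) return false;                      // behind the camera
		u = static_cast<int>(std::lround(cx + fx * x / z));
		v = static_cast<int>(std::lround(cy + fy * y / z));
		return u >= 0 && u < imgW && v >= 0 && v < imgH;
	}

	int main()
	{
		int u, v;
		const bool inside = projectToImage(0.1f, -0.05f, 1.2f,
		                                   319.5f, 239.5f, 525.f, 525.f, 640, 480, u, v);
		std::printf("inside=%d u=%d v=%d\n", inside ? 1 : 0, u, v);
		return 0;
	}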
	// Finally, transform the whole point cloud by the 4x4 matrix HM (sensor/robot pose), if requested:
	Eigen::Matrix<float,4,1> pt, pt_transf;
	// ...
	const size_t nPts = pca.size();
	for (size_t i=0; i<nPts; i++)
	{
		pca.getPointXYZ(i, pt[0], pt[1], pt[2]);
		pt_transf.noalias() = HM*pt;
		pca.setPointXYZ(i, pt_transf[0], pt_transf[1], pt_transf[2]);
	}
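HM is the 4x4 homogeneous transform applied to every point when takeIntoAccountSensorPoseOnRobot is enabled (see its description below), optionally composed with robotPoseInTheWorld. A hedged sketch of how such a matrix can be built with the CPose3D API referenced in this file; the header paths follow MRPT 1.x and the exact composition performed by MRPT may differ in detail:

	#include <mrpt/poses/CPose3D.h>
	#include <mrpt/math/CMatrixFixedNumeric.h>

	mrpt::math::CMatrixDouble44 buildPointTransform(
		const mrpt::poses::CPose3D &sensorPose,             // pose of the sensor on the robot
		const mrpt::poses::CPose3D *robotPoseInTheWorld)    // optional robot pose (may be NULL)
	{
		mrpt::poses::CPose3D transf = sensorPose;           // points -> robot frame
		if (robotPoseInTheWorld)
			transf.composeFrom(*robotPoseInTheWorld, sensorPose);  // points -> world frame
		return transf.getHomogeneousMatrixVal();            // 4x4 matrix, applied as HM*pt above
	}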
template <class POINTMAP>
void do_project_3d_pointcloud(
	const int H, const int W, const float *kys, const float *kzs,
	const mrpt::math::CMatrix &rangeImage,
	mrpt::utils::PointCloudAdapter<POINTMAP> &pca,
	std::vector<uint16_t> &idxs_x, std::vector<uint16_t> &idxs_y,
	const mrpt::obs::TRangeImageFilterParams &filterParams, bool MAKE_DENSE)
{
	// Generic scalar version:
	// ...
	for (int r=0; r<H; r++)
		for (int c=0; c<W; c++)
		{
			const float D = rangeImage.coeff(r,c);
			// ... (if the range filter rejects this pixel:)
			pca.setInvalidPoint(idx);
			// ... (otherwise, back-project using the cached factors:)
			pca.setPointXYZ(idx, D /*x*/, *kys++ * D /*y*/, *kzs++ * D /*z*/);
			// ...
		}
}
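For reference, the control flow of this scalar path can be condensed into a small self-contained sketch, with the range filter reduced to a plain D>0 test and simple containers standing in for the point-cloud adapter; every name and value here is illustrative, not the MRPT code:

	#include <cstdint>
	#include <cstdio>
	#include <vector>

	struct SketchPoint { float x, y, z; bool valid; };

	int main()
	{
		const int H = 2, W = 4;
		const std::vector<float> range = { 1.f, 0.f, 2.f, 0.5f,
		                                   0.f, 1.f, 1.f, 3.f };   // row-major depths (m)
		const std::vector<float> kys(H*W, 0.1f), kzs(H*W, -0.2f);  // cached back-projection factors
		const bool MAKE_DENSE = false;                             // keep the organized layout

		std::vector<SketchPoint> cloud;
		size_t k = 0;
		for (int r = 0; r < H; r++)
			for (int c = 0; c < W; c++, k++)
			{
				const float D = range[k];
				if (D <= 0) {                      // stand-in for do_range_filter(r,c,D)
					if (!MAKE_DENSE)
						cloud.push_back(SketchPoint{0, 0, 0, false});
					continue;
				}
				cloud.push_back(SketchPoint{D, kys[k]*D, kzs[k]*D, true});
			}
		std::printf("kept %zu entries for %d pixels\n", cloud.size(), H*W);
		return 0;
	}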
template <class POINTMAP>
void do_project_3d_pointcloud_SSE2(
	const int H, const int W, const float *kys, const float *kzs,
	const mrpt::math::CMatrix &rangeImage,
	mrpt::utils::PointCloudAdapter<POINTMAP> &pca,
	std::vector<uint16_t> &idxs_x, std::vector<uint16_t> &idxs_y,
	const mrpt::obs::TRangeImageFilterParams &filterParams, bool MAKE_DENSE)
{
	// ...
	const int W_4 = W >> 2;  // Number of 4-float SSE2 blocks per image row
	// ...
	const __m128 D_zeros = _mm_set_ps(.0f,.0f,.0f,.0f);
	const __m128 xormask = filterParams.rangeCheckBetween ?
		_mm_cmpneq_ps(D_zeros,D_zeros) :  // all bits 0: keep the "between" test as is
		_mm_cmpeq_ps(D_zeros,D_zeros);    // all bits 1: invert it ("outside" test)
	for (int r=0; r<H; r++)
	{
		const float *D_ptr = &rangeImage.coeffRef(r,0);
		// ... (Dgt_ptr / Dlt_ptr point into rangeMask_min / rangeMask_max, if provided)
		for (int c=0; c<W_4; c++)
		{
			const __m128 D = _mm_load_ps(D_ptr);
			const __m128 nz_mask = _mm_cmpgt_ps(D, D_zeros);
			__m128 valid_range_mask;
			if (!filterParams.rangeMask_min && !filterParams.rangeMask_max)
			{	// No range filter: only discard D==0 pixels
				valid_range_mask = nz_mask;
			}
			else if (!filterParams.rangeMask_min || !filterParams.rangeMask_max)
			{	// Only one of the two masks is present
				if (filterParams.rangeMask_min)
				{
					const __m128 Dmin = _mm_load_ps(Dgt_ptr);
					valid_range_mask = _mm_and_ps(_mm_cmpgt_ps(D, Dmin), _mm_cmpgt_ps(Dmin, D_zeros));
				}
				else
				{
					const __m128 Dmax = _mm_load_ps(Dlt_ptr);
					valid_range_mask = _mm_and_ps(_mm_cmplt_ps(D, Dmax), _mm_cmpgt_ps(Dmax, D_zeros));
				}
				valid_range_mask = _mm_and_ps(valid_range_mask, nz_mask);  // Filter out D==0 points
			}
			else
			{	// Both masks: D>Dmin and D<Dmax, optionally inverted via xormask
				const __m128 Dmin = _mm_load_ps(Dgt_ptr);
				const __m128 Dmax = _mm_load_ps(Dlt_ptr);

				const __m128 gt_mask = _mm_cmpgt_ps(D, Dmin);
				const __m128 lt_mask = _mm_and_ps(_mm_cmplt_ps(D, Dmax), nz_mask);
				valid_range_mask = _mm_and_ps(gt_mask, lt_mask);            // the "between" condition
				valid_range_mask = _mm_xor_ps(valid_range_mask, xormask);   // invert if rangeCheckBetween==false
				// Pixels whose mask entries are both zero are not filtered at all:
				valid_range_mask = _mm_or_ps(valid_range_mask, _mm_and_ps(_mm_cmpeq_ps(Dmin,D_zeros),_mm_cmpeq_ps(Dmax,D_zeros)));
				// And never let invalid (zero) ranges through:
				valid_range_mask = _mm_and_ps(valid_range_mask, nz_mask);
			}
			// ...
			const int valid_range_maski = _mm_movemask_epi8(_mm_castps_si128(valid_range_mask));
			if (valid_range_maski!=0)  // At least one of the 4 lanes is valid
			{
				const __m128 KY = _mm_load_ps(kys);
				const __m128 KZ = _mm_load_ps(kzs);
				// ... (xs, ys, zs: temporary float[4] buffers)
				_mm_storeu_ps(xs, D);
				_mm_storeu_ps(ys, _mm_mul_ps(KY,D));
				_mm_storeu_ps(zs, _mm_mul_ps(KZ,D));
				for (int q=0; q<4; q++)
				{
					if ((valid_range_maski & (1<<(q*4))) != 0)
					{
						pca.setPointXYZ(idx, xs[q], ys[q], zs[q]);
						idxs_x[idx] = (c<<2)+q;
						// ...
					}
					else if (!MAKE_DENSE)
					{
						pca.setInvalidPoint(idx);
						// ...
					}
				}
			}
			else if (!MAKE_DENSE)
			{
				for (int q=0; q<4; q++)
				{
					pca.setInvalidPoint(idx);
					// ...
				}
			}
			// ... (advance D_ptr, kys, kzs by 4)
			if (Dgt_ptr) Dgt_ptr+=4;
			if (Dlt_ptr) Dlt_ptr+=4;
		}
	}
}
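The bit test (1<<(q*4)) works because _mm_movemask_epi8 collects the most significant bit of each of the 16 bytes, so every 32-bit float lane of the comparison mask contributes four identical bits. A small self-contained demo of that pattern (plain SSE2, not MRPT code; the sample depths are illustrative):

	#include <emmintrin.h>
	#include <cstdio>

	int main()
	{
		const __m128 D      = _mm_set_ps(3.f, 0.f, 2.f, 0.f);  // lanes: [0]=0, [1]=2, [2]=0, [3]=3
		const __m128 zeros  = _mm_setzero_ps();
		const __m128 nzmask = _mm_cmpgt_ps(D, zeros);           // all-ones per lane where D>0
		const int bits = _mm_movemask_epi8(_mm_castps_si128(nzmask));

		std::printf("mask bits = 0x%04x\n", bits);              // expected 0xf0f0
		for (int q = 0; q < 4; q++)
			std::printf("lane %d valid: %d\n", q, (bits & (1 << (q * 4))) != 0);
		return 0;
	}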
Referenced symbols (brief descriptions from the MRPT API documentation; a usage sketch follows at the end of this list):

#define ASSERT_EQUAL_(__A, __B)
Mainly for internal use within CObservation3DRangeScan::project3DPointsFromDepthImageInto() ...
void resizePoints3DVectors(const size_t nPoints)
Use this method instead of resizing all three points3D_x, points3D_y & points3D_z to allow the usage ...
const mrpt::poses::CPose3D * robotPoseInTheWorld
(Default: NULL) Read takeIntoAccountSensorPoseOnRobot
mrpt::utils::TCamera cameraParamsIntensity
Projection parameters of the intensity (graylevel or RGB) camera.
mrpt::math::CVectorFloat Kzs
std::vector< uint16_t > points3D_idxs_x
double cy() const
Get the value of the principal point y-coordinate (in pixels).
Declares a class derived from "CObservation" that encapsules a 3D range scan measurement, as from a time-of-flight range camera or any other RGBD sensor.
const mrpt::math::CMatrix * rangeMask_min
(Default: NULL) If provided, each data range will be tested to be greater-than (rangeMask_min) or les...
Used in CObservation3DRangeScan::project3DPointsFromDepthImageInto()
mrpt::math::CMatrix rangeImage
If hasRangeImage=true, a matrix of floats with the range data as captured by the camera (in meters) ...
mrpt::utils::TCamera cameraParams
Projection parameters of the depth camera.
double fx() const
Get the value of the focal length x-value (in pixels).
double fy() const
Get the value of the focal length y-value (in pixels).
A numeric matrix of compile-time fixed size.
This base provides a set of functions for maths stuff.
mrpt::poses::CPose3D relativePoseIntensityWRTDepth
Relative pose of the intensity camera wrt the depth camera (which is the coordinates origin for this ...
std::vector< uint16_t > points3D_idxs_y
If hasPoints3D=true, the (x,y) pixel coordinates for each (X,Y,Z) point in points3D_x, points3D_y, points3D_z.
mrpt::math::CArrayDouble< 3 > m_coords
The translation vector [x,y,z] access directly or with x(), y(), z() setter/getter methods...
void composeFrom(const CPose3D &A, const CPose3D &B)
Makes "this = A (+) B"; this method is slightly more efficient than "this= A + B;" since it avoids th...
mrpt::math::CMatrixDouble44 getHomogeneousMatrixVal() const
void project3DPointsFromDepthImageInto(mrpt::obs::CObservation3DRangeScan &src_obs, POINTMAP &dest_pointcloud, const mrpt::obs::T3DPointsProjectionParams &projectParams, const mrpt::obs::TRangeImageFilterParams &filterParams)
bool hasRangeImage
true means the field rangeImage contains valid data
An adapter to different kinds of point cloud object.
void do_project_3d_pointcloud_SSE2(const int H, const int W, const float *kys, const float *kzs, const mrpt::math::CMatrix &rangeImage, mrpt::utils::PointCloudAdapter< POINTMAP > &pca, std::vector< uint16_t > &idxs_x, std::vector< uint16_t > &idxs_y, const mrpt::obs::TRangeImageFilterParams &filterParams, bool MAKE_DENSE)
mrpt::math::CVectorFloat Kys
double cx() const
Get the value of the principal point x-coordinate (in pixels).
static TCached3DProjTables m_3dproj_lut
3D point cloud projection look-up-table
bool USE_SSE2
(Default:true) If possible, use SSE2 optimized code.
This is the global namespace for all Mobile Robot Programming Toolkit (MRPT) libraries.
mrpt::poses::CPose3D sensorPose
The 6D pose of the sensor on the robot.
const mrpt::math::CMatrix * rangeMask_max
void homogeneousMatrixInverse(const MATRIXLIKE1 &M, MATRIXLIKE2 &out_inverse_M)
Efficiently compute the inverse of a 4x4 homogeneous matrix by only transposing the rotation 3x3 part...
A class used to store a 3D pose (a 3D translation + a rotation in 3D).
Used in CObservation3DRangeScan::project3DPointsFromDepthImageInto()
bool hasIntensityImage
true means the field intensityImage contains valid data
int round(const T value)
Returns the closest integer (int) to x.
bool PROJ3D_USE_LUT
(Default:true) [Only used when range_is_depth=true] Whether to use a Look-up-table (LUT) to speed up ...
bool doDepthAndIntensityCamerasCoincide() const
Return true if relativePoseIntensityWRTDepth equals the pure rotation (0,0,0,-90deg,0,-90deg) (with a small comparison epsilon)
bool isColor() const
Returns true if the image is RGB, false if it is grayscale.
unsigned char * get_unsafe(unsigned int col, unsigned int row, unsigned int channel=0) const
Access to pixels without checking boundaries - normally prefer the () operator, which checks the coordinates.
void getRotationMatrix(mrpt::math::CMatrixDouble33 &ROT) const
Get the 3x3 rotation matrix.
bool MAKE_DENSE
(Default:true) set to false if you want to preserve the organization of the point cloud ...
mrpt::utils::CImage intensityImage
If hasIntensityImage=true, a color or gray-level intensity image of the same size than "rangeImage"...
This class is a "CSerializable" wrapper for "CMatrixFloat".
bool range_is_depth
true: Kinect-like ranges: entries of rangeImage are distances along the +X axis; false: Ranges in ran...
void do_project_3d_pointcloud(const int H, const int W, const float *kys, const float *kzs, const mrpt::math::CMatrix &rangeImage, mrpt::utils::PointCloudAdapter< POINTMAP > &pca, std::vector< uint16_t > &idxs_x, std::vector< uint16_t > &idxs_y, const mrpt::obs::TRangeImageFilterParams &filterParams, bool MAKE_DENSE)
size_t getWidth() const MRPT_OVERRIDE
Returns the width of the image in pixels.
bool takeIntoAccountSensorPoseOnRobot
(Default: false) If false, local (sensor-centric) coordinates of points are generated. Otherwise, points are transformed with sensorPose. Furthermore, if provided, those coordinates are transformed with robotPoseInTheWorld
size_t getHeight() const MRPT_OVERRIDE
Returns the height of the image in pixels.
mrpt::utils::TCamera prev_camParams
bool do_range_filter(size_t r, size_t c, const float D) const
Returns true if the point (r,c) with depth D passes all filters.
bool rangeCheckBetween
Only used if both rangeMask_min and rangeMask_max are present.
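Putting the pieces together, a hedged usage sketch of the public entry point documented above. It assumes the MRPT 1.x member-function form of project3DPointsFromDepthImageInto and a point-cloud type with a PointCloudAdapter specialization; the header paths and the CColouredPointsMap choice are assumptions, while the parameter names come from this list:

	#include <mrpt/obs/CObservation3DRangeScan.h>
	#include <mrpt/maps/CColouredPointsMap.h>

	void exampleProjection(mrpt::obs::CObservation3DRangeScan &obs)
	{
		mrpt::maps::CColouredPointsMap cloud;        // any type with a PointCloudAdapter specialization

		mrpt::obs::T3DPointsProjectionParams pp;
		pp.takeIntoAccountSensorPoseOnRobot = true;  // transform points with obs.sensorPose
		pp.PROJ3D_USE_LUT = true;                    // use the cached back-projection LUT
		pp.USE_SSE2       = true;                    // allow the SSE2 path when W is a multiple of 8
		pp.MAKE_DENSE     = false;                   // preserve the organized point layout

		mrpt::obs::TRangeImageFilterParams fp;       // no rangeMask_min/max: only D>0 is required

		obs.project3DPointsFromDepthImageInto(cloud, pp, fp);
	}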