Skip to content

Commit 0eda296

Browse files
author
Alexander Panov
authored
Merge pull request #3256 from AleksandrPanov:fix_aruco_axes_docs
fix axes and docs * fix axes docs, tutorial and add estimateParameters, change estimateParameters in test * update docs, add singlemarkersaxes2.jpg * fix docs
1 parent 63cab1b commit 0eda296

File tree

5 files changed

+96
-26
lines changed

5 files changed

+96
-26
lines changed

modules/aruco/include/opencv2/aruco.hpp

Lines changed: 62 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,7 @@ the use of this software, even if advised of the possibility of such damage.
4040
#define __OPENCV_ARUCO_HPP__
4141

4242
#include <opencv2/core.hpp>
43+
#include <opencv2/calib3d.hpp>
4344
#include <vector>
4445
#include "opencv2/aruco/dictionary.hpp"
4546

@@ -219,7 +220,55 @@ struct CV_EXPORTS_W DetectorParameters {
219220
CV_EXPORTS_W void detectMarkers(InputArray image, const Ptr<Dictionary> &dictionary, OutputArrayOfArrays corners,
220221
OutputArray ids, const Ptr<DetectorParameters> &parameters = DetectorParameters::create(),
221222
OutputArrayOfArrays rejectedImgPoints = noArray());
223+
/** @brief
224+
* rvec/tvec define the right handed coordinate system of the marker.
225+
* PatternPos defines the center of this coordinate system and the direction of its axes.
226+
* Axis X (red color) - first coordinate, axis Y (green color) - second coordinate,
227+
* axis Z (blue color) - third coordinate.
228+
* @sa estimatePoseSingleMarkers(), @ref tutorial_aruco_detection
229+
*/
230+
enum PatternPos {
231+
/** @brief The marker coordinate system is centered on the middle of the marker.
232+
* The coordinates of the four corners (CCW order) of the marker in its own coordinate system are:
233+
* (-markerLength/2, markerLength/2, 0), (markerLength/2, markerLength/2, 0),
234+
* (markerLength/2, -markerLength/2, 0), (-markerLength/2, -markerLength/2, 0).
235+
*
236+
* These pattern points define this coordinate system:
237+
* ![Image with axes drawn](images/singlemarkersaxes.jpg)
238+
*/
239+
CCW_center,
240+
/** @brief The marker coordinate system is centered on the top-left corner of the marker.
241+
* The coordinates of the four corners (CW order) of the marker in its own coordinate system are:
242+
* (0, 0, 0), (markerLength, 0, 0),
243+
* (markerLength, markerLength, 0), (0, markerLength, 0).
244+
*
245+
* These pattern points define this coordinate system:
246+
* ![Image with axes drawn](images/singlemarkersaxes2.jpg)
247+
*/
248+
CW_top_left_corner
249+
};
250+
251+
/** @brief
252+
* Pose estimation parameters
253+
* @param pattern Defines the center of the coordinate system and the direction of its axes (default PatternPos::CCW_center).
254+
* @param useExtrinsicGuess Parameter used for SOLVEPNP_ITERATIVE. If true (1), the function uses the provided
255+
* rvec and tvec values as initial approximations of the rotation and translation vectors, respectively, and further
256+
* optimizes them (default false).
257+
* @param solvePnPMethod Method for solving a PnP problem: see @ref calib3d_solvePnP_flags (default SOLVEPNP_ITERATIVE).
258+
* @sa PatternPos, solvePnP(), @ref tutorial_aruco_detection
259+
*/
260+
struct CV_EXPORTS_W EstimateParameters {
261+
CV_PROP_RW PatternPos pattern;
262+
CV_PROP_RW bool useExtrinsicGuess;
263+
CV_PROP_RW SolvePnPMethod solvePnPMethod;
264+
265+
EstimateParameters(): pattern(CCW_center), useExtrinsicGuess(false),
266+
solvePnPMethod(SOLVEPNP_ITERATIVE) {}
222267

268+
CV_WRAP static Ptr<EstimateParameters> create() {
269+
return makePtr<EstimateParameters>();
270+
}
271+
};
223272

224273

225274
/**
@@ -240,21 +289,28 @@ CV_EXPORTS_W void detectMarkers(InputArray image, const Ptr<Dictionary> &diction
240289
* @param tvecs array of output translation vectors (e.g. std::vector<cv::Vec3d>).
241290
* Each element in tvecs corresponds to the specific marker in imgPoints.
242291
* @param _objPoints array of object points of all the marker corners
292+
* @param estimateParameters sets the origin of the coordinate system and the coordinates of the four corners of the marker
293+
* (default estimateParameters.pattern = PatternPos::CCW_center, estimateParameters.useExtrinsicGuess = false,
294+
* estimateParameters.solvePnPMethod = SOLVEPNP_ITERATIVE).
243295
*
244296
* This function receives the detected markers and returns their pose estimation respect to
245297
* the camera individually. So for each marker, one rotation and translation vector is returned.
246298
* The returned transformation is the one that transforms points from each marker coordinate system
247299
* to the camera coordinate system.
248-
* The marker corrdinate system is centered on the middle of the marker, with the Z axis
249-
* perpendicular to the marker plane.
250-
* The coordinates of the four corners of the marker in its own coordinate system are:
251-
* (0, 0, 0), (markerLength, 0, 0),
252-
* (markerLength, markerLength, 0), (0, markerLength, 0)
300+
* The marker coordinate system is centered on the middle (by default) or on the top-left corner of the marker,
301+
* with the Z axis perpendicular to the marker plane.
302+
* estimateParameters defines the coordinates of the four corners of the marker in its own coordinate system; by default they are:
303+
* (-markerLength/2, markerLength/2, 0), (markerLength/2, markerLength/2, 0),
304+
* (markerLength/2, -markerLength/2, 0), (-markerLength/2, -markerLength/2, 0)
253305
* @sa use cv::drawFrameAxes to get world coordinate system axis for object points
306+
* @sa @ref tutorial_aruco_detection
307+
* @sa EstimateParameters
308+
* @sa PatternPos
254309
*/
255310
CV_EXPORTS_W void estimatePoseSingleMarkers(InputArrayOfArrays corners, float markerLength,
256311
InputArray cameraMatrix, InputArray distCoeffs,
257-
OutputArray rvecs, OutputArray tvecs, OutputArray _objPoints = noArray());
312+
OutputArray rvecs, OutputArray tvecs, OutputArray _objPoints = noArray(),
313+
Ptr<EstimateParameters> estimateParameters = EstimateParameters::create());
258314

259315

260316

modules/aruco/src/aruco.cpp

Lines changed: 27 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -810,19 +810,31 @@ static void _identifyCandidates(InputArray _image, vector< vector< vector< Point
810810

811811

812812
/**
813-
* @brief Return object points for the system centered in a single marker, given the marker length
813+
* @brief Return object points for the system centered in a middle (by default) or in a top left corner of single
814+
* marker, given the marker length
814815
*/
815-
static void _getSingleMarkerObjectPoints(float markerLength, OutputArray _objPoints) {
816+
static void _getSingleMarkerObjectPoints(float markerLength, OutputArray _objPoints,
817+
EstimateParameters estimateParameters) {
816818

817819
CV_Assert(markerLength > 0);
818820

819821
_objPoints.create(4, 1, CV_32FC3);
820822
Mat objPoints = _objPoints.getMat();
821823
// set coordinate system in the top-left corner of the marker, with Z pointing out
822-
objPoints.ptr< Vec3f >(0)[0] = Vec3f(0.f, 0.f, 0);
823-
objPoints.ptr< Vec3f >(0)[1] = Vec3f(markerLength, 0.f, 0);
824-
objPoints.ptr< Vec3f >(0)[2] = Vec3f(markerLength, markerLength, 0);
825-
objPoints.ptr< Vec3f >(0)[3] = Vec3f(0.f, markerLength, 0);
824+
if (estimateParameters.pattern == CW_top_left_corner) {
825+
objPoints.ptr<Vec3f>(0)[0] = Vec3f(0.f, 0.f, 0);
826+
objPoints.ptr<Vec3f>(0)[1] = Vec3f(markerLength, 0.f, 0);
827+
objPoints.ptr<Vec3f>(0)[2] = Vec3f(markerLength, markerLength, 0);
828+
objPoints.ptr<Vec3f>(0)[3] = Vec3f(0.f, markerLength, 0);
829+
}
830+
else if (estimateParameters.pattern == CCW_center) {
831+
objPoints.ptr<Vec3f>(0)[0] = Vec3f(-markerLength/2.f, markerLength/2.f, 0);
832+
objPoints.ptr<Vec3f>(0)[1] = Vec3f(markerLength/2.f, markerLength/2.f, 0);
833+
objPoints.ptr<Vec3f>(0)[2] = Vec3f(markerLength/2.f, -markerLength/2.f, 0);
834+
objPoints.ptr<Vec3f>(0)[3] = Vec3f(-markerLength/2.f, -markerLength/2.f, 0);
835+
}
836+
else
837+
CV_Error(Error::StsBadArg, "Unknown estimateParameters pattern");
826838
}
827839

828840

@@ -1221,17 +1233,17 @@ class SinglePoseEstimationParallel : public ParallelLoopBody {
12211233
public:
12221234
SinglePoseEstimationParallel(Mat& _markerObjPoints, InputArrayOfArrays _corners,
12231235
InputArray _cameraMatrix, InputArray _distCoeffs,
1224-
Mat& _rvecs, Mat& _tvecs)
1236+
Mat& _rvecs, Mat& _tvecs, EstimateParameters _estimateParameters)
12251237
: markerObjPoints(_markerObjPoints), corners(_corners), cameraMatrix(_cameraMatrix),
1226-
distCoeffs(_distCoeffs), rvecs(_rvecs), tvecs(_tvecs) {}
1238+
distCoeffs(_distCoeffs), rvecs(_rvecs), tvecs(_tvecs), estimateParameters(_estimateParameters) {}
12271239

12281240
void operator()(const Range &range) const CV_OVERRIDE {
12291241
const int begin = range.start;
12301242
const int end = range.end;
12311243

12321244
for(int i = begin; i < end; i++) {
1233-
solvePnP(markerObjPoints, corners.getMat(i), cameraMatrix, distCoeffs,
1234-
rvecs.at<Vec3d>(i), tvecs.at<Vec3d>(i));
1245+
solvePnP(markerObjPoints, corners.getMat(i), cameraMatrix, distCoeffs, rvecs.at<Vec3d>(i),
1246+
tvecs.at<Vec3d>(i), estimateParameters.useExtrinsicGuess, estimateParameters.solvePnPMethod);
12351247
}
12361248
}
12371249

@@ -1242,21 +1254,20 @@ class SinglePoseEstimationParallel : public ParallelLoopBody {
12421254
InputArrayOfArrays corners;
12431255
InputArray cameraMatrix, distCoeffs;
12441256
Mat& rvecs, tvecs;
1257+
EstimateParameters estimateParameters;
12451258
};
12461259

12471260

12481261

1249-
1250-
/**
1251-
*/
12521262
void estimatePoseSingleMarkers(InputArrayOfArrays _corners, float markerLength,
12531263
InputArray _cameraMatrix, InputArray _distCoeffs,
1254-
OutputArray _rvecs, OutputArray _tvecs, OutputArray _objPoints) {
1264+
OutputArray _rvecs, OutputArray _tvecs, OutputArray _objPoints,
1265+
Ptr<EstimateParameters> estimateParameters) {
12551266

12561267
CV_Assert(markerLength > 0);
12571268

12581269
Mat markerObjPoints;
1259-
_getSingleMarkerObjectPoints(markerLength, markerObjPoints);
1270+
_getSingleMarkerObjectPoints(markerLength, markerObjPoints, *estimateParameters);
12601271
int nMarkers = (int)_corners.total();
12611272
_rvecs.create(nMarkers, 1, CV_64FC3);
12621273
_tvecs.create(nMarkers, 1, CV_64FC3);
@@ -1272,7 +1283,7 @@ void estimatePoseSingleMarkers(InputArrayOfArrays _corners, float markerLength,
12721283
// this is the parallel call for the previous commented loop (result is equivalent)
12731284
parallel_for_(Range(0, nMarkers),
12741285
SinglePoseEstimationParallel(markerObjPoints, _corners, _cameraMatrix,
1275-
_distCoeffs, rvecs, tvecs));
1286+
_distCoeffs, rvecs, tvecs, *estimateParameters));
12761287
if(_objPoints.needed()){
12771288
markerObjPoints.convertTo(_objPoints, -1);
12781289
}

modules/aruco/test/test_charucodetection.cpp

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -439,10 +439,12 @@ void CV_CharucoDiamondDetection::run(int) {
439439
}
440440
}
441441

442+
Ptr<aruco::EstimateParameters> estimateParameters = aruco::EstimateParameters::create();
443+
estimateParameters->pattern = aruco::CW_top_left_corner;
442444
// estimate diamond pose
443445
vector< Vec3d > estimatedRvec, estimatedTvec;
444-
aruco::estimatePoseSingleMarkers(diamondCorners, squareLength, cameraMatrix,
445-
distCoeffs, estimatedRvec, estimatedTvec);
446+
aruco::estimatePoseSingleMarkers(diamondCorners, squareLength, cameraMatrix, distCoeffs, estimatedRvec,
447+
estimatedTvec, noArray(), estimateParameters);
446448

447449
// check result
448450
vector< Point2f > projectedDiamondCornersPose;
78 KB
Loading

modules/aruco/tutorials/aruco_detection/aruco_detection.markdown

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -286,8 +286,9 @@ translation vectors of the estimated poses will be in the same unit
286286
- The output parameters `rvecs` and `tvecs` are the rotation and translation vectors respectively, for each of the markers
287287
in `markerCorners`.
288288

289-
The marker coordinate system that is assumed by this function is placed at the center of the marker
290-
with the Z axis pointing out, as in the following image. Axis-color correspondences are X: red, Y: green, Z: blue. Note the axis directions of the rotated markers in this image.
289+
The marker coordinate system that is assumed by this function is placed in the center (by default) or
290+
in the top left corner of the marker with the Z axis pointing out, as in the following image.
291+
Axis-color correspondences are X: red, Y: green, Z: blue. Note the axis directions of the rotated markers in this image.
291292

292293
![Image with axes drawn](images/singlemarkersaxes.jpg)
293294

0 commit comments

Comments
 (0)