Ocean
RANSAC.h
1 /*
2  * Copyright (c) Meta Platforms, Inc. and affiliates.
3  *
4  * This source code is licensed under the MIT license found in the
5  * LICENSE file in the root directory of this source tree.
6  */
7 
8 #ifndef META_OCEAN_GEOMETRY_RANSAC_H
9 #define META_OCEAN_GEOMETRY_RANSAC_H
10 
13 #include "ocean/geometry/Error.h"
16 
17 #include "ocean/base/Accessor.h"
18 #include "ocean/base/Lock.h"
20 #include "ocean/base/Subset.h"
21 #include "ocean/base/Worker.h"
22 
23 #include "ocean/math/Box3.h"
25 #include "ocean/math/Numeric.h"
28 
29 namespace Ocean
30 {
31 
32 namespace Geometry
33 {
34 
35 /**
36  * This class implements several RANSAC functions for pose determination.
37  * @ingroup geometry
38  */
39 class OCEAN_GEOMETRY_EXPORT RANSAC
40 {
41  protected:
42 
43  /**
44  * Definition of a function pointer determining a geometric transformation, e.g., an affine transformation.
45  * Parameter0: Pointer to the left image points, must be valid
46  * Parameter1: Pointer to the right image points, must be valid
47  * Parameter2: Number of point correspondences, with range [1, infinity)
48  * Parameter3: Resulting transformation
49  * Return parameter: True, if succeeded
50  */
51  typedef bool (*GeometricTransformFunction)(const ImagePoint*, const ImagePoint*, const size_t, SquareMatrix3&);
52 
53  public:
54 
55  /**
56  * Calculates the number of iterations necessary to find an outlier-free model data set.
57  * @param model Number of data elements sufficient to define a test model
58  * @param successProbability Probability that at least one outlier-free model will be selected, with range (0, 1)
59  * @param faultyRate Rate of faulty data elements inside the given data set, with range [0, 1)
60  * @param maximalIterations The number of maximal iterations which can be returned, with range [1, infinity)
61  * @return Minimal number of iterations to guarantee the specified probability, with range [1, maximalIterations]
62  */
63  static unsigned int iterations(const unsigned int model, const Scalar successProbability = Scalar(0.99), const Scalar faultyRate = Scalar(0.2), const unsigned int maximalIterations = 1000000u);
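 // Note (illustrative, not part of the original header): the returned count presumably follows the standard
 // RANSAC bound n = log(1 - successProbability) / log(1 - (1 - faultyRate)^model), clamped to [1, maximalIterations].
 // A hedged usage sketch with example numbers (a 4-point homography model, 99% success probability, 30% outliers):
 //
 //  const unsigned int numberIterations = Geometry::RANSAC::iterations(4u, Scalar(0.99), Scalar(0.3));
 //  // numberIterations is roughly log(0.01) / log(1 - 0.7^4), i.e., about 17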
64 
65  /**
66  * Calculates a pose using the perspective pose problem with three point correspondences using any camera.
67  * The specified point correspondences should be sorted by strength or stability to enhance the pose determination.<br>
68  * @param anyCamera The camera object specifying the projection, must be valid
69  * @param objectPointAccessor The accessor providing the 3D object points, at least 4
70  * @param imagePointAccessor The accessor providing the 2D image points, one image point for each object point
71  * @param randomGenerator A random generator to be used
72  * @param world_T_camera The resulting pose transforming camera points to world points
73  * @param minimalValidCorrespondences Minimal number of valid correspondences, with range [4, objectPointAccessor.size()]
74  * @param refine Determines whether a non-linear least squares algorithm is used to increase the pose accuracy after the RANSAC step
75  * @param iterations Number of maximal RANSAC iterations, with range [1, infinity)
76  * @param sqrPixelErrorThreshold Square pixel error threshold for valid RANSAC candidates, with range (0, infinity)
77  * @param usedIndices Optional vector receiving the indices of all valid correspondences
78  * @param sqrAccuracy Optional resulting average square pixel error
79  * @return True, if succeeded
80  */
81  static bool p3p(const AnyCamera& anyCamera, const ConstIndexedAccessor<ObjectPoint>& objectPointAccessor, const ConstIndexedAccessor<ImagePoint>& imagePointAccessor, RandomGenerator& randomGenerator, HomogenousMatrix4& world_T_camera, const unsigned int minimalValidCorrespondences = 5u, const bool refine = true, const unsigned int iterations = 20u, const Scalar sqrPixelErrorThreshold = Scalar(5 * 5), Indices32* usedIndices = nullptr, Scalar* sqrAccuracy = nullptr);
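 // Illustrative usage sketch (hypothetical variable names, not part of this header); it assumes matching
 // 3D/2D correspondences and Ocean's ConstArrayAccessor wrapper around plain point vectors:
 //
 //  RandomGenerator randomGenerator;
 //  HomogenousMatrix4 world_T_camera;
 //  Indices32 validIndices;
 //
 //  if (Geometry::RANSAC::p3p(anyCamera, ConstArrayAccessor<ObjectPoint>(objectPoints), ConstArrayAccessor<ImagePoint>(imagePoints), randomGenerator, world_T_camera, 10u, true, 40u, Scalar(3 * 3), &validIndices))
 //  {
 //      // 'world_T_camera' transforms camera points into world points, 'validIndices' holds the inlier correspondences
 //  }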
82 
83  /**
84  * Deprecated.
85  *
86  * Calculates a pose using the perspective pose problem with three point correspondences.
87  * The specified point correspondences should be sorted by strength or stability to enhance the pose determination.<br>
88  * This function does not use any a priori information (e.g., a previous pose).<br>
89  * Beware: There is another p3p() function with almost identical functionality/parameter layout.<br>
90  * However, this function here supports 'weights' parameters and thus creates a bigger binary footprint.
91  * @param pinholeCamera The pinhole camera object specifying the intrinsic camera parameters
92  * @param objectPointAccessor The accessor providing the 3D object points, at least 4
93  * @param imagePointAccessor The accessor providing the 2D image points, one image point for each object point
94  * @param randomGenerator A random generator object
95  * @param useDistortionParameters True, if the provided image points are distorted and if the camera's distortion parameters should be applied during the pose determination
96  * @param pose Resulting pose
97  * @param minimalValidCorrespondences Minimal number of valid correspondences
98  * @param refine Determines whether a non-linear least squares algorithm is used to increase the pose accuracy after the RANSAC step
99  * @param iterations Number of maximal RANSAC iterations, with range [1, infinity)
100  * @param sqrPixelErrorThreshold Square pixel error threshold for valid RANSAC candidates, with range (0, infinity)
101  * @param usedIndices Optional vector receiving the indices of all valid correspondences
102  * @param sqrAccuracy Optional resulting average square pixel error
103  * @param weights Optional explicit weights to weight the point correspondences individually
104  * @return True, if succeeded
105  */
106  static inline bool p3p(const PinholeCamera& pinholeCamera, const ConstIndexedAccessor<ObjectPoint>& objectPointAccessor, const ConstIndexedAccessor<ImagePoint>& imagePointAccessor, RandomGenerator& randomGenerator, const bool useDistortionParameters, HomogenousMatrix4& pose, const unsigned int minimalValidCorrespondences, const bool refine, const unsigned int iterations, const Scalar sqrPixelErrorThreshold, Indices32* usedIndices, Scalar* sqrAccuracy, const Scalar* weights);
107 
108  /**
109  * Calculates a camera pose using the perspective pose problem with three point correspondences.
110  * The calculation uses a rough camera pose (e.g., from a previous camera frame) to increase the robustness.
111  * @param world_T_roughCamera The already known rough camera pose, transforming rough camera points to world points, with default camera pointing towards the negative z-space with y-axis upwards, must be valid
112  * @param camera The camera profile defining the projection, must be valid
113  * @param objectPointAccessor The accessor providing the 3D object points, at least 4
114  * @param imagePointAccessor The accessor providing the 2D image points, one image point for each 3D object point
115  * @param randomGenerator The random generator to be used
116  * @param world_T_camera The resulting camera pose, transforming camera points to world points, with default camera pointing towards the negative z-space with y-axis upwards, must be valid
117  * @param maxPositionOffset Maximal position offset between initial and final pose for three axis
118  * @param maxOrientationOffset Maximal orientation offset between initial and final pose in radian angle
119  * @param minValidCorrespondences Minimal number of valid correspondences
120  * @param refine True, if an optimization step will be applied to increase the pose accuracy after the RANSAC step
121  * @param iterations Number of maximal RANSAC iterations, with range [1, infinity)
122  * @param sqrPixelErrorThreshold Square pixel error threshold for valid RANSAC candidates, with range (0, infinity)
123  * @param usedIndices Optional vector receiving the indices of all valid correspondences
124  * @param sqrAccuracy Optional resulting average square pixel error, with range [0, infinity)
125  * @param weights Optional weights for the individual point correspondences, nullptr to use the same weight for all point correspondences
126  * @return True, if succeeded
127  */
128  static inline bool p3p(const HomogenousMatrix4& world_T_roughCamera, const AnyCamera& camera, const ConstIndexedAccessor<ObjectPoint>& objectPointAccessor, const ConstIndexedAccessor<ImagePoint>& imagePointAccessor, RandomGenerator& randomGenerator, HomogenousMatrix4& world_T_camera, const Vector3& maxPositionOffset = Vector3(Scalar(0.1), Scalar(0.1), Scalar(0.1)), const Scalar maxOrientationOffset = Numeric::deg2rad(10), const unsigned int minValidCorrespondences = 5u, const bool refine = true, const unsigned int iterations = 20u, const Scalar sqrPixelErrorThreshold = Scalar(5 * 5), Indices32* usedIndices = nullptr, Scalar* sqrAccuracy = nullptr, const Scalar* weights = nullptr);
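 // Illustrative sketch (hypothetical names): frame-to-frame tracking, feeding the pose of the previous frame as rough guess.
 //
 //  RandomGenerator randomGenerator;
 //  HomogenousMatrix4 world_T_camera;
 //
 //  if (Geometry::RANSAC::p3p(world_T_previousCamera, anyCamera, ConstArrayAccessor<ObjectPoint>(objectPoints), ConstArrayAccessor<ImagePoint>(imagePoints), randomGenerator, world_T_camera))
 //  {
 //      // the new pose stays within 'maxPositionOffset' / 'maxOrientationOffset' of the previous pose
 //  }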
129 
130  /**
131  * Deprecated.
132  *
133  * Calculates a pose using the perspective pose problem with three point correspondences.
134  * The calculation uses a pose from a previous calculation to increase the robustness.<br>
135  * The specified point correspondences should be sorted by strength or stability to enhance the pose determination.<br>
136  * This function uses a rough pose as a priori information to improve the robustness of the result.
137  * @param initialPose Rough initial pose to increase determination robustness
138  * @param pinholeCamera The pinhole camera object specifying the intrinsic camera parameters
139  * @param objectPointAccessor The accessor providing the 3D object points, at least 4
140  * @param imagePointAccessor The accessor providing the 2D image points, one image point for each object point
141  * @param randomGenerator A random generator object
142  * @param useDistortionParameters True, if the provided image points are distorted and if the camera's distortion parameters should be applied during the pose determination
143  * @param pose Resulting pose
144  * @param maxPositionOffset Maximal position offset between initial and final pose for three axis
145  * @param maxOrientationOffset Maximal orientation offset between initial and final pose in radian angle
146  * @param minValidCorrespondences Minimal number of valid correspondences
147  * @param refine True, if an optimization step will be applied to increase the pose accuracy after the RANSAC step
148  * @param iterations Number of maximal RANSAC iterations, with range [1, infinity)
149  * @param sqrPixelErrorThreshold Square pixel error threshold for valid RANSAC candidates, with range (0, infinity)
150  * @param usedIndices Optional vector receiving the indices of all valid correspondences
151  * @param sqrAccuracy Optional resulting average square pixel error
152  * @param weights Optional explicit weights to weight the point correspondences individually
153  * @return True, if succeeded
154  */
155  static inline bool p3p(const HomogenousMatrix4& initialPose, const PinholeCamera& pinholeCamera, const ConstIndexedAccessor<ObjectPoint>& objectPointAccessor, const ConstIndexedAccessor<ImagePoint>& imagePointAccessor, RandomGenerator& randomGenerator, const bool useDistortionParameters, HomogenousMatrix4& pose, const Vector3& maxPositionOffset = Vector3(Scalar(0.1), Scalar(0.1), Scalar(0.1)), const Scalar maxOrientationOffset = Numeric::deg2rad(10), const unsigned int minValidCorrespondences = 5u, const bool refine = true, const unsigned int iterations = 20u, const Scalar sqrPixelErrorThreshold = Scalar(5 * 5), Indices32* usedIndices = nullptr, Scalar* sqrAccuracy = nullptr, const Scalar* weights = nullptr);
156 
157  /**
158  * Calculates a pose including zoom factor using the perspective pose problem with three point correspondences.
159  * The specified point correspondences should be sorted by strength or stability to enhance the pose determination.<br>
160  * This function does not use any a priori information (e.g., a previous pose).
161  * @param pinholeCamera The pinhole camera object specifying the intrinsic camera parameters
162  * @param objectPointAccessor The accessor providing the 3D object points, at least 4
163  * @param imagePointAccessor The accessor providing the 2D image points, one image point for each object point
164  * @param randomGenerator A random generator object
165  * @param useDistortionParameters True, if the provided image points are distorted and if the camera's distortion parameters should be applied during the pose determination
166  * @param pose Resulting pose
167  * @param zoom Resulting zoom factor matching to the resulting pose, with range (0, infinity)
168  * @param minimalValidCorrespondences Minimal number of valid correspondences
169  * @param refine Determines whether a non-linear least squares algorithm is used to increase the pose accuracy after the RANSAC step
170  * @param iterations Number of maximal RANSAC iterations, with range [1, infinity)
171  * @param sqrPixelErrorThreshold Square pixel error threshold for valid RANSAC candidates, with range (0, infinity)
172  * @param usedIndices Optional vector receiving the indices of all valid correspondences
173  * @param sqrAccuracy Optional resulting average square pixel error
174  * @param weights Optional explicit weights to weight the point correspondences individually
175  * @return True, if succeeded
176  */
177  static inline bool p3pZoom(const PinholeCamera& pinholeCamera, const ConstIndexedAccessor<ObjectPoint>& objectPointAccessor, const ConstIndexedAccessor<ImagePoint>& imagePointAccessor, RandomGenerator& randomGenerator, const bool useDistortionParameters, HomogenousMatrix4& pose, Scalar& zoom, const unsigned int minimalValidCorrespondences = 5u, const bool refine = true, const unsigned int iterations = 20u, const Scalar sqrPixelErrorThreshold = Scalar(5 * 5), Indices32* usedIndices = nullptr, Scalar* sqrAccuracy = nullptr, const Scalar* weights = nullptr);
178 
179  /**
180  * Calculates a pose including zoom factor, using the perspective pose problem with three point correspondences.
181  * The calculation uses a pose from a previous calculation to increase the robustness.<br>
182  * The specified point correspondences should be sorted by strength or stability to enhance the pose determination.<br>
183  * This function uses a rough pose as a priori information to improve the robustness of the result.
184  * @param initialPose Rough initial pose to increase determination robustness
185  * @param initialZoom Rough initial zoom factor to increase the determination robustness, with range (0, infinity)
186  * @param pinholeCamera The pinhole camera object specifying the intrinsic camera parameters
187  * @param objectPointAccessor The accessor providing the 3D object points, at least 4
188  * @param imagePointAccessor The accessor providing the 2D image points, one image point for each object point
189  * @param randomGenerator A random generator object
190  * @param useDistortionParameters True, if the provided image points are distorted and if the camera's distortion parameters should be applied during the pose determination
191  * @param pose Resulting pose
192  * @param zoom Resulting zoom factor matching to the resulting pose, with range (0, infinity)
193  * @param maxPositionOffset Maximal position offset between initial and final pose for three axis
194  * @param maxOrientationOffset Maximal orientation offset between initial and final pose in radian angle
195  * @param minValidCorrespondences Minimal number of valid correspondences
196  * @param refine True, to apply an optimization step to increase the pose accuracy after the RANSAC step
197  * @param iterations Number of maximal RANSAC iterations, with range [1, infinity)
198  * @param sqrPixelErrorThreshold Square pixel error threshold for valid RANSAC candidates, with range (0, infinity)
199  * @param usedIndices Optional vector receiving the indices of all valid correspondences
200  * @param sqrAccuracy Optional resulting average square pixel error
201  * @param weights Optional explicit weights to weight the point correspondences individually
202  * @return True, if succeeded
203  */
204  static inline bool p3pZoom(const HomogenousMatrix4& initialPose, const Scalar initialZoom, const PinholeCamera& pinholeCamera, const ConstIndexedAccessor<ObjectPoint>& objectPointAccessor, const ConstIndexedAccessor<ImagePoint>& imagePointAccessor, RandomGenerator& randomGenerator, const bool useDistortionParameters, HomogenousMatrix4& pose, Scalar& zoom, const Vector3& maxPositionOffset = Vector3(Scalar(0.1), Scalar(0.1), Scalar(0.1)), const Scalar maxOrientationOffset = Numeric::deg2rad(10), const unsigned int minValidCorrespondences = 5u, const bool refine = true, const unsigned int iterations = 20u, const Scalar sqrPixelErrorThreshold = Scalar(5 * 5), Indices32* usedIndices = nullptr, Scalar* sqrAccuracy = nullptr, const Scalar* weights = nullptr);
205 
206  /**
207  * Determines the 2D line best fitting to a set of given 2D positions.
208  * @param positions The 2D positions for which the line will be determined, at least 2
209  * @param randomGenerator Random generator object
210  * @param line The resulting line best fitting with the given points
211  * @param refine True, to apply an optimization step to increase the accuracy of the line
212  * @param iterations The number of RANSAC iterations, with range [1, infinity)
213  * @param maxSqrError The maximal square distance between the line and a 2D position so that the position counts as valid, with range (0, infinity)
214  * @param finalError Optional resulting final average square distance between the line and all valid positions
215  * @param usedIndices Optional resulting indices of all valid positions
216  * @return True, if succeeded
217  */
218  static bool line(const ConstIndexedAccessor<Vector2>& positions, RandomGenerator& randomGenerator, Line2& line, const bool refine = true, const unsigned int iterations = 20u, const Scalar maxSqrError = Scalar(5 * 5), Scalar* finalError = nullptr, Indices32* usedIndices = nullptr);
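 // Illustrative sketch (hypothetical names): fitting the dominant 2D line through a noisy point set.
 //
 //  RandomGenerator randomGenerator;
 //  Line2 line;
 //
 //  if (Geometry::RANSAC::line(ConstArrayAccessor<Vector2>(positions), randomGenerator, line))
 //  {
 //      // 'line' is the consensus line; positions farther away than the error threshold were treated as outliers
 //  }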
219 
220  /**
221  * Determines the 2D direction from a set of given 2D directions provided as unit vectors.
222  * @param directions The set of 2D unit vectors from which the major direction is determined, at least 1
223  * @param randomGenerator A random generator object
224  * @param direction The resulting major direction of the provided set of vectors, will be a unit vector
225  * @param acceptOppositeDirections True, to accept opposite directions (unit vectors almost parallel but pointing in opposite directions); False, to treat opposite directions as different directions
226  * @param refine True, to apply an optimization step to increase the accuracy of the direction
227  * @param iterations The number of RANSAC iterations, with range [1, infinity)
228  * @param maxAngle The maximal angle between a direction candidate and a direction from the provided set to count as valid, in radian, with range [0, PI)
229  * @param finalError Optional resulting final average angle between all valid directions and the mean direction, this is an approximation only, with range [0, PI)
230  * @param usedIndices Optional resulting indices of all valid directions
231  * @return True, if succeeded
232  */
233  static bool direction(const ConstIndexedAccessor<Vector2>& directions, RandomGenerator& randomGenerator, Vector2& direction, const bool acceptOppositeDirections, const bool refine = true, const unsigned int iterations = 20u, const Scalar maxAngle = Numeric::deg2rad(5), Scalar* finalError = nullptr, Indices32* usedIndices = nullptr);
234 
235  /**
236  * Determines the 2D translation (offset/translation vector) from a set of given 2D vectors.
237  * @param translations The set of 2D vectors from which the major translation/offset is determined, at least 1
238  * @param randomGenerator A random generator object
239  * @param translation The resulting major translation of the provided set of vectors
240  * @param refine True, to apply an optimization step to increase the accuracy of the translation
241  * @param iterations The number of RANSAC iterations, with range [1, infinity)
242  * @param maxSqrError The maximal square error between a translation candidate and a translation from the provided set to count as valid, with range (0, infinity)
243  * @param finalError Optional resulting final average square error between all valid translations and the mean translation
244  * @param usedIndices Optional resulting indices of all valid translations
245  * @return True, if succeeded
246  */
247  static bool translation(const ConstIndexedAccessor<Vector2>& translations, RandomGenerator& randomGenerator, Vector2& translation, const bool refine = true, const unsigned int iterations = 20u, const Scalar maxSqrError = Scalar(5 * 5), Scalar* finalError = nullptr, Indices32* usedIndices = nullptr);
248 
249  /**
250  * Determines the 3DOF rotation of a camera pose for a set of given 2D/3D point correspondences by minimizing the projection error between 3D object points and 2D image points.
251  * @param camera The camera profile defining the projection, must be valid
252  * @param objectPoints The accessor providing the given 3D object points, at least 2
253  * @param imagePoints The accessor providing the given 2D image points, one image point for each 3D object point
254  * @param randomGenerator Random generator object to be used for creating random numbers
255  * @param world_R_camera The resulting camera orientation, transforming camera to world, with default camera pointing towards the negative z-space with y-axis upwards
256  * @param minValidCorrespondences The minimal number of valid correspondences which are necessary for a valid orientation
257  * @param iterations The number of RANSAC iterations, with range [1, infinity)
258  * @param maxSqrError The maximal square pixel error between a projected 3D object point and a 2D image point to count as valid correspondence, with range (0, infinity)
259  * @param finalError Optional resulting final average square pixel error between all valid point correspondences for the resulting orientation
260  * @param usedIndices Optional resulting indices of all used point correspondences
261  * @return True, if succeeded
262  */
263  static bool orientation(const AnyCamera& camera, const ConstIndexedAccessor<ObjectPoint>& objectPoints, const ConstIndexedAccessor<ImagePoint>& imagePoints, RandomGenerator& randomGenerator, SquareMatrix3& world_R_camera, const unsigned int minValidCorrespondences = 5u, const unsigned int iterations = 20u, const Scalar maxSqrError = Scalar(5 * 5), Scalar* finalError = nullptr, Indices32* usedIndices = nullptr);
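 // Illustrative sketch (hypothetical names): recovering a pure 3DOF rotation, e.g., for a camera that only rotates.
 //
 //  RandomGenerator randomGenerator;
 //  SquareMatrix3 world_R_camera;
 //
 //  if (Geometry::RANSAC::orientation(anyCamera, ConstArrayAccessor<ObjectPoint>(objectPoints), ConstArrayAccessor<ImagePoint>(imagePoints), randomGenerator, world_R_camera))
 //  {
 //      // 'world_R_camera' rotates camera points into world points; no translation is determined
 //  }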
264 
265  /**
266  * Determines the 3D object point for a set of image points observing the same object point under individual camera poses (with rotational and translational camera motion).
267  * @param camera The camera profile for all camera frames and poses, must be valid
268  * @param world_T_cameras The camera poses of the camera frames in which the image points are located, one for each image point, with default camera pointing towards the negative z-space with y-axis upwards, at least two
269  * @param imagePoints The image points observing the 3D object point, one image point for each pose
270  * @param randomGenerator Random generator object to be used for creating random numbers
271  * @param objectPoint Resulting 3D object point defined in world
272  * @param iterations Number of RANSAC iterations, with range [1, infinity)
273  * @param maximalSqrError The maximal square pixel error between a projected object point and an image point, with range [0, infinity)
274  * @param minValidCorrespondences The minimal number of image points that have a projected pixel error smaller than 'maximalSqrError', with range [2, correspondences]
275  * @param onlyFrontObjectPoint True, if the resulting object point must lie in front of the camera
276  * @param refinementEstimator A robust estimator to invoke an optimization step to increase the accuracy of the resulting position, ET_INVALID to avoid the refinement
277  * @param finalRobustError Optional resulting final robust error
278  * @param usedIndices Optional resulting indices of valid image points
279  * @return True, if succeeded
280  */
281  static inline bool objectPoint(const AnyCamera& camera, const ConstIndexedAccessor<HomogenousMatrix4>& world_T_cameras, const ConstIndexedAccessor<ImagePoint>& imagePoints, RandomGenerator& randomGenerator, ObjectPoint& objectPoint, const unsigned int iterations = 20u, const Scalar maximalSqrError = Scalar(3 * 3), const unsigned int minValidCorrespondences = 2u, const bool onlyFrontObjectPoint = true, const Estimator::EstimatorType refinementEstimator = Estimator::ET_SQUARE, Scalar* finalRobustError = nullptr, Indices32* usedIndices = nullptr);
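 // Illustrative sketch (hypothetical names): triangulating a single 3D point from several observations across frames.
 //
 //  RandomGenerator randomGenerator;
 //  ObjectPoint objectPoint;
 //
 //  if (Geometry::RANSAC::objectPoint(anyCamera, ConstArrayAccessor<HomogenousMatrix4>(world_T_cameras), ConstArrayAccessor<ImagePoint>(imagePoints), randomGenerator, objectPoint))
 //  {
 //      // 'objectPoint' is the consensus 3D location, defined in world; observations with too large re-projection error were ignored
 //  }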
282 
283  /**
284  * Determines the 3D object point for a set of image points observing the same object point under individual camera poses (with rotational and translational camera motion), while each camera pose has an own camera profile.
285  * @param cameras The camera profiles for all camera frames and poses, must be valid
286  * @param world_T_cameras The camera poses of the camera frames in which the image points are located, one for each image point, with default camera pointing towards the negative z-space with y-axis upwards, at least two
287  * @param imagePoints The image points observing the 3D object point, one image point for each pose
288  * @param randomGenerator Random generator object to be used for creating random numbers
289  * @param objectPoint Resulting 3D object point, defined in world
290  * @param iterations Number of RANSAC iterations, with range [1, infinity)
291  * @param maximalSqrError The maximal square pixel error between a projected object point and an image point, with range [0, infinity)
292  * @param minValidCorrespondences The minimal number of image points that have a projected pixel error smaller than 'maximalSqrError', with range [2, correspondences]
293  * @param onlyFrontObjectPoint True, if the resulting object point must lie in front of the camera
294  * @param refinementEstimator A robust estimator to invoke an optimization step to increase the accuracy of the resulting position, ET_INVALID to avoid the refinement
295  * @param finalRobustError Optional resulting final robust error
296  * @param usedIndices Optional resulting indices of valid image points
297  * @return True, if succeeded
298  */
299  static bool objectPoint(const ConstIndexedAccessor<const AnyCamera*>& cameras, const ConstIndexedAccessor<HomogenousMatrix4>& world_T_cameras, const ConstIndexedAccessor<ImagePoint>& imagePoints, RandomGenerator& randomGenerator, ObjectPoint& objectPoint, const unsigned int iterations = 20u, const Scalar maximalSqrError = Scalar(3 * 3), const unsigned int minValidCorrespondences = 2u, const bool onlyFrontObjectPoint = true, const Estimator::EstimatorType refinementEstimator = Estimator::ET_SQUARE, Scalar* finalRobustError = nullptr, Indices32* usedIndices = nullptr);
300 
301  /**
302  * Determines the 3D object point for a set of image points observing the same object point under individual camera poses (with rotational camera motion only).
303  * The center position of each camera is located at the origin of the coordinate system.
304  * @param camera The camera profile defining the projection for all camera frames and poses, must be valid
305  * @param world_R_cameras The camera orientations of the camera frames in which the image points are located, transforming cameras to world, with default camera pointing towards the negative z-space with y-axis upwards, at least two
306  * @param imagePoints The image points observing the 3D object point, one image point for each pose
307  * @param randomGenerator Random generator object to be used for creating random numbers
308  * @param objectPoint The resulting 3D object point
309  * @param objectPointDistance The distance between the origin and a resulting object point, with range (0, infinity)
310  * @param iterations Number of RANSAC iterations, with range [1, infinity)
311  * @param maximalError The maximal square pixel error between a projected object point and an image point, with range (0, infinity)
312  * @param minValidCorrespondences The minimal number of image points that have a projected pixel error smaller than 'maximalError', with range [2, correspondences]
313  * @param onlyFrontObjectPoint True, if the resulting object point must lie in front of the camera
314  * @param refinementEstimator A robust estimator to invoke an optimization step to increase the accuracy of the resulting position, ET_INVALID to avoid the refinement
315  * @param finalError Optional resulting final error
316  * @param usedIndices Optional resulting indices of valid image points
317  * @return True, if succeeded
318  */
319  static bool objectPoint(const AnyCamera& camera, const ConstIndexedAccessor<SquareMatrix3>& world_R_cameras, const ConstIndexedAccessor<ImagePoint>& imagePoints, RandomGenerator& randomGenerator, ObjectPoint& objectPoint, const Scalar objectPointDistance, const unsigned int iterations = 20u, const Scalar maximalError = Scalar(3 * 3), const unsigned int minValidCorrespondences = 2u, const bool onlyFrontObjectPoint = true, const Estimator::EstimatorType refinementEstimator = Estimator::ET_SQUARE, Scalar* finalError = nullptr, Indices32* usedIndices = nullptr);
320 
321  /**
322  * Determines a 3D plane best matching to a set of given 3D object points.
323  * @param objectPoints The 3D object points for which the best fitting plane will be determined, at least 3
324  * @param randomGenerator Random generator object to be used for creating random numbers
325  * @param plane Resulting 3D plane
326  * @param iterations Number of RANSAC iterations, with range [1, infinity)
327  * @param medianDistanceFactor The factor used to define the maximal distance between an object point and the plane so that the point still counts as valid; the factor is multiplied with the median distance between all object points and the median object point, with range [0, infinity), -1 to accept all object points as valid
328  * @param minValidCorrespondences The minimal number of object points that have an error smaller than the determined median-based threshold, with range [2, correspondences]
329  * @param refinementEstimator A robust estimator to invoke an optimization step to increase the accuracy of the resulting position, ET_INVALID to avoid the refinement
330  * @param finalError Optional resulting final error
331  * @param usedIndices Optional resulting indices of valid object points
332  * @return True, if succeeded
333  */
334  static bool plane(const ConstIndexedAccessor<ObjectPoint>& objectPoints, RandomGenerator& randomGenerator, Plane3& plane, const unsigned int iterations = 20u, const Scalar medianDistanceFactor = Scalar(0.1), const unsigned int minValidCorrespondences = 3u, const Estimator::EstimatorType refinementEstimator = Estimator::ET_SQUARE, Scalar* finalError = nullptr, Indices32* usedIndices = nullptr);
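 // Illustrative sketch (hypothetical names): fitting a dominant 3D plane, e.g., to sparse points on a floor or wall.
 //
 //  RandomGenerator randomGenerator;
 //  Plane3 plane;
 //
 //  if (Geometry::RANSAC::plane(ConstArrayAccessor<ObjectPoint>(objectPoints), randomGenerator, plane))
 //  {
 //      // 'plane' is the consensus plane; points far from it (relative to the median-based threshold) were treated as outliers
 //  }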
335 
336  /**
337  * Optimizes an already known 3D plane by minimizing the error between the plane and a set of given 3D object points.
338  * @param initialPlane The already known initial plane which will be optimized, must be valid
339  * @param objectPoints The 3D object points for which the best fitting plane will be determined, at least 3
340  * @param randomGenerator Random generator object to be used for creating random numbers
341  * @param plane Resulting 3D plane
342  * @param iterations Number of RANSAC iterations, with range [1, infinity)
343  * @param maximalNormalOrientationOffset The maximal angle between the normal of the initial plane and the normal of the final optimized plane, in radian, with range [0, PI/2]
344  * @param medianDistanceFactor The factor used to define the maximal distance between an object point and the plane so that the point still counts as valid; the factor is multiplied with the median distance between all object points and the initial plane, with range [0, infinity), -1 to accept all object points as valid
345  * @param minValidCorrespondences The minimal number of object points that have an error smaller than the determined median-based threshold, with range [2, correspondences]
346  * @param refinementEstimator A robust estimator to invoke an optimization step to increase the accuracy of the resulting position, ET_INVALID to avoid the refinement
347  * @param finalError Optional resulting final error
348  * @param usedIndices Optional resulting indices of valid object points
349  * @return True, if succeeded
350  */
351  static bool plane(const Plane3& initialPlane, const ConstIndexedAccessor<ObjectPoint>& objectPoints, RandomGenerator& randomGenerator, Plane3& plane, const unsigned int iterations = 20u, const Scalar maximalNormalOrientationOffset = Numeric::deg2rad(30), const Scalar medianDistanceFactor = Scalar(0.1), const unsigned int minValidCorrespondences = 3u, const Estimator::EstimatorType refinementEstimator = Estimator::ET_SQUARE, Scalar* finalError = nullptr, Indices32* usedIndices = nullptr);
352 
353  /**
354  * Calculates the fundamental matrix by given point correspondences for two stereo images.
355  * In each RANSAC iteration, eight correspondences are selected randomly and the resulting fundamental matrix is tested against all given correspondences.
356  * @param leftImagePoints Left image points (each point corresponds to a right image point)
357  * @param rightImagePoints Right image points (each point corresponds to a left image point)
358  * @param correspondences Number of point correspondences, with range [testCandidates, infinity)
359  * @param width The width of the left image in pixel
360  * @param height The height of the left image in pixel
361  * @param fundamental Resulting fundamental matrix
362  * @param testCandidates Number of candidates used in each RANSAC iteration, with range [8, correspondences]
363  * @param iterations Number of RANSAC iterations, with range [1, infinity)
364  * @param errorThreshold Maximal error threshold for valid RANSAC candidates, with range (0, infinity)
365  * @param usedIndices Optional vector which will receive the indices of the used image correspondences, if defined
366  * @return True, if succeeded
367  * @see extrinsicMatrix(), Geometry::EpipolarGeometry::fundamental2essential().
368  */
369  static bool fundamentalMatrix(const ImagePoint* leftImagePoints, const ImagePoint* rightImagePoints, const size_t correspondences, const unsigned int width, const unsigned int height, SquareMatrix3& fundamental, const unsigned int testCandidates = 8u, const unsigned int iterations = 20u, const Scalar errorThreshold = Numeric::weakEps(), Indices32* usedIndices = nullptr);
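 // Illustrative sketch (hypothetical names): estimating the fundamental matrix from putative stereo matches,
 // with leftImagePoints[i] corresponding to rightImagePoints[i]:
 //
 //  SquareMatrix3 fundamental;
 //
 //  if (Geometry::RANSAC::fundamentalMatrix(leftImagePoints.data(), rightImagePoints.data(), leftImagePoints.size(), 1280u, 720u, fundamental))
 //  {
 //      // inlier pairs satisfy the epipolar constraint defined by 'fundamental' up to the error threshold
 //  }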
370 
371  /**
372  * Calculates the extrinsic camera matrix by given point correspondences for two stereo images.
373  * The extrinsic matrix can be determined up to a scale factor for the translation vector.<br>
374  * The left camera is expected to be located at the origin while looking towards the negative z-axis with the y-axis as up vector.<br>
375  * The resulting transformation defines the extrinsic camera matrix for the right camera.
376  * @param leftCamera Left camera corresponding to the left image points
377  * @param rightCamera Right camera corresponding to the right image points
378  * @param leftImagePoints Left image points (each point corresponds to a right image point)
379  * @param rightImagePoints Right image points (each point corresponds to a left image point)
380  * @param correspondences Number of point correspondences, with range [testCandidates, infinity)
381  * @param transformation Resulting extrinsic camera transformation matrix which transforms points lying in the right camera coordinate system into points lying in the left camera coordinate system
382  * @param testCandidates Number of candidates used in each RANSAC iteration, with range [8, correspondences]
383  * @param iterations Number of RANSAC iterations, with range [1, infinity)
384  * @param squarePixelErrorThreshold Maximal square pixel error of a valid projection of a 3D point onto the 2D image plane, with range (0, infinity)
385  * @param maxTranslation Optional bounding box restricting the resulting translation vector; if defined, it should have a maximal expansion of +/- 1 in each axis
386  * @param maxRotation Optional maximal resulting rotation in radian
387  * @param usedIndices Optional vector which will receive the indices of the used image correspondences, if defined
388  * @return True, if succeeded
389  * @see fundamentalMatrix().
390  */
391  static bool extrinsicMatrix(const PinholeCamera& leftCamera, const PinholeCamera& rightCamera, const ImagePoint* leftImagePoints, const ImagePoint* rightImagePoints, const size_t correspondences, HomogenousMatrix4& transformation, const unsigned int testCandidates = 8u, const unsigned int iterations = 20u, const Scalar squarePixelErrorThreshold = Scalar(9), const Box3& maxTranslation = Box3(), const Scalar maxRotation = Numeric::pi(), Indices32* usedIndices = nullptr);
392 
393  /**
394  * Calculates the homography between two images transforming the given image points between two images.
395  * This function needs bijective correspondences (each left image point matches exactly one unique right image point).<br>
396  * The resulting homography transforms image points defined in the left image to image points defined in the right image (rightPoint = H * leftPoint).<br>
397  * Beware: This function is intended for prototyping purposes only. When binary size matters, please use the corresponding function using template parameters instead of 'refine' and 'useSVD'.
398  * @param leftImagePoints Image points in the left camera, each point corresponds to one point in the right image, must be valid
399  * @param rightImagePoints Image points in the right camera, one for each point in the left frame, must be valid
400  * @param correspondences Number of point correspondences, with range [4, infinity)
401  * @param randomGenerator Random generator object to be used for creating random numbers
402  * @param homography Resulting homography for the given image points
403  * @param testCandidates Number of candidates used in each RANSAC iteration, with range [4, correspondences]
404  * @param refine True, to apply a non-linear least square optimization to increase the transformation accuracy after the RANSAC step
405  * @param iterations Number of RANSAC iterations, with range [1, infinity)
406  * @param squarePixelErrorThreshold Maximal square pixel error between a right point and a transformed left point so that a point correspondence counts as valid, with range (0, infinity)
407  * @param usedIndices Optional vector which will receive the indices of the used image correspondences, if defined
408  * @param worker Optional worker object to distribute the computation
409  * @param useSVD True, to use the slower SVD approach (i.e., Homography::homographyMatrixSVD); False, to use the two-step approach (i.e., Homography::homographyMatrixLinearWithOptimizations)
410  * @return True, if succeeded
411  * @see homographyMatrix<tRefine, tUseSVD>(), Geometry::Homography::homographyMatrix(), homographyMatrixForNonBijectiveCorrespondences().
412  */
413  static inline bool homographyMatrix(const ImagePoint* leftImagePoints, const ImagePoint* rightImagePoints, const size_t correspondences, RandomGenerator& randomGenerator, SquareMatrix3& homography, const unsigned int testCandidates = 8u, const bool refine = true, const unsigned int iterations = 20u, const Scalar squarePixelErrorThreshold = Scalar(9), Indices32* usedIndices = nullptr, Worker* worker = nullptr, const bool useSVD = true);
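 // Illustrative sketch (hypothetical names): estimating a homography, e.g., for a planar scene or a purely rotating camera,
 // with leftImagePoints[i] corresponding to rightImagePoints[i]:
 //
 //  RandomGenerator randomGenerator;
 //  SquareMatrix3 right_H_left;
 //
 //  if (Geometry::RANSAC::homographyMatrix(leftImagePoints.data(), rightImagePoints.data(), leftImagePoints.size(), randomGenerator, right_H_left))
 //  {
 //      // for inlier correspondences, rightPoint == right_H_left * leftPoint up to the square pixel error threshold
 //  }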
414 
415  /**
416  * Calculates the homography between two images transforming the given image points between two images.
417  * This function can be applied if the given correspondences are not bijective.<br>
418  * The resulting homography transforms image points defined in the left image to image points defined in the right image (rightPoint = H * leftPoint).<br>
419  * Beware: This function is intended for prototyping purposes only. When binary size matters, please use the corresponding function using template parameters instead of 'refine' and 'useSVD'.
420  * @param leftImagePoints Image points in the left camera, must be valid
421  * @param numberLeftImagePoints The number of given left image points, with range [4, infinity)
422  * @param rightImagePoints Image points in the right camera, must be valid
423  * @param numberRightImagePoints The number of given right image points, with range [4, infinity)
424  * @param correspondences The pairs defining the non-bijective correspondences, with range [0, numberLeftImagePoints - 1]x[0, numberRightImagePoints - 1], must be valid
425  * @param numberCorrespondences The number of given correspondence pairs, with range [4, infinity)
426  * @param randomGenerator Random generator object to be used for creating random numbers
427  * @param right_H_left Resulting homography for the given image points, (rightPoint = right_H_left * leftPoint)
428  * @param testCandidates Number of candidates used in each RANSAC iteration, with range [4, correspondences]
429  * @param refine True, to apply a non-linear least square optimization to increase the transformation accuracy after the RANSAC step
430  * @param iterations Number of RANSAC iterations, with range [1, infinity)
431  * @param squarePixelErrorThreshold Maximal square pixel error between a right point and a transformed left point so that a point correspondence counts as valid, with range (0, infinity)
432  * @param usedIndices Optional vector which will receive the indices of the used image correspondences, if defined
433  * @param worker Optional worker object to distribute the computation
434  * @param useSVD True, to use the slower SVD approach (i.e., Homography::homographyMatrixSVD); False, to use the two-step approach (i.e., Homography::homographyMatrixLinearWithOptimizations)
435  * @return True, if succeeded
436  * @see homographyMatrixForNonBijectiveCorrespondences<tRefine, tUseSVD>(), homographyMatrix().
437  */
438  static inline bool homographyMatrixForNonBijectiveCorrespondences(const ImagePoint* leftImagePoints, const size_t numberLeftImagePoints, const ImagePoint* rightImagePoints, const size_t numberRightImagePoints, const IndexPair32* correspondences, const size_t numberCorrespondences, RandomGenerator& randomGenerator, SquareMatrix3& right_H_left, const unsigned int testCandidates = 8u, const bool refine = true, const unsigned int iterations = 20u, const Scalar squarePixelErrorThreshold = Scalar(9), Indices32* usedIndices = nullptr, Worker* worker = nullptr, const bool useSVD = true);
439 
440  /**
441  * Calculates the homography between two images transforming the given image points between two images.
442  * This function needs bijective correspondences (each left image point matches exactly one unique right image point).<br>
443  * The resulting homography transforms image points defined in the left image to image points defined in the right image (rightPoint = H * leftPoint).<br>
444  * This function uses template parameters for 'refine' and 'useSVD' and thus creates a smaller binary footprint.
445  * @param leftImagePoints Image points in the left camera, each point corresponds to one point in the right image
446  * @param rightImagePoints Image points in the right camera, one for each point in the left frame
447  * @param correspondences Number of point correspondences, with range [4, infinity)
448  * @param randomGenerator Random generator object to be used for creating random numbers
449  * @param homography Resulting homography for the given image points
450  * @param testCandidates Number of candidates used in each RANSAC iteration, with range [4, correspondences]
451  * @param iterations Number of RANSAC iterations, with range [1, infinity)
452  * @param squarePixelErrorThreshold Maximal square pixel error between a right point and a transformed left point so that a point correspondence counts as valid, with range (0, infinity)
453  * @param usedIndices Optional vector which will receive the indices of the used image correspondences, if defined
454  * @param worker Optional worker object to distribute the computation
455  * @return True, if succeeded
456  * @tparam tRefine True, to apply a non-linear least square optimization to increase the transformation accuracy after the RANSAC step
457  * @tparam tUseSVD True, to use the slower SVD approach (i.e., Homography::homographyMatrixSVD); False, to use the two-step approach (i.e., Homography::homographyMatrixLinearWithOptimizations)
458  * @see homographyMatrix(), Geometry::Homography::homographyMatrix().
459  */
460  template <bool tRefine, bool tUseSVD>
461  static bool homographyMatrix(const ImagePoint* leftImagePoints, const ImagePoint* rightImagePoints, const size_t correspondences, RandomGenerator& randomGenerator, SquareMatrix3& homography, const unsigned int testCandidates = 8u, const unsigned int iterations = 20u, const Scalar squarePixelErrorThreshold = Scalar(9), Indices32* usedIndices = nullptr, Worker* worker = nullptr);
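 // Illustrative sketch: the same estimation with compile-time choices (hypothetical variable names); selecting
 // 'tRefine' and 'tUseSVD' via template parameters is what the documentation above credits with a smaller binary footprint:
 //
 //  if (Geometry::RANSAC::homographyMatrix<true /*tRefine*/, false /*tUseSVD*/>(leftImagePoints.data(), rightImagePoints.data(), leftImagePoints.size(), randomGenerator, right_H_left))
 //  {
 //      // non-linear refinement enabled, the faster non-SVD (two-step) solver selected at compile time
 //  }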
462 
463  /**
464  * Calculates the homography between two images transforming the given image points between two images.
465  * This function can be applied if the given correspondences are not bijective.<br>
466  * The resulting homography transforms image points defined in the left image to image points defined in the right image (rightPoint = H * leftPoint).<br>
467  * This function uses template parameters for 'refine' and 'useSVD' and thus creates a smaller binary footprint.
468  * @param leftImagePoints Image points in the left camera, must be valid
469  * @param numberLeftImagePoints The number of given left image points, with range [4, infinity)
470  * @param rightImagePoints Image points in the right camera, must be valid
471  * @param numberRightImagePoints The number of given right image points, with range [4, infinity)
472  * @param correspondences The pairs defining the non-bijective correspondences, with range [0, numberLeftImagePoints - 1]x[0, numberRightImagePoints - 1], must be valid
473  * @param numberCorrespondences The number of given correspondence pairs, with range [4, infinity)
474  * @param randomGenerator Random generator object to be used for creating random numbers
475  * @param right_H_left Resulting homography for the given image points, (rightPoint = right_H_left * leftPoint)
476  * @param testCandidates Number of candidates used in each RANSAC iteration, with range [4, correspondences]
477  * @param iterations Number of RANSAC iterations, with range [1, infinity)
478  * @param squarePixelErrorThreshold Maximal square pixel error between a right point and a transformed left point so that a point correspondence counts as valid, with range (0, infinity)
479  * @param usedIndices Optional vector which will receive the indices of the used image correspondences, if defined
480  * @param worker Optional worker object to distribute the computation
481  * @return True, if succeeded
482  * @tparam tRefine True, to apply a non-linear least square optimization to increase the transformation accuracy after the RANSAC step
483  * @tparam tUseSVD True, to use the slower SVD approach (i.e., Homography::homographyMatrixSVD); False, to use the two-step approach (i.e., Homography::homographyMatrixLinearWithOptimizations)
484  * @see homographyMatrixForNonBijectiveCorrespondences().
485  */
486  template <bool tRefine, bool tUseSVD>
487  static bool homographyMatrixForNonBijectiveCorrespondences(const ImagePoint* leftImagePoints, const size_t numberLeftImagePoints, const ImagePoint* rightImagePoints, const size_t numberRightImagePoints, const IndexPair32* correspondences, const size_t numberCorrespondences, RandomGenerator& randomGenerator, SquareMatrix3& right_H_left, const unsigned int testCandidates = 8u, const unsigned int iterations = 20u, const Scalar squarePixelErrorThreshold = Scalar(9), Indices32* usedIndices = nullptr, Worker* worker = nullptr);
488 
489  /**
490  * Calculates four homographies between two images transforming the given image points between two images.
491  * The resulting homographies transform image points defined in the left image to image points defined in the right image (rightPoint = H[i] * leftPoint).<br>
492  * Four homographies are determined, one homography for each quadrant of the frame, allowing minor distortions in the frames to be addressed.<br>
493  * The quadrant order of the homographies is as follows: top left, top right, bottom left, bottom right.
494  * @param leftImagePoints Image points in the left camera, each point corresponds to one point in the right image
495  * @param rightImagePoints Image points in the right camera
496  * @param correspondences Number of point correspondences
497  * @param leftQuadrantCenter The center of the four quadrants in the left frame (this may be the center of the left frame), with range [0, leftImageWidth)x[0, leftImageHeight)
498  * @param randomGenerator Random generator object to be used for creating random numbers
499  * @param homographies Resulting four homographies for the given image points
500  * @param testCandidates Number of candidates used in each RANSAC iteration, with range [4, correspondences]
501  * @param iterations Number of RANSAC iterations, with range [1, infinity)
502  * @param squarePixelErrorThreshold Maximal square pixel error between a right point and a transformed left point so that a point correspondence counts as valid, with range (0, infinity)
503  * @param usedIndices Optional vector which will receive the indices of the used image correspondences, if defined
504  * @param worker Optional worker object to distribute the computation
505  * @return True, if succeeded
506  * @see Geometry::Homography::homographyMatrix().
507  */
508  static bool homographyMatrices(const ImagePoint* leftImagePoints, const ImagePoint* rightImagePoints, const size_t correspondences, const Vector2& leftQuadrantCenter, RandomGenerator& randomGenerator, SquareMatrix3 homographies[4], const unsigned int testCandidates = 8u, const unsigned int iterations = 20u, const Scalar squarePixelErrorThreshold = Scalar(9), Indices32* usedIndices = nullptr, Worker* worker = nullptr);
509 
510  /**
511  * Calculates inverted flipped camera poses (up to a common 3D projective transformation) for image point correspondences between images from multiple views.
512  * @param imagePointsPerPose Image points per view (at least 3 views and at least 6 point correspondences)
513  * @param posesIF Resulting camera projection matrices per view (inverted flipped)
514  * @param iterations Number of RANSAC iterations, with range [1, infinity)
515  * @param squarePixelErrorThreshold Maximal square pixel error of a valid projection of a 3D point onto the 2D image plane, with range (0, infinity)
516  * @param objectPointsIF Optional vector which will receive the object points of the image point correspondences, if defined
517  * @param usedIndices Optional vector which will receive the indices of the used image correspondences, if defined
518  * @param worker Optional worker object to distribute the computation
519  * @return True, if succeeded
520  * @see Geometry::MultipleViewGeometry::projectiveReconstructionFrom6PointsIF().
521  */
522  static bool projectiveReconstructionFrom6PointsIF(const ConstIndexedAccessor<ImagePoints>& imagePointsPerPose, NonconstIndexedAccessor<HomogenousMatrix4>* posesIF, const unsigned int iterations = 20u, const Scalar squarePixelErrorThreshold = Scalar(9), NonconstArrayAccessor<ObjectPoint>* objectPointsIF = nullptr, Indices32* usedIndices = nullptr, Worker* worker = nullptr);
523 
524  /**
525  * Determines the camera calibration for several given camera calibration patterns; each pattern object holds the 2D/3D point correspondences for a planar calibration pattern observed from an individual viewing position.
526  * The camera profile must be constant (must be identical) for all given calibration patterns.
527  *
528  * This calibration function uses RANSAC iterations to find the best calibration for the given point correspondence sets.<br>
529  * Thus, only a subset of the given correspondence sets is used for camera determination and then tested against the entire set.<br>
530  * The best matching subset is then used to create a calibration for all valid calibration patterns.
531  * @param width The width of the camera frame in pixel, with range [1, infinity)
532  * @param height The height of the camera frame in pixel, with range [1, infinity)
533  * @param calibrationPatterns The detected calibration patterns used for calibration, at least 4
534  * @param calibrationPatternBoxSize Size of one calibration pattern box in a specific unit (e.g., meter), with ranges (0, infinity)x(0, infinity)
535  * @param testCandidates Number of candidates used in each RANSAC iteration, with range [4, correspondences]
536  * @param pinholeCamera Resulting camera holding all extracted calibration information such as intrinsic camera and distortion parameters
537  * @param iterations Number of RANSAC iterations, with range [1, infinity)
538  * @param sqrAccuracy Optional accuracy value receiving the average square pixel error, if defined
539  * @param worker Optional worker object to distribute the computation to several CPU cores
540  * @param usedIndices Optional vector which will receive the indices of the used image correspondences, if defined
541  * @return True, if succeeded
542  */
543  static bool determineCameraCalibrationPlanar(const unsigned int width, const unsigned int height, const CameraCalibration::Patterns& calibrationPatterns, const Vector2& calibrationPatternBoxSize, const unsigned int testCandidates, PinholeCamera& pinholeCamera, const unsigned int iterations = 20u, Scalar* sqrAccuracy = nullptr, Worker* worker = nullptr, Indices32* usedIndices = nullptr);
544 
545  /**
546  * Performs execution of a camera calibration for a given subset of the entire data provided.
547  * The calibration is done for several individual groups of 3D object points all lying on the same 3D plane and corresponding 2D image points.
548  * The individual groups of image points can e.g., be the result of individual images of a calibration pattern observed from different viewing positions.
549  * @param width The width of the camera image in pixel, with range [1, infinity)
550  * @param height The height of the camera image in pixel, with range [1, infinity)
551  * @param objectPointGroups The groups of object points (all points lie in the Z == 0 plane), each group has a corresponding group of image points
552  * @param imagePointGroups The groups of image points, one group for each corresponding group of object points
553  * @param indices Indices defining the subset of given image and object points to be used for camera calibration calculation
554  * @param pinholeCamera Resulting camera profile holding all extracted calibration information such as intrinsic camera and distortion parameters
555  * @param sqrAccuracy Optional resulting average square projection error in pixel accuracy, if the iteration fails the accuracy value is untouched
556  */
557  static void determineCameraCalibrationPlanarIteration(const unsigned int width, const unsigned int height, const ConstIndexedAccessor<Vectors3>* objectPointGroups, const ConstIndexedAccessor<Vectors2>* imagePointGroups, Indices32* indices, PinholeCamera* pinholeCamera, Scalar* sqrAccuracy);
558 
559  /**
560  * Calculates the affine transformation (6DOF - translation, rotation, scale, aspect ratio, shear) between two sets of 2D image points.
561  * @param leftImagePoints The image points in the left image, must be valid
562  * @param rightImagePoints The image points in the right image, one for each point in the left image, must be valid
563  * @param correspondences The number of point correspondences, with range [3, infinity)
564  * @param randomGenerator The random generator to be used
565  * @param right_A_left The resulting affine transformation matrix transforming left to right image points
566  * @param testCandidates The number of candidates used in each RANSAC iteration, with range [3, correspondences]
567  * @param iterations The number of RANSAC iterations, with range [1, infinity)
568  * @param squarePixelErrorThreshold The maximal square pixel error between two corresponding image points to count as valid, with range (0, infinity)
569  * @param usedIndices Optional resulting indices of the used point correspondences, if defined
570  * @param worker Optional worker object to distribute the computation
571  * @return True, if succeeded
572  * @see Homography::affineMatrix().
573  */
574  static bool affineMatrix(const ImagePoint* leftImagePoints, const ImagePoint* rightImagePoints, const size_t correspondences, RandomGenerator& randomGenerator, SquareMatrix3& right_A_left, const unsigned int testCandidates = 6u, const unsigned int iterations = 20u, const Scalar squarePixelErrorThreshold = Scalar(3 * 3), Indices32* usedIndices = nullptr, Worker* worker = nullptr);
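A minimal usage sketch (illustrative, not part of the header); the wrapper function and the point containers are assumptions, while the numeric values are the declared defaults.

#include "ocean/geometry/RANSAC.h"

using namespace Ocean;

// estimates the affine transformation between two already matched point sets (hypothetical helper)
bool determineAffineTransformation(const Vectors2& leftPoints, const Vectors2& rightPoints, SquareMatrix3& right_A_left)
{
	ocean_assert(leftPoints.size() == rightPoints.size() && leftPoints.size() >= 6);

	RandomGenerator randomGenerator;
	Indices32 usedIndices;

	// 6 candidates per iteration, 20 iterations, 3x3 square pixel error threshold
	return Geometry::RANSAC::affineMatrix(leftPoints.data(), rightPoints.data(), leftPoints.size(), randomGenerator, right_A_left, 6u, 20u, Scalar(3 * 3), &usedIndices);
}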
575 
576  /**
577  * Calculates the similarity transformation (4DOF - translation, rotation, scale) between two images transforming the given image points between two images.
578  * The resulting similarity transforms image points defined in the left image to image points defined in the right image (rightPoint = S * leftPoint).
579  * @param leftImagePoints Image points in the left camera, each point corresponds to one point in the right image
580  * @param rightImagePoints Image points in the right camera
581  * @param correspondences Number of point correspondences
582  * @param randomGenerator Random generator object to be used for creating random numbers
583  * @param similarity Resulting similarity for the given image points so that (rightPoint = S * leftPoint) holds
584  * @param testCandidates Number of candidates used in each RANSAC iteration, with range [2, correspondences]
585  * @param iterations Number of RANSAC iterations, with range [1, infinity)
586  * @param squarePixelErrorThreshold The maximal square pixel error, with range (0, infinity)
587  * @param usedIndices Optional vector which will receive the indices of the used image correspondences, if defined
588  * @param worker Optional worker object to distribute the computation
589  * @return True, if succeeded
590  * @see Geometry::Homography::similarityMatrix().
591  */
592  static bool similarityMatrix(const ImagePoint* leftImagePoints, const ImagePoint* rightImagePoints, const size_t correspondences, RandomGenerator& randomGenerator, SquareMatrix3& similarity, const unsigned int testCandidates = 4u, const unsigned int iterations = 20u, const Scalar squarePixelErrorThreshold = Scalar(9), Indices32* usedIndices = nullptr, Worker* worker = nullptr);
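Analogously, a brief sketch for the similarity case (the wrapper and input containers are assumptions, the numeric values are the declared defaults); a successful result satisfies rightPoint = S * leftPoint for the inlier correspondences.

#include "ocean/geometry/RANSAC.h"

using namespace Ocean;

// estimates the 4-DOF similarity between two matched point sets (hypothetical helper)
bool determineSimilarity(const Vectors2& leftPoints, const Vectors2& rightPoints, SquareMatrix3& similarity)
{
	ocean_assert(leftPoints.size() == rightPoints.size() && leftPoints.size() >= 4);

	RandomGenerator randomGenerator;

	// 4 candidates per iteration, 20 iterations, 3x3 square pixel error threshold
	return Geometry::RANSAC::similarityMatrix(leftPoints.data(), rightPoints.data(), leftPoints.size(), randomGenerator, similarity, 4u, 20u, Scalar(9));
}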
593 
594  /**
595  * Determines the 6-DOF object transformation between world and object so that the transformation fits with two observations in two images (left and right stereo image).
596  * @param anyCameraA The camera profile of the first camera, must be valid
597  * @param anyCameraB The camera profile of the second camera, must be valid
598  * @param world_T_cameraA The transformation between first camera and world, must be valid
599  * @param world_T_cameraB The transformation between second camera and world, must be valid
600  * @param objectPointsA The 3D object points of the observation of the object in the first camera, defined in the coordinate system of the object
601  * @param objectPointsB The 3D object points of the observation of the object in the second camera, defined in the coordinate system of the object
602  * @param imagePointsA The 2D image points of the observation of the object in the first camera, one for each 3D object point
603  * @param imagePointsB The 2D image points of the observation of the object in the second camera, one for each 3D object point
604  * @param randomGenerator Random generator object to be used
605  * @param world_T_object The resulting transformation between object and world
606  * @param minimalValidCorrespondences Minimal number of correspondences so that the resulting transformation counts as valid, with range [4, objectPointsA.size() + objectPointsB.size()]
607  * @param refine True, to apply a non-linear least square optimization to increase the transformation accuracy after the RANSAC step
608  * @param iterations The number of RANSAC iterations to be applied, with range [1, infinity)
609  * @param sqrPixelErrorThreshold Square projection error threshold for valid 2D/3D correspondences, with range [0, infinity)
610  * @param usedIndicesA Optional resulting indices of valid correspondences from the first camera
611  * @param usedIndicesB Optional resulting indices of valid correspondences from the second camera
612  * @param sqrAccuracy Optional resulting average square pixel error
613  * @param allowMonoObservation True, to allow that the object is visible in one of both camera frames only; False, to fail in case the object is visible in only one camera frame
614  * @return True, if succeeded
615  */
616  static bool objectTransformationStereo(const AnyCamera& anyCameraA, const AnyCamera& anyCameraB, const HomogenousMatrix4& world_T_cameraA, const HomogenousMatrix4& world_T_cameraB, const ConstIndexedAccessor<Vector3>& objectPointsA, const ConstIndexedAccessor<Vector3>& objectPointsB, const ConstIndexedAccessor<Vector2>& imagePointsA, const ConstIndexedAccessor<Vector2>& imagePointsB, RandomGenerator& randomGenerator, HomogenousMatrix4& world_T_object, const unsigned int minimalValidCorrespondences = 5u, const bool refine = true, const unsigned int iterations = 20u, const Scalar sqrPixelErrorThreshold = Scalar(5 * 5), Indices32* usedIndicesA = nullptr, Indices32* usedIndicesB = nullptr, Scalar* sqrAccuracy = nullptr, const bool allowMonoObservation = true);
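A sketch of a typical call (illustrative; the wrapper and the ConstArrayAccessor helper from ocean/base/Accessor.h are assumptions, the numeric values are the declared defaults).

#include "ocean/geometry/RANSAC.h"

using namespace Ocean;

// determines the 6-DOF transformation of an object observed in a stereo pair (hypothetical helper)
bool determineObjectTransformation(const AnyCamera& cameraA, const AnyCamera& cameraB, const HomogenousMatrix4& world_T_cameraA, const HomogenousMatrix4& world_T_cameraB, const Vectors3& objectPointsA, const Vectors3& objectPointsB, const Vectors2& imagePointsA, const Vectors2& imagePointsB, HomogenousMatrix4& world_T_object)
{
	RandomGenerator randomGenerator;
	Scalar sqrAccuracy = Numeric::maxValue();

	// at least 5 valid correspondences, with refinement, 20 iterations, 5x5 square pixel error threshold
	return Geometry::RANSAC::objectTransformationStereo(cameraA, cameraB, world_T_cameraA, world_T_cameraB, ConstArrayAccessor<Vector3>(objectPointsA), ConstArrayAccessor<Vector3>(objectPointsB), ConstArrayAccessor<Vector2>(imagePointsA), ConstArrayAccessor<Vector2>(imagePointsB), randomGenerator, world_T_object, 5u, true, 20u, Scalar(5 * 5), nullptr, nullptr, &sqrAccuracy);
}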
617 
618  private:
619 
620  /**
621  * Calculates a camera pose using the perspective pose problem with three point correspondences.
622  * The calculation can use a rough camera pose (e.g., from a previous camera frame) to increase the robustness.
623  * @param world_T_roughCamera Optional already known rough camera pose, transforming rough camera points to world points, with default camera pointing towards the negative z-space with y-axis upwards, nullptr if unknown
624  * @param camera The camera profile defining the projection, must be valid
625  * @param objectPointAccessor The accessor providing the 3D object points, each corresponding to one image point, at least four
626  * @param imagePointAccessor The accessor providing the 2D image points, each corresponding to one object point, at least four
627  * @param randomGenerator The random generator to be used
628  * @param world_T_camera The resulting camera pose, transforming camera points to world points, with default camera pointing towards the negative z-space with y-axis upwards, must be valid
629  * @param maxPositionOffset Optional maximal position offset between initial and final pose for the three axes
630  * @param maxOrientationOffset Optional maximal orientation offset between initial and final pose in radian angle
631  * @param minValidCorrespondences Minimal number of valid correspondences
632  * @param refine Determines whether a non-linear least squares algorithm is used to increase the pose accuracy after the RANSAC step
633  * @param iterations Number of maximal RANSAC iterations, with range [1, infinity)
634  * @param sqrPixelErrorThreshold Square pixel error threshold for valid RANSAC candidates
635  * @param usedIndices Optional vector receiving the indices of all valid correspondences
636  * @param sqrAccuracy Optional resulting average square pixel error, with range [0, infinity)
637  * @param weights Optional weights to weight the individual point correspondences individually, nullptr to use the same weight for all point correspondences
638  * @return True, if succeeded
639  */
640  static bool p3p(const HomogenousMatrix4* world_T_roughCamera, const AnyCamera& camera, const ConstIndexedAccessor<ObjectPoint>& objectPointAccessor, const ConstIndexedAccessor<ImagePoint>& imagePointAccessor, RandomGenerator& randomGenerator, HomogenousMatrix4& world_T_camera, const Vector3* maxPositionOffset, const Scalar* maxOrientationOffset, const unsigned int minValidCorrespondences, const bool refine, const unsigned int iterations, const Scalar sqrPixelErrorThreshold, Indices32* usedIndices, Scalar* sqrAccuracy, const Scalar* weights);
641 
642  /**
643  * Calculates a pose including zoom factor using the perspective pose problem with three point correspondences.
644  * The calculation can use a pose from a previous calculation to increase the robustness.<br>
645  * The specified point correspondences should be sorted by strength or stability to enhance the pose determination.<br>
646  * This function may use a-priori information (e.g., a previous pose).
647  * @param initialPose Optional rough initial pose to increase determination robustness
648  * @param initialZoom Optional rough initial zoom to increase determination robustness
649  * @param pinholeCamera The pinhole camera object specifying the intrinsic camera parameters
650  * @param objectPointAccessor The accessor providing the 3D object points, each corresponding to one image point, at least four
651  * @param imagePointAccessor The accessor providing the 2D image points, each corresponding to one object point, at least four
652  * @param randomGenerator A random generator object
653  * @param useDistortionParameters True, to use the distortion parameters of the camera profile; False, otherwise
654  * @param pose Resulting pose
655  * @param zoom Resulting zoom factor
656  * @param maxPositionOffset Optional maximal position offset between initial and final pose for the three axes
657  * @param maxOrientationOffset Optional maximal orientation offset between initial and final pose in radian angle
658  * @param minValidCorrespondences Minimal number of valid correspondences
659  * @param refine Determines whether a non-linear least squares algorithm is used to increase the pose accuracy after the RANSAC step
660  * @param iterations Number of maximal RANSAC iterations, with range [1, infinity)
661  * @param sqrPixelErrorThreshold Square pixel error threshold for valid RANSAC candidates
662  * @param usedIndices Optional vector receiving the indices of all valid correspondences
663  * @param sqrAccuracy Optional resulting average square pixel error
664  * @param weights Optional weights for each point correspondence
665  * @return True, if succeeded
666  */
667  static bool p3pZoom(const HomogenousMatrix4* initialPose, const Scalar* initialZoom, const PinholeCamera& pinholeCamera, const ConstIndexedAccessor<ObjectPoint>& objectPointAccessor, const ConstIndexedAccessor<ImagePoint>& imagePointAccessor, RandomGenerator& randomGenerator, const bool useDistortionParameters, HomogenousMatrix4& pose, Scalar& zoom, const Vector3* maxPositionOffset, const Scalar* maxOrientationOffset, const unsigned int minValidCorrespondences = 5u, const bool refine = true, const unsigned int iterations = 20u, const Scalar sqrPixelErrorThreshold = Scalar(5 * 5), Indices32* usedIndices = nullptr, Scalar* sqrAccuracy = nullptr, const Scalar* weights = nullptr);
668 
669  /**
670  * Calculates the geometry transformation between two images transforming the given image points between two images.
671  * The resulting homography transforms image points defined in the left image to image points defined in the right image (rightPoint = M * leftPoint).
672  * @param geometricTransformFunction The pointer to the geometric transformation function to be applied, must be valid
673  * @param leftImagePoints Image points in the left camera, each point corresponds to one point in the right image
674  * @param rightImagePoints Image points in the right camera
675  * @param correspondences Number of point correspondences
676  * @param randomGenerator Random generator object to be used for creating random numbers
677  * @param transformMatrix Resulting homography for the given image points
678  * @param testCandidates Number of candidates used in each RANSAC iteration
679  * @param iterations Number of RANSAC iterations, with range [1, infinity)
680  * @param squarePixelErrorThreshold The maximal square pixel error, with range (0, infinity)
681  * @param usedIndices Optional vector which will receive the indices of the used image correspondences, if defined
682  * @param worker Optional worker object to distribute the computation
683  * @return True, if succeeded
684  */
685  static bool geometricTransform(const GeometricTransformFunction geometricTransformFunction, const ImagePoint* leftImagePoints, const ImagePoint* rightImagePoints, const size_t correspondences, RandomGenerator& randomGenerator, SquareMatrix3& transformMatrix, const unsigned int testCandidates, const unsigned int iterations, const Scalar squarePixelErrorThreshold, Indices32* usedIndices, Worker* worker);
686 
687  /**
688  * Calculates the geometry transformation between two images transforming the given image points between two images.
689  * The resulting homography transforms image points defined in the left image to image points defined in the right image (rightPoint = M * leftPoint).
690  * @param geometricTransformFunction The pointer to the geometric transformation function to be applied, must be valid
691  * @param leftImagePoints Image points in the left camera, must be valid
692  * @param numberLeftImagePoints The number of given left image points, with range [1, infinity)
693  * @param rightImagePoints Image points in the right camera, must be valid
694  * @param numberRightImagePoints The number of given right image points, with range [1, infinity)
695  * @param correspondences The pairs defining the non-bijective correspondences, with range [0, numberLeftImagePoints - 1]x[0, numberRightImagePoints - 1], must be valid
696  * @param numberCorrespondences The number of given correspondence pairs, with range [1, infinity)
697  * @param randomGenerator Random generator object to be used for creating random numbers
698  * @param transformMatrix Resulting homography for the given image points
699  * @param testCandidates Number of candidates used in each RANSAC iteration
700  * @param iterations Number of RANSAC iterations, with range [1, infinity)
701  * @param squarePixelErrorThreshold The maximal square pixel error, with range (0, infinity)
702  * @param usedIndices Optional vector which will receive the indices of the used image correspondences, if defined
703  * @param worker Optional worker object to distribute the computation
704  * @return True, if succeeded
705  */
706  static bool geometricTransformForNonBijectiveCorrespondences(const GeometricTransformFunction geometricTransformFunction, const ImagePoint* leftImagePoints, const size_t numberLeftImagePoints, const ImagePoint* rightImagePoints, const size_t numberRightImagePoints, const IndexPair32* correspondences, const size_t numberCorrespondences, RandomGenerator& randomGenerator, SquareMatrix3& transformMatrix, const unsigned int testCandidates, const unsigned int iterations, const Scalar squarePixelErrorThreshold, Indices32* usedIndices, Worker* worker);
707 
708  /**
709  * Internal function to calculate the geometry transformation between two images transforming the projected planar object points between the two images.
710  * The resulting homography transforms image points defined in the left image to image points defined in the right image (rightPoint = M * leftPoint).
711  * @param geometricTransformFunction The pointer to the geometric transformation function to be applied, must be valid
712  * @param leftImagePoints Image points in the left camera, each point corresponds to one point in the right image
713  * @param rightImagePoints Image points in the right camera
714  * @param correspondences Number of point correspondences
715  * @param randomGenerator Random generator object to be used for creating random numbers
716  * @param transformMatrix Resulting homography for the given image points
717  * @param testCandidates Number of candidates used in each RANSAC iteration, with range [1, correspondences]
718  * @param squarePixelErrorThreshold The maximal square pixel error, with range (0, infinity)
719  * @param usedIndices Optional vector which will receive the indices of the used image correspondences, if defined
720  * @param maxValidCandidates Maximal number of valid candidates found so far; this parameter will be used by all threads concurrently
721  * @param minSquareErrors Minimal square errors of candidates found so far; this parameter will be used by all threads concurrently
722  * @param lock The lock object to avoid race conditions for the global parameters; should be nullptr in a single-threaded call, must be defined in a multi-threaded call
723  * @param firstIteration First iteration to be applied
724  * @param numberIterations Number of iterations to be applied
725  */
726  static void geometricTransformSubset(const GeometricTransformFunction geometricTransformFunction, const ImagePoint* leftImagePoints, const ImagePoint* rightImagePoints, const size_t correspondences, RandomGenerator* randomGenerator, SquareMatrix3* transformMatrix, const unsigned int testCandidates, const Scalar squarePixelErrorThreshold, Indices32* usedIndices, unsigned int* maxValidCandidates, Scalar* minSquareErrors, Lock* lock, const unsigned int firstIteration, const unsigned int numberIterations);
727 
728  /**
729  * Internal function to calculate the geometry transformation between two images transforming the projected planar object points between the two images.
730  * The resulting homography transforms image points defined in the left image to image points defined in the right image (rightPoint = M * leftPoint).
731  * @param geometricTransformFunction The pointer to the geometric transformation function to be applied, must be valid
732  * @param leftImagePoints Image points in the left camera, must be valid
733  * @param numberLeftImagePoints The number of given left image points, with range [1, infinity)
734  * @param rightImagePoints Image points in the right camera, must be valid
735  * @param numberRightImagePoints The number of given right image points, with range [1, infinity)
736  * @param correspondences The pairs defining the non-bijective correspondences, with range [0, numberLeftImagePoints - 1]x[0, numberRightImagePoints - 1], must be valid
737  * @param numberCorrespondences The number of given correspondence pairs, with range [1, infinity)
738  * @param randomGenerator Random generator object to be used for creating random numbers
739  * @param transformMatrix Resulting homography for the given image points
740  * @param testCandidates Number of candidates used in each RANSAC iteration, with range [1, correspondences]
741  * @param squarePixelErrorThreshold The maximal square pixel error, with range (0, infinity)
742  * @param usedIndices Optional vector which will receive the indices of the used image correspondences, if defined
743  * @param maxValidCandidates Maximal number of valid candidates found so far; this parameter will be used by all threads concurrently
744  * @param minSquareErrors Minimal square errors of candidates found so far; this parameter will be used by all threads concurrently
745  * @param lock The lock object to avoid race conditions for the global parameters; should be nullptr in a single-threaded call, must be defined in a multi-threaded call
746  * @param firstIteration First iteration to be applied
747  * @param numberIterations Number of iterations to be applied
748  */
749  static void geometricTransformForNonBijectiveCorrespondencesSubset(const GeometricTransformFunction geometricTransformFunction, const ImagePoint* leftImagePoints, const size_t numberLeftImagePoints, const ImagePoint* rightImagePoints, const size_t numberRightImagePoints, const IndexPair32* correspondences, const size_t numberCorrespondences, RandomGenerator* randomGenerator, SquareMatrix3* transformMatrix, const unsigned int testCandidates, const Scalar squarePixelErrorThreshold, Indices32* usedIndices, unsigned int* maxValidCandidates, Scalar* minSquareErrors, Lock* lock, const unsigned int firstIteration, const unsigned int numberIterations);
750 
751  /**
752  * Internal function to calculate inverted flipped camera poses (up to a common 3D projection transformation) for image point correspondences between images from multiple views.
753  * @param imagePointsPerPose Image points per view (at least 3 views and at least 6 point correspondences)
754  * @param views The number of views
755  * @param randomGenerator Random generator object to be used for creating random numbers
756  * @param posesIF Resulting camera projection matrices per view (inverted flipped)
757  * @param squarePixelErrorThreshold Maximal square pixel error of a valid projection of a 3D point onto the 2D image plane, with range (0, infinity)
758  * @param objectPointsIF Optional vector which will receive the object points of the image point correspondences, if defined
759  * @param usedIndices Optional vector which will receive the indices of the used image correspondences, if defined
760  * @param minSquareErrors Minimal square errors of candidates found so far; this parameter will be used by all threads concurrently
761  * @param lock The lock object to avoid race conditions for the global parameters; should be nullptr in a single-threaded call, must be defined in a multi-threaded call
762  * @param firstIteration First iteration to be applied
763  * @param numberIterations Number of iterations to be applied
764  * @see Geometry::MultipleViewGeometry::projectiveReconstructionFrom6PointsIF().
765  */
766  static void projectiveReconstructionFrom6PointsIFSubset(const ConstIndexedAccessor<ImagePoints>* imagePointsPerPose, const size_t views, RandomGenerator* randomGenerator, NonconstIndexedAccessor<HomogenousMatrix4>* posesIF, const Scalar squarePixelErrorThreshold, NonconstArrayAccessor<ObjectPoint>* objectPointsIF, Indices32* usedIndices, Scalar* minSquareErrors, Lock* lock, const unsigned int firstIteration, const unsigned int numberIterations);
767 };
768 
769 inline bool RANSAC::homographyMatrix(const ImagePoint* leftImagePoints, const ImagePoint* rightImagePoints, const size_t correspondences, RandomGenerator& randomGenerator, SquareMatrix3& homography, const unsigned int testCandidates, const bool refine, const unsigned int iterations, const Scalar squarePixelErrorThreshold, Indices32* usedIndices, Worker* worker, const bool useSVD)
770 {
771  ocean_assert(leftImagePoints != nullptr && rightImagePoints != nullptr);
772  ocean_assert(correspondences >= 4);
773  ocean_assert(testCandidates >= 4u);
774  ocean_assert(squarePixelErrorThreshold > 0);
775 
776  if (refine)
777  {
778  if (useSVD)
779  {
780  return homographyMatrix<true, true>(leftImagePoints, rightImagePoints, correspondences, randomGenerator, homography, testCandidates, iterations, squarePixelErrorThreshold, usedIndices, worker);
781  }
782  else
783  {
784  return homographyMatrix<true, false>(leftImagePoints, rightImagePoints, correspondences, randomGenerator, homography, testCandidates, iterations, squarePixelErrorThreshold, usedIndices, worker);
785  }
786  }
787  else
788  {
789  if (useSVD)
790  {
791  return homographyMatrix<false, true>(leftImagePoints, rightImagePoints, correspondences, randomGenerator, homography, testCandidates, iterations, squarePixelErrorThreshold, usedIndices, worker);
792  }
793  else
794  {
795  return homographyMatrix<false, false>(leftImagePoints, rightImagePoints, correspondences, randomGenerator, homography, testCandidates, iterations, squarePixelErrorThreshold, usedIndices, worker);
796  }
797  }
798 }
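A minimal usage sketch for the public entry point above (illustrative only; the wrapper, point containers, and worker handling are assumptions, the numeric values are the declared defaults).

#include "ocean/geometry/RANSAC.h"

using namespace Ocean;

// estimates the homography right_H_left from matched points, optionally multi-threaded (hypothetical helper)
bool determineHomography(const Vectors2& leftPoints, const Vectors2& rightPoints, SquareMatrix3& right_H_left, Worker* worker = nullptr)
{
	ocean_assert(leftPoints.size() == rightPoints.size() && leftPoints.size() >= 4);

	RandomGenerator randomGenerator;
	Indices32 usedIndices;

	// 8 candidates per iteration, with non-linear refinement, 20 iterations, 3x3 square pixel error threshold, SVD-based estimation
	return Geometry::RANSAC::homographyMatrix(leftPoints.data(), rightPoints.data(), leftPoints.size(), randomGenerator, right_H_left, 8u, true, 20u, Scalar(9), &usedIndices, worker, true);
}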
799 
800 inline bool RANSAC::homographyMatrixForNonBijectiveCorrespondences(const ImagePoint* leftImagePoints, const size_t numberLeftImagePoints, const ImagePoint* rightImagePoints, const size_t numberRightImagePoints, const IndexPair32* correspondences, const size_t numberCorrespondences, RandomGenerator& randomGenerator, SquareMatrix3& right_H_left, const unsigned int testCandidates, const bool refine, const unsigned int iterations, const Scalar squarePixelErrorThreshold, Indices32* usedIndices, Worker* worker, const bool useSVD)
801 {
802  ocean_assert(leftImagePoints != nullptr && rightImagePoints != nullptr);
803  ocean_assert(numberLeftImagePoints >= 4 && numberRightImagePoints >= 4);
804  ocean_assert(correspondences != nullptr);
805  ocean_assert(numberCorrespondences >= 4);
806  ocean_assert(testCandidates >= 4u);
807  ocean_assert(squarePixelErrorThreshold > 0);
808 
809  if (refine)
810  {
811  if (useSVD)
812  {
813  return homographyMatrixForNonBijectiveCorrespondences<true, true>(leftImagePoints, numberLeftImagePoints, rightImagePoints, numberRightImagePoints, correspondences, numberCorrespondences, randomGenerator, right_H_left, testCandidates, iterations, squarePixelErrorThreshold, usedIndices, worker);
814  }
815  else
816  {
817  return homographyMatrixForNonBijectiveCorrespondences<true, false>(leftImagePoints, numberLeftImagePoints, rightImagePoints, numberRightImagePoints, correspondences, numberCorrespondences, randomGenerator, right_H_left, testCandidates, iterations, squarePixelErrorThreshold, usedIndices, worker);
818  }
819  }
820  else
821  {
822  if (useSVD)
823  {
824  return homographyMatrixForNonBijectiveCorrespondences<false, true>(leftImagePoints, numberLeftImagePoints, rightImagePoints, numberRightImagePoints, correspondences, numberCorrespondences, randomGenerator, right_H_left, testCandidates, iterations, squarePixelErrorThreshold, usedIndices, worker);
825  }
826  else
827  {
828  return homographyMatrixForNonBijectiveCorrespondences<false, false>(leftImagePoints, numberLeftImagePoints, rightImagePoints, numberRightImagePoints, correspondences, numberCorrespondences, randomGenerator, right_H_left, testCandidates, iterations, squarePixelErrorThreshold, usedIndices, worker);
829  }
830  }
831 }
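A sketch for the non-bijective variant above (illustrative; the feature matching producing the candidate pairs is assumed to happen elsewhere, the numeric values are the declared defaults).

#include "ocean/geometry/RANSAC.h"

#include <vector>

using namespace Ocean;

// estimates the homography from candidate matches which may share left or right points (hypothetical helper)
bool determineHomographyFromCandidates(const Vectors2& leftPoints, const Vectors2& rightPoints, const std::vector<IndexPair32>& candidatePairs, SquareMatrix3& right_H_left)
{
	ocean_assert(leftPoints.size() >= 4 && rightPoints.size() >= 4 && candidatePairs.size() >= 4);

	RandomGenerator randomGenerator;
	Indices32 usedCandidates; // receives indices into candidatePairs of the inlier pairs

	return Geometry::RANSAC::homographyMatrixForNonBijectiveCorrespondences(leftPoints.data(), leftPoints.size(), rightPoints.data(), rightPoints.size(), candidatePairs.data(), candidatePairs.size(), randomGenerator, right_H_left, 8u, true, 20u, Scalar(9), &usedCandidates);
}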
832 
833 template <bool tRefine, bool tUseSVD>
834 bool RANSAC::homographyMatrix(const ImagePoint* leftImagePoints, const ImagePoint* rightImagePoints, const size_t correspondences, RandomGenerator& randomGenerator, SquareMatrix3& homography, const unsigned int testCandidates, const unsigned int iterations, const Scalar squarePixelErrorThreshold, Indices32* usedIndices, Worker* worker)
835 {
836  ocean_assert(leftImagePoints != nullptr && rightImagePoints != nullptr);
837  ocean_assert(correspondences >= 4);
838  ocean_assert(testCandidates >= 4u);
839  ocean_assert(squarePixelErrorThreshold > 0);
840 
841  if (testCandidates < 4u || correspondences < testCandidates)
842  {
843  return false;
844  }
845 
846  Indices32 tmpIndices;
847  Indices32* indices = (tRefine || usedIndices) ? &tmpIndices : nullptr;
848 
849  unsigned int maxValidCorrespondences = testCandidates - 1u;
850  Scalar minSquareErrors = Numeric::maxValue();
851 
852  if (worker != nullptr)
853  {
854  Lock lock;
855 
856  if constexpr (tUseSVD)
857  {
858  worker->executeFunction(Worker::Function::createStatic(&geometricTransformSubset, Homography::homographyMatrixSVD, leftImagePoints, rightImagePoints, correspondences, &randomGenerator, &homography, testCandidates, squarePixelErrorThreshold, indices, &maxValidCorrespondences, &minSquareErrors, (Lock*)(&lock), 0u, 0u), 0u, iterations, 12u, 13u, 5u);
859  }
860  else
861  {
862  worker->executeFunction(Worker::Function::createStatic(&geometricTransformSubset, Homography::homographyMatrixLinearWithoutOptimations, leftImagePoints, rightImagePoints, correspondences, &randomGenerator, &homography, testCandidates, squarePixelErrorThreshold, indices, &maxValidCorrespondences, &minSquareErrors, (Lock*)(&lock), 0u, 0u), 0u, iterations, 12u, 13u, 5u);
863  }
864  }
865  else
866  {
867  if constexpr (tUseSVD)
868  {
869  geometricTransformSubset(Homography::homographyMatrixSVD, leftImagePoints, rightImagePoints, correspondences, &randomGenerator, &homography, testCandidates, squarePixelErrorThreshold, indices, &maxValidCorrespondences, &minSquareErrors, nullptr, 0u, iterations);
870  }
871  else
872  {
873  geometricTransformSubset(Homography::homographyMatrixLinearWithoutOptimations, leftImagePoints, rightImagePoints, correspondences, &randomGenerator, &homography, testCandidates, squarePixelErrorThreshold, indices, &maxValidCorrespondences, &minSquareErrors, nullptr, 0u, iterations);
874  }
875  }
876 
877  if (maxValidCorrespondences < testCandidates || homography.isSingular())
878  {
879  return false;
880  }
881 
882  Homography::normalizeHomography(homography);
883 
884  if constexpr (tRefine)
885  {
886  ocean_assert(indices);
887 
888  const Vectors2 validLeftImagePoints(Subset::subset(leftImagePoints, correspondences, *indices));
889  const Vectors2 validRightImagePoints(Subset::subset(rightImagePoints, correspondences, *indices));
890 
891  SquareMatrix3 optimizedHomography;
892  if (Geometry::NonLinearOptimizationHomography::optimizeHomography<Geometry::Estimator::ET_SQUARE>(homography, validLeftImagePoints.data(), validRightImagePoints.data(), validLeftImagePoints.size(), 9u, optimizedHomography, 20u))
893  {
894  homography = optimizedHomography;
895 
896  if (usedIndices)
897  {
898  indices->clear();
899 
900  for (size_t n = 0; n < correspondences; ++n)
901  {
902  if (rightImagePoints[n].sqrDistance(homography * leftImagePoints[n]) <= squarePixelErrorThreshold)
903  {
904  indices->emplace_back(Index32(n));
905  }
906  }
907  }
908  }
909  }
910 
911  if (usedIndices != nullptr)
912  {
913  *usedIndices = std::move(tmpIndices);
914  }
915 
916  return true;
917 }
918 
919 template <bool tRefine, bool tUseSVD>
920 bool RANSAC::homographyMatrixForNonBijectiveCorrespondences(const ImagePoint* leftImagePoints, const size_t numberLeftImagePoints, const ImagePoint* rightImagePoints, const size_t numberRightImagePoints, const IndexPair32* correspondences, const size_t numberCorrespondences, RandomGenerator& randomGenerator, SquareMatrix3& right_H_left, const unsigned int testCandidates, const unsigned int iterations, const Scalar squarePixelErrorThreshold, Indices32* usedIndices, Worker* worker)
921 {
922  ocean_assert(squarePixelErrorThreshold > 0);
923  ocean_assert(leftImagePoints && rightImagePoints);
924  ocean_assert(correspondences != nullptr);
925  ocean_assert(numberCorrespondences >= 4);
926 
927  if (testCandidates < 4u || numberCorrespondences < testCandidates)
928  {
929  return false;
930  }
931 
932  Indices32 tmpIndices;
933  Indices32* indices = (tRefine || usedIndices) ? &tmpIndices : nullptr;
934 
935  unsigned int maxValidCorrespondences = testCandidates - 1u;
936  Scalar minSquareErrors = Numeric::maxValue();
937 
938  if (worker != nullptr)
939  {
940  Lock lock;
941 
942  if constexpr (tUseSVD)
943  {
944  worker->executeFunction(Worker::Function::createStatic(&geometricTransformForNonBijectiveCorrespondencesSubset, Homography::homographyMatrixSVD, leftImagePoints, numberLeftImagePoints, rightImagePoints, numberRightImagePoints, correspondences, numberCorrespondences, &randomGenerator, &right_H_left, testCandidates, squarePixelErrorThreshold, indices, &maxValidCorrespondences, &minSquareErrors, (Lock*)&lock, 0u, 0u), 0u, iterations, 15u, 16u, 5u);
945  }
946  else
947  {
948  worker->executeFunction(Worker::Function::createStatic(&geometricTransformForNonBijectiveCorrespondencesSubset, Homography::homographyMatrixLinearWithoutOptimations, leftImagePoints, numberLeftImagePoints, rightImagePoints, numberRightImagePoints, correspondences, numberCorrespondences, &randomGenerator, &right_H_left, testCandidates, squarePixelErrorThreshold, indices, &maxValidCorrespondences, &minSquareErrors, (Lock*)&lock, 0u, 0u), 0u, iterations, 15u, 16u, 5u);
949  }
950  }
951  else
952  {
953  if constexpr (tUseSVD)
954  {
955  geometricTransformForNonBijectiveCorrespondencesSubset(Homography::homographyMatrixSVD, leftImagePoints, numberLeftImagePoints, rightImagePoints, numberRightImagePoints, correspondences, numberCorrespondences, &randomGenerator, &right_H_left, testCandidates, squarePixelErrorThreshold, indices, &maxValidCorrespondences, &minSquareErrors, nullptr, 0u, iterations);
956  }
957  else
958  {
959  geometricTransformForNonBijectiveCorrespondencesSubset(Homography::homographyMatrixLinearWithoutOptimations, leftImagePoints, numberLeftImagePoints, rightImagePoints, numberRightImagePoints, correspondences, numberCorrespondences, &randomGenerator, &right_H_left, testCandidates, squarePixelErrorThreshold, indices, &maxValidCorrespondences, &minSquareErrors, nullptr, 0u, iterations);
960  }
961  }
962 
963  if (maxValidCorrespondences < testCandidates || right_H_left.isSingular())
964  {
965  return false;
966  }
967 
968  Homography::normalizeHomography(right_H_left);
969 
970  if constexpr (tRefine)
971  {
972  ocean_assert(indices);
973 
974  Vectors2 validLeftImagePoints;
975  Vectors2 validRightImagePoints;
976 
977  validLeftImagePoints.reserve(indices->size());
978  validRightImagePoints.reserve(indices->size());
979 
980  for (const Index32& index : *indices)
981  {
982  const IndexPair32& correspondencePair = correspondences[index];
983 
984  const unsigned int leftIndex = correspondencePair.first;
985  const unsigned int rightIndex = correspondencePair.second;
986 
987  validLeftImagePoints.push_back(leftImagePoints[leftIndex]);
988  validRightImagePoints.push_back(rightImagePoints[rightIndex]);
989  }
990 
991  SquareMatrix3 optimizedHomography;
992  if (Geometry::NonLinearOptimizationHomography::optimizeHomography<Geometry::Estimator::ET_SQUARE>(right_H_left, validLeftImagePoints.data(), validRightImagePoints.data(), validLeftImagePoints.size(), 9u, optimizedHomography, 20u))
993  {
994  right_H_left = optimizedHomography;
995 
996  if (usedIndices)
997  {
998  indices->clear();
999 
1000  for (size_t n = 0; n < numberCorrespondences; ++n)
1001  {
1002  const IndexPair32& correspondencePair = correspondences[n];
1003 
1004  const unsigned int leftIndex = correspondencePair.first;
1005  const unsigned int rightIndex = correspondencePair.second;
1006 
1007  if (rightImagePoints[rightIndex].sqrDistance(right_H_left * leftImagePoints[leftIndex]) <= squarePixelErrorThreshold)
1008  {
1009  indices->push_back((unsigned int)n);
1010  }
1011  }
1012  }
1013  }
1014  }
1015 
1016  if (usedIndices != nullptr)
1017  {
1018  *usedIndices = std::move(tmpIndices);
1019  }
1020 
1021  return true;
1022 }
1023 
1024 inline bool RANSAC::p3p(const PinholeCamera& pinholeCamera, const ConstIndexedAccessor<ObjectPoint>& objectPointAccessor, const ConstIndexedAccessor<ImagePoint>& imagePointAccessor, RandomGenerator& randomGenerator, const bool useDistortionParameters, HomogenousMatrix4& pose, const unsigned int minimalValidCorrespondences, const bool refine, const unsigned int iterations, const Scalar sqrPixelErrorThreshold, Indices32* usedIndices, Scalar* sqrAccuracy, const Scalar* weights)
1025 {
1026  const AnyCameraPinhole anyCameraPinhole(PinholeCamera(pinholeCamera, useDistortionParameters));
1027 
1028  return p3p(nullptr, anyCameraPinhole, objectPointAccessor, imagePointAccessor, randomGenerator, pose, nullptr, nullptr, minimalValidCorrespondences, refine, iterations, sqrPixelErrorThreshold, usedIndices, sqrAccuracy, weights);
1029 }
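A short pose-estimation sketch using the wrapper above (illustrative; the helper function, the ConstArrayAccessor accessor, and the numeric values are assumptions).

#include "ocean/geometry/RANSAC.h"

using namespace Ocean;

// determines the 6-DOF camera pose from 2D/3D correspondences (hypothetical helper)
bool determinePose(const PinholeCamera& pinholeCamera, const Vectors3& objectPoints, const Vectors2& imagePoints, HomogenousMatrix4& pose)
{
	ocean_assert(objectPoints.size() == imagePoints.size() && objectPoints.size() >= 4);

	RandomGenerator randomGenerator;
	Scalar sqrAccuracy = Numeric::maxValue();

	// apply the camera's distortion parameters, require at least 5 valid correspondences,
	// refine the RANSAC pose, 50 iterations, 5x5 square pixel error threshold
	return Geometry::RANSAC::p3p(pinholeCamera, ConstArrayAccessor<Vector3>(objectPoints), ConstArrayAccessor<Vector2>(imagePoints), randomGenerator, true, pose, 5u, true, 50u, Scalar(5 * 5), nullptr, &sqrAccuracy, nullptr);
}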
1030 
1031 inline bool RANSAC::p3p(const HomogenousMatrix4& world_T_roughCamera, const AnyCamera& camera, const ConstIndexedAccessor<ObjectPoint>& objectPointAccessor, const ConstIndexedAccessor<ImagePoint>& imagePointAccessor, RandomGenerator& randomGenerator, HomogenousMatrix4& world_T_camera, const Vector3& maxPositionOffset, const Scalar maxOrientationOffset, const unsigned int minValidCorrespondences, const bool refine, const unsigned int iterations, const Scalar sqrPixelErrorThreshold, Indices32* usedIndices, Scalar* sqrAccuracy, const Scalar* weights)
1032 {
1033  return p3p(&world_T_roughCamera, camera, objectPointAccessor, imagePointAccessor, randomGenerator, world_T_camera, &maxPositionOffset, &maxOrientationOffset, minValidCorrespondences, refine, iterations, sqrPixelErrorThreshold, usedIndices, sqrAccuracy, weights);
1034 }
1035 
1036 inline bool RANSAC::p3p(const HomogenousMatrix4& initialPose, const PinholeCamera& pinholeCamera, const ConstIndexedAccessor<ObjectPoint>& objectPointAccessor, const ConstIndexedAccessor<ImagePoint>& imagePointAccessor, RandomGenerator& randomGenerator, const bool useDistortionParameters, HomogenousMatrix4& pose, const Vector3& maxPositionOffset, const Scalar maxOrientationOffset, const unsigned int minValidCorrespondences, const bool refine, const unsigned int iterations, const Scalar sqrPixelErrorThreshold, Indices32* usedIndices, Scalar* sqrAccuracy, const Scalar* weights)
1037 {
1038  const AnyCameraPinhole anyCameraPinhole(PinholeCamera(pinholeCamera, useDistortionParameters));
1039 
1040  return p3p(&initialPose, anyCameraPinhole, objectPointAccessor, imagePointAccessor, randomGenerator, pose, &maxPositionOffset, &maxOrientationOffset, minValidCorrespondences, refine, iterations, sqrPixelErrorThreshold, usedIndices, sqrAccuracy, weights);
1041 }
1042 
1043 inline bool RANSAC::p3pZoom(const PinholeCamera& pinholeCamera, const ConstIndexedAccessor<ObjectPoint>& objectPointAccessor, const ConstIndexedAccessor<ImagePoint>& imagePointAccessor, RandomGenerator& randomGenerator, const bool useDistortionParameters, HomogenousMatrix4& pose, Scalar& zoom, const unsigned int minimalValidCorrespondences, const bool refine, const unsigned int iterations, const Scalar sqrPixelErrorThreshold, Indices32* usedIndices, Scalar* sqrAccuracy, const Scalar* weights)
1044 {
1045  return p3pZoom(nullptr, nullptr, pinholeCamera, objectPointAccessor, imagePointAccessor, randomGenerator, useDistortionParameters, pose, zoom, nullptr, nullptr, minimalValidCorrespondences, refine, iterations, sqrPixelErrorThreshold, usedIndices, sqrAccuracy, weights);
1046 }
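Similarly, a sketch for the zoom variant above (illustrative; the helper, the ConstArrayAccessor accessor, and the numeric values are assumptions).

#include "ocean/geometry/RANSAC.h"

using namespace Ocean;

// determines the camera pose together with the camera's zoom factor (hypothetical helper)
bool determinePoseWithZoom(const PinholeCamera& pinholeCamera, const Vectors3& objectPoints, const Vectors2& imagePoints, HomogenousMatrix4& pose, Scalar& zoom)
{
	ocean_assert(objectPoints.size() == imagePoints.size() && objectPoints.size() >= 4);

	RandomGenerator randomGenerator;

	// apply distortion parameters, at least 5 valid correspondences, with refinement, 50 iterations, 5x5 square pixel error threshold
	return Geometry::RANSAC::p3pZoom(pinholeCamera, ConstArrayAccessor<Vector3>(objectPoints), ConstArrayAccessor<Vector2>(imagePoints), randomGenerator, true, pose, zoom, 5u, true, 50u, Scalar(5 * 5), nullptr, nullptr, nullptr);
}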
1047 
1048 inline bool RANSAC::p3pZoom(const HomogenousMatrix4& initialPose, const Scalar initialZoom, const PinholeCamera& pinholeCamera, const ConstIndexedAccessor<ObjectPoint>& objectPointAccessor, const ConstIndexedAccessor<ImagePoint>& imagePointAccessor, RandomGenerator& randomGenerator, const bool useDistortionParameters, HomogenousMatrix4& pose, Scalar& zoom, const Vector3& maxPositionOffset, const Scalar maxOrientationOffset, const unsigned int minValidCorrespondences, const bool refine, const unsigned int iterations, const Scalar sqrPixelErrorThreshold, Indices32* usedIndices, Scalar* sqrAccuracy, const Scalar* weights)
1049 {
1050  ocean_assert(initialZoom > Numeric::eps());
1051  return p3pZoom(&initialPose, &initialZoom, pinholeCamera, objectPointAccessor, imagePointAccessor, randomGenerator, useDistortionParameters, pose, zoom, &maxPositionOffset, &maxOrientationOffset, minValidCorrespondences, refine, iterations, sqrPixelErrorThreshold, usedIndices, sqrAccuracy, weights);
1052 }
1053 
1054 inline bool RANSAC::objectPoint(const AnyCamera& camera, const ConstIndexedAccessor<HomogenousMatrix4>& world_T_cameras, const ConstIndexedAccessor<ImagePoint>& imagePoints, RandomGenerator& randomGenerator, ObjectPoint& objectPoint, const unsigned int iterations, const Scalar maximalSqrError, const unsigned int minValidCorrespondences, const bool onlyFrontObjectPoint, const Estimator::EstimatorType refinementEstimator, Scalar* finalRobustError, Indices32* usedIndices)
1055 {
1056 #if 1
1057  // creating local pointer to avoid Clang compiler bug
1058  const AnyCamera* cameraPointer = &camera;
1059  const ConstElementAccessor<const AnyCamera*> cameraAccessor(world_T_cameras.size(), cameraPointer);
1060 #else
1061  const ConstElementAccessor<const AnyCamera*> cameraAccessor(world_T_cameras.size(), &camera);
1062 #endif
1063 
1064  return RANSAC::objectPoint(cameraAccessor, world_T_cameras, imagePoints, randomGenerator, objectPoint, iterations, maximalSqrError, minValidCorrespondences, onlyFrontObjectPoint, refinementEstimator, finalRobustError, usedIndices);
1065 }
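Finally, a triangulation sketch for the wrapper above (illustrative; the camera poses and observations are assumed to be given, and the helper and the ConstArrayAccessor accessor are assumptions).

#include "ocean/geometry/RANSAC.h"

#include <vector>

using namespace Ocean;

// determines a 3D object point observed in several camera frames (hypothetical helper)
bool triangulateObjectPoint(const AnyCamera& camera, const std::vector<HomogenousMatrix4>& world_T_cameras, const Vectors2& imagePoints, Vector3& objectPoint)
{
	ocean_assert(world_T_cameras.size() == imagePoints.size() && imagePoints.size() >= 2);

	RandomGenerator randomGenerator;
	Scalar finalRobustError = Numeric::maxValue();

	// 20 iterations, 3x3 square pixel error, at least 2 valid observations, object point must lie in front of the cameras
	return Geometry::RANSAC::objectPoint(camera, ConstArrayAccessor<HomogenousMatrix4>(world_T_cameras), ConstArrayAccessor<Vector2>(imagePoints), randomGenerator, objectPoint, 20u, Scalar(3 * 3), 2u, true, Geometry::Estimator::ET_SQUARE, &finalRobustError, nullptr);
}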
1066 
1067 }
1068 
1069 }
1070 
1071 #endif // META_OCEAN_GEOMETRY_RANSAC_H
virtual size_t size() const =0
Returns the number of accessible elements of this accessor object.
This class implements the abstract base class for all AnyCamera objects.
Definition: AnyCamera.h:130
This class implements a specialized AnyCamera object wrapping the actual camera model.
Definition: AnyCamera.h:494
static Caller< void > createStatic(typename StaticFunctionPointerMaker< void, NullClass, NullClass, NullClass, NullClass, NullClass, NullClass, NullClass, NullClass, NullClass, NullClass, NullClass, NullClass, NullClass, NullClass, NullClass, NullClass, NullClass, NullClass, NullClass, NullClass >::Type function)
Creates a new caller container for a static function with no function parameter.
Definition: Caller.h:2876
This class implements an accessor providing direct access to a constant array of elements while all e...
Definition: Accessor.h:942
This class implements a base class for all indexed-based accessors allowing a constant reference acce...
Definition: Accessor.h:241
std::vector< Pattern > Patterns
Definition of a vector holding calibration patterns.
Definition: CameraCalibration.h:136
EstimatorType
Definition of individual robust estimator types.
Definition: Estimator.h:34
@ ET_SQUARE
The standard square error estimator (L2).
Definition: Estimator.h:52
static void normalizeHomography(SquareMatrixT3< T > &homography)
Normalizes a given homography forcing a 1 in the lower right matrix corner.
Definition: Homography.h:466
static bool homographyMatrixSVD(const Vector2 *leftPoints, const Vector2 *rightPoints, const size_t correspondences, SquareMatrix3 &right_H_left)
Calculates the homography (8DOF - translation, rotation, scale, aspect ratio, shear,...
static bool homographyMatrixLinearWithoutOptimations(const Vector2 *leftPoints, const Vector2 *rightPoints, const size_t correspondences, SquareMatrix3 &right_H_left)
Calculates the homography (8DOF - translation, rotation, scale, aspect ratio, shear,...
This class implements several RANSAC functions for pose determination.
Definition: RANSAC.h:40
static bool fundamentalMatrix(const ImagePoint *leftImagePoints, const ImagePoint *rightImagePoints, const size_t correspondences, const unsigned int width, const unsigned int height, SquareMatrix3 &fundamental, const unsigned int testCandidates=8u, const unsigned int iterations=20u, const Scalar errorThreshold=Numeric::weakEps(), Indices32 *usedIndices=nullptr)
Calculates the fundamental matrix by given point correspondences for two stereo images.
static void geometricTransformForNonBijectiveCorrespondencesSubset(const GeometricTransformFunction geometricTransformFunction, const ImagePoint *leftImagePoints, const size_t numberLeftImagePoints, const ImagePoint *rightImagePoints, const size_t numberRightImagePoints, const IndexPair32 *correspondences, const size_t numberCorrespondences, RandomGenerator *randomGenerator, SquareMatrix3 *transformMatrix, const unsigned int testCandidates, const Scalar squarePixelErrorThreshold, Indices32 *usedIndices, unsigned int *maxValidCandidates, Scalar *minSquareErrors, Lock *lock, const unsigned int firstIteration, const unsigned int numberIterations)
Internal function to calculate the geometry transformation between two images transforming the projec...
static bool objectPoint(const ConstIndexedAccessor< const AnyCamera * > &cameras, const ConstIndexedAccessor< HomogenousMatrix4 > &world_T_cameras, const ConstIndexedAccessor< ImagePoint > &imagePoints, RandomGenerator &randomGenerator, ObjectPoint &objectPoint, const unsigned int iterations=20u, const Scalar maximalSqrError=Scalar(3 *3), const unsigned int minValidCorrespondences=2u, const bool onlyFrontObjectPoint=true, const Estimator::EstimatorType refinementEstimator=Estimator::ET_SQUARE, Scalar *finalRobustError=nullptr, Indices32 *usedIndices=nullptr)
Determines the 3D object point for a set of image points observing the same object point under indivi...
static bool geometricTransform(const GeometricTransformFunction geometricTransformFunction, const ImagePoint *leftImagePoints, const ImagePoint *rightImagePoints, const size_t correspondences, RandomGenerator &randomGenerator, SquareMatrix3 &transformMatrix, const unsigned int testCandidates, const unsigned int iterations, const Scalar squarePixelErrorThreshold, Indices32 *usedIndices, Worker *worker)
Calculates the geometry transformation between two images transforming the given image points between...
static bool p3p(const HomogenousMatrix4 *world_T_roughCamera, const AnyCamera &camera, const ConstIndexedAccessor< ObjectPoint > &objectPointAccessor, const ConstIndexedAccessor< ImagePoint > &imagePointAccessor, RandomGenerator &randomGenerator, HomogenousMatrix4 &world_T_camera, const Vector3 *maxPositionOffset, const Scalar *maxOrientationOffset, const unsigned int minValidCorrespondences, const bool refine, const unsigned int iterations, const Scalar sqrPixelErrorThreshold, Indices32 *usedIndices, Scalar *sqrAccuracy, const Scalar *weights)
Calculates a camera pose using the perspective pose problem with three point correspondences.
static bool objectPoint(const AnyCamera &camera, const ConstIndexedAccessor< HomogenousMatrix4 > &world_T_cameras, const ConstIndexedAccessor< ImagePoint > &imagePoints, RandomGenerator &randomGenerator, ObjectPoint &objectPoint, const unsigned int iterations=20u, const Scalar maximalSqrError=Scalar(3 *3), const unsigned int minValidCorrespondences=2u, const bool onlyFrontObjectPoint=true, const Estimator::EstimatorType refinementEstimator=Estimator::ET_SQUARE, Scalar *finalRobustError=nullptr, Indices32 *usedIndices=nullptr)
Determines the 3D object point for a set of image points observing the same object point under indivi...
Definition: RANSAC.h:1054
static bool projectiveReconstructionFrom6PointsIF(const ConstIndexedAccessor< ImagePoints > &imagePointsPerPose, NonconstIndexedAccessor< HomogenousMatrix4 > *posesIF, const unsigned int iterations=20u, const Scalar squarePixelErrorThreshold=Scalar(9), NonconstArrayAccessor< ObjectPoint > *objectPointsIF=nullptr, Indices32 *usedIndices=nullptr, Worker *worker=nullptr)
Calculates inverted flipped camera poses (up to a common 3d projection transformation) for image poin...
static bool homographyMatrices(const ImagePoint *leftImagePoints, const ImagePoint *rightImagePoints, const size_t correspondences, const Vector2 &leftQuadrantCenter, RandomGenerator &randomGenerator, SquareMatrix3 homographies[4], const unsigned int testCandidates=8u, const unsigned int iterations=20u, const Scalar squarePixelErrorThreshold=Scalar(9), Indices32 *usedIndices=nullptr, Worker *worker=nullptr)
Calculates four homographies between two images transforming the given image points between two image...
static bool p3pZoom(const HomogenousMatrix4 *initialPose, const Scalar *initialZoom, const PinholeCamera &pinholeCamera, const ConstIndexedAccessor< ObjectPoint > &objectPointAccessor, const ConstIndexedAccessor< ImagePoint > &imagePointAccessor, RandomGenerator &randomGenerator, const bool useDistortionParameters, HomogenousMatrix4 &pose, Scalar &zoom, const Vector3 *maxPositionOffset, const Scalar *maxOrientationOffset, const unsigned int minValidCorrespondences=5u, const bool refine=true, const unsigned int iterations=20u, const Scalar sqrPixelErrorThreshold=Scalar(5 *5), Indices32 *usedIndices=nullptr, Scalar *sqrAccuracy=nullptr, const Scalar *weights=nullptr)
Calculates a pose including zoom factor using the perspective pose problem with three point correspon...
static bool similarityMatrix(const ImagePoint *leftImagePoints, const ImagePoint *rightImagePoints, const size_t correspondences, RandomGenerator &randomGenerator, SquareMatrix3 &similarity, const unsigned int testCandidates=4u, const unsigned int iterations=20u, const Scalar squarePixelErrorThreshold=Scalar(9), Indices32 *usedIndices=nullptr, Worker *worker=nullptr)
Calculates the similarity transformation (4DOF - translation, rotation, scale) between two images tra...
static bool translation(const ConstIndexedAccessor< Vector2 > &translations, RandomGenerator &randomGenerator, Vector2 &translation, const bool refine=true, const unsigned int iterations=20u, const Scalar maxSqrError=Scalar(5 *5), Scalar *finalError=nullptr, Indices32 *usedIndices=nullptr)
Determines the 2D translation (offset/translation vector) from a set of given 2D vectors.
static bool p3p(const AnyCamera &anyCamera, const ConstIndexedAccessor< ObjectPoint > &objectPointAccessor, const ConstIndexedAccessor< ImagePoint > &imagePointAccessor, RandomGenerator &randomGenerator, HomogenousMatrix4 &world_T_camera, const unsigned int minimalValidCorrespondences=5u, const bool refine=true, const unsigned int iterations=20u, const Scalar sqrPixelErrorThreshold=Scalar(5 *5), Indices32 *usedIndices=nullptr, Scalar *sqrAccuracy=nullptr)
Calculates a pose using the perspective pose problem with three point correspondences using any camer...
static bool orientation(const AnyCamera &camera, const ConstIndexedAccessor< ObjectPoint > &objectPoints, const ConstIndexedAccessor< ImagePoint > &imagePoints, RandomGenerator &randomGenerator, SquareMatrix3 &world_R_camera, const unsigned int minValidCorrespondences=5u, const unsigned int iterations=20u, const Scalar maxSqrError=Scalar(5 *5), Scalar *finalError=nullptr, Indices32 *usedIndices=nullptr)
Determines the 3DOF rotation of a camera pose for a set of given 2D/3D point correspondences by minim...
static unsigned int iterations(const unsigned int model, const Scalar successProbability=Scalar(0.99), const Scalar faultyRate=Scalar(0.2), const unsigned int maximalIterations=1000000u)
Calculates the number of iterations necessary to find an outlier-free model data set.
static bool objectTransformationStereo(const AnyCamera &anyCameraA, const AnyCamera &anyCameraB, const HomogenousMatrix4 &world_T_cameraA, const HomogenousMatrix4 &world_T_cameraB, const ConstIndexedAccessor< Vector3 > &objectPointsA, const ConstIndexedAccessor< Vector3 > &objectPointsB, const ConstIndexedAccessor< Vector2 > &imagePointsA, const ConstIndexedAccessor< Vector2 > &imagePointsB, RandomGenerator &randomGenerator, HomogenousMatrix4 &world_T_object, const unsigned int minimalValidCorrespondences=5u, const bool refine=true, const unsigned int iterations=20u, const Scalar sqrPixelErrorThreshold=Scalar(5 *5), Indices32 *usedIndicesA=nullptr, Indices32 *usedIndicesB=nullptr, Scalar *sqrAccuracy=nullptr, const bool allowMonoObservation=true)
Determines the 6-DOF object transformation between world and object so that the transformation fits w...
static void projectiveReconstructionFrom6PointsIFSubset(const ConstIndexedAccessor< ImagePoints > *imagePointsPerPose, const size_t views, RandomGenerator *randomGenerator, NonconstIndexedAccessor< HomogenousMatrix4 > *posesIF, const Scalar squarePixelErrorThreshold, NonconstArrayAccessor< ObjectPoint > *objectPointsIF, Indices32 *usedIndices, Scalar *minSquareErrors, Lock *lock, const unsigned int firstIteration, const unsigned int numberIterations)
Internal function to calculate inverted flipped camera poses (up to a common 3d projection transforma...
static bool determineCameraCalibrationPlanar(const unsigned int width, const unsigned int height, const CameraCalibration::Patterns &calibrationPatterns, const Vector2 &calibrationPatternBoxSize, const unsigned int testCandidates, PinholeCamera &pinholeCamera, const unsigned int iterations=20u, Scalar *sqrAccuracy=nullptr, Worker *worker=nullptr, Indices32 *usedIndices=nullptr)
Determines the camera calibration for several given camera calibration patterns, each pattern object ...
static bool objectPoint(const AnyCamera &camera, const ConstIndexedAccessor< SquareMatrix3 > &world_R_cameras, const ConstIndexedAccessor< ImagePoint > &imagePoints, RandomGenerator &randomGenerator, ObjectPoint &objectPoint, const Scalar objectPointDistance, const unsigned int iterations=20u, const Scalar maximalError=Scalar(3 *3), const unsigned int minValidCorrespondences=2u, const bool onlyFrontObjectPoint=true, const Estimator::EstimatorType refinementEstimator=Estimator::ET_SQUARE, Scalar *finalError=nullptr, Indices32 *usedIndices=nullptr)
Determines the 3D object point for a set of image points observing the same object point under indivi...
static bool geometricTransformForNonBijectiveCorrespondences(const GeometricTransformFunction geometricTransformFunction, const ImagePoint *leftImagePoints, const size_t numberLeftImagePoints, const ImagePoint *rightImagePoints, const size_t numberRightImagePoints, const IndexPair32 *correspondences, const size_t numberCorrespondences, RandomGenerator &randomGenerator, SquareMatrix3 &transformMatrix, const unsigned int testCandidates, const unsigned int iterations, const Scalar squarePixelErrorThreshold, Indices32 *usedIndices, Worker *worker)
Calculates the geometry transformation between two images transforming the given image points between...
static bool line(const ConstIndexedAccessor< Vector2 > &positions, RandomGenerator &randomGenerator, Line2 &line, const bool refine=true, const unsigned int iterations=20u, const Scalar maxSqrError=Scalar(5 *5), Scalar *finalError=nullptr, Indices32 *usedIndices=nullptr)
Determines the 2D line best fitting to a set of given 2D positions.
static void geometricTransformSubset(const GeometricTransformFunction geometricTransformFunction, const ImagePoint *leftImagePoints, const ImagePoint *rightImagePoints, const size_t correspondences, RandomGenerator *randomGenerator, SquareMatrix3 *transformMatrix, const unsigned int testCandidates, const Scalar squarePixelErrorThreshold, Indices32 *usedIndices, unsigned int *maxValidCandidates, Scalar *minSquareErrors, Lock *lock, const unsigned int firstIteration, const unsigned int numberIterations)
Internal function to calculate the geometry transformation between two images transforming the projec...
static bool affineMatrix(const ImagePoint *leftImagePoints, const ImagePoint *rightImagePoints, const size_t correspondences, RandomGenerator &randomGenerator, SquareMatrix3 &right_A_left, const unsigned int testCandidates=6u, const unsigned int iterations=20u, const Scalar squarePixelErrorThreshold=Scalar(3 *3), Indices32 *usedIndices=nullptr, Worker *worker=nullptr)
Calculates the affine transformation (6DOF - translation, rotation, scale, aspect ratio,...
static bool direction(const ConstIndexedAccessor< Vector2 > &directions, RandomGenerator &randomGenerator, Vector2 &direction, const bool acceptOppositeDirections, const bool refine=true, const unsigned int iterations=20u, const Scalar maxAngle=Numeric::deg2rad(5), Scalar *finalError=nullptr, Indices32 *usedIndices=nullptr)
Determines the 2D direction from a set of given 2D directions provided as unit vectors.
static bool plane(const Plane3 &initialPlane, const ConstIndexedAccessor< ObjectPoint > &objectPoints, RandomGenerator &randomGenerator, Plane3 &plane, const unsigned int iterations=20u, const Scalar maximalNormalOrientationOffset=Numeric::deg2rad(30), const Scalar medianDistanceFactor=Scalar(0.1), const unsigned int minValidCorrespondences=3u, const Estimator::EstimatorType refinementEstimator=Estimator::ET_SQUARE, Scalar *finalError=nullptr, Indices32 *usedIndices=nullptr)
Optimizes an already known 3D plane by minimizing the error between the plane and a set of given 3D o...
static bool homographyMatrixForNonBijectiveCorrespondences(const ImagePoint *leftImagePoints, const size_t numberLeftImagePoints, const ImagePoint *rightImagePoints, const size_t numberRightImagePoints, const IndexPair32 *correspondences, const size_t numberCorrespondences, RandomGenerator &randomGenerator, SquareMatrix3 &right_H_left, const unsigned int testCandidates=8u, const bool refine=true, const unsigned int iterations=20u, const Scalar squarePixelErrorThreshold=Scalar(9), Indices32 *usedIndices=nullptr, Worker *worker=nullptr, const bool useSVD=true)
Calculates the homography between two images transforming the given image points between two images.
Definition: RANSAC.h:800
static bool plane(const ConstIndexedAccessor< ObjectPoint > &objectPoints, RandomGenerator &randomGenerator, Plane3 &plane, const unsigned int iterations=20u, const Scalar medianDistanceFactor=Scalar(0.1), const unsigned int minValidCorrespondences=3u, const Estimator::EstimatorType refinementEstimator=Estimator::ET_SQUARE, Scalar *finalError=nullptr, Indices32 *usedIndices=nullptr)
Determines a 3D plane best matching to a set of given 3D object points.
static void determineCameraCalibrationPlanarIteration(const unsigned int width, const unsigned int height, const ConstIndexedAccessor< Vectors3 > *objectPointGroups, const ConstIndexedAccessor< Vectors2 > *imagePointGroups, Indices32 *indices, PinholeCamera *pinholeCamera, Scalar *sqrAccuracy)
Performs execution of a camera calibration for a given subset of the entire data provided.
static bool homographyMatrix(const ImagePoint *leftImagePoints, const ImagePoint *rightImagePoints, const size_t correspondences, RandomGenerator &randomGenerator, SquareMatrix3 &homography, const unsigned int testCandidates=8u, const bool refine=true, const unsigned int iterations=20u, const Scalar squarePixelErrorThreshold=Scalar(9), Indices32 *usedIndices=nullptr, Worker *worker=nullptr, const bool useSVD=true)
Calculates the homography between two images, transforming the given image points from one image to the other.
Definition: RANSAC.h:769
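A sketch for the bijective variant with one-to-one correspondences (include as above; helper name illustrative, and an optional Worker can be forwarded for multi-core execution).

// Hypothetical helper: estimates the homography mapping left image points to right image points.
bool estimateHomography(const ImagePoint* leftImagePoints, const ImagePoint* rightImagePoints, const size_t correspondences, SquareMatrix3& homography, Worker* worker = nullptr)
{
	RandomGenerator randomGenerator;

	// eight correspondences per candidate, refinement enabled, 3-pixel error threshold (squared), SVD-based determination
	return RANSAC::homographyMatrix(leftImagePoints, rightImagePoints, correspondences, randomGenerator, homography, 8u, true /*refine*/, 20u, Scalar(9), nullptr, worker, true /*useSVD*/);
}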
static bool extrinsicMatrix(const PinholeCamera &leftCamera, const PinholeCamera &rightCamera, const ImagePoint *leftImagePoints, const ImagePoint *rightImagePoints, const size_t correspondences, HomogenousMatrix4 &transformation, const unsigned int testCandidates=8u, const unsigned int iterations=20u, const Scalar squarePixelErrorThreshold=Scalar(9), const Box3 &maxTranslation=Box3(), const Scalar maxRotation=Numeric::pi(), Indices32 *usedIndices=nullptr)
Calculates the extrinsic camera matrix from given point correspondences between two stereo images.
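A sketch for recovering the relative transformation between two calibrated views (include as above; cameras, points, and helper name are illustrative).

// Hypothetical helper: determines the transformation between two stereo views from point correspondences,
// without restricting the translation (empty Box3) or the rotation (maximal angle PI).
bool estimateExtrinsic(const PinholeCamera& leftCamera, const PinholeCamera& rightCamera, const ImagePoint* leftImagePoints, const ImagePoint* rightImagePoints, const size_t correspondences, HomogenousMatrix4& transformation)
{
	return RANSAC::extrinsicMatrix(leftCamera, rightCamera, leftImagePoints, rightImagePoints, correspondences, transformation, 8u, 20u, Scalar(9), Box3(), Numeric::pi());
}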
static bool p3pZoom(const PinholeCamera &pinholeCamera, const ConstIndexedAccessor< ObjectPoint > &objectPointAccessor, const ConstIndexedAccessor< ImagePoint > &imagePointAccessor, RandomGenerator &randomGenerator, const bool useDistortionParameters, HomogenousMatrix4 &pose, Scalar &zoom, const unsigned int minimalValidCorrespondences=5u, const bool refine=true, const unsigned int iterations=20u, const Scalar sqrPixelErrorThreshold=Scalar(5 * 5), Indices32 *usedIndices=nullptr, Scalar *sqrAccuracy=nullptr, const Scalar *weights=nullptr)
Calculates a pose including zoom factor using the perspective pose problem with three point correspon...
Definition: RANSAC.h:1043
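A sketch for determining a camera pose together with a zoom factor from 2D/3D correspondences (include and accessor assumption as above; helper name illustrative).

// Hypothetical helper: determines a 6-DOF pose plus zoom factor for a pinhole camera.
bool estimatePoseWithZoom(const PinholeCamera& pinholeCamera, const std::vector<ObjectPoint>& objectPoints, const std::vector<ImagePoint>& imagePoints, HomogenousMatrix4& pose, Scalar& zoom)
{
	RandomGenerator randomGenerator;
	Scalar sqrAccuracy = Numeric::maxValue(); // receives the average square pixel error of the final pose

	return RANSAC::p3pZoom(pinholeCamera, ConstArrayAccessor<ObjectPoint>(objectPoints), ConstArrayAccessor<ImagePoint>(imagePoints), randomGenerator, true /*useDistortionParameters*/, pose, zoom, 5u, true /*refine*/, 20u, Scalar(5 * 5), nullptr, &sqrAccuracy);
}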
This class implements an infinite line in 2D space.
Definition: Line2.h:83
This class implements a recursive lock object.
Definition: Lock.h:31
This class implements an accessor providing direct access to an array of elements.
Definition: Accessor.h:707
This class implements a base class for all index-based accessors allowing a non-constant reference ...
Definition: Accessor.h:284
static constexpr T deg2rad(const T deg)
Converts deg to rad.
Definition: Numeric.h:3232
static constexpr T weakEps()
Returns a weak epsilon.
static constexpr T pi()
Returns PI, which is equivalent to 180 degrees.
Definition: Numeric.h:926
static constexpr T eps()
Returns a small epsilon.
static constexpr T maxValue()
Returns the max scalar value.
Definition: Numeric.h:3244
This class implements a generator for random numbers.
Definition: RandomGenerator.h:42
const T * data() const
Returns a pointer to the internal values.
Definition: SquareMatrix3.h:1046
bool isSingular() const
Returns whether this matrix is singular (and thus cannot be inverted).
Definition: SquareMatrix3.h:1341
static std::vector< T > subset(const std::vector< T > &objects, const std::vector< TIndex > &indices)
Extracts a subset of a given set of objects using an index vector holding the indices of all ob...
Definition: Subset.h:751
This class implements a worker able to distribute function calls over different threads.
Definition: Worker.h:33
bool executeFunction(const Function &function, const unsigned int first, const unsigned int size, const unsigned int firstIndex=(unsigned int)(-1), const unsigned int sizeIndex=(unsigned int)(-1), const unsigned int minimalIterations=1u, const unsigned int threadIndex=(unsigned int)(-1))
Executes a callback function separable by two function parameters.
unsigned int sqrDistance(const char first, const char second)
Returns the square distance between two values.
Definition: base/Utilities.h:1089
std::vector< Index32 > Indices32
Definition of a vector holding 32 bit index values.
Definition: Base.h:96
std::pair< Index32, Index32 > IndexPair32
Definition of a pair holding 32 bit indices.
Definition: Base.h:138
uint32_t Index32
Definition of a 32 bit index value.
Definition: Base.h:84
float Scalar
Definition of a scalar type.
Definition: Math.h:128
BoxT3< Scalar > Box3
Definition of the Box3 object, depending on the OCEAN_MATH_USE_SINGLE_PRECISION either with single or...
Definition: Box3.h:21
VectorT3< Scalar > Vector3
Definition of a 3D vector.
Definition: Vector3.h:22
std::vector< Vector2 > Vectors2
Definition of a vector holding Vector2 objects.
Definition: Vector2.h:64
PinholeCameraT< Scalar > PinholeCamera
Definition of a pinhole camera object with Scalar precision.
Definition: PinholeCamera.h:32
The namespace covering the entire Ocean framework.
Definition: Accessor.h:15