Ocean — Tracking library
Solver3.h (generated Doxygen source listing of the header)
Go to the documentation of this file.
1 /*
2  * Copyright (c) Meta Platforms, Inc. and affiliates.
3  *
4  * This source code is licensed under the MIT license found in the
5  * LICENSE file in the root directory of this source tree.
6  */
7 
8 #ifndef META_OCEAN_TRACKING_SOLVER_3_H
9 #define META_OCEAN_TRACKING_SOLVER_3_H
10 
13 
14 #include "ocean/base/ShiftVector.h"
15 #include "ocean/base/Subset.h"
16 #include "ocean/base/Worker.h"
17 
18 #include "ocean/cv/SubRegion.h"
19 
25 #include "ocean/geometry/RANSAC.h"
27 
28 #include "ocean/math/Vector2.h"
30 
31 namespace Ocean
32 {
33 
34 namespace Tracking
35 {
36 
37 /**
38  * This class implements a Structure From Motion solver for unconstrained 3D object points and unconstrained 6-DOF camera poses.
39  * @ingroup tracking
40  */
41 class OCEAN_TRACKING_EXPORT Solver3
42 {
43  public:
44 
45  /**
46  * Definition of individual camera motion types.
47  */
49  {
50  /// Invalid camera motion.
51  CM_INVALID = 0,
52  /// Static camera.
53  CM_STATIC = (1 << 0),
54  /// Rotational camera motion (panning or tilting).
55  CM_ROTATIONAL = (1 << 1),
56  /// Translational camera motion.
57  CM_TRANSLATIONAL = (1 << 2),
58  /// Tiny rotational camera motion.
59  CM_ROTATIONAL_TINY = CM_ROTATIONAL | (1 << 3),
60  /// Moderate rotational camera motion.
61  CM_ROTATIONAL_MODERATE = CM_ROTATIONAL | (1 << 4),
62  /// Significant rotational camera motion.
63  CM_ROTATIONAL_SIGNIFICANT = CM_ROTATIONAL | (1 << 5),
64  /// Tiny translational camera motion.
65  CM_TRANSLATIONAL_TINY = CM_TRANSLATIONAL | (1 << 6),
66  /// Moderate translational camera motion.
67  CM_TRANSLATIONAL_MODERATE = CM_TRANSLATIONAL | (1 << 7),
68  /// Significant translational camera motion.
69  CM_TRANSLATIONAL_SIGNIFICANT = CM_TRANSLATIONAL | (1 << 8),
70  /// An unknown (arbitrary) camera motion with rotational and translational motion.
71  CM_UNKNOWN = CM_ROTATIONAL | CM_TRANSLATIONAL | (1 << 9)
72  };
73 
74  /**
75  * Definition of individual methods to determine the accuracy of object points.
76  */
78  {
79  /// Invalid method.
81  /// Determination of the minimal absolute cosine values between the mean observation direction and each observation direction.
83  /// Determination of the mean absolute cosine value between the mean observation direction and each observation direction.
85  /// Determination of the median absolute cosine value between the mean observation direction and each observation direction.
86  AM_MEAN_DIRECTION_MEDIAN_COSINE
87  };
88 
89  /**
90  * Definition of a class allowing to define a relative threshold with lower and upper boundary for individual reference values.
91  */
93  {
94  public:
95 
96  /**
97  * Creates a new threshold object.
98  * @param lowerBoundary The lower boundary of the resulting threshold value
99  * @param factor The factor which will be applied to an external reference values to defined an individual threshold
100  * @param upperBoundary The upper boundary of the resulting threshold value
101  */
102  inline RelativeThreshold(const unsigned int lowerBoundary, const Scalar factor, const unsigned int upperBoundary);
103 
104  /**
105  * Returns the lower boundary of this object.
106  * @return Lower boundary
107  */
108  inline unsigned int lowerBoundary() const;
109 
110  /**
111  * Returns the factor of this object.
112  * @return Relative factor
113  */
114  inline Scalar factor() const;
115 
116  /**
117  * Returns the upper boundary of this object.
118  * @return Upper boundary
119  */
120  inline unsigned int upperBoundary() const;
121 
122  /**
123  * Returns the relative threshold for a given reference value.
124  * @param value The reference value for which the individual threshold will be determined
125  * @return The relative threshold: min[minmax(lowerBoundary(), value * factor(), upperBoundary()), value]
126  */
127  inline unsigned int threshold(const unsigned int value) const;
128 
129  /**
130  * Returns whether for a given reference value a valid relative threshold can be determined.
131  * @param value The reference value which will be checked
132  * @param threshold Optional resulting relative threshold for the given reference value: min[minmax(lowerBoundary(), value * factor(), upperBoundary()), value]
133  * @return True, if value >= lowerBoundary()
134  */
135  inline bool hasValidThreshold(const unsigned int value, unsigned int* threshold = nullptr) const;
136 
137  /**
138  * Returns whether for a given reference value a valid relative threshold can be determined.
139  * @param value The reference value which will be checked
140  * @param threshold Optional resulting relative threshold for the given reference value: min[minmax(max[lowerBoundary(), tLowerBoundary], value * factor(), upperBoundary()), value]
141  * @return True, if value >= max(lowerBoundary(), tLowerBoundary)
142  * @tparam tLowerBoundary An explicit lower boundary which is checked in parallel to the internal lower boundary
143  */
144  template <unsigned int tLowerBoundary>
145  inline bool hasValidThreshold(const unsigned int value, unsigned int* threshold = nullptr) const;
146 
147  protected:
148 
149  /// The lower boundary of the relative threshold.
151 
152  /// The factor defining the relative threshold.
154 
155  /// The upper boundary of the relative threshold.
157  };
158 
159  /**
160  * This class implements an accessor for groups of pairs of object point ids and image points.
161  * Each group represents one camera pose, while the pairs within the groups represent the visual information in the individual camera poses (camera frames).<br>
162  * The groups of pairs have the following structure, each group (camera pose) can have an arbitrary number of pairs:
163  * <pre>
164  * poseIndex_0 -> (objectPointId_0, imagePoint_0)
165  * -> (objectPointId_1, imagePoint_4)
166  * -> (objectPointId_5, imagePoint_9)
167  *
168  * poseIndex_1 -> (objectPointId_0, imagePoint_2)
169  *
170  * poseIndex_2 -> (objectPointId_2, imagePoint_3)
171  * (objectPointId_1, imagePoint_8)
172  *
173  * poseIndex_3 -> (objectPointId_9, imagePoint_5)
174  * </pre>
175  */
// NOTE(review): the class declaration line (original line 176) was stripped by the documentation export
// (it was a hyperlink); presumably "class PoseToObjectPointIdImagePointCorrespondenceAccessor : public <base>"
// — the base class cannot be recovered from this listing, confirm against the original header.
177  {
178  public:
179 
180  /**
181  * Copy constructor.
182  * @param accessor The accessor object to copy
183  */
// NOTE(review): copy-constructor declaration (original line 184) stripped by the export.
185 
186  /**
187  * Move constructor.
188  * @param accessor The accessor object to move
189  */
// NOTE(review): move-constructor declaration (original line 190) stripped by the export.
191 
192  /**
193  * Creates a new accessor object and extracts the necessary information from a given database.
194  * Beware: Due to the given threshold of minimal visible object points the number of resulting groups can be smaller than the number of given pose ids.<br>
195  * @param database The database providing all information of this accessor
196  * @param poseIds The ids of the camera poses which will be used to extract the individual groups, at most one group for each provided camera pose (can be less due to the defined threshold of minimal visible object points)
197  * @param objectPointIds The ids of the object points for which the individual correspondences will be determined, if an object point is not visible in a given camera pose the associated correspondence (between object point id and image point) will not be part of the associated group
198  * @param minimalVisibleObjectPoints The minimal number of object points (from the set of given object point ids) which must be visible in a camera pose so that a group will be created for that pose
199  * @param validPoseIndices Optional resulting indices of valid pose ids (the indices of the specified pose ids for which a corresponding group has been created)
200  * @param usedObjectPointIndices Optional resulting indices of the used object points (the indices of the specified object point ids which are used at least in one group)
201  */
202  PoseToObjectPointIdImagePointCorrespondenceAccessor(const Database& database, const Indices32& poseIds, const Indices32& objectPointIds, const unsigned int minimalVisibleObjectPoints = 10u, Indices32* validPoseIndices = nullptr, Indices32* usedObjectPointIndices = nullptr);
203 
204  /**
205  * Assignment operator.
206  * @param accessor The accessor object to assign
207  * @return Reference to this object
208  */
// NOTE(review): assignment-operator declaration (original line 209) stripped by the export.
210 
211  /**
212  * Move operator.
213  * @param accessor The accessor object to move
214  * @return Reference to this object
215  */
// NOTE(review): move-operator declaration (original line 216) stripped by the export.
217  };
218 
219  /**
220  * This class implements an accessor for groups of pairs of pose indices (not pose ids) and image points.
221  * Each group represents one object point, while the pairs within the groups represent the visual information in the individual camera poses (camera frames).<br>
222  * The groups of pairs have the following structure, each group (object point) can have an arbitrary number of pairs:
223  * <pre>
224  * objectPointIndex_0 -> (poseIndex_0, imagePoint_0)
225  * -> (poseIndex_1, imagePoint_4)
226  * -> (poseIndex_5, imagePoint_9)
227  *
228  * objectPointIndex_1 -> (poseIndex_0, imagePoint_2)
229  *
230  * objectPointIndex_2 -> (poseIndex_2, imagePoint_3)
231  * (poseIndex_1, imagePoint_8)
232  *
233  * objectPointIndex_3 -> (poseIndex_9, imagePoint_5)
234  * </pre>
235  */
// NOTE(review): the class declaration line (original line 236) was stripped by the documentation export
// (it was a hyperlink); presumably "class ObjectPointToPoseImagePointCorrespondenceAccessor : public <base>"
// — the base class cannot be recovered from this listing, confirm against the original header.
237  {
238  public:
239 
240  /**
241  * Copy constructor.
242  * @param accessor The accessor object to copy
243  */
// NOTE(review): copy-constructor declaration (original line 244) stripped by the export.
245 
246  /**
247  * Move constructor.
248  * @param accessor The accessor object to move
249  */
// NOTE(review): move-constructor declaration (original line 250) stripped by the export.
251 
252  /**
253  * Creates a new accessor object by moving a subset of a given accessor object.
254  * @param accessor The accessor object from which the subset will be moved
255  * @param validGroupIndices The indices of the valid groups which will be copied
256  */
// NOTE(review): subset-constructor declaration (original line 257) stripped by the export.
258 
259  /**
260  * Creates a new accessor object and extracts the necessary information from a given database.
261  * This constructor focuses on the provided pose ids and tries to use as many object points as possible as long as the provided thresholds do not violate any condition.<br>
262  * Beware: Due to the given threshold of minimal visible observations per key frame the number of resulting groups can be smaller than the number of given object point ids.<br>
263  * The pose indices of the individual (group) pairs provide the index in relation to the given set of pose ids, not the pose id itself.<br>
264  * @param database The database providing all information of this accessor
265  * @param poseIds The ids of the camera poses for which the individual correspondences will be determined, if an object is not visible in a given camera pose the associated correspondence (between camera pose index and image point) will not be part of the associated group
266  * @param objectPointCandidateIds The ids of the object point candidates which will be used to extract the individual groups, at most one group for each provided candidate (can be less due to the defined threshold of minimal observations per keyframes)
267  * @param minimalObservationsInKeyframes The minimal number of observations (from the set of given pose ids) which must exist for one object point so that a group will be created for that object point, with range [1, infinity)
268  * @param validObjectPoints Optional resulting indices of valid object point ids (the indices of the specified object point ids for which a corresponding group has been created)
269  */
270  ObjectPointToPoseImagePointCorrespondenceAccessor(const Database& database, const Indices32& poseIds, const Indices32& objectPointCandidateIds, const unsigned int minimalObservationsInKeyframes = 2u, Indices32* validObjectPoints = nullptr);
271 
272  /**
273  * Creates a new accessor object and extracts the necessary information from a given database.
274  * This constructor focuses on the provided object points and guarantees to create one group for each object point.<br>
275  * However, each group may have fewer pairs than specified by the thresholds if an object point does not provide enough observations.<br>
276  * The pose indices of the individual (group) pairs provide the index in relation to the given set of pose ids, not the pose id itself.
277  * @param database The database providing all information of this accessor
278  * @param lowerPoseId The id of the lowest pose which can be used as possible key frame
279  * @param upperPoseId The id of the last pose which can be used as possible key frame, with range [lowerPoseId, infinity)
280  * @param objectPointIds The ids of the object points for which groups will be created
281  * @param minimalObservationsInKeyframes The minimal number of observations which should be found for each object point, if an object point does not have so many observations fewer observations will be used, with range [2, infinity)
282  * @param minimalKeyFrames The minimal number of key frames which will be used
283  * @param usedKeyFrameIds Optional resulting ids of all used key frames
284  */
285  ObjectPointToPoseImagePointCorrespondenceAccessor(const Database& database, const Index32 lowerPoseId, const Index32 upperPoseId, const Indices32& objectPointIds, const unsigned int minimalObservationsInKeyframes = 2u, const unsigned int minimalKeyFrames = 2u, Indices32* usedKeyFrameIds = nullptr);
286 
287  /**
288  * Assignment operator.
289  * @param accessor The accessor to assign
290  * @return The reference to this object
291  */
// NOTE(review): assignment-operator declaration (original line 292) stripped by the export.
293 
294  /**
295  * Move operator.
296  * @param accessor The accessor to move
297  * @return The reference to this object
298  */
// NOTE(review): move-operator declaration (original line 299) stripped by the export.
300 
301  };
302 
303  protected:
304 
305  /**
306  * Definition of a map mapping 32 bit indices to 32 bit indices.
307  */
308  typedef std::map<unsigned int, unsigned int> IndexMap32;
309 
310  /**
311  * Definition of a shift vector holding groups of image points.
312  */
314 
315  /**
316  * This class implements an accessor providing access to observation pairs (the observation of a projected object point in camera poses/frames) for a set of object points.
317  * This accessor is designed for specific object points which are all visible in some camera poses/frames.<br>
318  * The accessor provides the following structure:
319  * <pre>
320  * objectpoint_0 -> (observation_0, imagepoint_a)
321  * -> (observation_1, imagepoint_b)
322  * -> (observation_2, imagepoint_c)
323  *
324  * objectpoint_1 -> (observation_0, imagepoint_d)
325  * -> (observation_1, imagepoint_e)
326  * -> (observation_2, imagepoint_f)
327  * </pre>
328  * Thus, we have n object points which are all visible in m camera poses.
329  */
// NOTE(review): the class declaration line (original line 330) was stripped by the documentation export
// (it was a hyperlink); presumably "class ObjectPointToPoseIndexImagePointCorrespondenceAccessor : public <base>"
// — the base class cannot be recovered from this listing, confirm against the original header.
331  {
332  public:
333 
334  /**
335  * Copy constructor.
336  * @param accessor The accessor to copy
337  */
// NOTE(review): copy-constructor declaration (original line 338) stripped by the export.
339 
340  /**
341  * Move constructor.
342  * @param accessor The accessor to move
343  */
// NOTE(review): move-constructor declaration (original line 344) stripped by the export.
345 
346  /**
347  * Creates a new accessor object.
348  * @param imagePointGroups The image point groups from which the individual image points are extracted
349  * @param posesSubsetBlock Indices defining a subset of the image point groups while the indices are defined as if the image groups start with the frame index 0
350  */
351  ObjectPointToPoseIndexImagePointCorrespondenceAccessor(const std::vector<Vectors2>& imagePointGroups, const Indices32& posesSubsetBlock);
352 
353  /**
354  * Creates a new accessor object.
355  * @param imagePointGroups The image point groups from which the individual image points are extracted
356  * @param posesSubsetBlock Indices defining a subset of the image point groups while the indices are defined as if the image groups start with the frame index 0
357  */
// NOTE(review): constructor declaration (original line 358) stripped by the export — by analogy with
// line 374 presumably the ShiftVector<Vectors2> overload of the constructor above; confirm.
359 
360  /**
361  * Creates a new accessor object.
362  * @param imagePointGroups The image point groups from which the individual image points are extracted
363  * @param posesSubsetBlock Indices defining a subset of the image point groups while the indices are defined as if the image groups start with the frame index 0
364  * @param objectPointsSubset Indices defining a subset of the object points (within the image point groups)
365  */
366  ObjectPointToPoseIndexImagePointCorrespondenceAccessor(const std::vector<Vectors2>& imagePointGroups, const Indices32& posesSubsetBlock, const Indices32& objectPointsSubset);
367 
368  /**
369  * Creates a new accessor object.
370  * @param imagePointGroups The image point groups from which the individual image points are extracted
371  * @param posesSubsetBlock Indices defining a subset of the image point groups while the indices are defined as if the image groups start with the frame index 0
372  * @param objectPointsSubset Indices defining a subset of the object points (within the image point groups)
373  */
374  ObjectPointToPoseIndexImagePointCorrespondenceAccessor(const ShiftVector<Vectors2>& imagePointGroups, const Indices32& posesSubsetBlock, const Indices32& objectPointsSubset);
375 
376  /**
377  * Assignment operator.
378  * @param accessor The accessor to assign
379  * @return Reference to this object
380  */
// NOTE(review): assignment-operator declaration (original line 381) stripped by the export.
382 
383  /**
384  * Move operator.
385  * @param accessor The accessor to move
386  * @return Reference to this object
387  */
// NOTE(review): move-operator declaration (original line 388) stripped by the export.
389  };
390 
391  /**
392  * Definition of a pair combining a pose id and an error parameter.
393  */
394  typedef std::pair<Index32, Scalar> PoseErrorPair;
395 
396  /**
397  * Definition of a vector holding pose error pairs.
398  */
399  typedef std::vector<PoseErrorPair> PoseErrorPairs;
400 
401  public:
402 
403  /**
404  * Determines the initial positions of 3D object points in a database if no camera poses or structure information is known.
405  * Feature points are tracked from frame to frame within a defined camera frame range as long as the number of tracked points fall under a defined threshold.<br>
406  * Key frames are selected from this range of (tracked) camera frames with representative geometry information.<br>
407  * This function can be configured so that (perfectly) static image points located in all frames at the same position are identified not used for calculations.<br>
408  * Static image points can be located (visible) at static logos (bands) in the frames so that these image points must not be used.
409  * @param database The database defining the topology of 3D object points and corresponding 2D image points
410  * @param pinholeCamera The pinhole camera profile which will be applied
411  * @param randomGenerator a random generator object
412  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
413  * @param startFrame The index of the frame from which the algorithm will start, with range [lowerFrame, upperFrame]
414  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range (lowerFrame, infinity)
415  * @param maximalStaticImagePointFilterRatio The maximal ratio between (perfectly) static image points and the overall number of image points so that these static image points will be filtered and not used, with ratio [0, 1), 0 to avoid any filtering
416  * @param initialObjectPoints The resulting initial 3D positions of object points that could be extracted
417  * @param initialObjectPointIds The resulting ids of the resulting object points, one id for each resulting object point
418  * @param pointsThreshold The threshold of image points which must be visible in each camera frame
419  * @param minimalKeyFrames The minimal number of keyframes that will be extracted
420  * @param maximalKeyFrames The maximal number of keyframes that will be extracted
421  * @param maximalSqrError The maximal square distance between an image points and a projected object point
422  * @param usedPoseIds Optional resulting ids of all camera poses which have been used to determine the initial object points
423  * @param finalSqrError Optional resulting final average error
424  * @param finalImagePointDistance Optional resulting final average distance between the individual image points and the center of these image points
425  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
426  * @return True, if succeeded
427  */
428  static bool determineInitialObjectPointsFromSparseKeyFrames(const Database& database, const PinholeCamera& pinholeCamera, RandomGenerator& randomGenerator, const unsigned int lowerFrame, const unsigned int startFrame, const unsigned int upperFrame, const Scalar maximalStaticImagePointFilterRatio, Vectors3& initialObjectPoints, Indices32& initialObjectPointIds, const RelativeThreshold& pointsThreshold = RelativeThreshold(20u, Scalar(0.5), 100u), const unsigned int minimalKeyFrames = 3u, const unsigned int maximalKeyFrames = 10u, const Scalar maximalSqrError = Scalar(3.5 * 3.5), Indices32* usedPoseIds = nullptr, Scalar* finalSqrError = nullptr, Scalar* finalImagePointDistance = nullptr, bool* abort = nullptr);
429 
430  /**
431  * Determines the initial positions of 3D object points in a database if no camera poses or structure information is known.
432  * Feature points are tracked from frame to frame within a defined camera frame range as long as the number of tracked points fall under a defined threshold.<br>
433  * The entire range of frames with tracked points are use to determine the locations of the 3D object points.<br>
434  * This function can be configured so that (perfectly) static image points located in all frames at the same position are identified not used for calculations.<br>
435  * Static image points can be located (visible) at static logos (bands) in the frames so that these image points must not be used.
436  * @param database The database defining the topology of 3D object points and corresponding 2D image points
437  * @param pinholeCamera The pinhole camera profile which will be applied
438  * @param randomGenerator a random generator object
439  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
440  * @param startFrame The index of the frame from which the algorithm will start, with range [lowerFrame, upperFrame]
441  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range (lowerFrame, infinity)
442  * @param regionOfInterest Optional region of interest defining a specific area in the start frame so that the object points lying in the region are handled with higher priority, an invalid region to avoid any special region of interest handling
443  * @param maximalStaticImagePointFilterRatio The maximal ratio between (perfectly) static image points and the overall number of image points so that these static image points will be filtered and not used, with ratio [0, 1), 0 to avoid any filtering
444  * @param initialObjectPoints The resulting initial 3D positions of object points that could be extracted
445  * @param initialObjectPointIds The resulting ids of the resulting object points, one id for each resulting object point
446  * @param pointsThreshold The threshold of image points which must be visible in each camera frame
447  * @param minimalTrackedFramesRatio The minimal number of frames that finally have been tracked (the entire range of frames in which the object points are visible) defined as ratio of the entire frame range, with range (0, 1], does not have any meaning if no start frame or region of interest is defined
448  * @param minimalKeyFrames The minimal number of keyframes that will be extracted
449  * @param maximalKeyFrames The maximal number of keyframes that will be extracted
450  * @param maximalSqrError The maximal square distance between an image points and a projected object point
451  * @param usedPoseIds Optional resulting ids of all camera poses which have been used to determine the initial object points
452  * @param finalSqrError Optional resulting final average error
453  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
454  * @return True, if succeeded
455  */
456  static bool determineInitialObjectPointsFromDenseFrames(const Database& database, const PinholeCamera& pinholeCamera, RandomGenerator& randomGenerator, const unsigned int lowerFrame, const unsigned int startFrame, const unsigned int upperFrame, const CV::SubRegion& regionOfInterest, const Scalar maximalStaticImagePointFilterRatio, Vectors3& initialObjectPoints, Indices32& initialObjectPointIds, const RelativeThreshold& pointsThreshold = RelativeThreshold(20u, Scalar(0.5), 100u), const Scalar minimalTrackedFramesRatio = Scalar(0.1), const unsigned int minimalKeyFrames = 3u, const unsigned int maximalKeyFrames = 10u, const Scalar maximalSqrError = Scalar(3.5 * 3.5), Indices32* usedPoseIds = nullptr, Scalar* finalSqrError = nullptr, bool* abort = nullptr);
457 
458  /**
459  * Determines the initial positions of 3D object points in a database if no camera poses or structure information is known.
460  * Feature points are tracked from frame to frame within a defined camera frame range as long as the number of tracked points fall under a defined threshold.<br>
461  * Key frames are selected from this range of (tracked) camera frames with representative geometry information.<br>
462  * This function internally applies several individual iterations beginning from individual start frames so that the best result within the entire frame range is returned.<br>
463  * @param database The database defining the topology of 3D object points and corresponding 2D image points
464  * @param steps The number of steps that are applied within the defined frame range, with range [1, upperFrame - lowerFrame + 1]
465  * @param pinholeCamera The pinhole camera profile which will be applied
466  * @param randomGenerator A random generator object
467  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
468  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range (lowerFrame, infinity)
469  * @param maximalStaticImagePointFilterRatio The maximal ratio between (perfectly) static image points and the overall number of image points so that these static image points will be filtered and not used, with ratio [0, 1), 0 to avoid any filtering
470  * @param initialObjectPoints The resulting initial 3D positions of object points that could be extracted
471  * @param initialObjectPointIds The resulting ids of the resulting object points, one id for each resulting object point
472  * @param pointsThreshold The threshold of image points which must be visible in each camera frame
473  * @param minimalKeyFrames The minimal number of keyframes that will be extracted
474  * @param maximalKeyFrames The maximal number of keyframes that will be extracted
475  * @param maximalSqrError The maximal square distance between an image points and a projected object point
476  * @param usedPoseIds Optional resulting ids of all camera poses which have been used to determine the initial object points
477  * @param worker Optional worker object to distribute the computation
478  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
479  * @return True, if succeeded
480  */
481  static bool determineInitialObjectPointsFromSparseKeyFramesBySteps(const Database& database, const unsigned int steps, const PinholeCamera& pinholeCamera, RandomGenerator& randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const Scalar maximalStaticImagePointFilterRatio, Vectors3& initialObjectPoints, Indices32& initialObjectPointIds, const RelativeThreshold& pointsThreshold = RelativeThreshold(20u, Scalar(0.5), 100u), const unsigned int minimalKeyFrames = 2u, const unsigned int maximalKeyFrames = 10u, const Scalar maximalSqrError = Scalar(3.5 * 3.5), Indices32* usedPoseIds = nullptr, Worker* worker = nullptr, bool* abort = nullptr);
482 
483  /**
484  * Determines the initial object point positions for a set of key frames (image point groups) observing the unique object points in individual camera poses.
485  * This function applies a RANSAC mechanism randomly selecting individual start key frames (pairs of image points).<br>
486  * The key frames (image point groups) provide the following topology:<br>
487  * For n unique object points visible in m individual frames we have n object points (op) and n * m overall image points (ip):
488  * <pre>
489  * op_1, op_2, op_3, op_4, ..., op_n
490  * sparse_pose_0 -> ip_1_1, ip_1_2, ip_1_3, ip_1_4, ..., ip_1_n
491  * sparse_pose_1 -> ip_2_1, ip_2_2, ip_2_3, ip_2_4, ..., ip_2_n
492  * sparse_pose_2 -> ip_3_1, ip_3_2, ip_3_3, ip_3_4, ..., ip_3_n
493  * sparse_pose_3 -> ip_4_1, ip_4_2, ip_4_3, ip_4_4, ..., ip_4_n
494  * ...
495  * sparse pose_m -> ip_m_1, ip_m_2, ip_m_3, ip_m_4, ..., ip_y_n
496  * </pre>
497  * @param pinholeCamera The pinhole camera profile to be applied
498  * @param imagePointGroups Key frames of image points, all points in one group are located in the same camera frame and the individual points correspond to the same unique object points
499  * @param randomGenerator A random generator object
500  * @param validPoses The resulting poses that could be determined
501  * @param validPoseIndices The indices of resulting valid poses in relation to the given image point groups
502  * @param objectPoints The resulting object points that could be determined
503  * @param validObjectPointIndices The indices of resulting valid object points in relation to the given image point groups
504  * @param iterations The number of RANSAC iterations trying to find a better result than already determined
505  * @param minimalValidObjectPoints The threshold of object points that must be valid
506  * @param maximalSqrError The maximal square distance between an image points and a projected object point
507  * @param database Optional database holding the image points from the imagePointGroups to validate the resulting 3D object positions even for camera poses not corresponding to the provided groups of image points; if defined also 'keyFrameIds' and 'objectPointIds' must be defined
508  * @param keyFrameIds Optional ids of the individual keyframes to which the set of image point groups from 'imagePointGroups' belong, each key frame id corresponds with one group of image points, if defined also 'database' and 'objectPointIds' must be defined
509  * @param objectPointIds Optional ids of the individual object points which projections are provided as groups of image points in 'imagePointGroups', if defined also 'database' and 'keyFrameIds' must be defined
510  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
511  * @return True, if succeeded
512  * @see determineInitialObjectPointsFromDenseFramesRANSAC().
513  */
514  static bool determineInitialObjectPointsFromSparseKeyFramesRANSAC(const PinholeCamera& pinholeCamera, const Database::ImagePointGroups& imagePointGroups, RandomGenerator& randomGenerator, HomogenousMatrices4& validPoses, Indices32& validPoseIndices, Vectors3& objectPoints, Indices32& validObjectPointIndices, const unsigned int iterations = 20u, const RelativeThreshold& minimalValidObjectPoints = RelativeThreshold(10u, Scalar(0.3), 20u), const Scalar maximalSqrError = Scalar(3.5 * 3.5), const Database* database = nullptr, const Indices32* keyFrameIds = nullptr, const Indices32* objectPointIds = nullptr, bool* abort = nullptr);
515 
516  /**
517  * Determines the initial object point positions for a set of frames (image point groups) observing the unique object points in individual camera poses.
518  * This function applies a RANSAC mechanism randomly selecting individual start key frames (pairs of image points).<br>
519  * The key frames (image point groups) provide the following topology:<br>
520  * For n unique object points visible in m individual frames we have n object points (op) and n * m overall image points (ip):
521  * <pre>
522  * op_1, op_2, op_3, op_4, ..., op_n
523  * ...
524  * dense_pose_2 -> ip_3_1, ip_3_2, ip_3_3, ip_3_4, ..., ip_3_n
525  * dense_pose_3 -> ip_4_1, ip_4_2, ip_4_3, ip_4_4, ..., ip_4_n
526  * ...
527  * </pre>
528  * @param pinholeCamera The pinhole camera profile to be applied
529  * @param imagePointGroups Frames of image points, all points in one group are located in the same camera frame and the individual points correspond to the same unique object points
530  * @param randomGenerator A random generator object
531  * @param validPoses The resulting poses that could be determined
532  * @param validPoseIds The ids of resulting valid poses, one id for each resulting valid pose (the order of the ids is arbitrary)
533  * @param objectPoints The resulting object points that could be determined
534  * @param validObjectPointIndices The indices of resulting valid object points in relation to the given image point groups
535  * @param iterations The number of RANSAC iterations trying to find a better result than already determined
536  * @param minimalValidObjectPoints The threshold of object points that must be valid
537  * @param maximalSqrError The maximal square distance between an image point and a projected object point
538  * @param worker Optional worker object to distribute the computation
539  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
540  * @return True, if succeeded
541  * @see determineInitialObjectPointsFromSparseKeyFramesRANSAC().
542  */
543  static bool determineInitialObjectPointsFromDenseFramesRANSAC(const PinholeCamera& pinholeCamera, const ImagePointGroups& imagePointGroups, RandomGenerator& randomGenerator, HomogenousMatrices4& validPoses, Indices32& validPoseIds, Vectors3& objectPoints, Indices32& validObjectPointIndices, const unsigned int iterations = 20u, const RelativeThreshold& minimalValidObjectPoints = RelativeThreshold(10u, Scalar(0.3), 20u), const Scalar maximalSqrError = Scalar(3.5 * 3.5), Worker* worker = nullptr, bool* abort = nullptr);
544 
545  /**
546  * Determines the initial object point positions for a set of key frames (image point groups) observing unique object points.
547  * This function starts with two explicit key frames (image point groups) and then tries to find more matching key frames (image point groups).<br>
548  * The set of given image points should not contain image points located (visible) at a static logo in the frame as these points may violate the pose determination algorithms.<br>
549  * The key frames (image point groups) provide the following topology:<br>
550  * For n unique object points visible in m individual key frames we have n object points (op) and n * m overall image points (ip):
551  * <pre>
552  * op_1, op_2, op_3, op_4, ..., op_n
553  * sparse_pose_0 -> ip_1_1, ip_1_2, ip_1_3, ip_1_4, ..., ip_1_n
554  * sparse_pose_1 -> ip_2_1, ip_2_2, ip_2_3, ip_2_4, ..., ip_2_n
555  * sparse_pose_2 -> ip_3_1, ip_3_2, ip_3_3, ip_3_4, ..., ip_3_n
556  * sparse_pose_3 -> ip_4_1, ip_4_2, ip_4_3, ip_4_4, ..., ip_4_n
557  * ...
558  * sparse_pose_m -> ip_m_1, ip_m_2, ip_m_3, ip_m_4, ..., ip_m_n
559  * </pre>
560  * @param pinholeCamera The pinhole camera profile to be applied
561  * @param imagePointGroups Key frames (groups) of image points, all points in one key frame (group) are located in the same camera key frame and the individual points correspond to the same unique object points
562  * @param randomGenerator A random generator object
563  * @param firstGroupIndex The index of the first key frame (image point group) which is applied as the first stereo frame, with range [0, imagePointGroups.size())
564  * @param secondGroupIndex The index of the second key frame (image point group) which is applied as the second stereo frame, with range [0, imagePointGroups.size()), with firstGroupIndex != secondGroupIndex
565  * @param poses The resulting poses that could be determined
566  * @param validPoseIndices The indices of resulting valid poses in relation to the given image point groups
567  * @param objectPoints The resulting object points that could be determined
568  * @param validObjectPointIndices The indices of resulting valid object points in relation to the given image point groups
569  * @param minimalValidObjectPoints The minimal number of valid object points which must be reached
570  * @param maximalSqrError The maximal square distance between an image point and a projected object point
571  * @return True, if succeeded
572  * @see determineInitialObjectPointsFromFrameRange().
573  */
574  static bool determineInitialObjectPointsFromSparseKeyFrames(const PinholeCamera& pinholeCamera, const Database::ImagePointGroups& imagePointGroups, RandomGenerator& randomGenerator, const unsigned int firstGroupIndex, const unsigned int secondGroupIndex, HomogenousMatrices4& poses, Indices32& validPoseIndices, Vectors3& objectPoints, Indices32& validObjectPointIndices, const RelativeThreshold& minimalValidObjectPoints = RelativeThreshold(10u, Scalar(0.3), 20u), const Scalar maximalSqrError = Scalar(3.5 * 3.5));
575 
576  /**
577  * Determines the initial object point positions for a set of image point groups (covering a range of image frames) observing the unique object points in individual frames.
578  * This function starts with two explicit frames (image point groups) and then tries to find more matching frames (image point groups).<br>
579  * The set of given image points should not contain image points located (visible) at a static logo in the frame as these points may violate the pose determination algorithms.<br>
580  * All frames (image point groups) within the frame range provide the following topology:<br>
581  * For n unique object points visible in m individual frames we have n object points (op) and n * m overall image points (ip):
582  * <pre>
583  * op_1, op_2, op_3, op_4, ..., op_n
584  * ...
585  * dense_pose_2 -> ip_3_1, ip_3_2, ip_3_3, ip_3_4, ..., ip_3_n
586  * dense_pose_3 -> ip_4_1, ip_4_2, ip_4_3, ip_4_4, ..., ip_4_n
587  * ...
588  * </pre>
589  * @param pinholeCamera The pinhole camera profile to be applied
590  * @param imagePointGroups Frames (groups) of image points, all points in one frame (group) are located in the same camera frame and the individual points correspond to the same unique object points
591  * @param randomGenerator A random generator object
592  * @param firstGroupIndex The index of the first frame (image point group) which is applied as the first stereo frame, with range [imagePointGroups.firstIndex(), imagePointGroups.lastIndex()]
593  * @param secondGroupIndex The index of the second frame (image point group) which is applied as the second stereo frame, with range [imagePointGroups.firstIndex(), imagePointGroups.lastIndex()], with firstGroupIndex != secondGroupIndex
594  * @param validPoses The resulting poses that could be determined
595  * @param validPoseIds The ids of resulting valid poses, one id for each valid resulting pose (the order of the ids is arbitrary)
596  * @param totalSqrError The resulting sum of square pixel errors for all valid poses
597  * @param objectPoints The resulting object points that could be determined
598  * @param validObjectPointIndices The indices of resulting valid object points in relation to the given image point groups, with range [5, infinity)
599  * @param minimalValidObjectPoints The minimal number of valid object points which must be reached
600  * @param maximalSqrError The maximal square distance between an image point and a projected object point
601  * @return True, if succeeded
602  * @see determineInitialObjectPointsFromKeyFrames().
603  */
604  static bool determineInitialObjectPointsFromDenseFrames(const PinholeCamera& pinholeCamera, const ImagePointGroups& imagePointGroups, RandomGenerator& randomGenerator, const unsigned int firstGroupIndex, const unsigned int secondGroupIndex, HomogenousMatrices4& validPoses, Indices32& validPoseIds, Scalar& totalSqrError, Vectors3& objectPoints, Indices32& validObjectPointIndices, const RelativeThreshold& minimalValidObjectPoints = RelativeThreshold(10u, Scalar(0.3), 20u), const Scalar maximalSqrError = Scalar(3.5 * 3.5));
605 
606  /**
607  * Optimizes the positions of already known initial 3D object points when a given database holds neither valid 3D positions nor valid 6DOF poses.
608  * The optimization is done by a bundle adjustment between the camera poses of distinct keyframes and the given 3D object points, however the optimized camera poses are not provided.<br>
609  * This function can optimize a subset of the given initial object points to allow more camera poses (camera frames) to be involved.<br>
610  * @param database The database defining the topology of 3D object points and corresponding 2D image points
611  * @param camera The camera profile defining the projection, must be valid
612  * @param randomGenerator Random generator object
613  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
614  * @param startFrame The index of the frame from which the algorithm will start, in this frame the specified initial object points must all be visible, with range [lowerFrame, upperFrame]
615  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
616  * @param initialObjectPoints The already known initial 3D positions of object points
617  * @param initialObjectPointIds The ids of the already known object points, one id for each given initial object point
618  * @param optimizedObjectPoints The resulting optimized 3D positions of the given initial object points
619  * @param optimizedObjectPointIds The resulting ids of the optimized object points, one id for each optimized object point
620  * @param minimalObjectPoints The minimal number of object points that will be optimized (the higher the number the fewer camera poses may be used as some object points may not be visible in all camera frames), with range [5, initialObjectPoints.size()); however, due to pose inaccuracies the algorithm finally may use fewer object points
621  * @param minimalKeyFrames The minimal number of keyframes that will be used, with range [2, maximalKeyFrames]
622  * @param maximalKeyFrames The maximal number of keyframes that will be used, with range [minimalKeyFrames, upperFrame - lowerFrame + 1]; however, due to pose inaccuracies the algorithm finally may use more keyframes
623  * @param maximalSqrError The maximal squared projection error for a 3D object point, points with larger error are excluded after a first optimization iteration
624  * @param usedPoseIds Optional resulting ids of all camera poses which have been used to optimized the object points
625  * @param initialSqrError Optional resulting initial average squared error
626  * @param finalSqrError Optional resulting final average squared error
627  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
628  * @return True, if succeeded
629  */
630  static bool optimizeInitialObjectPoints(const Database& database, const AnyCamera& camera, RandomGenerator& randomGenerator, const unsigned int lowerFrame, const unsigned int startFrame, const unsigned int upperFrame, const Vectors3& initialObjectPoints, const Indices32& initialObjectPointIds, Vectors3& optimizedObjectPoints, Indices32& optimizedObjectPointIds, const unsigned int minimalObjectPoints = 5u, const unsigned int minimalKeyFrames = 3u, const unsigned int maximalKeyFrames = 10u, const Scalar maximalSqrError = Scalar(3.5 * 3.5), Indices32* usedPoseIds = nullptr, Scalar* initialSqrError = nullptr, Scalar* finalSqrError = nullptr, bool* abort = nullptr);
631 
632  /**
633  * Determines the positions of new object points from a database within a specified frame range.
634  * Only camera frames with valid camera poses are used to determine the new object points.<br>
635  * This function extracts a subset of representative camera poses and triangulates image points from individual camera poses to determine new 3D object points.<br>
636  * Object points in the database with valid 3D positions are not investigated.<br>
637  * @param database The database defining the topology of 3D object points, corresponding 2D image points and corresponding camera poses
638  * @param camera The camera profile defining the projection, must be valid
639  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
640  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
641  * @param newObjectPoints The resulting positions of new object points
642  * @param newObjectPointIds The resulting ids of the new object points, each id corresponds with a position from 'newObjectPoints'
643  * @param minimalKeyFrames The minimal number of key frames which must be valid for a 3D object point, with range [minimalKeyFrames, upperFrame - lowerFrame + 1]
644  * @param maximalKeyFrames The maximal number of key frames which will be used to determine the 3D object point positions, with range [minimalKeyFrames, upperFrame - lowerFrame + 1]
645  * @param maximalSqrError The maximal squared error between a projected 3D object point and an image point so that the combination of object point and image point count as valid
646  * @param worker Optional worker object to distribute the computation
647  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
648  * @return True, if succeeded
649  */
650  static bool determineUnknownObjectPoints(const Database& database, const AnyCamera& camera, const unsigned int lowerFrame, const unsigned int upperFrame, Vectors3& newObjectPoints, Indices32& newObjectPointIds, const unsigned int minimalKeyFrames = 3u, const unsigned int maximalKeyFrames = 10u, const Scalar maximalSqrError = Scalar(3.5 * 3.5), Worker* worker = nullptr, bool* abort = nullptr);
651 
652  /**
653  * Determines the positions of a set of (currently unknown) object points.
654  * Only camera frames with valid camera pose are used to determine the new object points.<br>
655  * @param database The database from which the object point, image point and pose information is extracted
656  * @param camera The camera profile defining the projection, must be valid
657  * @param cameraMotion The motion of the camera, can be CM_ROTATIONAL or CM_TRANSLATIONAL
658  * @param unknownObjectPointIds The ids of all (currently unknown) object points for which a 3D position will be determined, must all be valid
659  * @param newObjectPoints The resulting 3D location of the new object points
660  * @param newObjectPointIds The ids of the resulting new object points, one id for each resulting new object point
661  * @param randomGenerator Random generator object to be used for creating random numbers, must be defined
662  * @param newObjectPointObservations Optional resulting number of observations for each determined 3D object point, one number for each resulting 3D object point location
663  * @param minimalObservations The minimal number of observations for each new object point which are necessary to determine the 3D location
664  * @param useAllObservations True, to use all observations (with valid camera pose) to determine the 3D locations; False, to apply a RANSAC mechanism taking a subset of all observations to determine the 3D locations
665  * @param estimator The robust estimator which is applied during optimization of each individual new 3D location, must be defined
666  * @param ransacMaximalSqrError The maximal squared projection error between a new 3D object point and the corresponding image points for the RANSAC mechanism
667  * @param averageRobustError The (average) robust error for a new 3D object point after optimization of the 3D location
668  * @param maximalSqrError The maximal error for a new valid 3D object point after optimization of the 3D location
669  * @param worker Optional worker object to distribute the computation
670  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
671  * @return True, if succeeded and not aborted
672  */
673  static bool determineUnknownObjectPoints(const Database& database, const AnyCamera& camera, const CameraMotion cameraMotion, const Indices32& unknownObjectPointIds, Vectors3& newObjectPoints, Indices32& newObjectPointIds, RandomGenerator& randomGenerator, Indices32* newObjectPointObservations = nullptr, const unsigned int minimalObservations = 2u, const bool useAllObservations = true, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar ransacMaximalSqrError = Scalar(3.5 * 3.5), const Scalar averageRobustError = Scalar(3.5 * 3.5), const Scalar maximalSqrError = Numeric::maxValue(), Worker* worker = nullptr, bool* abort = nullptr);
674 
675  /**
676  * Determines the positions of all (currently unknown) object points.
677  * Only camera frames with valid camera pose are used to determine the locations of the new object points.<br>
678  * All unknown object points with more or equal observations (in valid poses) than specified will be handled.<br>
679  * However, the number of resulting object points with valid 3D position may be smaller than the maximal possible number due to e.g., the defined maximal error parameters.<br>
680  * @param database The database from which the object point, image point and pose information is extracted
681  * @param camera The camera profile defining the projection, must be valid
682  * @param cameraMotion The motion of the camera, can be CM_ROTATIONAL or CM_TRANSLATIONAL
683  * @param newObjectPoints The resulting 3D location of the new object points
684  * @param newObjectPointIds The ids of the resulting new object points, one id for each resulting new object point
685  * @param randomGenerator Random generator object to be used for creating random numbers, must be defined
686  * @param newObjectPointObservations Optional resulting number of observations (with valid camera poses) for each determined 3D object point, one number for each resulting 3D object point location
687  * @param minimalObjectPointPriority The minimal priority value of the resulting unknown object points
688  * @param minimalObservations The minimal number of observations (with valid camera poses) for each new object point which are necessary to determine the 3D location, with range [2, infinity)
689  * @param useAllObservations True, to use all observations (with valid camera pose) to determine the 3D locations; False, to apply a RANSAC mechanism taking a subset of all observations to determine the 3D locations
690  * @param estimator The robust estimator which is applied during optimization of each individual new 3D location, must be defined
691  * @param ransacMaximalSqrError The maximal squared projection error between a new 3D object point and the corresponding image points for the RANSAC mechanism
692  * @param averageRobustError The (average) robust error for a new 3D object point after optimization of the 3D location
693  * @param maximalSqrError The maximal error for a new valid 3D object point after optimization of the 3D location
694  * @param worker Optional worker object to distribute the computation
695  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
696  * @return True, if succeeded and not aborted
697  */
698  static inline bool determineUnknownObjectPoints(const Database& database, const AnyCamera& camera, const CameraMotion cameraMotion, Vectors3& newObjectPoints, Indices32& newObjectPointIds, RandomGenerator& randomGenerator, Indices32* newObjectPointObservations = nullptr, const Scalar minimalObjectPointPriority = Scalar(-1), const unsigned int minimalObservations = 10u, const bool useAllObservations = true, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar ransacMaximalSqrError = Scalar(3.5 * 3.5), const Scalar averageRobustError = Scalar(3.5 * 3.5), const Scalar maximalSqrError = Numeric::maxValue(), Worker* worker = nullptr, bool* abort = nullptr);
699 
700  /**
701  * Determines the positions of (currently unknown) object points which are visible in specified poses (the poses are specified by a lower and upper frame range).
702  * Only camera frames with valid camera pose are used to determine the locations of the new object points.<br>
703  * All unknown object points with more or equal observations (in valid poses) than specified will be handled.<br>
704  * However, the number of resulting object points with valid 3D position may be smaller than the maximal possible number due to e.g., the defined maximal error parameters.<br>
705  * @param database The database from which the object point, image point and pose information is extracted
706  * @param camera The camera profile defining the projection, must be valid
707  * @param cameraMotion The motion of the camera, can be CM_ROTATIONAL or CM_TRANSLATIONAL
708  * @param lowerPoseId The lower id of the camera pose in which the unknown object points can/must be visible
709  * @param upperPoseId The upper id of the camera pose in which the unknown object points can/must be visible, with range [lowerPoseId, infinity)
710  * @param newObjectPoints The resulting 3D location of the new object points
711  * @param newObjectPointIds The ids of the resulting new object points, one id for each resulting new object point
712  * @param randomGenerator Random generator object to be used for creating random numbers, must be defined
713  * @param newObjectPointObservations Optional resulting number of observations (with valid camera poses) for each determined 3D object point, one number for each resulting 3D object point location
714  * @param minimalObjectPointPriority The minimal priority value of the resulting unknown object points
715  * @param minimalObservations The minimal number of observations (with valid camera poses) for each new object point which are necessary to determine the 3D location
716  * @param useAllObservations True, to use all observations (with valid camera pose) to determine the 3D locations; False, to apply a RANSAC mechanism taking a subset of all observations to determine the 3D locations
717  * @param estimator The robust estimator which is applied during optimization of each individual new 3D location, must be defined
718  * @param ransacMaximalSqrError The maximal squared projection error between a new 3D object point and the corresponding image points for the RANSAC mechanism
719  * @param averageRobustError The (average) robust error for a new 3D object point after optimization of the 3D location
720  * @param maximalSqrError The maximal error for a new valid 3D object point after optimization of the 3D location
721  * @param worker Optional worker object to distribute the computation
722  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
723  * @return True, if succeeded and not aborted
724  * @tparam tVisibleInAllPoses True, if the object points must be visible in all poses (frames) of the specified pose range; False, if the object point can be visible in any poses (frames) within the specified pose range
725  */
726  template <bool tVisibleInAllPoses>
727  static inline bool determineUnknownObjectPoints(const Database& database, const AnyCamera& camera, const CameraMotion cameraMotion, const Index32 lowerPoseId, const Index32 upperPoseId, Vectors3& newObjectPoints, Indices32& newObjectPointIds, RandomGenerator& randomGenerator, Indices32* newObjectPointObservations = nullptr, const Scalar minimalObjectPointPriority = Scalar(-1), const unsigned int minimalObservations = 10u, const bool useAllObservations = true, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar ransacMaximalSqrError = Scalar(3.5 * 3.5), const Scalar averageRobustError = Scalar(3.5 * 3.5), const Scalar maximalSqrError = Numeric::maxValue(), Worker* worker = nullptr, bool* abort = nullptr);
728 
729  /**
730  * Optimizes a set of 3D object points (having a quite good accuracy already) without optimizing the camera poses concurrently.
731  * The database must hold the valid initial 3D object positions, the image point positions and must hold valid camera poses.<br>
732  * @param database The database from which the initial 3D object point positions and the individual camera poses (in which the object points are visible) are extracted
733  * @param pinholeCamera The pinhole camera profile to be applied
734  * @param cameraMotion The motion of the camera, CM_ROTATIONAL if the camera poses do not have a translational part, CM_TRANSLATIONAL otherwise
735  * @param objectPointIds The ids of the object points for which the positions will be optimized (all points must have already initial 3D positions)
736  * @param optimizedObjectPoints The resulting positions of the optimized object points
737  * @param optimizedObjectPointIds The ids of the optimized object points, one id for each position in 'optimizedObjectPoints'
738  * @param minimalObservations The minimal number of observations a 3D object point must have so that the position of the object point will be optimized
739  * @param estimator The robust estimator which is applied to determine the projection error between 3D object point positions and the image points in individual camera frames
740  * @param maximalRobustError The maximal error between a projected object point and the individual image points; beware the error must be defined w.r.t. the selected estimator
741  * @param worker Optional worker object to distribute the computation
742  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
743  * @return True, if succeeded
744  * @see optimizeObjectPointsWithVariablePoses().
745  */
746  static bool optimizeObjectPointsWithFixedPoses(const Database& database, const PinholeCamera& pinholeCamera, const CameraMotion cameraMotion, const Indices32& objectPointIds, Vectors3& optimizedObjectPoints, Indices32& optimizedObjectPointIds, unsigned int minimalObservations = 10u, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar maximalRobustError = Scalar(3.5 * 3.5), Worker* worker = nullptr, bool* abort = nullptr);
747 
748  /**
749  * Optimizes 3D object points (having a quite good accuracy already) and optimizes the camera poses concurrently.
750  * The optimization is based on a bundle adjustment for camera poses and object points minimizing the projection error between projected object points and image points located in the camera frames.<br>
751  * Representative key frames with valid camera poses are selected and all object points visible in these key frames will be optimized as long as the object points can be observed in more key frames than the defined threshold 'minimalObservations'.<br>
752  * However, the number of observations for each individual object point and the ids of the key frames in which the object points are visible can be arbitrary (as long as the defined thresholds hold).<br>
753  * The database must hold the valid initial 3D object positions, the image point positions and must hold valid camera poses.<br>
754  * Beware: Neither any pose nor any object point in the database will be updated, use the resulting optimized object point locations to update the database!<br>
755  * @param database The database from which the initial 3D object point positions and the individual camera poses (in which the object points are visible) are extracted
756  * @param pinholeCamera The pinhole camera profile to be applied
757  * @param optimizedObjectPoints The resulting positions of the optimized object points
758  * @param optimizedObjectPointIds The ids of the optimized object points, one id for each position in 'optimizedObjectPoints'
759  * @param optimizedKeyFramePoses Optional resulting camera poses, one for each keyframe which has been used during optimization, nullptr if not of interest
760  * @param optimizedKeyFramePoseIds Optional resulting ids of the camera poses which have been used as key frame during optimization, one for each 'optimizedKeyFramePoses', nullptr if not of interest
761  * @param minimalKeyFrames The minimal number of key frames (with valid poses) which are necessary for the optimization, with range [2, maximalKeyFrames]
762  * @param maximalKeyFrames The maximal number of key frames (with valid poses) which will be used for the optimization, with range [minimalKeyFrames, infinity)
763  * @param minimalObservations The minimal number of observations a 3D object point must have so that the position of the object point will be optimized, with range [minimalKeyFrames, infinity)
764  * @param estimator The robust estimator which is applied to determine the projection error between 3D object point positions and the image points in individual camera frames
765  * @param iterations The number of optimization iterations which will be applied, with range [1, infinity)
766  * @param initialRobustError Optional resulting initial average robust error before optimization
767  * @param finalRobustError Optional resulting final average robust error after optimization
768  * @return True, if succeeded
769  * @see optimizeObjectPointsWithFixedPoses().
770  */
771  static bool optimizeObjectPointsWithVariablePoses(const Database& database, const PinholeCamera& pinholeCamera, Vectors3& optimizedObjectPoints, Indices32& optimizedObjectPointIds, HomogenousMatrices4* optimizedKeyFramePoses = nullptr, Indices32* optimizedKeyFramePoseIds = nullptr, const unsigned int minimalKeyFrames = 3u, const unsigned int maximalKeyFrames = 20u, const unsigned int minimalObservations = 10u, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const unsigned int iterations = 50u, Scalar* initialRobustError = nullptr, Scalar* finalRobustError = nullptr);
772 
773  /**
774  * Optimizes 3D object points (having a quite good accuracy already) and optimizes the camera poses concurrently.
775  * The optimization is based on a bundle adjustment for camera poses and object points minimizing the projection error between projected object points and image points located in the camera frames.<br>
776  * Representative key frames with valid camera poses must be provided and all object points visible in these key frames will be optimized as long as the object points can be observed in more key frames than the defined threshold 'minimalObservations'.<br>
777  * However, the number of observations for each individual object point and the ids of the key frames in which the object points are visible can be arbitrary (as long as the defined thresholds hold).<br>
778  * The database must hold the valid initial 3D object positions, the image point positions and must hold valid camera poses.<br>
779  * Beware: Neither any pose nor any object point in the database will be updated, use the resulting optimized object point locations to update the database!<br>
780  * @param database The database from which the initial 3D object point positions and the individual camera poses (in which the object points are visible) are extracted
781  * @param pinholeCamera The pinhole camera profile to be applied
782  * @param keyFrameIds The ids of all poses defining representative key frames for the optimization, at least two
783  * @param optimizedObjectPoints The resulting positions of the optimized object points, at least one
784  * @param optimizedObjectPointIds The ids of the optimized object points, one id for each position in 'optimizedObjectPoints'
785  * @param optimizedKeyFramePoses Optional resulting optimized camera poses, one for each key frame id
786  * @param minimalObservations The minimal number of observations a 3D object point must have so that the position of the object point will be optimized, with range [minimalKeyFrames, infinity)
787  * @param estimator The robust estimator which is applied to determine the projection error between 3D object point positions and the image points in individual camera frames
788  * @param iterations The number of optimization iterations which will be applied, with range [1, infinity)
789  * @param initialRobustError Optional the initial average robust error before optimization
790  * @param finalRobustError Optional the final average robust error after optimization
791  * @return True, if succeeded
792  * @see optimizeObjectPointsWithFixedPoses().
793  */
794  static bool optimizeObjectPointsWithVariablePoses(const Database& database, const PinholeCamera& pinholeCamera, const Indices32& keyFrameIds, Vectors3& optimizedObjectPoints, Indices32& optimizedObjectPointIds, HomogenousMatrices4* optimizedKeyFramePoses = nullptr, const unsigned int minimalObservations = 10u, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const unsigned int iterations = 50u, Scalar* initialRobustError = nullptr, Scalar* finalRobustError = nullptr);
795 
796  /**
797  * Optimizes 3D object points (having a quite good accuracy already) and optimizes the camera poses concurrently.
798  * The optimization is based on a bundle adjustment for camera poses and object points minimizing the projection error between projected object points and image points located in the camera frames.<br>
799  * Representative key frames with valid camera poses must be provided, further a set of object point ids must be provided which should be used for optimization, the object points visible in the key frames will be optimized as long as the object points can be observed in more key frames than the defined threshold 'minimalObservations'.<br>
800  * However, the number of observations for each individual object point and the ids of the key frames in which the object points are visible can be arbitrary (as long as the defined thresholds hold).<br>
801  * The database must hold the valid initial 3D object positions, the image point positions and must hold valid camera poses.<br>
802  * Beware: Neither any pose nor any object point in the database will be updated, use the resulting optimized object point locations to update the database!<br>
803  * @param database The database from which the initial 3D object point positions and the individual camera poses (in which the object points are visible) are extracted
804  * @param pinholeCamera The pinhole camera profile to be applied
805  * @param keyFrameIds The ids of all poses defining representative key frames for the optimization, at least two
806  * @param objectPointIds The ids of the object points which will be optimized (may be a subset only), at least one
807  * @param optimizedObjectPoints The resulting positions of the optimized object points
808  * @param optimizedObjectPointIds The ids of the optimized object points, one id for each position in 'optimizedObjectPoints'
809  * @param optimizedKeyFramePoses Optional resulting optimized camera poses, one for each key frame id, nullptr if not of interest
810  * @param minimalObservations The minimal number of observations a 3D object point must have so that the position of the object point will be optimized, with range [1, infinity)
811  * @param estimator The robust estimator which is applied to determine the projection error between 3D object point positions and the image points in individual camera frames
812  * @param iterations The number of optimization iterations which will be applied, with range [1, infinity)
813  * @param initialRobustError Optional the initial average robust error before optimization, nullptr if not of interest
814  * @param finalRobustError Optional the final average robust error after optimization, nullptr if not of interest
815  * @return True, if succeeded
816  * @see optimizeObjectPointsWithFixedPoses().
817  */
818  static bool optimizeObjectPointsWithVariablePoses(const Database& database, const PinholeCamera& pinholeCamera, const Indices32& keyFrameIds, const Indices32& objectPointIds, Vectors3& optimizedObjectPoints, Indices32& optimizedObjectPointIds, HomogenousMatrices4* optimizedKeyFramePoses = nullptr, const unsigned int minimalObservations = 10u, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const unsigned int iterations = 50u, Scalar* initialRobustError = nullptr, Scalar* finalRobustError = nullptr);
819 
 /**
  * Optimizes 3D object points and the camera poses of key frames within a specified pose range concurrently.
  * The optimization is based on a bundle adjustment for camera poses and object points minimizing the projection error between projected object points and image points located in the camera frames.<br>
  * The database must hold the valid initial 3D object positions, the image point positions and must hold valid camera poses.<br>
  * Beware: Neither any pose nor any object point in the database will be updated, use the resulting optimized object point locations to update the database!
  * @param database The database from which the initial 3D object point positions and the individual camera poses (in which the object points are visible) are extracted
  * @param pinholeCamera The pinhole camera profile to be applied
  * @param lowerPoseId The id of the pose defining the lower border of the camera poses which will be investigated
  * @param upperPoseId The id of the pose defining the upper border of the camera poses which will be investigated, with range [lowerPoseId, infinity)
  * @param objectPointIds The ids of the object points which will be optimized (may be a subset only), at least one
  * @param usedKeyFrameIds The resulting ids of the key frames which have been used during the optimization
  * @param optimizedObjectPoints The resulting positions of the optimized object points; presumably one position per id in 'objectPointIds' - TODO confirm against the implementation
  * @param minimalObservations The minimal number of observations a 3D object point must have so that the position of the object point will be optimized, with range [1, infinity)
  * @param minimalKeyFrames The minimal number of key frames (with valid poses) which are necessary for the optimization, with range [2, infinity)
  * @param estimator The robust estimator which is applied to determine the projection error between 3D object point positions and the image points in individual camera frames
  * @param iterations The number of optimization iterations which will be applied, with range [1, infinity)
  * @param initialRobustError Optional the initial average robust error before optimization, nullptr if not of interest
  * @param finalRobustError Optional the final average robust error after optimization, nullptr if not of interest
  * @return True, if succeeded
  * @see optimizeObjectPointsWithFixedPoses().
  */
 820  static bool optimizeObjectPointsWithVariablePoses(const Database& database, const PinholeCamera& pinholeCamera, const Index32 lowerPoseId, const Index32 upperPoseId, const Indices32& objectPointIds, Indices32& usedKeyFrameIds, Vectors3& optimizedObjectPoints, const unsigned int minimalObservations = 10u, const unsigned int minimalKeyFrames = 10u, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const unsigned int iterations = 50u, Scalar* initialRobustError = nullptr, Scalar* finalRobustError = nullptr);
821 
822  /**
823  * Supposes pure rotational camera motion for a given database with stable camera poses determined by initial but stable object points.
824  * If the camera profile is not well approximated during determination of the camera poses and the initial 3D object points the camera motion may contain translational motion although in reality the motion is only rotational.<br>
825  * Especially, if the camera comes with a significant distortion the motion determination may go wrong.<br>
826  * Therefore, this function supposes sole rotational camera motion, determines the new 3D object point locations, selects a set of suitable keyframes best representing the entire number of valid camera poses, and optimizes the camera's field of view and the distortion parameters.<br>
827  * If the projection error between 3D object points and 2D image points falls below a defined threshold (should be strict), then the camera motion can be expected to provide only rotational parts.<br>
828  * Beware: Valid object points (with valid location) not visible within the specified frame range will not be investigated.
829  * @param database The database providing a set of initial 3D object points visible in several valid camera poses
830  * @param pinholeCamera The pinhole camera profile which has been used to determine the camera poses and 3D object point locations in the given database
831  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
832  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
833  * @param findInitialFieldOfView True, to apply a determination of the initial field of view of the camera (should be done if the camera's field of view is not known)
834  * @param optimizationStrategy The optimization strategy for the camera parameters which will be applied, OS_INVALID to avoid any optimization of the camera parameters
835  * @param optimizedCamera The resulting optimized camera profile with adjusted field of view and distortion parameters
836  * @param optimizedDatabase The resulting database with optimized camera poses and 3D object point locations
837  * @param minimalObservations The minimal number of observations an object point must have so that it will be investigated to measure whether the camera motion is pure rotational
838  * @param minimalKeyframes The minimal number of key frames (with valid poses) which are necessary for the determination/optimization, with range [2, maximalKeyframes]
839  * @param maximalKeyframes The maximal number of key frames (with valid poses) which will be used for the optimization, with range [minimalKeyframes, upperFrame - lowerFrame + 1]
840  * @param lowerFovX The lower threshold border for the final (ideal) horizontal field of view of the camera profile, with range (0, upperFovX]
841  * @param upperFovX The upper threshold border for the final (ideal) horizontal field of view of the camera profile, with range [lowerFovX, PI)
842  * @param maximalSqrError The maximal average projection error between the 3D object points and the 2D image points so that a correspondence counts as valid
843  * @param worker Optional worker object to distribute the computation
844  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
845  * @param finalMeanSqrError Optional resulting final mean squared pose error (averaged), nullptr if not of interest
846  * @return True, if the camera motion is pure rotational
847  * @see optimizeCamera().
848  */
849  static bool supposeRotationalCameraMotion(const Database& database, const PinholeCamera& pinholeCamera, const unsigned int lowerFrame, const unsigned int upperFrame, const bool findInitialFieldOfView, const PinholeCamera::OptimizationStrategy optimizationStrategy, PinholeCamera& optimizedCamera, Database& optimizedDatabase, const unsigned int minimalObservations = 0u, const unsigned int minimalKeyframes = 3u, const unsigned int maximalKeyframes = 20u, const Scalar lowerFovX = Numeric::deg2rad(20), const Scalar upperFovX = Numeric::deg2rad(140), const Scalar maximalSqrError = (1.5 * 1.5), Worker* worker = nullptr, bool* abort = nullptr, Scalar* finalMeanSqrError = nullptr);
850 
851  /**
852  * Optimizes the camera profile for a given database with stable camera poses determined by initial but stable object points.
853  * This function selects a representative subset of the valid poses within the specified frame range and considers all object points visible in the subset of camera frames.<br>
854  * The resulting optimized database (with optimized object point locations) invalidates all object point locations of object points not visible in the selected subset of camera frames.<br>
855  * Therefore, this function should be invoked after the initial set of stable object points are determined but before the database stores too many object points (which would get lost).<br>
856  * Further, this function supposes a translational (and optional rotational) camera motion.<br>
857  * @param database The database providing a set of initial 3D object points visible in several valid camera poses
858  * @param pinholeCamera The pinhole camera profile which has been used to determine the camera poses and 3D object point locations in the given database
859  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
860  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
861  * @param findInitialFieldOfView True, to apply a determination of the initial field of view of the camera (should be done if the camera's field of view is not known)
862  * @param optimizationStrategy The optimization strategy for the camera parameters which will be applied, OS_INVALID to avoid any optimization of the camera parameters
863  * @param optimizedCamera The resulting optimized camera profile with adjusted field of view and distortion parameters
864  * @param optimizedDatabase The resulting database with optimized camera poses and 3D object point locations
865  * @param minimalObservationsInKeyframes The minimal number of observations an object point must have under all selected keyframes so that it will be investigated to optimized the camera profile and so that this object point will be optimized
866  * @param minimalKeyframes The minimal number of key frames (with valid poses) which are necessary for the determination/optimization, with range [2, maximalKeyframes]
867  * @param maximalKeyframes The maximal number of key frames (with valid poses) which will be used for the optimization, with range [minimalKeyframes, upperFrame - lowerFrame + 1]
868  * @param lowerFovX The lower threshold border for the final (ideal) horizontal field of view of the camera profile, with range (0, upperFovX]
869  * @param upperFovX The upper threshold border for the final (ideal) horizontal field of view of the camera profile, with range [lowerFovX, PI)
870  * @param worker Optional worker object to distribute the computation
871  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
872  * @param finalMeanSqrError Optional resulting final mean squared pose error (averaged), nullptr if not of interest
873  * @return True, if the camera profile and the 3D object point locations and the camera poses in the given database could be optimized
874  * @see supposeRotationalCameraMotion().
875  */
876  static bool optimizeCamera(const Database& database, const PinholeCamera& pinholeCamera, const unsigned int lowerFrame, const unsigned int upperFrame, const bool findInitialFieldOfView, const PinholeCamera::OptimizationStrategy optimizationStrategy, PinholeCamera& optimizedCamera, Database& optimizedDatabase, const unsigned int minimalObservationsInKeyframes = 2u, const unsigned int minimalKeyframes = 3u, const unsigned int maximalKeyframes = 20u, const Scalar lowerFovX = Numeric::deg2rad(20), const Scalar upperFovX = Numeric::deg2rad(140), Worker* worker = nullptr, bool* abort = nullptr, Scalar* finalMeanSqrError = nullptr);
877 
878  /**
879  * Optimizes 3D object points (having a quite good accuracy already) and optimizes the camera poses and camera profile concurrently.
880  * The optimization is based on a bundle adjustment for camera poses and object points minimizing the projection error between projected object points and image points located in the camera frames.<br>
881  * Representative key frames with valid camera poses are selected and all object points visible in these key frames will be optimized as long as the object points can be observed in more key frames than the defined threshold 'minimalObservations'.<br>
882  * However, the number of observations for each individual object point and the ids of the key frames in which the object points are visible can be arbitrary (as long as the defined thresholds hold).<br>
883  * The database must hold the valid initial 3D object positions, the image point positions and must hold valid camera poses.<br>
884  * Beware: Neither any pose nor any object point in the database will be updated, use the resulting optimized object point locations to update the database!<br>
885  * @param database The database from which the initial 3D object point positions and the individual camera poses (in which the object points are visible) are extracted
886  * @param pinholeCamera The pinhole camera profile to be applied
887  * @param optimizationStrategy The optimization strategy for the camera parameters which will be applied, OS_INVALID to avoid any optimization of the camera parameters
888  * @param optimizedCamera The resulting optimized camera profile with adjusted field of view and distortion parameters
889  * @param optimizedObjectPoints Optional resulting positions of the optimized object points, nullptr if not of interest
890  * @param optimizedObjectPointIds Optional resulting ids of the optimized object points, one id for each position in 'optimizedObjectPoints', nullptr if not of interest
891  * @param optimizedKeyFramePoses Optional resulting camera poses, one for each keyframe which has been used during optimization, nullptr if not of interest
892  * @param optimizedKeyFramePoseIds Optional resulting ids of the camera poses which have been used as key frame during optimization, one for each 'optimizedKeyFramePoses', nullptr if not of interest
893  * @param minimalKeyFrames The minimal number of key frames (with valid poses) which are necessary for the optimization, with range [2, maximalKeyFrames]
894  * @param maximalKeyFrames The maximal number of key frames (with valid poses) which will be used for the optimization, with range [minimalKeyFrames, infinity)
895  * @param minimalObservations The minimal number of observations a 3D object point must have so that the position of the object point will be optimized, with range [minimalKeyFrames, infinity)
896  * @param estimator The robust estimator which is applied to determine the projection error between 3D object point positions and the image points in individual camera frames
897  * @param iterations The number of optimization iterations which will be applied, with range [1, infinity)
898  * @param initialRobustError Optional the initial average robust error before optimization, nullptr if not of interest
899  * @param finalRobustError Optional the final average robust error after optimization, nullptr if not of interest
900  * @return True, if succeeded
901  * @see optimizeObjectPointsWithFixedPoses().
902  */
903  static bool optimizeCameraWithVariableObjectPointsAndPoses(const Database& database, const PinholeCamera& pinholeCamera, const PinholeCamera::OptimizationStrategy optimizationStrategy, PinholeCamera& optimizedCamera, Vectors3* optimizedObjectPoints = nullptr, Indices32* optimizedObjectPointIds = nullptr, HomogenousMatrices4* optimizedKeyFramePoses = nullptr, Indices32* optimizedKeyFramePoseIds = nullptr, const unsigned int minimalKeyFrames = 3u, const unsigned int maximalKeyFrames = 20u, const unsigned int minimalObservations = 10u, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const unsigned int iterations = 50u, Scalar* initialRobustError = nullptr, Scalar* finalRobustError = nullptr);
904 
905  /**
906  * Optimizes 3D object points (having a quite good accuracy already) and optimizes the camera poses and camera profile concurrently.
907  * The optimization is based on a bundle adjustment for camera poses and object points minimizing the projection error between projected object points and image points located in the camera frames.<br>
908  * Representative key frames with valid camera poses must be provided and all object points visible in these key frames will be optimized as long as the object points can be observed in more key frames than the defined threshold 'minimalObservations'.<br>
909  * However, the number of observations for each individual object point and the ids of the key frames in which the object points are visible can be arbitrary (as long as the defined thresholds hold).<br>
910  * The database must hold the valid initial 3D object positions, the image point positions and must hold valid camera poses.<br>
911  * Beware: Neither any pose nor any object point in the database will be updated, use the resulting optimized object point locations to update the database!<br>
912  * @param database The database from which the initial 3D object point positions and the individual camera poses (in which the object points are visible) are extracted
913  * @param pinholeCamera The pinhole camera profile to be applied
914  * @param optimizationStrategy The optimization strategy for the camera parameters which will be applied, OS_INVALID to avoid any optimization of the camera parameters
915  * @param keyFrameIds The ids of all poses defining representative key frames for the optimization, at least two
916  * @param optimizedCamera The resulting optimized camera profile with adjusted field of view and distortion parameters
917  * @param optimizedObjectPoints Optional resulting positions of the optimized object points, nullptr if not of interest
918  * @param optimizedObjectPointIds Optional resulting ids of the optimized object points, one id for each position in 'optimizedObjectPoints', nullptr if not of interest
919  * @param optimizedKeyFramePoses Optional resulting optimized camera poses, one for each key frame id, nullptr if not of interest
920  * @param minimalObservations The minimal number of observations a 3D object point must have so that the position of the object point will be optimized, with range [1, infinity)
921  * @param estimator The robust estimator which is applied to determine the projection error between 3D object point positions and the image points in individual camera frames
922  * @param iterations The number of optimization iterations which will be applied, with range [1, infinity)
923  * @param initialRobustError Optional the initial average robust error before optimization, nullptr if not of interest
924  * @param finalRobustError Optional the final average robust error after optimization, nullptr if not of interest
925  * @return True, if succeeded
926  * @see optimizeObjectPointsWithFixedPoses().
927  */
928  static bool optimizeCameraWithVariableObjectPointsAndPoses(const Database& database, const PinholeCamera& pinholeCamera, const PinholeCamera::OptimizationStrategy optimizationStrategy, const Indices32& keyFrameIds, PinholeCamera& optimizedCamera, Vectors3* optimizedObjectPoints = nullptr, Indices32* optimizedObjectPointIds = nullptr, HomogenousMatrices4* optimizedKeyFramePoses = nullptr, const unsigned int minimalObservations = 10u, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const unsigned int iterations = 50u, Scalar* initialRobustError = nullptr, Scalar* finalRobustError = nullptr);
929 
930  /**
931  * Optimizes 3D object points (having a quite good accuracy already) and optimizes the camera poses and camera profile concurrently.
932  * The optimization is based on a bundle adjustment for camera poses and object points minimizing the projection error between projected object points and image points located in the camera frames.<br>
933  * Representative key frames with valid camera poses must be provided, further a set of object point ids must be provided which should be used for optimization, the object points visible in the key frames will be optimized as long as the object points can be observed in more key frames than the defined threshold 'minimalObservations'.<br>
934  * However, the number of observations for each individual object point and the ids of the key frames in which the object points are visible can be arbitrary (as long as the defined thresholds hold).<br>
935  * The database must hold the valid initial 3D object positions, the image point positions and must hold valid camera poses.<br>
936  * Beware: Neither any pose nor any object point in the database will be updated, use the resulting optimized object point locations to update the database!<br>
937  * @param database The database from which the initial 3D object point positions and the individual camera poses (in which the object points are visible) are extracted
938  * @param pinholeCamera The pinhole camera profile to be applied
939  * @param optimizationStrategy The optimization strategy for the camera parameters which will be applied, OS_INVALID to avoid any optimization of the camera parameters
940  * @param keyFrameIds The ids of all poses defining representative key frames for the optimization, at least two
941  * @param objectPointIds The ids of the object points which will be optimized (may be a subset only), at least one
942  * @param optimizedCamera The resulting optimized camera profile
943  * @param optimizedObjectPoints Optional resulting positions of the optimized object points, nullptr if not of interest
944  * @param optimizedObjectPointIds Optional resulting ids of the optimized object points, one id for each position in 'optimizedObjectPoints', nullptr if not of interest
945  * @param optimizedKeyFramePoses Optional resulting optimized camera poses, one for each key frame id, nullptr if not of interest
946  * @param minimalObservations The minimal number of observations a 3D object point must have so that the position of the object point will be optimized, with range [1, infinity)
947  * @param estimator The robust estimator which is applied to determine the projection error between 3D object point positions and the image points in individual camera frames
948  * @param iterations The number of optimization iterations which will be applied, with range [1, infinity)
949  * @param initialRobustError Optional the initial average robust error before optimization, nullptr if not of interest
950  * @param finalRobustError Optional the final average robust error after optimization, nullptr if not of interest
951  * @return True, if succeeded
952  * @see optimizeObjectPointsWithFixedPoses().
953  */
954  static bool optimizeCameraWithVariableObjectPointsAndPoses(const Database& database, const PinholeCamera& pinholeCamera, const PinholeCamera::OptimizationStrategy optimizationStrategy, const Indices32& keyFrameIds, const Indices32& objectPointIds, PinholeCamera& optimizedCamera, Vectors3* optimizedObjectPoints = nullptr, Indices32* optimizedObjectPointIds = nullptr, HomogenousMatrices4* optimizedKeyFramePoses = nullptr, const unsigned int minimalObservations = 10u, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const unsigned int iterations = 50u, Scalar* initialRobustError = nullptr, Scalar* finalRobustError = nullptr);
955 
956  /**
957  * Updates the camera poses of the database depending on valid 2D/3D points correspondences within a range of camera frames.
958  * The camera poses will be set to invalid if no valid pose can be determined (e.g., if not enough valid point correspondences are known for a specific camera frame).<br>
959  * Pose determination starts at a specified frame and moves to higher and lower frame indices afterwards.<br>
960  * Poses from successive frames are applied as initial guess for a new frame.<br>
961  * The resulting poses will have either a sole rotational motion or a rotational and translational motion, this depends on the defined camera motion.<br>
962  * @param database The database from which the point correspondences are extracted and which receives the determined camera poses
963  * @param camera The camera profile defining the projection, must be valid
964  * @param cameraMotion The motion of the camera, use CM_UNKNOWN if the motion is unknown so that 6-DOF poses will be determined
965  * @param randomGenerator Random generator object
966  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
967  * @param startFrame The index of the frame from which the algorithm will start, in this frame the specified initial object points must all be visible, with range [lowerFrame, upperFrame]
968  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
969  * @param minimalCorrespondences The minimal number of 2D/3D points correspondences which are necessary to determine a valid camera pose, with range [5, infinity)
970  * @param estimator The robust estimator which is applied for the non-linear pose optimization
971  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
972  * @param ransacMaximalSqrError The maximal squared pixel error between image point and projected object points for RANSAC iterations, with range (0, infinity)
973  * @param maximalRobustError The maximal average robust pixel error between image point and projected object points so that a pose counts as valid, with range (0, infinity)
974  * @param finalAverageError Optional resulting average final error for all valid poses, the error depends on the selected robust estimator, nullptr if not of interest
975  * @param validPoses Optional resulting number of valid poses, nullptr if not of interest
976  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
977  * @return True, if all poses have been updated (the poses may be invalid)
978  */
979  static bool updatePoses(Database& database, const AnyCamera& camera, const CameraMotion cameraMotion, RandomGenerator& randomGenerator, const unsigned int lowerFrame, const unsigned int startFrame, const unsigned int upperFrame, const unsigned int minimalCorrespondences, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio = Scalar(1), const Scalar ransacMaximalSqrError = Scalar(3.5 * 3.5), const Scalar maximalRobustError = Scalar(3.5 * 3.5), Scalar* finalAverageError = nullptr, size_t* validPoses = nullptr, bool* abort = nullptr);
980 
981  /**
982  * Updates the camera poses of the database depending on valid 2D/3D points correspondences within a range of camera frames.
983  * The camera poses will be set to invalid if no valid pose can be determined (e.g., if not enough valid point correspondences are known for a specific camera frame).<br>
984  * If a worker is provided every pose is determined independently.<br>
985  * The resulting poses will have either a sole rotational motion or a rotational and translational motion, this depends on the defined camera motion.<br>
986  * @param database The database from which the point correspondences are extracted and which receives the determined camera poses
987  * @param camera The camera profile defining the projection, must be valid
988  * @param cameraMotion The motion of the camera, use CM_UNKNOWN if the motion is unknown so that 6-DOF poses will be determined
989  * @param randomGenerator Random generator object
990  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
991  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
992  * @param minimalCorrespondences The minimal number of 2D/3D points correspondences which are necessary to determine a valid camera pose, with range [5, infinity)
993  * @param estimator The robust estimator which is applied for the non-linear pose optimization
994  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
995  * @param ransacMaximalSqrError The maximal squared pixel error between image point and projected object points for RANSAC iterations, with range (0, infinity)
996  * @param maximalRobustError The maximal average robust pixel error between image point and projected object points so that a pose counts as valid, with range (0, infinity)
997  * @param finalAverageError Optional resulting average final error for all valid poses, the error depends on the selected robust estimator, nullptr if not of interest
998  * @param validPoses Optional resulting number of valid poses, nullptr if not of interest
999  * @param worker Optional worker object to distribute the computation
1000  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
1001  * @return True, if all poses have been updated (the poses may be invalid)
1002  */
1003  static bool updatePoses(Database& database, const AnyCamera& camera, const CameraMotion cameraMotion, RandomGenerator& randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const unsigned int minimalCorrespondences, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio = Scalar(1), const Scalar ransacMaximalSqrError = Scalar(3.5 * 3.5), const Scalar maximalRobustError = Scalar(3.5 * 3.5), Scalar* finalAverageError = nullptr, size_t* validPoses = nullptr, Worker* worker = nullptr, bool* abort = nullptr);
1004 
1005  /**
1006  * Determines the camera poses depending on valid 2D/3D points correspondence within a range of camera frames.
1007  * The camera poses will be set to invalid if no valid pose can be determined (e.g., if not enough valid point correspondences are known for a specific camera frame).<br>
1008  * The resulting poses will have either a sole rotational motion or a rotational and translational motion, this depends on the defined camera motion.<br>
1009  * @param database The database from which the point correspondences are extracted
1010  * @param camera The camera profile defining the projection, must be valid
1011  * @param cameraMotion The motion of the camera, use CM_UNKNOWN if the motion is unknown so that 6-DOF poses will be determined
1012  * @param priorityObjectPointIds Optional ids of the object points for which the poses will be optimized with higher priority, may be empty so that all object points are investigated with the same priority
1013  * @param solePriorityPoints True, to apply only the priority object points for pose determination, has no meaning if no priority points are provided
1014  * @param randomGenerator Random generator object
1015  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
1016  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
1017  * @param minimalCorrespondences The minimal number of 2D/3D points correspondences which are necessary to determine a valid camera pose, with range [5, infinity)
1018  * @param poses The resulting determined poses starting with the lower frame and ending with the upper frame
1019  * @param estimator The robust estimator which is applied for the non-linear pose optimization
1020  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
1021  * @param ransacMaximalSqrError The maximal squared pixel error between image point and projected object points for RANSAC iterations, with range (0, infinity)
1022  * @param maximalRobustError The maximal average robust pixel error between image point and projected object points so that a pose counts as valid, with range (0, infinity)
1023  * @param finalAverageError Optional resulting average final error for all valid poses, the error depends on the selected robust estimator, nullptr if not of interest
1024  * @param worker Optional worker object to distribute the computation
1025  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
1026  * @return True, if all poses have been determined (some poses may be invalid)
1027  */
1028  static bool determinePoses(const Database& database, const AnyCamera& camera, const CameraMotion cameraMotion, const IndexSet32& priorityObjectPointIds, const bool solePriorityPoints, RandomGenerator& randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const unsigned int minimalCorrespondences, ShiftVector<HomogenousMatrix4>& poses, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio = Scalar(1), const Scalar ransacMaximalSqrError = Scalar(3.5 * 3.5), const Scalar maximalRobustError = Scalar(3.5 * 3.5), Scalar* finalAverageError = nullptr, Worker* worker = nullptr, bool* abort = nullptr);
1029 
1030  /**
1031  * This function tracks image points (defined by their object points) from one frame to the sibling frames until the number of tracked points falls below a specified number or until a minimal number of sibling frames has been processed.
1032  * Thus, this function supports two individual termination conditions: either the specification of a minimal number of tracked points or the specification of the minimal number of used sibling frames (with at least one tracked point).<br>
1033  * If the number of tracked object points exceeds 'maximalTrackedObjectPoints' we select the most 'interesting' (by taking object points widely spread over the start frame) object points and remove the remaining.<br>
1034  * The tracking is applied forward and backward starting at a specific frame.<br>
1035  * @param database The database defining the topology of 3D object points and corresponding 2D image points, object point positions and camera poses may be invalid as this information is not used
1036  * @param objectPointIds The ids of the initial object points defining the image points which will be tracked, each object point should have a corresponding image point
1037  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
1038  * @param startFrame The index of the frame from which the algorithm will start, in this frame the specified initial object points must all be visible, with range [lowerFrame, upperFrame]
1039  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
1040  * @param minimalTrackedObjectPoints One of two termination conditions: The minimal number of tracked points, with range [1, objectPointIds.size()], must be 0 if minimalTrackedFrames is not 0
1041  * @param minimalTrackedFrames One of two termination conditions: The minimal number of tracked frames, with range [1, upperFrame - lowerFrame + 1u], must be 0 if minimalTrackedObjectPoints is not 0
1042  * @param maximalTrackedObjectPoints The maximal number of tracked points, with range [minimalTrackedObjectPoints, objectPointIds.size()]
1043  * @param trackedObjectPointIds The resulting ids of the tracked object points, one id for each tracked object point
1044  * @param trackedImagePointGroups The resulting groups of tracked image points, one group for each camera frame, one image point for each object point
1045  * @param trackedValidIndices Optional resulting indices of the given object point ids that could be tracked
1046  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
1047  * @return True, if succeeded
1048  */
1049  static bool trackObjectPoints(const Database& database, const Indices32& objectPointIds, const unsigned int lowerFrame, const unsigned int startFrame, const unsigned int upperFrame, const unsigned int minimalTrackedObjectPoints, const unsigned int minimalTrackedFrames, const unsigned int maximalTrackedObjectPoints, Indices32& trackedObjectPointIds, ImagePointGroups& trackedImagePointGroups, Indices32* trackedValidIndices = nullptr, bool* abort = nullptr);
1050 
1051  /**
1052  * This function tracks two individual (disjoint) groups of image points (defined by their object points) from one frame to the sibling frames until the number of tracked points falls below a specified number.
1053  * The tracking is applied forward and backward starting at a specific frame.<br>
1054  * First, the priority points will be tracked as long as possible, which defines the tracking range for the remaining points.<br>
1055  * Afterwards, the remaining points will be tracked as long as possible but not outside the frame range which results from the tracking of the priority points.<br>
1056  * Last, the results of both groups will be joined to one large set of tracked object points, first the priority object points, followed by the remaining object points.<br>
1057  * @param database The database defining the topology of 3D object points and corresponding 2D image points, object point positions and camera poses may be invalid as this information is not used
1058  * @param priorityObjectPointIds The ids of the initial priority object points defining the first group of image points which will be tracked, each object point should have a corresponding image point
1059  * @param remainingObjectPointIds The ids of the initial remaining object points defining the second group of image points which will be tracked, each object point should have a corresponding image point
1060  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
1061  * @param startFrame The index of the frame from which the algorithm will start, in this frame the specified initial object points must all be visible, with range [lowerFrame, upperFrame]
1062  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
1063  * @param minimalTrackedPriorityObjectPoints The minimal number of tracked priority points, with range [1, priorityObjectPointIds.size())
1064  * @param minimalRemainingFramesRatio The minimal number of frames in which remaining points must be tracked (must be visible) defined as a ratio of the number of frames in which the priority points are visible, with range (0, 1]
1065  * @param maximalTrackedPriorityObjectPoints The maximal number of tracked priority points, with range [minimalTrackedPriorityObjectPoints, priorityObjectPointIds.size()]
1066  * @param maximalTrackedRemainingObjectPoints The maximal number of tracked remaining points, with range [1, remainingObjectPointIds.size()]
1067  * @param trackedObjectPointIds The resulting ids of the tracked object points, one id for each tracked object point
1068  * @param trackedImagePointGroups The resulting groups of tracked image points, one group for each camera frame, one image point for each object point
1069  * @param trackedValidPriorityIndices Optional resulting indices of the given priority object point ids that could be tracked
1070  * @param trackedValidRemainingIndices Optional resulting indices of the given remaining object point ids that could be tracked
1071  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
1072  * @return True, if succeeded
1073  */
1074  static bool trackObjectPoints(const Database& database, const Indices32& priorityObjectPointIds, const Indices32& remainingObjectPointIds, const unsigned int lowerFrame, const unsigned int startFrame, const unsigned int upperFrame, const unsigned int minimalTrackedPriorityObjectPoints, const Scalar minimalRemainingFramesRatio, const unsigned int maximalTrackedPriorityObjectPoints, const unsigned int maximalTrackedRemainingObjectPoints, Indices32& trackedObjectPointIds, ImagePointGroups& trackedImagePointGroups, Indices32* trackedValidPriorityIndices = nullptr, Indices32* trackedValidRemainingIndices = nullptr, bool* abort = nullptr);
1075 
1076  /**
1077  * This function tracks a group of object points from one frame to both (if available) neighbor frames and counts the minimal number of tracked points.
1078  * Use this function to measure the scene complexity at a specific frame.<br>
1079  * The fewer object points can be tracked, the more complex the scene.
1080  * @param database The database defining the topology of 3D object points and corresponding 2D image points, object point positions and camera poses may be invalid as this information is not used
1081  * @param objectPointIds The ids of the object points which will be tracked, each object point should have a corresponding image point
1082  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
1083  * @param startFrame The index of the frame from which the algorithm will start, in this frame the specified initial object points must all be visible, with range [lowerFrame, upperFrame]
1084  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
1085  * @return The resulting tracked object points (the object points visible in the range of three image frames)
1086  */
1087  static Indices32 trackObjectPointsToNeighborFrames(const Database& database, const Indices32& objectPointIds, const unsigned int lowerFrame, const unsigned int startFrame, const unsigned int upperFrame);
1088 
1089  /**
1090  * Determines a set of representative camera poses from a given database within a specified frame range.
1091  * Only valid camera poses from the database will be investigated.<br>
1092  * @param database The database from which the representative camera poses are extracted
1093  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
1094  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
1095  * @param numberRepresentative The number of representative poses that will be determined
1096  * @return The ids of the representative camera poses
1097  */
1098  static Indices32 determineRepresentativePoses(const Database& database, const unsigned int lowerFrame, const unsigned int upperFrame, const size_t numberRepresentative);
1099 
1100  /**
1101  * Determines a set of representative camera poses from a given database from a set of given camera poses.
1102  * @param database The database from which the representative camera poses are extracted
1103  * @param poseIds The camera pose ids from which the representative camera poses are extracted, all poses must be valid
1104  * @param numberRepresentative The number of representative poses that will be determined
1105  * @return The ids of the representative camera poses
1106  */
1107  static Indices32 determineRepresentativePoses(const Database& database, const Indices32& poseIds, const size_t numberRepresentative);
1108 
1109  /**
1110  * Determines the camera 6-DOF pose for a specific camera frame.
1111  * @param database The database from which the object point and image point correspondences are extracted
1112  * @param camera The camera profile defining the projection, must be valid
1113  * @param randomGenerator Random generator object
1114  * @param frameId The id of the frame for which the camera pose will be determined
1115  * @param roughPose Optional rough camera pose to speed up the computation and improve the accuracy
1116  * @param minimalCorrespondences The minimal number of 2D/3D points correspondences which are necessary to determine a valid camera pose, with range [5, infinity)
1117  * @param estimator The robust estimator which is applied for the non-linear pose optimization
1118  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
1119  * @param maximalSqrError The maximal squared pixel error between image point and projected object points for the RANSAC algorithm, with range (0, infinity)
1120  * @param finalRobustError Optional resulting final average robust error, in relation to the defined estimator
1121  * @param correspondences Optional resulting number of 2D/3D point correspondences which were available
1122  * @return The resulting camera pose, an invalid pose if no pose can be determined
1123  */
1124  static inline HomogenousMatrix4 determinePose(const Database& database, const AnyCamera& camera, RandomGenerator& randomGenerator, const unsigned int frameId, const HomogenousMatrix4& roughPose = HomogenousMatrix4(false), const unsigned int minimalCorrespondences = 10u, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio = Scalar(1), const Scalar maximalSqrError = Scalar(3.5 * 3.5), Scalar* finalRobustError = nullptr, unsigned int* correspondences = nullptr);
1125 
1126  /**
1127  * Determines the camera 6-DOF pose for a specific camera frame.
1128  * @param database The database from which the object point and image point correspondences are extracted
1129  * @param camera The camera profile defining the projection, must be valid
1130  * @param randomGenerator Random generator object
1131  * @param frameId The id of the frame for which the camera pose will be determined
1132  * @param roughPose Optional rough camera pose to speed up the computation and improve the accuracy
1133  * @param priorityObjectPointIds Ids of object points for which the poses will be optimized
1134  * @param solePriorityPoints True, to apply only the priority object points for pose determination
1135  * @param minimalCorrespondences The minimal number of 2D/3D points correspondences which are necessary to determine a valid camera pose, with range [5, infinity)
1136  * @param estimator The robust estimator which is applied for the non-linear pose optimization
1137  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
1138  * @param maximalSqrError The maximal squared pixel error between image point and projected object points for the RANSAC algorithm, with range (0, infinity)
1139  * @param finalRobustError Optional resulting final average robust error, in relation to the defined estimator
1140  * @param correspondences Optional resulting number of 2D/3D point correspondences which were available
1141  * @return The resulting camera pose, an invalid pose if no pose can be determined
1142  */
1143  static inline HomogenousMatrix4 determinePose(const Database& database, const AnyCamera& camera, RandomGenerator& randomGenerator, const unsigned int frameId, const IndexSet32& priorityObjectPointIds, const bool solePriorityPoints, const HomogenousMatrix4& roughPose = HomogenousMatrix4(false), const unsigned int minimalCorrespondences = 10u, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio = Scalar(1), const Scalar maximalSqrError = Scalar(3.5 * 3.5), Scalar* finalRobustError = nullptr, unsigned int* correspondences = nullptr);
1144 
1145  /**
1146  * Determines the camera 6-DOF pose for a specific camera frame.
1147  * @param database The database from which the image points are extracted
1148  * @param camera The camera profile defining the projection, must be valid
1149  * @param randomGenerator Random generator object
1150  * @param frameId The id of the frame for which the camera pose will be determined
1151  * @param objectPoints The object points which are all visible in the specified frame
1152  * @param objectPointIds The ids of the object points, one id for each object point
1153  * @param roughPose Optional rough camera pose to speed up the computation and improve the accuracy
1154  * @param estimator The robust estimator which is applied for the non-linear pose optimization
1155  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
1156  * @param maximalSqrError The maximal squared pixel error between image point and projected object points for the RANSAC algorithm, with range (0, infinity)
1157  * @param finalRobustError Optional resulting final average robust error, in relation to the defined estimator
1158  * @return The resulting camera pose, an invalid pose if no pose can be determined
1159  */
1160  static inline HomogenousMatrix4 determinePose(const Database& database, const AnyCamera& camera, RandomGenerator& randomGenerator, const unsigned int frameId, const ConstIndexedAccessor<ObjectPoint>& objectPoints, const ConstIndexedAccessor<Index32>& objectPointIds, const HomogenousMatrix4& roughPose = HomogenousMatrix4(false), const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio = Scalar(1), const Scalar maximalSqrError = Scalar(3.5 * 3.5), Scalar* finalRobustError = nullptr);
1161 
1162  /**
1163  * Determines the camera 6-DOF pose for a set of object point and image point correspondences.
1164  * @param camera The camera profile defining the projection, must be valid
1165  * @param randomGenerator Random generator object
1166  * @param objectPoints The object points which are visible in a frame
1167  * @param imagePoints The image points which are projections of the given object points, one image point corresponds to one object point
1168  * @param roughPose Optional rough camera pose to speed up the computation and improve the accuracy
1169  * @param estimator The robust estimator which is applied for the non-linear pose optimization
1170  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
1171  * @param maximalSqrError The maximal robust squared pixel error between image point and projected object points for the RANSAC algorithm, with range (0, infinity)
1172  * @param finalRobustError Optional resulting final average robust error, in relation to the defined estimator
1173  * @param validIndices Optional resulting indices of the valid point correspondences
1174  * @return The resulting camera pose, an invalid pose if no pose can be determined
1175  */
1176  static inline HomogenousMatrix4 determinePose(const AnyCamera& camera, RandomGenerator& randomGenerator, const ConstIndexedAccessor<ObjectPoint>& objectPoints, const ConstIndexedAccessor<ImagePoint>& imagePoints, const HomogenousMatrix4& roughPose = HomogenousMatrix4(false), const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio = Scalar(1), const Scalar maximalSqrError = Scalar(3.5 * 3.5), Scalar* finalRobustError = nullptr, Indices32* validIndices = nullptr);
1177 
1178  /**
1179  * Determines the camera 6-DOF pose for a set of object point and image point correspondences.
1180  * The point correspondences are separated to a set of priority correspondences and remaining correspondences ensuring that the pose mainly matches for the priority point correspondences.
1181  * @param camera The camera profile defining the projection, must be valid
1182  * @param randomGenerator Random generator object
1183  * @param objectPoints The object points which are visible in a frame, first all priority object points followed by the remaining object points
1184  * @param imagePoints The image points which are projections of the given object points, one image point corresponds to one object point
1185  * @param priorityCorrespondences The number of priority point correspondences
1186  * @param roughPose Optional rough camera pose to speed up the computation and improve the accuracy
1187  * @param estimator The robust estimator which is applied for the non-linear pose optimization
1188  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
1189  * @param maximalSqrError The maximal robust squared pixel error between image point and projected object points for the RANSAC algorithm, with range (0, infinity)
1190  * @param finalRobustError Optional resulting final average robust error, in relation to the defined estimator
1191  * @return The resulting camera pose, an invalid pose if no pose can be determined
1192  */
1193  static inline HomogenousMatrix4 determinePose(const AnyCamera& camera, RandomGenerator& randomGenerator, const ConstIndexedAccessor<ObjectPoint>& objectPoints, const ConstIndexedAccessor<ImagePoint>& imagePoints, const size_t priorityCorrespondences, const HomogenousMatrix4& roughPose = HomogenousMatrix4(false), const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio = Scalar(1), const Scalar maximalSqrError = Scalar(3.5 * 3.5), Scalar* finalRobustError = nullptr);
1194 
1195  /**
1196  * Determines the camera 3-DOF orientation (as the camera has rotational motion only) for a specific camera frame.
1197  * @param database The database from which the object point and image point correspondences are extracted
1198  * @param camera The camera profile defining the projection, must be valid
1199  * @param randomGenerator Random generator object
1200  * @param frameId The id of the frame for which the camera orientation will be determined
1201  * @param roughOrientation Optional rough camera orientation to speed up the computation and improve the accuracy
1202  * @param minimalCorrespondences The minimal number of 2D/3D points correspondences which are necessary to determine a valid camera orientation, with range [5, infinity)
1203  * @param estimator The robust estimator which is applied for the non-linear orientation optimization
1204  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
1205  * @param maximalSqrError The maximal squared pixel error between image point and projected object points for the RANSAC algorithm, with range (0, infinity)
1206  * @param finalRobustError Optional resulting final average robust error, in relation to the defined estimator
1207  * @param correspondences Optional resulting number of 2D/3D point correspondences which were available
1208  * @return The resulting camera orientation, an invalid orientation if no orientation can be determined
1209  */
1210  static inline SquareMatrix3 determineOrientation(const Database& database, const AnyCamera& camera, RandomGenerator& randomGenerator, const unsigned int frameId, const SquareMatrix3& roughOrientation = SquareMatrix3(false), const unsigned int minimalCorrespondences = 10u, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio = Scalar(1), const Scalar maximalSqrError = Scalar(3.5 * 3.5), Scalar* finalRobustError = nullptr, unsigned int* correspondences = nullptr);
1211 
1212  /**
1213  * Determines the camera 3-DOF orientation (as the camera has rotational motion only) for a specific camera frame.
1214  * @param database The database from which the object point and image point correspondences are extracted
1215  * @param camera The camera profile defining the projection, must be valid
1216  * @param randomGenerator Random generator object
1217  * @param frameId The id of the frame for which the camera orientation will be determined
1218  * @param priorityObjectPointIds Ids of object points for which the poses will be optimized
1219  * @param solePriorityPoints True, to apply only the priority object points for pose determination
1220  * @param roughOrientation Optional rough camera orientation to speed up the computation and improve the accuracy
1221  * @param minimalCorrespondences The minimal number of 2D/3D points correspondences which are necessary to determine a valid camera orientation, with range [5, infinity)
1222  * @param estimator The robust estimator which is applied for the non-linear orientation optimization
1223  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
1224  * @param maximalSqrError The maximal squared pixel error between image point and projected object points for the RANSAC algorithm, with range (0, infinity)
1225  * @param finalRobustError Optional resulting final average robust error, in relation to the defined estimator
1226  * @param correspondences Optional resulting number of 2D/3D point correspondences which were available
1227  * @return The resulting camera orientation, an invalid orientation if no orientation can be determined
1228  */
1229  static inline SquareMatrix3 determineOrientation(const Database& database, const AnyCamera& camera, RandomGenerator& randomGenerator, const unsigned int frameId, const IndexSet32& priorityObjectPointIds, const bool solePriorityPoints, const SquareMatrix3& roughOrientation = SquareMatrix3(false), const unsigned int minimalCorrespondences = 10u, const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio = Scalar(1), const Scalar maximalSqrError = Scalar(3.5 * 3.5), Scalar* finalRobustError = nullptr, unsigned int* correspondences = nullptr);
1230 
1231  /**
1232  * Determines the camera 3-DOF orientation (as the camera has rotational motion only) for a specific camera frame.
1233  * @param database The database from which the image points are extracted
1234  * @param camera The camera profile defining the projection, must be valid
1235  * @param randomGenerator Random generator object
1236  * @param frameId The id of the frame for which the camera orientation will be determined
1237  * @param objectPoints The object points which are all visible in the specified frame
1238  * @param objectPointIds The ids of the object points, one id for each object point
1239  * @param numberObjectPoints The number of given object points, with range [5, infinity)
1240  * @param roughOrientation Optional rough camera orientation to speed up the computation and improve the accuracy
1241  * @param estimator The robust estimator which is applied for the non-linear orientation optimization
1242  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
1243  * @param maximalSqrError The maximal squared pixel error between image point and projected object points for the RANSAC algorithm, with range (0, infinity)
1244  * @param finalRobustError Optional resulting final average robust error, in relation to the defined estimator
1245  * @return The resulting camera orientation, an invalid orientation if no orientation can be determined
1246  */
1247  static inline SquareMatrix3 determineOrientation(const Database& database, const AnyCamera& camera, RandomGenerator& randomGenerator, const unsigned int frameId, const ObjectPoint* objectPoints, const Index32* objectPointIds, const size_t numberObjectPoints, const SquareMatrix3& roughOrientation = SquareMatrix3(false), const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio = Scalar(1), const Scalar maximalSqrError = Scalar(3.5 * 3.5), Scalar* finalRobustError = nullptr);
1248 
1249  /**
1250  * Determines the camera 3-DOF orientation for a set of object point and image point correspondences.
1251  * @param camera The camera profile defining the projection, must be valid
1252  * @param randomGenerator Random generator object
1253  * @param objectPoints The object points which are visible in a frame
1254  * @param imagePoints The image points which are projections of the given object points, one image point corresponds to one object point
1255  * @param roughOrientation Optional rough camera orientation to speed up the computation and improve the accuracy
1256  * @param estimator The robust estimator which is applied for the non-linear orientation optimization
1257  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
1258  * @param maximalSqrError The maximal robust squared pixel error between image point and projected object points for the RANSAC algorithm, with range (0, infinity)
1259  * @param finalRobustError Optional resulting final average robust error, in relation to the defined estimator
1260  * @param validIndices Optional resulting indices of the valid point correspondences
1261  * @return The resulting camera orientation, an invalid orientation if no orientation can be determined
1262  */
1263  static inline SquareMatrix3 determineOrientation(const AnyCamera& camera, RandomGenerator& randomGenerator, const ConstIndexedAccessor<ObjectPoint>& objectPoints, const ConstIndexedAccessor<ImagePoint>& imagePoints, const SquareMatrix3& roughOrientation = SquareMatrix3(false), const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio = Scalar(1), const Scalar maximalSqrError = Scalar(3.5 * 3.5), Scalar* finalRobustError = nullptr, Indices32* validIndices = nullptr);
1264 
1265  /**
1266  * Determines the camera 3-DOF orientation for a set of object point and image point correspondences.
1267  * @param camera The camera profile defining the projection, must be valid
1268  * @param randomGenerator Random generator object
1269  * @param objectPoints The object points which are visible in a frame, first all priority object points followed by the remaining object points
1270  * @param imagePoints The image points which are projections of the given object points, one image point corresponds to one object point
1271  * @param priorityCorrespondences The number of priority point correspondences
1272  * @param roughOrientation Optional rough camera orientation to speed up the computation and improve the accuracy
1273  * @param estimator The robust estimator which is applied for the non-linear orientation optimization
1274  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
1275  * @param maximalSqrError The maximal robust squared pixel error between image point and projected object points for the RANSAC algorithm, with range (0, infinity)
1276  * @param finalRobustError Optional resulting final average robust error, in relation to the defined estimator
1277  * @return The resulting camera orientation, an invalid orientation if no orientation can be determined
1278  */
1279  static inline SquareMatrix3 determineOrientation(const AnyCamera& camera, RandomGenerator& randomGenerator, const ConstIndexedAccessor<ObjectPoint>& objectPoints, const ConstIndexedAccessor<ImagePoint>& imagePoints, const size_t priorityCorrespondences, const SquareMatrix3& roughOrientation = SquareMatrix3(false), const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio = Scalar(1), const Scalar maximalSqrError = Scalar(3.5 * 3.5), Scalar* finalRobustError = nullptr);
1280 
1281  /**
1282  * Determines valid poses for a range of camera frames while for each frame a group of image points is given which correspond to the given object points.
1283  * Two individual camera poses must be known within the range of camera frames.<br>
1284  * @param camera The camera profile defining the projection, must be valid
1285  * @param objectPoints The object points with known locations, each object point has a corresponding image point in the groups of image points
1286  * @param imagePointGroups The groups of image points, each set of image points corresponds to the object points, each group of image points represents one camera pose (the observed object points respectively)
1287  * @param randomGenerator Random number generator
1288  * @param cameraMotion The motion of the camera, use CM_UNKNOWN if the motion is unknown so that 6-DOF poses will be determined
1289  * @param firstValidPoseIndex The index of the frame for which the first pose is known, with range [imagePointGroups.firstIndex(), imagePointGroups.lastIndex()]
1290  * @param firstValidPose The first known pose, must be valid
1291  * @param secondValidPoseIndex The index of the frame for which the second pose is known, with range [imagePointGroups.firstIndex(), imagePointGroups.lastIndex()] with firstValidPoseIndex != secondValidPoseIndex
1292  * @param secondValidPose The second known pose, must be valid
1293  * @param minimalValidCorrespondenceRatio The minimal ratio of valid correspondences (w.r.t. the given object points), if the number of valid correspondences is too low the pose is not valid, with range (0, 1]
1294  * @param maximalSqrError The maximal squared pixel error between a projected object point and the corresponding image point so that the correspondence is valid
1295  * @param validObjectPointIndices Optional resulting indices of the object points which are all valid in all determined valid poses
1296  * @param poses Optional resulting valid poses (corresponding to poseIds)
1297  * @param poseIds Optional resulting ids of all valid poses, each id has a corresponding resulting pose (however the ids themselves have no order)
1298  * @param totalSqrError Optional resulting sum of square pixel errors for all valid poses
1299  * @return The number of valid poses
1300  */
1301  static size_t determineValidPoses(const AnyCamera& camera, const Vectors3& objectPoints, const ImagePointGroups& imagePointGroups, RandomGenerator& randomGenerator, const CameraMotion cameraMotion, const unsigned int firstValidPoseIndex, const HomogenousMatrix4& firstValidPose, const unsigned int secondValidPoseIndex, const HomogenousMatrix4& secondValidPose, const Scalar minimalValidCorrespondenceRatio = Scalar(1), const Scalar maximalSqrError = Scalar(3.5 * 3.5), Indices32* validObjectPointIndices = nullptr, HomogenousMatrices4* poses = nullptr, Indices32* poseIds = nullptr, Scalar* totalSqrError = nullptr);
1302 
1303  /**
1304  * Determines the camera motion from the camera poses within a specified frame range covering only valid poses.
1305  * @param database The database from which the camera poses are taken
1306  * @param pinholeCamera The pinhole camera profile which is applied
1307  * @param lowerFrame The index of the frame defining the lower border of the camera frames which will be investigated
1308  * @param upperFrame The index of the frame defining the upper border of the camera frames which will be investigated, with range [lowerFrame, infinity)
1309  * @param onlyVisibleObjectPoints True, to use only object points which are visible within the defined frame range; False, to use all object points
1310  * @param worker Optional worker object to distribute the computation
1311  * @param minimalTinyTranslationObservationAngle The minimal angle of observation rays for 3D object points so that the motion contains a tiny translational motion, with range (0, PI/2)
1312  * @param minimalModerateTranslationObservationAngle The minimal angle of observation rays for 3D object points so that the motion contains a moderate translational motion, with range (minimalTinyTranslationObservationAngle, PI/2)
1313  * @param minimalSignificantTranslationObservationAngle The minimal angle of observation rays for 3D object points so that the motion contains a significant translational motion, with range (minimalModerateTranslationObservationAngle, PI/2)
1314  * @param minimalTinyRotationAngle The minimal angle between the viewing directions so that the motion contains a tiny rotational motion, with range (0, PI/2)
1315  * @param minimalModerateRotationAngle The minimal angle between the viewing directions so that the motion contains a moderate rotational motion, with range (minimalTinyRotationAngle, PI/2)
1316  * @param minimalSignificantRotationAngle The minimal angle between the viewing directions so that the motion contains a significant rotational motion, with range (minimalModerateRotationAngle, PI/2)
1317  * @return The resulting motion of the camera
1318  */
1319  static CameraMotion determineCameraMotion(const Database& database, const PinholeCamera& pinholeCamera, const unsigned int lowerFrame, const unsigned int upperFrame, const bool onlyVisibleObjectPoints = true, Worker* worker = nullptr, const Scalar minimalTinyTranslationObservationAngle = Numeric::deg2rad(Scalar(0.15)), const Scalar minimalModerateTranslationObservationAngle = Numeric::deg2rad(1), const Scalar minimalSignificantTranslationObservationAngle = Numeric::deg2rad(5), const Scalar minimalTinyRotationAngle = Numeric::deg2rad(Scalar(0.25)), const Scalar minimalModerateRotationAngle = Numeric::deg2rad(5), const Scalar minimalSignificantRotationAngle = Numeric::deg2rad(10));
1320 
1321  /**
1322  * Measures the accuracy of a 3D object point in combination with a set of camera poses and image points (the projections of the object point).
1323  * The accuracy of the point can be determined by individual methods, while the basic idea is to use the angles between the individual observation rays of the object point.<br>
1324  * @param pinholeCamera The pinhole camera profile which is applied
1325  * @param poses The camera poses in which the object point is visible
1326  * @param imagePoints The individual image points in the individual camera frames
1327  * @param observations The number of observations (pairs of camera poses and image points)
1328  * @param accuracyMethod The method which is applied to determine the accuracy, must be valid
1329  * @return The resulting accuracy parameter depending on the specified method
1330  */
1331  static Scalar determineObjectPointAccuracy(const PinholeCamera& pinholeCamera, const HomogenousMatrix4* poses, const Vector2* imagePoints, const size_t observations, const AccuracyMethod accuracyMethod);
1332 
1333  /**
1334  * Measures the accuracy of several 3D object points.
1335  * This method extracts the 3D object point locations from the given database.<br>
1336  * The accuracy of the points can be determined by individual methods, while the basic idea is to use the angles between the individual observation rays of the object points.<br>
1337  * @param database The database providing the location of the 3D object points, the camera poses and the image point positions.<br>
1338  * @param pinholeCamera The pinhole camera profile which is applied
1339  * @param objectPointIds The ids of the object points for which the accuracies will be determined, each object point must be valid
1340  * @param accuracyMethod The method which is applied to determine the accuracy, must be valid
1341  * @param lowerFrame Optional index of the frame defining the lower border of camera poses which will be investigated, -1 if no lower and no upper border is defined
1342  * @param upperFrame Optional index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity), -1 if also 'lowerFrame' is -1
1343  * @param worker Optional worker object to distribute the computation
1344  * @return The resulting accuracy parameters depending on the specified method, one parameter for each object point, an accuracy of -1 indicates an invalid point (e.g., due to too few measurements)
1345  */
1346  static Scalars determineObjectPointsAccuracy(const Database& database, const PinholeCamera& pinholeCamera, const Indices32& objectPointIds, const AccuracyMethod accuracyMethod, const unsigned int lowerFrame = (unsigned int)(-1), const unsigned int upperFrame = (unsigned int)(-1), Worker* worker = nullptr);
1347 
1348  /**
1349  * Determines the projection errors of a 3D object point in combination with a set of camera poses and image points (the projections of the object point).
1350  * @param camera The camera profile defining the projection, must be valid
1351  * @param objectPoint The 3D object point for which the quality will be measured
1352  * @param world_T_cameras The camera poses in which the object point is visible
1353  * @param imagePoints The individual image points in the individual camera frames
1354  * @param minimalSqrError Optional resulting minimal (best) projection error for the object point
1355  * @param averageSqrError Optional resulting averaged projection error for the object point
1356  * @param maximalSqrError Optional resulting maximal (worst) projection error for the object point
1357  */
1358  static void determineProjectionErrors(const AnyCamera& camera, const Vector3& objectPoint, const ConstIndexedAccessor<HomogenousMatrix4>& world_T_cameras, const ConstIndexedAccessor<Vector2>& imagePoints, Scalar* minimalSqrError = nullptr, Scalar* averageSqrError = nullptr, Scalar* maximalSqrError = nullptr);
1359 
1360  /**
1361  * Determines the accuracy of a camera pose for all valid object points visible in the frame by measuring the projection error between the projected object points and their corresponding image points.
1362  * @param database The database providing the locations of the 3D object points, the camera poses and the image points
1363  * @param pinholeCamera The pinhole camera profile which is applied
1364  * @param poseId The id of the camera frame for which the accuracy of the pose will be determined
1365  * @param useDistortionParameters True, to apply the distortion parameter of the camera
1366  * @param validCorrespondences Optional resulting number of valid pose correspondences
1367  * @param minimalSqrError Optional resulting minimal (best) projection error for the pose
1368  * @param averageSqrError Optional resulting averaged projection error for the pose
1369  * @param maximalSqrError Optional resulting maximal (worst) projection error for the pose
1370  * @return True, if the database holds a valid pose for the specified camera frame and at least one valid point correspondence
1371  */
1372  static bool determineProjectionError(const Database& database, const PinholeCamera& pinholeCamera, const Index32 poseId, const bool useDistortionParameters, unsigned int* validCorrespondences = nullptr, Scalar* minimalSqrError = nullptr, Scalar* averageSqrError = nullptr, Scalar* maximalSqrError = nullptr);
1373 
1374  /**
1375  * Determines the averaged and maximal squared pixel errors between the projections of individual 3D object points and their corresponding image points in individual camera frames.
1376  * @param database The database from which the camera poses, the object points and the image points are extracted
1377  * @param pinholeCamera The pinhole camera profile which is applied
1378  * @param objectPointIds The ids of all object points for which the maximal squared pixel errors are determined
1379  * @param useDistortionParameters True, to use the distortion parameters of the camera to distort the projected object points
1380  * @param lowerFrame Optional index of the frame defining the lower border of camera poses which will be investigated, -1 if no lower and no upper border is defined
1381  * @param upperFrame Optional index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity), -1 if also 'lowerFrame' is -1
1382  * @param minimalSqrErrors Optional resulting minimal squared pixel errors, one error for each given object point id, invalid object points or object points without corresponding observation receive Numeric::maxValue() as error
1383  * @param averagedSqrErrors Optional resulting averaged pixel errors, one error for each given object point id, invalid object points or object points without corresponding observation receive Numeric::maxValue() as error
1384  * @param maximalSqrErrors Optional resulting maximal squared pixel errors, one error for each given object point id, invalid object points or object points without corresponding observation receive Numeric::maxValue() as error
1385  * @param observations Optional resulting observations for each object point, one number of observations for each given object point id
1386  * @param worker Optional worker object to distribute the computation
1387  * @return True, if succeeded
1388  */
1389  static bool determineProjectionErrors(const Database& database, const PinholeCamera& pinholeCamera, const Indices32& objectPointIds, const bool useDistortionParameters, const unsigned int lowerFrame = (unsigned int)(-1), const unsigned int upperFrame = (unsigned int)(-1), Scalar* minimalSqrErrors = nullptr, Scalar* averagedSqrErrors = nullptr, Scalar* maximalSqrErrors = nullptr, unsigned int* observations = nullptr, Worker* worker = nullptr);
1390 
1391  /**
1392  * Determines the individual cosine values between the mean coordinate axis of a range of poses and the coordinate axis of the individual poses.
1393  * The specified range of camera pose must cover a range with valid poses.<br>
1394  * @param database The database providing the camera poses
1395  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
1396  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
1397  * @param xOrientations The resulting cosine values for the poses' xAxis, one for each camera pose
1398  * @param yOrientations The resulting cosine values for the poses' yAxis, one for each camera pose
1399  * @param zOrientations The resulting cosine values for the poses' zAxis, one for each camera pose
1400  */
1401  static void determinePosesOrientation(const Database& database, const unsigned int lowerFrame, const unsigned int upperFrame, Scalar* xOrientations, Scalar* yOrientations, Scalar* zOrientations);
1402 
1403  /**
1404  * Determines the number of valid correspondences between image points and object points for each frame within a specified frame range.
1405  * @param database The database providing the 3D object points, the 2D image points and the topology between image and object points
1406  * @param needValidPose True, if the pose must be valid so that the number of valid correspondences will be determined, otherwise the number of correspondences will be zero
1407  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
1408  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
1409  * @param minimalCorrespondences Optional resulting minimal number of correspondences for all frames within the defined frame range
1410  * @param averageCorrespondences Optional resulting averaged number of correspondences for all frames within the defined frame range
1411  * @param medianCorrespondences Optional resulting median of all correspondences for all frames within the defined frame range
1412  * @param maximalCorrespondences Optional resulting maximal number of correspondences for all frames within the defined frame range
1413  * @param worker Optional worker object to distribute the computation
1414  * @return True, if succeeded
1415  */
1416  static bool determineNumberCorrespondences(const Database& database, const bool needValidPose, const unsigned int lowerFrame, const unsigned int upperFrame, unsigned int* minimalCorrespondences = nullptr, Scalar* averageCorrespondences = nullptr, unsigned int* medianCorrespondences = nullptr, unsigned int* maximalCorrespondences = nullptr, Worker* worker = nullptr);
1417 
1418  /**
1419  * Determines a 3D plane best fitting to a set of given 3D object points.
1420  * @param objectPoints The object points for which the best matching plane will be determined, at least 3
1421  * @param randomGenerator Random number generator
1422  * @param plane The resulting 3D plane
1423  * @param minimalValidObjectPoints The minimal number of valid object points so that a valid plane will be determined
1424  * @param estimator The robust estimator which will be applied to determine the 3D plane
1425  * @param finalError Optional resulting final error
1426  * @param validIndices Optional resulting indices of all valid object points
1427  * @return True, if succeeded
1428  */
1429  static inline bool determinePlane(const ConstIndexedAccessor<Vector3>& objectPoints, RandomGenerator& randomGenerator, Plane3& plane, const RelativeThreshold& minimalValidObjectPoints = RelativeThreshold(3u, Scalar(0.5), 20u), const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_HUBER, Scalar* finalError = nullptr, Indices32* validIndices = nullptr);
1430 
1431  /**
1432  * Determines a 3D plane best fitting to a set of given 3D object point ids.
1433  * @param database The database holding the 3D object point locations
1434  * @param objectPointIds The ids of the object points for which the best matching plane will be determined, at least 3, must have valid locations in the database
1435  * @param randomGenerator Random number generator
1436  * @param plane The resulting 3D plane
1437  * @param minimalValidObjectPoints The minimal number of valid object points so that a valid plane will be determined
1438  * @param estimator The robust estimator which will be applied to determine the 3D plane
1439  * @param finalError Optional resulting final error
1440  * @param validIndices Optional resulting indices of all valid object points
1441  * @return True, if succeeded
1442  */
1443  static inline bool determinePlane(const Database& database, const Indices32& objectPointIds, RandomGenerator& randomGenerator, Plane3& plane, const RelativeThreshold& minimalValidObjectPoints = RelativeThreshold(3u, Scalar(0.5), 20u), const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_HUBER, Scalar* finalError = nullptr, Indices32* validIndices = nullptr);
1444 
1445  /**
1446  * Determines a 3D plane best fitting to a set of given 3D object point ids which are specified by a given sub-region in the camera frame.
1447  * @param database The database holding the 3D object point locations
1448  * @param frameIndex The index of the frame in which the plane is visible for which the given sub-region defines the area of image points for which the corresponding object points define the 3D plane, the pose must be valid
1449  * @param subRegion The sub-region which defines the plane area in the camera frame
1450  * @param randomGenerator Random number generator
1451  * @param plane The resulting 3D plane
1452  * @param minimalValidObjectPoints The minimal number of valid object points so that a valid plane will be determined
1453  * @param estimator The robust estimator which will be applied to determine the 3D plane
1454  * @param finalError Optional resulting final error
1455  * @param usedObjectPointIds Optional resulting ids of the used object points
1456  * @return True, if succeeded
1457  */
1458  static bool determinePlane(const Database& database, const Index32 frameIndex, const CV::SubRegion& subRegion, RandomGenerator& randomGenerator, Plane3& plane, const RelativeThreshold& minimalValidObjectPoints = RelativeThreshold(3u, Scalar(0.5), 20u), const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_HUBER, Scalar* finalError = nullptr, Indices32* usedObjectPointIds = nullptr);
1459 
1460  /**
1461  * Determines a 3D plane best fitting to image points in a specified sub-region in a specified frame and best fitting to this area visible in a specified frame range.
1462  * @param database The database holding the 3D object point locations
1463  * @param pinholeCamera The pinhole camera profile defining the projection, must be valid
1464  * @param lowerFrameIndex The index of the frame defining the lower border of camera poses which will be investigated
1465  * @param subRegionFrameIndex The index of the frame for which the sub-region is specified
1466  * @param upperFrameIndex The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
1467  * @param subRegion The sub-region defining the area in the image frame for which the 3D plane will be determined
1468  * @param randomGenerator The random number generator object
1469  * @param plane The resulting 3D plane best fitting for the given data
1470  * @param useDistortionParameters True, to use the distortion parameters of the camera
1471  * @param minimalValidObjectPoints The minimal number of valid 3D points in relation to the 3D object points which are projected into the sub-region in the sub-region frame
1472  * @param medianDistanceFactor The factor with which the median distance between the initial 3D plane and the initial 3D object points is multiplied to determine the maximal distance between the finial plane and any 3D object point which defines the plane, with range (0, infinity)
1473  * @param estimator The robust estimator used to determine the initial plane for the sub-region frame
1474  * @param finalError Optional resulting final square error
1475  * @param usedObjectPointIds Optional resulting ids of all 3D object points which have been used to determine the 3D plane
1476  * @return True, if succeeded
1477  */
1478  static bool determinePlane(const Database& database, const PinholeCamera& pinholeCamera, const unsigned int lowerFrameIndex, const unsigned int subRegionFrameIndex, const unsigned int upperFrameIndex, const CV::SubRegion& subRegion, RandomGenerator& randomGenerator, Plane3& plane, const bool useDistortionParameters, const RelativeThreshold& minimalValidObjectPoints = RelativeThreshold(3u, Scalar(0.5), 20u), const Scalar medianDistanceFactor = Scalar(6), const Geometry::Estimator::EstimatorType estimator = Geometry::Estimator::ET_HUBER, Scalar* finalError = nullptr, Indices32* usedObjectPointIds = nullptr);
1479 
1480  /**
1481  * Determines a 3D plane perpendicular to the camera with specified distance to the camera.
1482  * This function may be used for e.g., rotational camera motion as e.g., initial guess.
1483  * @param database The database holding the 3D object point locations
1484  * @param pinholeCamera The pinhole camera profile defining the projection, must be valid
1485  * @param frameIndex The index of the frame in which the given image point is visible
1486  * @param imagePoint The image point to which (to the viewing ray respectively) the resulting plane will be perpendicular, must lie inside the camera frame
1487  * @param distance The distance of the plane to the camera, with range (0, infinity)
1488  * @param plane The resulting 3D plane best fitting for the given data
1489  * @param useDistortionParameters True, to use the distortion parameters of the camera
1490  * @param pointOnPlane Optional resulting 3D intersection point of resulting plane and the viewing ray of the provided image point
1491  * @return True, if succeeded
1492  */
1493  static bool determinePerpendicularPlane(const Database& database, const PinholeCamera& pinholeCamera, const unsigned int frameIndex, const Vector2& imagePoint, const Scalar distance, Plane3& plane, const bool useDistortionParameters, Vector3* pointOnPlane = nullptr);
1494 
1495  /**
1496  * Determines a 3D plane perpendicular to the camera with specified distance to the camera.
1497  * This function may be used for e.g., rotational camera motion as e.g., initial guess.
1498  * @param pinholeCamera The pinhole camera profile defining the projection, must be valid
1499  * @param pose The pose of the camera, must be valid
1500  * @param imagePoint The image point to which (to the viewing ray respectively) the resulting plane will be perpendicular, must lie inside the camera frame
1501  * @param distance The distance of the plane to the camera, with range (0, infinity)
1502  * @param plane The resulting 3D plane best fitting for the given data
1503  * @param useDistortionParameters True, to use the distortion parameters of the camera
1504  * @param pointOnPlane Optional resulting 3D intersection point of resulting plane and the viewing ray of the provided image point
1505  * @return True, if succeeded
1506  */
1507  static bool determinePerpendicularPlane(const PinholeCamera& pinholeCamera, const HomogenousMatrix4& pose, const Vector2& imagePoint, const Scalar distance, Plane3& plane, const bool useDistortionParameters, Vector3* pointOnPlane = nullptr);
1508 
1509  /**
1510  * Removes very far object points from the database if the amount of object points does not exceed a specified ratio (compared to the remaining object points).
1511  * Optimization functions for camera poses or bundle adjustment functions may fail if the database holds a large set of dense object points and a small number of very sparse object points.<br>
1512  * Thus, this function can be used to improve the 'quality' of a database.
1513  * @param database The database from which the very sparse object points will be removed
1514  * @param minimalBoundingBoxDiagonal the minimal size of the diagonal of the bounding box of the object points so that the database can be modified, with range (0, infinity)
1515  * @param medianFactor The factor which is multiplied with the median distance between the median object point and the object points of the database to identify very sparse (very far) object points
1516  * @param maximalSparseObjectPointRatio The maximal ratio between the very sparse object points and the entire number of object points so that the database will be modified
1517  * @return True, if at least one very sparse object point has been removed from the database
1518  */
1519  static bool removeSparseObjectPoints(Database& database, const Scalar minimalBoundingBoxDiagonal = Scalar(1e+7), const Scalar medianFactor = Scalar(100), const Scalar maximalSparseObjectPointRatio = Scalar(0.05));
1520 
1521  /**
1522  * Removes all valid 3D object points (and their corresponding 2D image points) from the database which are at least in one frame not in front of the camera while having an existing 2D image point as observation.
1523  * @param database The database from which the 3D object points will be removed
1524  * @param removedObjectPointIds Optional resulting ids of all object points which have been removed, nullptr if not of interest
1525  * @return The number of removed 3D object points
1526  */
1527  static size_t removeObjectPointsNotInFrontOfCamera(Database& database, Indices32* removedObjectPointIds = nullptr);
1528 
1529  /**
1530  * Removes any 3D object point (and its corresponding 2D image points) from the database with fewer than a specified number of observations.
1531  * @param database The database from which the 3D object points will be removed
1532  * @param minimalNumberObservations The minimal number of observations a 3D object point must have to stay in the database, with range [1, infinity)
1533  * @param removedObjectPointIds Optional resulting ids of all object points which have been removed, nullptr if not of interest
1534  * @return The number of removed 3D object points
1535  */
1536  static size_t removeObjectPointsWithoutEnoughObservations(Database& database, const size_t minimalNumberObservations, Indices32* removedObjectPointIds = nullptr);
1537 
1538  /**
1539  * Removes any 3D object point (and its corresponding 2D image points) from the database if all their corresponding camera poses are located within a too small bounding box.
1540  * The bounding box is determined based on the translational parts of the camera poses.
1541  * @param database The database from which the 3D object points will be removed
1542  * @param minimalBoxDiagonal The minimal diagonal of the bounding box of all camera poses supporting an object point so that the point stays in the database
1543  * @param removedObjectPointIds Optional resulting ids of all object points which have been removed, nullptr if not of interest
1544  * @return The number of removed 3D object points
1545  */
1546  static size_t removeObjectPointsWithSmallBaseline(Database& database, const Scalar minimalBoxDiagonal, Indices32* removedObjectPointIds = nullptr);
1547 
1548  /**
1549  * Translates a camera motion value to a string providing the detailed motion as readable string.
1550  * @param cameraMotion The camera motion for which a readable string is requested
1551  * @return The readable string of the camera motion
1552  */
1553  static std::string translateCameraMotion(const CameraMotion cameraMotion);
1554 
1555  protected:
1556 
1557  /**
1558  * Determines a subset of perfectly static image points which may be image points located (visible) at static logos in the frames.
1559  * @param imagePointGroups Groups of image points where each group holds the projection of the same 3D object points
1560  * @param objectPointIds The ids of the object points which have the corresponding projected image points in the groups of image points
1561  * @param maximalStaticImagePointFilterRatio The maximal ratio of static image points in relation to the entire number of image points in each group, with range [0, 1]
1562  * @return The number of static image points that have been removed
1563  */
1564  static size_t filterStaticImagePoints(ImagePointGroups& imagePointGroups, Indices32& objectPointIds, const Scalar maximalStaticImagePointFilterRatio);
1565 
1566  /**
1567  * Determines the initial positions of 3D object points in a database if no camera poses or structure information is known.
1568  * This function processes a subset of pre-defined start frames from which the point tracking starts.<br>
1569  * @param database The database defining the topology of 3D object points and corresponding 2D image points
1570  * @param pinholeCamera The pinhole camera profile which will be applied
1571  * @param randomGenerator A random generator object
1572  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
1573  * @param startFrames The entire set of start frames from which a subset will be processed
1574  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated
1575  * @param maximalStaticImagePointFilterRatio The maximal ratio between (perfectly) static image points and the overall number of image points so that these static image points will be filtered and not used, with ratio [0, 1), 0 to avoid any filtering
1576  * @param initialObjectPoints The resulting initial 3D positions of object points that could be extracted
1577  * @param initialObjectPointIds The resulting ids of the resulting object points, one id for each resulting object point
1578  * @param initialPoseIds The resulting ids of all camera poses which have been used to determine the resulting initial object points
1579  * @param initialPointDistance The resulting distance between the image points which have been used to determine the initial object points, which is a measure for the reliability of the resulting 3D object points
1580  * @param pointsThreshold The threshold of image points which must be visible in each camera frame
1581  * @param minimalKeyFrames The minimal number of keyframes that will be extracted
1582  * @param maximalKeyFrames The maximal number of keyframes that will be extracted
1583  * @param maximalSqrError The maximal square distance between an image points and a projected object point
1584  * @param lock The lock object which must be defined if this function is executed in parallel on several threads, otherwise nullptr
1585  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
1586  * @param numberThreads The number of threads on which this function is executed in parallel, with range [1, infinity)
1587  * @param threadIndex The index of the thread on which this function is executed
1588  * @param numberThreadsOne Must be 1
1589  */
1590  static void determineInitialObjectPointsFromSparseKeyFramesByStepsSubset(const Database* database, const PinholeCamera* pinholeCamera, RandomGenerator* randomGenerator, const unsigned int lowerFrame, const Indices32* startFrames, const unsigned int upperFrame, const Scalar maximalStaticImagePointFilterRatio, Vectors3* initialObjectPoints, Indices32* initialObjectPointIds, Indices32* initialPoseIds, Scalar* initialPointDistance, const RelativeThreshold* pointsThreshold, const unsigned int minimalKeyFrames, const unsigned int maximalKeyFrames, const Scalar maximalSqrError, Lock* lock, bool* abort, const unsigned int numberThreads, const unsigned int threadIndex, const unsigned int numberThreadsOne);
1591 
1592  /**
1593  * Determines the initial object point positions for a set of frames (image point groups) observing the unique object points in individual camera poses by a RANSAC algorithm.
1594  * This function applies a RANSAC mechanism randomly selecting individual start key frames (pairs of image points).<br>
1595  * The key frames (image point groups) provide the following topology:<br>
1596  * For n unique object points visible in m individual frames we have n object points (op) and n * m overall image points (ip):
1597  * <pre>
1598  * op_1, op_2, op_3, op_4, ..., op_n
1599  * ...
1600  * dense_pose_2 -> ip_3_1, ip_3_2, ip_3_3, ip_3_4, ..., ip_3_n
1601  * dense_pose_3 -> ip_4_1, ip_4_2, ip_4_3, ip_4_4, ..., ip_4_n
1602  * ...
1603  * </pre>
1604  * @param pinholeCamera The pinhole camera profile to be applied
1605  * @param imagePointGroups Frames of image points, all points in one group are located in the same camera frame and the individual points correspond to the same unique object points
1606  * @param randomGenerator A random generator object
1607  * @param validPoses The resulting poses that could be determined
1608  * @param validPoseIds The ids of resulting valid poses, one id for each resulting valid pose (the order of the ids is arbitrary)
1609  * @param objectPoints The resulting object points that could be determined
1610  * @param validObjectPointIndices The indices of resulting valid object points in relation to the given image point groups
1611  * @param totalError The resulting total error of the best RANSAC iteration
1612  * @param minimalValidObjectPoints The threshold of object points that must be valid
1613  * @param maximalSqrError The maximal square distance between an image point and a projected object point
1614  * @param remainingIterations The number of RANSAC iterations that still need to be applied
1615  * @param lock The lock object which must be defined if this function is executed in parallel on several threads, otherwise nullptr
1616  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
1617  * @param firstIteration The first RANSAC iteration to apply, has no meaning as 'remainingIterations' is used instead
1618  * @param numberIterations The number of RANSAC iterations to apply, has no meaning as 'remainingIterations' is used instead
1619  * @see determineInitialObjectPointsFromDenseFramesRANSAC().
1620  */
1621  static void determineInitialObjectPointsFromDenseFramesRANSACSubset(const PinholeCamera* pinholeCamera, const ImagePointGroups* imagePointGroups, RandomGenerator* randomGenerator, HomogenousMatrices4* validPoses, Indices32* validPoseIds, Vectors3* objectPoints, Indices32* validObjectPointIndices, Scalar* totalError, const RelativeThreshold* minimalValidObjectPoints, const Scalar maximalSqrError, unsigned int* remainingIterations, Lock* lock, bool* abort, unsigned int firstIteration, unsigned int numberIterations);
1622 
1623  /**
1624  * Updates a subset of the camera poses depending on valid 2D/3D points correspondences within a range of camera frames.
1625  * The camera poses will be set to invalid if no valid pose can be determined (e.g., if not enough valid point correspondences are known for a specific camera frame).<br>
1626  * @param database The database from which the point correspondences are extracted and which receives the determined camera poses
1627  * @param camera The camera profile defining the projection, must be valid
1628  * @param randomGenerator Random generator object
1629  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
1630  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
1631  * @param minimalCorrespondences The minimal number of 2D/3D points correspondences which are necessary to determine a valid camera pose, with range [5, infinity)
1632  * @param estimator The robust estimator which is applied for the non-linear pose optimization
1633  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
1634  * @param ransacMaximalSqrError The maximal squared pixel error between image point and projected object points for RANSAC iterations, with range (0, infinity)
1635  * @param maximalRobustError The maximal average robust pixel error between image point and projected object points so that a pose counts as valid, with range (0, infinity)
1636  * @param totalError The resulting accumulated total error for all poses
1637  * @param validPoses The resulting number of valid poses
1638  * @param lock The lock object which must be defined if this function is executed in parallel on several individual threads
1639  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
1640  * @param numberThreads The overall number of threads which are used in parallel
1641  * @param threadIndex The index of the thread executing this function, with range [0, numberThreads)
1642  * @param numberThreadsOne Must be 1
1643  */
1644  static void updatePosesSubset(Database* database, const AnyCamera* camera, RandomGenerator* randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const unsigned int minimalCorrespondences, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar ransacMaximalSqrError, const Scalar maximalRobustError, Scalar* totalError, size_t* validPoses, Lock* lock, bool* abort, const unsigned int numberThreads, const unsigned int threadIndex, const unsigned int numberThreadsOne);
1645 
1646  /**
1647  * Updates a subset of the camera orientations (as the camera has rotational motion only) depending on valid 2D/3D points correspondences within a range of camera frames.
1648  * The camera orientations (their poses respectively) will be set to invalid if no valid orientation can be determined (e.g., if not enough valid point correspondences are known for a specific camera frame).<br>
1649  * @param database The database from which the point correspondences are extracted and which receives the determined camera orientations (the 6-DOF poses with zero translation)
1650  * @param camera The camera profile defining the projection, must be valid
1651  * @param randomGenerator Random generator object
1652  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
1653  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
1654  * @param minimalCorrespondences The minimal number of 2D/3D points correspondences which are necessary to determine a valid camera orientations, with range [5, infinity)
1655  * @param estimator The robust estimator which is applied for the non-linear orientation optimization
1656  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
1657  * @param ransacMaximalSqrError The maximal squared pixel error between image point and projected object points for RANSAC iterations, with range (0, infinity)
1658  * @param maximalRobustError The maximal average robust pixel error between image point and projected object points so that an orientation counts as valid, with range (0, infinity)
1659  * @param totalError The resulting accumulated total error for all poses (orientations)
1660  * @param validPoses The resulting number of valid poses (orientations)
1661  * @param lock The lock object which must be defined if this function is executed in parallel on several individual threads
1662  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
1663  * @param numberThreads The overall number of threads which are used in parallel
1664  * @param threadIndex The index of the thread executing this function, with range [0, numberThreads)
1665  * @param numberThreadsOne Must be 1
1666  */
1667  static void updateOrientationsSubset(Database* database, const AnyCamera* camera, RandomGenerator* randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const unsigned int minimalCorrespondences, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar ransacMaximalSqrError, const Scalar maximalRobustError, Scalar* totalError, size_t* validPoses, Lock* lock, bool* abort, const unsigned int numberThreads, const unsigned int threadIndex, const unsigned int numberThreadsOne);
1668 
1669  /**
1670  * Determines a subset of the camera poses depending on valid 2D/3D points correspondences within a range of camera frames.
1671  * The camera poses will be set to invalid if no valid pose can be determined (e.g., if not enough valid point correspondences are known for a specific camera frame).<br>
1672  * @param database The database from which the point correspondences are extracted and which receives the determined camera poses
1673  * @param camera The camera profile defining the projection, must be valid
1674  * @param priorityObjectPointIds Optional ids of the object points for which the poses will be optimized, may be nullptr so that all object points are investigated with the same priority
1675  * @param solePriorityPoints True, to apply only the priority object points for pose determination
1676  * @param randomGenerator Random generator object
1677  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
1678  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
1679  * @param minimalCorrespondences The minimal number of 2D/3D points correspondences which are necessary to determine a valid camera pose, with range [5, infinity)
1680  * @param poses The resulting determined poses starting with the lower frame and ending with the upper frame
1681  * @param estimator The robust estimator which is applied for the non-linear pose optimization
1682  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
1683  * @param ransacMaximalSqrError The maximal squared pixel error between image point and projected object points for RANSAC iterations, with range (0, infinity)
1684  * @param maximalRobustError The maximal average robust pixel error between image point and projected object points so that a pose counts as valid, with range (0, infinity)
1685  * @param totalError The resulting accumulated total error for all poses
1686  * @param lock The lock object which must be defined if this function is executed in parallel on several individual threads
1687  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
1688  * @param numberThreads The overall number of threads which are used in parallel
1689  * @param threadIndex The index of the thread executing this function, with range [0, numberThreads)
1690  * @param numberThreadsOne Must be 1
1691  */
1692  static void determinePosesSubset(const Database* database, const AnyCamera* camera, const IndexSet32* priorityObjectPointIds, const bool solePriorityPoints, RandomGenerator* randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const unsigned int minimalCorrespondences, ShiftVector<HomogenousMatrix4>* poses, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar ransacMaximalSqrError, const Scalar maximalRobustError, Scalar* totalError, Lock* lock, bool* abort, const unsigned int numberThreads, const unsigned int threadIndex, const unsigned int numberThreadsOne);
1693 
1694  /**
1695  * Determines a subset of the camera orientations (as the camera has rotational motion only) depending on valid 2D/3D points correspondences within a range of camera frames.
1696  * The camera orientations (their poses respectively) will be set to invalid if no valid orientation can be determined (e.g., if not enough valid point correspondences are known for a specific camera frame).<br>
1697  * @param database The database from which the point correspondences are extracted and which receives the determined camera orientations (the 6-DOF poses with zero translation)
1698  * @param camera The camera profile defining the projection, must be valid
1699  * @param priorityObjectPointIds Optional ids of the object points for which the poses will be optimized, may be nullptr so that all object points are investigated with the same priority
1700  * @param solePriorityPoints True, to apply only the priority object points for pose determination
1701  * @param randomGenerator Random generator object
1702  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
1703  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
1704  * @param minimalCorrespondences The minimal number of 2D/3D points correspondences which are necessary to determine a valid camera orientations, with range [5, infinity)
1705  * @param poses The resulting determined poses starting with the lower frame and ending with the upper frame
1706  * @param estimator The robust estimator which is applied for the non-linear orientation optimization
1707  * @param minimalValidCorrespondenceRatio The ratio of the minimal number of valid correspondences (the valid correspondences will be determined from a RANSAC iteration), with range [0, 1]
1708  * @param ransacMaximalSqrError The maximal squared pixel error between image point and projected object points for RANSAC iterations, with range (0, infinity)
1709  * @param maximalRobustError The maximal average robust pixel error between image point and projected object points so that an orientation counts as valid, with range (0, infinity)
1710  * @param totalError The resulting accumulated total error for all poses (orientations)
1711  * @param lock The lock object which must be defined if this function is executed in parallel on several individual threads
1712  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
1713  * @param numberThreads The overall number of threads which are used in parallel
1714  * @param threadIndex The index of the thread executing this function, with range [0, numberThreads)
1715  * @param numberThreadsOne Must be 1
1716  */
1717  static void determineOrientationsSubset(const Database* database, const AnyCamera* camera, const IndexSet32* priorityObjectPointIds, const bool solePriorityPoints, RandomGenerator* randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const unsigned int minimalCorrespondences, ShiftVector<HomogenousMatrix4>* poses, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar ransacMaximalSqrError, const Scalar maximalRobustError, Scalar* totalError, Lock* lock, bool* abort, const unsigned int numberThreads, const unsigned int threadIndex, const unsigned int numberThreadsOne);
1718 
1719  /**
1720  * Determines the semi-precise location of 3D object points and the camera poses for a sole rotational camera motion.
1721  * The locations and the camera poses may not match with a pure rotational camera motion before.<br>
1722  * Only object points with an already valid location will receive a precise location matching to the rotational motion.<br>
1723  * Only valid camera poses will receive a precise pose matching to the rotational motion.
1724  * @param database The database providing already known locations of 3D object points (may not match with a sole rotational camera motion), already known valid camera poses (may also not match with a sole rotational camera motion)
1725  * @param pinholeCamera The pinhole camera profile defining the projection
1726  * @param randomGenerator Random generator object
1727  * @param lowerFrame The index of the frame defining the lower border of camera poses which will be investigated
1728  * @param upperFrame The index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity)
1729  * @param minimalObservations The minimal number of observations a 3D object point must have so that the position of the object point will be optimized, with range [0, infinity)
1730  * @param relocatedObjectPointIds Optional resulting ids of all object points which have been relocated, nullptr if not of interest
1731  * @return True, if succeeded
1732  */
1733  static bool updateDatabaseToRotationalMotion(Database& database, const PinholeCamera& pinholeCamera, RandomGenerator& randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const unsigned int minimalObservations, IndexSet32* relocatedObjectPointIds);
1734 
1735  /**
1736  * Determines the positions of new object points from a database within a specified frame range.
1737  * @param camera The camera profile defining the projection, must be valid
1738  * @param database The database from which the object point and image point correspondences are extracted
1739  * @param objectPointsData The data holding groups of pose ids and image point ids for each individual object point
1740  * @param randomGenerator Random generator object to be used for creating random numbers, must be defined
1741  * @param maximalSqrError The maximal squared error between a projected 3D object point and an image point so that the combination of object point and image point count as valid
1742  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
1743  * @param lock The lock object which must be defined if this function is invoked in parallel
1744  * @param newObjectPoints The resulting positions of new object points
1745  * @param newObjectPointIds The resulting ids of the new object points, each id corresponds with a position from 'newObjectPoints'
1746  * @param firstObjectPoint The first object point to be handled, with range [0, numberObjectPoints)
1747  * @param numberObjectPoints The number of object points to be handled, with range [0, objectPointsData->size()]
1748  */
1749  static void determineUnknownObjectPointsSubset(const AnyCamera* camera, const Database* database, const Database::PoseImagePointTopologyGroups* objectPointsData, RandomGenerator* randomGenerator, const Scalar maximalSqrError, bool* abort, Lock* lock, Vectors3* newObjectPoints, Indices32* newObjectPointIds, unsigned int firstObjectPoint, unsigned int numberObjectPoints);
1750 
1751  /**
1752  * Determines the positions of a subset of (currently unknown) object points.
1753  * @param database The database from which the object point, image point and pose information is extracted
1754  * @param camera The camera profile defining the projection, must be valid
1755  * @param cameraMotion The motion of the camera, can be CM_ROTATIONAL or CM_TRANSLATIONAL
1756  * @param objectPointIds The ids of all (currently unknown) object points for which a 3D position will be determined, must all be valid
1757  * @param newObjectPoints The resulting 3D location of the new object points
1758  * @param newObjectPointIds The ids of the resulting new object points, one id for each resulting new object point
1759  * @param newObjectPointObservations Optional resulting number of observations for each resulting new object point, one number for each resulting new object point
1760  * @param randomGenerator Random generator object to be used for creating random numbers, must be defined
1761  * @param minimalObservations The minimal number of observations for each new object points which are necessary to determine the 3D location
1762  * @param useAllObservations True, to use all observations (with valid camera pose) to determine the 3D locations; False, to apply a RANSAC mechanism taking a subset of all observations to determine the 3D locations
1763  * @param estimator The robust estimator which is applied during optimization of each individual new 3D location, must be defined
1764  * @param ransacMaximalSqrError The maximal squared projection error between a new 3D object point and the corresponding image points for the RANSAC mechanism
1765  * @param averageRobustError The (average) robust error for a new 3D object point after optimization of the 3D location
1766  * @param maximalSqrError The maximal error for a new valid 3D object point after optimization of the 3D location
1767  * @param lock The lock object which must be defined if this function is executed in parallel on individual threads
1768  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
1769  * @param firstObjectPoint First object point to be handled
1770  * @param numberObjectPoints Number of object points to be handled
1771  */
1772  static void determineUnknownObjectPointsSubset(const Database* database, const AnyCamera* camera, const CameraMotion cameraMotion, const Index32* objectPointIds, Vectors3* newObjectPoints, Indices32* newObjectPointIds, Indices32* newObjectPointObservations, RandomGenerator* randomGenerator, const unsigned int minimalObservations, const bool useAllObservations, const Geometry::Estimator::EstimatorType estimator, const Scalar ransacMaximalSqrError, const Scalar averageRobustError, const Scalar maximalSqrError, Lock* lock, bool* abort, const unsigned int firstObjectPoint, const unsigned int numberObjectPoints);
1773 
1774  /**
1775  * Optimizes a subset of a set of 3D object points which have a quite good accuracy already without optimizing the camera poses concurrently.
1776  * The database must hold the valid initial 3D object positions and must hold valid camera poses.<br>
1777  * @param database The database from which the initial 3D object point positions and the individual camera poses (in which the object points are visible) are extracted
1778  * @param pinholeCamera The pinhole camera profile to be applied
1779  * @param cameraMotion The motion of the camera, CM_ROTATIONAL if the camera poses do not have a translational part, CM_TRANSLATIONAL otherwise
1780  * @param objectPointIds The ids of the object points for which the positions will be optimized (all points must have already initial 3D positions)
1781  * @param optimizedObjectPoints The resulting positions of the optimized object points
1782  * @param optimizedObjectPointIds The ids of the optimized object points, one id for each positions in 'optimizedObjectPoints'
1783  * @param minimalObservations The minimal number of observations a 3D object point must have so that the position of the object point will be optimized
1784  * @param estimator The robust estimator which is applied to determine the projection error between 3D object point positions and the image points in individual camera frames
1785  * @param maximalRobustError The maximal error between a projected object point and the individual image points; beware the error must be defined w.r.t. the selected estimator
1786  * @param lock Optional lock object ensuring a safe distribution of the computation, must be defined if this function is executed in parallel
1787  * @param abort Optional abort statement allowing to stop the execution; True, if the execution has to stop
1788  * @param firstObjectPoint First object point to be handled
1789  * @param numberObjectPoints The number of object points to be handled
1790  */
1791  static void optimizeObjectPointsWithFixedPosesSubset(const Database* database, const PinholeCamera* pinholeCamera, const CameraMotion cameraMotion, const Index32* objectPointIds, Vectors3* optimizedObjectPoints, Indices32* optimizedObjectPointIds, const unsigned int minimalObservations, const Geometry::Estimator::EstimatorType estimator, const Scalar maximalRobustError, Lock* lock, bool* abort, const unsigned int firstObjectPoint, const unsigned int numberObjectPoints);
1792 
1793  /**
1794  * Measures the accuracy of a subset of several 3D object points.
1795  * @param database The database providing the location of the 3D object points, the camera poses and the image point positions.<br>
1796  * @param pinholeCamera The pinhole camera profile which is applied
1797  * @param objectPointIds The ids of the object points for which the accuracies will be determined, each object point must be valid
1798  * @param accuracyMethod The method which is applied to determine the accuracy
1799  * @param lowerFrame Optional index of the frame defining the lower border of camera poses which will be investigated, -1 if no lower and no upper border is defined
1800  * @param upperFrame Optional index of the frame defining the upper border of camera poses which will be investigated, with range [lowerFrame, infinity), -1 if also 'lowerFrame' is -1
1801  * @param values The resulting accuracy parameters depending on the specified method, one parameter of each object point
1802  * @param firstObjectPoint First object point to be handled
1803  * @param numberObjectPoints The number of object points to be handled
1804  * @see measureObjectPointsAccuracy().
1805  */
1806  static void determineObjectPointsAccuracySubset(const Database* database, const PinholeCamera* pinholeCamera, const Index32* objectPointIds, const AccuracyMethod accuracyMethod, const unsigned int lowerFrame, const unsigned int upperFrame, Scalar* values, const unsigned int firstObjectPoint, const unsigned int numberObjectPoints);
1807 
1808  /**
1809  * Determines the maximal squared pixel errors between the projections of a subset of individual 3D object points and their corresponding image points in individual camera frames.
1810  * @param database The database from which the camera poses, the object points and the image points are extracted
1811  * @param pinholeCamera The pinhole camera profile which is applied
1812  * @param objectPointIds The ids of all object points for which the maximal squared pixel errors are determined
1813  * @param posesIF The inverted and flipped poses of all camera frames which will be investigated, the poses can be valid or invalid, the first pose is the camera pose for the frame with id 'lowerPoseId'
1814  * @param lowerPoseId The id of the first provided pose
1815  * @param upperPoseId The id of the last provided pose, thus posesIF must store (upperPoseId - lowerPoseId + 1) poses
1816  * @param useDistortionParameters True, to use the distortion parameters of the camera to distort the projected object points
1817  * @param minimalSqrErrors Optional resulting minimal squared pixel errors, one error for each given object point id, invalid object points or object points without corresponding observation receive Numeric::maxValue() as error
1818  * @param averagedSqrErrors Optional resulting averaged squared pixel errors, one error for each given object point id, invalid object points or object points without corresponding observation receive Numeric::maxValue() as error
1819  * @param maximalSqrErrors Optional resulting maximal squared pixel errors, one error for each given object point id, invalid object points or object points without corresponding observation receive Numeric::maxValue() as error
1820  * @param observations Optional resulting observations for each object point, one number of observations for each given object point id
1821  * @param firstObjectPoint The first object point to handle
1822  * @param numberObjectPoints The number of object points to handle
1823  */
1824  static void determineProjectionErrorsSubset(const Database* database, const PinholeCamera* pinholeCamera, const Index32* objectPointIds, const HomogenousMatrix4* posesIF, const Index32 lowerPoseId, const unsigned int upperPoseId, const bool useDistortionParameters, Scalar* minimalSqrErrors, Scalar* averagedSqrErrors, Scalar* maximalSqrErrors, unsigned int* observations, const unsigned int firstObjectPoint, const unsigned int numberObjectPoints);
1825 
1826  /**
1827  * Determines the average distance between the center of a set of given points and each of the points.
1828  * @param points The set of points for which the average distance will be determined, must be valid
1829  * @param size The number of points in the set, with range [1, infinity)
1830  * @return The average distance between the points and their center
1831  */
1832  static Scalar averagePointDistance(const Vector2* points, const size_t size);
1833 };
1834 
// Creates a new relative threshold object by simply storing the given lower boundary, factor and upper boundary; no validation is performed here.
1835 inline Solver3::RelativeThreshold::RelativeThreshold(const unsigned int lowerBoundary, const Scalar factor, const unsigned int upperBoundary) :
1836  thresholdLowerBoundary(lowerBoundary),
1837  thresholdFactor(factor),
1838  thresholdUpperBoundary(upperBoundary)
1839 {
1840  // nothing to do here
1841 }
1842 
1844 {
1845  return thresholdLowerBoundary;
1846 }
1847 
1849 {
1850  return thresholdFactor;
1851 }
1852 
1854 {
1855  return thresholdUpperBoundary;
1856 }
1857 
1858 inline unsigned int Solver3::RelativeThreshold::threshold(const unsigned int value) const
1859 {
1860  return min(minmax<unsigned int>(thresholdLowerBoundary, (unsigned int)Numeric::round32(Scalar(value) * thresholdFactor), thresholdUpperBoundary), value);
1861 }
1862 
1863 inline bool Solver3::RelativeThreshold::hasValidThreshold(const unsigned int value, unsigned int* threshold) const
1864 {
1865  const unsigned int result = min(minmax<unsigned int>(thresholdLowerBoundary, (unsigned int)Numeric::round32(Scalar(value) * thresholdFactor), thresholdUpperBoundary), value);
1866 
1867  if (value < thresholdLowerBoundary)
1868  return false;
1869 
1870  ocean_assert(result <= value);
1871  ocean_assert(result >= thresholdLowerBoundary);
1872  ocean_assert(result <= thresholdUpperBoundary);
1873 
1874  if (threshold)
1875  *threshold = result;
1876 
1877  return true;
1878 }
1879 
1880 template <unsigned int tLowerBoundary>
1881 inline bool Solver3::RelativeThreshold::hasValidThreshold(const unsigned int value, unsigned int* threshold) const
1882 {
1883  const unsigned int result = min(minmax<unsigned int>(max(thresholdLowerBoundary, tLowerBoundary), (unsigned int)Numeric::round32(Scalar(value) * thresholdFactor), thresholdUpperBoundary), value);
1884 
1885  if (value < tLowerBoundary || value < thresholdLowerBoundary)
1886  return false;
1887 
1888  ocean_assert(result <= value);
1889  ocean_assert(result >= thresholdLowerBoundary);
1890  ocean_assert(result <= thresholdUpperBoundary);
1891 
1892  if (threshold)
1893  *threshold = result;
1894 
1895  return true;
1896 }
1897 
1899  PoseGroupsAccessor(accessor.elementGroups_)
1900 {
1901  // nothing to do here
1902 }
1903 
1905  PoseGroupsAccessor(std::move(accessor.elementGroups_))
1906 {
1907  // nothing to do here
1908 }
1909 
1911 {
1912  PoseGroupsAccessor::operator=(accessor);
1913  return *this;
1914 }
1915 
1917 {
1918  PoseGroupsAccessor::operator=(std::move(accessor));
1919  return *this;
1920 }
1921 
1923  ObjectPointGroupsAccessor(accessor.elementGroups_)
1924 {
1925  // nothing to do here
1926 }
1927 
1929  ObjectPointGroupsAccessor(std::move(accessor.elementGroups_))
1930 {
1931  // nothing to do here
1932 }
1933 
1935 {
1936  elementGroups_.reserve(validGroupIndices.size());
1937 
1938  for (Indices32::const_iterator i = validGroupIndices.begin(); i != validGroupIndices.end(); ++i)
1939  {
1940  ocean_assert(*i < accessor.elementGroups_.size());
1941  elementGroups_.emplace_back(std::move(accessor.elementGroups_[*i]));
1942  }
1943 }
1944 
1946 {
1947  ObjectPointGroupsAccessor::operator=(accessor);
1948  return *this;
1949 }
1950 
1952 {
1953  ObjectPointGroupsAccessor::operator=(std::move(accessor));
1954  return *this;
1955 }
1956 
1958  ObjectPointGroupsAccessor(accessor.elementGroups_)
1959 {
1960  // nothing to do here
1961 }
1962 
1964  ObjectPointGroupsAccessor(std::move(accessor.elementGroups_))
1965 {
1966  // nothing to do here
1967 }
1968 
1970 {
1971  ObjectPointGroupsAccessor::operator=(accessor);
1972  return *this;
1973 }
1974 
1976 {
1977  ObjectPointGroupsAccessor::operator=(std::move(accessor));
1978  return *this;
1979 }
1980 
1981 inline bool Solver3::determineUnknownObjectPoints(const Database& database, const AnyCamera& camera, const CameraMotion cameraMotion, Vectors3& newObjectPoints, Indices32& newObjectPointIds, RandomGenerator& randomGenerator, Indices32* newObjectPointObservations, const Scalar minimalObjectPointPriority, const unsigned int minimalObservations, const bool useAllObservations, const Geometry::Estimator::EstimatorType estimator, const Scalar ransacMaximalSqrError, const Scalar averageRobustError, const Scalar maximalSqrError, Worker* worker, bool* abort)
1982 {
1983  ocean_assert(cameraMotion != CM_INVALID);
1984 
1985  const Indices32 invalidObjectPointIds = database.objectPointIds<false, true>(Vector3(Numeric::minValue(), Numeric::minValue(), Numeric::minValue()), nullptr, minimalObjectPointPriority);
1986 
1987  return determineUnknownObjectPoints(database, camera, cameraMotion, invalidObjectPointIds, newObjectPoints, newObjectPointIds, randomGenerator, newObjectPointObservations, minimalObservations, useAllObservations, estimator, ransacMaximalSqrError, averageRobustError, maximalSqrError, worker, abort);
1988 }
1989 
1990 template <bool tVisibleInAllPoses>
1991 inline bool Solver3::determineUnknownObjectPoints(const Database& database, const AnyCamera& camera, const CameraMotion cameraMotion, const Index32 lowerPoseId, const Index32 upperPoseId, Vectors3& newObjectPoints, Indices32& newObjectPointIds, RandomGenerator& randomGenerator, Indices32* newObjectPointObservations, const Scalar minimalObjectPointPriority, const unsigned int minimalObservations, const bool useAllObservations, const Geometry::Estimator::EstimatorType estimator, const Scalar ransacMaximalSqrError, const Scalar averageRobustError, const Scalar maximalSqrError, Worker* worker, bool* abort)
1992 {
1993  ocean_assert(cameraMotion != CM_INVALID);
1994  ocean_assert(lowerPoseId <= upperPoseId);
1995 
1996  const Indices32 invalidObjectPointIds = database.objectPointIds<false, true, tVisibleInAllPoses>(lowerPoseId, upperPoseId, Vector3(Numeric::minValue(), Numeric::minValue(), Numeric::minValue()), minimalObjectPointPriority);
1997 
1998  return determineUnknownObjectPoints(database, camera, cameraMotion, invalidObjectPointIds, newObjectPoints, newObjectPointIds, randomGenerator, newObjectPointObservations, minimalObservations, useAllObservations, estimator, ransacMaximalSqrError, averageRobustError, maximalSqrError, worker, abort);
1999 }
2000 
2001 inline HomogenousMatrix4 Solver3::determinePose(const Database& database, const AnyCamera& camera, RandomGenerator& randomGenerator, const unsigned int frameId, const HomogenousMatrix4& roughPose, const unsigned int minimalCorrespondences, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar maximalSqrError, Scalar* finalRobustError, unsigned int* correspondences)
2002 {
2003  ocean_assert(camera.isValid());
2004 
2005  Vectors2 imagePoints;
2006  Vectors3 objectPoints;
2007  database.imagePointsObjectPoints<false, false>(frameId, imagePoints, objectPoints, Vector3(Numeric::minValue(), Numeric::minValue(), Numeric::minValue()));
2008  ocean_assert(imagePoints.size() == objectPoints.size());
2009 
2010  if (correspondences != nullptr)
2011  {
2012  *correspondences = (unsigned int)imagePoints.size();
2013  }
2014 
2015  // check whether enough points correspondences could be found
2016  if (imagePoints.size() < minimalCorrespondences)
2017  {
2018  return HomogenousMatrix4(false);
2019  }
2020 
2021  return determinePose(camera, randomGenerator, ConstArrayAccessor<Vector3>(objectPoints), ConstArrayAccessor<Vector2>(imagePoints), roughPose, estimator, minimalValidCorrespondenceRatio, maximalSqrError, finalRobustError);
2022 }
2023 
2024 inline HomogenousMatrix4 Solver3::determinePose(const Database& database, const AnyCamera& camera, RandomGenerator& randomGenerator, const unsigned int frameId, const IndexSet32& priorityObjectPointIds, const bool solePriorityPoints, const HomogenousMatrix4& roughPose, const unsigned int minimalCorrespondences, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar maximalSqrError, Scalar* finalRobustError, unsigned int* correspondences)
2025 {
2026  ocean_assert(camera.isValid());
2027 
2028  ocean_assert(!priorityObjectPointIds.empty());
2029 
2030  Vectors2 priorityImagePoints, remainingImagePoints;
2031  Vectors3 priorityObjectPoints, remainingObjectPoints;
2032  database.imagePointsObjectPoints<false, false>(frameId, priorityObjectPointIds, priorityImagePoints, priorityObjectPoints, remainingImagePoints, remainingObjectPoints, Vector3(Numeric::minValue(), Numeric::minValue(), Numeric::minValue()));
2033  ocean_assert(priorityImagePoints.size() == priorityObjectPoints.size());
2034  ocean_assert(remainingImagePoints.size() == remainingObjectPoints.size());
2035 
2036  if (solePriorityPoints)
2037  {
2038  if (correspondences != nullptr)
2039  {
2040  *correspondences = (unsigned int)priorityImagePoints.size();
2041  }
2042 
2043  if (priorityImagePoints.size() < minimalCorrespondences)
2044  {
2045  return HomogenousMatrix4(false);
2046  }
2047 
2048  return determinePose(camera, randomGenerator, ConstArrayAccessor<Vector3>(priorityObjectPoints), ConstArrayAccessor<Vector2>(priorityImagePoints), roughPose, estimator, minimalValidCorrespondenceRatio, maximalSqrError, finalRobustError);
2049  }
2050  else
2051  {
2052  if (correspondences != nullptr)
2053  {
2054  *correspondences = (unsigned int)(priorityImagePoints.size() + remainingImagePoints.size());
2055  }
2056 
2057  // check whether enough points correspondences could be found
2058  if (priorityImagePoints.size() + remainingImagePoints.size() < minimalCorrespondences)
2059  {
2060  return HomogenousMatrix4(false);
2061  }
2062 
2063  const size_t priorityCorrespondences = priorityImagePoints.size();
2064 
2065  priorityImagePoints.insert(priorityImagePoints.end(), remainingImagePoints.begin(), remainingImagePoints.end());
2066  priorityObjectPoints.insert(priorityObjectPoints.end(), remainingObjectPoints.begin(), remainingObjectPoints.end());
2067 
2068  return determinePose(camera, randomGenerator, ConstArrayAccessor<Vector3>(priorityObjectPoints), ConstArrayAccessor<Vector2>(priorityImagePoints), priorityCorrespondences, roughPose, estimator, minimalValidCorrespondenceRatio, maximalSqrError, finalRobustError);
2069  }
2070 }
2071 
2072 inline HomogenousMatrix4 Solver3::determinePose(const Database& database, const AnyCamera& camera, RandomGenerator& randomGenerator, const unsigned int frameId, const ConstIndexedAccessor<ObjectPoint>& objectPoints, const ConstIndexedAccessor<Index32>& objectPointIds, const HomogenousMatrix4& roughPose, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar maximalSqrError, Scalar* finalRobustError)
2073 {
2074  const ScopedConstMemoryAccessor<Index32> scopedObjectPointIdMemoryAccessor(objectPointIds);
2075 
2076  Indices32 validIndices;
2077  const Vectors2 imagePoints = database.imagePointsFromObjectPoints<false>(frameId, scopedObjectPointIdMemoryAccessor.data(), scopedObjectPointIdMemoryAccessor.size(), validIndices);
2078  ocean_assert(scopedObjectPointIdMemoryAccessor.size() == validIndices.size());
2079 
2080  return determinePose(camera, randomGenerator, objectPoints, ConstArrayAccessor<ImagePoint>(imagePoints), roughPose, estimator, minimalValidCorrespondenceRatio, maximalSqrError, finalRobustError);
2081 }
2082 
2083 inline HomogenousMatrix4 Solver3::determinePose(const AnyCamera& camera, RandomGenerator& randomGenerator, const ConstIndexedAccessor<ObjectPoint>& objectPoints, const ConstIndexedAccessor<ImagePoint>& imagePoints, const HomogenousMatrix4& roughPose, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar maximalSqrError, Scalar* finalRobustError, Indices32* validIndices)
2084 {
2085  ocean_assert(camera.isValid());
2086  ocean_assert(objectPoints.size() == imagePoints.size());
2087  ocean_assert(minimalValidCorrespondenceRatio >= 0 && minimalValidCorrespondenceRatio <= 1);
2088 
2089  HomogenousMatrix4 previousPose(roughPose);
2090 
2091  // **TODO** as now the p3p works fine, and as we have the refinement step we can skip the explicit refinement if p3p is used
2092 
2093  Indices32 internalValidIndices;
2094  if (!previousPose.isValid() || minimalValidCorrespondenceRatio < 1)
2095  {
2096  Geometry::RANSAC::p3p(camera, objectPoints, imagePoints, randomGenerator, previousPose, 5u, true, 50u, maximalSqrError, &internalValidIndices);
2097  }
2098 
2099  // check whether we did not receive enough valid correspondences from the RANSAC, however if the difference is 2 we accept the pose as in this case the ratio may provide wrong results
2100  if (minimalValidCorrespondenceRatio < 1 && Scalar(internalValidIndices.size()) < Scalar(objectPoints.size()) * minimalValidCorrespondenceRatio && objectPoints.size() - internalValidIndices.size() > 2)
2101  {
2102  return HomogenousMatrix4(false);
2103  }
2104 
2105  HomogenousMatrix4 currentPose(false);
2106  if (previousPose.isValid())
2107  {
2108  if (minimalValidCorrespondenceRatio < 1 && internalValidIndices.size() != objectPoints.size())
2109  {
2110  Geometry::NonLinearOptimizationPose::optimizePose(camera, previousPose, ConstIndexedAccessorSubsetAccessor<Vector3, Index32>(objectPoints, internalValidIndices), ConstIndexedAccessorSubsetAccessor<Vector2, Index32>(imagePoints, internalValidIndices),currentPose, 20u, estimator, Scalar(0.001), Scalar(5), nullptr, finalRobustError);
2111 
2112  if (validIndices)
2113  {
2114  *validIndices = std::move(internalValidIndices);
2115  }
2116  }
2117  else
2118  {
2119  Geometry::NonLinearOptimizationPose::optimizePose(camera, previousPose, objectPoints, imagePoints, currentPose, 20u, estimator, Scalar(0.001), Scalar(5), nullptr, finalRobustError);
2120 
2121  if (validIndices)
2122  {
2123  *validIndices = createIndices(objectPoints.size(), 0u);
2124  }
2125  }
2126  }
2127 
2128  return currentPose;
2129 }
2130 
2131 inline HomogenousMatrix4 Solver3::determinePose(const AnyCamera& camera, RandomGenerator& randomGenerator, const ConstIndexedAccessor<ObjectPoint>& objectPoints, const ConstIndexedAccessor<ImagePoint>& imagePoints, const size_t priorityCorrespondences, const HomogenousMatrix4& roughPose, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar maximalSqrError, Scalar* finalRobustError)
2132 {
2133  ocean_assert(camera.isValid());
2134  ocean_assert(objectPoints.size() == imagePoints.size());
2135  ocean_assert(minimalValidCorrespondenceRatio >= 0 && minimalValidCorrespondenceRatio <= 1);
2136 
2137  HomogenousMatrix4 previousPose(roughPose);
2138 
2139  // **TODO** as now the p3p works fine, and as we have the refinement step we can skip the explicit refinement if p3p is used
2140 
2141  Indices32 validIndices;
2142  if (!previousPose.isValid() || minimalValidCorrespondenceRatio < 1)
2143  {
2144  Geometry::RANSAC::p3p(camera, objectPoints, imagePoints, randomGenerator, previousPose, 5u, true, 50u, maximalSqrError, &validIndices);
2145  }
2146 
2147  // check whether we did not receive enough valid correspondences from the RANSAC, however if the difference is 2 we accept the pose as in this case the ratio may provide wrong results
2148  if (minimalValidCorrespondenceRatio < 1 && Scalar(validIndices.size()) < Scalar(objectPoints.size()) * minimalValidCorrespondenceRatio && objectPoints.size() - validIndices.size() > 2)
2149  {
2150  return HomogenousMatrix4(false);
2151  }
2152 
2153  // for priority image points we take a sigma of 1
2154  // for remaining image points we take a sigma identical to the number of priority values (at least 1 and at most 10)
2155 
2156  const SquareMatrix2 priorityInvertedCovariance(1, 0, 0, 1);
2157 
2158  const Scalar sigmaRemaining = minmax(Scalar(1), Scalar(priorityCorrespondences), Scalar(10));
2159  const Scalar sigmaRemainingInvSqr(1 / (sigmaRemaining * sigmaRemaining));
2160  const SquareMatrix2 remainingInvertedCovariance(sigmaRemainingInvSqr, 0, 0, sigmaRemainingInvSqr);
2161 
2162  ocean_assert(priorityInvertedCovariance == Geometry::Utilities::covarianceMatrix(Vector2(1, 0), 1, Vector2(0, 1), 1).inverted());
2163  ocean_assert(remainingInvertedCovariance == Geometry::Utilities::covarianceMatrix(Vector2(1, 0), sigmaRemaining, Vector2(0, 1), sigmaRemaining).inverted());
2164 
2165  HomogenousMatrix4 currentPose(false);
2166  if (previousPose.isValid())
2167  {
2168  if (minimalValidCorrespondenceRatio < 1 && validIndices.size() != objectPoints.size())
2169  {
2170  Matrix invertedCovariances(validIndices.size(), 2);
2171 
2172  for (size_t n = 0; n < validIndices.size(); ++n)
2173  {
2174  const Index32 index = validIndices[n];
2175 
2176  if (index < priorityCorrespondences)
2177  {
2178  priorityInvertedCovariance.copyElements(invertedCovariances[2 * n], false);
2179  }
2180  else
2181  {
2182  remainingInvertedCovariance.copyElements(invertedCovariances[2 * n], false);
2183  }
2184  }
2185 
2186  Geometry::NonLinearOptimizationPose::optimizePose(camera, previousPose, ConstIndexedAccessorSubsetAccessor<Vector3, Index32>(objectPoints, validIndices), ConstIndexedAccessorSubsetAccessor<Vector2, Index32>(imagePoints, validIndices), currentPose, 20u, estimator, Scalar(0.001), Scalar(5), nullptr, finalRobustError, &invertedCovariances);
2187  }
2188  else
2189  {
2190  Matrix invertedCovariances(2 * objectPoints.size(), 2);
2191 
2192  for (size_t n = 0; n < priorityCorrespondences; ++n)
2193  {
2194  priorityInvertedCovariance.copyElements(invertedCovariances[2 * n], false);
2195  }
2196 
2197  for (size_t n = priorityCorrespondences; n < objectPoints.size(); ++n)
2198  {
2199  remainingInvertedCovariance.copyElements(invertedCovariances[2 * n], false);
2200  }
2201 
2202  Geometry::NonLinearOptimizationPose::optimizePose(camera, previousPose, objectPoints, imagePoints, currentPose, 20u, estimator, Scalar(0.001), Scalar(5), nullptr, finalRobustError, &invertedCovariances);
2203  }
2204  }
2205 
2206  return currentPose;
2207 }
2208 
2209 inline SquareMatrix3 Solver3::determineOrientation(const Database& database, const AnyCamera& camera, RandomGenerator& randomGenerator, const unsigned int frameId, const SquareMatrix3& roughOrientation, const unsigned int minimalCorrespondences, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar maximalSqrError, Scalar* finalRobustError, unsigned int* correspondences)
2210 {
2211  ocean_assert(camera.isValid());
2212 
2213  Vectors2 imagePoints;
2214  Vectors3 objectPoints;
2215  database.imagePointsObjectPoints<false, false>(frameId, imagePoints, objectPoints, Vector3(Numeric::minValue(), Numeric::minValue(), Numeric::minValue()));
2216  ocean_assert(imagePoints.size() == objectPoints.size());
2217 
2218  if (correspondences != nullptr)
2219  {
2220  *correspondences = (unsigned int)imagePoints.size();
2221  }
2222 
2223  // check whether enough points correspondences could be found
2224  if (imagePoints.size() < minimalCorrespondences)
2225  {
2226  return SquareMatrix3(false);
2227  }
2228 
2229  return determineOrientation(camera, randomGenerator, ConstArrayAccessor<Vector3>(objectPoints), ConstArrayAccessor<Vector2>(imagePoints), roughOrientation, estimator, minimalValidCorrespondenceRatio, maximalSqrError, finalRobustError);
2230 }
2231 
2232 inline SquareMatrix3 Solver3::determineOrientation(const Database& database, const AnyCamera& camera, RandomGenerator& randomGenerator, const unsigned int frameId, const IndexSet32& priorityObjectPointIds, const bool solePriorityPoints, const SquareMatrix3& roughOrientation, const unsigned int minimalCorrespondences, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar maximalSqrError, Scalar* finalRobustError, unsigned int* correspondences)
2233 {
2234  ocean_assert(camera.isValid());
2235  ocean_assert(!priorityObjectPointIds.empty());
2236 
2237  Vectors2 priorityImagePoints, remainingImagePoints;
2238  Vectors3 priorityObjectPoints, remainingObjectPoints;
2239  database.imagePointsObjectPoints<false, false>(frameId, priorityObjectPointIds, priorityImagePoints, priorityObjectPoints, remainingImagePoints, remainingObjectPoints, Vector3(Numeric::minValue(), Numeric::minValue(), Numeric::minValue()));
2240  ocean_assert(priorityImagePoints.size() == priorityObjectPoints.size());
2241  ocean_assert(remainingImagePoints.size() == remainingObjectPoints.size());
2242 
2243  if (solePriorityPoints)
2244  {
2245  if (correspondences != nullptr)
2246  {
2247  *correspondences = (unsigned int)priorityImagePoints.size();
2248  }
2249 
2250  // check whether enough points correspondences could be found
2251  if (priorityImagePoints.size() < minimalCorrespondences)
2252  {
2253  return SquareMatrix3(false);
2254  }
2255 
2256  return determineOrientation(camera, randomGenerator, ConstArrayAccessor<Vector3>(priorityObjectPoints), ConstArrayAccessor<Vector2>(priorityImagePoints), roughOrientation, estimator, minimalValidCorrespondenceRatio, maximalSqrError, finalRobustError);
2257  }
2258  else
2259  {
2260  if (correspondences != nullptr)
2261  {
2262  *correspondences = (unsigned int)(priorityImagePoints.size() + remainingImagePoints.size());
2263  }
2264 
2265  // check whether enough points correspondences could be found
2266  if (priorityImagePoints.size() + remainingImagePoints.size() < minimalCorrespondences)
2267  {
2268  return SquareMatrix3(false);
2269  }
2270 
2271  const size_t priorityCorrespondences = priorityImagePoints.size();
2272 
2273  priorityImagePoints.insert(priorityImagePoints.end(), remainingImagePoints.begin(), remainingImagePoints.end());
2274  priorityObjectPoints.insert(priorityObjectPoints.end(), remainingObjectPoints.begin(), remainingObjectPoints.end());
2275 
2276  return determineOrientation(camera, randomGenerator, ConstArrayAccessor<Vector3>(priorityObjectPoints), ConstArrayAccessor<Vector2>(priorityImagePoints), priorityCorrespondences, roughOrientation, estimator, minimalValidCorrespondenceRatio, maximalSqrError, finalRobustError);
2277  }
2278 }
2279 
// Forwarding overload: looks up the image points observed for the given object points in the specified frame and delegates to the accessor-based overload.
inline SquareMatrix3 Solver3::determineOrientation(const Database& database, const AnyCamera& camera, RandomGenerator& randomGenerator, const unsigned int frameId, const ObjectPoint* objectPoints, const Index32* objectPointIds, const size_t numberObjectPoints, const SquareMatrix3& roughOrientation, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar maximalSqrError, Scalar* finalRobustError)
{
	ocean_assert(camera.isValid());

	// look up the image point observed in the given frame for each specified object point
	Indices32 validIndices;
	const Vectors2 imagePoints = database.imagePointsFromObjectPoints<false>(frameId, objectPointIds, numberObjectPoints, validIndices);

	// every specified object point is expected to have an observation in the frame
	ocean_assert(numberObjectPoints == validIndices.size());

	return determineOrientation(camera, randomGenerator, ConstArrayAccessor<Vector3>(objectPoints, numberObjectPoints), ConstArrayAccessor<Vector2>(imagePoints), roughOrientation, estimator, minimalValidCorrespondenceRatio, maximalSqrError, finalRobustError);
}
2290 
2291 inline SquareMatrix3 Solver3::determineOrientation(const AnyCamera& camera, RandomGenerator& randomGenerator, const ConstIndexedAccessor<ObjectPoint>& objectPoints, const ConstIndexedAccessor<ImagePoint>& imagePoints, const SquareMatrix3& roughOrientation, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar maximalSqrError, Scalar* finalRobustError, Indices32* validIndices)
2292 {
2293  ocean_assert(camera.isValid());
2294  ocean_assert(objectPoints.size() == imagePoints.size());
2295  ocean_assert(minimalValidCorrespondenceRatio >= 0 && minimalValidCorrespondenceRatio <= 1);
2296 
2297  SquareMatrix3 previousOrientation(roughOrientation);
2298 
2299  Indices32 internalValidIndices;
2300  if (previousOrientation.isNull() || minimalValidCorrespondenceRatio < 1)
2301  {
2302  Geometry::RANSAC::orientation(camera, objectPoints, imagePoints, randomGenerator, previousOrientation, 5u, 50u, maximalSqrError, nullptr, &internalValidIndices);
2303  }
2304 
2305  // check whether we do not receive enough valid correspondences from the RANSAC, however if the difference is 2 we accept the pose as in this case the ratio may provide wrong results
2306  if (minimalValidCorrespondenceRatio < 1 && Scalar(internalValidIndices.size()) < Scalar(objectPoints.size()) * minimalValidCorrespondenceRatio && objectPoints.size() - internalValidIndices.size() > 2)
2307  {
2308  return SquareMatrix3(false);
2309  }
2310 
2311  SquareMatrix3 currentOrientation(false);
2312  if (!previousOrientation.isNull())
2313  {
2314  if (minimalValidCorrespondenceRatio < 1)
2315  {
2316  Geometry::NonLinearOptimizationOrientation::optimizeOrientation(camera, previousOrientation, ConstIndexedAccessorSubsetAccessor<Vector3, unsigned int>(objectPoints, internalValidIndices), ConstIndexedAccessorSubsetAccessor<Vector2, unsigned int>(imagePoints, internalValidIndices), currentOrientation, 20u, estimator, Scalar(0.001), Scalar(5), nullptr, finalRobustError);
2317 
2318  if (validIndices != nullptr)
2319  {
2320  *validIndices = std::move(internalValidIndices);
2321  }
2322  }
2323  else
2324  {
2325  Geometry::NonLinearOptimizationOrientation::optimizeOrientation(camera, previousOrientation, objectPoints, imagePoints, currentOrientation, 20u, estimator, Scalar(0.001), Scalar(5), nullptr, finalRobustError);
2326 
2327  if (validIndices != nullptr)
2328  {
2329  *validIndices = createIndices(objectPoints.size(), 0u);
2330  }
2331  }
2332  }
2333 
2334  return currentOrientation;
2335 }
2336 
2337 inline SquareMatrix3 Solver3::determineOrientation(const AnyCamera& camera, RandomGenerator& randomGenerator, const ConstIndexedAccessor<ObjectPoint>& objectPoints, const ConstIndexedAccessor<ImagePoint>& imagePoints, const size_t priorityCorrespondences, const SquareMatrix3& roughOrientation, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar maximalSqrError, Scalar* finalRobustError)
2338 {
2339  ocean_assert(camera.isValid());
2340  ocean_assert(objectPoints.size() == imagePoints.size());
2341  ocean_assert(minimalValidCorrespondenceRatio >= 0 && minimalValidCorrespondenceRatio <= 1);
2342 
2343  SquareMatrix3 previousOrientation(roughOrientation);
2344 
2345  Indices32 validIndices;
2346  if (previousOrientation.isNull() || minimalValidCorrespondenceRatio < 1)
2347  {
2348  Geometry::RANSAC::orientation(camera, objectPoints, imagePoints, randomGenerator, previousOrientation, 5u, 50u, maximalSqrError, nullptr, &validIndices);
2349  }
2350 
2351  // check whether we do not receive enough valid correspondences from the RANSAC, however if the difference is 2 we accept the pose as in this case the ratio may provide wrong results
2352  if (minimalValidCorrespondenceRatio < 1 && Scalar(validIndices.size()) < Scalar(objectPoints.size()) * minimalValidCorrespondenceRatio && objectPoints.size() - validIndices.size() > 2)
2353  {
2354  return SquareMatrix3(false);
2355  }
2356 
2357  // for priority image points we take a sigma of 1
2358  // for remaining image points we take a sigma identical to the number of priority values (at least 1 and at most 10)
2359 
2360  const SquareMatrix2 priorityInvertedCovariance(1, 0, 0, 1);
2361 
2362  const Scalar sigmaRemaining = minmax(Scalar(1), Scalar(priorityCorrespondences), Scalar(10));
2363  const Scalar sigmaRemainingInvSqr(1 / (sigmaRemaining * sigmaRemaining));
2364  const SquareMatrix2 remainingInvertedCovariance(sigmaRemainingInvSqr, 0, 0, sigmaRemainingInvSqr);
2365 
2366  ocean_assert(priorityInvertedCovariance == Geometry::Utilities::covarianceMatrix(Vector2(1, 0), 1, Vector2(0, 1), 1).inverted());
2367  ocean_assert(remainingInvertedCovariance == Geometry::Utilities::covarianceMatrix(Vector2(1, 0), sigmaRemaining, Vector2(0, 1), sigmaRemaining).inverted());
2368 
2369  SquareMatrix3 currentOrientation(false);
2370  if (!previousOrientation.isNull())
2371  {
2372  if (minimalValidCorrespondenceRatio < 1)
2373  {
2374  Vectors3 subsetObjectPoints;
2375  Vectors2 subsetImagePoints;
2376 
2377  subsetObjectPoints.reserve(validIndices.size());
2378  subsetImagePoints.reserve(validIndices.size());
2379 
2380  Matrix invertedCovariances(validIndices.size(), 2);
2381 
2382  for (size_t n = 0; n < validIndices.size(); ++n)
2383  {
2384  const Index32 index = validIndices[n];
2385 
2386  if (index < priorityCorrespondences)
2387  {
2388  priorityInvertedCovariance.copyElements(invertedCovariances[2 * n], false);
2389  }
2390  else
2391  {
2392  remainingInvertedCovariance.copyElements(invertedCovariances[2 * n], false);
2393  }
2394 
2395  subsetObjectPoints.push_back(objectPoints[index]);
2396  subsetImagePoints.push_back(imagePoints[index]);
2397  }
2398 
2399  Geometry::NonLinearOptimizationOrientation::optimizeOrientation(camera, previousOrientation, ConstArrayAccessor<Vector3>(subsetObjectPoints), ConstArrayAccessor<Vector2>(subsetImagePoints), currentOrientation, 20u, estimator, Scalar(0.001), Scalar(5), nullptr, finalRobustError, &invertedCovariances);
2400  }
2401  else
2402  {
2403  Matrix invertedCovariances(2 * objectPoints.size(), 2);
2404 
2405  for (size_t n = 0; n < priorityCorrespondences; ++n)
2406  {
2407  priorityInvertedCovariance.copyElements(invertedCovariances[2 * n], false);
2408  }
2409 
2410  for (size_t n = priorityCorrespondences; n < objectPoints.size(); ++n)
2411  {
2412  remainingInvertedCovariance.copyElements(invertedCovariances[2 * n], false);
2413  }
2414 
2415  Geometry::NonLinearOptimizationOrientation::optimizeOrientation(camera, previousOrientation, objectPoints, imagePoints, currentOrientation, 20u, estimator, Scalar(0.001), Scalar(5), nullptr, finalRobustError, &invertedCovariances);
2416  }
2417  }
2418 
2419  return currentOrientation;
2420 }
2421 
2422 inline bool Solver3::determinePlane(const ConstIndexedAccessor<Vector3>& objectPoints, RandomGenerator& randomGenerator, Plane3& plane, const RelativeThreshold& minimalValidObjectPoints, const Geometry::Estimator::EstimatorType estimator, Scalar* finalError, Indices32* validIndices)
2423 {
2424  ocean_assert(objectPoints.size() >= 3);
2425  return Geometry::RANSAC::plane(objectPoints, randomGenerator, plane, 100u, Scalar(0.1), minimalValidObjectPoints.threshold((unsigned int)objectPoints.size()), estimator, finalError, validIndices);
2426 }
2427 
2428 inline bool Solver3::determinePlane(const Database& database, const Indices32& objectPointIds, RandomGenerator& randomGenerator, Plane3& plane, const RelativeThreshold& minimalValidObjectPoints, const Geometry::Estimator::EstimatorType estimator, Scalar* finalError, Indices32* validIndices)
2429 {
2430  ocean_assert(objectPointIds.size() >= 3);
2431 
2432  return determinePlane(ConstArrayAccessor<Vector3>(database.objectPoints<false>(objectPointIds)), randomGenerator, plane, minimalValidObjectPoints, estimator, finalError, validIndices);
2433 }
2434 
2435 }
2436 
2437 }
2438 
2439 #endif // META_OCEAN_TRACKING_SOLVER_3_H
virtual size_t size() const =0
Returns the number of accessible elements of this accessor object.
This class implements the abstract base class for all AnyCamera objects.
Definition: AnyCamera.h:130
virtual bool isValid() const =0
Returns whether this camera is valid.
This class implement a sub-region either defined by 2D triangles or defined by a binary mask.
Definition: SubRegion.h:32
This class implements an accessor providing direct access to a constant array of elements.
Definition: Accessor.h:400
This class implements a base class for all indexed-based accessors allowing a constant reference acce...
Definition: Accessor.h:241
This class implements an indexed-based constant accessor providing access to a subset of elements sto...
Definition: Accessor.h:1356
EstimatorType
Definition of individual robust estimator types.
Definition: Estimator.h:34
@ ET_HUBER
The Huber estimator type.
Definition: Estimator.h:84
@ ET_SQUARE
The standard square error estimator (L2).
Definition: Estimator.h:52
This class implements an abstract specialization of the accessor for groups of pairs for object point...
Definition: NonLinearOptimization.h:159
This class implements an abstract specialization of the accessor for groups of pairs for poses.
Definition: NonLinearOptimization.h:185
static bool optimizeOrientation(const AnyCamera &camera, const SquareMatrix3 &world_R_camera, const ConstIndexedAccessor< ObjectPoint > &objectPoints, const ConstIndexedAccessor< ImagePoint > &imagePoints, SquareMatrix3 &world_R_optimizedCamera, const unsigned int iterations, const Estimator::EstimatorType estimator=Estimator::ET_SQUARE, const Scalar lambda=Scalar(0.001), const Scalar lambdaFactor=Scalar(5), Scalar *initialError=nullptr, Scalar *finalError=nullptr, const Matrix *invertedCovariances=nullptr, Scalars *intermediateErrors=nullptr)
Minimizes the projection error of a given 3DOF orientation.
Definition: NonLinearOptimizationOrientation.h:134
static bool optimizePose(const PinholeCamera &pinholeCamera, const HomogenousMatrix4 &world_T_camera, const ConstIndexedAccessor< Vector3 > &objectPoints, const ConstIndexedAccessor< Vector2 > &imagePoints, const bool distortImagePoints, HomogenousMatrix4 &world_T_optimizedCamera, const unsigned int iterations=20u, const Estimator::EstimatorType estimator=Estimator::ET_SQUARE, Scalar lambda=Scalar(0.001), const Scalar lambdaFactor=10, Scalar *initialError=nullptr, Scalar *finalError=nullptr)
Deprecated.
Definition: NonLinearOptimizationPose.h:282
static bool p3p(const AnyCamera &anyCamera, const ConstIndexedAccessor< ObjectPoint > &objectPointAccessor, const ConstIndexedAccessor< ImagePoint > &imagePointAccessor, RandomGenerator &randomGenerator, HomogenousMatrix4 &world_T_camera, const unsigned int minimalValidCorrespondences=5u, const bool refine=true, const unsigned int iterations=20u, const Scalar sqrPixelErrorThreshold=Scalar(5 *5), Indices32 *usedIndices=nullptr, Scalar *sqrAccuracy=nullptr)
Calculates a pose using the perspective pose problem with three point correspondences using any camer...
static bool orientation(const AnyCamera &camera, const ConstIndexedAccessor< ObjectPoint > &objectPoints, const ConstIndexedAccessor< ImagePoint > &imagePoints, RandomGenerator &randomGenerator, SquareMatrix3 &world_R_camera, const unsigned int minValidCorrespondences=5u, const unsigned int iterations=20u, const Scalar maxSqrError=Scalar(5 *5), Scalar *finalError=nullptr, Indices32 *usedIndices=nullptr)
Determines the 3DOF rotation of a camera pose for a set of given 2D/3D point correspondences by minim...
static bool plane(const ConstIndexedAccessor< ObjectPoint > &objectPoints, RandomGenerator &randomGenerator, Plane3 &plane, const unsigned int iterations=20u, const Scalar medianDistanceFactor=Scalar(0.1), const unsigned int minValidCorrespondences=3u, const Estimator::EstimatorType refinementEstimator=Estimator::ET_SQUARE, Scalar *finalError=nullptr, Indices32 *usedIndices=nullptr)
Determines a 3D plane best matching to a set of given 3D object points.
static SquareMatrix2 covarianceMatrix(const ImagePoint *imagePoints, const size_t number, const Scalar minimalSigma=0)
Creates the covariance matrix for a given set of image points.
Definition: geometry/Utilities.h:510
bool isValid() const
Returns whether this matrix is a valid homogeneous transformation.
Definition: HomogenousMatrix4.h:1806
This class implements a recursive lock object.
Definition: Lock.h:31
static constexpr T deg2rad(const T deg)
Converts deg to rad.
Definition: Numeric.h:3232
static constexpr T minValue()
Returns the min scalar value.
Definition: Numeric.h:3250
static constexpr int32_t round32(const T value)
Returns the rounded 32 bit integer value of a given value.
Definition: Numeric.h:2064
static constexpr T maxValue()
Returns the max scalar value.
Definition: Numeric.h:3244
OptimizationStrategy
Definition of individual optimization strategies for camera parameters.
Definition: PinholeCamera.h:129
This class implements a generator for random numbers.
Definition: RandomGenerator.h:42
This class implements an accessor that guarantees memory access to the elements of an indexed accesso...
Definition: Accessor.h:1459
const T * data() const
Returns the pointer to the memory block providing the data of the accessor.
Definition: Accessor.h:2621
size_t size() const
Returns the number of elements the accessor provides.
Definition: Accessor.h:2629
This class implements a vector with shifted elements.
Definition: ShiftVector.h:27
This class implements a 2x2 square matrix.
Definition: SquareMatrix2.h:73
void copyElements(U *arrayValues, const bool columnAligned=true) const
Copies the elements of this matrix to an array with floating point values of type U.
Definition: SquareMatrix2.h:884
bool isNull() const
Returns whether this matrix is a zero matrix.
Definition: SquareMatrix3.h:1333
This class implements a database for 3D object points, 2D image points and 6DOF camera poses.
Definition: Database.h:67
std::vector< std::pair< Index32, PoseImagePointTopology > > PoseImagePointTopologyGroups
Definition of a vector holding several groups of pairs of pose and image point ids.
Definition: Database.h:250
Indices32 objectPointIds(Vectors3 *objectPoints=nullptr, Scalars *priorities=nullptr) const
Returns the ids of all object points that are part of this database.
Definition: Database.h:3829
std::vector< Vectors2 > ImagePointGroups
Definition of a vector holding 2D vectors.
Definition: Database.h:109
Vectors2 imagePointsFromObjectPoints(const Index32 poseId, Indices32 &objectPointIds, Indices32 *imagePointIds=nullptr) const
Returns all image points which are located in a specified frame and which are projections of a set of...
Definition: Database.h:4437
void imagePointsObjectPoints(const Index32 poseId, Vectors2 &imagePoints, Vectors3 &objectPoints, const Vector3 &referencePosition=invalidObjectPoint(), const size_t minimalObservations=0, Indices32 *imagePointIds=nullptr, Indices32 *objectPointIds=nullptr) const
Returns corresponding object points and image points for a given camera pose.
Definition: Database.h:4651
Vectors3 objectPoints() const
Returns the positions of all 3D object points.
Definition: Database.h:2379
This class implements an accessor for groups of pairs of pose indices (not pose ids) and image points...
Definition: Solver3.h:237
ObjectPointToPoseImagePointCorrespondenceAccessor(const ObjectPointToPoseImagePointCorrespondenceAccessor &accessor)
Copy constructor.
Definition: Solver3.h:1922
ObjectPointToPoseImagePointCorrespondenceAccessor(const Database &database, const Indices32 &poseIds, const Indices32 &objectPointCandidateIds, const unsigned int minimalObservationsInKeyframes=2u, Indices32 *validObjectPoints=nullptr)
Creates a new accessor object and extracts the necessary information from a given database.
ObjectPointToPoseImagePointCorrespondenceAccessor & operator=(const ObjectPointToPoseImagePointCorrespondenceAccessor &accessor)
Assign operator.
Definition: Solver3.h:1945
ObjectPointToPoseImagePointCorrespondenceAccessor(const Database &database, const Index32 lowerPoseId, const Index32 upperPoseId, const Indices32 &objectPointIds, const unsigned int minimalObservationsInKeyframes=2u, const unsigned int minimalKeyFrames=2u, Indices32 *usedKeyFrameIds=nullptr)
Creates a new accessor object and extracts the necessary information from a given database.
This class implements an accessor providing access to observation pairs (the observation of a project...
Definition: Solver3.h:331
ObjectPointToPoseIndexImagePointCorrespondenceAccessor(const ShiftVector< Vectors2 > &imagePointGroups, const Indices32 &posesSubsetBlock)
Creates a new accessor object.
ObjectPointToPoseIndexImagePointCorrespondenceAccessor(const std::vector< Vectors2 > &imagePointGroups, const Indices32 &posesSubsetBlock)
Creates a new accessor object.
ObjectPointToPoseIndexImagePointCorrespondenceAccessor(const std::vector< Vectors2 > &imagePointGroups, const Indices32 &posesSubsetBlock, const Indices32 &objectPointsSubset)
Creates a new accessor object.
ObjectPointToPoseIndexImagePointCorrespondenceAccessor & operator=(const ObjectPointToPoseIndexImagePointCorrespondenceAccessor &accessor)
Assign operator.
Definition: Solver3.h:1969
ObjectPointToPoseIndexImagePointCorrespondenceAccessor(const ObjectPointToPoseIndexImagePointCorrespondenceAccessor &accessor)
Copy constructor.
Definition: Solver3.h:1957
ObjectPointToPoseIndexImagePointCorrespondenceAccessor(const ShiftVector< Vectors2 > &imagePointGroups, const Indices32 &posesSubsetBlock, const Indices32 &objectPointsSubset)
Creates a new accessor object.
This class implements an accessor for groups of pairs of object point ids and image points.
Definition: Solver3.h:177
PoseToObjectPointIdImagePointCorrespondenceAccessor & operator=(const PoseToObjectPointIdImagePointCorrespondenceAccessor &accessor)
Assign operator.
Definition: Solver3.h:1910
PoseToObjectPointIdImagePointCorrespondenceAccessor(const PoseToObjectPointIdImagePointCorrespondenceAccessor &accessor)
Copy constructor.
Definition: Solver3.h:1898
PoseToObjectPointIdImagePointCorrespondenceAccessor(const Database &database, const Indices32 &poseIds, const Indices32 &objectPointIds, const unsigned int minimalVisibleObjectPoints=10u, Indices32 *validPoseIndices=nullptr, Indices32 *usedObjectPointIndices=nullptr)
Creates a new accessor object and extracts the necessary information from a given database.
Definition of a class allowing to define a relative threshold with lower and upper boundary for indiv...
Definition: Solver3.h:93
bool hasValidThreshold(const unsigned int value, unsigned int *threshold=nullptr) const
Returns whether for a given reference value a valid relative threshold can be determined.
Definition: Solver3.h:1863
RelativeThreshold(const unsigned int lowerBoundary, const Scalar factor, const unsigned int upperBoundary)
Creates a new threshold object.
Definition: Solver3.h:1835
unsigned int thresholdUpperBoundary
The upper boundary of the relative threshold.
Definition: Solver3.h:156
Scalar factor() const
Returns the factor of this object.
Definition: Solver3.h:1848
Scalar thresholdFactor
The factor defining the relative threshold.
Definition: Solver3.h:153
unsigned int threshold(const unsigned int value) const
Returns the relative threshold for a given reference value.
Definition: Solver3.h:1858
unsigned int upperBoundary() const
Returns the upper boundary of this object.
Definition: Solver3.h:1853
unsigned int lowerBoundary() const
Returns the lower boundary of this object.
Definition: Solver3.h:1843
unsigned int thresholdLowerBoundary
The lower boundary of the relative threshold.
Definition: Solver3.h:150
This class implements a Structure From Motion solver for unconstrained 3D object points and unconstra...
Definition: Solver3.h:42
static std::string translateCameraMotion(const CameraMotion cameraMotion)
Translates a camera motion value to a string providing the detailed motion as readable string.
static bool determinePerpendicularPlane(const Database &database, const PinholeCamera &pinholeCamera, const unsigned int frameIndex, const Vector2 &imagePoint, const Scalar distance, Plane3 &plane, const bool useDistortionParameters, Vector3 *pointOnPlane=nullptr)
Determines a 3D plane perpendicular to the camera with specified distance to the camera.
static void determineProjectionErrorsSubset(const Database *database, const PinholeCamera *pinholeCamera, const Index32 *objectPointIds, const HomogenousMatrix4 *posesIF, const Index32 lowerPoseId, const unsigned int upperPoseId, const bool useDistortionParameters, Scalar *minimalSqrErrors, Scalar *averagedSqrErrors, Scalar *maximalSqrErrors, unsigned int *observations, const unsigned int firstObjectPoint, const unsigned int numberObjectPoints)
Determines the maximal squared pixel errors between the projections of a subset of individual 3D obje...
static Indices32 determineRepresentativePoses(const Database &database, const Indices32 &poseIds, const size_t numberRepresentative)
Determines a set of representative camera poses from a given database from a set of given camera pose...
static HomogenousMatrix4 determinePose(const Database &database, const AnyCamera &camera, RandomGenerator &randomGenerator, const unsigned int frameId, const HomogenousMatrix4 &roughPose=HomogenousMatrix4(false), const unsigned int minimalCorrespondences=10u, const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio=Scalar(1), const Scalar maximalSqrError=Scalar(3.5 *3.5), Scalar *finalRobustError=nullptr, unsigned int *correspondences=nullptr)
Determines the camera 6-DOF pose for a specific camera frame.
Definition: Solver3.h:2001
static bool updateDatabaseToRotationalMotion(Database &database, const PinholeCamera &pinholeCamera, RandomGenerator &randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const unsigned int minimalObservations, IndexSet32 *relocatedObjectPointIds)
Determines the semi-precise location of 3D object points and the camera poses for a sole rotational c...
static size_t filterStaticImagePoints(ImagePointGroups &imagePointGroups, Indices32 &objectPointIds, const Scalar maximalStaticImagePointFilterRatio)
Determines a subset of perfectly static image points which may be image points located (visible) at s...
std::pair< Index32, Scalar > PoseErrorPair
Definition of a pair combining a pose id and an error parameter.
Definition: Solver3.h:394
static Scalar determineObjectPointAccuracy(const PinholeCamera &pinholeCamera, const HomogenousMatrix4 *poses, const Vector2 *imagePoints, const size_t observations, const AccuracyMethod accuracyMethod)
Measures the accuracy of a 3D object point in combination with a set of camera poses and image points...
static CameraMotion determineCameraMotion(const Database &database, const PinholeCamera &pinholeCamera, const unsigned int lowerFrame, const unsigned int upperFrame, const bool onlyVisibleObjectPoints=true, Worker *worker=nullptr, const Scalar minimalTinyTranslationObservationAngle=Numeric::deg2rad(Scalar(0.15)), const Scalar minimalModerateTranslationObservationAngle=Numeric::deg2rad(1), const Scalar minimalSignificantTranslationObservationAngle=Numeric::deg2rad(5), const Scalar minimalTinyRotationAngle=Numeric::deg2rad(Scalar(0.25)), const Scalar minimalModerateRotationAngle=Numeric::deg2rad(5), const Scalar minimalSignificantRotationAngle=Numeric::deg2rad(10))
Determines the camera motion from the camera poses within a specified frame range covering only valid...
static bool determineUnknownObjectPoints(const Database &database, const AnyCamera &camera, const CameraMotion cameraMotion, const Indices32 &unknownObjectPointIds, Vectors3 &newObjectPoints, Indices32 &newObjectPointIds, RandomGenerator &randomGenerator, Indices32 *newObjectPointObservations=nullptr, const unsigned int minimalObservations=2u, const bool useAllObservations=true, const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_SQUARE, const Scalar ransacMaximalSqrError=Scalar(3.5 *3.5), const Scalar averageRobustError=Scalar(3.5 *3.5), const Scalar maximalSqrError=Numeric::maxValue(), Worker *worker=nullptr, bool *abort=nullptr)
Determines the positions of a set of (currently unknown) object points.
static bool determineInitialObjectPointsFromSparseKeyFramesBySteps(const Database &database, const unsigned int steps, const PinholeCamera &pinholeCamera, RandomGenerator &randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const Scalar maximalStaticImagePointFilterRatio, Vectors3 &initialObjectPoints, Indices32 &initialObjectPointIds, const RelativeThreshold &pointsThreshold=RelativeThreshold(20u, Scalar(0.5), 100u), const unsigned int minimalKeyFrames=2u, const unsigned int maximalKeyFrames=10u, const Scalar maximalSqrError=Scalar(3.5 *3.5), Indices32 *usedPoseIds=nullptr, Worker *worker=nullptr, bool *abort=nullptr)
Determines the initial positions of 3D object points in a database if no camera poses or structure in...
static bool optimizeObjectPointsWithVariablePoses(const Database &database, const PinholeCamera &pinholeCamera, const Index32 lowerPoseId, const Index32 upperPoseId, const Indices32 &objectPointIds, Indices32 &usedKeyFrameIds, Vectors3 &optimizedObjectPoints, const unsigned int minimalObservations=10u, const unsigned int minimalKeyFrames=10u, const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_SQUARE, const unsigned int iterations=50u, Scalar *initialRobustError=nullptr, Scalar *finalRobustError=nullptr)
static size_t removeObjectPointsNotInFrontOfCamera(Database &database, Indices32 *removedObjectPointIds=nullptr)
Removes all valid 3D object points (and their corresponding 2D image points) from the database which ...
static bool determineProjectionErrors(const Database &database, const PinholeCamera &pinholeCamera, const Indices32 &objectPointIds, const bool useDistortionParameters, const unsigned int lowerFrame=(unsigned int)(-1), const unsigned int upperFrame=(unsigned int)(-1), Scalar *minimalSqrErrors=nullptr, Scalar *averagedSqrErrors=nullptr, Scalar *maximalSqrErrors=nullptr, unsigned int *observations=nullptr, Worker *worker=nullptr)
Determines the averaged and maximal squared pixel errors between the projections of individual 3D obj...
static bool determinePlane(const Database &database, const PinholeCamera &pinholeCamera, const unsigned int lowerFrameIndex, const unsigned int subRegionFrameIndex, const unsigned int upperFrameIndex, const CV::SubRegion &subRegion, RandomGenerator &randomGenerator, Plane3 &plane, const bool useDistortionParameters, const RelativeThreshold &minimalValidObjectPoints=RelativeThreshold(3u, Scalar(0.5), 20u), const Scalar medianDistanceFactor=Scalar(6), const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_HUBER, Scalar *finalError=nullptr, Indices32 *usedObjectPointIds=nullptr)
Determines a 3D plane best fitting to image points in a specified sub-region in a specified frame and...
static size_t removeObjectPointsWithoutEnoughObservations(Database &database, const size_t minimalNumberObservations, Indices32 *removedObjectPointIds=nullptr)
Removes any 3D object point (and its corresponding 2D image points) from the database with less than...
static bool determineInitialObjectPointsFromDenseFrames(const Database &database, const PinholeCamera &pinholeCamera, RandomGenerator &randomGenerator, const unsigned int lowerFrame, const unsigned int startFrame, const unsigned int upperFrame, const CV::SubRegion &regionOfInterest, const Scalar maximalStaticImagePointFilterRatio, Vectors3 &initialObjectPoints, Indices32 &initialObjectPointIds, const RelativeThreshold &pointsThreshold=RelativeThreshold(20u, Scalar(0.5), 100u), const Scalar minimalTrackedFramesRatio=Scalar(0.1), const unsigned int minimalKeyFrames=3u, const unsigned int maximalKeyFrames=10u, const Scalar maximalSqrError=Scalar(3.5 *3.5), Indices32 *usedPoseIds=nullptr, Scalar *finalSqrError=nullptr, bool *abort=nullptr)
Determines the initial positions of 3D object points in a database if no camera poses or structure in...
static bool optimizeObjectPointsWithVariablePoses(const Database &database, const PinholeCamera &pinholeCamera, Vectors3 &optimizedObjectPoints, Indices32 &optimizedObjectPointIds, HomogenousMatrices4 *optimizedKeyFramePoses=nullptr, Indices32 *optimizedKeyFramePoseIds=nullptr, const unsigned int minimalKeyFrames=3u, const unsigned int maximalKeyFrames=20u, const unsigned int minimalObservations=10u, const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_SQUARE, const unsigned int iterations=50u, Scalar *initialRobustError=nullptr, Scalar *finalRobustError=nullptr)
Optimizes 3D object points (having a quite good accuracy already) and optimizes the camera poses conc...
static void determinePosesOrientation(const Database &database, const unsigned int lowerFrame, const unsigned int upperFrame, Scalar *xOrientations, Scalar *yOrientations, Scalar *zOrientations)
Determines the individual cosine values between the mean coordinate axis of a range of poses and the ...
static void determineProjectionErrors(const AnyCamera &camera, const Vector3 &objectPoint, const ConstIndexedAccessor< HomogenousMatrix4 > &world_T_cameras, const ConstIndexedAccessor< Vector2 > &imagePoints, Scalar *minimalSqrError=nullptr, Scalar *averageSqrError=nullptr, Scalar *maximalSqrError=nullptr)
Determines the projection errors of a 3D object point in combination with a set of camera poses and i...
static bool determinePoses(const Database &database, const AnyCamera &camera, const CameraMotion cameraMotion, const IndexSet32 &priorityObjectPointIds, const bool solePriorityPoints, RandomGenerator &randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const unsigned int minimalCorrespondences, ShiftVector< HomogenousMatrix4 > &poses, const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio=Scalar(1), const Scalar ransacMaximalSqrError=Scalar(3.5 *3.5), const Scalar maximalRobustError=Scalar(3.5 *3.5), Scalar *finalAverageError=nullptr, Worker *worker=nullptr, bool *abort=nullptr)
Determines the camera poses depending on valid 2D/3D points correspondence within a range of camera f...
static Indices32 trackObjectPointsToNeighborFrames(const Database &database, const Indices32 &objectPointIds, const unsigned int lowerFrame, const unsigned int startFrame, const unsigned int upperFrame)
This function tracks a group of object points from one frame to both (if available) neighbor frames a...
static bool optimizeCamera(const Database &database, const PinholeCamera &pinholeCamera, const unsigned int lowerFrame, const unsigned int upperFrame, const bool findInitialFieldOfView, const PinholeCamera::OptimizationStrategy optimizationStrategy, PinholeCamera &optimizedCamera, Database &optimizedDatabase, const unsigned int minimalObservationsInKeyframes=2u, const unsigned int minimalKeyframes=3u, const unsigned int maximalKeyframes=20u, const Scalar lowerFovX=Numeric::deg2rad(20), const Scalar upperFovX=Numeric::deg2rad(140), Worker *worker=nullptr, bool *abort=nullptr, Scalar *finalMeanSqrError=nullptr)
Optimizes the camera profile for a given database with stable camera poses determined by initial but ...
static bool determineInitialObjectPointsFromSparseKeyFramesRANSAC(const PinholeCamera &pinholeCamera, const Database::ImagePointGroups &imagePointGroups, RandomGenerator &randomGenerator, HomogenousMatrices4 &validPoses, Indices32 &validPoseIndices, Vectors3 &objectPoints, Indices32 &validObjectPointIndices, const unsigned int iterations=20u, const RelativeThreshold &minimalValidObjectPoints=RelativeThreshold(10u, Scalar(0.3), 20u), const Scalar maximalSqrError=Scalar(3.5 *3.5), const Database *database=nullptr, const Indices32 *keyFrameIds=nullptr, const Indices32 *objectPointIds=nullptr, bool *abort=nullptr)
Determines the initial object point positions for a set of key frames (image point groups) observing ...
static bool determineNumberCorrespondences(const Database &database, const bool needValidPose, const unsigned int lowerFrame, const unsigned int upperFrame, unsigned int *minimalCorrespondences=nullptr, Scalar *averageCorrespondences=nullptr, unsigned int *medianCorrespondences=nullptr, unsigned int *maximalCorrespondences=nullptr, Worker *worker=nullptr)
Determines the number of valid correspondences between image points and object points for each frame ...
std::vector< PoseErrorPair > PoseErrorPairs
Definition of a vector holding pose error pairs.
Definition: Solver3.h:399
std::map< unsigned int, unsigned int > IndexMap32
Definition of a map mapping 32 bit indices to 32 bit indices.
Definition: Solver3.h:308
static bool optimizeCameraWithVariableObjectPointsAndPoses(const Database &database, const PinholeCamera &pinholeCamera, const PinholeCamera::OptimizationStrategy optimizationStrategy, const Indices32 &keyFrameIds, const Indices32 &objectPointIds, PinholeCamera &optimizedCamera, Vectors3 *optimizedObjectPoints=nullptr, Indices32 *optimizedObjectPointIds=nullptr, HomogenousMatrices4 *optimizedKeyFramePoses=nullptr, const unsigned int minimalObservations=10u, const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_SQUARE, const unsigned int iterations=50u, Scalar *initialRobustError=nullptr, Scalar *finalRobustError=nullptr)
Optimizes 3D object points (having a quite good accuracy already) and optimizes the camera poses and ...
static void determineInitialObjectPointsFromSparseKeyFramesByStepsSubset(const Database *database, const PinholeCamera *pinholeCamera, RandomGenerator *randomGenerator, const unsigned int lowerFrame, const Indices32 *startFrames, const unsigned int upperFrame, const Scalar maximalStaticImagePointFilterRatio, Vectors3 *initialObjectPoints, Indices32 *initialObjectPointIds, Indices32 *initialPoseIds, Scalar *initialPointDistance, const RelativeThreshold *pointsThreshold, const unsigned int minimalKeyFrames, const unsigned int maximalKeyFrames, const Scalar maximalSqrError, Lock *lock, bool *abort, const unsigned int numberThreads, const unsigned int threadIndex, const unsigned int numberThreadsOne)
Determines the initial positions of 3D object points in a database if no camera poses or structure in...
static bool determinePerpendicularPlane(const PinholeCamera &pinholeCamera, const HomogenousMatrix4 &pose, const Vector2 &imagePoint, const Scalar distance, Plane3 &plane, const bool useDistortionParameters, Vector3 *pointOnPlane=nullptr)
Determines a 3D plane perpendicular to the camera with specified distance to the camera.
static void determineUnknownObjectPointsSubset(const AnyCamera *camera, const Database *database, const Database::PoseImagePointTopologyGroups *objectPointsData, RandomGenerator *randomGenerator, const Scalar maximalSqrError, bool *abort, Lock *lock, Vectors3 *newObjectPoints, Indices32 *newObjectPointIds, unsigned int firstObjectPoint, unsigned int numberObjectPoints)
Determines the positions of new object points from a database within a specified frame range.
static void determineObjectPointsAccuracySubset(const Database *database, const PinholeCamera *pinholeCamera, const Index32 *objectPointIds, const AccuracyMethod accuracyMethhod, const unsigned int lowerFrame, const unsigned int upperFrame, Scalar *values, const unsigned int firstObjectPoint, const unsigned int numberObjectPoints)
Measures the accuracy of a subset of several 3D object points.
static bool optimizeObjectPointsWithFixedPoses(const Database &database, const PinholeCamera &pinholeCamera, const CameraMotion cameraMotion, const Indices32 &objectPointIds, Vectors3 &optimizedObjectPoints, Indices32 &optimizedObjectPointIds, unsigned int minimalObservations=10u, const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_SQUARE, const Scalar maximalRobustError=Scalar(3.5 *3.5), Worker *worker=nullptr, bool *abort=nullptr)
Optimizes a set of 3D object points (having a quite good accuracy already) without optimizing the cam...
static bool optimizeCameraWithVariableObjectPointsAndPoses(const Database &database, const PinholeCamera &pinholeCamera, const PinholeCamera::OptimizationStrategy optimizationStrategy, const Indices32 &keyFrameIds, PinholeCamera &optimizedCamera, Vectors3 *optimizedObjectPoints=nullptr, Indices32 *optimizedObjectPointIds=nullptr, HomogenousMatrices4 *optimizedKeyFramePoses=nullptr, const unsigned int minimalObservations=10u, const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_SQUARE, const unsigned int iterations=50u, Scalar *initialRobustError=nullptr, Scalar *finalRobustError=nullptr)
Optimizes 3D object points (having a quite good accuracy already) and optimizes the camera poses and ...
CameraMotion
Definition of individual camera motion types.
Definition: Solver3.h:49
@ CM_INVALID
Invalid camera motion.
Definition: Solver3.h:51
static bool determineInitialObjectPointsFromSparseKeyFrames(const Database &database, const PinholeCamera &pinholeCamera, RandomGenerator &randomGenerator, const unsigned int lowerFrame, const unsigned int startFrame, const unsigned int upperFrame, const Scalar maximalStaticImagePointFilterRatio, Vectors3 &initialObjectPoints, Indices32 &initialObjectPointIds, const RelativeThreshold &pointsThreshold=RelativeThreshold(20u, Scalar(0.5), 100u), const unsigned int minimalKeyFrames=3u, const unsigned int maximalKeyFrames=10u, const Scalar maximalSqrError=Scalar(3.5 *3.5), Indices32 *usedPoseIds=nullptr, Scalar *finalSqrError=nullptr, Scalar *finalImagePointDistance=nullptr, bool *abort=nullptr)
Determines the initial positions of 3D object points in a database if no camera poses or structure in...
static void determineUnknownObjectPointsSubset(const Database *database, const AnyCamera *camera, const CameraMotion cameraMotion, const Index32 *objectPointIds, Vectors3 *newObjectPoints, Indices32 *newObjectPointIds, Indices32 *newObjectPointObservations, RandomGenerator *randomGenerator, const unsigned int minimalObservations, const bool useAllObservations, const Geometry::Estimator::EstimatorType estimator, const Scalar ransacMaximalSqrError, const Scalar averageRobustError, const Scalar maximalSqrError, Lock *look, bool *abort, const unsigned int firstObjectPoint, const unsigned int numberObjectPoints)
Determines the positions of a subset of (currently unknown) object points.
static bool optimizeObjectPointsWithVariablePoses(const Database &database, const PinholeCamera &pinholeCamera, const Indices32 &keyFrameIds, const Indices32 &objectPointIds, Vectors3 &optimizedObjectPoints, Indices32 &optimizedObjectPointIds, HomogenousMatrices4 *optimizedKeyFramePoses=nullptr, const unsigned int minimalObservations=10u, const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_SQUARE, const unsigned int iterations=50u, Scalar *initialRobustError=nullptr, Scalar *finalRobustError=nullptr)
Optimizes 3D object points (having a quite good accuracy already) and optimizes the camera poses conc...
static bool optimizeObjectPointsWithVariablePoses(const Database &database, const PinholeCamera &pinholeCamera, const Indices32 &keyFrameIds, Vectors3 &optimizedObjectPoints, Indices32 &optimizedObjectPointIds, HomogenousMatrices4 *optimizedKeyFramePoses=nullptr, const unsigned int minimalObservations=10u, const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_SQUARE, const unsigned int iterations=50u, Scalar *initialRobustError=nullptr, Scalar *finalRobustError=nullptr)
Optimizes 3D object points (having a quite good accuracy already) and optimizes the camera poses conc...
static void optimizeObjectPointsWithFixedPosesSubset(const Database *database, const PinholeCamera *pinholeCamera, const CameraMotion cameraMotion, const Index32 *objectPointIds, Vectors3 *optimizedObjectPoints, Indices32 *optimizedObjectPointIds, const unsigned int minimalObservations, const Geometry::Estimator::EstimatorType estimator, const Scalar maximalRobustError, Lock *look, bool *abort, const unsigned int firstObjectPoint, const unsigned int numberObjectPoints)
Optimizes a subset of a set of 3D object points which have a quite good accuracy already without opti...
static size_t removeObjectPointsWithSmallBaseline(Database &database, const Scalar minimalBoxDiagonal, Indices32 *removedObjectPointIds=nullptr)
Removes any 3D object point (and its corresponding 2D image points) from the database if all their c...
static void updatePosesSubset(Database *database, const AnyCamera *camera, RandomGenerator *randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const unsigned int minimalCorrespondences, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar ransacMaximalSqrError, const Scalar maximalRobustError, Scalar *totalError, size_t *validPoses, Lock *lock, bool *abort, const unsigned int numberThreads, const unsigned int threadIndex, const unsigned int numberThreadsOne)
Updates a subset of the camera poses depending on valid 2D/3D points correspondences within a range o...
static void updateOrientationsSubset(Database *database, const AnyCamera *camera, RandomGenerator *randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const unsigned int minimalCorrespondences, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar ransacMaximalSqrError, const Scalar maximalRobustError, Scalar *totalError, size_t *validPoses, Lock *lock, bool *abort, const unsigned int numberThreads, const unsigned int threadIndex, const unsigned int numberThreadsOne)
Updates a subset of the camera orientations (as the camera has rotational motion only) depending on v...
static bool updatePoses(Database &database, const AnyCamera &camera, const CameraMotion cameraMotion, RandomGenerator &randomGenerator, const unsigned int lowerFrame, const unsigned int startFrame, const unsigned int upperFrame, const unsigned int minimalCorrespondences, const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio=Scalar(1), const Scalar ransacMaximalSqrError=Scalar(3.5 *3.5), const Scalar maximalRobustError=Scalar(3.5 *3.5), Scalar *finalAverageError=nullptr, size_t *validPoses=nullptr, bool *abort=nullptr)
Updates the camera poses of the database depending on valid 2D/3D points correspondences within a ran...
static bool supposeRotationalCameraMotion(const Database &database, const PinholeCamera &pinholeCamera, const unsigned int lowerFrame, const unsigned int upperFrame, const bool findInitialFieldOfView, const PinholeCamera::OptimizationStrategy optimizationStrategy, PinholeCamera &optimizedCamera, Database &optimizedDatabase, const unsigned int minimalObservations=0u, const unsigned int minimalKeyframes=3u, const unsigned int maximalKeyframes=20u, const Scalar lowerFovX=Numeric::deg2rad(20), const Scalar upperFovX=Numeric::deg2rad(140), const Scalar maximalSqrError=(1.5 *1.5), Worker *worker=nullptr, bool *abort=nullptr, Scalar *finalMeanSqrError=nullptr)
Supposes pure rotational camera motion for a given database with stable camera poses determined by in...
static bool removeSparseObjectPoints(Database &database, const Scalar minimalBoundingBoxDiagonal=Scalar(1e+7), const Scalar medianFactor=Scalar(100), const Scalar maximalSparseObjectPointRatio=Scalar(0.05))
Removes very far object points from the database if the amount of object points does not exceed a spe...
static void determineInitialObjectPointsFromDenseFramesRANSACSubset(const PinholeCamera *pinholeCamera, const ImagePointGroups *imagePointGroups, RandomGenerator *randomGenerator, HomogenousMatrices4 *validPoses, Indices32 *validPoseIds, Vectors3 *objectPoints, Indices32 *validObjectPointIndices, Scalar *totalError, const RelativeThreshold *minimalValidObjectPoints, const Scalar maximalSqrError, unsigned int *remainingIterations, Lock *lock, bool *abort, unsigned int firstIteration, unsigned int numberIterations)
Determines the initial object point positions for a set of frames (image point groups) observing the ...
static void determineOrientationsSubset(const Database *database, const AnyCamera *camera, const IndexSet32 *priorityObjectPointIds, const bool solePriorityPoints, RandomGenerator *randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const unsigned int minimalCorrespondences, ShiftVector< HomogenousMatrix4 > *poses, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar ransacMaximalSqrError, const Scalar maximalRobustError, Scalar *totalError, Lock *lock, bool *abort, const unsigned int numberThreads, const unsigned int threadIndex, const unsigned int numberThreadsOne)
Determines a subset of the camera orientations (as the camera has rotational motion only) depending o...
static size_t determineValidPoses(const AnyCamera &camera, const Vectors3 &objectPoints, const ImagePointGroups &imagePointGroups, RandomGenerator &randomGenerator, const CameraMotion cameraMotion, const unsigned int firstValidPoseIndex, const HomogenousMatrix4 &firstValidPose, const unsigned int secondValidPoseIndex, const HomogenousMatrix4 &secondValidPose, const Scalar minimalValidCorrespondenceRatio=Scalar(1), const Scalar maximalSqrError=Scalar(3.5 *3.5), Indices32 *validObjectPointIndices=nullptr, HomogenousMatrices4 *poses=nullptr, Indices32 *poseIds=nullptr, Scalar *totalSqrError=nullptr)
Determines valid poses for a range of camera frames while for each frame a group of image points is g...
ShiftVector< Vectors2 > ImagePointGroups
Definition of a shift vector holding groups of image points.
Definition: Solver3.h:313
static Indices32 determineRepresentativePoses(const Database &database, const unsigned int lowerFrame, const unsigned int upperFrame, const size_t numberRepresentative)
Determines a set of representative camera poses from a given database within a specified frame range.
static bool optimizeInitialObjectPoints(const Database &database, const AnyCamera &camera, RandomGenerator &randomGenerator, const unsigned int lowerFrame, const unsigned int startFrame, const unsigned int upperFrame, const Vectors3 &initialObjectPoints, const Indices32 &initialObjectPointIds, Vectors3 &optimizedObjectPoints, Indices32 &optimizedObjectPointIds, const unsigned int minimalObjectPoints=5u, const unsigned int minimalKeyFrames=3u, const unsigned int maximalKeyFrames=10u, const Scalar maximalSqrError=Scalar(3.5 *3.5), Indices32 *usedPoseIds=nullptr, Scalar *initialSqrError=nullptr, Scalar *finalSqrError=nullptr, bool *abort=nullptr)
Optimizes the positions of already known initial 3D object points when a given database holds neither...
static bool updatePoses(Database &database, const AnyCamera &camera, const CameraMotion cameraMotion, RandomGenerator &randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const unsigned int minimalCorrespondences, const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio=Scalar(1), const Scalar ransacMaximalSqrError=Scalar(3.5 *3.5), const Scalar maximalRobustError=Scalar(3.5 *3.5), Scalar *finalAverageError=nullptr, size_t *validPoses=nullptr, Worker *worker=nullptr, bool *abort=nullptr)
Updates the camera poses of the database depending on valid 2D/3D points correspondences within a ran...
static SquareMatrix3 determineOrientation(const Database &database, const AnyCamera &camera, RandomGenerator &randomGenerator, const unsigned int frameId, const SquareMatrix3 &roughOrientation=SquareMatrix3(false), const unsigned int minimalCorrespondences=10u, const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_SQUARE, const Scalar minimalValidCorrespondenceRatio=Scalar(1), const Scalar maximalSqrError=Scalar(3.5 *3.5), Scalar *finalRobustError=nullptr, unsigned int *correspondences=nullptr)
Determines the camera 3-DOF orientation (as the camera has rotational motion only) for a specific cam...
Definition: Solver3.h:2209
static bool determineInitialObjectPointsFromDenseFramesRANSAC(const PinholeCamera &pinholeCamera, const ImagePointGroups &imagePointGroups, RandomGenerator &randomGenerator, HomogenousMatrices4 &validPoses, Indices32 &validPoseIds, Vectors3 &objectPoints, Indices32 &validObjectPointIndices, const unsigned int iterations=20u, const RelativeThreshold &minimalValidObjectPoints=RelativeThreshold(10u, Scalar(0.3), 20u), const Scalar maximalSqrError=Scalar(3.5 *3.5), Worker *worker=nullptr, bool *abort=nullptr)
Determines the initial object point positions for a set of frames (image point groups) observing the ...
static bool determinePlane(const ConstIndexedAccessor< Vector3 > &objectPoints, RandomGenerator &randomGenerator, Plane3 &plane, const RelativeThreshold &minimalValidObjectPoints=RelativeThreshold(3u, Scalar(0.5), 20u), const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_HUBER, Scalar *finalError=nullptr, Indices32 *validIndices=nullptr)
Determines a 3D plane best fitting to a set of given 3D object points.
Definition: Solver3.h:2422
static bool determineUnknownObjectPoints(const Database &database, const AnyCamera &camera, const unsigned int lowerFrame, const unsigned int upperFrame, Vectors3 &newObjectPoints, Indices32 &newObjectPointIds, const unsigned int minimalKeyFrames=3u, const unsigned int maximalKeyFrames=10u, const Scalar maximalSqrError=Scalar(3.5 *3.5), Worker *worker=nullptr, bool *abort=nullptr)
Determines the positions of new object points from a database within a specified frame range.
AccuracyMethod
Definition of individual methods to determine the accuracy of object points.
Definition: Solver3.h:78
@ AM_INVALID
Invalid method.
Definition: Solver3.h:80
@ AM_MEAN_DIRECTION_MEAN_COSINE
Determination of the mean absolute cosine value between the mean observation direction and each obser...
Definition: Solver3.h:84
@ AM_MEAN_DIRECTION_MIN_COSINE
Determination of the minimal absolute cosine values between the mean observation direction and each o...
Definition: Solver3.h:82
static Scalars determineObjectPointsAccuracy(const Database &database, const PinholeCamera &pinholeCamera, const Indices32 &objectPointIds, const AccuracyMethod accuracyMethhod, const unsigned int lowerFrame=(unsigned int)(-1), const unsigned int upperFrame=(unsigned int)(-1), Worker *worker=nullptr)
Measures the accuracy of several 3D object points.
static bool optimizeCameraWithVariableObjectPointsAndPoses(const Database &database, const PinholeCamera &pinholeCamera, const PinholeCamera::OptimizationStrategy optimizationStrategy, PinholeCamera &optimizedCamera, Vectors3 *optimizedObjectPoints=nullptr, Indices32 *optimizedObjectPointIds=nullptr, HomogenousMatrices4 *optimizedKeyFramePoses=nullptr, Indices32 *optimizedKeyFramePoseIds=nullptr, const unsigned int minimalKeyFrames=3u, const unsigned int maximalKeyFrames=20u, const unsigned int minimalObservations=10u, const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_SQUARE, const unsigned int iterations=50u, Scalar *initialRobustError=nullptr, Scalar *finalRobustError=nullptr)
Optimizes 3D object points (having a quite good accuracy already) and optimizes the camera poses and ...
static bool determineInitialObjectPointsFromDenseFrames(const PinholeCamera &pinholeCamera, const ImagePointGroups &imagePointGroups, RandomGenerator &randomGenerator, const unsigned int firstGroupIndex, const unsigned int secondGroupIndex, HomogenousMatrices4 &validPoses, Indices32 &validPoseIds, Scalar &totalSqrError, Vectors3 &objectPoints, Indices32 &validObjectPointIndices, const RelativeThreshold &minimalValidObjectPoints=RelativeThreshold(10u, Scalar(0.3), 20u), const Scalar maximalSqrError=Scalar(3.5 *3.5))
Determines the initial object point positions for a set of image point groups (covering a range of im...
static bool determineInitialObjectPointsFromSparseKeyFrames(const PinholeCamera &pinholeCamera, const Database::ImagePointGroups &imagePointGroups, RandomGenerator &randomGenerator, const unsigned int firstGroupIndex, const unsigned int secondGroupIndex, HomogenousMatrices4 &poses, Indices32 &validPoseIndices, Vectors3 &objectPoints, Indices32 &validObjectPointIndices, const RelativeThreshold &minimalValidObjectPoints=RelativeThreshold(10u, Scalar(0.3), 20u), const Scalar maximalSqrError=Scalar(3.5 *3.5))
Determines the initial object point positions for a set of key frames (image point groups) observing ...
static bool trackObjectPoints(const Database &database, const Indices32 &priorityObjectPointIds, const Indices32 &remainingObjectPointIds, const unsigned int lowerFrame, const unsigned int startFrame, const unsigned int upperFrame, const unsigned int minimalTrackedPriorityObjectPoints, const Scalar minimalRemainingFramesRatio, const unsigned int maximalTrackedPriorityObjectPoints, const unsigned int maximalTrackedRemainingObjectPoints, Indices32 &trackedObjectPointIds, ImagePointGroups &trackedImagePointGroups, Indices32 *trackedValidPriorityIndices=nullptr, Indices32 *trackedValidRemainingIndices=nullptr, bool *abort=nullptr)
This function tracks two individual disjoint groups of image points (defined by their object points)...
static bool determineProjectionError(const Database &database, const PinholeCamera &pinholeCamera, const Index32 poseId, const bool useDistortionParameters, unsigned int *validCorrespondences=nullptr, Scalar *minimalSqrError=nullptr, Scalar *averageSqrError=nullptr, Scalar *maximalSqrError=nullptr)
Determines the accuracy of a camera pose for all valid object points visible in the frame by measurin...
static Scalar averagePointDistance(const Vector2 *points, const size_t size)
Determines the average distance between the center of a set of given points and each of the points.
static void determinePosesSubset(const Database *database, const AnyCamera *camera, const IndexSet32 *priorityObjectPointIds, const bool solePriorityPoints, RandomGenerator *randomGenerator, const unsigned int lowerFrame, const unsigned int upperFrame, const unsigned int minimalCorrespondences, ShiftVector< HomogenousMatrix4 > *poses, const Geometry::Estimator::EstimatorType estimator, const Scalar minimalValidCorrespondenceRatio, const Scalar ransacMaximalSqrError, const Scalar maximalRobustError, Scalar *totalError, Lock *lock, bool *abort, const unsigned int numberThreads, const unsigned int threadIndex, const unsigned int numberThreadsOne)
Determines a subset of the camera poses depending on valid 2D/3D points correspondences within a rang...
static bool determinePlane(const Database &database, const Index32 frameIndex, const CV::SubRegion &subRegion, RandomGenerator &randomGenerator, Plane3 &plane, const RelativeThreshold &minimalValidObjectPoints=RelativeThreshold(3u, Scalar(0.5), 20u), const Geometry::Estimator::EstimatorType estimator=Geometry::Estimator::ET_HUBER, Scalar *finalError=nullptr, Indices32 *usedObjectPointIds=nullptr)
Determines a 3D plane best fitting to a set of given 3D object point ids which are specified by a giv...
static bool trackObjectPoints(const Database &database, const Indices32 &objectPointIds, const unsigned int lowerFrame, const unsigned int startFrame, const unsigned int upperFrame, const unsigned int minimalTrackedObjectPoints, const unsigned int minimalTrackedFrames, const unsigned int maximalTrackedObjectPoints, Indices32 &trackedObjectPointIds, ImagePointGroups &trackedImagePointGroups, Indices32 *trackedValidIndices=nullptr, bool *abort=nullptr)
This function tracks image points (defined by their object points) from one frame to the sibling fra...
This class implements a worker able to distribute function calls over different threads.
Definition: Worker.h:33
T minmax(const T &lowerBoundary, const T &value, const T &upperBoundary)
This function fits a given parameter into a specified value range.
Definition: base/Utilities.h:903
static void createIndices(const size_t numberIndices, const T &startIndex, std::vector< T > &indices)
Fills a vector with increasing index values.
Definition: base/Utilities.h:1265
std::set< Index32 > IndexSet32
Definition of a set holding 32 bit indices.
Definition: Base.h:114
std::vector< Index32 > Indices32
Definition of a vector holding 32 bit index values.
Definition: Base.h:96
uint32_t Index32
Definition of a 32 bit index value.
Definition: Base.h:84
SquareMatrixT3< Scalar > SquareMatrix3
Definition of the SquareMatrix3 object, depending on the OCEAN_MATH_USE_SINGLE_PRECISION either with ...
Definition: SquareMatrix3.h:35
float Scalar
Definition of a scalar type.
Definition: Math.h:128
std::vector< HomogenousMatrix4 > HomogenousMatrices4
Definition of a vector holding HomogenousMatrix4 objects.
Definition: HomogenousMatrix4.h:73
std::vector< Scalar > Scalars
Definition of a vector holding Scalar objects.
Definition: Math.h:144
VectorT3< Scalar > Vector3
Definition of a 3D vector.
Definition: Vector3.h:22
std::vector< Vector2 > Vectors2
Definition of a vector holding Vector2 objects.
Definition: Vector2.h:64
HomogenousMatrixT4< Scalar > HomogenousMatrix4
Definition of the HomogenousMatrix4 object, depending on the OCEAN_MATH_USE_SINGLE_PRECISION flag eit...
Definition: HomogenousMatrix4.h:37
std::vector< Vector3 > Vectors3
Definition of a vector holding Vector3 objects.
Definition: Vector3.h:65
VectorT2< Scalar > Vector2
Definition of a 2D vector.
Definition: Vector2.h:21
The namespace covering the entire Ocean framework.
Definition: Accessor.h:15