MultiViewMapCreator.h
/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

#ifndef META_OCEAN_TRACKING_MAPBUILDING_MULTI_VIEW_MAP_CREATOR_H
#define META_OCEAN_TRACKING_MAPBUILDING_MULTI_VIEW_MAP_CREATOR_H

#include "ocean/base/Worker.h"

#include "ocean/math/AnyCamera.h"

namespace Ocean
{

namespace Tracking
{

namespace MapBuilding
{

/**
 * This class implements a creator for real-time feature maps based on multiple views (e.g., an HMD).
 * The lower two stereo cameras will be used to detect new features while the remaining cameras will be used to add observations of existing features.
 * @ingroup trackingmapbuilding
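 *
 * Usage sketch (hypothetical variable names; the caller is assumed to provide synchronized frames, camera profiles, and camera poses):
 * @code
 * MultiViewMapCreator mapCreator;
 *
 * // for every new set of synchronized multi-view frames:
 * Frames yFrames = ...; // one FORMAT_Y8 frame per camera
 * SharedAnyCameras cameras = ...; // one camera profile per frame
 * HomogenousMatrix4 world_T_device = ...; // device pose in world
 * HomogenousMatrices4 device_T_cameras = ...; // camera poses in device coordinates
 *
 * if (mapCreator.processFrame(yFrames, cameras, world_T_device, device_T_cameras))
 * {
 *     Vectors3 objectPoints;
 *     std::vector<CV::Detector::FREAKDescriptors32> multiDescriptors;
 *
 *     if (mapCreator.latestFeatureMap(objectPoints, &multiDescriptors))
 *     {
 *         // objectPoints and multiDescriptors now describe the latest feature map
 *     }
 * }
 * @endcode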
 */
class OCEAN_TRACKING_MAPBUILDING_EXPORT MultiViewMapCreator : public DescriptorHandling
{
	public:

		/**
		 * Definition of a pair combining a frame index with a camera index.
		 */
		class ObservationPair
		{
			public:

				/**
				 * Creates a new observation pair.
				 * @param frameIndex The index of the (multi-view) frame to which the observation belongs, with range [0, infinity)
				 * @param cameraIndex The index of the camera (within the multi-views) to which the observation belongs, with range [0, infinity)
				 */
				inline ObservationPair(const Index32 frameIndex, const Index32 cameraIndex);

			public:

				/// The index of the (multi-view) frame to which the observation belongs.
				Index32 frameIndex_ = Index32(-1);

				/// The index of the camera (within the multi-views) to which the observation belongs.
				Index32 cameraIndex_ = Index32(-1);
		};

	protected:

		/**
		 * This class holds the relevant information for one 3D feature point.
		 */
		class OCEAN_TRACKING_MAPBUILDING_EXPORT Feature
		{
			public:

				/**
				 * Definition of individual localization results.
				 */
				enum LocalizationResult : uint32_t
				{
					/// The localization failed.
					LR_FAILED = 0u,
					/// The localization has been skipped.
					LR_SKIPPED,
					/// The localization succeeded.
					LR_SUCCEEDED
				};

				/**
				 * This class holds the relevant information for one observation of a 3D feature.
				 */
				class Observation
				{
					public:

						/**
						 * Creates a new observation object.
						 * @param imagePoint The 2D image point of the observation within the camera image
						 * @param observationPair The observation pair defining to which cameras/images the observation belongs
						 * @param descriptor The descriptor of the observation
						 */
						inline Observation(const Vector2& imagePoint, const ObservationPair& observationPair, const CV::Detector::FREAKDescriptor32& descriptor);

					public:

						/// The 2D location of the observation within one camera image.
						Vector2 imagePoint_;

						/// The observation pair defining to which cameras/images the observation belongs.
						ObservationPair observationPair_;

						/// The descriptor of the observation.
						CV::Detector::FREAKDescriptor32 descriptor_;
				};

				/**
				 * Definition of a vector holding observations.
				 */
				typedef std::vector<Observation> Observations;

			public:

				/**
				 * Creates a new feature object which has been determined in a mono view.
				 * @param imagePoint The 2D observation of the 3D feature point in the camera
				 * @param observationPair The observation pair defining in which camera image the feature point was observed
				 * @param descriptor The descriptor of the feature point
				 */
				Feature(const Vector2& imagePoint, const ObservationPair& observationPair, const CV::Detector::FREAKDescriptor32& descriptor);

				/**
				 * Creates a new feature object which has been determined in a stereo view.
				 * @param imagePointA The 2D observation of the 3D feature point in the first camera
				 * @param imagePointB The 2D observation of the 3D feature point in the second camera
				 * @param observationPairA The observation pair defining in which camera image the feature point was observed in the first camera
				 * @param observationPairB The observation pair defining in which camera image the feature point was observed in the second camera
				 * @param descriptorA The descriptor of the feature point in the first image
				 * @param descriptorB The descriptor of the feature point in the second image
				 * @param world_T_cameraA The transformation between first camera and world, must be valid
				 * @param world_T_cameraB The transformation between second camera and world, must be valid
				 * @param objectPoint The 3D location of the feature point
				 */
				Feature(const Vector2& imagePointA, const Vector2& imagePointB, const ObservationPair& observationPairA, const ObservationPair& observationPairB, const CV::Detector::FREAKDescriptor32& descriptorA, const CV::Detector::FREAKDescriptor32& descriptorB, const HomogenousMatrix4& world_T_cameraA, const HomogenousMatrix4& world_T_cameraB, const Vector3& objectPoint);

				/**
				 * Returns all observations of this feature point.
				 * @return The feature's observations
				 */
				inline const Observations& observations() const;

				/**
				 * Returns the 3D object point of this feature point, if known already.
				 * @return The feature's 3D object point location
				 * @see isLocalized().
				 */
				inline const Vector3& objectPoint() const;

				/**
				 * Returns whether this feature point is localized in 3D space.
				 * In case the feature is localized, a valid 3D location is known based on the feature's observations.
				 * @return True, if so
				 */
				inline bool isLocalized() const;

				/**
				 * Adds a new observation for the feature.
				 * @param imagePoint The 2D image point within the image at which the feature was observed
				 * @param observationPair The observation pair defining in which image the image point is defined
				 * @param descriptor The descriptor of the 2D observation
				 * @param randomGenerator Random generator object
				 */
				void addObservation(const Vector2& imagePoint, const ObservationPair& observationPair, const CV::Detector::FREAKDescriptor32& descriptor, RandomGenerator& randomGenerator);

				/**
				 * (Re-)localizes this feature.
				 * @param world_T_cameraGroups The groups of transformations between cameras and world, one group for each multi-view camera
				 * @param cameraGroups The groups of cameras defining the projection, one group for each group of transformations
				 * @return The localization result
				 */
				LocalizationResult localizeObjectPoint(const std::vector<HomogenousMatrices4>& world_T_cameraGroups, const std::vector<SharedAnyCameras>& cameraGroups);

				/**
				 * Informs the feature that it has not been observed.
				 * @param sqrDistance The square distance between camera and feature point, with range (0, infinity)
				 * @param secondsPerFrame The seconds since the last frame, with range (0, infinity)
				 * @return True, if the feature could have been observed based on the distance between camera and feature point; False, if the feature was outside visibility range
				 */
				bool failedObservation(const Scalar sqrDistance, const double secondsPerFrame);

				/**
				 * Copies the observations from a second feature, e.g., to join two features.
				 * @param feature The second feature from which the observations will be copied
				 */
				void copyObservations(const Feature& feature);

				/**
				 * Returns the stability factor of this feature.
				 * @return The feature's stability factor; 0 means not stable, 1 means stable, with range [0, 1]
				 */
				inline Scalar stabilityFactor() const;

				/**
				 * Returns whether this feature is not stable anymore and should be removed.
				 * @return True, if so
				 */
				inline bool isInstable() const;

			protected:

				/// The overall number of observations for this feature.
				size_t observationIterations_ = 0;

				/// The observations of this feature.
				Observations observations_;

				/// The 3D location of this feature, defined in world.
				Vector3 objectPoint_ = Vector3(Numeric::minValue(), Numeric::minValue(), Numeric::minValue());

				/// The minimal distance between 3D object point and camera at which the feature has been observed, adjusted with a generous threshold.
				Scalar minimalObservationSqrDistance_ = Numeric::maxValue();

				/// The maximal distance between 3D object point and camera at which the feature has been observed, adjusted with a generous threshold.
				Scalar maximalObservationSqrDistance_ = Numeric::minValue();

				/// The minimal number of observation iterations necessary when the next re-localization will be invoked.
				size_t nextLocalizationObservationIterations_ = 3;

				/// The squared diagonal size of the bounding box in which all camera poses are located.
				Scalar sqrBaseline_ = 0;

				/// The time since the feature has been observed the last time.
				double timeSinceLastObservation_ = 0.0;

				/// The maximal number of observations that will be kept.
				static constexpr size_t maxObservations_ = 100;

				/// The adjustment threshold for the observation distance, with range (0, 1), e.g., 0.25 = 75% and 125%
				static constexpr Scalar observationThreshold_ = Scalar(0.25);

				/// The maximal time a feature exists without any re-observation, in seconds.
				static constexpr double maxTimeWithoutObservation_ = 5.0;
		};

		/**
		 * Definition of a vector holding features.
		 */
		typedef std::vector<Feature> Features;

	public:

		/**
		 * Processes multi-frames to extend the current feature map.
		 * @param yFrames The camera images to be used, with pixel format FORMAT_Y8, at least two
		 * @param cameras The camera profiles of the given camera images, one for each camera image
		 * @param world_T_device The transformation between the device and the world, must be valid
		 * @param device_T_cameras The transformations between cameras and the device, with each camera pointing towards the negative z-space with y-axis pointing upwards, one for each camera image
		 * @return True, if succeeded
		 */
		bool processFrame(const Frames& yFrames, const SharedAnyCameras& cameras, const HomogenousMatrix4& world_T_device, const HomogenousMatrices4& device_T_cameras);

		/**
		 * Extracts the latest feature map from this creator.
		 * @param objectPoints The resulting 3D object points of all features
		 * @param multiDescriptors Optional resulting multi-descriptors for the 3D feature points, each feature point can be associated with several descriptors, one for each 3D object point
		 * @param stabilityFactors Optional resulting stability factors for feature points; 0 means instable, 1 means stable; one for each 3D object point
		 * @param minimalObservations The minimal number of observations each resulting feature point must have, with range [2, infinity)
		 * @param maximalDescriptorsPerFeaturePoint The maximal number of feature descriptors that will be returned per feature point, with range [1, infinity)
		 * @return True, if succeeded
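		 *
		 * Example (a minimal sketch; 'mapCreator' is a hypothetical creator which has already processed frames):
		 * @code
		 * Vectors3 objectPoints;
		 * std::vector<CV::Detector::FREAKDescriptors32> multiDescriptors;
		 * Scalars stabilityFactors;
		 *
		 * if (mapCreator.latestFeatureMap(objectPoints, &multiDescriptors, &stabilityFactors, 20))
		 * {
		 *     // objectPoints[i], multiDescriptors[i], and stabilityFactors[i] describe the i-th feature
		 * }
		 * @endcode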
		 */
		bool latestFeatureMap(Vectors3& objectPoints, std::vector<CV::Detector::FREAKDescriptors32>* multiDescriptors = nullptr, Scalars* stabilityFactors = nullptr, const size_t minimalObservations = 10, const size_t maximalDescriptorsPerFeaturePoint = 10) const;

		/**
		 * Determines the indices of the two lower (hopefully overlapping) stereo cameras.
		 * @param device_T_cameras The transformations between cameras and device, with each camera pointing towards the negative z-space with y-axis pointing upwards
		 * @param stereoCameraIndices The resulting indices of the stereo cameras
		 * @return True, if succeeded
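		 *
		 * Example (a minimal sketch; 'device_T_cameras' is assumed to be provided by the caller):
		 * @code
		 * IndexPair32 stereoCameraIndices;
		 * if (MultiViewMapCreator::determineLowerStereoCameras(device_T_cameras, stereoCameraIndices))
		 * {
		 *     // stereoCameraIndices holds the indices of the two lower stereo cameras
		 * }
		 * @endcode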
		 */
		static bool determineLowerStereoCameras(const HomogenousMatrices4& device_T_cameras, IndexPair32& stereoCameraIndices);

	protected:

		/**
		 * Determines observations for existing localized 3D feature points.
		 * Further, features which have not been observed for a while will be removed.
		 * @param currentFrameIndex The index of the current frame, with range [0, infinity)
		 * @param currentTimestamp The timestamp of the current multi frames, must be valid
		 * @param world_T_device The transformation between the device and the world, must be valid
		 * @param world_T_currentCameras The transformations between cameras and world, one for each multi frame
		 * @param currentCameras The camera profiles defining the projection, one for each multi frame
		 * @param distributionArrays The distribution arrays for all image points, one for each multi frame
		 */
		void determineObservations(const Index32 currentFrameIndex, const Timestamp& currentTimestamp, const HomogenousMatrix4& world_T_device, const HomogenousMatrices4& world_T_currentCameras, const SharedAnyCameras& currentCameras, std::vector<Geometry::SpatialDistribution::DistributionArray>& distributionArrays);

		/**
		 * Determines features in all frames.
		 * @param yFrames The camera frames in which the features will be determined, with pixel format FORMAT_Y8, at least one
		 * @param cameras The camera profiles defining the projection, one for each camera frame
		 * @param imagePointGroups The resulting groups of image points (detected features), one group for each camera frame
		 * @param descriptorGroups The resulting groups of feature descriptors, one group for each camera frame, one descriptor for each image point
		 * @param cornerPyramidLevelGroups The resulting groups of pyramid levels in which the individual features were detected, one group for each camera frame
		 * @param distributionArrays The resulting distribution arrays for all image points, one for each camera frame
		 * @param worker Optional worker to distribute the computation
		 */
		static void determineImageFeatures(const Frames& yFrames, const SharedAnyCameras& cameras, std::vector<Vectors2>& imagePointGroups, std::vector<CV::Detector::FREAKDescriptors32>& descriptorGroups, std::vector<Indices32>& cornerPyramidLevelGroups, std::vector<Geometry::SpatialDistribution::DistributionArray>& distributionArrays, Worker* worker);

		/**
		 * Determines features in a subset of all frames.
		 * @param yFrames The camera frames in which the features will be determined, with pixel format FORMAT_Y8, at least one
		 * @param cameras The camera profiles defining the projection, one for each camera frame
		 * @param imagePointGroups The resulting groups of image points (detected features), one group for each camera frame
		 * @param descriptorGroups The resulting groups of feature descriptors, one group for each camera frame, one descriptor for each image point
		 * @param cornerPyramidLevelGroups The resulting groups of pyramid levels in which the individual features were detected, one group for each camera frame
		 * @param distributionArrays The resulting distribution arrays for all image points, one for each camera frame
		 * @param firstGroup The first group to be handled, with range [0, yFrames.size() - 1]
		 * @param numberGroups The number of groups to be handled, with range [1, yFrames.size() - firstGroup]
		 */
		static void determineImageFeaturesSubset(const Frame* yFrames, const SharedAnyCamera* cameras, Vectors2* imagePointGroups, CV::Detector::FREAKDescriptors32* descriptorGroups, Indices32* cornerPyramidLevelGroups, Geometry::SpatialDistribution::DistributionArray* distributionArrays, const unsigned int firstGroup, const unsigned int numberGroups);

		/**
		 * Determines feature matches between two stereo images.
		 * @param cameraA The camera profile of the first camera, must be valid
		 * @param cameraB The camera profile of the second camera, must be valid
		 * @param world_T_cameraA The transformation between the first camera and world, with camera pointing towards negative z-space and y-axis upwards, must be valid
		 * @param world_T_cameraB The transformation between the second camera and world, with camera pointing towards negative z-space and y-axis upwards, must be valid
		 * @param imagePointsA The image points of all features in the first camera frame
		 * @param imagePointsB The image points of all features in the second camera frame
		 * @param descriptorsA The descriptors for all image points in the first camera frame, one for each image point
		 * @param descriptorsB The descriptors for all image points in the second camera frame, one for each image point
		 * @param pyramidLevelsA The pyramid levels in which the first image points have been detected, one for each image point
		 * @param pyramidLevelsB The pyramid levels in which the second image points have been detected, one for each image point
		 * @param objectPoints The resulting 3D object points of all matched stereo features
		 * @return The index pairs of all matched features
		 */
		static IndexPairs32 matchStereoFeatures(const AnyCamera& cameraA, const AnyCamera& cameraB, const HomogenousMatrix4& world_T_cameraA, const HomogenousMatrix4& world_T_cameraB, const Vectors2& imagePointsA, const Vectors2& imagePointsB, const CV::Detector::FREAKDescriptors32& descriptorsA, const CV::Detector::FREAKDescriptors32 descriptorsB, const Indices32& pyramidLevelsA, const Indices32& pyramidLevelsB, Vectors3& objectPoints);

		/**
		 * Determines feature matches between two mono images (using the same camera at different moments in time).
		 * Features matched in mono frames cannot be localized immediately due to their smaller baseline.
		 * @param cameraA The camera profile of the first camera, must be valid
		 * @param cameraB The camera profile of the second camera, must be valid
		 * @param world_T_cameraA The transformation between the first camera and world, with camera pointing towards negative z-space and y-axis upwards, must be valid
		 * @param world_T_cameraB The transformation between the second camera and world, with camera pointing towards negative z-space and y-axis upwards, must be valid
		 * @param imagePointsA The image points of all features in the first camera frame
		 * @param imagePointsB The image points of all features in the second camera frame
		 * @param descriptorsA The descriptors for all image points in the first camera frame, one for each image point
		 * @param descriptorsB The descriptors for all image points in the second camera frame, one for each image point
		 * @param pyramidLevelsA The pyramid levels in which the first image points have been detected, one for each image point
		 * @param pyramidLevelsB The pyramid levels in which the second image points have been detected, one for each image point
		 * @return The index pairs of all matched features
		 */
		static IndexPairs32 matchMonoFeatures(const AnyCamera& cameraA, const AnyCamera& cameraB, const HomogenousMatrix4& world_T_cameraA, const HomogenousMatrix4& world_T_cameraB, const Vectors2& imagePointsA, const Vectors2& imagePointsB, const CV::Detector::FREAKDescriptors32& descriptorsA, const CV::Detector::FREAKDescriptors32 descriptorsB, const Indices32& pyramidLevelsA, const Indices32& pyramidLevelsB);

	protected:

		/// The groups of transformations between cameras and world, one group for each multi-frame, one transformation for each frame index.
		std::vector<HomogenousMatrices4> world_T_cameraGroups_;

		/// The groups of camera profiles, one group for each multi-frame, one profile for each frame index.
		std::vector<SharedAnyCameras> cameraGroups_;

		/// The groups of image points detected in the current multi-frames.
		std::vector<Vectors2> currentImagePointGroups_;

		/// The groups of descriptors for the detected image points in the current multi-frames.
		std::vector<CV::Detector::FREAKDescriptors32> currentDescriptorGroups_;

		/// The groups of pyramid levels in which the image points have been detected in the current multi-frames.
		std::vector<Indices32> currentCornerPyramidLevelGroups_;

		/// The timestamp of the last frame.
		Timestamp lastTimestamp_ = Timestamp(false);

		/// The features which have been detected so far.
		Features features_;

		/// The creator's random generator object.
		RandomGenerator randomGenerator_;

		/// The creator's lock.
		mutable Lock lock_;

		/// The maximal expected projection distance between two corresponding feature points per pixel.
		static constexpr Scalar maxPixelDistanceFactor_ = Scalar(0.0625); // 40px for 640px
};

inline MultiViewMapCreator::ObservationPair::ObservationPair(const Index32 frameIndex, const Index32 cameraIndex) :
	frameIndex_(frameIndex),
	cameraIndex_(cameraIndex)
{
	// nothing to do here
}

inline MultiViewMapCreator::Feature::Observation::Observation(const Vector2& imagePoint, const ObservationPair& observationPair, const CV::Detector::FREAKDescriptor32& descriptor) :
	imagePoint_(imagePoint),
	observationPair_(observationPair),
	descriptor_(descriptor)
{
	// nothing to do here
}

inline const MultiViewMapCreator::Feature::Observations& MultiViewMapCreator::Feature::observations() const
{
	return observations_;
}

inline const Vector3& MultiViewMapCreator::Feature::objectPoint() const
{
	return objectPoint_;
}

inline bool MultiViewMapCreator::Feature::isLocalized() const
{
	return objectPoint_.x() != Numeric::minValue();
}

inline Scalar MultiViewMapCreator::Feature::stabilityFactor() const
{
	static_assert(maxTimeWithoutObservation_ > 0.0, "Invalid parameter!");

	return Scalar(1.0 - std::min(timeSinceLastObservation_ / maxTimeWithoutObservation_, 1.0));
}

inline bool MultiViewMapCreator::Feature::isInstable() const
{
	return timeSinceLastObservation_ >= maxTimeWithoutObservation_;
}

}

}

}

#endif // META_OCEAN_TRACKING_MAPBUILDING_MULTI_VIEW_MAP_CREATOR_H