EpipolarGeometry.h
/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

#ifndef META_OCEAN_GEOMETRY_EPIPOLAR_GEOMETRY_H
#define META_OCEAN_GEOMETRY_EPIPOLAR_GEOMETRY_H

#include "ocean/geometry/Geometry.h"

#include "ocean/math/AnyCamera.h"
#include "ocean/math/Line2.h"
#include "ocean/math/PinholeCamera.h"
#include "ocean/math/Quaternion.h"
#include "ocean/math/SquareMatrix3.h"
#include "ocean/math/Vector3.h"

#include <vector>

namespace Ocean
{

namespace Geometry
{

/**
 * This class implements epipolar geometry functions.
 * @ingroup geometry
 */
class OCEAN_GEOMETRY_EXPORT EpipolarGeometry
{
	public:

		/**
		 * Calculates the fundamental matrix by two sets of at least eight corresponding image points.
		 * @param leftPoints Left image points
		 * @param rightPoints Right image points
		 * @param correspondences Number of point correspondences (at least 8)
		 * @param fundamental Resulting fundamental matrix
		 * @return True, if succeeded
		 */
		static bool fundamentalMatrix(const ImagePoint* leftPoints, const ImagePoint* rightPoints, const size_t correspondences, SquareMatrix3& fundamental);

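		/*
		 * Editor's usage sketch (not part of the original header), assuming the Ocean namespace and
		 * hypothetical point sets with at least eight correspondences:
		 *
		 *   Geometry::ImagePoints leftPoints = ...;   // points observed in the left image
		 *   Geometry::ImagePoints rightPoints = ...;  // corresponding points in the right image
		 *
		 *   SquareMatrix3 fundamental;
		 *   if (Geometry::EpipolarGeometry::fundamentalMatrix(leftPoints.data(), rightPoints.data(), leftPoints.size(), fundamental))
		 *   {
		 *       // for an ideal correspondence (xl, xr): Vector3(xr, 1) * (fundamental * Vector3(xl, 1)) == 0
		 *   }
		 */
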
		/**
		 * Returns the inverse fundamental matrix.
		 * Actually, the matrix will simply be transposed.
		 * @param fundamental The fundamental matrix F with property xr F xl = 0
		 * @return Fundamental matrix F' with property xl F' xr = 0
		 */
		static SquareMatrix3 inverseFundamentalMatrix(const SquareMatrix3& fundamental);

		/**
		 * Calculates the essential matrix by the rotation and translation between two cameras.
		 * The matrix will be calculated from the extrinsic camera matrix of the right camera relative to the left camera.<br>
		 * The camera is pointing into the negative z-direction with the positive y-direction as up-vector.<br>
		 * The right extrinsic camera matrix transforms points defined in the right camera coordinate system into points defined in the left camera coordinate system.
		 *
		 * However, as the essential matrix needs the inverted extrinsic matrix of the right camera, the given extrinsic matrix will be inverted before the essential matrix is created.<br>
		 * The essential matrix is then defined by the product of the skew-symmetric matrix of the translation and the rotation matrix of the (now inverted) extrinsic (right) camera matrix.
		 *
		 * Further, the essential matrix is defined for cameras pointing into the positive z-direction.<br>
		 * Thus, the given extrinsic camera matrix will be flipped around the x-axis (by 180 deg) before computing the essential matrix.
		 * <pre>
		 * Thus E is defined by:
		 * E = skew[T.translation()] * T.rotationMatrix(),
		 * T = extrinsicFlipped.inverted(),
		 * extrinsicFlipped = flipMatrix * extrinsic * flipMatrix
		 * </pre>
		 * @param extrinsic The extrinsic camera matrix of the right camera relative to the left camera (rightTleft)
		 * @return Essential matrix
		 */
		static SquareMatrix3 essentialMatrix(const HomogenousMatrix4 extrinsic);

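		/*
		 * Editor's usage sketch (not part of the original header), assuming a hypothetical pose
		 * rightTleft of the right camera relative to the left camera (small rotation around the
		 * y-axis plus a baseline along the x-axis):
		 *
		 *   const HomogenousMatrix4 rightTleft(Vector3(Scalar(0.1), 0, 0), Quaternion(Vector3(0, 1, 0), Numeric::deg2rad(5)));
		 *   const SquareMatrix3 essential = Geometry::EpipolarGeometry::essentialMatrix(rightTleft);
		 *
		 *   // the essential matrix is determined up to scale and relates corresponding
		 *   // normalized image points of the left and the right camera
		 */
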
		/**
		 * Calculates the fundamental matrix by the given essential matrix and the two intrinsic camera matrices.
		 * @param essential The essential matrix to convert into fundamental matrix
		 * @param leftIntrinsic Left intrinsic camera matrix
		 * @param rightIntrinsic Right intrinsic camera matrix
		 * @return Resulting fundamental matrix
		 */
		static SquareMatrix3 essential2fundamental(const SquareMatrix3& essential, const SquareMatrix3& leftIntrinsic, const SquareMatrix3& rightIntrinsic);

		/**
		 * Calculates the fundamental matrix by the given essential matrix and the two cameras.
		 * @param essential The essential matrix to convert into fundamental matrix
		 * @param leftCamera Left camera
		 * @param rightCamera Right camera
		 * @return Resulting fundamental matrix
		 */
		static SquareMatrix3 essential2fundamental(const SquareMatrix3& essential, const PinholeCamera& leftCamera, const PinholeCamera& rightCamera);

		/**
		 * Calculates the essential matrix by the given fundamental matrix and the two intrinsic camera matrices.
		 * @param fundamental The fundamental matrix to convert into essential matrix
		 * @param leftIntrinsic Left intrinsic camera matrix
		 * @param rightIntrinsic Right intrinsic camera matrix
		 * @return Resulting essential matrix
		 */
		static SquareMatrix3 fundamental2essential(const SquareMatrix3& fundamental, const SquareMatrix3& leftIntrinsic, const SquareMatrix3& rightIntrinsic);

		/**
		 * Calculates the essential matrix by the given fundamental matrix and the two cameras.
		 * @param fundamental The fundamental matrix to convert into essential matrix
		 * @param leftCamera Left camera
		 * @param rightCamera Right camera
		 * @return Resulting essential matrix
		 */
		static SquareMatrix3 fundamental2essential(const SquareMatrix3& fundamental, const PinholeCamera& leftCamera, const PinholeCamera& rightCamera);

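		/*
		 * Editor's note (not part of the original header): with the convention xr F xl = 0 used above,
		 * the standard relation between the two matrices is F = Kr^-T * E * Kl^-1, with Kl and Kr
		 * denoting the left and right intrinsic camera matrices. A hedged round-trip sketch, assuming
		 * two hypothetical intrinsic matrices are available:
		 *
		 *   const SquareMatrix3 fundamental = Geometry::EpipolarGeometry::essential2fundamental(essential, leftIntrinsic, rightIntrinsic);
		 *   const SquareMatrix3 essentialAgain = Geometry::EpipolarGeometry::fundamental2essential(fundamental, leftIntrinsic, rightIntrinsic);
		 *
		 *   // essentialAgain should match the input essential matrix up to a scale factor
		 */
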
		/**
		 * Determines the two epipoles corresponding to a fundamental matrix.
		 * This method uses the singular value decomposition for the calculation.
		 * @param fundamental The fundamental matrix to extract the epipoles from
		 * @param leftEpipole Resulting left epipole
		 * @param rightEpipole Resulting right epipole
		 * @return True, if succeeded
		 */
		static bool epipoles(const SquareMatrix3& fundamental, Vector2& leftEpipole, Vector2& rightEpipole);

		/**
		 * Determines the two epipoles corresponding to two cameras separated by an extrinsic camera matrix.
		 * The epipoles are calculated from the extrinsic camera matrix of the right camera relative to the left camera<br>
		 * and the two intrinsic camera matrices of both cameras.
		 * @param extrinsic The extrinsic camera matrix of the right camera relative to the left camera (rightTleft)
		 * @param leftIntrinsic Intrinsic camera matrix of the left camera
		 * @param rightIntrinsic Intrinsic camera matrix of the right camera
		 * @param leftEpipole Resulting left epipole
		 * @param rightEpipole Resulting right epipole
		 * @return True, if succeeded
		 * @see essentialMatrix().
		 */
		static bool epipoles(const HomogenousMatrix4& extrinsic, const SquareMatrix3& leftIntrinsic, const SquareMatrix3& rightIntrinsic, Vector2& leftEpipole, Vector2& rightEpipole);

		/**
		 * Finds the two epipoles corresponding to a fundamental matrix.
		 * This method calculates the intersection of two epipolar lines.
		 * If no intersection can be found, the SVD-based calculation is used instead.
		 * @param fundamental The fundamental matrix to extract the epipoles from
		 * @param leftEpipole Resulting left epipole
		 * @param rightEpipole Resulting right epipole
		 * @return True, if succeeded
		 */
		static bool epipolesFast(const SquareMatrix3& fundamental, Vector2& leftEpipole, Vector2& rightEpipole);

		/**
		 * Returns the epipolar line in the left image corresponding to a given point in the right image.
		 * @param fundamental The fundamental matrix
		 * @param rightPoint Point in the right image
		 * @return Resulting epipolar line in the left image
		 */
		static inline Line2 leftEpipolarLine(const SquareMatrix3& fundamental, const Vector2& rightPoint);

		/**
		 * Returns the epipolar line in the right image corresponding to a given point in the left image.
		 * @param fundamental The fundamental matrix
		 * @param leftPoint Point in the left image
		 * @return Resulting epipolar line in the right image
		 */
		static inline Line2 rightEpipolarLine(const SquareMatrix3& fundamental, const Vector2& leftPoint);

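		/*
		 * Editor's usage sketch (not part of the original header): measuring how well a hypothetical
		 * correspondence (leftPoint, rightPoint) agrees with a fundamental matrix; the call
		 * rightLine.distance(rightPoint) assumes that Line2 offers a point-to-line distance.
		 *
		 *   const Line2 rightLine = Geometry::EpipolarGeometry::rightEpipolarLine(fundamental, leftPoint);
		 *   const Scalar error = rightLine.distance(rightPoint);
		 *
		 *   // 'error' is the pixel distance between the right point and the epipolar line induced
		 *   // by the left point; small values indicate a consistent correspondence
		 */
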
		/**
		 * Factorizes the essential matrix into rotation and translation.
		 * Beware: The translation can be determined up to a scale factor only.<br>
		 * The resulting factorization provides the extrinsic camera matrix for the right camera, while the left camera has the identity extrinsic camera matrix.<br>
		 * Thus, the resulting transformation transforms points defined inside the right camera coordinate system into points defined inside the left camera coordinate system.
		 * @param essential The essential matrix to be factorized
		 * @param leftCamera Left camera corresponding to the left image point, must be valid
		 * @param rightCamera Right camera corresponding to the right image point, must be valid
		 * @param leftPoint One image point inside the left image, this point must correspond to the given right image point
		 * @param rightPoint One image point inside the right image, this point must correspond to the given left image point
		 * @param transformation Resulting transformation between the left and the right camera
		 * @return True, if succeeded
		 */
		static bool factorizeEssential(const SquareMatrix3& essential, const PinholeCamera& leftCamera, const PinholeCamera& rightCamera, const ImagePoint& leftPoint, const ImagePoint& rightPoint, HomogenousMatrix4& transformation);

		/**
		 * Factorizes an essential matrix into a camera pose composed of rotation and translation.
		 * Beware: The translation can be determined up to a scale factor only.<br>
		 * The factorization provides the extrinsic camera matrix (camera pose) for the right camera, while the left camera is expected to have the identity as extrinsic camera matrix.<br>
		 * The resulting transformation transforms points defined inside the right camera coordinate system into points defined inside the left camera coordinate system: pointLeft = transformation * pointRight.
		 * @param essential The essential matrix to be factorized
		 * @param leftCamera Left camera corresponding to the left image points, must be valid
		 * @param rightCamera Right camera corresponding to the right image points, must be valid
		 * @param leftPoints All image points inside the left image to be checked whether they produce 3D object points lying in front of the camera
		 * @param rightPoints All image points inside the right image, one for each left point
		 * @param transformation Resulting transformation between the left and the right camera
		 * @return The number of given image points resulting in valid object points, with range [0, infinity)
		 */
		static unsigned int factorizeEssential(const SquareMatrix3& essential, const PinholeCamera& leftCamera, const PinholeCamera& rightCamera, const ImagePoints& leftPoints, const ImagePoints& rightPoints, HomogenousMatrix4& transformation);

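		/*
		 * Editor's usage sketch (not part of the original header), assuming an essential matrix and
		 * hypothetical sets of corresponding ImagePoints are already available:
		 *
		 *   HomogenousMatrix4 leftTright;
		 *   const unsigned int validPoints = Geometry::EpipolarGeometry::factorizeEssential(essential, leftCamera, rightCamera, leftPoints, rightPoints, leftTright);
		 *
		 *   if (validPoints >= leftPoints.size() / 2)
		 *   {
		 *       // leftTright transforms points from the right camera coordinate system into the left
		 *       // camera coordinate system; its translation is determined up to scale only
		 *   }
		 */
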
		/**
		 * Determines the two homographies for a (stereo) frame pair rectifying both images, using the transformation between the left and the right camera.
		 * As the resulting homographies may not cover the entire input images when using the same camera profile, a new (perfect) camera profile can be calculated instead.<br>
		 * Thus, the resulting rectified images will have a larger field of view but will cover the entire input frame data.<br>
		 * The projection center of the left camera is expected to be at the origin of the world coordinate system.<br>
		 * The viewing directions of both cameras are towards the negative z-axis in their particular coordinate systems.<br>
		 * The given transformation is equal to the extrinsic camera matrix of the right camera<br>
		 * and thus transforms points defined inside the right camera coordinate system to points defined inside the left camera coordinate system.<br>
		 * The resulting homography transformations transform 3D rectified image points (homogeneous 2D coordinates) into 3D unrectified image points for their particular coordinate system.<br>
		 * The coordinate system of the 3D image points has the origin in the top left corner, while the x-axis points to the right, the y-axis points to the bottom and the z-axis to the back of the image.
		 * @param transformation Extrinsic camera matrix of the right camera with negative z-axis as viewing direction
		 * @param pinholeCamera The pinhole camera profile used for both images
		 * @param leftHomography Resulting left homography
		 * @param rightHomography Resulting right homography
		 * @param appliedRotation Resulting rotation applied to both cameras
		 * @param newCamera Optional resulting new camera profile covering the entire input image data in the output frames, otherwise nullptr
		 * @return True, if succeeded
		 */
		static bool rectificationHomography(const HomogenousMatrix4& transformation, const PinholeCamera& pinholeCamera, SquareMatrix3& leftHomography, SquareMatrix3& rightHomography, Quaternion& appliedRotation, PinholeCamera* newCamera);

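		/*
		 * Editor's usage sketch (not part of the original header), assuming a hypothetical pose
		 * leftTright of the right camera (negative z-axis as viewing direction) and one pinhole
		 * camera profile shared by both input frames:
		 *
		 *   SquareMatrix3 leftHomography, rightHomography;
		 *   Quaternion appliedRotation;
		 *   PinholeCamera newCamera;
		 *
		 *   if (Geometry::EpipolarGeometry::rectificationHomography(leftTright, pinholeCamera, leftHomography, rightHomography, appliedRotation, &newCamera))
		 *   {
		 *       // the homographies map rectified image points (homogeneous 2D coordinates) into the
		 *       // corresponding unrectified image points of each input frame
		 *   }
		 */
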
		/**
		 * Calculates the 3D positions for a pair of image point correspondences with corresponding extrinsic camera transformations.
		 * @param world_T_cameraA The extrinsic camera transformation of the first camera, with the camera pointing towards the negative z-space, y-axis pointing up, and the x-axis pointing to the right, must be valid
		 * @param world_T_cameraB The extrinsic camera transformation of the second camera, with the camera pointing towards the negative z-space, y-axis pointing up, and the x-axis pointing to the right, must be valid
		 * @param anyCameraA The first camera profile, must be valid
		 * @param anyCameraB The second camera profile, must be valid
		 * @param imagePointsA The set of 2D image points in the first image, each point must correspond to the point with the same index from the second image
		 * @param imagePointsB The set of 2D image points in the second image, each point must correspond to the point with the same index from the first image
		 * @param numberPoints The number of point correspondences, with range [0, infinity)
		 * @param onlyFrontObjectPoints If true, only points that are located in front of the camera will be used for the optimization, otherwise all points will be used
		 * @param invalidObjectPoint Optional, the location of an invalid object point which will be used as value for all object points which cannot be determined, e.g., because of parallel projection rays
		 * @param invalidIndices Optional, the resulting indices of the resulting object points with invalid location
		 * @return The resulting triangulated object points
		 */
		static Vectors3 triangulateImagePoints(const HomogenousMatrix4& world_T_cameraA, const HomogenousMatrix4& world_T_cameraB, const AnyCamera& anyCameraA, const AnyCamera& anyCameraB, const Vector2* imagePointsA, const Vector2* imagePointsB, const size_t numberPoints, const bool onlyFrontObjectPoints = true, const Vector3& invalidObjectPoint = Vector3(Numeric::minValue(), Numeric::minValue(), Numeric::minValue()), Indices32* invalidIndices = nullptr);

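		/*
		 * Editor's usage sketch (not part of the original header), assuming two hypothetical camera
		 * poses world_T_cameraA/world_T_cameraB, two AnyCamera profiles and matching point vectors:
		 *
		 *   Vectors2 imagePointsA = ...;  // observations in the first image
		 *   Vectors2 imagePointsB = ...;  // observations in the second image, same order
		 *
		 *   Indices32 invalidIndices;
		 *   const Vectors3 objectPoints = Geometry::EpipolarGeometry::triangulateImagePoints(world_T_cameraA, world_T_cameraB, cameraA, cameraB, imagePointsA.data(), imagePointsB.data(), imagePointsA.size(), true, Vector3(Numeric::minValue(), Numeric::minValue(), Numeric::minValue()), &invalidIndices);
		 *
		 *   // objectPoints[i] is the triangulated 3D point for correspondence i; indices listed in
		 *   // invalidIndices hold the invalid object point value (e.g., for parallel projection rays)
		 */
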
		/**
		 * Calculates the 3D positions for a set of image point correspondences with corresponding poses (Rt) in the inverted flipped camera system.
		 * This linear triangulation uses the singular value decomposition.<br>
		 * If an object point cannot be determined, the resulting object point will have the value (0, 0, 0).
		 * @param camera1 The camera profile used for the first image
		 * @param iFlippedPose1 Given projection matrix for the first camera
		 * @param camera2 The camera profile used for the second image
		 * @param iFlippedPose2 Given projection matrix for the second camera
		 * @param points1 Set of 2D image points in the first image, each point must correspond to the one in the second image
		 * @param points2 Set of 2D image points in the second image
		 * @param correspondences Number of point correspondences, with range [1, infinity)
		 * @param invalidObjectPoint Optional, the location of an invalid object point which will be used as value for all object points which cannot be determined, e.g., because of parallel projection rays
		 * @param invalidIndices Optional resulting indices of the resulting object points with invalid location
		 * @return Resulting object points in the inverted flipped coordinate space
		 */
		static ObjectPoints triangulateImagePointsIF(const PinholeCamera& camera1, const HomogenousMatrix4& iFlippedPose1, const PinholeCamera& camera2, const HomogenousMatrix4& iFlippedPose2, const ImagePoint* points1, const ImagePoint* points2, const size_t correspondences, const Vector3& invalidObjectPoint = Vector3(Numeric::minValue(), Numeric::minValue(), Numeric::minValue()), Indices32* invalidIndices = nullptr);

		/**
		 * Calculates the 3D positions for a set of image point correspondences in multiple views with corresponding camera projection matrices (K * Rt) or poses (Rt) in the inverted flipped camera system.
		 * This linear triangulation uses the singular value decomposition.
		 * @param posesIF Given poses or projection matrices, one for each view
		 * @param imagePointsPerPose Set of 2D image points per view, each point must correspond to the one in the other views
		 * @param pinholeCamera The pinhole camera profile, one for all views; if no camera profile is given, posesIF act as projection matrices
		 * @param invalidObjectPoint Optional, the location of an invalid object point which will be used as value for all object points which cannot be determined, e.g., because of parallel projection rays
		 * @param invalidIndices Optional resulting indices of the resulting object points with invalid location
		 * @return Resulting object points in inverted flipped coordinates
		 */
		static ObjectPoints triangulateImagePointsIF(const ConstIndexedAccessor<HomogenousMatrix4>& posesIF, const ConstIndexedAccessor<ImagePoints>& imagePointsPerPose, const PinholeCamera* pinholeCamera = nullptr, const Vector3& invalidObjectPoint = Vector3(Numeric::minValue(), Numeric::minValue(), Numeric::minValue()), Indices32* invalidIndices = nullptr);

	protected:

		/**
		 * Determines the one transformation out of a set of four candidates for which most of the given image point correspondences provide 3D object points lying in front of the two cameras.
		 * The transformation for the first camera is defined as the unit transformation; the four candidate transformations between the second and the first camera are provided instead.
		 * @param transformation0 First transformation candidate
		 * @param transformation1 Second transformation candidate
		 * @param transformation2 Third transformation candidate
		 * @param transformation3 Fourth transformation candidate
		 * @param leftCamera Left camera profile
		 * @param rightCamera Right camera profile
		 * @param leftPoints Left image points
		 * @param rightPoints Right image points, each point corresponds to one from the left set
		 * @param transformation Resulting transformation with most 3D object points in front of both cameras
		 * @return Number of valid object points
		 */
		static unsigned int solveAmbiguousTransformations(const HomogenousMatrix4& transformation0, const HomogenousMatrix4& transformation1, const HomogenousMatrix4& transformation2, const HomogenousMatrix4& transformation3, const PinholeCamera& leftCamera, const PinholeCamera& rightCamera, const ImagePoints& leftPoints, const ImagePoints& rightPoints, HomogenousMatrix4& transformation);

		/**
		 * Returns the number of 3D object points lying in front of two cameras for a given transformation between the two cameras.
		 * The transformation for the first camera is defined as the unit transformation; the transformation between the second and the first camera is provided instead.
		 * @param transformation The transformation between the right and left camera, transforming points defined in the right coordinate system to points defined in the left (identity) coordinate system, must be valid
		 * @param leftCamera Left camera profile, must be valid
		 * @param rightCamera Right camera profile, must be valid
		 * @param leftPoints Left image points, can be nullptr if 'correspondences' is 0
		 * @param rightPoints Right image points, one for each left point, can be nullptr if 'correspondences' is 0
		 * @param correspondences The number of provided point correspondences, with range [0, infinity)
		 * @return Number of valid object points, with range [0, correspondences]
		 */
		static unsigned int validateTransformation(const HomogenousMatrix4& transformation, const PinholeCamera& leftCamera, const PinholeCamera& rightCamera, const Vector2* leftPoints, const Vector2* rightPoints, const size_t correspondences);

		/**
		 * Converts an epipolar line to a line object.
		 * @param line The epipolar line to be converted
		 * @return Resulting line object
		 */
		static inline Line2 epipolarLine2Line(const Vector3& line);
};

inline Line2 EpipolarGeometry::leftEpipolarLine(const SquareMatrix3& fundamental, const Vector2& rightPoint)
{
	return epipolarLine2Line(fundamental.transposed() * Vector3(rightPoint, 1));
}

inline Line2 EpipolarGeometry::rightEpipolarLine(const SquareMatrix3& fundamental, const Vector2& leftPoint)
{
	return epipolarLine2Line(fundamental * Vector3(leftPoint, 1));
}

inline Line2 EpipolarGeometry::epipolarLine2Line(const Vector3& line)
{
	const Vector2 normal(line[0], line[1]);
	ocean_assert(!normal.isNull());

	const Scalar normalLength = normal.length();
	ocean_assert(Numeric::isNotEqualEps(normalLength));

	/*const Scalar distance = normalLength * line[2];
	const Scalar factor = 1 / normalLength;*/

	return Line2(line / normalLength);

	//return Line2(Vector3(normal.x() * factor, normal.y() * factor, distance *));
}

}

}

#endif // META_OCEAN_GEOMETRY_EPIPOLAR_GEOMETRY_H