BitmapToolkit Scol plugin
sSlam.cpp
1/*
2-----------------------------------------------------------------------------
3This source file is part of OpenSpace3D
4For the latest info, see http://www.openspace3d.com
5
6Copyright (c) 2012 I-maginer
7
8This program is free software; you can redistribute it and/or modify it under
9the terms of the GNU Lesser General Public License as published by the Free Software
10Foundation; either version 2 of the License, or (at your option) any later
11version.
12
13This program is distributed in the hope that it will be useful, but WITHOUT
14ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
15FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
16
17You should have received a copy of the GNU Lesser General Public License along with
18this program; if not, write to the Free Software Foundation, Inc., 59 Temple
19Place - Suite 330, Boston, MA 02111-1307, USA, or go to
20http://www.gnu.org/copyleft/lesser.txt
21
22-----------------------------------------------------------------------------
23*/
24
25//#include "sService.h"
26#include "ArManager.h"
27#include "ArCameraParam.h"
28#include "sSlam.h"
29
30#include <vector>
31
32uint32 firstPO2From(uint32 n)
33{
34 uint32 n1 = n;
35 --n1;
36 n1 |= n1 >> 16;
37 n1 |= n1 >> 8;
38 n1 |= n1 >> 4;
39 n1 |= n1 >> 2;
40 n1 |= n1 >> 1;
41 ++n1;
42 n1 = n1 >> 1;
43
44 return n1;
45}
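
// Note on the helper above: the OR cascade smears the highest set bit of n-1 into
// all lower bits, so ++n1 yields the smallest power of two >= n, and the final
// right shift halves it. The function therefore returns the largest power of two
// strictly below n. Hand-worked examples (illustration only):
//   firstPO2From(320) -> 256
//   firstPO2From(257) -> 256
//   firstPO2From(256) -> 128  (an exact power of two drops to the next one down)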
46
47void Sophus2Cv(const dso::SE3 &smat, cv::Mat &mat)
48{
49 if (smat.unit_quaternion().norm() < dso::SE3::Scalar(1e-10))
50 return;
51
52 dso::Vec3f pos = smat.translation().cast<float>();
53 dso::Mat33f rotation_matrix = smat.rotationMatrix().cast<float>();
54
55 cv::Mat nmat(4, 4, CV_32F);
56 nmat.at<float>(0, 0) = rotation_matrix(0, 0);
57 nmat.at<float>(1, 0) = rotation_matrix(1, 0);
58 nmat.at<float>(2, 0) = rotation_matrix(2, 0);
59 nmat.at<float>(3, 0) = 0.0f;
60 nmat.at<float>(0, 1) = rotation_matrix(0, 1);
61 nmat.at<float>(1, 1) = rotation_matrix(1, 1);
62 nmat.at<float>(2, 1) = rotation_matrix(2, 1);
63 nmat.at<float>(3, 1) = 0.0f;
64 nmat.at<float>(0, 2) = rotation_matrix(0, 2);
65 nmat.at<float>(1, 2) = rotation_matrix(1, 2);
66 nmat.at<float>(2, 2) = rotation_matrix(2, 2);
67 nmat.at<float>(3, 2) = 0.0f;
68 nmat.at<float>(0, 3) = pos.x();
69 nmat.at<float>(1, 3) = pos.y();
70 nmat.at<float>(2, 3) = pos.z();
71 nmat.at<float>(3, 3) = 1.0f;
72 nmat.copyTo(mat);
73}
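
// Sophus2Cv lays the pose out as the usual homogeneous transform [R | t; 0 0 0 1]
// (rotation in the top-left 3x3 block, translation in the last column). For
// reference, the opposite direction could look like the sketch below. This is a
// hypothetical helper that is not used by the plugin; it assumes dso::SE3 is the
// usual Sophus type constructible from an Eigen quaternion and a translation.
void Cv2Sophus(const cv::Mat &mat, dso::SE3 &smat)
{
    Eigen::Matrix3d R;
    Eigen::Vector3d t;
    for (int r = 0; r < 3; r++)
    {
        for (int c = 0; c < 3; c++)
            R(r, c) = static_cast<double>(mat.at<float>(r, c));
        t(r) = static_cast<double>(mat.at<float>(r, 3));
    }
    // go through a quaternion so small numerical errors in R are re-normalized
    smat = dso::SE3(Eigen::Quaterniond(R), t);
}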
74
75void cvMatToQuatAndPos(const cv::Mat mat, BtQuaternion &quat, Vector3 &vec, bool invertx = false, bool flip = false)
76{
77 double modelview_matrix[16];
78 float invx = 1.0f;
79
80 if (invertx)
81 invx = -1.0f;
82
83 modelview_matrix[0] = mat.at<float>(0, 0) * invx;
84 modelview_matrix[1] = mat.at<float>(1, 0) * invx;
85 modelview_matrix[2] = mat.at<float>(2, 0) * invx;
86 modelview_matrix[3] = 0.0;
87
88 modelview_matrix[4] = mat.at<float>(0, 1);
89 modelview_matrix[5] = mat.at<float>(1, 1);
90 modelview_matrix[6] = mat.at<float>(2, 1);
91 modelview_matrix[7] = 0.0;
92
93 modelview_matrix[8] = mat.at<float>(0, 2);
94 modelview_matrix[9] = mat.at<float>(1, 2);
95 modelview_matrix[10] = mat.at<float>(2, 2);
96 modelview_matrix[11] = 0.0;
97
98 modelview_matrix[12] = mat.at<float>(0, 3);
99 modelview_matrix[13] = mat.at<float>(1, 3);
100 modelview_matrix[14] = mat.at<float>(2, 3);
101 modelview_matrix[15] = 1.0;
102
103 quat = BtQuaternion(0.0f, 0.0f, 1.0f, 0.0f) * BtQuaternion::FromRotationMatrix(modelview_matrix, flip);
104 vec = Vector3(static_cast<float>(flip ? -modelview_matrix[12] : modelview_matrix[12]), static_cast<float>(modelview_matrix[13]), static_cast<float>(modelview_matrix[14]));
105}
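
// Usage sketch for the helper above: the SLAM code further down always passes it
// an inverted pose matrix, e.g. (hypothetical variable name, illustration only):
//   BtQuaternion q;
//   Vector3 p;
//   cvMatToQuatAndPos(planeToCamera.inv(), q, p, true /* invert X */, false /* no flip */);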
106
107const float eps = 1e-4;
108cv::Mat ExpSO3(const float &x, const float &y, const float &z)
109{
110 cv::Mat I = cv::Mat::eye(3, 3, CV_32F);
111 const float d2 = x * x + y * y + z * z;
112 const float d = sqrt(d2);
113 cv::Mat W;
114 W = (cv::Mat_<float>(3, 3) << 0, -z, y, z, 0, -x, -y, x, 0);
115
116 if (d < eps)
117 return (I + W + 0.5f*W*W);
118 else
119 return (I + W * sin(d) / d + W * W*(1.0f - cos(d)) / d2);
120}
121
122cv::Mat ExpSO3(const cv::Mat &v)
123{
124 return ExpSO3(v.at<float>(0), v.at<float>(1), v.at<float>(2));
125}
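
// ExpSO3 is the Rodrigues formula for the exponential map of so(3):
//   exp(W) = I + (sin(d)/d) * W + ((1 - cos(d))/d^2) * W*W,   d = |(x, y, z)|
// where W is the skew-symmetric matrix of (x, y, z). For very small angles
// (d < eps) the first-order expansion I + W + 0.5*W*W is used instead, which
// avoids the 0/0 in sin(d)/d.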
126
127cv::Point2f Camera2Pixel(cv::Mat poseCamera, cv::Mat mk)
128{
129 return cv::Point2f(
130 poseCamera.at<float>(0, 0) / poseCamera.at<float>(2, 0) * mk.at<float>(0, 0) + mk.at<float>(0, 2),
131 poseCamera.at<float>(1, 0) / poseCamera.at<float>(2, 0) * mk.at<float>(1, 1) + mk.at<float>(1, 2)
132 );
133}
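
// Camera2Pixel is the plain pinhole projection with intrinsic matrix mk:
//   u = fx * X/Z + cx,   v = fy * Y/Z + cy
// with fx = mk(0,0), fy = mk(1,1), cx = mk(0,2), cy = mk(1,2) and (X, Y, Z)
// the point expressed in the camera frame (poseCamera).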
134
135cv::Mat Plane::DetectPlane(cv::Mat campos, std::vector<slamPoint> &vMPs, const int iterations)
136{
137 // Retrieve 3D points
138 unsigned int maxPoints = std::min(800, (int)vMPs.size());
139
140 std::vector<cv::Mat> vPoints;
141 vPoints.reserve(maxPoints);
142 std::vector<slamPoint> vPointMP;
143 vPointMP.reserve(maxPoints);
144
145 for (size_t i = 0; i < maxPoints; i++)
146 {
147 slamPoint pMP = vMPs[i];
148 if (pMP.ptr)
149 {
150 vPoints.push_back(pMP.point3d);
151 vPointMP.push_back(pMP);
152 }
153 }
154
155 const int N = vPoints.size();
156
157 if (N < 50)
158 return mTpw;
159
160 // Indices for minimum set selection
161 std::vector<size_t> vAllIndices;
162 vAllIndices.reserve(N);
163 std::vector<size_t> vAvailableIndices;
164
165 for (int i = 0; i < N; i++)
166 {
167 vAllIndices.push_back(i);
168 }
169
170 float bestDist = 1e10;
171 std::vector<float> bestvDist;
172
173 //RANSAC
174 for (int n = 0; n < iterations; n++)
175 {
176 vAvailableIndices = vAllIndices;
177
178 cv::Mat A(3, 4, CV_32F);
179 A.col(3) = cv::Mat::ones(3, 1, CV_32F);
180
181 // Get min set of points
182 for (short i = 0; i < 3; ++i)
183 {
184 int randi = Random(0, vAvailableIndices.size() - 1);
185
186 int idx = vAvailableIndices[randi];
187
188 A.row(i).colRange(0, 3) = vPoints[idx].t();
189
190 vAvailableIndices[randi] = vAvailableIndices.back();
191 vAvailableIndices.pop_back();
192 }
193
194 cv::Mat u, w, vt;
195 cv::SVDecomp(A, w, u, vt, cv::SVD::MODIFY_A | cv::SVD::FULL_UV);
196
197 const float a = vt.at<float>(3, 0);
198 const float b = vt.at<float>(3, 1);
199 const float c = vt.at<float>(3, 2);
200 const float d = vt.at<float>(3, 3);
201
202 std::vector<float> vDistances(N, 0);
203
204 const float f = 1.0f / sqrt(a*a + b * b + c * c + d * d);
205
206 for (int i = 0; i < N; i++)
207 {
208 vDistances[i] = fabs(vPoints[i].at<float>(0)*a + vPoints[i].at<float>(1)*b + vPoints[i].at<float>(2)*c + d)*f;
209 }
210
211 std::vector<float> vSorted = vDistances;
212 std::sort(vSorted.begin(), vSorted.end());
213
214 int nth = std::max((int)(0.2*N), 20);
215 const float medianDist = vSorted[nth];
216
217 if (medianDist < bestDist)
218 {
219 bestDist = medianDist;
220 bestvDist = vDistances;
221 }
222 }
223
224 // Compute the inlier/outlier threshold
225 const float th = 1.4 * bestDist;
226 std::vector<bool> vbInliers(N, false);
227 int nInliers = 0;
228 for (int i = 0; i < N; i++)
229 {
230 if (bestvDist[i] < th)
231 {
232 nInliers++;
233 vbInliers[i] = true;
234 }
235 }
236
237 std::vector<slamPoint> vInlierMPs(nInliers);
238 int nin = 0;
239 for (int i = 0; i < N; i++)
240 {
241 if (vbInliers[i])
242 {
243 vInlierMPs[nin] = vPointMP[i];
244 nin++;
245 }
246 }
247
248 campos.copyTo(mMTcw);
249 mMvMPs = vInlierMPs;
250 mRang = -3.14f / 2 + ((float)rand() / RAND_MAX)*3.14f;
251 Recompute();
252 return mTpw;
253}
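
// The RANSAC loop above fits a plane a*x + b*y + c*z + d = 0 through random
// 3-point samples: the coefficients are the right singular vector with the
// smallest singular value of the homogeneous system [x y z 1], and each
// hypothesis is scored by roughly the 20th-percentile point-to-plane distance.
// The single fit step, isolated as a standalone sketch (illustration only, not
// plugin code):
static cv::Vec4f FitPlaneSVD(const std::vector<cv::Point3f> &pts)
{
    cv::Mat A((int)pts.size(), 4, CV_32F);
    for (int i = 0; i < (int)pts.size(); i++)
    {
        A.at<float>(i, 0) = pts[i].x;
        A.at<float>(i, 1) = pts[i].y;
        A.at<float>(i, 2) = pts[i].z;
        A.at<float>(i, 3) = 1.0f;
    }

    cv::Mat u, w, vt;
    cv::SVDecomp(A, w, u, vt, cv::SVD::MODIFY_A | cv::SVD::FULL_UV);

    // plane coefficients (a, b, c, d) = last row of V^T (smallest singular value)
    return cv::Vec4f(vt.at<float>(3, 0), vt.at<float>(3, 1),
                     vt.at<float>(3, 2), vt.at<float>(3, 3));
}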
254
255void Plane::Recompute()
256{
257 const int N = mMvMPs.size();
258
259 // Recompute plane with all points
260 cv::Mat A = cv::Mat(N, 4, CV_32F);
261 A.col(3) = cv::Mat::ones(N, 1, CV_32F);
262
263 mOrigin = cv::Mat::zeros(3, 1, CV_32F);
264
265 int nPoints = 0;
266 for (int i = 0; i < N; i++)
267 {
268 slamPoint pMP = mMvMPs[i];
269 if (pMP.ptr)
270 {
271 cv::Mat Xw = pMP.point3d;
272 mOrigin += Xw;
273 A.row(nPoints).colRange(0, 3) = Xw.t();
274 nPoints++;
275 }
276 }
277
278 if (!nPoints)
279 return;
280
281 A.resize(nPoints);
282
283 cv::Mat u, w, vt;
284 cv::SVDecomp(A, w, u, vt, cv::SVD::MODIFY_A | cv::SVD::FULL_UV);
285
286 float a = vt.at<float>(3, 0);
287 float b = vt.at<float>(3, 1);
288 float c = vt.at<float>(3, 2);
289
290 mOrigin = mOrigin * (1.0f / nPoints);
291 const float f = 1.0f / sqrt(a * a + b * b + c * c);
292
293 // Compute XC just the first time
294 if (mXC.empty())
295 {
296 cv::Mat Oc = -mMTcw.colRange(0, 3).rowRange(0, 3).t() * mMTcw.rowRange(0, 3).col(3);
297 mXC = Oc - mOrigin;
298 }
299
300 if ((mXC.at<float>(0) * a + mXC.at<float>(1) * b + mXC.at<float>(2) * c) < 0)
301 {
302 a = -a;
303 b = -b;
304 c = -c;
305 }
306
307 const float nx = a * f;
308 const float ny = b * f;
309 const float nz = c * f;
310
311 mNormal = (cv::Mat_<float>(3, 1) << nx, ny, nz);
312
313 cv::Mat up;
314 up = (cv::Mat_<float>(3, 1) << 0.0f, 1.0f, 0.0f);
315
316 cv::Mat v = up.cross(mNormal);
317 const float sa = cv::norm(v);
318 const float ca = up.dot(mNormal);
319 const float ang = atan2(sa, ca);
320
321 mTpw = cv::Mat::eye(4, 4, CV_32F);
322
323 //mTpw.rowRange(0, 3).colRange(0, 3) = ExpSO3(v * ang / sa) * ExpSO3(up * mRang);
324 ExpSO3(v*ang / sa).copyTo(mTpw.rowRange(0, 3).colRange(0, 3));
325 mOrigin.copyTo(mTpw.col(3).rowRange(0, 3));
326}
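
// Recompute refits the plane through all inliers, keeps their centroid as
// mOrigin and orients the normal toward the camera. The plane frame mTpw is
// built so that the up axis (0, 1, 0) maps onto mNormal: v = up x n is the
// rotation axis, ang = atan2(|v|, up . n) the angle, applied with ExpSO3. A
// point expressed in the plane frame is then placed in the world as
// Xw = mTpw * (Xp, 1).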
327
328Sslam::Sslam()
329{
330 mIsDirty = false;
331 needUpdate = false;
332 mScaleX = 1.0f;
333 mScaleY = 1.0f;
334 mSlam = 0;
335 mFrameIdx = 0;
336 mFound = false;
337 mInitialized = false;
338 mUseSensor = false;
339 mReady = 0;
340 mLastUpdate = 0.0;
341
342 dso::setting_solverMode = SOLVER_FIX_LAMBDA | SOLVER_ORTHOGONALIZE_X_LATER;
343 dso::setting_solverModeDelta = 0.00001;
344 dso::setting_forceAceptStep = true;
345 dso::goStepByStep = false;
346 dso::setting_useExposure = true;
347 dso::setting_realTimeMaxKF = true;
348 dso::setting_use_stereo = false;
349 dso::stereo_weight = 0;
350
351 dso::setting_use_imu_track = false;
352 dso::setting_use_imu = false;
353 dso::imu_weight = 0.5;
354 dso::imu_weight_tracker = 0.1;
355 dso::AccCov = dso::Mat33::Identity();
356 dso::AccRandomWalkNoise = dso::Mat33::Identity();
357 dso::GyrCov = dso::Mat33::Identity();
358 dso::GyrRandomWalkNoise = dso::Mat33::Identity();
359
360 dso::setting_use_optimize = true;
361 dso::setting_use_Dmargin = true;
362
363 dso::disableAllDisplay = true;
364 dso::setting_render_plotTrackingFull = false;
365 dso::setting_render_renderWindowFrames = false;
366 dso::setting_logStuff = false;
367 dso::setting_debugout_runquiet = true;
368 dso::setting_desiredImmatureDensity = 500;
369 dso::setting_desiredPointDensity = 650;
370 dso::setting_minFrames = 2;
371 dso::setting_maxFrames = 4;
372 dso::setting_maxOptIterations = 3;
373 dso::setting_minOptIterations = 1;
374 dso::setting_minPointsRemaining = 0.05f;
375 dso::setting_maxLogAffFacInWindow = 0.7f;
376 dso::setting_kfGlobalWeight = 4.0;
377 dso::setting_maxAffineWeight = 2.0;
378 dso::setting_maxIMUSamples = 10000;
379
380 dso::setting_maxPixSearch = 0.027f; // max length of the ep. line segment searched during immature point tracking. relative to image resolution.
381 dso::setting_minTraceQuality = 4.0f;
382 dso::setting_minTraceTestRadius = 3;
383 dso::setting_GNItsOnPointActivation = 3;
384 dso::setting_trace_stepsize = 1.0f; // stepsize for initial discrete search.
385 dso::setting_trace_GNIterations = 3; // max # GN iterations
386 dso::setting_trace_GNThreshold = 0.1f; // GN stop after this stepsize.
387 dso::setting_trace_extraSlackOnTH = 1.2f; // for energy-based outlier check, be slightly more relaxed by this factor.
388 dso::setting_trace_slackInterval = 1.5f; // if pixel-interval is smaller than this, leave it be.
389 dso::setting_trace_minImprovementFactor = 2.0f; // if pixel-interval is smaller than this, leave it be.
390
391 dso::setting_photometricCalibration = 0;
392 //dso::setting_affineOptModeA = 0;
393 //dso::setting_affineOptModeB = 0;
394 //dso::setting_initialRotPrior = 0;
395 //dso::setting_initialTransPrior = 0;
396 //dso::setting_initialAffBPrior = 0;
397 //dso::setting_initialAffAPrior = 0;
398 //dso::setting_initialCalibHessian = 0;
399 //dso::setting_gammaWeightsPixelSelect = 1;
400 dso::multiThreading = true;
401 dso::sparsityFactor = 4;
402
403 mCamParams.inWidth = 0;
404 mCamParams.inHeight = 0;
405
406 mUndistorter = 0;
407
408 // Run thread
409 StartThreading(boost::bind(&Sslam::GoThread, this));
410}
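
// The constructor above mostly tunes DSO for real-time monocular use: stereo and
// IMU terms are disabled, all debug displays and logging are switched off,
// photometric calibration is disabled (mode 0), point densities are reduced
// (500 immature / 650 active points) and the sliding window is kept small
// (2-4 keyframes, 1-3 optimization iterations) before the worker thread is
// started on GoThread.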
411
412Sslam::~Sslam()
413{
415
416 if (mSlam)
417 {
418 for (dso::IOWrap::Output3DWrapper* ow : mSlam->outputWrapper)
419 {
420 ow->join();
421 if (ow != this)
422 delete ow;
423 }
424 }
425 SAFE_DELETE(mSlam);
426 SAFE_DELETE(mUndistorter);
427}
428
429void Sslam::join()
430{
431}
432
433void Sslam::reset()
434{
435 mReady = 0;
436 mInitialized = false;
437}
438
439void Sslam::publishGraph(const std::map<uint64_t, Eigen::Vector2i, std::less<uint64_t>, Eigen::aligned_allocator<std::pair<const uint64_t, Eigen::Vector2i>>> &connectivity)
440{
441}
442
443void Sslam::publishKeyframes(std::vector<dso::FrameHessian*> &frames, bool final, dso::CalibHessian* HCalib)
444{
445 float fx = HCalib->fxl();
446 float fy = HCalib->fyl();
447 float cx = HCalib->cxl();
448 float cy = HCalib->cyl();
449 float fxi = 1.0f / fx;
450 float fyi = 1.0f / fy;
451 float cxi = -cx / fx;
452 float cyi = -cy / fy;
453
454 cv::Mat camtoworld(4, 4, CV_32F);
455 for (dso::FrameHessian* fh : frames)
456 {
457 Sophus2Cv((dso::T_WR_align.inverse() * fh->shell->camToWorld).inverse(), camtoworld);
458
459 if (camtoworld.empty())
460 continue;
461
462 for (dso::PointHessian* ph : fh->pointHessians)
463 {
464 /*
465 for (dso::PointFrameResidual* r : ph->residuals)
466 {
467 cv::Scalar cT;
468
469 if (r->state_state == dso::ResState::INLIER) cT = cv::Scalar(255, 0, 0);
470 else if (r->state_state == dso::ResState::OOB) cT = cv::Scalar(255, 255, 0);
471 else if (r->state_state == dso::ResState::OUTLIER) cT = cv::Scalar(0, 0, 255);
472 else cT = cv::Scalar(255, 255, 255);
473
474 slamPoint spt;
475 spt.ptr = ph;
476 spt.color = cT;
477 spt.point2d = cv::Point2f(ph->u + 0.5f, ph->v + 0.5f);
478 spt.depth = 0.3f;
479 {
480 boost::mutex::scoped_lock lock(mDebugMutex);
481 mCloudPoints.push_back(spt);
482 }
483 }
484 */
485
486 if (ph->idepth_scaled < 0)
487 continue;
488
489 if ((ph->status == dso::PointHessian::MARGINALIZED) || (ph->status == dso::PointHessian::ACTIVE))
490 {
491 float depth = 1.0f / ph->idepth;
492 float x = (ph->u * fxi + cxi) * depth;
493 float y = (ph->v * fyi + cyi) * depth;
494 float z = depth * (1 + 2 * fxi);
495
496 cv::Mat pm4 = camtoworld.inv() * cv::Mat(cv::Vec4f(x, y, z, 1.0));
497 cv::Mat pm(cv::Point3f(pm4.at<float>(0), pm4.at<float>(1), pm4.at<float>(2)));
498
499 slamPoint dspt;
500 dspt.ptr = ph;
501 dspt.point3d = pm;
502 dspt.color = cv::Scalar(0, 0, 255);
503 dspt.point2d = cv::Point2i(ph->u, ph->v);
504 dspt.depth = ph->idepth_scaled;
505
506 boost::mutex::scoped_lock lock(mDebugMutex);
507 if (mCloudPoints.size() > dso::setting_desiredPointDensity * 3)
508 mCloudPoints.erase(mCloudPoints.begin());
509
510 mCloudPoints.push_back(dspt);
511 }
512 }
513 }
514}
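
// The loop above back-projects each usable point with the inverse pinhole model:
//   depth = 1 / idepth
//   X = (u * fxi + cxi) * depth,  Y = (v * fyi + cyi) * depth,  Z = depth * (1 + 2*fxi)
// (the (1 + 2*fxi) factor is a small stretch along the ray, similar to the one
// used in DSO's own viewer code), then camtoworld.inv() * (X, Y, Z, 1) moves the
// point from the keyframe's camera frame into the shared world frame of the
// point cloud.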
515
516void Sslam::publishCamPose(dso::FrameShell* frame, dso::CalibHessian* HCalib)
517{
518 cv::Mat camtoworld(4, 4, CV_32F);
519 Sophus2Cv((dso::T_WR_align.inverse() * frame->camToWorld).inverse(), camtoworld);
520
521 if (camtoworld.empty())
522 return;
523
524 camtoworld.copyTo(mCameraWorld);
525
526 if ((mCloudPoints.size() > dso::setting_desiredPointDensity) && !mInitialized && !mCameraWorld.empty() && !mSlam->isLost && mSlam->initialized && !mSlam->initFailed)
527 {
528 boost::mutex::scoped_lock lock(mDebugMutex);
529 Plane p;
530 cv::Mat plane2w = p.DetectPlane(mCameraWorld, mCloudPoints, 50);
531 if (!plane2w.empty())
532 {
533 mCentroid = p.mOrigin;
534 mPlane2World = plane2w;
535 mInitialized = true;
536 }
537 }
538
539 if (mInitialized && frame->poseValid && !mPlane2World.empty())
540 {
541 mPlane2Camera = camtoworld * mPlane2World;
542
543 mPrevCamPos = mLastCamPos;
544 mPrevCamQuat = mLastCamQuat;
545
546 if (mUseSensor)
547 {
548 camtoworld.at<float>(0, 3) = mPlane2Camera.at<float>(0, 3);
549 camtoworld.at<float>(1, 3) = mPlane2Camera.at<float>(1, 3);
550 camtoworld.at<float>(2, 3) = mPlane2Camera.at<float>(2, 3);
551
552 cvMatToQuatAndPos(camtoworld.inv(), mLastCamQuat, mLastCamPos, true, mLastData.reversedBitmap);
553 }
554 else
555 {
556 cvMatToQuatAndPos(mPlane2Camera.inv(), mLastCamQuat, mLastCamPos, true, mLastData.reversedBitmap);
557 }
558
559 std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
560 mLastUpdate = std::chrono::duration<double, std::milli>(t1 - mLastTimeFrame).count();
561 mLastTimeFrame = t1;
562 mFound = true;
563 }
564 else
565 {
566 mFound = false;
567 }
568}
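
// Pose chain used above: once enough cloud points exist, DetectPlane anchors a
// plane frame in the world (mPlane2World). Every new frame then gives
// mPlane2Camera = worldToCamera * mPlane2World, whose inverse is converted to the
// engine's quaternion/position pair; the previous pose is kept so that
// GetCameraPosition / GetCameraOrientation can interpolate between SLAM updates.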
569
570void Sslam::pushLiveFrame(dso::FrameHessian* image)
571{
572}
573
574bool Sslam::needPushDepthImage()
575{
576 return false;
577}
578
579void Sslam::pushDepthImage(dso::MinimalImageB3* image)
580{
581}
582
583void Sslam::SetDirty()
584{
585 mIsDirty = true;
586}
587
588void Sslam::BuildCameraParam(const aruco::CameraParameters &camparam)
589{
590 mScaleX = 1.0f;
591 mScaleY = 1.0f;
592 float scale = 1.0f;
593
594 float maxsize = static_cast<float>(std::max(camparam.CamSize.width, camparam.CamSize.height));
595 if (maxsize > 320)
596 scale = 320.0f / maxsize;
597
598 mScaleX = static_cast<float>(firstPO2From((float)camparam.CamSize.width * scale)) / static_cast<float>(camparam.CamSize.width);
599 mScaleY = static_cast<float>(firstPO2From((float)camparam.CamSize.height * scale)) / static_cast<float>(camparam.CamSize.height);
600
601 mCamParams.type = dso::Model_FOV;
602 mCamParams.rectify = dso::Rectify_Crop;
603
604 mCamParams.inWidth = (int)(static_cast<float>(camparam.CamSize.width) * mScaleX);
605 mCamParams.inHeight = (int)(static_cast<float>(camparam.CamSize.height) * mScaleY);
606
607 mCamParams.outWidth = mCamParams.inWidth;
608 mCamParams.outHeight = mCamParams.inHeight;
609
610 //float f = std::max(mCamParams.cx, mCamParams.cy) * 2.0f;
611 mCamParams.fx = camparam.CameraMatrix.at<float>(0, 0) * mScaleX; // f;
612 mCamParams.fy = camparam.CameraMatrix.at<float>(1, 1) * mScaleY; // f;
613
614 mCamParams.cx = camparam.CameraMatrix.at<float>(0, 2) * mScaleX; //static_cast<float>(mCamParams.inWidth) * 0.5f;
615 mCamParams.cy = camparam.CameraMatrix.at<float>(1, 2) * mScaleY; //static_cast<float>(mCamParams.inHeight) * 0.5f;
616
617 mCamParams.omega = 0.0f;
618}
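
// Worked example (hand-computed, illustration only): a 640x480 camera gives
// maxsize = 640 and scale = 320/640 = 0.5, so
//   mScaleX = firstPO2From(320) / 640 = 256 / 640 = 0.4     -> inWidth  = 256
//   mScaleY = firstPO2From(240) / 480 = 128 / 480 ~= 0.267  -> inHeight = 128
// and fx, fy, cx, cy are scaled by the same factors, so DSO runs on a reduced,
// power-of-two sized image.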
619
620void Sslam::InitDetector()
621{
622 try
623 {
624 if (mSlam)
625 {
626 mSlam->blockUntilMappingIsFinished();
627 std::vector<dso::IOWrap::Output3DWrapper*> wraps = mSlam->outputWrapper;
628
629 boost::mutex::scoped_lock lock(mResetMutex);
630 SAFE_DELETE(mSlam);
631
632 for (dso::IOWrap::Output3DWrapper* ow : wraps)
633 ow->reset();
634
635 SAFE_DELETE(mUndistorter);
636 mUndistorter = dso::Undistort::getUndistorter(mCamParams, "", "");
637
638 dso::setGlobalCalib((int)mUndistorter->getSize()[0], (int)mUndistorter->getSize()[1], mUndistorter->getK().cast<float>());
639
640 float fsize = (float)mUndistorter->getSize()[0] + (float)mUndistorter->getSize()[1];
641 dso::setting_maxShiftWeightT = 0.04f * fsize;
642 dso::setting_maxShiftWeightR = 0.0f * fsize;
643 dso::setting_maxShiftWeightRT = 0.02f * fsize;
644
645 mSlam = new dso::FullSystem();
646 mSlam->outputWrapper = wraps;
647 }
648 else
649 {
650 boost::mutex::scoped_lock lock(mResetMutex);
651 SAFE_DELETE(mUndistorter);
652 mUndistorter = dso::Undistort::getUndistorter(mCamParams, "", "");
653
654 dso::setGlobalCalib((int)mUndistorter->getSize()[0], (int)mUndistorter->getSize()[1], mUndistorter->getK().cast<float>());
655
656 float fsize = (float)mUndistorter->getSize()[0] + (float)mUndistorter->getSize()[1];
657 dso::setting_maxShiftWeightT = 0.04f * fsize;
658 dso::setting_maxShiftWeightR = 0.0f * fsize;
659 dso::setting_maxShiftWeightRT = 0.02f * fsize;
660
661 mSlam = new dso::FullSystem();
662 mSlam->outputWrapper.push_back(this);
663 }
664
665 mSlam->linearizeOperation = true;
666
667 if (mUndistorter && mUndistorter->photometricUndist != 0)
668 mSlam->setGammaFunction(mUndistorter->photometricUndist->getG());
669 }
670 catch (std::exception &)
671 {
672 SAFE_DELETE(mSlam);
673 }
674
675 mLastTimeFrame = std::chrono::steady_clock::now();
676 mFrameIdx = 0;
677 mInitialized = false;
678 mReady = 0;
679 mFound = false;
680
681 {
682 boost::mutex::scoped_lock lock(mDebugMutex);
683 mCloudPoints.clear();
684 }
685}
686
687Vector3 Sslam::GetCameraPosition()
688{
689 if (mLastUpdate == 0.0)
690 return mLastCamPos;
691
692 double frame = 1.0 / mLastUpdate;
693 double lerpcoef = frame * std::chrono::duration<double, std::milli>(std::chrono::steady_clock::now() - mLastTimeFrame).count();
694 lerpcoef = std::min(lerpcoef, 1.0);
695
696 Vector3 lerpVec = mPrevCamPos + ((mLastCamPos - mPrevCamPos) * lerpcoef);
697 return lerpVec;
698}
699
700BtQuaternion Sslam::GetCameraOrientation()
701{
702 if (mLastUpdate == 0.0)
703 return mLastCamQuat;
704
705 double frame = 1.0 / mLastUpdate;
706 double lerpcoef = frame * std::chrono::duration<double, std::milli>(std::chrono::steady_clock::now() - mLastTimeFrame).count();
707 lerpcoef = std::min(lerpcoef, 1.0);
708
709 return BtQuaternion::Slerp(lerpcoef, mPrevCamQuat, mLastCamQuat, true);
710}
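
// GetCameraPosition and GetCameraOrientation smooth the pose between SLAM
// updates: lerpcoef = (time since the last update) / (last update interval),
// clamped to 1; the position is linearly interpolated from the previous pose to
// the latest one and the orientation is slerped over the same fraction.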
711
712bool Sslam::IsFound()
713{
714 return mFound;
715}
716
717void Sslam::Reset()
718{
719 mCloudPoints.clear();
720 mFound = false;
721 mInitialized = false;
722 mReady = 0;
723
724 if (mSlam)
725 {
726 for (dso::IOWrap::Output3DWrapper* ow : mSlam->outputWrapper)
727 {
728 ow->join();
729 if (ow != this)
730 delete ow;
731 }
732 }
733 mLastTimeFrame = std::chrono::steady_clock::now();
734
735 SAFE_DELETE(mSlam);
736 SAFE_DELETE(mUndistorter);
737}
738
739void Sslam::DrawLandmarks(cv::Mat image)
740{
741 if (mSlam && !image.empty())
742 {
743 //cv::Mat small;
744 //cv::cvtColor(image, small, cv::COLOR_BGR2GRAY);
745 //cv::resize(small, small, cv::Size(mCamParams.inWidth, mCamParams.inHeight), 0, 0, cv::INTER_LINEAR);
746 //cv::equalizeHist(small, small);
747 //cv::cvtColor(small, small, cv::COLOR_GRAY2BGR);
748 //cv::resize(small, image, cv::Size(image.cols, image.rows), 0, 0, cv::INTER_LINEAR);
749
750 if (!mCameraWorld.empty())
751 {
752 boost::mutex::scoped_lock lock(mDebugMutex);
753 for (unsigned int i = 0; i < mCloudPoints.size(); i++)
754 {
755 slamPoint spt = mCloudPoints[i];
756 if (!spt.point3d.empty())
757 {
758 cv::Mat pm4 = mCameraWorld * cv::Mat(cv::Vec4f(spt.point3d.at<float>(0), spt.point3d.at<float>(1), spt.point3d.at<float>(2), 1.0));
759 cv::circle(image, Camera2Pixel(pm4, mLastData.arCamParam.camParam.CameraMatrix), 2, cv::Scalar(250, 0, 0));
760 }
761 }
762 }
763
764 if (!mCentroid.empty() && mInitialized)
765 {
766 std::vector<cv::Mat> axisPoints(4);
767 axisPoints[0] = (cv::Mat_ <float>(4, 1) << 0.0, 0, 0, 1);
768 axisPoints[1] = (cv::Mat_ <float>(4, 1) << 0.3, 0, 0, 1);
769 axisPoints[2] = (cv::Mat_ <float>(4, 1) << 0.0, 0.3, 0, 1);
770 axisPoints[3] = (cv::Mat_ <float>(4, 1) << 0, 0, 0.3, 1);
771 std::vector<cv::Point2f> drawPoints(4);
772 for (int i = 0; i < 4; i++)
773 {
774 axisPoints[i] = mPlane2Camera * axisPoints[i];
775 drawPoints[i] = Camera2Pixel(axisPoints[i], mLastData.arCamParam.camParam.CameraMatrix);
776 }
777
778 cv::line(image, drawPoints[0], drawPoints[1], cv::Scalar(250, 0, 0), 5);
779 cv::line(image, drawPoints[0], drawPoints[2], cv::Scalar(0, 250, 0), 5);
780 cv::line(image, drawPoints[0], drawPoints[3], cv::Scalar(0, 0, 250), 5);
781 }
782 }
783}
784
785void Sslam::Detect()
786{
787 cv::equalizeHist(mLastData.gray, mLastData.gray);
788 dso::MinimalImageB minImg((int)mLastData.gray.cols, (int)mLastData.gray.rows, (unsigned char*)mLastData.gray.data);
789 dso::ImageAndExposure* undistImg = mUndistorter->undistort<unsigned char>(&minImg, 1, mLastData.timeStamp, 1.0f);
790 dso::SE3 groundTruth;
791
793 {
794 mUseSensor = true;
795 BtQuaternion quat = BtQuaternion(sqrt(0.5f), 0.0f, sqrt(0.5f), 0.0f) * (mLastData.arCamParam.GetCameraQuat() * BtQuaternion(0.0f, 1.0f, 0.0f, 0.0f));
796 groundTruth.setQuaternion(dso::Quat_d(quat.w, quat.x, quat.y, quat.z));
797 }
798
799 std::vector<ArImuData> frameImu = mLastData.arCamParam.GetCameraImu();
800 std::vector<dso::ImuData> imuCpy;
801
802 // copy imu list
803 //if (!frameImu.empty())
804 //{
805 // dso::setting_use_imu_track = true;
806 // dso::setting_use_imu = true;
807//
808 // for (unsigned int i = 0; i < frameImu.size(); i++)
809 // imuCpy.push_back(dso::ImuData(dso::Vec3(frameImu[i].m_gry.x, frameImu[i].m_gry.y, frameImu[i].m_gry.z), dso::Vec3(frameImu[i].m_acc.x, frameImu[i].m_acc.y, frameImu[i].m_acc.z), frameImu[i].m_delta));
810 //};
811 mSlam->addActiveFrame(undistImg, 0, mFrameIdx, groundTruth, imuCpy);
812
813 mFrameIdx += 1;
814 SAFE_DELETE(undistImg);
815
816 if (!mInitialized || mSlam->initFailed || !mSlam->initialized || mSlam->isLost)
817 mFound = false;
818}
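
// Detect runs one SLAM step: the grayscale frame is histogram-equalized, wrapped
// as a dso::MinimalImageB, undistorted into a dso::ImageAndExposure, optionally
// tagged with the sensor orientation as ground truth, and handed to
// FullSystem::addActiveFrame; mFound stays false until both DSO and the plane
// estimate are initialized.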
819
820void Sslam::GoThread()
821{
822 while (IsRunning())
823 {
824 ArManager* manager = ArManager::GetInstance();
825
826 if (needUpdate)
827 {
828 manager->GetLastData(mLastData);
829 needUpdate = false;
830
831 cv::Size nsize;
832 nsize.width = (int)((float)mLastData.gray.cols * mScaleX);
833 nsize.height = (int)((float)mLastData.gray.rows * mScaleY);
834
835 // camera size changed
836 // or too many bad frames
837 if (!mSlam || (nsize.width != mCamParams.inWidth) || (nsize.height != mCamParams.inHeight) ||
838 (mSlam && mSlam->initialized && mSlam->initFailed) || (mSlam && (mFrameIdx > 50) && (!mSlam->initialized || mSlam->isLost)))
839 mIsDirty = true;
840
841 // init detector
842 if (mIsDirty)
843 {
844 mIsDirty = false;
845 BuildCameraParam(mLastData.arCamParam.GetCameraParameter());
846 InitDetector();
847 }
848
849 if ((mScaleX != 1.0f) || (mScaleY != 1.0f))
850 cv::resize(mLastData.gray, mLastData.gray, cv::Size(mCamParams.inWidth, mCamParams.inHeight), 0, 0, cv::INTER_LINEAR);
851 //cv::blur(mLastData.gray, mLastData.gray, cv::Size(2, 2));
852
853 if (!mLastData.image.empty() && mSlam && mUndistorter && (dso::pyrLevelsUsed >= 3))
854 {
855 Detect();
856 }
857 }
858 else
859 {
860 // sleep briefly so the idle loop does not burn CPU
861 boost::this_thread::sleep_for(boost::chrono::milliseconds(1));
862 }
863 }
864}