camera_calibrator.cpp

#include "camera_calibrator_impl.h"
#include "core/imgui_utility.hpp"
#include "third_party/scope_guard.hpp"
#include "render/render_utility.h"
#include "render/render_texture.h"
#include <boost/asio/post.hpp>
#include <opencv2/calib3d.hpp>
#include <opencv2/imgproc.hpp>
#include <glm/ext/quaternion_geometric.hpp>
#include <glm/gtc/matrix_access.hpp>
#include <glm/gtx/string_cast.hpp>
#define NANOVG_GL3_IMPLEMENTATION
#include <nanovg_gl.h>
#include <algorithm>
#include <cassert>
#include <cmath>
#include <fstream>
#include <limits>
#include <numeric>
#include <optional>
#include <random>
#include <ranges>
namespace camera_calibrator_impl {
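// Conversion helpers between glm and OpenCV types.
// Note on indexing: glm matrices are column-major and indexed as mat[col][row],
// while cv::Mat::at(row, col) takes row first, so the element copies below swap
// the index order rather than transposing the data.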
cv::Mat to_cv_mat(const glm::mat3 &mat) {
    auto ret = cv::Mat(3, 3, CV_32FC1);
    for (auto j = 0; j < 3; ++j)
        for (auto i = 0; i < 3; ++i)
            ret.at<float>(j, i) = mat[i][j];
    return ret;
}
cv::Mat to_cv_mat(const glm::vec3 &vec) {
    auto ret = cv::Mat(3, 1, CV_32FC1);
    for (auto j = 0; j < 3; ++j)
        ret.at<float>(j, 0) = vec[j];
    return ret;
}
glm::mat3 rodrigues_to_mat(const cv::Mat &vec) {
    assert(vec.size().area() == 3);
    auto rot_mat = cv::Mat();
    cv::Rodrigues(vec, rot_mat);
    return to_mat3(rot_mat);
}
cv::Mat to_cv_rodigues(const glm::mat3 &mat) {
    auto ret = cv::Mat();
    cv::Rodrigues(to_cv_mat(mat), ret);
    return ret;
}
glm::mat3 to_mat3(const cv::Mat &mat) {
    assert(mat.size() == cv::Size(3, 3));
    assert(mat.type() == CV_32FC1
           || mat.type() == CV_64FC1);
    auto ret = glm::mat3();
    for (auto j = 0; j < 3; ++j)
        for (auto i = 0; i < 3; ++i) {
            switch (mat.type()) {
                // @formatter:off
                case CV_32FC1: { ret[i][j] = mat.at<float>(j, i); break; }
                case CV_64FC1: { ret[i][j] = mat.at<double>(j, i); break; }
                // @formatter:on
                default: {
                    RET_ERROR_E;
                }
            }
        }
    return ret;
}
glm::vec3 to_vec3(const cv::Mat &mat) {
    assert(mat.size() == cv::Size(1, 3));
    assert(mat.type() == CV_32FC1
           || mat.type() == CV_64FC1);
    auto ret = glm::vec3();
    for (auto j = 0; j < 3; ++j) {
        switch (mat.type()) {
            // @formatter:off
            case CV_32FC1: { ret[j] = mat.at<float>(j, 0); break; }
            case CV_64FC1: { ret[j] = mat.at<double>(j, 0); break; }
            // @formatter:on
            default: {
                RET_ERROR_E;
            }
        }
    }
    return ret;
}
template<typename Tp>
glm::vec3 to_vec3(const cv::Vec<Tp, 3> &vec) {
    auto ret = glm::vec3();
    for (auto j = 0; j < 3; ++j)
        ret[j] = vec[j];
    return ret;
}
void hand_eye_calib::set_object_points(cv::Size size, float dis) {
    obj_ps.clear();
    for (auto iy = 0; iy < size.height; ++iy)
        for (auto ix = 0; ix < size.width; ++ix) {
            // planar board: the third (z) component of the cv::Vec3f defaults to zero
            obj_ps.emplace_back(iy * dis, ix * dis);
        }
}
glm::mat4 hand_eye_calib::calc_track_mat(timestamp_type t) {
    return track_pool.query(t);
}
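// distort_point() applies what appears to be OpenCV's rational distortion model to a
// normalized image point (x, y), with coefficients stored in OpenCV order
// [k1, k2, p1, p2, k3, k4, k5, k6]:
//   r^2 = x^2 + y^2
//   k   = (1 + k1 r^2 + k2 r^4 + k3 r^6) / (1 + k4 r^2 + k5 r^4 + k6 r^6)
//   x'  = x k + 2 p1 x y + p2 (r^2 + 2 x^2)
//   y'  = y k + p1 (r^2 + 2 y^2) + 2 p2 x y
// project_point() then maps the distorted point into pixels with the intrinsic matrix.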
glm::vec2 hand_eye_calib::distort_point(glm::vec2 p) {
    auto &dc = dist_coeffs;
    assert(dc.size() == 8);
    auto k1 = dc[0], k2 = dc[1], p1 = dc[2], p2 = dc[3];
    auto k3 = dc[4], k4 = dc[5], k5 = dc[6], k6 = dc[7];
    auto r2 = p.x * p.x + p.y * p.y;
    auto r4 = r2 * r2, r6 = r4 * r2;
    auto k = (1.f + k1 * r2 + k2 * r4 + k3 * r6)
             / (1.f + k4 * r2 + k5 * r4 + k6 * r6);
    auto tx = p.x * k
              + 2 * p1 * p.x * p.y
              + p2 * (r2 + 2 * p.x * p.x);
    auto ty = p.y * k
              + p1 * (r2 + 2 * p.y * p.y)
              + 2 * p2 * p.x * p.y;
    return glm::vec2(tx, ty);
}
glm::vec2 hand_eye_calib::project_point(glm::vec3 p) {
    auto np = distort_point(from_homo(p));
    return transform_p(intrinsic_mat, np);
}
float hand_eye_calib::corner_distance(const corner_type &c1,
                                      const corner_type &c2) {
    assert(c1.size() == c2.size());
    auto corner_num = c1.size();
    float dis_sum = 0.f;
    for (auto k = 0; k < corner_num; ++k) {
        auto p1 = c1[k], p2 = c2[k];
        dis_sum += sqrt(pow(p1.x - p2.x, 2.f)
                        + pow(p1.y - p2.y, 2.f));
    }
    return dis_sum / corner_num;
}
glm::mat4 hand_eye_calib::calib_hand_eye() {
    // build hand-eye calibration problem
    auto ref_r_vec = std::vector<cv::Mat>(); // tracker in reference
    auto ref_t_vec = std::vector<cv::Mat>();
    for (auto &info: img_pool) {
        auto ref_mat = calc_track_mat(info.sample_ts + ts_offset);
        ref_mat = glm::inverse(ref_mat);
        auto r_mat = glm::mat3(ref_mat);
        auto t_vec = glm::vec3(to_translation(ref_mat)); // fourth column of matrix
        ref_r_vec.push_back(to_cv_mat(r_mat));
        ref_t_vec.push_back(to_cv_mat(t_vec));
    }
    // if (simulate_conventional) {
    //     cv::Mat ret_r, ret_t;
    //     cv::calibrateRobotWorldHandEye(
    //         cam_r_vec, cam_t_vec, // chess board in camera
    //         ref_r_vec, ref_t_vec, // tracker in reference
    //         aux_r, aux_t,         // tracker in chess board
    //         ret_r, ret_t);        // reference in camera
    //     //// SPDLOG_DEBUG("OpenCV: {}", glm::to_string(to_transform_mat(to_vec3(aux_t), to_mat3(aux_r))));
    //     return to_transform_mat(to_vec3(ret_t), to_mat3(ret_r));
    // }
    auto inv_op = [](const cv::Mat &r, const cv::Mat &t) {
        return glm::inverse(to_transform_mat(to_vec3(t), to_mat3(r)));
    };
    auto mat_A = std::vector<glm::mat4>(); // reference in tracker
    auto mat_B = std::vector<glm::mat4>(); // camera in chess board
    std::ranges::transform(ref_r_vec, ref_t_vec, std::back_inserter(mat_A), inv_op);
    std::ranges::transform(cam_r_vec, cam_t_vec, std::back_inserter(mat_B), inv_op);
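    // The block below solves the hand-eye problem in the form A_i X = Y B_i
    // (X: camera in reference, Y: chess board in tracker) with a weighted variant of
    // Shah's closed-form method: the rotations come from the dominant singular vectors
    // of sum_i w_i * kron(Ra_i, Rb_i), projected back onto SO(3), and the two
    // translations are then recovered from a weighted linear least-squares system.
    // This description is an interpretation of the code, not a quoted reference.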
    // weighted Shah algorithm
    using namespace Eigen;
    using mat_T_type = Matrix<float, 9, 9>;
    auto mat_T = mat_T_type();
    mat_T.setZero();
    for (auto k = 0; k < sample_num; ++k) {
        auto Ra = glm::mat3(mat_A[k]), Rb = glm::mat3(mat_B[k]);
        mat_T += sample_w[k] * kron_product(to_eigen(Ra), to_eigen(Rb));
    }
    JacobiSVD<mat_T_type> svdT(mat_T, ComputeFullU | ComputeFullV);
    auto x = svdT.matrixV().col(0), y = svdT.matrixU().col(0);
    auto shah_solve_op = [](Matrix3f x) -> Matrix3f {
        auto x_det = x.determinant();
        x *= (x_det >= 0 ? 1.f : -1.f)
             / std::pow(std::abs(x_det), 1.f / 3.f);
        JacobiSVD<Matrix3f> svdX(x, ComputeFullU | ComputeFullV);
        x = svdX.matrixU() * svdX.matrixV().transpose();
        return x.transpose();
    };
    auto Rx = shah_solve_op(Map<const Matrix3f>(x.data()));
    auto Ry = shah_solve_op(Map<const Matrix3f>(y.data()));
    using mat_A_type = Matrix<float, Dynamic, 6>;
    mat_A_type A = MatrixXf::Zero(3 * sample_num, 6);
    VectorXf b = VectorXf::Zero(3 * sample_num);
    for (int k = 0; k < sample_num; ++k) {
        auto ta = to_eigen(to_translation(mat_A[k]));
        auto tb = to_eigen(to_translation(mat_B[k]));
        A.block<3, 3>(3 * k, 0) = sample_w[k] * -to_eigen(glm::mat3(mat_A[k]));
        A.block<3, 3>(3 * k, 3) = sample_w[k] * Matrix3f::Identity();
        b.segment<3>(3 * k) = sample_w[k] * (ta - Ry * tb);
    }
    using vec_t_type = Vector<float, 6>;
    vec_t_type t = A.colPivHouseholderQr().solve(b);
    Matrix4f X, Y;
    X << Rx, t.segment<3>(0), 0, 0, 0, 1; // camera in reference
    Y << Ry, t.segment<3>(3), 0, 0, 0, 1; // chess board in tracker
    auto aux_mat = glm::inverse(to_mat4(Y));
    aux_r = to_cv_mat(glm::mat3(aux_mat));
    aux_t = to_cv_mat(to_translation(aux_mat));
    // SPDLOG_DEBUG("Shah: {}", glm::to_string(aux_mat));
    return glm::inverse(to_mat4(X));
}
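// evaluate_hand_eye() scores a candidate result (reference in camera) by closing the
// transform loop for every sample: channel 0 holds the 3D error of each board corner
// after the round trip chess board -> tracker -> reference -> camera -> chess board,
// and channel 1 holds the 2D distance between the detected corners and the corners
// re-projected through that chain. The sample-weighted mean of both is returned.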
cv::Scalar hand_eye_calib::evaluate_hand_eye(const glm::mat4 &ret_mat) {
    auto aux_mat = to_transform_mat(to_vec3(aux_t), to_mat3(aux_r));
    auto chess_mat = glm::inverse(aux_mat); // chess board in tracker
    auto err_mat = cv::Mat(sample_num, corner_num, CV_32FC2);
    auto obj_ps_err = std::vector<cv::Point3f>();
    std::ranges::transform(obj_ps, std::back_inserter(obj_ps_err), [](cv::Vec3f &p) {
        return cv::Point3f(p[0], p[1], p[2]);
    });
    for (auto j = 0; j < sample_num; ++j) {
        auto &item = img_pool[j];
        auto proj_mat = ret_mat // reference in camera
                        * glm::inverse(calc_track_mat(item.sample_ts + ts_offset)) // tracker in reference
                        * chess_mat; // chess board in tracker
        auto cam_mat = to_transform_mat( // chess board in camera
            to_vec3(cam_t_vec[j]), to_mat3(cam_r_vec[j]));
        auto loop_mat = glm::inverse(proj_mat) // camera in chess board
                        * cam_mat; // chess board in camera
        for (auto i = 0; i < corner_num; ++i) {
            auto p_gold = to_vec3(obj_ps[i]);
            auto p_pred = transform_p(loop_mat, p_gold);
            auto err_3d = glm::distance(p_gold, p_pred);
            err_mat.at<cv::Vec2f>(j, i)[0] = err_3d;
        }
        auto proj_r = to_cv_rodigues(glm::mat3(proj_mat));
        auto proj_t = to_cv_mat(to_translation(proj_mat));
        auto c_pred = std::vector<cv::Point2f>();
        cv::projectPoints(obj_ps_err, proj_r, proj_t,
                          cam_cv.intrinsic, cam_cv.dist_coeffs,
                          c_pred);
        assert(c_pred.size() == corner_num);
        for (auto i = 0; i < corner_num; ++i) {
            auto c_gold = to_vec2(item.corner[i]);
            auto err_2d = glm::distance(c_gold, to_vec2(c_pred[i]));
            err_mat.at<cv::Vec2f>(j, i)[1] = err_2d;
        }
    }
    // calculate weighted average error
    cv::Scalar err_sum = {};
    float w_sum = 0.f;
    for (auto j = 0; j < sample_num; ++j) {
        for (auto i = 0; i < corner_num; ++i) {
            auto err = sample_w[j] * err_mat.at<cv::Vec2f>(j, i);
            err_sum[0] += err[0];
            err_sum[1] += err[1];
        }
        w_sum += sample_w[j] * corner_num;
    }
    return err_sum / w_sum;
}
hand_eye_calib::camera_calib_result
hand_eye_calib::camera_calib(const img_index_list_type &index) {
    auto img_ps_in = std::vector<corner_type>();
    std::ranges::transform(index, std::back_inserter(img_ps_in),
                           [this](const auto &ind) { return img_pool[ind].corner; });
    auto select_num = img_ps_in.size();
    auto obj_ps_in = std::vector<object_points_type>(select_num, obj_ps);
    auto ret = camera_calib_result();
    cv::Mat r_vec_ignore, t_vec_ignore;
    auto err = cv::calibrateCamera(
        obj_ps_in, img_ps_in, img_size,
        ret.intrinsic, ret.dist_coeffs,
        r_vec_ignore, t_vec_ignore,
        cv::CALIB_RATIONAL_MODEL); // use k4, k5, k6
    assert(!std::isnan(err));
    return ret;
}
float hand_eye_calib::reproject_error(const camera_calib_result &cam,
                                      const corner_type &img_ps) {
    cv::Mat r_vec, t_vec;
    auto ok = cv::solvePnP(obj_ps, img_ps,
                           cam.intrinsic, cam.dist_coeffs,
                           r_vec, t_vec);
    assert(ok);
    corner_type proj_ps;
    cv::projectPoints(obj_ps, r_vec, t_vec,
                      cam.intrinsic, cam.dist_coeffs, proj_ps);
    return corner_distance(img_ps, proj_ps);
}
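// camera_calib_ransac() runs a simple RANSAC-style intrinsic calibration: each
// iteration randomly picks calib_img_num images (duplicates are possible), calibrates
// on them, and measures the per-image reprojection error over the whole pool.
// Iterations whose inlier ratio (error <= accept_threshold) falls below valid_ratio are
// rejected; among the rest, the model with the lowest mean error wins. Afterwards,
// images whose error still exceeds the threshold are dropped from img_pool so the later
// hand-eye stage only sees inliers.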
hand_eye_calib::camera_calib_result
hand_eye_calib::camera_calib_ransac() {
    static constexpr auto max_iter = 32; // TODO: make configurable
    static constexpr auto calib_img_num = 10;
    static constexpr auto accept_threshold = 1.0f; // pix
    static constexpr auto valid_ratio = 0.8f; // fraction of samples
    auto rd = std::random_device();
    auto gen = std::mt19937(rd());
    auto sampler = std::uniform_int_distribution<size_t>(0, sample_num - 1);
    auto err_vec = std::vector<float>(sample_num);
    auto ret = camera_calib_result();
    auto ret_err = std::numeric_limits<float>::max();
    auto index_list = img_index_list_type();
    for (auto k = 0; k < max_iter; ++k) {
        index_list.resize(calib_img_num);
        std::ranges::generate(index_list, [&] { return sampler(gen); });
        auto cur = camera_calib(index_list);
        // evaluate current result
        err_vec.clear();
        std::ranges::transform(img_pool, std::back_inserter(err_vec),
                               [=, this](const auto &img) { return reproject_error(cur, img.corner); });
        auto err_mean = std::accumulate(err_vec.begin(), err_vec.end(), 0.f) / sample_num;
        SPDLOG_INFO("Iter {}, error = {:.2f}pix.", k, err_mean);
        if (save_calib_error) {
            auto fout = std::ofstream(fmt::format("calib_{}.txt", k));
            for (auto val: err_vec) {
                fout << val << std::endl;
            }
        }
        auto ratio = 1.f * std::ranges::count_if(
            err_vec, [=](auto err) { return err <= accept_threshold; }) / sample_num;
        if (ratio < valid_ratio) continue;
        if (err_mean < ret_err) {
            ret = cur;
            ret_err = err_mean;
        }
    }
    assert(!ret.intrinsic.empty());
    SPDLOG_INFO("Camera calibration error is {:.2f}pix", ret_err);
    // remove outlier samples
    auto end_iter = std::ranges::remove_if(img_pool, [=, this](const auto &img) {
        return reproject_error(ret, img.corner) > accept_threshold;
    });
    img_pool.erase(end_iter.begin(), end_iter.end());
    sample_num = img_pool.size();
    return ret;
}
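// calc_sample_weights() down-weights clustered viewpoints: each sample's weight is the
// reciprocal of the number of samples (itself included) whose chess-board position in
// camera coordinates lies within sample_neighbor_radius, so a dense cluster of
// near-identical poses contributes roughly as much as a single isolated pose.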
void hand_eye_calib::calc_sample_weights() {
    auto cam_p = std::vector<glm::vec3>();
    auto cam_p_op = [](const cv::Mat &r, const cv::Mat &t) {
        auto t_mat = to_transform_mat(to_vec3(t), to_mat3(r));
        return to_translation(t_mat);
    };
    std::ranges::transform(cam_r_vec, cam_t_vec,
                           std::back_inserter(cam_p), cam_p_op);
    sample_w.clear();
    auto w_op = [&](glm::vec3 x) {
        return 1.f / std::ranges::count_if(cam_p, [=](glm::vec3 y) {
            return glm::distance(x, y) <= sample_neighbor_radius;
        });
    };
    std::ranges::transform(cam_p, std::back_inserter(sample_w), w_op);
}
void hand_eye_calib::calc() {
    sample_num = img_pool.size();
    corner_num = img_pool.begin()->corner.size();
    if (sub_sample_count != 0) {
        assert(sample_num >= sub_sample_count);
        std::random_device rd;
        std::mt19937 g(rd());
        std::shuffle(img_pool.begin(), img_pool.end(), g);
        img_pool.erase(img_pool.begin() + sub_sample_count, img_pool.end());
        sample_num = img_pool.size();
    }
    auto ts = current_timestamp();
    cam_cv = camera_calib_ransac();
    SPDLOG_INFO("Used time: {}ms", (current_timestamp() - ts) / 1000);
    intrinsic_mat = to_mat3(cam_cv.intrinsic);
    assert(cam_cv.dist_coeffs.size().height == 1);
    auto dist_coeffs_num = cam_cv.dist_coeffs.size().width;
    assert(dist_coeffs_num >= 8); // k1-3, p1-2, k4-6
    dist_coeffs.reserve(dist_coeffs_num);
    assert(cam_cv.dist_coeffs.type() == CV_64FC1);
    for (auto k = 0; k < 8; ++k) {
        dist_coeffs.push_back(cam_cv.dist_coeffs.at<double>(0, k));
    }
    // solve PnP for each image
    cam_r_vec = std::vector<cv::Mat>(); // chess board in camera
    cam_t_vec = std::vector<cv::Mat>();
    auto obj_ps_pnp = std::vector<cv::Point3d>();
    std::ranges::transform(obj_ps, std::back_inserter(obj_ps_pnp), [](cv::Vec3f &p) {
        return cv::Point3d(p[0], p[1], p[2]);
    });
    std::optional<glm::quat> last_rot_quat;
    static constexpr auto quat_threshold = 0.5f;
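    // The retry loop below guards against the 180-degree corner-ordering ambiguity of a
    // symmetric chessboard: if the PnP rotation for this frame differs from the previous
    // frame's by more than quat_threshold (measured as 1 - dot(q1, q2), following the
    // linked quaternion-distance discussion), the detected corners are assumed to be in
    // reversed order, so they are reversed and PnP is solved again.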
    for (auto &img: img_pool) {
        auto &corner = img.corner;
        cv::Mat chess_r, chess_t; // chess board in camera
        for (;;) {
            bool ok = cv::solvePnP(obj_ps_pnp, corner,
                                   cam_cv.intrinsic, cam_cv.dist_coeffs,
                                   chess_r, chess_t);
            assert(ok);
            // rotate corners if needed
            auto rot_mat = rodrigues_to_mat(chess_r);
            auto rot_quat = glm::quat_cast(rot_mat);
            if (!last_rot_quat.has_value()
                // https://math.stackexchange.com/questions/90081/quaternion-distance
                || 1 - glm::dot(rot_quat, *last_rot_quat) < quat_threshold) {
                last_rot_quat = rot_quat;
                break;
            } else {
                std::ranges::reverse(corner);
            }
        }
        auto ret_mat = to_transform_mat(to_vec3(chess_t),
                                        rodrigues_to_mat(chess_r));
        auto cam_r = to_cv_mat(glm::mat3(ret_mat)); // rotation part
        auto cam_t = to_cv_mat(to_translation(ret_mat)); // translation part
        cam_r_vec.push_back(cam_r);
        cam_t_vec.push_back(cam_t);
    }
    calc_sample_weights();
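    // The temporal offset between image timestamps and tracking data is estimated with a
    // ternary search ("trichotomy") over ts_offset in [-128 ms, 0]: each probe re-runs
    // the hand-eye solve and scores it with evaluate_hand_eye(), combining the 3D and 2D
    // errors as 10 * err_3d + err_2d. The search assumes this combined error is unimodal
    // in the offset, and stops once the bracket is narrower than 1 ms.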
    // find minimal error with a ternary search (trichotomy)
    auto offset_l = -128000; // -128ms
    auto offset_r = 0; // 0ms
    static constexpr auto eps = 1000; // 1ms
    auto eval_func = [this](int offset) {
        ts_offset = offset;
        auto ret_mat = calib_hand_eye();
        return evaluate_hand_eye(ret_mat);
    };
    auto weight_op = [](const cv::Scalar &w) {
        // return w[1];
        return w[0] * 10 + w[1]; // w[0]: 3d, w[1]: 2d
    };
    auto better_op = [=](const cv::Scalar &lhs, // is lhs better than rhs ?
                         const cv::Scalar &rhs) {
        return weight_op(lhs) < weight_op(rhs);
    };
    while (offset_r - offset_l > eps) {
        auto l_sec = (2 * offset_l + offset_r) / 3;
        auto r_sec = (offset_l + 2 * offset_r) / 3;
        auto l_err = eval_func(l_sec);
        auto r_err = eval_func(r_sec);
        if (better_op(l_err, r_err)) {
            offset_r = r_sec;
        } else {
            offset_l = l_sec;
        }
    }
    result_ts_offset = (offset_l + offset_r) / 2;
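    // When simulate_conventional is set, the calibration is re-run in a "conventional"
    // fashion for comparison (this reading is based on the flag name): only a handful of
    // roughly evenly spaced frames are kept, the intrinsics are re-estimated from just
    // those frames, PnP is solved again for them, and all sample weights are reset to 1
    // so no viewpoint weighting is applied.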
    if (simulate_conventional) {
        static constexpr auto select_num = 4;
        auto step = (float) sample_num / (select_num + 1);
        auto select_ind = img_index_list_type(select_num);
        for (auto k = 1; k < select_num; ++k) {
            select_ind[k] = sample_num - step * k;
        }
        cam_cv = camera_calib(select_ind);
        intrinsic_mat = to_mat3(cam_cv.intrinsic);
        dist_coeffs.clear();
        for (auto k = 0; k < 8; ++k) {
            dist_coeffs.push_back(cam_cv.dist_coeffs.at<double>(0, k));
        }
        auto img_pool_new = image_pool_type();
        std::ranges::transform(select_ind, std::back_inserter(img_pool_new),
                               [this](auto k) { return img_pool[k]; });
        img_pool = img_pool_new;
        sample_num = select_num;
        // solve PnP for each image
        cam_r_vec = std::vector<cv::Mat>(); // chess board in camera
        cam_t_vec = std::vector<cv::Mat>();
        for (auto &img: img_pool) {
            auto &corner = img.corner;
            cv::Mat chess_r, chess_t; // chess board in camera
            bool ok = cv::solvePnP(obj_ps_pnp, corner,
                                   cam_cv.intrinsic, cam_cv.dist_coeffs,
                                   chess_r, chess_t);
            assert(ok);
            auto ret_mat = to_transform_mat(to_vec3(chess_t),
                                            rodrigues_to_mat(chess_r));
            auto cam_r = to_cv_mat(glm::mat3(ret_mat)); // rotation part
            auto cam_t = to_cv_mat(to_translation(ret_mat)); // translation part
            cam_r_vec.push_back(cam_r);
            cam_t_vec.push_back(cam_t);
        }
        // reset weights
        sample_w = std::vector<float>(sample_num, 1.f);
    }
    if (disable_temporl_offset) {
        result_ts_offset = 0;
    }
    ts_offset = result_ts_offset;
    result_mat = glm::inverse(calib_hand_eye());
    SPDLOG_INFO("Estimated temporal offset is {}ms", result_ts_offset / 1000);
    auto err_val = evaluate_hand_eye(glm::inverse(result_mat));
    obj_reproj_err = err_val[0];
    SPDLOG_INFO("Average 3D projection error is {:.2f}mm", obj_reproj_err);
    img_reproj_err = err_val[1];
    SPDLOG_INFO("Average 2D projection error is {:.2f}pix", img_reproj_err);
}
camera_intrinsic_v0 camera_calibrator_impl::hand_eye_calib::intrinsic_v0() {
    auto ret = camera_intrinsic_v0();
    // glm matrices are column-major: intrinsic_mat[2] is the third column (cx, cy, 1)
    ret.fx = intrinsic_mat[0][0];
    ret.fy = intrinsic_mat[1][1];
    ret.cx = intrinsic_mat[2][0];
    ret.cy = intrinsic_mat[2][1];
    std::ranges::copy(dist_coeffs, std::back_inserter(ret.dist));
    ret.width = img_size.width;
    ret.height = img_size.height;
    return ret;
}
}
camera_calibrator::impl::impl(const create_config &_conf) {
    conf = _conf;
    vg = nvgCreateGL3(NVG_ANTIALIAS | NVG_STENCIL_STROKES);
}
camera_calibrator::impl::~impl() {
    if (tp != nullptr) {
        tp->purge();
        stop(true); // on_exit = true, force exit
    }
}
void camera_calibrator::impl::start() {
    // clear last data
    last_finish.store(nullptr);
    img_pool.clear();
    track_pool.clear();
    img_ok_cnt = 0;
    coverage_fbo = std::make_unique<smart_frame_buffer>();
    assert(tp == nullptr);
    tp = std::make_unique<BS::thread_pool>(conf.num_threads);
    img_conn = OBJ_SIG(conf.img_name)->connect(
        [this](auto name) { img_callback(name); });
}
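// process() builds the hand_eye_calib problem from the collected data: it keeps only
// images whose corners were detected and whose sharpness is below sharpness_threshold,
// copies their corners and timestamps into the solver together with all tracking poses,
// runs the calibration, and finally publishes the resulting transform to sophiar (if a
// connection is configured) and to the optional result callback.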
void camera_calibrator::impl::process() {
    calib = std::make_unique<hand_eye_calib>();
    calib->img_size = img_size;
    calib->set_object_points(conf.pattern_size, conf.corner_distance);
    auto valid_imgs =
        img_pool | std::views::filter([](const auto &item) {
            assert(item.process_finished);
            if (!item.corners_detected) return false;
            if (item.corner_sharpness > sharpness_threshold) return false;
            return true;
        }) | std::views::transform([](const auto &item) {
            assert(!item.corners.empty());
            return hand_eye_calib::image_store_type(
                item.corners, item.sample_ts);
        });
    std::ranges::copy(valid_imgs, std::back_inserter(calib->img_pool));
    for (auto &item: track_pool) {
        calib->track_pool.add(item.ref_mat, item.sample_ts);
    }
    calib->calc();
    if (conf.sophiar_conn != nullptr) {
        auto ret = Eigen::Isometry3d();
        to_eigen_transform(calib->result_mat, ret);
        conf.sophiar_conn->update_transform_variable(
            conf.result_transform_var, ret);
    }
    if (conf.cb_func) {
        conf.cb_func(result_type{
            .cam_in = calib->intrinsic_v0(),
            .transform = calib->result_mat,
            .time_offset = calib->result_ts_offset,
        });
    }
}
void camera_calibrator::impl::simulate_process(const simulate_info_type &info) {
    load_track_data(info.data_path);
    img_size = info.img_size;
    process();
}
void camera_calibrator::impl::stop(bool on_exit) {
    assert(img_conn.connected());
    img_conn.disconnect();
    tp = nullptr; // implicit wait()
    coverage_fbo = nullptr;
    if (!on_exit) {
        save_track_data();
        process();
    }
}
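// save_track_data() / load_track_data() exchange the captured session through a plain
// text file ("calib_data.txt" on save): first the number of tracking samples, then one
// line per sample holding its timestamp followed by the raw ref_mat values; after a
// blank line, the number of images with detected corners, then one line per image with
// its timestamp, the x/y coordinates of every corner, and the sharpness value.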
void camera_calibrator::impl::save_track_data() {
    auto fout = std::ofstream("calib_data.txt");
    // tracker data
    fout << track_pool.size() << std::endl;
    for (auto &item: track_pool) {
        fout << item.sample_ts << " ";
        using mat_type = decltype(item.ref_mat);
        using value_type = mat_type::value_type;
        static constexpr auto len = sizeof(mat_type) / sizeof(value_type);
        auto ptr = (value_type *) &item.ref_mat;
        for (auto i = 0; i < len; ++i) {
            fout << ptr[i] << " ";
        }
        fout << std::endl;
    }
    fout << std::endl;
    // camera data
    auto sample_num = std::ranges::count_if(
        img_pool, [](auto &item) { return item.corners_detected; });
    fout << sample_num << std::endl;
    for (auto &item: img_pool) {
        if (!item.corners_detected) continue;
        fout << item.sample_ts << " ";
        for (auto &point: item.corners) {
            fout << point.x << " " << point.y << " ";
        }
        fout << item.corner_sharpness;
        fout << std::endl;
    }
    fout << std::endl;
}
void camera_calibrator::impl::load_track_data(const std::string &path) {
    auto fin = std::ifstream(path);
    // tracker data
    assert(track_pool.empty());
    size_t sample_num;
    fin >> sample_num;
    for (auto k = 0; k < sample_num; ++k) {
        auto &item = track_pool.emplace_back();
        fin >> item.sample_ts;
        using mat_type = decltype(item.ref_mat);
        using value_type = mat_type::value_type;
        static constexpr auto len = sizeof(mat_type) / sizeof(value_type);
        auto ptr = (value_type *) &item.ref_mat;
        for (auto i = 0; i < len; ++i) {
            fin >> ptr[i];
        }
    }
    // camera data
    fin >> sample_num;
    for (auto k = 0; k < sample_num; ++k) {
        auto &item = img_pool.emplace_back();
        fin >> item.sample_ts;
        assert(item.sample_ts != 0);
        auto corner_num = conf.pattern_size.area();
        for (auto i = 0; i < corner_num; ++i) {
            auto &p = item.corners.emplace_back();
            fin >> p.x >> p.y;
        }
        fin >> item.corner_sharpness;
        item.corners_detected = true;
        item.process_finished = true;
    }
}
void camera_calibrator::impl::per_image_process(img_store_type *st) {
    auto closer = sg::make_scope_guard([&] {
        st->process_finished = true;
        // release image to save memory
        st->img_mat = cv::Mat();
        st->img = nullptr;
    });
    // color to gray
    cv::Mat img_gray;
    cv::cvtColor(st->img_mat, img_gray, cv::COLOR_BGR2GRAY);
    // find control points
    assert(conf.board_type == CALIB_CHESS);
    auto corner_flag = cv::CALIB_CB_ADAPTIVE_THRESH
                       | cv::CALIB_CB_NORMALIZE_IMAGE
                       | cv::CALIB_CB_FAST_CHECK;
    corner_type img_corners;
    bool ok = cv::findChessboardCorners(img_gray, conf.pattern_size,
                                        img_corners, corner_flag);
    if (!ok) {
        // last_finish.store(nullptr, std::memory_order::relaxed);
        return;
    }
    st->corners_detected = true;
    // refine corner locations to sub-pixel accuracy
    // https://docs.opencv.org/4.9.0/d2/d1d/samples_2cpp_2lkdemo_8cpp-example.html
    auto win_size = cv::Size(10, 10);
    auto zero_zone = cv::Size(-1, -1);
    auto term_crit = cv::TermCriteria(cv::TermCriteria::COUNT | cv::TermCriteria::EPS, 20, 0.03);
    cv::cornerSubPix(img_gray, img_corners, win_size, zero_zone, term_crit);
    st->corners = img_corners;
    // evaluate sharpness
    auto sharpness = cv::estimateChessboardSharpness(
        img_gray, conf.pattern_size, img_corners);
    st->corner_sharpness = sharpness[0];
    if (st->corner_sharpness > sharpness_threshold) return;
    ++img_ok_cnt;
}
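// img_callback() runs on every incoming frame: it records the tracked reference pose at
// the frame's timestamp, skips the frame when the tracking error exceeds
// track_err_threshold or the worker pool is already saturated, and otherwise stores the
// image and dispatches corner detection (per_image_process) to a worker thread. The raw
// pointer handed to the task assumes img_pool keeps element addresses stable while
// tasks are in flight (e.g. a deque- or list-like container).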
void camera_calibrator::impl::img_callback(obj_name_type name) {
    assert(name == conf.img_name);
    auto img = to_image(name);
    if (img == nullptr) return;
    if (img_size.empty()) {
        img_size = img->size();
    } else {
        assert(img_size == img->size());
    }
    // tracking information
    auto cur_ts = OBJ_TS(conf.img_name);
    auto ref_t = conf.sophiar_conn
        ->query_transform_variable(conf.ref_transform_var);
    if (!ref_t.has_value()) return;
    auto &item = track_pool.emplace_back();
    item.ref_mat = to_mat4(*ref_t);
    item.sample_ts = cur_ts;
    auto err_t = conf.sophiar_conn
        ->query_double_variable(conf.ref_error_var);
    if (!err_t.has_value()) return;
    // assert(err_t.has_value());
    item.track_error = *err_t;
    last_track_err = item.track_error;
    if (item.track_error > track_err_threshold) return;
    // throttle
    if (tp->get_tasks_queued() > conf.num_threads - 1) {
        return;
    }
    auto &st = img_pool.emplace_back();
    st.img = img;
    st.img_mat = img->cv_mat(conf.stream);
    st.sample_ts = cur_ts;
    // post task
    tp->detach_task([this, p_st = &st] {
        per_image_process(p_st);
        last_finish.store(p_st, std::memory_order::release);
    });
}
void camera_calibrator::impl::render_corners(const corner_type &corner, NVGcolor color) {
    auto size = query_viewport_size();
    nvgBeginFrame(vg, size.width, size.height, 1.0f);
    auto closer = sg::make_scope_guard([this] {
        nvgEndFrame(vg);
        // TODO: ugly hack; NanoVG leaves the source blend factor set to GL_ONE
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    });
    nvgBeginPath(vg);
    static constexpr auto r = 5.0f;
    for (auto &p: corner) {
        nvgCircle(vg, p.x, size.height - p.y, r); // flip Y
    }
    nvgPathWinding(vg, NVG_HOLE);
    nvgFillColor(vg, color);
    nvgFill(vg);
}
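// render() overlays calibration feedback on the live view: the corners of the most
// recently processed frame are drawn in red directly on screen, and the same corners are
// also accumulated in green into coverage_fbo, an offscreen framebuffer blended back at
// 50% alpha so the user can see how much of the image the chessboard has covered so far.
// The FBO is created lazily because img_size is only known after the first frame.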
void camera_calibrator::impl::render() {
    if (tp == nullptr) return; // not running
    // render coverage
    assert(coverage_fbo != nullptr);
    if (coverage_fbo->id == 0) [[unlikely]] { // created lazily once img_size is known
        auto fbo_conf = smart_frame_buffer::create_config{
            .size = img_size,
            .depth_fmt = GL_DEPTH24_STENCIL8,
        };
        coverage_fbo->create(fbo_conf);
    } else {
        auto tex_conf = tex_render_info{
            .mode = TEX_COLOR_ONLY,
        };
        tex_conf.color.fmt = COLOR_RGB;
        tex_conf.color.id = coverage_fbo->color_tex[0].id;
        tex_conf.color.alpha = 0.5f;
        render_texture(tex_conf);
    }
    auto st = last_finish.load(std::memory_order::acquire);
    if (st == nullptr) return;
    assert(st->process_finished);
    if (!st->corners_detected) return;
    render_corners(st->corners, nvgRGB(255, 0, 0));
    coverage_fbo->bind();
    render_corners(st->corners, nvgRGB(0, 255, 0));
    coverage_fbo->unbind();
}
void camera_calibrator::impl::show() {
    auto ctx = conf.ctx;
    bool is_running = (tp != nullptr);
    ImGui::SeparatorText("Actions");
    if (!is_running) {
        if (ImGui::Button("Start")) {
            post(*ctx, [this] { start(); });
        }
    } else {
        if (ImGui::Button("Close")) {
            post(*ctx, [this] { stop(); });
        }
    }
    // Info
    if (is_running) {
        ImGui::SeparatorText("Info");
        assert(tp != nullptr);
        ImGui::Text("Pending Images: %zu", tp->get_tasks_total());
        ImGui::Text("Track Count: %zu", track_pool.size());
        ImGui::Text("Image Count: %d / %zu", (int) img_ok_cnt, img_pool.size());
        { // track error
            auto guard = imgui_valid_style_guard(
                last_track_err <= track_err_threshold);
            ImGui::Text("Last Track Error: %.2f", last_track_err);
        }
        auto last_st = last_finish.load(std::memory_order::consume);
        if (last_st != nullptr && last_st->corners_detected) {
            auto guard = imgui_valid_style_guard(
                last_st->corner_sharpness <= sharpness_threshold);
            ImGui::Text("Last Sharpness: %.2f", last_st->corner_sharpness);
        }
    }
}
camera_calibrator::camera_calibrator(const create_config &conf)
    : pimpl(std::make_unique<impl>(conf)) {
}
camera_calibrator::~camera_calibrator() = default;
void camera_calibrator::show() {
    pimpl->show();
}
void camera_calibrator::render() {
    pimpl->render();
}
void camera_calibrator::simulate_process(const simulate_info_type &info) {
    pimpl->simulate_process(info);
}