#ifndef DEPTHGUIDE_BINARY_UTILITY_HPP
#define DEPTHGUIDE_BINARY_UTILITY_HPP
#include "core_v2/memory_manager.h"

#include <nlohmann/json.hpp>

#include <boost/core/noncopyable.hpp>
#include <boost/endian.hpp>

#include <algorithm>
#include <array>
#include <bit>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <memory>
#include <string>
#include <type_traits>
#include <utility>
  12. struct data_mem_type : private boost::noncopyable {
  13. host_memory_info mem;
  14. uint8_t *ptr = nullptr;
  15. size_t size = 0;
  16. explicit data_mem_type(size_t _size) {
  17. size = _size;
  18. mem = HOST_ALLOC(size);
  19. ptr = static_cast<uint8_t *>(mem.ptr);
  20. }
  21. uint8_t *start_ptr() const {
  22. return ptr;
  23. }
  24. uint8_t *end_ptr() const {
  25. return ptr + size;
  26. }
  27. };
/* A sized view into a shared, reference-counted host buffer (data_mem_type).
 * Copies of a data_type are cheap and share the underlying memory; `ptr` and
 * `size` describe this view's window into it. A default-constructed instance
 * is empty (no backing storage). */
struct data_type {
  uint8_t *ptr = nullptr;  // first byte of this view
  size_t size = 0;         // view length in bytes

  data_type() = default;

  // Allocate a fresh buffer of `_size` bytes.
  // pre_size: reserved size before ptr.
  explicit data_type(size_t _size, size_t pre_size = 0)
      : data_type(
      std::make_shared<data_mem_type>(_size + pre_size), pre_size, _size) {
  }

  // Allocate a fresh buffer and immediately copy `_size` bytes from `data`.
  data_type(size_t _size, void *data, size_t pre_size = 0)
      : data_type(_size, pre_size) {
    replace(0, _size, (uint8_t *) data);
  }

  // True when this view has no backing storage.
  bool empty() const {
    return ptr == nullptr;
  }

  // Deep copy into a newly allocated buffer (optionally with `pre_size`
  // bytes of headroom reserved before the data).
  auto clone(size_t pre_size = 0) const {
    auto ret = data_type(size, pre_size);
    ret.replace(0, *this);
    return ret;
  }

  /* if _size == -1:
   * ret_size = -offset (offset < 0)
   * ret_size keeps end_ptr() (offset >= 0) */
  // Note: `_size` is size_t, so the -1 sentinel actually compares as
  // SIZE_MAX. A negative `offset` yields a view of the bytes *before*
  // this view (still inside the shared allocation). The result shares
  // ownership of the underlying buffer — no copy is made.
  auto sub_data(ptrdiff_t offset, size_t _size = -1) const {
    // determine ret_size
    if (_size == -1) {
      if (offset < 0) {
        _size = -offset;
      } else {
        _size = size - offset;
      }
    }
    auto ret_ptr = ptr + offset;
    // The sub-view must stay inside the underlying allocation (which may
    // be larger than this view on both sides).
    assert(ret_ptr >= mem->start_ptr());
    assert(ret_ptr + _size <= mem->end_ptr());
    return data_type(mem, ret_ptr - mem->ptr, _size);
  }

  // Overwrite `_size` bytes at `offset` with bytes copied from `data`.
  void replace(size_t offset, size_t _size, uint8_t *data) {
    assert(offset + _size <= size);
    std::copy_n(data, _size, start_ptr() + offset);
  }

  // Overwrite with the full contents of another view.
  void replace(size_t offset, const data_type &data) {
    replace(offset, data.size, data.start_ptr());
  }

  // A widened view over the same memory (asserts in sub_data if it would
  // leave the underlying allocation).
  auto extend(size_t _size) const {
    return sub_data(0, size + _size);
  }

  // Grow this view in place by `_size` bytes, reallocating (and copying
  // the existing contents) if the current allocation has no slack.
  void extend_self(size_t _size) {
    reserve(size + _size, true);
  }

  /* Resize this view to `_size`. Growing first tries to use slack space in
   * the current allocation; otherwise it reallocates to the next power of
   * two (std::bit_ceil) to amortize future growth, copying the old bytes
   * when `keep_data` is set.
   * NOTE(review): unlike std::vector::reserve, a smaller `_size` *shrinks*
   * the view (resize semantics) — callers appear to rely on this. */
  void reserve(size_t _size, bool keep_data = false) {
    if (_size <= size) [[likely]] {
      size = _size;
      return;
    }
    assert(_size > size);
    // Enough slack after this view inside the existing allocation?
    if (mem != nullptr &&
        start_ptr() + _size <= mem->end_ptr()) {
      size = _size;
      return;
    }
    auto next_size = std::bit_ceil(_size);
    auto next = data_type(next_size);
    next.shrink(_size);
    if (keep_data) {
      assert(next.size >= size);
      next.replace(0, size, start_ptr());
    }
    // Drops our reference to the old allocation; it is freed once the last
    // sharing view goes away.
    *this = next;
  }

  // Trim the view to `_size` bytes; never reallocates.
  void shrink(size_t _size) {
    assert(_size <= size);
    size = _size;
  }

  // Typed element access: treat the view as an array of T and return a
  // pointer to element `pos`. The view must hold a whole number of T.
  template<typename T>
  T *at(size_t pos) {
    static_assert(std::is_trivial_v<T>);
    assert(size % sizeof(T) == 0);
    assert(pos < size / sizeof(T));
    return ((T *) start_ptr()) + pos;
  }

  // First byte of this view.
  uint8_t *start_ptr() const {
    return ptr;
  }

  // One past the last byte of this view.
  uint8_t *end_ptr() const {
    return ptr + size;
  }

private:
  // Shared ownership of the backing allocation; null for empty views.
  std::shared_ptr<data_mem_type> mem;

  // View of `_size` bytes starting at `offset` inside `_mem`.
  data_type(const std::shared_ptr<data_mem_type> &_mem,
            size_t offset, size_t _size) {
    mem = _mem;
    ptr = mem->ptr + offset;
    size = _size;
    assert(offset + size <= mem->size);
  }
};
  126. template<boost::endian::order net_order, typename T>
  127. std::enable_if_t<std::is_arithmetic_v<T>>
  128. inline swap_net_loc_endian(T &val) {
  129. if constexpr (boost::endian::order::native == net_order) {
  130. return;
  131. } else {
  132. boost::endian::endian_reverse_inplace(val);
  133. }
  134. }
  135. class versatile_io : public boost::noncopyable {
  136. public:
  137. versatile_io() // enable dynamic memory if no data provided
  138. : versatile_io({}, true) {
  139. }
  140. explicit versatile_io(data_type _data,
  141. bool _extendable = false)
  142. : data(std::move(_data)) {
  143. cur_ptr = start_ptr();
  144. extendable = _extendable;
  145. }
  146. // 从当前位置开始调整 offset
  147. void manual_offset(ptrdiff_t offset) {
  148. cur_ptr += offset;
  149. assert(cur_ptr >= start_ptr());
  150. assert(cur_ptr <= end_ptr());
  151. }
  152. auto current_offset() const {
  153. return cur_ptr - start_ptr();
  154. }
  155. auto remaining_bytes() const {
  156. return end_ptr() - cur_ptr;
  157. }
  158. bool empty() const {
  159. return cur_ptr == end_ptr();
  160. }
  161. data_type current_data() const {
  162. return data;
  163. }
  164. protected:
  165. data_type data;
  166. uint8_t *cur_ptr = nullptr;
  167. bool extendable = false;
  168. uint8_t *start_ptr() const {
  169. return data.start_ptr();
  170. }
  171. uint8_t *end_ptr() const {
  172. return data.end_ptr();
  173. }
  174. void ensure_remaining(size_t size, bool may_extend = true) {
  175. if (remaining_bytes() >= size) [[likely]] return;
  176. if (extendable && may_extend) {
  177. auto offset = current_offset(); // underlying data may be changed after reserve
  178. data.reserve(offset + size, true);
  179. cur_ptr = start_ptr() + offset;
  180. } else {
  181. RET_ERROR;
  182. }
  183. }
  184. };
  185. // 分多次读取数据
  186. template<boost::endian::order net_order>
  187. class versatile_reader : public versatile_io {
  188. public:
  189. using versatile_io::versatile_io;
  190. template<typename T>
  191. std::enable_if_t<std::is_arithmetic_v<T>, T>
  192. read_value() {
  193. T tmp_val;
  194. ensure_remaining(sizeof(T), false);
  195. std::copy_n(cur_ptr, sizeof(T), (uint8_t *) &tmp_val);
  196. swap_net_loc_endian<net_order>(tmp_val);
  197. cur_ptr += sizeof(T);
  198. return tmp_val;
  199. }
  200. template<typename T>
  201. std::enable_if_t<std::is_arithmetic_v<T>>
  202. read_value(T &val) {
  203. val = read_value<T>();
  204. }
  205. template<typename T, size_t Length>
  206. void read_value(std::array<T, Length> &arr) {
  207. for (auto &val: arr) {
  208. read_value(val);
  209. }
  210. }
  211. data_type read_data(size_t size) {
  212. ensure_remaining(size, false);
  213. auto offset = cur_ptr - start_ptr();
  214. auto ret = data.sub_data(offset, size);
  215. cur_ptr += size;
  216. return ret;
  217. }
  218. template<typename SizeType = uint32_t>
  219. data_type read_data_with_length() {
  220. auto size = read_value<SizeType>();
  221. return read_data(size);
  222. }
  223. void read_data(void *data, size_t size) {
  224. ensure_remaining(size, false);
  225. std::copy_n(cur_ptr, size, (uint8_t *) data);
  226. cur_ptr += size;
  227. }
  228. void read_data(const data_type &_data) {
  229. read_data(_data.start_ptr(), _data.size);
  230. }
  231. std::string read_std_string(size_t size) {
  232. ensure_remaining(size, false);
  233. auto ret = std::string((char *) cur_ptr, size);
  234. cur_ptr += size;
  235. return ret;
  236. }
  237. template<typename SizeType = uint32_t>
  238. nlohmann::json read_json_with_length() {
  239. auto j_size = read_value<SizeType>();
  240. auto j_str = read_std_string(j_size);
  241. return nlohmann::json::parse(j_str);
  242. }
  243. data_type read_remain() {
  244. auto offset = cur_ptr - start_ptr();
  245. auto size = end_ptr() - cur_ptr;
  246. auto ret = data.sub_data(offset, size);
  247. cur_ptr = end_ptr();
  248. return ret;
  249. }
  250. template<typename T>
  251. auto &operator>>(T &val) {
  252. read_value(val);
  253. return *this;
  254. }
  255. };
  256. // 分多次写入数据
  257. template<boost::endian::order net_order>
  258. class versatile_writer : public versatile_io {
  259. public:
  260. using versatile_io::versatile_io;
  261. template<typename T>
  262. std::enable_if_t<std::is_arithmetic_v<T>>
  263. write_value(T val) {
  264. swap_net_loc_endian<net_order>(val);
  265. ensure_remaining(sizeof(T));
  266. std::copy_n((uint8_t *) &val, sizeof(T), cur_ptr);
  267. cur_ptr += sizeof(T);
  268. }
  269. template<typename T, size_t Length>
  270. void write_value(const std::array<T, Length> &arr) {
  271. for (auto val: arr) {
  272. write_value(val);
  273. }
  274. }
  275. void write_data(const void *data, size_t size) {
  276. ensure_remaining(size);
  277. std::copy_n((uint8_t *) data, size, cur_ptr);
  278. cur_ptr += size;
  279. }
  280. void write_data(const data_type &_data) {
  281. write_data(_data.start_ptr(), _data.size);
  282. }
  283. template<typename SizeType = uint32_t>
  284. void write_with_length(const data_type &_data) {
  285. auto size = (SizeType) _data.size;
  286. write_value(size);
  287. write_value(_data);
  288. }
  289. void write_value(const std::string &str) {
  290. write_data(str.data(), str.length());
  291. }
  292. void write_value(const data_type &_data) {
  293. write_data(_data);
  294. }
  295. template<typename SizeType = uint32_t>
  296. void write_with_length(const nlohmann::json &j) {
  297. auto j_str = j.dump();
  298. auto j_size = (SizeType) j_str.length();
  299. write_value(j_size);
  300. write_value(j_str);
  301. }
  302. template<typename T>
  303. auto &operator<<(const T &val) {
  304. write_value(val);
  305. return *this;
  306. }
  307. };
// Wire format is big-endian ("network byte order"); convenience aliases
// for readers/writers that speak it.
static constexpr auto network_order = boost::endian::order::big;
using network_writer = versatile_writer<network_order>;
using network_reader = versatile_reader<network_order>;
  311. template<typename T, boost::endian::order net_order = network_order>
  312. static uint8_t *write_binary_number(uint8_t *ptr, T val) {
  313. static constexpr auto need_swap =
  314. (boost::endian::order::native != net_order);
  315. auto real_ptr = (T *) ptr;
  316. if constexpr (need_swap) {
  317. *real_ptr = boost::endian::endian_reverse(val);
  318. } else {
  319. *real_ptr = val;
  320. }
  321. return ptr + sizeof(T);
  322. }
  323. template<typename T, boost::endian::order net_order = network_order>
  324. static uint8_t *read_binary_number(uint8_t *ptr, T *val) {
  325. static constexpr auto need_swap =
  326. (boost::endian::order::native != net_order);
  327. *val = *(T *) ptr;
  328. if constexpr (need_swap) {
  329. boost::endian::endian_reverse_inplace(*val);
  330. }
  331. return ptr + sizeof(T);
  332. }
#endif //DEPTHGUIDE_BINARY_UTILITY_HPP