summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--c++/core/id_map.h153
1 files changed, 153 insertions, 0 deletions
diff --git a/c++/core/id_map.h b/c++/core/id_map.h
new file mode 100644
index 0000000..2ca3025
--- /dev/null
+++ b/c++/core/id_map.h
@@ -0,0 +1,153 @@
#pragma once

#include "id.h"
#include "error.h"

#include <algorithm>
#include <cstddef>
#include <deque>
#include <vector>
+
+namespace saw {
+/**
+ * Fast random-access id-based container.
+ *
+ * Access - O(1)
+ * Insert - amortized O(1)
+ * Erase  - O(n log n) (linear free-list scan plus a stable sort)
+ */
+template<typename T>
+class id_map final {
+private:
+ /**
+ * Container which stores the primary data
+ */
+ std::vector<T> data_;
+ /**
+ * Container which tracks free'd/fragmented elements within the
+ * main container
+ */
+ std::deque<id<T>> free_ids_;
+
+private:
+ /**
+ * Tries to reduce top ids
+ */
+ void reduce_free_ids() noexcept {
+ for(;;){
+ if(free_ids_.empty()){
+ break;
+ }
+
+ if((free_ids_.front() + 1) < data_.size()){
+ break;
+ }
+
+ /**
+ * Can this throw?
+ */
+ free_ids_.pop_front();
+ }
+ }
+public:
+ /**
+ * Default constructor
+ */
+ id_map() = default;
+
+ SAW_FORBID_COPY(id_map);
+ SAW_DEFAULT_MOVE(id_map);
+
+ /**
+ * Inserts an element into the container and returns either an id on success
+ * or an error on failure.
+ */
+ error_or<id<T>> insert(T val) noexcept {
+ /// @todo Fix size_t and id base type
+ if(free_ids_.empty()){
+ try {
+ size_t i = data._size();
+ data_.emplace_back(std::move(val));
+ return saw::id<T>{i};
+ } catch(std::exception& e) {
+ return make_error<err::out_of_memory>();
+ }
+ } else {
+ auto f_id = std::move(free_ids_.back());
+ free_ids_.pop_back();
+ if(free_ids_.size()*2 <= free_ids_.capacity()){
+ try{
+ free_ids_.shrink_to_fit();
+ }catch(std::exception& e){
+ free_ids_.push_back(f_id);
+ return make_error<err::out_of_memory>();
+ }
+ }
+ data_.at(f_id.get_value()) = std::move(dat);
+ return f_id;
+ }
+
+ exit(-1);
+ // Dummy return since this is not reachable
+ return make_error<err::critical>();
+ }
+
+ /**
+ * Erase a value at this id.
+ */
+ error_or<void> erase(const id<T>& val) noexcept {
+ /**
+ * If id is bigger than the available vector then return an error.
+ */
+ if(val.get_value() >= data_.size()){
+ return make_error<err::not_found>("ID is too large");
+ }
+
+ /**
+ * Check if it's the highest ID. Then we can just try to reduce the highest
+ * IDs.
+ */
+ if((val.get_value() + 1) == data_.size()){
+ data_.pop_back();
+ this->reduce_free_ids();
+ if(free_ids_.size()*2 <= free_ids_.capacity()){
+ try {
+ free_ids_.shrink_to_fit();
+ }catch(std::exception& e){
+ return make_error<err::out_of_memory>();
+ }
+
+ }
+ if(data_.size()*2 <= data_.capacity()){
+ try {
+ data_.shrink_to_fit();
+ }catch(std::exception& e){
+ return make_error<err::out_of_memory>();
+ }
+ }
+ return void_t{};
+ }
+
+ /**
+ * Check if ID already exists with the free IDs.
+ * This would mean that a double free has occured.
+ */
+ auto find_id = std::find(free_ids_.begin(), free_ids_.end(), val);
+ if(find_id != free_ids_.end()){
+ return make_error<err::not_found>("ID value has already been freed");
+ }
+
+ /**
+ * Insert id into deque and sort it.
+ */
+ try {
+ free_ids_.push_back(val);
+ } catch(std::exception& e){
+ return make_error<err::out_of_memory>();
+ }
+ std::stable_sort(free_ids_.begin(), free_ids_.end(),
+ [](const id<T>& left, const id<T>& right) -> bool {
+ return left.get_value() > right.get_value();
+ }
+ );
+ return void_t{};
+ }
+};
+}