Commit 6fa33108 authored by jinhai's avatar jinhai
Browse files

Merge branch 'branch-0.5.0' into 'branch-0.5.0'

MS-626 Refactor DataObj to support cache any type data

See merge request megasearch/milvus!681

Former-commit-id: e87c8be0ab6ea7d353cf780b0173f45db121b992
parents e532bd39 761d0e12
Loading
Loading
Loading
Loading
+1 −0
Original line number Diff line number Diff line
@@ -32,6 +32,7 @@ Please mark all change in change log and use the ticket from JIRA.
- MS-611 - Add resources validity check in ResourceMgr
- MS-619 - Add optimizer class in scheduler
- MS-614 - Preload table at startup
- MS-626 - Refactor DataObj to support cache any type data

## New Feature

+6 −6
Original line number Diff line number Diff line
@@ -86,11 +86,11 @@ Cache<ItemObj>::insert(const std::string &key, const ItemObj &item) {
        //if key already exist, subtract old item size
        if (lru_.exists(key)) {
            const ItemObj &old_item = lru_.get(key);
-            usage_ -= old_item->size();
+            usage_ -= old_item->Size();
        }

        //plus new item size
-        usage_ += item->size();
+        usage_ += item->Size();
    }

    //if usage exceed capacity, free some items
@@ -106,7 +106,7 @@ Cache<ItemObj>::insert(const std::string &key, const ItemObj &item) {
        std::lock_guard<std::mutex> lock(mutex_);

        lru_.put(key, item);
-        SERVER_LOG_DEBUG << "Insert " << key << " size:" << item->size()
+        SERVER_LOG_DEBUG << "Insert " << key << " size:" << item->Size()
                         << " bytes into cache, usage: " << usage_ << " bytes";
    }
}
@@ -120,9 +120,9 @@ Cache<ItemObj>::erase(const std::string &key) {
    }

    const ItemObj &old_item = lru_.get(key);
-    usage_ -= old_item->size();
+    usage_ -= old_item->Size();

-    SERVER_LOG_DEBUG << "Erase " << key << " size: " << old_item->size();
+    SERVER_LOG_DEBUG << "Erase " << key << " size: " << old_item->Size();

    lru_.erase(key);
}
@@ -160,7 +160,7 @@ Cache<ItemObj>::free_memory() {
            auto &obj_ptr = it->second;

            key_array.emplace(key);
-            released_size += obj_ptr->size();
+            released_size += obj_ptr->Size();
            ++it;
        }
    }
+2 −6
Original line number Diff line number Diff line
@@ -59,14 +59,10 @@ CpuCacheMgr::GetInstance() {
    return &s_mgr;
}

-engine::VecIndexPtr
+DataObjPtr
CpuCacheMgr::GetIndex(const std::string& key) {
    DataObjPtr obj = GetItem(key);
-    if (obj != nullptr) {
-        return obj->data();
-    }
-
-    return nullptr;
+    return obj;
}

}  // namespace cache
+1 −1
Original line number Diff line number Diff line
@@ -35,7 +35,7 @@ class CpuCacheMgr : public CacheMgr<DataObjPtr> {
    static CpuCacheMgr*
    GetInstance();

-    engine::VecIndexPtr
+    DataObjPtr
    GetIndex(const std::string& key);
};

+2 −32
Original line number Diff line number Diff line
@@ -17,7 +17,6 @@

#pragma once

-#include "src/wrapper/VecIndex.h"

#include <memory>

@@ -26,38 +25,9 @@ namespace cache {

class DataObj {
 public:
-    explicit DataObj(const engine::VecIndexPtr& index) : index_(index) {
-    }
+    virtual int64_t
+    Size() = 0;

-    DataObj(const engine::VecIndexPtr& index, int64_t size) : index_(index), size_(size) {
-    }
-
-    engine::VecIndexPtr
-    data() {
-        return index_;
-    }
-
-    const engine::VecIndexPtr&
-    data() const {
-        return index_;
-    }
-
-    int64_t
-    size() const {
-        if (index_ == nullptr) {
-            return 0;
-        }
-
-        if (size_ > 0) {
-            return size_;
-        }
-
-        return index_->Count() * index_->Dimension() * sizeof(float);
-    }
-
- private:
-    engine::VecIndexPtr index_ = nullptr;
-    int64_t size_ = 0;
};

using DataObjPtr = std::shared_ptr<DataObj>;
Loading