Commit 114ad93e by tianxing wang

finish lfucache

parent 8f24d345
No preview for this file type
#include "lru_cache.h" #include "lru_cache.h"
#include "fifo_cache.h" #include "fifo_cache.h"
#include "lfu_cache.h"
#include <pybind11/pybind11.h> #include <pybind11/pybind11.h>
...@@ -70,6 +71,27 @@ PYBIND11_MODULE(TORCH_EXTENSION_NAME, m){ ...@@ -70,6 +71,27 @@ PYBIND11_MODULE(TORCH_EXTENSION_NAME, m){
/*-----------------------------------------------------------------------------------------------------------------------------------*/ /*-----------------------------------------------------------------------------------------------------------------------------------*/
/* lfucache */
/*-----------------------------------------------------------------------------------------------------------------------------------*/
py::class_<gpucache::lfucache::LFUCacheWrapper> lfu_cache(m, "LFUCache");
lfu_cache
.def(py::init<at::Tensor, gpucache::CacheConfig>())
.def("Get",&gpucache::lfucache::LFUCacheWrapper::Get,"get values for keys, find_mask return whether each key exists in cache")
.def("Put",&gpucache::lfucache::LFUCacheWrapper::Put,"put key-value pairs")
.def("Strategy",&gpucache::lfucache::LFUCacheWrapper::Strategy,"get evict strategy")
.def("Capacity",&gpucache::lfucache::LFUCacheWrapper::Capacity,"return cache capacity")
.def("KeySize",&gpucache::lfucache::LFUCacheWrapper::KeySize,"return key size")
.def("ValueSize",&gpucache::lfucache::LFUCacheWrapper::ValueSize,"return value size")
.def("MaxQueryNum",&gpucache::lfucache::LFUCacheWrapper::MaxQueryNum,"return max number of keys to get or key-values to put once")
.def("Clear",&gpucache::lfucache::LFUCacheWrapper::Clear,"clear cache")
.def("Device",&gpucache::lfucache::LFUCacheWrapper::DeviceId,"return device id")
.def("Dim",&gpucache::lfucache::LFUCacheWrapper::Dim,"return value dim");
m.def("NewLFUCache", &gpucache::lfucache::NewLFUCache, "create a lfu cache",py::return_value_policy::reference);
/*-----------------------------------------------------------------------------------------------------------------------------------*/
} }
......
...@@ -51,14 +51,15 @@ ...@@ -51,14 +51,15 @@
"name": "stdout", "name": "stdout",
"output_type": "stream", "output_type": "stream",
"text": [ "text": [
"FIFOCache: keySize: 4, valueSize: 128, dim: 32, capacity: 65536, maxQueryNum: 4096, deviceId: 0\n" "LFUCache: keySize: 4, valueSize: 128, dim: 32, capacity: 65536, maxQueryNum: 4096, deviceId: 0\n"
] ]
} }
], ],
"source": [ "source": [
"t = torch.empty([1],dtype=torch.float32)\n", "t = torch.empty([1],dtype=torch.float32)\n",
"# cache = libgpucache.NewLRUCache(t,cfg)\n", "# cache = libgpucache.NewLRUCache(t,cfg)\n",
"cache = libgpucache.NewFIFOCache(t,cfg)" "# cache = libgpucache.NewFIFOCache(t,cfg)\n",
"cache = libgpucache.NewLFUCache(t,cfg)"
] ]
}, },
{ {
...@@ -169,6 +170,13 @@ ...@@ -169,6 +170,13 @@
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [] "source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
} }
], ],
"metadata": { "metadata": {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment