Part of https://www.notion.so/Laconic-Mainnet-Plan-1eca6b22d47280569cd0d1e6d711d949 Co-authored-by: Shreerang Kale <shreerangkale@gmail.com> Reviewed-on: #1 Co-authored-by: shreerang <shreerang@noreply.git.vdb.to> Co-committed-by: shreerang <shreerang@noreply.git.vdb.to>
60 lines · 2.0 KiB · Python
import json
|
|
import os
|
|
import time
|
|
from typing import Dict, Any, Optional
|
|
import diskcache
|
|
|
|
|
|
class Cache:
    """Disk-backed LRU response cache keyed by (method, params).

    Wraps ``diskcache.Cache`` with a byte size limit and an optional
    per-entry TTL. TTL is stored inline in the cached dict under the
    reserved key ``'_cache_expiry'`` and checked lazily on read.
    """

    def __init__(self, cache_dir: str = "./cache", size_limit_gb: int = 1):
        """Create the cache.

        Args:
            cache_dir: Directory where diskcache keeps its files.
            size_limit_gb: Approximate upper bound on disk usage, in GiB.
        """
        self.cache_dir = cache_dir
        self.size_limit_bytes = size_limit_gb * 1024 * 1024 * 1024
        self.cache = diskcache.Cache(
            directory=cache_dir,
            size_limit=self.size_limit_bytes,
            eviction_policy='least-recently-used'
        )

    def _make_key(self, method: str, params: Dict[str, Any]) -> str:
        """Build a deterministic key; sort_keys makes equal params map to equal keys."""
        return f"{method}:{json.dumps(params, sort_keys=True)}"

    def get(self, method: str, params: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Return the cached response for (method, params), or None.

        Expired TTL entries are deleted on access and reported as a miss.
        The ``'_cache_expiry'`` metadata key is stripped before returning.
        """
        key = self._make_key(method, params)
        cached_data = self.cache.get(key)

        if cached_data is None:
            return None

        # Entries stored with a TTL carry an inline expiry timestamp.
        if isinstance(cached_data, dict) and '_cache_expiry' in cached_data:
            if time.time() > cached_data['_cache_expiry']:
                # Expired: evict eagerly so it does not linger on disk.
                self.cache.delete(key)
                return None
            # Strip cache metadata so callers see the original response.
            response = cached_data.copy()
            del response['_cache_expiry']
            return response

        return cached_data

    def set(self, method: str, params: Dict[str, Any], response: Dict[str, Any], ttl: Optional[int] = None) -> None:
        """Store a response, optionally expiring after ``ttl`` seconds.

        When a TTL is given the expiry timestamp is written into a copy of
        the response (key ``'_cache_expiry'``) so the caller's dict is not
        mutated.
        """
        key = self._make_key(method, params)

        if ttl is not None:
            cached_response = response.copy()
            cached_response['_cache_expiry'] = time.time() + ttl
            self.cache.set(key, cached_response)
        else:
            self.cache.set(key, response)

    def size_check(self) -> Dict[str, Any]:
        """Report current disk usage versus the configured limit.

        Fix: ``diskcache.Cache.stats()`` returns ``(hits, misses)``, not
        ``(count, size)`` — the previous implementation reported the miss
        counter as ``size_bytes`` and the hit counter as ``count``. Use
        ``volume()`` for estimated on-disk bytes and ``len()`` for the
        number of entries instead.
        """
        gib = 1024 * 1024 * 1024
        size_bytes = self.cache.volume()
        return {
            "size_bytes": size_bytes,
            "size_gb": size_bytes / gib,
            "count": len(self.cache),
            "limit_gb": self.size_limit_bytes / gib,
        }
|