Create cache.py
services/cache.py · ADDED · +24 -0
# cache.py
from functools import lru_cache
from typing import Optional, Tuple

# TODO: explain how to use the cache
class ResponseCache:
    """Caches generated responses keyed by prompt and a hash of the generation config."""

    def __init__(self, cache_size: int = 1000):
        self.cache_size = cache_size
        self._initialize_cache()

    def _initialize_cache(self):
        # Build an lru_cache-wrapped lookup sized to cache_size. The inner
        # function is a placeholder (it returns None), so lookups currently
        # memoize None rather than a real (response, score) pair.
        @lru_cache(maxsize=self.cache_size)
        def cached_response(prompt: str, config_hash: str) -> Tuple[str, float]:
            pass

        self.get_cached_response = cached_response

    def cache_response(self, prompt: str, config: "GenerationConfig", response: str, score: float) -> None:
        # GenerationConfig is defined elsewhere in the project; the annotation is
        # kept as a string so this module imports without that symbol in scope.
        config_hash = hash(str(config.__dict__))
        self.get_cached_response(prompt, str(config_hash))

    def get_response(self, prompt: str, config: "GenerationConfig") -> Optional[Tuple[str, float]]:
        config_hash = hash(str(config.__dict__))
        return self.get_cached_response(prompt, str(config_hash))
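As committed, the cache can never return a stored response: functools.lru_cache only memoizes the wrapped function's own return value, and the wrapped cached_response is a pass stub, so cache_response silently discards response and score, and get_response always yields None. Below is a minimal sketch of a cache that does hold externally supplied values, using collections.OrderedDict for the LRU policy; it keeps the commit's keying scheme (a hash of config.__dict__), but the class name StoredResponseCache and the untyped config parameter are illustrative assumptions, not part of the commit.

# Sketch only: an LRU cache that stores (response, score) pairs supplied by the
# caller. StoredResponseCache is a hypothetical name; config is any object with
# a __dict__, matching how the committed code hashes GenerationConfig.
from collections import OrderedDict
from typing import Any, Optional, Tuple


class StoredResponseCache:
    def __init__(self, cache_size: int = 1000):
        self.cache_size = cache_size
        self._store: OrderedDict = OrderedDict()  # (prompt, config_hash) -> (response, score)

    @staticmethod
    def _config_hash(config: Any) -> str:
        # Same keying scheme as the committed code: hash the config's __dict__.
        return str(hash(str(config.__dict__)))

    def cache_response(self, prompt: str, config: Any, response: str, score: float) -> None:
        key = (prompt, self._config_hash(config))
        self._store[key] = (response, score)
        self._store.move_to_end(key)            # mark as most recently used
        if len(self._store) > self.cache_size:
            self._store.popitem(last=False)     # evict the least recently used entry

    def get_response(self, prompt: str, config: Any) -> Optional[Tuple[str, float]]:
        key = (prompt, self._config_hash(config))
        if key not in self._store:
            return None
        self._store.move_to_end(key)            # refresh recency on a hit
        return self._store[key]

Usage mirrors the committed interface: call cache_response(prompt, config, response, score) after generating, and get_response(prompt, config) later returns the stored (response, score) pair, or None on a miss.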