# cache.py
from collections import OrderedDict
from typing import Optional, Tuple

from config.config import GenerationConfig


class ResponseCache:
    """LRU cache mapping (prompt, config) pairs to (response, score) results.

    Call get_response() before generating and reuse the result on a hit;
    call cache_response() after generating to store the new result.
    """

    def __init__(self, cache_size: int = 1000):
        self.cache_size = cache_size
        self._initialize_cache()

    def _initialize_cache(self) -> None:
        # An OrderedDict gives O(1) LRU behavior: move_to_end() marks an
        # entry as recently used, popitem(last=False) evicts the oldest.
        self._cache: "OrderedDict[Tuple[str, str], Tuple[str, float]]" = OrderedDict()

    def cache_response(self, prompt: str, config: GenerationConfig,
                       response: str, score: float) -> None:
        # Hash the config's attribute dict so configs with equal fields share entries.
        key = (prompt, str(hash(str(config.__dict__))))
        self._cache[key] = (response, score)
        self._cache.move_to_end(key)
        if len(self._cache) > self.cache_size:
            self._cache.popitem(last=False)

    def get_response(self, prompt: str,
                     config: GenerationConfig) -> Optional[Tuple[str, float]]:
        key = (prompt, str(hash(str(config.__dict__))))
        result = self._cache.get(key)
        if result is not None:
            self._cache.move_to_end(key)
        return result
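

# --- Usage sketch ------------------------------------------------------------
# A minimal demonstration of the cache, assuming config/config.py is
# importable. _DemoConfig is a hypothetical stand-in for GenerationConfig,
# used only so the example is self-contained; any object whose __dict__
# holds the generation parameters behaves the same way.
if __name__ == "__main__":
    class _DemoConfig:
        def __init__(self, temperature: float) -> None:
            self.temperature = temperature

    cache = ResponseCache(cache_size=2)
    cfg = _DemoConfig(temperature=0.7)
    cache.cache_response("Hello", cfg, "Hi there!", 0.93)
    print(cache.get_response("Hello", cfg))    # ('Hi there!', 0.93)
    print(cache.get_response("Unseen", cfg))   # None (never cached)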