# src/llms/base_llm.py
from abc import ABC, abstractmethod
from typing import List, Optional

class BaseLLM(ABC):
    @abstractmethod
    def generate(
        self,
        prompt: str,
        max_tokens: Optional[int] = None,
        temperature: float = 0.7,
        **kwargs
    ) -> str:
        """
        Generate a response based on the given prompt

        Args:
            prompt (str): Input prompt for the model
            max_tokens (Optional[int]): Maximum number of tokens to generate
            temperature (float): Sampling temperature for randomness
            **kwargs: Additional provider-specific generation options

        Returns:
            str: Generated response
        """
        pass
    
    @abstractmethod
    def tokenize(self, text: str) -> List[str]:
        """
        Tokenize the input text
        
        Args:
            text (str): Input text to tokenize
        
        Returns:
            List[str]: List of tokens
        """
        pass
    
    @abstractmethod
    def count_tokens(self, text: str) -> int:
        """
        Count tokens in the input text
        
        Args:
            text (str): Input text to count tokens
        
        Returns:
            int: Number of tokens
        """
        pass
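

# Illustrative example: a minimal sketch of how a concrete subclass might
# satisfy the BaseLLM interface. "EchoLLM" and its whitespace tokenizer are
# hypothetical stand-ins shown only for illustration; a real backend would
# wrap an actual model or API client and its own tokenizer.
class EchoLLM(BaseLLM):
    """Toy implementation: echoes the prompt and tokenizes on whitespace."""

    def generate(
        self,
        prompt: str,
        max_tokens: Optional[int] = None,
        temperature: float = 0.7,
        **kwargs
    ) -> str:
        # Echo the prompt back, truncated to max_tokens tokens when a limit is set.
        tokens = self.tokenize(prompt)
        if max_tokens is not None:
            tokens = tokens[:max_tokens]
        return " ".join(tokens)

    def tokenize(self, text: str) -> List[str]:
        # Naive whitespace tokenization; a real LLM would use a model tokenizer.
        return text.split()

    def count_tokens(self, text: str) -> int:
        return len(self.tokenize(text))


# Usage sketch:
#   llm = EchoLLM()
#   llm.count_tokens("hello world")            # -> 2
#   llm.generate("hello world", max_tokens=1)  # -> "hello"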