import os
from typing import Any, List, Mapping, Optional

import cohere
from langchain.llms.base import LLM

co = cohere.Client(os.environ['COHERE_API_KEY'])  # Trial API key, read from the environment

# 24222433-2a40-410c-af5c-65a5422282fb-ft
class CustomLLM(LLM):
    """LangChain LLM wrapper around a custom (fine-tuned) Cohere model."""

    model: str = os.environ['CODE_GEN_MODEL_ID']  # The custom model we used.

    @property
    def _llm_type(self) -> str:
        return "custom"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[Any] = None,
    ) -> str:
        """Send the prompt to the Cohere chat endpoint and return the generated text."""
        if stop is not None:
            raise ValueError("stop kwargs are not permitted.")

        # Pass the custom model ID so the fine-tuned model is actually used.
        response = co.chat(message=prompt, model=self.model, max_tokens=1700, temperature=0.9)
        print(response.text)  # Log the raw completion for debugging.

        return response.text

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {"model_type": f'COHERE_CUSTOM-<{self.model}>'}


""" Now, this thing can be used as a custom LLM. Use it in the LLM Chain thing. Done mi boy. """