# CodeSmith / custom_llm.py
# Author: yash-srivastava19 (commit 641da69, 1.06 kB)
# NOTE(review): the lines above were Hugging Face file-viewer page chrome
# ("raw / history / blame") captured by the scrape; converted to comments
# so the module parses as valid Python.
import os

import cohere
from langchain.llms.base import LLM

# SECURITY: an API key was previously hard-coded here in plain text.
# Prefer the COHERE_API_KEY environment variable; the published trial key
# is kept only as a backward-compatible fallback and should be revoked.
co = cohere.Client(os.environ.get('COHERE_API_KEY', '3FZy1Q1sd9Yfs289shdPgwjAt4MEuMxUmQx3oecK'))
# Fine-tuned model id used by CustomLLM: 24222433-2a40-410c-af5c-65a5422282fb-ft
class CustomLLM(LLM):
    """LangChain-compatible LLM wrapper around a fine-tuned Cohere model.

    Delegates text generation to the module-level Cohere client ``co``,
    using the custom fine-tuned model id stored in ``model``.
    """

    # Id of the fine-tuned Cohere generation model to call.
    model: str = '24222433-2a40-410c-af5c-65a5422282fb-ft'

    @property
    def _llm_type(self) -> str:
        """Identifier LangChain uses to tag this LLM implementation."""
        return "custom"

    def _call(self, prompt: str, stop=None, run_manager=None) -> str:
        """Generate text for ``prompt`` and return it inside a Markdown python fence.

        Args:
            prompt: Text prompt forwarded verbatim to Cohere's generate endpoint.
            stop: Not supported; passing any value raises ``ValueError``.
            run_manager: Ignored (accepted only for LangChain interface compatibility).

        Returns:
            The first generation's text, wrapped as a ```python code block.

        Raises:
            ValueError: If ``stop`` is provided.
        """
        if stop is not None:
            raise ValueError("stop kwargs are not permitted.")
        response = co.generate(
            model=self.model,
            prompt=prompt,  # original wrapped this in f'{prompt}', redundant for a str
            max_tokens=1700,
            temperature=0.9,
        )
        text = response.generations[0].text
        print(text)  # NOTE(review): debug output to stdout — consider the logging module
        return f' ```python \n {text} \n ``` '

    @property
    def _identifying_params(self) -> dict:
        """Get the identifying parameters."""
        return {"model_type": f'COHERE_CUSTOM-<{self.model}>'}
""" Now, this thing can be used as a custom LLM. Use it in the LLM Chain thing. Done mi boy. """