CodeSmith / custom_llm.py
yash-srivastava19's picture
Update custom_llm.py
4aa8a87 verified
raw
history blame contribute delete
977 Bytes
import os
import cohere
from langchain.llms.base import LLM
# Module-level Cohere client shared by all CustomLLM instances.
# NOTE(security): the key MUST come from the environment — never commit a
# real key value here (an earlier revision leaked a trial key in a comment).
# Raises KeyError at import time if COHERE_API_KEY is unset, which fails
# fast rather than failing on the first request.
co = cohere.Client(os.environ['COHERE_API_KEY'])
class CustomLLM(LLM):
    """LangChain-compatible wrapper around a fine-tuned Cohere chat model.

    The fine-tuned model ID is read from the CODE_GEN_MODEL_ID environment
    variable at class-definition time (raises KeyError if unset).
    """

    # ID of the fine-tuned Cohere model to query.
    model: str = os.environ['CODE_GEN_MODEL_ID']

    @property
    def _llm_type(self) -> str:
        """Label LangChain uses to identify this LLM implementation."""
        return "custom"

    def _call(self, prompt: str, stop=None, run_manager=None) -> str:
        """Send ``prompt`` to the Cohere chat endpoint and return the reply.

        Args:
            prompt: The text to send to the model.
            stop: Not supported; must be None.
            run_manager: Accepted for LangChain compatibility; unused.

        Returns:
            The model's reply text, wrapped in single leading/trailing spaces
            (preserved from the original implementation).

        Raises:
            ValueError: If ``stop`` sequences are supplied.
        """
        if stop is not None:
            raise ValueError("stop kwargs are not permitted.")
        # BUG FIX: pass the fine-tuned model ID explicitly — previously
        # self.model was defined but never used, so every request silently
        # went to Cohere's default chat model instead of the fine-tune.
        response = co.chat(
            message=f'{prompt}',
            model=self.model,
            max_tokens=1700,
            temperature=0.9,
        )
        print(response.text)
        return f' {response.text} '

    @property
    def _identifying_params(self):
        """Get the identifying parameters."""
        return {"model_type": f'COHERE_CUSTOM-<{self.model}>'}
""" Now, this thing can be used as a custom LLM. Use it in the LLM Chain thing. Done mi boy. """