# Custom Cohere LLM wrapper for LangChain (Hugging Face Space source).
import os

import cohere
from langchain.llms.base import LLM

# SECURITY NOTE(review): this API key was committed to source control and should
# be rotated. The COHERE_API_KEY environment variable is preferred; the literal
# is kept only as a backward-compatible fallback for existing deployments.
co = cohere.Client(os.getenv('COHERE_API_KEY', '3FZy1Q1sd9Yfs289shdPgwjAt4MEuMxUmQx3oecK'))
# Fine-tuned model id: 24222433-2a40-410c-af5c-65a5422282fb-ft
class CustomLLM(LLM):
    """LangChain-compatible wrapper around a fine-tuned Cohere generation model.

    Sends the prompt to the Cohere ``generate`` endpoint using the fine-tuned
    model below and returns the completion wrapped in a markdown python fence.
    """

    # Id of the fine-tuned Cohere model used for every generate() call.
    model: str = '24222433-2a40-410c-af5c-65a5422282fb-ft'

    @property
    def _llm_type(self) -> str:
        """Identifier LangChain uses for logging/serialization.

        Must be a property per the LangChain ``LLM`` interface — the original
        plain method would hand LangChain a bound method instead of a string.
        """
        return "custom"

    def _call(self, prompt: str, stop=None, run_manager=None) -> str:
        """Generate a completion for ``prompt`` via the Cohere API.

        Args:
            prompt: Text sent verbatim to the fine-tuned model.
            stop: Not supported; must be ``None``.
            run_manager: LangChain callback manager (unused here).

        Returns:
            The generated text wrapped in a ``` ```python ``` code fence.

        Raises:
            ValueError: If ``stop`` sequences are supplied.
        """
        if stop is not None:
            raise ValueError("stop kwargs are not permitted.")
        response = co.generate(
            model=self.model,
            prompt=prompt,  # f'{prompt}' was redundant — prompt is already a str
            max_tokens=1700,
            temperature=0.9,
        )
        text = response.generations[0].text
        print(text)  # NOTE(review): debug output — consider logging instead.
        return f' ```python \n {text} \n ``` '

    @property
    def _identifying_params(self):
        """Get the identifying parameters (property per the LangChain interface)."""
        return {"model_type": f'COHERE_CUSTOM-<{self.model}>'}
""" Now, this thing can be used as a custom LLM. Use it in the LLM Chain thing. Done mi boy. """ |