Spaces:
Sleeping
Sleeping
Commit
·
641da69
1
Parent(s):
3723d1a
Create custom_llm.py
Browse files- custom_llm.py +30 -0
custom_llm.py
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os

import cohere
from langchain.llms.base import LLM
|
3 |
+
|
4 |
+
# Cohere client shared by CustomLLM below.
# Read the API key from the environment instead of hard-coding it in source.
# NOTE(review): a trial API key was previously committed on this line; it is
# exposed in the repository history and should be revoked/rotated.
co = cohere.Client(os.environ.get('COHERE_API_KEY', ''))
|
5 |
+
|
6 |
+
# 24222433-2a40-410c-af5c-65a5422282fb-ft
|
7 |
+
class CustomLLM(LLM):
    """LangChain-compatible wrapper around a fine-tuned Cohere model.

    Delegates generation to the module-level Cohere client ``co`` and returns
    the completion wrapped in a Markdown ``python`` code fence (the downstream
    consumer apparently renders the output as code).
    """

    # Fine-tuned Cohere model id used for generation.
    model: str = '24222433-2a40-410c-af5c-65a5422282fb-ft'
    # Generation parameters. Previously hard-coded inside _call; exposed as
    # fields (same defaults) so they can be overridden per instance.
    max_tokens: int = 1700
    temperature: float = 0.9

    @property
    def _llm_type(self) -> str:
        """Identifier LangChain uses to label this LLM implementation."""
        return "custom"

    def _call(self, prompt: str, stop=None, run_manager=None) -> str:
        """Generate a completion for *prompt* via the Cohere API.

        Args:
            prompt: Text prompt forwarded verbatim to the model.
            stop: Not supported; supplying a value raises ValueError.
            run_manager: Accepted for LangChain interface compatibility; unused.

        Returns:
            The generated text wrapped in a ``python`` Markdown code fence.

        Raises:
            ValueError: If *stop* sequences are supplied.
        """
        if stop is not None:
            raise ValueError("stop kwargs are not permitted.")

        response = co.generate(
            model=self.model,
            prompt=prompt,  # was f'{prompt}': redundant f-string wrapper removed
            max_tokens=self.max_tokens,
            temperature=self.temperature,
        )
        # Debug print() of the completion removed: it leaked every generation
        # to stdout on each call.
        text = response.generations[0].text
        return f' ```python \n {text} \n ``` '

    @property
    def _identifying_params(self):
        """Get the identifying parameters."""
        return {"model_type": f'COHERE_CUSTOM-<{self.model}>'}
|
28 |
+
|
29 |
+
|
30 |
+
""" CustomLLM can now be used as a drop-in custom LLM, e.g. as the LLM passed to a LangChain LLMChain. """
|