Create run_local_LLM.py
Classes/run_local_LLM.py +17 -0
Classes/run_local_LLM.py
ADDED
@@ -0,0 +1,17 @@
+from gradio_client import Client
+from Classes.Owiki_Class import OWiki
+
+class LocalLLM(OWiki):
+    def __init__(self, **kwargs):
+        self.gradio_url = kwargs['gradio_url']
+        self.client = None
+        try:
+            self.client = Client(self.gradio_url)
+        except:
+            pass
+
+    def predict(self, question, invocation_type, schemas):
+        if self.client:
+            response = self.client.predict(question, invocation_type, schemas)
+            return response
+        return None
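
For context, below is a minimal usage sketch of the class this commit adds. It is not part of the commit: the URL and argument values are placeholders, and it assumes the Gradio app behind gradio_url exposes a default endpoint that accepts (question, invocation_type, schemas) as positional inputs, which is how client.predict is called in the file above.

    # Usage sketch (placeholder values, not part of this commit).
    from Classes.run_local_LLM import LocalLLM

    llm = LocalLLM(gradio_url="http://127.0.0.1:7860")  # placeholder Gradio URL

    # If the Client could not connect, __init__ swallows the error and leaves
    # self.client as None, so predict() returns None instead of raising.
    answer = llm.predict(
        "What is OWiki?",  # question
        "default",         # invocation_type (placeholder)
        "",                # schemas (placeholder)
    )
    print(answer)

Two behaviors worth noting: __init__ fully overrides OWiki's constructor without calling super().__init__(), so only the gradio_url keyword is required to construct the object, and any connection failure is silently ignored, so callers should expect predict() to return None rather than raise when the Gradio client is unavailable.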