import gradio as gr
from llama_cpp import Llama

# Load the llama.cpp-compatible model binary and the default system prompt.
llm = Llama(model_path="./model.bin")

with open('system.prompt', 'r', encoding='utf-8') as f:
    prompt = f.read()
title = "Openbuddy LLama Api" | |
desc = '''<h1>Hello, world!</h1> | |
This is showcase how to make own server with OpenBuddy's model.<br> | |
I'm using here 3b model just for example. Also here's only CPU power.<br> | |
But you can use GPU power as well!<br><br> | |
<h1>How to GPU?</h1> | |
Change <code>`CMAKE_ARGS="-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS`</code> in Dockerfile on <code>`CMAKE_ARGS="-DLLAMA_CUBLAS=on"`</code>. Also you can try <code>`DLLAMA_CLBLAST`</code>, <code>`DLLAMA_METAL`</code> or <code>`DLLAMA_METAL`</code>.<br> | |
Powered by <a href="https://github.com/abetlen/llama-cpp-python">llama-cpp-python</a> and <a href="https://www.gradio.app/">Gradio</a>.<br><br> | |
<h1>How to test it on own machine?</h1> | |
You can install Docker, build image and run it. I made <code>`run-docker.sh`</code> for ya. To stop container run <code>`docker ps`</code>, find name of container and run <code>`docker stop _dockerContainerName_`</code><br> | |
Or you can once follow steps in Dockerfile and try it on your machine, not in Docker.<br><br> | |
Also it can run with quart+uvicorn! Check the repo!''' | |
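
# A minimal sketch of the GPU build step described above, assuming an NVIDIA
# GPU and the cuBLAS backend (flag names follow llama-cpp-python's build docs):
#
#   CMAKE_ARGS="-DLLAMA_CUBLAS=on" pip install --force-reinstall llama-cpp-python
#
# The same pattern applies for -DLLAMA_CLBLAST=on (OpenCL) or -DLLAMA_METAL=on (Apple Silicon).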
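
# A hypothetical local test run matching the Docker steps above (the image tag
# "openbuddy-api" is an assumption, not taken from the repo; 7860 is Gradio's default port):
#
#   docker build -t openbuddy-api .
#   docker run -p 7860:7860 openbuddy-api
#   docker ps                          # find the container name
#   docker stop _dockerContainerName_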
def greet(request: str, max_tokens: int = 64, override_system_prompt: str = ""):
    try:
        system_prompt = override_system_prompt if override_system_prompt != "" else prompt
        # Clamp max_tokens to (0, 256); fall back to the default of 64 otherwise.
        max_tokens = int(max_tokens) if 0 < max_tokens < 256 else 64
        user_prompt = system_prompt + "\n\nUser: " + request + "\nAssistant: "
    except Exception:
        return "ERROR 400: Not enough data"
    try:
        # Stop generation at the next user turn or at the first newline.
        output = llm(user_prompt, max_tokens=max_tokens, stop=["User:", "\n"], echo=False)
        print(output)
        return output["choices"][0]["text"]
    except Exception as e:
        print(e)
        return "ERROR 500: Server error. Check logs!!"
demo = gr.Interface(
    fn=greet,
    inputs=[gr.Text("Hello, how are you?"), gr.Number(64), gr.Textbox()],
    outputs=["text"],
    description=desc,
    title=title,
    allow_flagging="never"
).queue()
if __name__ == "__main__":
    demo.launch()
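    # When serving from inside a container, you will likely need to bind to all
    # interfaces, e.g. demo.launch(server_name="0.0.0.0", server_port=7860);
    # adjust the port to match your Docker mapping.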