#############################################################################################################
# Title: Gradio Interface to LLM-chatbot (for recommending AI) with RAG functionality and ChromaDB on HF-Hub
# Author: Andreas Fischer
# Date: December 30th, 2023
# Last update: February 27th, 2024
##############################################################################################################
# Chroma-DB
#-----------
import os
import re  # needed for re.sub in extend_prompt below
import chromadb

dbPath = "/home/af/Schreibtisch/gradio/Chroma/db"  # local development path
if not os.path.exists(dbPath):  # fall back to the Hugging Face Space path
    dbPath = "/home/user/app/db"
print(dbPath)

#dbClient = chromadb.Client()  # in-memory alternative
dbClient = chromadb.PersistentClient(path=dbPath)  # named dbClient to avoid clashing with the InferenceClient below
print(dbClient.heartbeat())
print(dbClient.get_version())
print(dbClient.list_collections())
from chromadb.utils import embedding_functions
default_ef = embedding_functions.DefaultEmbeddingFunction()
sentence_transformer_ef = embedding_functions.SentenceTransformerEmbeddingFunction(model_name="T-Systems-onsite/cross-en-de-roberta-sentence-transformer")
#instructor_ef = embedding_functions.InstructorEmbeddingFunction(model_name="hkunlp/instructor-large", device="cuda")
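
# Note: the cross-en-de-roberta sentence-transformer embeds English and German
# sentences into a shared vector space, so German user queries can match the
# English tool descriptions stored in the collection below.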
global collection
if "ChromaDB1" in [c.name for c in dbClient.list_collections()]:
    print("ChromaDB1 found!")
    collection = dbClient.get_collection(name="ChromaDB1", embedding_function=sentence_transformer_ef)
else:
    print("ChromaDB1 created!")
    collection = dbClient.create_collection(
        "ChromaDB1",
        embedding_function=sentence_transformer_ef,
        metadata={"hnsw:space": "cosine"})
    collection.add(
        documents=[
            "Text generating AI model mistralai/Mixtral-8x7B-Instruct-v0.1: Suitable for text generation, e.g., social media content, marketing copy, blog posts, short stories, etc.",
            "Image generating AI model stabilityai/sdxl-turbo: Suitable for image generation, e.g., illustrations, graphics, AI art, etc.",
            "Audio transcribing AI model openai/whisper-large-v3: Suitable for audio-transcription in different languages",
            "Speech synthesizing AI model coqui/XTTS-v2: Suitable for generating audio from text and for voice-cloning",
            "Code generating AI model deepseek-ai/deepseek-coder-6.7b-instruct: Suitable for programming in Python, JavaScript, PHP, Bash and many other programming languages.",
            "Translation AI model Helsinki-NLP/opus-mt: Suitable for translating text, e.g., from English to German or vice versa",
            "Search result-integrating AI model phind/phind-v9-model: Suitable for researching current topics and for obtaining precise and up-to-date answers to questions based on web search results"
        ],
        metadatas=[{"source": "AF"}, {"source": "AF"}, {"source": "AF"}, {"source": "AF"}, {"source": "AF"}, {"source": "AF"}, {"source": "AF"}],
        ids=["ai1", "ai2", "ai3", "ai4", "ai5", "ai6", "ai7"],
    )
print("Database ready!")
print(collection.count())
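
# Illustrative example of querying the collection directly (hypothetical query text):
#   collection.query(query_texts=["Ich möchte ein Bild generieren"], n_results=2)
# returns the two documents whose embeddings are closest to the query.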
# Model
#-------
from huggingface_hub import InferenceClient

modelPath = "mistralai/Mixtral-8x7B-Instruct-v0.1"
client = InferenceClient(
    modelPath
    #"mistralai/Mistral-7B-Instruct-v0.1"
)
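
# The InferenceClient sends requests to the Hugging Face Inference API;
# client.text_generation(..., stream=True) below yields tokens as they are generated.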
# Gradio-GUI
#------------
import gradio as gr
import json
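
# extend_prompt() builds a model-specific prompt string: it selects a chat template
# based on modelPath, prepends the (optionally RAG-extended) system prompt, replays
# up to `historylimit` past turns, and appends the current message.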
def extend_prompt(message="", history=None, system=None, RAGAddon=None, system2=None, zeichenlimit=None, historylimit=4, removeHTML=False):
    startOfString = ""
    if zeichenlimit is None: zeichenlimit = 1000000000  # effectively unlimited characters
    template0 = " [INST]{system}\n [/INST] </s>"
    template1 = " [INST] {message} [/INST]"
    template2 = " {response}</s>"
if("Gemma-" in modelPath): # https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1
template0="<start_of_turn>user{system}</end_of_turn>"
template1="<start_of_turn>user{message}</end_of_turn><start_of_turn>model"
template2="{response}</end_of_turn>"
if("Mixtral-8x7b-instruct" in modelPath): # https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1
startOfString="<s>"
template0=" [INST]{system}\n [/INST] </s>"
template1=" [INST] {message} [/INST]"
template2=" {response}</s>"
if("Mistral-7B-Instruct" in modelPath): #https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2
startOfString="<s>"
template0="[INST]{system}\n [/INST]</s>"
template1="[INST] {message} [/INST]"
template2=" {response}</s>"
if("Openchat-3.5" in modelPath): #https://huggingface.co/TheBloke/openchat-3.5-0106-GGUF
template0="GPT4 Correct User: {system}<|end_of_turn|>GPT4 Correct Assistant: Okay.<|end_of_turn|>"
template1="GPT4 Correct User: {message}<|end_of_turn|>GPT4 Correct Assistant: "
template2="{response}<|end_of_turn|>"
if(("Discolm_german_7b" in modelPath) or ("SauerkrautLM-7b-HerO" in modelPath)): #https://huggingface.co/VAGOsolutions/SauerkrautLM-7b-HerO
template0="<|im_start|>system\n{system}<|im_end|>\n"
template1="<|im_start|>user\n{message}<|im_end|>\n<|im_start|>assistant\n"
template2="{response}<|im_end|>\n"
if("WizardLM-13B-V1.2" in modelPath): #https://huggingface.co/WizardLM/WizardLM-13B-V1.2
template0="{system} " #<s>
template1="USER: {message} ASSISTANT: "
template2="{response}</s>"
if("Phi-2" in modelPath): #https://huggingface.co/TheBloke/phi-2-GGUF
template0="Instruct: {system}\nOutput: Okay.\n"
template1="Instruct: {message}\nOutput:"
template2="{response}\n"
prompt = ""
if RAGAddon is not None:
system += RAGAddon
if system is not None:
prompt += template0.format(system=system) #"<s>"
if history is not None:
for user_message, bot_response in history[-historylimit:]:
if user_message is None: user_message = ""
if bot_response is None: bot_response = ""
bot_response = re.sub("\n\n<details>((.|\n)*?)</details>","", bot_response) # remove RAG-compontents
if removeHTML==True: bot_response = re.sub("<(.*?)>","\n", bot_response) # remove HTML-components in general (may cause bugs with markdown-rendering)
if user_message is not None: prompt += template1.format(message=user_message[:zeichenlimit])
if bot_response is not None: prompt += template2.format(response=bot_response[:zeichenlimit])
if message is not None: prompt += template1.format(message=message[:zeichenlimit])
if system2 is not None:
prompt += system2
return startOfString+prompt
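
# Illustrative example (assuming the Mixtral templates above were selected):
#   extend_prompt("Wie geht's?", history=[("Hi", "Hallo!")], system="Sei knapp.")
# returns:
#   "<s> [INST]Sei knapp.\n [/INST] </s> [INST] Hi [/INST] Hallo!</s> [INST] Wie geht's? [/INST]"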
def response(prompt, history, temperature=0.9, max_new_tokens=500, top_p=0.95, repetition_penalty=1.0):
    temperature = float(temperature)
    if temperature < 1e-2: temperature = 1e-2
    top_p = float(top_p)
    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,
    )
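
    # RAG step: retrieve the two most similar tool descriptions from ChromaDB.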
addon=""
results=collection.query(
query_texts=[prompt],
n_results=2,
#where={"source": "google-docs"}
#where_document={"$contains":"search_string"}
)
dists=["<br><small>(relevance: "+str(round((1-d)*100)/100)+";" for d in results['distances'][0]]
sources=["source: "+s["source"]+")</small>" for s in results['metadatas'][0]]
results=results['documents'][0]
combination = zip(results,dists,sources)
combination = [' '.join(triplets) for triplets in combination]
print(combination)
    if len(results) > 1:
        # German: "If necessary, please take the following excerpts from our database into account in your
        # answer, insofar as they are relevant. Answer the question briefly and precisely. Ignore unsuitable
        # database excerpts WITHOUT commenting on, mentioning, or listing them:"
        addon = " Bitte berücksichtige bei deiner Antwort ggf. folgende Auszüge aus unserer Datenbank, sofern sie für die Antwort erforderlich sind. Beantworte die Frage knapp und präzise. Ignoriere unpassende Datenbank-Auszüge OHNE sie zu kommentieren, zu erwähnen oder aufzulisten:\n" + "\n".join(results)
    # German: "You are a German-language AI-based assistance system that recommends the most suitable
    # AI tools for any request." / "User request:"
    system = "Du bist ein deutschsprachiges KI-basiertes Assistenzsystem, das zu jedem Anliegen möglichst geeignete KI-Tools empfiehlt." + addon + "\n\nUser-Anliegen:"
    #body={"prompt":system+"### Instruktion:\n"+message+"\n\n### Antwort:","max_tokens":500, "echo":"False","stream":"True"}  # e.g. SauerkrautLM
    formatted_prompt = extend_prompt(system + "\n" + prompt, None)  # history not passed (disabled)
    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
    output = ""
    for chunk in stream:  # renamed from "response" to avoid shadowing the enclosing function
        output += chunk.token.text
        yield output
    output = output + "\n\n<br><details open><summary><strong>Sources</strong></summary><br><ul>" + "".join(["<li>" + s + "</li>" for s in combination]) + "</ul></details>"
    yield output
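
# Chat UI: since response() is a generator, gr.ChatInterface streams each partial
# output to the chatbot as it is yielded.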
# German welcome message: "A warm welcome! I am an AI-based assistance system that recommends the most
# suitable AI tools for every request. At the moment I am little more than a tech demo and know only
# 7 AI models - so please don't be too strict with me. What can I do for you?"
gr.ChatInterface(
    response,
    chatbot=gr.Chatbot(
        value=[[None, "Herzlich willkommen! Ich bin ein KI-basiertes Assistenzsystem, das für jede Anfrage die am besten geeigneten KI-Tools empfiehlt.<br>Aktuell bin ich wenig mehr als eine Tech-Demo und kenne nur 7 KI-Modelle - also sei bitte nicht zu streng mit mir.<br>Was ist dein Anliegen?"]],
        render_markdown=True),
    title="German AI-RAG-Interface to the Hugging Face Hub"
).queue().launch(share=True)  #share=False, server_name="0.0.0.0", server_port=7864
print("Interface up and running!")