AFischer1985 committed on
Commit
8db8595
·
1 Parent(s): 32727e3

Update run.py

Browse files
Files changed (1) hide show
  1. run.py +23 -4
run.py CHANGED
@@ -2,9 +2,28 @@ import gradio as gr
2
  import requests
3
  import random
4
  import json
5
- def response(message, history):
6
- url="https://AFischer1985-CollectiveCognition-GGUF-API.hf.space/v1/completions"
7
- body={"prompt":"###Frage: "+message+" ###Antwort:","max_tokens":1000,"stop":"###","stream":True} #128
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
  response=""
9
  buffer=""
10
  print("User: "+message+"\nAI: ")
@@ -30,4 +49,4 @@ def response(message, history):
30
  pass
31
  yield response
32
 
33
- gr.ChatInterface(response,title="AI-Interface",chatbot=gr.Chatbot(height=250), description="Graphical User Interface to *CollectiveCognition-v1.1-Mistral-7B-GGUF* (Q4_0) based on the [API here](https://huggingface.co/spaces/AFischer1985/CollectiveCognition-GGUF-API)").queue().launch(share=True)
 
2
  import requests
3
  import random
4
  import json
5
+ def response(message, history,prompt_type,model):
6
+ if(model=="WizardLM-13B"):
7
+ url="https://wizardlm-13b-v1-2-q4-0-gguf.hf.space/v1/completions"
8
+ prompt_type="Alpaca"
9
+ if(model=="SauerkrautLM-7B"):
10
+ url="https://SauerkrautLM-GGUF-API.hf.space/v1/completions"
11
+ prompt_type="Alpaca"
12
+ if(model=="OpenHermes2-7B"):
13
+ url="https://AFischer1985-CollectiveCognition-GGUF-API.hf.space/v1/completions"
14
+ prompt_type="ChatML"
15
+ if(model=="CollectiveCognition-7B"):
16
+ url="https://AFischer1985-CollectiveCognition-GGUF-API.hf.space/v1/completions"
17
+ prompt_type="ChatML"
18
+ if(prompt_type=="Alpaca"):
19
+ body={"prompt":"###Frage: "+message+" ###Antwort:","max_tokens":1000,"stop":"###","stream":True} #128
20
+ if(prompt_type=="ChatML"):
21
+ body={"prompt":"""<|im_start|>system
22
+ Du bist ein deutschsprachiger KI-basierter Assistent.<|im_end|>
23
+ <|im_start|>user
24
+ """+message+"""<|im_end|>
25
+ <|im_start|>assistant
26
+ ""","max_tokens":1000,"stop":"<|im_end|>","stream":True}
27
  response=""
28
  buffer=""
29
  print("User: "+message+"\nAI: ")
 
49
  pass
50
  yield response
51
 
52
+ gr.ChatInterface(response,additional_inputs=[gr.Dropdown(["Default","Alpaca", "ChatML"],value="Default"),gr.Dropdown(["WizardLM-13B","SauerkrautLM-7B","CollectiveCognition-7B", "OpenHermes2-7B"],value="OpenHermes2-7B")]).queue().launch(share=True)