Samuel L Meyers committed on
Commit 5ee6253 · 1 Parent(s): 3137101
Files changed (1)
  1. code/app.py +53 -28
code/app.py CHANGED
@@ -17,46 +17,71 @@ model_path = "./starling-lm-7b-alpha.Q6_K.gguf"
 mdlpath = hf_hub_download(repo_id="TheBloke/Starling-LM-7B-alpha-GGUF", filename=model_path, local_dir="./")
 
 lcpp_model = Llama(model_path=model_path)
-
-global otxt
+global otxt, txtinput, txtoutput
 otxt = ""
 
-def m3b_talk(text):
-    global otxt
-    resp = ""
-    if otxt != "":
-        otxt += "\n\n"
-    otxt += "<User>: " + text
-    otxt += "\n\n<Assistant>: "
-    formattedQuery = "GPT4 User: " + text + "<|end_of_text|>GPT4 Assistant:"
-    r = lcpp_model(formattedQuery, stop=["GPT4 User:", "<|end_of_text|>", "<|im_sep|>", "\n\n"], echo=True, stream=True)
-    rfq = False
-    for c in r:
-        otxt += c["choices"][0]["text"]
-        if formattedQuery in otxt and not rfq:
-            otxt.replace(formattedQuery, "")
-            rfq = True
-        else:
-            yield otxt
-    print(resp)
-    return otxt
-    #return resp.replace(formattedQuery, "")
+def stowtext(curr, inp):
+    curr.append({
+        "role": "user",
+        "content": inp,
+    })
+    return [curr, curr]
+
+def stowchunk(curr, inp):
+    first = curr[-1]["role"] == "user"
+    if first:
+        curr.append({
+            "role": "assistant",
+            "content": inp,
+        })
+    else:
+        curr[-1]["content"] += inp
+    return curr
+
+def printfmt(jsn):
+    txt = ""
+    for msg in jsn:
+        if msg["role"] == "user":
+            txt += "<User>: " + msg["content"] + "\n"
+        elif msg["role"] == "assistant":
+            txt += "<Assistant>: " + msg["content"] + "\n"
+        elif msg["role"] == "system":
+            txt += "# " + msg["content"] + "\n\n"
+    return txt
+
+def talk(txt):
+    result = lcpp_model.create_chat_completion(messages=txt, stop=["</s>", "<|end_of_text|>", "GPT4 User: ", "<|im_sep|>", "\n\n"], stream=True)
+    for r in result:
+        txt2 = None
+        if "content" in r["choices"][0]["delta"]:
+            txt2 = r["choices"][0]["delta"]["content"]
+            if txt2.startswith("\n"):
+                txt2 = txt2[1:]
+        if txt2 is not None:
+            txt = stowchunk(txt, txt2)
+            yield [printfmt(txt), txt]
+    yield [printfmt(txt), txt]
 
 def main():
-    global otxt
+    global otxt, txtinput
     logging.basicConfig(level=logging.INFO)
 
     with gr.Blocks() as demo:
         with gr.Row(variant="panel"):
-            gr.Markdown("## Talk to Starling 7B\n")
+            gr.Markdown("## Talk to Deepseek LLM Chat 7b!\n")
+        with gr.Row(variant="panel"):
+            talk_output = gr.Textbox()
         with gr.Row(variant="panel"):
-            m3b_talk_output = gr.Textbox()
+            txtinput = gr.Textbox(label="Message", placeholder="Type something here...")
         with gr.Row(variant="panel"):
-            m3b_talk_input = gr.Textbox(label="Message", placeholder="Type something here...")
+            talk_btn = gr.Button("Send")
         with gr.Row(variant="panel"):
-            m3b_talk_btn = gr.Button("Send")
+            jsn = gr.JSON(visible=False, value="[]")
+            jsn2 = gr.JSON(visible=False, value="[]")
 
-        m3b_talk_btn.click(m3b_talk, inputs=m3b_talk_input, outputs=m3b_talk_output, api_name="talk_m3b")
+        talk_btn.click(stowtext, inputs=[jsn2, txtinput], outputs=[jsn, jsn2], api_name="talk")
+        talk_btn.click(lambda x: gr.update(value=""), inputs=txtinput, outputs=txtinput)
+        jsn.change(talk, inputs=jsn, outputs=[talk_output, jsn2], api_name="talk")
 
     demo.queue().launch(server_name="0.0.0.0", server_port=7860, share=True)
 
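
Note: the following is a minimal, illustrative sketch (not part of the commit) of how the new message-list pipeline fits together outside Gradio. It assumes the helpers introduced above (stowtext, stowchunk, printfmt) are importable from app.py, that the GGUF file has already been downloaded, and that llama-cpp-python's streaming chunks follow the OpenAI-style choices[0]["delta"] format that talk relies on.

    from llama_cpp import Llama

    # Hypothetical standalone driver; mirrors what talk_btn.click -> jsn.change does in the UI.
    lcpp_model = Llama(model_path="./starling-lm-7b-alpha.Q6_K.gguf")

    history = []                              # plays the role of the hidden gr.JSON state
    history, _ = stowtext(history, "Hello!")  # stowtext returns [curr, curr] for the two JSON outputs

    for chunk in lcpp_model.create_chat_completion(messages=history, stream=True):
        delta = chunk["choices"][0]["delta"]
        if "content" in delta:                # the first chunk usually carries only the role
            history = stowchunk(history, delta["content"])

    print(printfmt(history))                  # renders "<User>: ..." / "<Assistant>: ..." lines

In the Space itself the same loop runs inside talk: the Send button first appends the user message to the hidden jsn/jsn2 JSON components via stowtext, the jsn.change event then streams the assistant reply into talk_output, and the finished history is written back to jsn2 for the next turn.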