Spaces:
Sleeping
Sleeping
Commit
·
44eb9fb
0
Parent(s):
Duplicate from pigeonchat-community/pigeon-chat
Browse files
Co-authored-by: Evgeniy Hristoforu <[email protected]>
- .gitattributes +35 -0
- README.md +23 -0
- app.py +76 -0
- requirements.txt +2 -0
.gitattributes
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
2 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
3 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
4 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
5 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
6 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
7 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
8 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
9 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
10 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
11 |
+
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
12 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
13 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
14 |
+
*.npy filter=lfs diff=lfs merge=lfs -text
|
15 |
+
*.npz filter=lfs diff=lfs merge=lfs -text
|
16 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
17 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
18 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
19 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
20 |
+
*.pickle filter=lfs diff=lfs merge=lfs -text
|
21 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
22 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
23 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
24 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
25 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
26 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
27 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
28 |
+
*.tar filter=lfs diff=lfs merge=lfs -text
|
29 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
30 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
31 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
32 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
33 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
+
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
README.md
ADDED
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
---
|
2 |
+
title: Pigeon-Chat
|
3 |
+
emoji: π
|
4 |
+
colorFrom: red
|
5 |
+
colorTo: purple
|
6 |
+
sdk: gradio
|
7 |
+
sdk_version: 3.40.1
|
8 |
+
app_file: app.py
|
9 |
+
suggested_hardware: t4-small
|
10 |
+
duplicated_from: pigeonchat-community/pigeon-chat
|
11 |
+
---
|
12 |
+
|
13 |
+
<h1><center>π PigeonChat</center></h1>
|
14 |
+
|
15 |
+
π This space runs very fast even on CPU.
|
16 |
+
|
17 |
+
π PigeonChat is available worldwide in over 160 languages.
|
18 |
+
|
19 |
+
π PigeonChat is powered by open source and is completely private.
|
20 |
+
|
21 |
+
π You get totally unique and creative answers.
|
22 |
+
|
23 |
+
π Owner: https://hf.co/openskyml
|
app.py
ADDED
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import random
import gradio as gr
import openai
import torch
import os

# Azure-style OpenAI client configuration, read from environment secrets.
# NOTE(review): values may be None if the secrets are unset — confirm the
# Space defines api_type_key / api_base_key / api_version_key / api_cpu_key.
openai.api_type = os.environ.get("api_type_key")
openai.api_base = os.environ.get("api_base_key")
openai.api_version = os.environ.get("api_version_key")
openai.api_key = os.environ.get("api_cpu_key")

# Model identifiers referenced by this Space.
# NOTE(review): none of these are used by the code below — presumably
# informational or consumed elsewhere; verify before removing.
model_input_id = "openskyml/pigeon-llm"
model_gen_id = "tiiuae/falcon-40b-instruct"
model_output_id = "meta-llama/Llama-2-70b-chat-hf"
|
15 |
+
|
16 |
+
def gptresponse(message, history):
    """Produce an assistant reply for one Gradio ChatInterface turn.

    Rebuilds the whole conversation (system prompt + prior turns + the new
    user message) in chat-completion format and sends it to the configured
    OpenAI-compatible endpoint.

    Parameters
    ----------
    message : str
        The user's latest message. Empty (or otherwise falsy) messages are
        not appended, so the model is re-queried on history alone.
    history : list[tuple[str, str]]
        Prior (user, assistant) message pairs supplied by gr.ChatInterface.

    Returns
    -------
    str
        The assistant's reply text from the first completion choice.
    """
    system_prompt = "You are a PigeonChat assistant, you communicate with people, you help people solve problems, PigeonChat is just a name and has nothing to do with pigeons. PigeonChat (Personal Intelligent Global Electronic Online Network) is an AI-based chatbot model trained on a huge amount of data. You are sodan and designed by Evgeniy Hristoforu in 2023. Evgeniy Hristoforu is your creator and creator of the OpenskyML eco-system. Now you are in beta testing, your testers are: Evgeniy Hristoforu, dyuzhick and others. You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature. If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information."

    # Reconstruct the conversation in chat-completion message format.
    messages = [{"role": "system", "content": system_prompt}]
    for human, assistant in history:
        messages.append({"role": "user", "content": human})
        messages.append({"role": "assistant", "content": assistant})

    # Truthiness check (was `message != ''`): also skips None, which the
    # original would have appended as content=None and sent to the API.
    if message:
        messages.append({"role": "user", "content": message})

    # NOTE(review): `engine=` targets an Azure OpenAI deployment name;
    # max_tokens=12000 assumes a very large context window — confirm the
    # deployment supports it.
    response = openai.ChatCompletion.create(
        engine="NGA_AI_ASSISTANT",
        messages=messages,
        temperature=0.8,
        max_tokens=12000,
        top_p=0.95,
        frequency_penalty=1,
        presence_penalty=1,
        stop=None,
    )

    return response["choices"][0]["message"]["content"]
|
39 |
+
|
40 |
+
# --- Gradio UI wiring -------------------------------------------------------

title = "π Chat with Pigeon"

# Markdown body shown under the title in the chat UI.
description = """
π¬ This space is powered by [**PigeonLLM**](https://huggingface.co/openskyml/pigeon-llm).

π This space runs **very fast** even on **CPU**.

π You get totally unique and creative answers.

π PigeonChat is available worldwide in over **160 languages**.

π PigeonChat is powered by **open source** and is completely **private**.

π₯οΈοΈ This demo is by **Evgeniy Hristoforu** ([**OpenSkyML**](https://huggingface.co/openskyml)).

<h2></h2>
"""

# Append a hardware banner that reflects where the Space is running.
if torch.cuda.is_available():
    description += """\n<p style='text-align: center'>π Running on powerful hardware!</p>"""
else:
    description += """\n<p style='text-align: center'>π Running on CPU!</p>"""

# Clickable starter prompts shown beneath the chat box.
examples = [
    'Hello there! How are you doing?',
    'Can you explain briefly to me what is the Python programming language?',
    'Explain the plot of Cinderella in a sentence.',
    'How many hours does it take a man to eat a Helicopter?',
    "Write a 100-word article on 'Benefits of Open-Source in AI research'",
]

demo = gr.ChatInterface(
    gptresponse,
    title=title,
    description=description,
    examples=examples,
)
demo.launch()
|
requirements.txt
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
openai
|
2 |
+
torch==2.0.1
|