'''
1. Added an internet-enabled mode to strengthen the chatbot's answers.
2. The online and offline modes share a single function and are selected via `choice`.
'''

import os
import gc

import gradio as gr
import openai
import pandas as pd
import requests
from rich import print

## The currently usable API key is stored in the Hugging Face Space secrets.
openai.api_key = os.environ['my_api_key']
os.environ["OPENAI_API_KEY"] = os.environ['my_api_key']

### Bing Search
bing_search_api_key = os.environ['bing_api_key']
bing_search_endpoint = 'https://api.bing.microsoft.com/v7.0/search' 

def clear_conversation():
    return gr.update(value=None, visible=True), None, "",
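# NOTE: clear_conversation appears intended for a "clear" control, but it is not wired
# into the gr.Interface below; using it would need a Blocks layout or an explicit button.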

def search(query):
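    """Query the Bing Web Search v7 API and return the list of web-page results.

    Each returned item is a dict carrying at least the 'name', 'url' and
    'snippet' keys that chatbot() uses below to build its prompt.
    """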
    # Construct a request
    # mkt = 'en-EN'
    mkt = 'zh-CN'
    params = {'q': query, 'mkt': mkt}
    headers = {'Ocp-Apim-Subscription-Key': bing_search_api_key}

    # Call the API
    try:
        response = requests.get(bing_search_endpoint, headers=headers, params=params)
        response.raise_for_status()
        json = response.json()
        return json["webPages"]["value"]
        # print("\nJSON Response:\n")
        # pprint(response.json())

    except Exception as e:
        raise e

messages = [
    # {"role": "system", "content": "You are a helpful and kind AI Assistant."},
    {"role": "system", "content": "你是一个专业和友好的AI助手。"},]


def chatbot(input, choice):
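    """Stream an answer to `input`, choosing the mode via `choice`.

    `choice` holds the value of the Radio component defined below:
    '联网增强模式' runs a Bing search first and answers from the top results;
    any other value uses plain ChatGPT with the running `messages` history.
    """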
    global messages  #! Making messages global lets the non-augmented (default) mode remember the conversation across calls.
    history = []
    try:
        if input and choice!='联网增强模式':
            print('start the NO internet version of ChatGPT')
            
            # messages = [
            # # {"role": "system", "content": "You are a helpful and kind AI Assistant."},
            # {"role": "system", "content": "你是一个专业和友好的AI助手。"},] ## Uncomment this block to reset messages here and turn the memory/history feature off.
            
            messages.append({"role": "user", "content": input})
            for resp in openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=messages, stream=True, max_tokens=2048,temperature=0.9):
                answer = str(resp['choices'][0]['delta'].get('content'))
                if answer != "None":
                    history.append(answer)
                    result = "".join(history).strip() #* working!
                    yield result

        elif input and choice=='联网增强模式':
            print('start the internet version of ChatGPT')

            #NOTE: Reset messages, which discards all previous conversation history.
            messages = [
                # {"role": "system", "content": "You are a helpful and kind AI Assistant."},
            {"role": "system", "content": "你是一个专业和友好的AI助手。"},
            ] 

            internet_search_result = search(input)
            search_prompt = [f"Source:\nTitle: {result['name']}\nURL: {result['url']}\nContent: {result['snippet']}" for result in internet_search_result]
            print('content:\n', search_prompt[0])
            # prompt = "Use these sources to answer the question:\n\n" + "\n\n".join(search_prompt) + "\n\nQuestion: " + input + "\n\nAnswer:"
            prompt = "Use these sources to answer the question:\n\n" + "\n\n".join(search_prompt[0:3]) + "\n\nQuestion: " + input + "\n\nAnswer:(注意:回答问题时请提示'以下答案基于互联网公开信息。')" ## Only the top 3 search results are included in the prompt.
            messages.append({"role": "user", "content": prompt})

            ## no streaming version. 
            # messages.append({"role": "user", "content": input})
            # print(input)
            # chat = openai.ChatCompletion.create(
            #     model="gpt-3.5-turbo", messages=messages
            # )
            # reply = chat.choices[0].message.content
            # messages.append({"role": "assistant", "content": reply})

            ## streaming version. typewriter effect, word by word output. 
            for resp in openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=messages, stream=True, max_tokens=2048, temperature=0.9):
                
                # sys.stdout.write(str(resp['choices'][0]['delta'].get('content'))) ## This only works in a plain terminal.
                # sys.stdout.flush()
                
                #* The block below is what works inside Gradio.
                answer = str(resp['choices'][0]['delta'].get('content'))
                if answer != "None":
                    history.append(answer)
                    result = "".join(history).strip() #* working!
                    yield result

    except Exception as e:
        print(e)
        messages = [{"role": "system", "content": "你是一个专业和友好的AI助手。"},]
        messages.append({"role": "user", "content": ""})
        yield str(e)  ## Show the error message in the output textbox.


    # return None

css = "textarea {-webkit-text-fill-color:black; -webkit-opacity: 1;}"
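## The rule above keeps textarea text black and fully opaque (presumably so the
## streamed answer stays readable in the output box).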


# auth_list = (
    
#     ('1234','1234'),
#     ('yao190476','0476'),
#     ('bluedongting','ting'),
#     ('mio','mio'),
#     ('ainachen','chen'),
#     ('wenshan','shan'),    
# )

user_csv = pd.read_csv('auth_list.csv')
auth_list = [(x, y) for (x, y) in user_csv[['username', 'password']].values]
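## auth_list.csv must provide 'username' and 'password' columns; each row becomes one
## (username, password) pair accepted by the Gradio login screen.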

## To customize the appearance, see: https://gradio.app/theming-guide/
try: 
    inputs = [gr.inputs.Textbox(lines=5, label="请输入你的问题/任务"), gr.Radio(['默认ChatGPT模式', '联网增强模式'], label="ChatGPT运行模式")] #! working. With multiple inputs, put them in a list and pass that list as `inputs`. Note: if the option labels here change, the matching strings inside chatbot() must be updated too.
    outputs = gr.Textbox(lines=11, label="ChatGPT的回答").style(show_copy_button=True)
    interface = gr.Interface(
        # fn=chatbot,
        fn=chatbot,
        inputs=inputs,
        outputs=outputs,
        title="极速版ChatGPT",
        description="_联网增强模式:可联网的ChatGPT, 使用Internet公共搜索引擎作为增强知识库。选择这个模式,信息检索范围将大幅增加,但回答质量将变得些许不稳定,且与一般搜索引擎如百度类似,回答记忆只限于一轮。_",
        theme=gr.themes.Soft(), 
        css=css, 
        )
    interface.queue(concurrency_count=100)
    interface.launch(height=500,auth=auth_list,auth_message="欢迎使用ChatGPT")

except Exception as e:
    print(e)
    messages = [{"role": "system", "content": "你是一个专业和友好的AI助手。"},]
    messages.append({"role": "user", "content": ""})
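

## To run this locally (a sketch; assumes the file is saved as app.py, the usual
## Hugging Face Space entry point):
##   export my_api_key=<your OpenAI API key>
##   export bing_api_key=<your Bing Web Search v7 API key>
##   create an auth_list.csv with 'username'/'password' columns next to the script, then:
##   python app.py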