"""
Main.py
"""
import asyncio
import importlib
import json
import os
import random
import re
import ssl
import time
import uuid
from enum import Enum
from multiprocessing import Process, Pipe
from typing import AsyncGenerator, Literal, Optional, Union

import certifi
import httpx
import websockets.client as websockets
from prompt_toolkit import PromptSession
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
from prompt_toolkit.completion import WordCompleter
from prompt_toolkit.history import InMemoryHistory
from prompt_toolkit.key_binding import KeyBindings

from toolbox import update_ui, get_conf
DELIMITER = "\x1e"
# Generate a random IP within the 13.104.0.0/14 range (spoofed via x-forwarded-for)
FORWARDED_IP = (
f"13.{random.randint(104, 107)}.{random.randint(0, 255)}.{random.randint(0, 255)}"
)
HEADERS = {
"accept": "application/json",
"accept-language": "en-US,en;q=0.9",
"content-type": "application/json",
"sec-ch-ua": '"Not_A Brand";v="99", "Microsoft Edge";v="110", "Chromium";v="110"',
"sec-ch-ua-arch": '"x86"',
"sec-ch-ua-bitness": '"64"',
"sec-ch-ua-full-version": '"109.0.1518.78"',
"sec-ch-ua-full-version-list": '"Chromium";v="110.0.5481.192", "Not A(Brand";v="24.0.0.0", "Microsoft Edge";v="110.0.1587.69"',
"sec-ch-ua-mobile": "?0",
"sec-ch-ua-model": "",
"sec-ch-ua-platform": '"Windows"',
"sec-ch-ua-platform-version": '"15.0.0"',
"sec-fetch-dest": "empty",
"sec-fetch-mode": "cors",
"sec-fetch-site": "same-origin",
"x-ms-client-request-id": str(uuid.uuid4()),
"x-ms-useragent": "azsdk-js-api-client-factory/1.0.0-beta.1 core-rest-pipeline/1.10.0 OS/Win32",
"Referer": "https://www.bing.com/search?q=Bing+AI&showconv=1&FORM=hpcodx",
"Referrer-Policy": "origin-when-cross-origin",
"x-forwarded-for": FORWARDED_IP,
}
HEADERS_INIT_CONVER = {
"authority": "edgeservices.bing.com",
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
"accept-language": "en-US,en;q=0.9",
"cache-control": "max-age=0",
"sec-ch-ua": '"Chromium";v="110", "Not A(Brand";v="24", "Microsoft Edge";v="110"',
"sec-ch-ua-arch": '"x86"',
"sec-ch-ua-bitness": '"64"',
"sec-ch-ua-full-version": '"110.0.1587.69"',
"sec-ch-ua-full-version-list": '"Chromium";v="110.0.5481.192", "Not A(Brand";v="24.0.0.0", "Microsoft Edge";v="110.0.1587.69"',
"sec-ch-ua-mobile": "?0",
"sec-ch-ua-model": '""',
"sec-ch-ua-platform": '"Windows"',
"sec-ch-ua-platform-version": '"15.0.0"',
"sec-fetch-dest": "document",
"sec-fetch-mode": "navigate",
"sec-fetch-site": "none",
"sec-fetch-user": "?1",
"upgrade-insecure-requests": "1",
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.69",
"x-edge-shopping-flag": "1",
"x-forwarded-for": FORWARDED_IP,
}
ssl_context = ssl.create_default_context()
ssl_context.load_verify_locations(certifi.where())
class NotAllowedToAccess(Exception):
pass
class ConversationStyle(Enum):
creative = "h3imaginative,clgalileo,gencontentv3"
balanced = "galileo"
precise = "h3precise,clgalileo"
CONVERSATION_STYLE_TYPE = Optional[
Union[ConversationStyle, Literal["creative", "balanced", "precise"]]
]
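# Both spellings are accepted downstream: conversation_style="balanced" and
# conversation_style=ConversationStyle.balanced behave identically, because
# _ChatHubRequest.update() normalizes the string form via getattr().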
def _append_identifier(msg: dict) -> str:
    """
    Serializes the message to JSON and appends the 0x1E record separator,
    which marks the end of each message on the ChatHub websocket.
    """
    return json.dumps(msg) + DELIMITER
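# For example, the handshake frame sent by _ChatHub._initial_handshake below:
#   _append_identifier({"protocol": "json", "version": 1})
#   -> '{"protocol": "json", "version": 1}\x1e'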
def _get_ran_hex(length: int = 32) -> str:
"""
Returns random hex string
"""
return "".join(random.choice("0123456789abcdef") for _ in range(length))
class _ChatHubRequest:
"""
Request object for ChatHub
"""
def __init__(
self,
conversation_signature: str,
client_id: str,
conversation_id: str,
invocation_id: int = 0,
) -> None:
self.struct: dict = {}
self.client_id: str = client_id
self.conversation_id: str = conversation_id
self.conversation_signature: str = conversation_signature
self.invocation_id: int = invocation_id
def update(
self,
prompt: str,
conversation_style: CONVERSATION_STYLE_TYPE,
options: list | None = None,
) -> None:
"""
Updates request object
"""
if options is None:
options = [
"deepleo",
"enable_debug_commands",
"disable_emoji_spoken_text",
"enablemm",
]
if conversation_style:
if not isinstance(conversation_style, ConversationStyle):
conversation_style = getattr(ConversationStyle, conversation_style)
options = [
"nlu_direct_response_filter",
"deepleo",
"disable_emoji_spoken_text",
"responsible_ai_policy_235",
"enablemm",
conversation_style.value,
"dtappid",
"cricinfo",
"cricinfov2",
"dv3sugg",
]
self.struct = {
"arguments": [
{
"source": "cib",
"optionsSets": options,
"sliceIds": [
"222dtappid",
"225cricinfo",
"224locals0",
],
"traceId": _get_ran_hex(32),
"isStartOfSession": self.invocation_id == 0,
"message": {
"author": "user",
"inputMethod": "Keyboard",
"text": prompt,
"messageType": "Chat",
},
"conversationSignature": self.conversation_signature,
"participant": {
"id": self.client_id,
},
"conversationId": self.conversation_id,
},
],
"invocationId": str(self.invocation_id),
"target": "chat",
"type": 4,
}
self.invocation_id += 1
class _Conversation:
"""
Conversation API
"""
def __init__(
self,
cookies: dict,
proxy: str | None = None,
) -> None:
self.struct: dict = {
"conversationId": None,
"clientId": None,
"conversationSignature": None,
"result": {"value": "Success", "message": None},
}
self.proxy = proxy
proxy = (
proxy
or os.environ.get("all_proxy")
or os.environ.get("ALL_PROXY")
or os.environ.get("https_proxy")
or os.environ.get("HTTPS_PROXY")
or None
)
if proxy is not None and proxy.startswith("socks5h://"):
proxy = "socks5://" + proxy[len("socks5h://") :]
self.session = httpx.Client(
proxies=proxy,
timeout=30,
headers=HEADERS_INIT_CONVER,
)
for cookie in cookies:
self.session.cookies.set(cookie["name"], cookie["value"])
# Send GET request
response = self.session.get(
url=os.environ.get("BING_PROXY_URL")
or "https://edgeservices.bing.com/edgesvc/turing/conversation/create",
)
if response.status_code != 200:
response = self.session.get(
"https://edge.churchless.tech/edgesvc/turing/conversation/create",
)
if response.status_code != 200:
print(f"Status code: {response.status_code}")
print(response.text)
print(response.url)
raise Exception("Authentication failed")
        try:
            self.struct = response.json()
        except json.decoder.JSONDecodeError as exc:
            raise Exception(
                "Authentication failed. You have not been accepted into the beta.",
            ) from exc
if self.struct["result"]["value"] == "UnauthorizedRequest":
raise NotAllowedToAccess(self.struct["result"]["message"])
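# The conversation/create endpoint is expected to return JSON in the shape
# mirrored by self.struct above:
#   {"conversationId": ..., "clientId": ..., "conversationSignature": ...,
#    "result": {"value": "Success", "message": None}}
# The three identifiers are exactly what _ChatHub needs to start a session.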
class _ChatHub:
"""
Chat API
"""
def __init__(self, conversation: _Conversation) -> None:
self.wss: websockets.WebSocketClientProtocol | None = None
self.request: _ChatHubRequest
self.loop: bool
self.task: asyncio.Task
self.request = _ChatHubRequest(
conversation_signature=conversation.struct["conversationSignature"],
client_id=conversation.struct["clientId"],
conversation_id=conversation.struct["conversationId"],
)
    async def ask_stream(
        self,
        prompt: str,
        wss_link: str,
        conversation_style: CONVERSATION_STYLE_TYPE = None,
        raw: bool = False,
        options: dict | None = None,
    ) -> AsyncGenerator[tuple, None]:
        """
        Ask a question to the bot; yields (final, response) tuples.
        """
        # Close any stale websocket before opening a new one
        if self.wss and not self.wss.closed:
            await self.wss.close()
self.wss = await websockets.connect(
wss_link,
extra_headers=HEADERS,
max_size=None,
ssl=ssl_context,
)
await self._initial_handshake()
# Construct a ChatHub request
self.request.update(
prompt=prompt,
conversation_style=conversation_style,
options=options,
)
# Send request
await self.wss.send(_append_identifier(self.request.struct))
final = False
while not final:
objects = str(await self.wss.recv()).split(DELIMITER)
for obj in objects:
                if not obj:
continue
response = json.loads(obj)
if response.get("type") != 2 and raw:
yield False, response
elif response.get("type") == 1 and response["arguments"][0].get(
"messages",
):
resp_txt = response["arguments"][0]["messages"][0]["adaptiveCards"][
0
]["body"][0].get("text")
yield False, resp_txt
elif response.get("type") == 2:
final = True
yield True, response
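    # Message protocol, as handled above: type-1 frames carry incremental
    # updates (the streamed text lives at
    # arguments[0].messages[0].adaptiveCards[0].body[0].text), while a type-2
    # frame carries the final, complete answer for the turn.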
async def _initial_handshake(self) -> None:
await self.wss.send(_append_identifier({"protocol": "json", "version": 1}))
await self.wss.recv()
async def close(self) -> None:
"""
Close the connection
"""
if self.wss and not self.wss.closed:
await self.wss.close()
class Chatbot:
"""
Combines everything to make it seamless
"""
    def __init__(
        self,
        cookies: dict | None = None,
        proxy: str | None = None,
        cookie_path: str | None = None,
    ) -> None:
if cookies is None:
cookies = {}
if cookie_path is not None:
try:
with open(cookie_path, encoding="utf-8") as f:
self.cookies = json.load(f)
except FileNotFoundError as exc:
raise FileNotFoundError("Cookie file not found") from exc
else:
self.cookies = cookies
self.proxy: str | None = proxy
self.chat_hub: _ChatHub = _ChatHub(
_Conversation(self.cookies, self.proxy),
)
    async def ask(
        self,
        prompt: str,
        wss_link: str = "wss://sydney.bing.com/sydney/ChatHub",
        conversation_style: CONVERSATION_STYLE_TYPE = None,
        options: dict | None = None,
    ) -> dict:
        """
        Ask a question and return only the final response.
        """
async for final, response in self.chat_hub.ask_stream(
prompt=prompt,
conversation_style=conversation_style,
wss_link=wss_link,
options=options,
):
if final:
return response
await self.chat_hub.wss.close()
return None
    async def ask_stream(
        self,
        prompt: str,
        wss_link: str = "wss://sydney.bing.com/sydney/ChatHub",
        conversation_style: CONVERSATION_STYLE_TYPE = None,
        raw: bool = False,
        options: dict | None = None,
    ) -> AsyncGenerator[tuple, None]:
        """
        Ask a question and stream (final, response) tuples as they arrive.
        """
async for response in self.chat_hub.ask_stream(
prompt=prompt,
conversation_style=conversation_style,
wss_link=wss_link,
raw=raw,
options=options,
):
yield response
async def close(self) -> None:
"""
Close the connection
"""
await self.chat_hub.close()
async def reset(self) -> None:
"""
Reset the conversation
"""
await self.close()
        self.chat_hub = _ChatHub(_Conversation(self.cookies, self.proxy))
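# A minimal usage sketch of the Chatbot class (illustrative only; assumes a
# cookies.json exported from a logged-in bing.com browser session):
#
#   async def demo():
#       bot = Chatbot(cookie_path="cookies.json")
#       answer = await bot.ask(prompt="Hello", conversation_style="balanced")
#       await bot.close()
#       return answer
#
#   print(asyncio.run(demo()))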
async def _get_input_async(
session: PromptSession = None,
completer: WordCompleter = None,
) -> str:
"""
Multiline input function.
"""
return await session.prompt_async(
completer=completer,
multiline=True,
auto_suggest=AutoSuggestFromHistory(),
)
def _create_session() -> PromptSession:
kb = KeyBindings()
@kb.add("enter")
def _(event):
buffer_text = event.current_buffer.text
if buffer_text.startswith("!"):
event.current_buffer.validate_and_handle()
else:
event.current_buffer.insert_text("\n")
@kb.add("escape")
def _(event):
if event.current_buffer.complete_state:
# event.current_buffer.cancel_completion()
event.current_buffer.text = ""
return PromptSession(key_bindings=kb, history=InMemoryHistory())
def _create_completer(commands: list, pattern_str: str = "$"):
return WordCompleter(words=commands, pattern=re.compile(pattern_str))
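# Note: _get_input_async, _create_session and _create_completer are not
# referenced anywhere in this module; they appear to be left over from the
# upstream EdgeGPT command-line client.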
load_message = ""
#################################################################################
# Part 2: subprocess worker and the gpt-academic bridge interface
#################################################################################
class GetNewBingHandle(Process):
    def __init__(self):
        super().__init__(daemon=True)
        self.parent, self.child = Pipe()
        self.newbing_model = None  # the Chatbot instance, created lazily in run()
        self.info = ""
        self.success = True
        self.check_dependency()
        self.start()
    def check_dependency(self):
        try:
            import rich
            self.info = "Dependency check passed."
            self.success = True
        except ImportError:
            self.info = ("Missing dependencies. To use Newbing, besides the base pip requirements, "
                         "you also need to run `pip install -r request_llm/requirements_newbing.txt` "
                         "to install Newbing's dependencies.")
            self.success = False
    def ready(self):
        return self.newbing_model is not None
    async def async_run(self, question):
        async for final, response in self.newbing_model.ask_stream(
            prompt=question,
            conversation_style="balanced",  # one of ["creative", "balanced", "precise"]
            wss_link="wss://sydney.bing.com/sydney/ChatHub",
        ):
            if not final:
                self.child.send(response)
                print(response)
    def run(self):
        # First run: create the Chatbot instance
        retry = 0
        while True:
            try:
                if self.newbing_model is None:
                    proxies, = get_conf('proxies')
                    newbing_cookies, = get_conf('newbing_cookies')
                    cookies = json.loads(newbing_cookies)
                    self.newbing_model = Chatbot(proxy=proxies['https'], cookies=cookies)
                break
            except Exception:
                retry += 1
                if retry > 3:
                    self.child.send('[Local Message] Failed to load the Newbing component.')
                    raise RuntimeError("Failed to load the Newbing component.")
        # Wait for incoming tasks and serve them one at a time
        while True:
            kwargs = self.child.recv()
            try:
                asyncio.run(self.async_run(question=kwargs['query']))
            except Exception:
                self.child.send('[Local Message] Newbing request failed.')
            self.child.send('[Finish]')
    def stream_chat(self, **kwargs):
        # Forward the request to the child process, then relay streamed chunks
        # until the '[Finish]' sentinel arrives.
        self.parent.send(kwargs)
        while True:
            res = self.parent.recv()
            if res == '[Finish]':
                break
            yield res
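# Parent-side protocol sketch (illustrative only): send a kwargs dict through
# the Pipe, then iterate until the '[Finish]' sentinel:
#
#   handle = GetNewBingHandle()
#   for partial in handle.stream_chat(query="Hello"):
#       print(partial)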
newbing_handle = None
#################################################################################
def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="", observe_window=None, console_slience=False):
    """
    Multi-threaded entry point. See request_llm/bridge_all.py for the
    function's documentation.
    """
    global newbing_handle
    if newbing_handle is None:
        newbing_handle = GetNewBingHandle()
        observe_window[0] = load_message + "\n\n" + newbing_handle.info
        if not newbing_handle.success:
            error = newbing_handle.info
            newbing_handle = None
            raise RuntimeError(error)

    # Newbing exposes no sys_prompt interface, so fold the system prompt into the history
    history_feedin = []
    history_feedin.append(["What can I do?", sys_prompt])
    for i in range(len(history)//2):
        history_feedin.append([history[2*i], history[2*i+1]])

    watch_dog_patience = 5  # watchdog patience: 5 seconds is enough
    response = ""
    for response in newbing_handle.stream_chat(query=inputs, history=history_feedin, max_length=llm_kwargs['max_length'], top_p=llm_kwargs['top_p'], temperature=llm_kwargs['temperature']):
        observe_window[0] = response
        if len(observe_window) >= 2:
            if (time.time()-observe_window[1]) > watch_dog_patience:
                raise RuntimeError("Program terminated by watchdog.")
    return response
def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt='', stream=True, additional_fn=None):
    """
    Single-threaded entry point. See request_llm/bridge_all.py for the
    function's documentation.
    """
    chatbot.append((inputs, ""))

    global newbing_handle
    if newbing_handle is None:
        newbing_handle = GetNewBingHandle()
        chatbot[-1] = (inputs, load_message + "\n\n" + newbing_handle.info)
        yield from update_ui(chatbot=chatbot, history=[])
        if not newbing_handle.success:
            newbing_handle = None
            return

    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # hot-reload the prompt definitions
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]:
            # apply the pre-processing function, if one is defined
            inputs = core_functional[additional_fn]["PreProcess"](inputs)
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]

    history_feedin = []
    history_feedin.append(["What can I do?", system_prompt])
    for i in range(len(history)//2):
        history_feedin.append([history[2*i], history[2*i+1]])

    response = ""
    for response in newbing_handle.stream_chat(query=inputs, history=history_feedin, max_length=llm_kwargs['max_length'], top_p=llm_kwargs['top_p'], temperature=llm_kwargs['temperature']):
        chatbot[-1] = (inputs, response)
        yield from update_ui(chatbot=chatbot, history=history)

    history.extend([inputs, response])
    yield from update_ui(chatbot=chatbot, history=history)