import openai
import gradio as gr

openai.api_key = "sk-R3HlMsYBk0NpAlLu2aA4B19054Ea4884A2Cf93D25662243d"
openai.api_base="https://apai.zyai.online/v1"

def predict(message, history):
    # Convert gradio's (user, assistant) history pairs into the OpenAI messages format.
    history_openai_format = []
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})

    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",  # name of the chat model
        messages=history_openai_format,
        temperature=1,  # 0 to 2; higher values make the reply more random
        max_tokens=600,  # maximum number of tokens in the reply
        top_p=1,
        frequency_penalty=0,  # -2 to 2; higher values penalize repeating the same tokens
        presence_penalty=0,  # -2 to 2; higher values encourage introducing new topics
    )
    yield response.choices[0]['message']['content']
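
# Optional: a minimal streaming sketch of the same call, assuming the legacy
# openai<1.0 SDK configured above. gr.ChatInterface displays each progressively
# longer yielded string as a typing effect; pass predict_stream instead of
# predict in the ChatInterface call below to try it.
def predict_stream(message, history):
    messages = []
    for human, assistant in history:
        messages.append({"role": "user", "content": human})
        messages.append({"role": "assistant", "content": assistant})
    messages.append({"role": "user", "content": message})

    stream = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=messages,
        temperature=1,
        max_tokens=600,
        stream=True,  # receive the reply as incremental delta chunks
    )
    partial = ""
    for chunk in stream:
        delta = chunk.choices[0].delta
        if delta.get("content"):
            partial += delta["content"]
            yield partial  # update the chat bubble with the text so far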

gr.ChatInterface(predict).queue().launch()