llamaindex_demo / app.py
BinZhang
dftmsg
1173285
raw
history blame
1.16 kB
import os

import streamlit as st
from openai import OpenAI
# Minimal Streamlit front-end for an OpenAI-compatible InternLM endpoint.
#
# API configuration.
# SECURITY: the API key used to be hardcoded here. A secret committed to
# source control must be treated as leaked and rotated; read the key from
# the environment instead so it never lands in the repository.
base_url = "https://internlm-chat.intern-ai.org.cn/puyu/api/v1/"
api_key = os.environ.get("INTERNLM_API_KEY", "")
model = "internlm2.5-latest"

# Create the OpenAI-compatible client (Streamlit re-runs this script on
# every interaction, so this is constructed per run).
client = OpenAI(
    api_key=api_key,
    base_url=base_url,
)

# Page title.
st.title("Chat with InternLM")
# Single-question text input for the user.
user_input = st.text_input("请输入你的问题:")

# Submit button: validate, call the chat-completions API, render replies.
if st.button("发送"):
    if not api_key:
        # Fail visibly instead of sending an unauthenticated request.
        st.error("缺少 API key:请设置环境变量 INTERNLM_API_KEY。")
    elif not user_input.strip():
        # Avoid a wasted API call on an empty question.
        st.warning("请先输入问题再发送。")
    else:
        chat_rsp = client.chat.completions.create(
            model=model,
            messages=[{"role": "user", "content": user_input}],
        )
        # A response may contain several choices; show each one.
        for choice in chat_rsp.choices:
            st.write(f"回复: {choice.message.content}")