import os

import gradio as gr
from openai import OpenAI

SYSTEM_PROMPT = "You are an intelligent programming assistant named CodeGeeX. You will answer any questions users have about programming, coding, and computers, and provide code that is formatted correctly, executable, accurate, and secure, and offer detailed explanations when necessary."

# OpenAI-compatible client pointed at the ChatGLM endpoint; the key is read from the environment.
client = OpenAI(base_url="https://api.chatglm.cn/v1", api_key=os.getenv("CHATGLM_API_KEY"))


def respond(message, history: list[tuple[str, str]]):
    # Rebuild the conversation: system prompt, then alternating user/assistant turns, then the new message.
    messages = [{"role": "system", "content": SYSTEM_PROMPT}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    # Stream the completion and yield the accumulated text so the chat UI updates incrementally.
    response = ""
    for chunk in client.chat.completions.create(
        messages=messages,  # type: ignore
        model="codegeex4-9b-biz",
        stream=True,
        temperature=0.7,
        max_tokens=1024,
        top_p=0.95,
    ):
        token = chunk.choices[0].delta.content or ""  # the final chunk's delta may be None
        response += token
        yield response
with gr.Blocks(fill_height=True) as demo:
    gr.Markdown(
        """
        <p align="center" style="margin: 32px 32px 0 0;">
            <img src="https://gist.githubusercontent.com/rojas-diego/0c1b444aff2c6b6420ff635bfd206869/raw/16566317fabce71d35ab3cf8c71adf3b5dc11d87/codegeex.svg" style="width: 30%">
        </p>
        """)
    gr.Markdown(
        """
        <p align="center">
            🏠 <a href="https://codegeex.cn" target="_blank">Homepage</a> | 📃 <a href="http://keg.cs.tsinghua.edu.cn/codegeex/" target="_blank">Blog</a> | 🛠 <a href="https://marketplace.visualstudio.com/items?itemName=aminer.codegeex" target="_blank">VS Code</a> or <a href="https://plugins.jetbrains.com/plugin/20587-codegeex" target="_blank">Jetbrains</a> Extensions | 💻 <a href="https://github.com/THUDM/CodeGeeX4" target="_blank">Github</a> | 🤗 <a href="https://huggingface.co/THUDM/codegeex-4-9b" target="_blank">HuggingFace</a>
        </p>
        """)
    gr.Markdown(
        """
        <p align="center">
            We introduce CodeGeeX4 9B, a large-scale multilingual code generation model with 9 billion parameters, pre-trained on a large code corpus covering more than 300 programming languages. CodeGeeX4 9B is open source; please refer to our <a href="https://github.com/THUDM/codegeex-4-9b" target="_blank">GitHub</a> for more details. We also offer free <a href="https://marketplace.visualstudio.com/items?itemName=aminer.codegeex" target="_blank">VS Code</a> and <a href="https://plugins.jetbrains.com/plugin/20587-codegeex" target="_blank">Jetbrains</a> extensions for full functionality.
        </p>
        """)
    # Chat UI wired to the streaming respond() generator defined above.
    gr.ChatInterface(respond, fill_height=True)

if __name__ == "__main__":
    demo.launch()
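For a quick local check of the streaming logic without launching the Gradio UI, the respond generator can be drained directly. The sketch below is not part of the Space; it assumes the application above is saved as app.py and that CHATGLM_API_KEY is set in the environment, and the file name and test prompt are purely illustrative.

# smoke_test.py -- hypothetical companion script, not part of the Space.
# Assumes the app above is saved as app.py and CHATGLM_API_KEY is set.
from app import respond

# Each yielded value is the accumulated response so far; keep the last one.
final = ""
for partial in respond("Write a Python function that reverses a string.", history=[]):
    final = partial

print(final)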