import gradio as gr
from huggingface_hub import InferenceClient
from typing import Dict, Iterator, List, Tuple

# Client for the hosted inference endpoint of the GPT-BI instruct model.
client = InferenceClient("AuriLab/gpt-bi-instruct-cesar")
def format_messages(history: List[Tuple[str, str]], system_message: str, user_message: str) -> List[Dict[str, str]]:
    messages = [{"role": "system", "content": system_message}]
    # Each history turn is a (user, assistant) pair, so within a turn the
    # even index is the user message and the odd index the assistant reply.
    messages.extend([
        {"role": "user" if i % 2 == 0 else "assistant", "content": str(msg)}
        for turn in history
        for i, msg in enumerate(turn)
        if msg is not None  # skip empty slots, e.g. a turn with no reply yet
    ])
    messages.append({"role": "user", "content": str(user_message)})
    return messages
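
# Illustrative sketch (not part of the app): with
#   history = [("hello", "hi there")] and user_message = "how are you?",
# format_messages returns:
#   [{"role": "system", "content": system_message},
#    {"role": "user", "content": "hello"},
#    {"role": "assistant", "content": "hi there"},
#    {"role": "user", "content": "how are you?"}]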
def respond(message: str, history: List[Tuple[str, str]]) -> Iterator[str]:
    # Fixed generation parameters for this demo.
    system_message = "You are a helpful AI assistant."
    max_tokens = 1000
    temperature = 0.7
    top_p = 0.85

    messages = format_messages(history, system_message, message)
    response = ""

    try:
        # Stream tokens and yield the accumulated text so the chat window
        # updates incrementally.
        for msg in client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            token = getattr(msg.choices[0].delta, "content", None)
            if token is not None:
                response += token
                yield response
    except Exception as e:
        # `return` inside a generator ends it silently, so the error would
        # never reach the UI; yield it so it is displayed in the chat.
        yield f"Error: {e}"
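
# Illustrative usage (assumption: invoked directly, outside Gradio):
# respond is a generator, so iterating it yields progressively longer
# partial responses as tokens stream in.
#
#   for partial in respond("Kaixo!", history=[]):
#       print(partial)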
demo = gr.ChatInterface(
    fn=respond,
    title="Demo GPT-BI instruct",
    # Basque example prompts: "What's your name?", "How do you do?"
    examples=["nola duzu izena?", "Nola egiten duzu?"],
)
if __name__ == "__main__":
    demo.launch(share=False)
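
# Minimal sketch (assumption: run standalone, not part of this app) of a
# non-streaming call against the same endpoint, useful as a sanity check.
# Without stream=True, chat_completion returns a single completion object
# whose text lives at .choices[0].message.content.
#
#   from huggingface_hub import InferenceClient
#   check = InferenceClient("AuriLab/gpt-bi-instruct-cesar")
#   out = check.chat_completion(
#       [{"role": "user", "content": "Kaixo!"}],
#       max_tokens=64,
#   )
#   print(out.choices[0].message.content)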