# watsonx.ai CodeLlama chatbot served through a Gradio web UI.
# (Removed scrape residue: "Spaces:" / "Runtime error" lines were page
# artifacts from the hosting UI, not part of the program.)
# Import necessary packages
import os

from ibm_watsonx_ai import Credentials
from ibm_watsonx_ai import APIClient
from ibm_watsonx_ai.foundation_models import Model, ModelInference
from ibm_watsonx_ai.foundation_models.schema import TextChatParameters
from ibm_watsonx_ai.metanames import GenTextParamsMetaNames
import gradio as gr

# SECURITY: the original file hard-coded a live IBM Cloud API key and project
# id in source. Credentials must come from the environment; rotate the leaked
# key. Set WATSONX_APIKEY and WATSONX_PROJECT_ID before launching.
watsonx_API = os.environ.get("WATSONX_APIKEY", "")
project_id = os.environ.get("WATSONX_PROJECT_ID", "")

# Credentials for the watsonx.ai Sydney-region endpoint.
credentials = {
    "url": "https://au-syd.ml.cloud.ibm.com",
    "apikey": watsonx_API,
}

# Model and generation settings: CodeLlama 7B Instruct, low temperature for
# more deterministic code-oriented answers, up to 1024 completion tokens.
model_id = "codellama/CodeLlama-7b-Instruct-hf"
params = TextChatParameters(
    temperature=0.1,
    max_tokens=1024,
)

# Initialize the inference client bound to the project.
model = ModelInference(
    model_id=model_id,
    credentials=credentials,
    project_id=project_id,
    params=params,
)
# Function to generate a response from the model | |
def generate_response(prompt_txt):
    """Send ``prompt_txt`` to the model as a single-turn chat and return the reply text.

    Parameters:
        prompt_txt (str): The user's question or prompt.

    Returns:
        str: The assistant message content of the first choice.
    """
    # Single user turn in the chat-message schema expected by ModelInference.chat.
    messages = [
        {
            "role": "user",
            "content": [
                {
                    "type": "text",
                    "text": prompt_txt,
                },
            ],
        }
    ]
    generated_response = model.chat(messages=messages)
    # Response follows the OpenAI-style chat shape: choices[0].message.content.
    return generated_response['choices'][0]['message']['content']
# Create Gradio interface: a two-line text input wired to generate_response.
chat_application = gr.Interface(
    fn=generate_response,
    flagging_mode="never",  # hide Gradio's flagging button
    inputs=gr.Textbox(label="Input", lines=2, placeholder="Type your question here..."),
    outputs=gr.Textbox(label="Output"),
    title="CodeLLama Chatbot",
    description="Ask coding related questions and the chatbot will try to answer.",
)

# Launch the app.
# NOTE(review): share=True opens a public Gradio tunnel to this app (and the
# credentials behind it) — confirm that public exposure is intended.
chat_application.launch(share=True)