import gradio as gr
from llama_cpp import Llama

# Load the quantized model once at startup; n_ctx/n_batch are kept small
# to fit modest hardware — raise them for longer prompts/responses.
llm = Llama(model_path="ggml-model-q4_0.bin", n_ctx=256, n_batch=128)


def generate_text(input_text: str) -> str:
    """Generate a model response for *input_text* using an Alpaca-style prompt.

    Args:
        input_text: The user's instruction, taken from the Gradio textbox.

    Returns:
        The generated completion text (the model's "Response" section).
    """
    print(input_text)  # log the incoming prompt for debugging
    output = llm(
        f"### Instruction:\n{input_text}\n\n### Response:\n",
        max_tokens=128,
        echo=False,  # return only the completion, not the prompt
    )
    print(output)  # log the raw llama.cpp response dict
    return output['choices'][0]['text']


# NOTE: gr.inputs.Textbox / gr.outputs.Textbox were deprecated in Gradio 3.x
# and removed in 4.x — components are now constructed directly.
input_text = gr.Textbox(lines=10, label="Enter your input text")
output_text = gr.Textbox(label="Output text")

gr.Interface(
    fn=generate_text,
    inputs=input_text,
    outputs=output_text,
    title="🐢 Olive: OdiaGPT Model built by the OdiaGenAI Team",
).launch()