{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from llama_cpp import Llama" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "model_path = f'../bot/question_answering/local_models/ggml-alpaca-7b-q4.bin'\n", "llm = Llama(model_path=model_path, n_ctx=2048)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "prompt = ''\n", "\n", "output = llm(\n", " prompt,\n", " max_tokens=512,\n", " stop=['Q:'],\n", " echo=False\n", ")\n", "output_text = output['choices'][0]['text']" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "hf_qa_bot", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.10" }, "orig_nbformat": 4, "vscode": { "interpreter": { "hash": "e769ac600d1c65682759767682b2a946c0eaa09d353302f712fe4c2e822e15df" } } }, "nbformat": 4, "nbformat_minor": 2 }