import os
from pathlib import Path

import uvicorn
from fastapi import FastAPI

from llama_index import (
    ServiceContext,
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
    load_index_from_storage,
)
from llama_index.embeddings import VoyageEmbedding
from llama_index.llms import Anyscale
from llama_index.query_engine import RetrieverQueryEngine
from llama_index.retrievers import RecursiveRetriever

app = FastAPI()

# LLM served through Anyscale Endpoints; ANYSCALE_API_KEY must be set in the environment.
llm = Anyscale(
    model="mistralai/Mistral-7B-Instruct-v0.1",
    api_key=os.getenv("ANYSCALE_API_KEY"),
)

# Voyage AI embedding model; VOYAGE_API_KEY must be set in the environment.
embed_model = VoyageEmbedding(
    model_name="voyage-01",
    voyage_api_key=os.getenv("VOYAGE_API_KEY"),
)

# Bundle the LLM and embedding model so indexing and querying use the same components.
service_context = ServiceContext.from_defaults(llm=llm, embed_model=embed_model)

# Reuse the persisted index if it exists; otherwise build it from ./docs and persist it.
if "index" in os.listdir():
    storage_context = StorageContext.from_defaults(persist_dir=Path("./index"))
else:
    dir_reader = SimpleDirectoryReader(Path("./docs"))
    documents = dir_reader.load_data()
    index = VectorStoreIndex.from_documents(documents, service_context=service_context)
    index.storage_context.persist(Path("./index"))
    storage_context = StorageContext.from_defaults(persist_dir=Path("./index"))

index = load_index_from_storage(storage_context=storage_context, service_context=service_context)

# Retrieve the 4 most similar chunks from the vector index for each query.
index_engine = index.as_retriever(similarity_top_k=4)

# Wrap the retriever in a RecursiveRetriever rooted at the "vector" node.
index_retriever = RecursiveRetriever("vector", retriever_dict={"vector": index_engine})

# Query engine that retrieves context and synthesizes an answer with the LLM.
query_engine = RetrieverQueryEngine.from_args(index_retriever, service_context=service_context)


@app.get("/generate")
def generate(query: str):
    """Answer a query against the indexed documents."""
    return str(query_engine.query(query))


if __name__ == "__main__":
    uvicorn.run("main:app", reload=True)
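
# Example usage once the server is running (uvicorn defaults to http://127.0.0.1:8000);
# the query below is illustrative, any question about the indexed documents works:
#
#   curl "http://127.0.0.1:8000/generate?query=What+do+the+docs+cover%3F"
#
# The endpoint returns the synthesized answer as a plain string.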