Commit 8e003ad by ionosphere (parent: 3ac4842)

Update

Files changed:
- .gitignore +2 -1
- README.md +4 -2
- app.py +13 -6
- requirements.txt +1 -0
.gitignore CHANGED

@@ -1,2 +1,3 @@
 chroma_db/*
-__pycache__/*
+__pycache__/*
+.venv
README.md CHANGED

@@ -25,11 +25,13 @@ Check https://www.gradio.app/guides/quickstart for more details about Gradio.
 
 ## Install dependencies
 
-`
+`python -m venv .venv`
+
+`source .venv/bin/activate`
 
 `pip install -r requirements.txt`
 
-## Add Mistral API Key to your environement variables
+## Add Mistral API Key to your environement variables or in .env locally
 
 in `~/.profile` or `~/.bashrc`
 
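For context (not part of the commit): a minimal sketch of the ".env locally" option the README now mentions, matching the `load_dotenv()` call added to app.py below. It assumes python-dotenv is installed and a `.env` file in the working directory containing a `MISTRAL_API_KEY=...` line.

```python
# Minimal sketch (assumption: python-dotenv is installed and .env exists locally).
import os
from dotenv import load_dotenv

load_dotenv()                                # loads variables from .env into the environment
api_key = os.environ.get("MISTRAL_API_KEY")  # same lookup app.py already performs
```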
app.py CHANGED

@@ -1,4 +1,8 @@
 import os
+__import__('pysqlite3')
+import sys
+sys.modules['sqlite3'] = sys.modules.pop('pysqlite3')
+from dotenv import load_dotenv
 import json
 import gradio as gr
 import chromadb
@@ -6,7 +10,7 @@ import chromadb
 from llama_index.core import (
     VectorStoreIndex,
     StorageContext,
-
+    Settings,
     download_loader,
 )
 
@@ -14,13 +18,15 @@ from llama_index.llms.mistralai import MistralAI
 from llama_index.embeddings.mistralai import MistralAIEmbedding
 from llama_index.vector_stores.chroma import ChromaVectorStore
 
+load_dotenv()
+
 title = "Gaia Mistral 8x7b Chat RAG PDF Demo"
 description = "Example of an assistant with Gradio, RAG from PDF documents and Mistral AI via its API"
 placeholder = (
     "Vous pouvez me posez une question sur ce contexte, appuyer sur Entrée pour valider"
 )
 placeholder_url = "Extract text from this url"
-llm_model = "open-mixtral-
+llm_model = "open-mixtral-8x22b"
 
 env_api_key = os.environ.get("MISTRAL_API_KEY")
 query_engine = None
@@ -36,15 +42,16 @@ chroma_collection = db.get_or_create_collection("quickstart")
 # set up ChromaVectorStore and load in data
 vector_store = ChromaVectorStore(chroma_collection=chroma_collection)
 storage_context = StorageContext.from_defaults(vector_store=vector_store)
-
-
-
+
+Settings.llm = llm
+Settings.embed_model = embed_model
+Settings.chunk_size = 1024
 
 PDFReader = download_loader("PDFReader")
 loader = PDFReader()
 
 index = VectorStoreIndex(
-    [],
+    [], storage_context=storage_context
 )
 query_engine = index.as_query_engine(similarity_top_k=5)
 
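For context (not part of the diff): a minimal, self-contained sketch of how the pieces introduced above typically fit together, using llama-index's global `Settings` instead of a per-index service context. The embedding model name ("mistral-embed"), the explicit `api_key` arguments, and the `PersistentClient` path are assumptions, not taken from this commit.

```python
# Minimal sketch; model names and client path are assumptions for illustration.
import os
import chromadb
from llama_index.core import Settings, StorageContext, VectorStoreIndex
from llama_index.llms.mistralai import MistralAI
from llama_index.embeddings.mistralai import MistralAIEmbedding
from llama_index.vector_stores.chroma import ChromaVectorStore

api_key = os.environ.get("MISTRAL_API_KEY")

# Global defaults: every index and query engine created below will use these.
Settings.llm = MistralAI(model="open-mixtral-8x22b", api_key=api_key)
Settings.embed_model = MistralAIEmbedding(model_name="mistral-embed", api_key=api_key)
Settings.chunk_size = 1024

# Empty index backed by a persistent Chroma collection; documents are inserted later.
db = chromadb.PersistentClient(path="./chroma_db")
chroma_collection = db.get_or_create_collection("quickstart")
vector_store = ChromaVectorStore(chroma_collection=chroma_collection)
storage_context = StorageContext.from_defaults(vector_store=vector_store)

index = VectorStoreIndex([], storage_context=storage_context)
query_engine = index.as_query_engine(similarity_top_k=5)
```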
requirements.txt CHANGED

@@ -1,6 +1,7 @@
 pypdf
 mistralai
 gradio
+pysqlite3-binary
 chromadb
 llama-index
 llama-index-readers-web
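Side note (not part of the commit): the new pysqlite3-binary dependency pairs with the module swap added at the top of app.py. Chroma requires SQLite 3.35 or newer, which some deployment images lack, so the bundled pysqlite3 build is substituted for the stdlib sqlite3 module before chromadb is imported. A minimal sketch, assuming pysqlite3-binary is installed:

```python
# Minimal sketch (assumes pysqlite3-binary is installed): swap the bundled
# SQLite build in for the stdlib sqlite3 module before importing chromadb.
__import__("pysqlite3")
import sys
sys.modules["sqlite3"] = sys.modules.pop("pysqlite3")

import sqlite3
print(sqlite3.sqlite_version)  # should now report a version >= 3.35

import chromadb  # picks up the swapped module
```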