Update app.py
app.py CHANGED

@@ -8,7 +8,7 @@ import re
 import json
 from auditqa.sample_questions import QUESTIONS
 from auditqa.reports import POSSIBLE_REPORTS
-from auditqa.engine.prompts import audience_prompts, answer_prompt_template,
+from auditqa.engine.prompts import audience_prompts, answer_prompt_template, llama3_prompt
 from auditqa.doc_process import process_pdf
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.output_parsers import StrOutputParser
@@ -72,7 +72,7 @@ async def chat(query,history,sources,reports):
 
     # get prompt
 
-    prompt = ChatPromptTemplate.from_template(
+    prompt = ChatPromptTemplate.from_template(llama3_prompt)
 
     # get llm
     # llm_qa = HuggingFaceEndpoint(
@@ -89,8 +89,7 @@ async def chat(query,history,sources,reports):
     llm_qa = HuggingFaceEndpoint(
         endpoint_url= "https://mnczdhmrf7lkfd9d.eu-west-1.aws.endpoints.huggingface.cloud",
         task="text-generation",
-        huggingfacehub_api_token=HF_token
-        model_kwargs = {'stop':["<|eot_id|>"]})
+        huggingfacehub_api_token=HF_token)
 
 
     # create rag chain
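For readers following the change: the commit imports the new llama3_prompt, passes it to ChatPromptTemplate.from_template, and drops the model_kwargs stop-token argument from the HuggingFaceEndpoint call. Below is a minimal, self-contained sketch of how these pieces typically wire into the "create rag chain" step, not the repository's code. The import path for HuggingFaceEndpoint, the llama3_prompt text, the {context}/{question} placeholders, and the HF_TOKEN environment variable are assumptions, not taken from this diff.

# Minimal sketch, assuming HuggingFaceEndpoint comes from langchain_huggingface and
# that llama3_prompt is a Llama-3-style template string with {context} and {question}
# placeholders; the real template lives in auditqa.engine.prompts and is not shown here.
import os

from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_huggingface import HuggingFaceEndpoint

llama3_prompt = (  # hypothetical stand-in for auditqa.engine.prompts.llama3_prompt
    "<|begin_of_text|><|start_header_id|>user<|end_header_id|>\n"
    "Answer the question using only the context below.\n"
    "Context: {context}\nQuestion: {question}"
    "<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n"
)

# get prompt: build the chat template from the Llama-3 prompt string
prompt = ChatPromptTemplate.from_template(llama3_prompt)

# get llm: the dedicated Inference Endpoint from the diff; this commit drops the
# model_kwargs stop-token argument, so none is passed here
llm_qa = HuggingFaceEndpoint(
    endpoint_url="https://mnczdhmrf7lkfd9d.eu-west-1.aws.endpoints.huggingface.cloud",
    task="text-generation",
    huggingfacehub_api_token=os.environ["HF_TOKEN"],  # HF_token in app.py
)

# create rag chain: template -> endpoint -> plain-text answer
rag_chain = prompt | llm_qa | StrOutputParser()
answer = rag_chain.invoke(
    {"context": "…retrieved report passages…", "question": "…user query…"}
)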