from taskAI import TaskAI
from taskNonAI import compile_pdf
from _data_test import mock_cv, pdf_context
from _secret import api_test

from llama_index.llms.openai_like import OpenAILike
from llama_index.core.llms import ChatMessage
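# `api_test` is assumed to be a mapping that provides at least the "model",
# "key", and "base" entries used below, e.g. (hypothetical values):
#   api_test = {
#       "model": "gpt-3.5-turbo",
#       "key": "sk-...",
#       "base": "https://api.openai.com/v1",
#   }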


def test_ai_integration():
    """Smoke-test the OpenAI-compatible endpoint directly through llama-index."""
    messages = [
        ChatMessage(role="system", content="You are a helpful assistant"),
        ChatMessage(role="user", content="What is your name"),
    ]
    print("Testing integration:")
    # No retries, so a bad key or base URL fails immediately.
    response = OpenAILike(
        model=api_test["model"],
        api_key=api_test["key"],
        api_base=api_test["base"],
        max_retries=0,
        is_chat_model=True,
    ).chat(messages)
    print(response)


def test_taskAI():
    """Run TaskAI's CV preprocessing on the mock CV and print each yielded chunk."""
    taskAI = TaskAI(api_test)
    gen = taskAI.cv_preprocess(mock_cv)
    for chunk in gen:
        print(chunk)


def test_typst_pdf():
    """Fill the Typst letter template with the test context and write test_result.pdf."""
    compile_pdf(
        tmpl_path="typst/template_letter.tmpl",
        context=pdf_context,
        output_path="test_result.pdf",
    )


if __name__ == "__main__":
    # Run one check at a time; uncomment the others as needed.
    # test_taskAI()
    # test_ai_integration()
    test_typst_pdf()
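
# Note: these test_* functions also follow pytest's naming convention, so
# (assuming pytest is installed) they can be collected and run with `pytest`
# instead of the manual entry point above.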