File size: 1,414 Bytes
ac035ed
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
# -*- coding: utf-8 -*-
"""llama_cpp_quiz_generator.ipynb

Automatically generated by Colab.

Original file is located at
    https://colab.research.google.com/drive/1V287A0U9MlfTEEPjBPDsREKdNMbxwg9V
"""

# Install dependencies (Colab/IPython shell magics — these lines only run
# inside a notebook cell, not as a plain Python script).
!pip install langchain
!pip install llama-cpp-python
!pip install langchain_community

# Mount the Google Drive folder to Colab
# (prompts for authorization; the GGUF model file below is loaded from Drive).
from google.colab import drive
drive.mount('/content/drive')

from langchain.prompts import ChatPromptTemplate
from langchain.chains import LLMChain
from langchain_community.llms import LlamaCpp

# Load the local Llama 3.1 8B Instruct model (Q5_K_M quantization) via llama.cpp.
llm = LlamaCpp(
    model_path="/content/drive/My Drive/LLM/models/Meta-Llama-3.1-8B-Instruct-Q5_K_M.gguf",
    # llama-cpp-python defaults to a 512-token context window, which is too
    # small for this prompt plus two generated Q&A pairs and silently
    # truncates — raise it explicitly.
    n_ctx=2048,
)

# Prompt with a single {text} placeholder, filled in at invoke time.
prompt = ChatPromptTemplate.from_template(
    "Generate 2 questions and their correct answers based on the following text:\n\n{text}\n\n"
)

# LLMChain is deprecated (LangChain >= 0.1.17); the LCEL pipe operator
# composes prompt and model into an equivalent runnable sequence.
chain = prompt | llm

text = "In general, IFC, or “Industry Foundation Classes”, \
is a standardized, digital description of the built environment, \
including buildings and civil infrastructure. \
It is an open, international standard (ISO 16739-1:2018), \
meant to be vendor-neutral, or agnostic, and usable across a wide range of hardware devices, \
software platforms, and interfaces for many different use cases. \
The IFC schema specification is the primary technical deliverable of buildingSMART International to fulfill its goal to promote openBIM."

# The LCEL chain returns the raw completion string directly
# (the deprecated LLMChain wrapped it in a dict under the 'text' key).
quiz = chain.invoke({"text": text})
print(quiz)