Spaces:
Runtime error
Update app.py
app.py
CHANGED
@@ -1,35 +1,28 @@
-# -*- coding: utf-8 -*-
-"""llama_cpp_quiz_generator.ipynb
-
-Automatically generated by Colab.
-
-Original file is located at
-    https://colab.research.google.com/drive/1V287A0U9MlfTEEPjBPDsREKdNMbxwg9V
-"""
-
-!pip install langchain
-!pip install llama-cpp-python
-!pip install langchain_community
-
-# Mount the Google Drive folder to Colab
-from google.colab import drive
-drive.mount('/content/drive')
-
 from langchain.prompts import ChatPromptTemplate
 from langchain.chains import LLMChain
 from langchain_community.llms import LlamaCpp
+from huggingface_hub import hf_hub_download
+
+hf_hub_download(
+    repo_id="bartowski/Meta-Llama-3.1-8B-Instruct-GGUF",
+    filename="Meta-Llama-3.1-8B-Instruct-Q5_K_M.gguf",
+    local_dir = "./models"
+)
 
 llm = LlamaCpp(
-    model_path="
+    model_path="models/Meta-Llama-3.1-8B-Instruct-Q5_K_M.gguf",
 )
 
-
-
-
-
-chain = LLMChain(llm=llm, prompt=prompt)
+# Generator function
+def gen_quiz(input, num):
+    prompt = ChatPromptTemplate.from_template(
+        "Generate {num} questions and their correct answers based on the following text:\n\n{text}\n\n")
+    chain = LLMChain(llm=llm, prompt=prompt)
+    quiz = chain.invoke(input)
+    return quiz['text']
 
-
+# Example
+text_example = "In general, IFC, or “Industry Foundation Classes”, \
 is a standardized, digital description of the built environment, \
 including buildings and civil infrastructure. \
 It is an open, international standard (ISO 16739-1:2018), \
@@ -37,5 +30,17 @@ meant to be vendor-neutral, or agnostic, and usable across a wide range of hardw
 software platforms, and interfaces for many different use cases. \
 The IFC schema specification is the primary technical deliverable of buildingSMART International to fulfill its goal to promote openBIM."
 
-
-
+
+import gradio as gr
+
+# Gradio Interface
+gr.close_all()
+demo = gr.Interface(fn=gen_quiz,
+                    inputs=[gr.Textbox(label="Text to generate quiz from", lines=6),
+                            gr.Slider(minimum=1, maximum=10, value=2048, step=1, label="Number of Quiz")],
+                    outputs=[gr.Textbox(label="Result", lines=10)],
+                    examples=[[text_example, 3]],
+                    title="Quiz Generator with LlamaCpp",
+                    description="Generating quiz based on given texts using Meta-Llama-3.1-8B-Instruct-Q5_K_M.gguf!"
+)
+demo.launch()
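The Space currently shows a Runtime error. That status can have several causes (dependencies, memory for an 8B Q5 model, etc.), but two details in the committed code look problematic regardless of the environment: the prompt template declares two variables, {num} and {text}, while chain.invoke(input) passes only the raw text, which LangChain rejects because a multi-input chain cannot take a single string; and the gr.Slider default value=2048 lies outside its minimum=1 / maximum=10 range. Below is a minimal sketch of how the generator could be wired instead, assuming the same model path and prompt as in the commit; the n_ctx and max_tokens keyword arguments are illustrative additions, not taken from the commit.

from langchain.prompts import ChatPromptTemplate
from langchain.chains import LLMChain
from langchain_community.llms import LlamaCpp

# Assumes the GGUF file was already downloaded to ./models, as in the commit above.
llm = LlamaCpp(
    model_path="models/Meta-Llama-3.1-8B-Instruct-Q5_K_M.gguf",
    n_ctx=4096,       # illustrative context window; the commit relies on LlamaCpp defaults
    max_tokens=1024,  # illustrative output budget, also not in the commit
)

prompt = ChatPromptTemplate.from_template(
    "Generate {num} questions and their correct answers based on the following text:\n\n{text}\n\n")
chain = LLMChain(llm=llm, prompt=prompt)

def gen_quiz(text, num):
    # The template has two input variables, so both must be supplied explicitly.
    result = chain.invoke({"text": text, "num": num})
    return result["text"]

Building the prompt and chain once at module scope, rather than inside gen_quiz on every request as the commit does, avoids recreating them per call; functionally either placement works once the invoke call supplies both variables. On the Gradio side, the slider default would need to sit inside its range, e.g. value=3 to match the [text_example, 3] example. Finally, since the !pip install lines from the Colab version were dropped, the Space needs its dependencies declared elsewhere; on Hugging Face Spaces that is normally a requirements.txt listing, for example, langchain, langchain-community, llama-cpp-python and huggingface_hub.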