Update app.py
app.py CHANGED
@@ -1,11 +1,13 @@
 import gradio as gr
-from transformers import
-import
+from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
+import onnxruntime as ort
 
 # Load the Phi-3.5-mini-instruct model and tokenizer
-model_name = "
+model_name = "microsoft/Phi-3.5-mini-instruct"
 tokenizer = AutoTokenizer.from_pretrained(model_name)
-
+
+# Load the ONNX model
+session = ort.InferenceSession(f"{model_name}/model.onnx")
 
 # Simple HTML template for the website
 simple_website_template = """
@@ -42,10 +44,15 @@ def personalize_website_llm(persona_text):
     # Create a prompt for the model
     prompt = f"Generate personalized website content for the following persona: {persona_text}. Provide a title and main content."
 
-    # Tokenize
-    inputs = tokenizer(prompt, return_tensors="
-
-
+    # Tokenize the prompt
+    inputs = tokenizer(prompt, return_tensors="np")
+
+    # Run the ONNX model
+    ort_inputs = {session.get_inputs()[0].name: inputs["input_ids"]}
+    ort_outs = session.run(None, ort_inputs)
+
+    # Decode the output
+    generated_text = tokenizer.decode(ort_outs[0][0], skip_special_tokens=True)
 
     # Split the response into a title and content
     title, content = generated_text.split('\n', 1)
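For reference, below is a minimal standalone sketch of the tokenize -> ONNX Runtime -> decode flow this change introduces. It is a sketch under assumptions, not the committed code: it assumes a local decoder-only ONNX export at "model.onnx" whose graph takes "input_ids" and "attention_mask" and returns next-token logits (real Phi-3.5 exports typically expose additional past-key-value inputs), and it runs a simple greedy loop instead of a single forward pass.

    # Sketch: greedy generation with an assumed ONNX export of a causal LM.
    import numpy as np
    import onnxruntime as ort
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3.5-mini-instruct")
    session = ort.InferenceSession("model.onnx")  # assumed local export path

    def generate(prompt, max_new_tokens=64):
        # Tokenize to NumPy arrays, as in the updated app.py
        ids = tokenizer(prompt, return_tensors="np")["input_ids"]
        for _ in range(max_new_tokens):
            feed = {
                "input_ids": ids,                      # assumed input name
                "attention_mask": np.ones_like(ids),   # assumed input name
            }
            logits = session.run(None, feed)[0]        # (1, seq_len, vocab)
            next_id = int(np.argmax(logits[0, -1]))    # greedy pick of next token
            if next_id == tokenizer.eos_token_id:
                break
            ids = np.concatenate([ids, np.array([[next_id]], dtype=ids.dtype)], axis=1)
        return tokenizer.decode(ids[0], skip_special_tokens=True)

The loop feeds the full sequence back each step for simplicity; a production setup would reuse the exported KV cache inputs instead.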