Update app.py
app.py CHANGED
@@ -15,16 +15,16 @@ DEFAULT_MAX_NEW_TOKENS = 1024
 MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))
 
 DESCRIPTION = """\
-# Storytell AI
-Welcome to the Storytell AI space, crafted with care by Ranam & George. Dive into the world of educational storytelling with our [Storytell](https://huggingface.co/ranamhamoud/storytell) model. This iteration of the Llama 2 model with 7 billion parameters is fine-tuned to generate educational stories that engage and educate. Enjoy a journey of discovery and creativity—your storytelling lesson begins here!
+# ✨Storytell AI🧑🏽‍💻
+Welcome to the **Storytell AI** space, crafted with care by Ranam & George. Dive into the world of educational storytelling with our [Storytell](https://huggingface.co/ranamhamoud/storytell) model. This iteration of the Llama 2 model with 7 billion parameters is fine-tuned to generate educational stories that engage and educate. Enjoy a journey of discovery and creativity—your storytelling lesson begins here!
 """
 
 
 LICENSE = """
 <p/>
 ---
-As a derivate work of [Llama-2-7b-chat](https://huggingface.co/meta-llama/Llama-2-7b-chat) by Meta,
-this demo is governed by the original [license](https://huggingface.co/spaces/huggingface-projects/llama-2-7b-chat/blob/main/LICENSE.txt) and [acceptable use policy](https://huggingface.co/spaces/huggingface-projects/llama-2-7b-chat/blob/main/USE_POLICY.md).
+######As a derivate work of [Llama-2-7b-chat](https://huggingface.co/meta-llama/Llama-2-7b-chat) by Meta,
+######this demo is governed by the original [license](https://huggingface.co/spaces/huggingface-projects/llama-2-7b-chat/blob/main/LICENSE.txt) and [acceptable use policy](https://huggingface.co/spaces/huggingface-projects/llama-2-7b-chat/blob/main/USE_POLICY.md).
 """
 
 if not torch.cuda.is_available():
@@ -51,7 +51,7 @@ class Story(Document):
     story_id = SequenceField(primary_key=True)
 
 def make_prompt(entry):
-    return f"### Human: YOUR INSTRUCTION HERE,ALWAYS USE A STORY,INCLUDE ASSESMENTS
+    return f"### Human:YOU ARE A CS STORYTELLER, YOUR INSTRUCTION HERE,ALWAYS USE A STORY,INCLUDE ASSESMENTS AND A TECHNICAL SUMMARY: {entry} ### Assistant:"
 
 @spaces.GPU
 def generate(
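For context, the sketch below shows how the updated `make_prompt` template would typically be wired into generation in a Space like this one. It is a minimal illustration under assumptions, not code from this commit: the checkpoint name `ranamhamoud/storytell` is taken from the description link, and the tokenizer/model loading plus the sampling parameters are assumed to follow the usual `transformers` pattern.

```python
# Minimal sketch (assumed setup, not the Space's actual generate() implementation).
import os
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))

def make_prompt(entry):
    # Same template as the updated line in the diff above.
    return f"### Human:YOU ARE A CS STORYTELLER, YOUR INSTRUCTION HERE,ALWAYS USE A STORY,INCLUDE ASSESMENTS AND A TECHNICAL SUMMARY: {entry} ### Assistant:"

# Assumed checkpoint, based on the model linked in DESCRIPTION.
model_id = "ranamhamoud/storytell"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.float16, device_map="auto"
)

prompt = make_prompt("Explain how a hash table works.")
input_ids = tokenizer(prompt, return_tensors="pt").input_ids
# Keep only the last MAX_INPUT_TOKEN_LENGTH tokens so overly long inputs fit the context.
input_ids = input_ids[:, -MAX_INPUT_TOKEN_LENGTH:].to(model.device)

output = model.generate(
    input_ids,
    max_new_tokens=1024,       # matches DEFAULT_MAX_NEW_TOKENS in the hunk header
    do_sample=True,            # sampling settings are illustrative assumptions
    temperature=0.7,
)
# Decode only the newly generated tokens after the prompt.
print(tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True))
```

Trimming `input_ids` to the last `MAX_INPUT_TOKEN_LENGTH` tokens reflects what that setting is normally used for in the llama-2-7b-chat-style demos this Space derives from.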