ranamhamoud committed (verified)
Commit 4c4df5c · Parent: db22f97

Update app.py

Files changed (1): app.py (+4, -2)
app.py CHANGED
@@ -51,14 +51,14 @@ class Story(Document):
     story_id = SequenceField(primary_key=True)
 
 def make_prompt(entry):
-    return f"### Human: YOUR INSTRUCTION HERE,ALWAYS USE A STORY,INCLUDE ASSESMENTS,TECHNICAL SUMMARY: {entry} ### Assistant:"
+    return f"### Human: YOUR INSTRUCTION HERE,ALWAYS USE A STORY,INCLUDE ASSESMENTS THROUGHOUT AND A TECHNICAL SUMMARY: {entry} ### Assistant:"
 
 @spaces.GPU
 def generate(
     message: str,
     chat_history: list[tuple[str, str]],
     max_new_tokens: int = 1024,
-    temperature: float = 0.1,  # Lower -> less random
+    temperature: float = 0.3,  # Lower -> less random
     top_p: float = 0.1,  # Lower -> less random, considering only the top 10% of tokens at each step
     top_k: int = 1,  # Least random, only the most likely next token is considered
     repetition_penalty: float = 1.0,  # No repetition penalty
@@ -96,6 +96,7 @@ def generate(
     for text in streamer:
         outputs.append(text)
         yield "".join(outputs)
+    final_story = "".join(outputs)
     try:
         saved_story = Story(message=message, content=final_story).save()
         yield f"Story saved with ID: {saved_story.story_id}"
@@ -119,5 +120,6 @@ with gr.Blocks(css="style.css") as demo:
 
 if __name__ == "__main__":
     demo.queue(max_size=20).launch()
+    demo.launch(share=True)
 
 
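For readers skimming the diff, the main behavioral change is the new `final_story = "".join(outputs)` assignment, which captures the fully streamed text right before the `Story(message=message, content=final_story).save()` call that consumes it. Below is a minimal, self-contained sketch of that flow; the Transformers streamer and the MongoEngine save are replaced with stand-ins, and everything other than `make_prompt`, `generate`, `outputs`, and `final_story` (the fake streamer, the hash-based ID, the example message) is illustrative rather than taken from app.py.

# Standalone sketch of the generate() flow after this commit.
# The real app streams tokens from a TextIteratorStreamer and persists the
# result with MongoEngine; both are replaced with stand-ins here.

from typing import Iterator


def make_prompt(entry: str) -> str:
    # Same template the commit introduces (wording preserved verbatim).
    return (
        "### Human: YOUR INSTRUCTION HERE,ALWAYS USE A STORY,"
        f"INCLUDE ASSESMENTS THROUGHOUT AND A TECHNICAL SUMMARY: {entry} ### Assistant:"
    )


def fake_streamer(prompt: str) -> Iterator[str]:
    # Stand-in for the model's token streamer.
    for chunk in ("Once ", "upon ", "a ", "time, ", "the ", "end."):
        yield chunk


def generate(message: str) -> Iterator[str]:
    outputs: list[str] = []
    for text in fake_streamer(make_prompt(message)):
        outputs.append(text)
        yield "".join(outputs)  # partial text streamed to the UI

    # The line added in this commit: capture the complete story once
    # streaming finishes, so the save step below has the full text.
    final_story = "".join(outputs)

    try:
        # Stand-in for Story(message=message, content=final_story).save()
        story_id = abs(hash(final_story)) % 1000
        yield f"Story saved with ID: {story_id}"
    except Exception as exc:
        yield f"Failed to save story: {exc}"


if __name__ == "__main__":
    for update in generate("a short story about streaming text"):
        print(update)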