suraj committed on
Commit
b080356
1 Parent(s): ff0a367
Files changed (1) hide show
  1. app.py +8 -8
app.py CHANGED
@@ -48,7 +48,7 @@ def combine(a, b, c, d,e,f):
48
  max_new_tokens = d
49
  repeat_penalty = f
50
  top_p = e
51
- prompt = f"<|user|>\n{b}<|endoftext|>\n<|assistant|>"
52
  start = datetime.datetime.now()
53
  generation = ""
54
  delta = ""
@@ -57,11 +57,11 @@ def combine(a, b, c, d,e,f):
57
  answer_tokens = ''
58
  total_tokens = ''
59
  for character in llm(prompt,
60
- max_tokens=max_new_tokens,
61
  stop=["</s>"],
62
- temperature = temperature,
63
- repeat_penalty = repeat_penalty,
64
- top_p = top_p, # Example stop token - not necessarily correct for this specific model! Please check before using.
65
  echo=False,
66
  stream=True):
67
  generation += character["choices"][0]["text"]
@@ -86,8 +86,8 @@ with gr.Blocks(theme='Medguy/base2') as demo: #theme=gr.themes.Glass() #theme
86
  with gr.Column(scale=12):
87
  gr.HTML("<center>"
88
  + "<h3>Prompt Engineering Playground!</h3>"
89
- + "<h1>🐦 StableLM-Zephyr-3B - 4K context window</h2></center>")
90
- gr.Image(value='https://github.com/fabiomatricardi/GradioStudies/raw/main/20231205/logo-banner-StableZephyr.jpg', height=95, show_label = False,
91
  show_download_button = False, container = False)
92
  # INTERACTIVE INFOGRAPHIC SECTION
93
  with gr.Row():
@@ -122,7 +122,7 @@ with gr.Blocks(theme='Medguy/base2') as demo: #theme=gr.themes.Glass() #theme
122
  - **Repetition Penalty**: {repetitionpenalty}
123
  - **Context Lenght**: {contextlength} tokens
124
  - **LLM Engine**: CTransformers
125
- - **Model**: 🐦 StarlingLM-7b
126
  - **Log File**: {logfile}
127
  """)
128
 
 
48
  max_new_tokens = d
49
  repeat_penalty = f
50
  top_p = e
51
+ prompt = f"<|user|>\n{b}<|endoftext|>"
52
  start = datetime.datetime.now()
53
  generation = ""
54
  delta = ""
 
57
  answer_tokens = ''
58
  total_tokens = ''
59
  for character in llm(prompt,
60
+ max_tokens=512,
61
  stop=["</s>"],
62
+ temperature = 0.9,
63
+ repeat_penalty = 1,
64
+ top_p = 0.9, # Example stop token - not necessarily correct for this specific model! Please check before using.
65
  echo=False,
66
  stream=True):
67
  generation += character["choices"][0]["text"]
 
86
  with gr.Column(scale=12):
87
  gr.HTML("<center>"
88
  + "<h3>Prompt Engineering Playground!</h3>"
89
+ + "<h1>🐦 deepseek-coder-1.3b </h2></center>")
90
+ gr.Image(value='https://modishcard.com/app/assets/icons/ModishCard_Logo6-02.svg', height=95, show_label = False,
91
  show_download_button = False, container = False)
92
  # INTERACTIVE INFOGRAPHIC SECTION
93
  with gr.Row():
 
122
  - **Repetition Penalty**: {repetitionpenalty}
123
  - **Context Lenght**: {contextlength} tokens
124
  - **LLM Engine**: CTransformers
125
+ - **Model**: 🐦 deepseek-coder-1.3b
126
  - **Log File**: {logfile}
127
  """)
128