afrizalha committed
Commit aff4485 · verified · 1 Parent(s): 45bc2a4

Update app.py

Files changed (1)
  1. app.py +9 -17
app.py CHANGED
@@ -10,10 +10,7 @@ tinier = AutoModelForCausalLM.from_pretrained("afrizalha/Sasando-1-7M", token=hf
 
 desc = """Sasando-1 is a tiny, highly experimental text generator built using the Phi-3 architecture. It comes with two variations of microscopic sizes: 7M and 25M parameters. It is trained on a tightly-controlled Indo4B dataset filtered to only have 18000 unique words. The method is inspired by Microsoft's TinyStories paper which demonstrates that a tiny language model can produce fluent text when trained on tightly-controlled dataset.\n\nTry prompting with two simple words, and let the model continue. Fun examples provided below."""
 
-def generate(starting_text=None, choice=None, temp=None, top_p=None):
-    if info:
-        return desc
-
+def generate(starting_text, choice, temp, top_p):
     if choice == '7M':
         model = tinier
     elif choice == '25M':
@@ -38,18 +35,13 @@ def generate(starting_text=None, choice=None, temp=None, top_p=None):
 
 with gr.Blocks(theme=gr.themes.Soft()) as app:
     starting_text = gr.Textbox(label="Starting text", value="cinta adalah")
-    choice = gr.Radio(["7M", "25M", "Info"], label="Select model", value='Info')
-    with gr.Row():
-        temp = gr.Slider(label="Temperature", minimum=0.1, maximum=1.0, step=0.1, value=0.7)
-        top_p = gr.Slider(label="Top P", minimum=0.1, maximum=1.0, step=0.1, value=0.5)
-    res = gr.Textbox(label="Continuation")
-    gr.Interface(
-        fn=generate,
-        inputs=[starting_text, choice, temp, top_p],
-        outputs=[res],
-        allow_flagging="never",
+    choice = gr.Radio(["7M", "25M", "Info"], label="Select model", info="Built with the Phi-3 architecture", value='Info')
+    # num_runs = gr.Slider(label="Number of examples", minimum=1, maximum=10, step=1, value=5)
+    temp = gr.Slider(label="Temperature", minimum=0.1, maximum=1.0, step=0.1, value=0.7)
+    top_p = gr.Slider(label="Top P", minimum=0.1, maximum=1.0, step=0.1, value=0.5)
         title="Sasando-1",
-    )
-    examples = gr.Examples([["gue"], ["presiden"], ["cinta adalah"], ["allah, aku"], ["dia marah karena"],
-                            ["inflasi"], ["kolam renang"], ["messi"], ["jalan-jalan"], ["komputer itu"]], [starting_text])
+    )
+    examples=gr.Examples([["gue"], ["presiden"], ["cinta adalah"], ["allah, aku"], ["dia marah karena"],
+                         ["inflasi"], ["kolam renang"], ["messi"], ["jalan-jalan"], ["komputer itu"]], [starting_text])
+
 app.launch()
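For context, the hunks above only show generate() down to its model selection, so the following is a minimal sketch of the sampling path the Temperature and Top P sliders feed, not the Space's actual app.py. The 25M repo id, the shared tokenizer, and the max_new_tokens cap are all assumptions; only the 7M load is visible in the hunk header.

# Hedged sketch of the generation path; assumes public checkpoints,
# a tokenizer bundled with the 7M repo, and an arbitrary output cap.
from transformers import AutoModelForCausalLM, AutoTokenizer

tinier = AutoModelForCausalLM.from_pretrained("afrizalha/Sasando-1-7M")
tiny = AutoModelForCausalLM.from_pretrained("afrizalha/Sasando-1-25M")  # assumed repo id
tokenizer = AutoTokenizer.from_pretrained("afrizalha/Sasando-1-7M")     # assumed shared tokenizer

def generate(starting_text, choice, temp, top_p):
    # Model selection mirrors the diff: '7M' -> tinier, '25M' -> the larger model.
    model = tinier if choice == '7M' else tiny
    inputs = tokenizer(starting_text, return_tensors="pt")
    output_ids = model.generate(
        **inputs,
        do_sample=True,      # required for temperature/top_p to take effect
        temperature=temp,
        top_p=top_p,
        max_new_tokens=50,   # illustrative cap; the real value is outside the diff
    )
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)

print(generate("cinta adalah", "7M", 0.7, 0.5))

Note that do_sample=True is what makes temperature and top_p take effect; with greedy decoding, transformers ignores both, so the two sliders would do nothing.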
 
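On the UI side, the new hunk removes the gr.Interface(...) call but keeps its trailing title="Sasando-1", and ) lines. One hypothetical way to wire the remaining widgets directly inside gr.Blocks is an explicit Button callback; the res textbox is carried over from the removed lines, and the stub generate stands in for the real one sketched above:

# Hypothetical gr.Blocks wiring, not this commit's code: an explicit
# Button replaces the removed gr.Interface call.
import gradio as gr

def generate(starting_text, choice, temp, top_p):
    return starting_text + " ..."  # stand-in for the real generate() in app.py

with gr.Blocks(theme=gr.themes.Soft()) as app:
    starting_text = gr.Textbox(label="Starting text", value="cinta adalah")
    choice = gr.Radio(["7M", "25M", "Info"], label="Select model",
                      info="Built with the Phi-3 architecture", value='Info')
    temp = gr.Slider(label="Temperature", minimum=0.1, maximum=1.0, step=0.1, value=0.7)
    top_p = gr.Slider(label="Top P", minimum=0.1, maximum=1.0, step=0.1, value=0.5)
    res = gr.Textbox(label="Continuation")  # output box from the removed lines
    run = gr.Button("Generate")             # hypothetical trigger
    run.click(fn=generate, inputs=[starting_text, choice, temp, top_p], outputs=[res])
    gr.Examples([["gue"], ["presiden"], ["cinta adalah"]], [starting_text])

app.launch()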