Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -38,9 +38,11 @@ m.eval()
|
|
38 |
# "Model with {:.2f}M parameters".format(sum(p.numel() for p in m.parameters()) / 1e6)
|
39 |
#)
|
40 |
def model_generate(text, number_of_new_token, temperature, top_p):
    """Generate a text continuation of *text* with the loaded model `m`.

    Args:
        text: Prompt string (coerced via ``str()`` to tolerate non-string
            input from the UI).
        number_of_new_token: Maximum number of tokens to generate. Gradio
            sliders deliver floats, so it is cast to ``int`` before use.
        temperature: Softmax temperature passed through to sampling.
        top_p: Nucleus-sampling probability-mass cutoff.

    Returns:
        The decoded generated text. NOTE(review): whether this includes the
        prompt depends on ``m.generate`` — confirm against the model code.
    """
    # Encode the prompt and add a batch dimension; move to the model's device.
    context = encode(str(text), tokenizer).unsqueeze(0).to(DEVICE)
    # int() cast: Gradio Slider values arrive as floats, and a generation
    # loop / range over max_new_tokens requires an integer count.
    gen = decode(
        enc_sec=m.generate(
            idx=context,
            max_new_tokens=int(number_of_new_token),
            temperature=temperature,
            top_p=top_p,
        )[0],
        tokenizer=tokenizer,
    )
    return gen
|
45 |
|
46 |
# Gradio UI: a prompt textbox plus sliders for the new-token budget,
# sampling temperature, and top-p cutoff; output is plain text.
iface = gr.Interface(
    fn=model_generate,
    inputs=[
        "text",
        gr.Slider(10, 1000),
        gr.Slider(0, 1, value=0.7, step=0.05),
        gr.Slider(0, 1, value=0.95, step=0.05),
    ],
    outputs="text",
)
|
|
|
38 |
# "Model with {:.2f}M parameters".format(sum(p.numel() for p in m.parameters()) / 1e6)
|
39 |
#)
|
40 |
def model_generate(text, number_of_new_token, temperature, top_p):
    """Generate a text continuation of *text* with the loaded model `m`.

    Echoes the prompt and the generated output to stdout (debug tracing
    added in this revision of the app).

    Args:
        text: Prompt string (coerced via ``str()`` to tolerate non-string
            input from the UI).
        number_of_new_token: Maximum number of tokens to generate. Gradio
            sliders deliver floats, so it is cast to ``int`` before use.
        temperature: Softmax temperature passed through to sampling.
        top_p: Nucleus-sampling probability-mass cutoff.

    Returns:
        The decoded generated text. NOTE(review): whether this includes the
        prompt depends on ``m.generate`` — confirm against the model code.
    """
    print(text)
    # Encode the prompt and add a batch dimension; move to the model's device.
    context = encode(str(text), tokenizer).unsqueeze(0).to(DEVICE)
    # int() cast: Gradio Slider values arrive as floats, and a generation
    # loop / range over max_new_tokens requires an integer count.
    gen = decode(
        enc_sec=m.generate(
            idx=context,
            max_new_tokens=int(number_of_new_token),
            temperature=temperature,
            top_p=top_p,
        )[0],
        tokenizer=tokenizer,
    )
    print(gen)
    return gen
|
47 |
|
48 |
# Build the Gradio interface: one free-text input followed by three sliders
# (token budget 10–1000, temperature 0–1 default 0.7, top-p 0–1 default 0.95).
iface = gr.Interface(
    fn=model_generate,
    inputs=[
        "text",
        gr.Slider(10, 1000),
        gr.Slider(0, 1, value=0.7, step=0.05),
        gr.Slider(0, 1, value=0.95, step=0.05),
    ],
    outputs="text",
)
|