from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
import gradio as gr

# The tokenizer is loaded from the GPT-Neo 2.7B repo; the weights are the Genji Python 6B checkpoint.
tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-2.7B")

# Load the model in fp16, switch to eval mode, and move it to the GPU for inference.
model = AutoModelForCausalLM.from_pretrained("NovelAI/genji-python-6B").half().eval().cuda()

# Fixed sampling settings. Note: repetition_penalty_range and repetition_penalty_slope
# are not arguments of stock transformers generate(); they rely on the custom
# transformers build the Genji models were released with.
top_k = 50
repetition_penalty = 1.13
repetition_penalty_range = 512
repetition_penalty_slope = 3.33
def generator(text, temperature, top_p, maxLength):
    # Truncate the prompt from the left so prompt + completion fits the 2048-token context window.
    tokens = tokenizer(text, return_tensors="pt").input_ids.cuda()[:, -(2047 - maxLength):]
    out = model.generate(
        tokens.long(),
        do_sample=True,
        min_length=tokens.shape[1] + maxLength,
        max_length=tokens.shape[1] + maxLength,
        temperature=temperature,
        top_k=top_k,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        repetition_penalty_range=repetition_penalty_range,
        repetition_penalty_slope=repetition_penalty_slope,
        use_cache=True,
        bad_words_ids=None,
        pad_token_id=tokenizer.eos_token_id,
    ).long().to("cpu")[0]
    # Return only the newly generated tokens, stripping the prompt.
    return tokenizer.decode(out[tokens.shape[1]:])
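
# A minimal local smoke test (hypothetical, not part of the original Space): the generator
# can be called directly, bypassing the Gradio UI. Uncomment to try it after the model loads.
# print(generator("def fibonacci(n):\n    ", 0.2, 1.0, 64))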

title = "genji-python-6b"
description = "Gradio demo for genji-python-6B. Genji is a transformer model finetuned from EleutherAI's GPT-J 6B. This checkpoint was trained on close to 4 GB of Python-only code. To use the demo, enter your text or click one of the examples to load it. Read more at the links below."
article = "<p style='text-align: center'><a href='https://colab.research.google.com/drive/1PnWpx02IEUkY8jhLKd_NewUGEXahAska'>Colab</a> | <a href='https://huggingface.co/NovelAI/genji-python-6B'>Huggingface Model</a></p>"

gr.Interface(
    generator,
    [
        gr.inputs.Textbox(label="Input text", lines=5),
        gr.inputs.Slider(minimum=0, maximum=1, step=0.1, default=0.2, label="Temperature"),
        gr.inputs.Slider(minimum=0, maximum=1, step=0.1, default=1, label="Top P"),
        gr.inputs.Slider(minimum=1, maximum=400, step=10, default=300, label="Max Length"),
    ],
    gr.outputs.Textbox(label="Output text"),
    title=title,
    description=description,
    article=article,
    examples=[
        ["def print_Hello_Huggingface():", 0.2, 1, 300],
    ],
).launch(debug=True)