import gradio as gr
from llama_cpp import Llama
model = Llama(model_path="CodeLlama_7B_nlp_pp_q8_0.gguf")
# These tokens mark the start of an NLP++ pass. For this demo it is safe to assume they
# should not appear in the output; if the model emits one it is hallucinating a new pass, so generation stops there.
stop = ["@NODES", "@CODE", "@DECL"]
def generate(input_text):
    output = model(input_text, max_tokens=128, stop=stop, echo=True)
    return output['choices'][0]['text']
input_text = gr.Textbox(lines=10, label="Enter your code to autocomplete")
output_text = gr.Textbox(label="Output code")
description = "Code generation for NLP++ with CodeLlama"
examples = [
    '# Find concept named parent under root and print "num" val for each child attribute\n',
    'L("iter") = getconcept(findroot(), L("parent_con"));\n',
    '# Match node _noun when preceded by _noun\n'
]
gr.Interface(fn=generate, inputs=input_text, outputs=output_text, title="CodeLlama for NLP++", description=description, examples=examples).launch()
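
# --- Example client usage (a minimal sketch, not part of the app above) ---
# Run this from a separate script while the app is serving. It assumes the
# default local address/port and the default "/predict" endpoint that
# gr.Interface exposes; the prompt is only an illustrative NLP++ comment.
#
# from gradio_client import Client
#
# client = Client("http://127.0.0.1:7860/")
# completion = client.predict(
#     '# Match node _noun when preceded by _noun\n',  # input_text
#     api_name="/predict",
# )
# print(completion)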