"""Gradio demo: NLP++ code autocompletion with a quantized CodeLlama model."""

import gradio as gr
from llama_cpp import Llama

# Quantized (q8_0) 7B CodeLlama fine-tuned for NLP++; loaded once at startup.
model = Llama(model_path="CodeLlama_7B_nlp_pp_q8_0.gguf")

# These phrases/tokens indicate the start of a pass. For demonstration purposes,
# it's safe to assume that these should not be encountered in the output and
# represent a hallucination, so generation halts when any of them appears.
stop = ["@NODES", "@CODE", "@DECL"]


def generate(input_text):
    """Autocomplete NLP++ code with the CodeLlama model.

    Args:
        input_text: Prompt/code fragment to complete.

    Returns:
        The prompt followed by up to 128 generated tokens (echo=True makes
        the model include the original input in the returned text).
    """
    output = model(input_text, max_tokens=128, stop=stop, echo=True)
    return output['choices'][0]['text']


# NOTE: gr.inputs / gr.outputs were removed in modern Gradio releases;
# components are instantiated directly instead.
input_text = gr.Textbox(lines=10, label="Enter your code to autocomplete")
output_text = gr.Textbox(label="Output code")

description = "Code generation for NLP++ with CodeLlama"

examples = [
    '# Find concept named parent under root and print "num" val for each child attribute\n',
    'L("iter") = getconcept(findroot(), L("parent_con"));\n',
    '# Match node _noun when preceded by _noun\n'
]

demo = gr.Interface(
    fn=generate,
    inputs=input_text,
    outputs=output_text,
    title="CodeLlama for NLP++",
    description=description,
    examples=examples,
)

# Guard the launch so importing this module does not start the server.
if __name__ == "__main__":
    demo.launch()