Gosula committed on
Commit 5da9d3e · 1 Parent(s): acc4514

Delete app.py

Files changed (1)
  1. app.py +0 -26
app.py DELETED
@@ -1,26 +0,0 @@
- import gradio as gr
-
- from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
- import torch
- from peft import LoraConfig, PeftModel
-
- base_model_name = "microsoft/phi-2"
- new_model = "./checkpoint-360"
-
- model = AutoModelForCausalLM.from_pretrained( "microsoft/phi-2", trust_remote_code=True)
- model.config.use_cache = False
- model.load_adapter(new_model)
-
- tokenizer = AutoTokenizer.from_pretrained(base_model_name, trust_remote_code=True)
- tokenizer.pad_token = tokenizer.eos_token
- tokenizer.padding_side = "right"
-
- def QLoRA_Chatgpt(prompt):
-     print(prompt)
-     pipe = pipeline(task="text-generation", model=model, tokenizer=tokenizer, max_length=200)
-     result = pipe(f"<s>[INST] {prompt} [/INST]")
-     return(result[0]['generated_text'])
-     # return "Hello " + name + "!!"
-
- iface = gr.Interface(fn=QLoRA_Chatgpt, inputs=gr.Textbox("how can help you today", label='prompt'), outputs=gr.Textbox(label='Generated-output'))
- iface.launch(share=True)