rkwsuper committed on
Commit
2c70d7c
·
1 Parent(s): 1110a58

gradio app.py

Browse files
Files changed (2) hide show
  1. app.py +35 -0
  2. requirements.txt +0 -0
app.py ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load model and tokenizer once at startup so every request reuses them.
model_name = "rkwsuper/lora_model"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
model.eval()  # inference only: disables dropout and other train-time behavior


def generate_text(prompt, max_length=100, temperature=1.0):
    """Generate a text continuation of *prompt* with the loaded model.

    Args:
        prompt: Input text to continue.
        max_length: Total token budget (prompt tokens + generated tokens).
        temperature: Sampling temperature; higher values produce more
            random output, lower values more deterministic output.

    Returns:
        The decoded generation (including the prompt) as a string.
    """
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(
        inputs["input_ids"],
        # Fix: without do_sample=True generate() greedy-decodes and
        # silently ignores the temperature setting, so the Temperature
        # slider had no effect.
        do_sample=True,
        # Pass the attention mask explicitly: pad_token_id is set to the
        # EOS token below, so omitting the mask triggers a transformers
        # warning and can make the model misread the input boundary.
        attention_mask=inputs["attention_mask"],
        max_length=max_length,
        temperature=temperature,
        pad_token_id=tokenizer.eos_token_id,
    )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)


# Create the Gradio interface: the textbox and sliders map positionally
# onto generate_text's (prompt, max_length, temperature) parameters.
iface = gr.Interface(
    fn=generate_text,
    inputs=[
        gr.Textbox(label="Enter Prompt"),
        gr.Slider(10, 300, value=100, step=10, label="Max Length"),
        gr.Slider(0.1, 2.0, value=1.0, step=0.1, label="Temperature"),
    ],
    outputs="text",
    title="Hugging Face Model Text Generator",
    description="This interface generates text based on your input using a fine-tuned Hugging Face model.",
)

# Launch only when run as a script, not when imported as a module.
if __name__ == "__main__":
    iface.launch()
requirements.txt ADDED
File without changes