Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -46,13 +46,13 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
 model_checkpoint = "gokaygokay/Flux-Prompt-Enhance"
 tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
 model = AutoModelForSeq2SeqLM.from_pretrained(model_checkpoint)
-enhancer = pipeline(
-
-
-
-
-
-
+enhancer = pipeline(
+    'text2text-generation',
+    model=model,
+    tokenizer=tokenizer,
+    repetition_penalty= 1.2,
+    device=device
+)
 max_target_length = 256
 
 @spaces.GPU
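For reference, a minimal, self-contained sketch of how the pipeline added in this commit is typically used. The pipeline configuration mirrors the added lines above; the "enhance prompt: " prefix follows the usage example on the gokaygokay/Flux-Prompt-Enhance model card, and the sample input prompt is invented for illustration.

# Hypothetical usage sketch, not part of this commit: builds the same
# text2text-generation pipeline as the added lines in the diff and calls it
# once. The "enhance prompt: " prefix follows the model card's usage example;
# the sample prompt below is made up.
import torch
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer, pipeline

device = "cuda" if torch.cuda.is_available() else "cpu"

model_checkpoint = "gokaygokay/Flux-Prompt-Enhance"
tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
model = AutoModelForSeq2SeqLM.from_pretrained(model_checkpoint)

# Same configuration as the lines added in this diff.
enhancer = pipeline(
    'text2text-generation',
    model=model,
    tokenizer=tokenizer,
    repetition_penalty=1.2,
    device=device,
)
max_target_length = 256

prefix = "enhance prompt: "
short_prompt = "a cat wearing a wizard hat"
result = enhancer(prefix + short_prompt, max_length=max_target_length)
print(result[0]["generated_text"])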