"use server"

import { generateSeed } from "@/lib/generateSeed"
import { StableCascadeParams } from "@/types"

import { addBase64HeaderToPng } from "./addBase64HeaderToPng"

// Base URL of the hosted Stable Cascade Gradio space
const gradioApi = `https://jbilcke-hf-stable-cascade-api.hf.space`

// Secret token forwarded to the space (empty string if not configured)
const microserviceApiKey = `${process.env.MICROSERVICE_API_SECRET_TOKEN || ""}`

/**
 * Generate an image with Stable Cascade by calling the remote Gradio space,
 * returning the result as a base64-encoded PNG string (with its data URI
 * header added by addBase64HeaderToPng).
 */
export async function stableCascade({
  prompt,
  negativePrompt,
  guidanceScale,
  nbPriorInferenceSteps,
  nbDecoderInferenceSteps,
  seed,
  width,
  height,
}: StableCascadeParams): Promise<string> {

  // Call the space's REST endpoint. With fn_index 0, the positional `data`
  // array must follow the input order expected by the space: the secret token
  // first, then the generation parameters (note: height before width).
  const res = await fetch(gradioApi + (gradioApi.endsWith("/") ? "" : "/") + "api/predict", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      fn_index: 0,
      data: [
        microserviceApiKey,
        prompt,
        negativePrompt,
        height,
        width,
        guidanceScale,
        // fall back to a random seed when none (or 0) is provided
        seed || generateSeed(),
        nbPriorInferenceSteps,
        nbDecoderInferenceSteps
      ],
    }),
    // always hit the API: generation results should not be cached
    cache: "no-store",
  })

  const { data } = await res.json()

  if (res.status !== 200 || !Array.isArray(data)) {
    throw new Error(`Failed to fetch data (status: ${res.status})`)
  }

  // the first element of the response payload is the base64-encoded image
  const base64Content = (data?.[0] || "") as string

  if (!base64Content) {
    throw new Error(`invalid response (no content)`)
  }

  // restore the PNG base64 header so the caller gets a usable data URI
  return addBase64HeaderToPng(base64Content)
}
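
/*
 * Hypothetical usage sketch (not part of the original module): the parameter
 * values below are illustrative assumptions, not documented defaults.
 *
 *   const image = await stableCascade({
 *     prompt: "a photo of a cat wearing a spacesuit",
 *     negativePrompt: "blurry, low quality",
 *     guidanceScale: 4,
 *     nbPriorInferenceSteps: 20,
 *     nbDecoderInferenceSteps: 10,
 *     seed: 0, // falsy seed -> a random one is generated by this action via generateSeed()
 *     width: 1024,
 *     height: 1024,
 *   })
 *   // `image` is the base64 PNG string returned by addBase64HeaderToPng()
 */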