"use server"
import { generateSeed } from "@/lib/generateSeed"
import { StableCascadeParams } from "@/types"
import { addBase64HeaderToPng } from "./addBase64HeaderToPng"
// Base URL of the Stable Cascade Gradio space (it must be public for a plain fetch() to reach it)
const gradioApi = `https://jbilcke-hf-stable-cascade-api.hf.space`
// Shared secret sent as the first positional Gradio argument; empty string when the env var is unset
const microserviceApiKey = `${process.env.MICROSERVICE_API_SECRET_TOKEN || ""}`
export async function stableCascade({
prompt,
negativePrompt,
guidanceScale,
nbPriorInferenceSteps,
nbDecoderInferenceSteps,
seed,
width,
height,
}: StableCascadeParams): Promise<string> {
// console.log(`calling `+ gradioApi + (gradioApi.endsWith("/") ? "" : "/") + "api/predict")
// remember: a space needs to be public for the classic fetch() to work
const res = await fetch(gradioApi + (gradioApi.endsWith("/") ? "" : "/") + "api/predict", {
method: "POST",
headers: {
"Content-Type": "application/json",
// Authorization: `Bearer ${hfApiToken}`,
},
body: JSON.stringify({
fn_index: 0, // <- is it 0 or 1?
data: [
microserviceApiKey,
prompt,
negativePrompt,
height,
width,
guidanceScale,
seed || generateSeed(),
nbPriorInferenceSteps,
nbDecoderInferenceSteps
],
}),
cache: "no-store",
// we can also use this (see https://vercel.com/blog/vercel-cache-api-nextjs-cache)
// next: { revalidate: 1 }
})
const { data } = await res.json()
// console.log("data:", data)
// Recommendation: handle errors
if (res.status !== 200 || !Array.isArray(data)) {
// This will activate the closest `error.js` Error Boundary
throw new Error(`Failed to fetch data (status: ${res.status})`)
}
// console.log("data:", data.slice(0, 50))
const base64Content = (data?.[0] || "") as string
if (!base64Content) {
throw new Error(`invalid response (no content)`)
}
return addBase64HeaderToPng(base64Content)
} |