"use server"

import { StableDiffusionParams } from "@/types"

import { serverHuggingfaceApiKey, serverHuggingfaceInferenceApiFileType, serverHuggingfaceInferenceApiModel, serverHuggingfaceInferenceApiModelRefinerModel, serverHuggingfaceInferenceApiModelTrigger } from "./config"

export async function stableDiffusion({
  prompt,
  negativePrompt,
  guidanceScale,
  seed,
  width,
  height,
  numInferenceSteps,
  hfApiKey,
}: StableDiffusionParams) {
  // fail fast if the caller didn't provide a prompt
  if (!prompt) {
    const error = `cannot call the rendering API without a prompt, aborting..`
    console.error(error)
    throw new Error(error)
  }

  // prefer the API key passed by the caller, and fall back to the server-side configuration
  const huggingfaceApiKey = hfApiKey || serverHuggingfaceApiKey
  const huggingfaceInferenceApiModel = serverHuggingfaceInferenceApiModel
  const huggingfaceInferenceApiModelRefinerModel = serverHuggingfaceInferenceApiModelRefinerModel
  const huggingfaceInferenceApiModelTrigger = serverHuggingfaceInferenceApiModelTrigger
  const huggingfaceInferenceApiFileType = serverHuggingfaceInferenceApiFileType

  try {
    if (!huggingfaceApiKey) {
      throw new Error(`invalid huggingfaceApiKey, you need to configure your HF_API_TOKEN`)
    }

    if (!huggingfaceInferenceApiModel) {
      throw new Error(`invalid huggingfaceInferenceApiModel, you need to configure your HF_INFERENCE_API_BASE_MODEL`)
    }

    if (!huggingfaceInferenceApiModelRefinerModel) {
      throw new Error(`invalid huggingfaceInferenceApiModelRefinerModel, you need to configure your HF_INFERENCE_API_REFINER_MODEL`)
    }

    const baseModelUrl = `https://api-inference.huggingface.co/models/${huggingfaceInferenceApiModel}`

    // prepend the model's trigger word (if any) to the user prompt
    const positivePrompt = [
      huggingfaceInferenceApiModelTrigger || "",
      prompt,
    ].filter(x => x).join(", ")

    // first pass: ask the base model to generate an image
    const res = await fetch(baseModelUrl, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Accept: huggingfaceInferenceApiFileType,
        Authorization: `Bearer ${huggingfaceApiKey}`,
      },
      body: JSON.stringify({
        inputs: positivePrompt,
        parameters: {
          num_inference_steps: numInferenceSteps,
          guidance_scale: guidanceScale,
          width,
          height,
        },
        use_cache: false,
      }),
      cache: "no-store",
    })

    if (res.status !== 200) {
      const content = await res.text()
      console.error(content)
      throw new Error('Failed to fetch data')
    }

    const blob = await res.arrayBuffer()

    const contentType = res.headers.get('content-type')

    let assetUrl = `data:${contentType};base64,${Buffer.from(blob).toString('base64')}`

    try {
      // second pass: send the base image to the refiner model (failures here are not fatal)
      const refinerModelUrl = `https://api-inference.huggingface.co/models/${huggingfaceInferenceApiModelRefinerModel}`

      const refinerRes = await fetch(refinerModelUrl, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          Authorization: `Bearer ${huggingfaceApiKey}`,
        },
        body: JSON.stringify({
          inputs: Buffer.from(blob).toString('base64'),
          parameters: {
            prompt: positivePrompt,
            num_inference_steps: numInferenceSteps,
            guidance_scale: guidanceScale,
            width,
            height,
          },
          use_cache: false,
        }),
        cache: "no-store",
      })

      if (refinerRes.status !== 200) {
        const content = await refinerRes.json()
        throw new Error(content?.error || 'Failed to fetch data')
      }

      const refinedBlob = await refinerRes.arrayBuffer()

      const refinedContentType = refinerRes.headers.get('content-type')

      assetUrl = `data:${refinedContentType};base64,${Buffer.from(refinedBlob).toString('base64')}`
    } catch (err) {
      console.log(`Refiner step failed, but this is not a blocker. Error details: ${err}`)
    }

    return assetUrl
  } catch (err) {
    console.error(err)
    return ""
  }
}
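
// A minimal usage sketch (not part of the original file): how this server action
// might be called from other server-side code. The parameter values below are
// illustrative assumptions, not values taken from the project.
//
//   const imageDataUri = await stableDiffusion({
//     prompt: "a watercolor painting of a lighthouse at dawn",
//     negativePrompt: "blurry, low quality",
//     guidanceScale: 7.5,
//     seed: 42,
//     width: 1024,
//     height: 1024,
//     numInferenceSteps: 25,
//     hfApiKey: "", // an empty string falls back to the server-side HF_API_TOKEN
//   })
//
//   // on success `imageDataUri` is a base64 data URI, on failure it is ""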