"use server";

import { HfInference } from "@huggingface/inference";
import fs from "fs/promises";

import { Form } from "@/_types";
import prisma from "@/_utils/prisma";

// Generate a logo from the submitted brand form: a chat model writes the image
// prompt, a LoRA text-to-image model renders it, and the result is saved to
// disk and recorded in the database.
export async function generate({ brand_name, industry, description }: Form) {
  if (!process.env.PUBLIC_FILE_UPLOAD_DIR) {
    throw new Error("PUBLIC_FILE_UPLOAD_DIR is not set");
  }
  // Hugging Face inference client; caching is disabled so repeated calls with
  // the same brand details still produce a fresh generation.
  const inference = new HfInference(process.env.HF_ACCESS_TOKEN, {
    use_cache: false,
  });

  // Few-shot chat completion: the example exchanges steer the model toward
  // short "logo, style, subject" prompts before the real brand is supplied.
  const prompt: any = await inference
    .chatCompletion({
      model: "meta-llama/Meta-Llama-3.1-70B-Instruct",
      messages: [
        { role: "user", content: "lee, a noodle restaurant" },
        {
          role: "assistant",
          content:
            'logo,Minimalist,A pair of chopsticks and a bowl of rice with the word "Lee",',
        },
        { role: "user", content: "cat shop" },
        { role: "assistant", content: "wablogo,Minimalist,Leaf and cat,logo," },
        { role: "user", content: "Ato, real estate company" },
        {
          role: "assistant",
          content:
            'logo,Minimalist,A man stands in front of a door,his shadow forming the word "A",',
        },
        { role: "user", content: `${brand_name}, ${description}, ${industry}` },
      ],
      temperature: 0.5,
      max_tokens: 1024,
      top_p: 0.7,
    })
    .catch((err) => {
      // Surface API failures as a value instead of throwing from the action.
      return { error: err.message };
    });

  // Propagate chat-completion failures to the caller.
  if (prompt?.error) {
    return {
      error: prompt.error,
    };
  }

  if (prompt?.choices?.[0]?.message?.content) {
    // Render the generated prompt with the logo-design LoRA on FLUX.1-dev.
    const hfRequest = await inference.textToImage({
      inputs: prompt.choices[0].message.content,
      model: "Shakker-Labs/FLUX.1-dev-LoRA-Logo-Design",
      parameters: {
        num_inference_steps: 24,
        guidance_scale: 3.5,
      },
    });

    // textToImage resolves to a Blob; convert it to bytes for fs.writeFile.
    const buffer = await hfRequest.arrayBuffer();
    const array = new Uint8Array(buffer);

    // Persist the prompt text and use the generated row id as the file name.
    const newImage = await prisma.logo.create({
      data: {
        name: prompt.choices[0].message.content,
      },
    });

    const indexFile = newImage.id;

    // Make sure the upload directory exists; { recursive: true } makes this a
    // no-op when it is already there.
    await fs.mkdir(process.env.PUBLIC_FILE_UPLOAD_DIR, { recursive: true });
    await fs.writeFile(
      `${process.env.PUBLIC_FILE_UPLOAD_DIR}/${indexFile}.png`,
      array
    );

    // The returned id matches the saved <id>.png file name.
    return { data: indexFile };
  }

  return {
    error: "Failed to generate logo",
  };
}
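
// --- Usage sketch (illustrative, not part of this module) ---
// A minimal example of how a client component might call this server action.
// The import path, handler name, and form shape below are assumptions; only
// the `generate` signature and its `{ data } | { error }` result shape come
// from the code above.
//
//   "use client";
//   import { generate } from "@/_actions/generate"; // assumed path
//
//   async function onSubmit(form: {
//     brand_name: string;
//     industry: string;
//     description: string;
//   }) {
//     const result = await generate(form);
//     if ("error" in result) {
//       console.error(result.error);
//       return;
//     }
//     // The saved image lives at <PUBLIC_FILE_UPLOAD_DIR>/<result.data>.png
//     console.log("logo id:", result.data);
//   }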