File size: 1,686 Bytes
52b4c36
 
3ba9c0c
52b4c36
 
f80b091
 
 
52b4c36
a8e1cb0
c3e8f3d
3ba9c0c
52b4c36
3ba9c0c
 
f80b091
52b4c36
3ba9c0c
 
f80b091
 
 
 
 
42501f7
f80b091
 
 
 
 
 
52b4c36
f80b091
 
 
a8e1cb0
f80b091
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
52b4c36
f80b091
 
 
 
 
 
 
52b4c36
f80b091
52b4c36
f80b091
3ba9c0c
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
import { OpenAIStream, StreamingTextResponse } from 'ai';
import OpenAI from 'openai';

import { auth } from '@/auth';
import {
  ChatCompletionMessageParam,
  ChatCompletionContentPart,
  ChatCompletionContentPartImage,
} from 'openai/resources';
import { MessageWithSelectedDataset } from '../../../lib/types';
// import { postAgentChat } from '@/lib/fetch';

// Next.js route segment config: run this handler on the Edge runtime.
export const runtime = 'edge';

// Module-scoped OpenAI client, created once per edge isolate.
// Key comes from the environment; missing OPENAI_API_KEY surfaces at call time.
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});

/**
 * POST /api/... — streams a GPT-4 Vision chat completion back to the client.
 *
 * Expects a JSON body of `{ messages: MessageWithSelectedDataset[] }`, where
 * each message may carry a `dataset` of entities with image `url`s.
 *
 * @returns 401 when there is no authenticated session with an email;
 *          otherwise a `StreamingTextResponse` wrapping the OpenAI stream.
 */
export async function POST(req: Request) {
  // Authenticate first so unauthenticated callers are rejected before we
  // spend any work parsing (or logging) their request body.
  const session = await auth();
  if (!session?.user?.email) {
    return new Response('Unauthorized', {
      status: 401,
    });
  }

  const json = await req.json();
  const { messages } = json as {
    messages: MessageWithSelectedDataset[];
  };

  // Convert each incoming message into OpenAI's multimodal content format:
  // one text part followed by an image_url part per dataset entity.
  const formattedMessage: ChatCompletionMessageParam[] = messages.map(
    message => {
      const { dataset } = message;

      const contentWithImage: ChatCompletionContentPart[] = [
        {
          type: 'text',
          text: message.content as string,
        },
        ...(dataset ?? []).map(
          entity =>
            ({
              type: 'image_url',
              image_url: { url: entity.url },
            }) satisfies ChatCompletionContentPartImage,
        ),
      ];
      // NOTE(review): every message is forced to role 'user', so any
      // assistant turns in the history are re-labelled — confirm this is
      // intentional before relying on multi-turn context.
      return {
        role: 'user',
        content: contentWithImage,
      };
    },
  );

  const res = await openai.chat.completions.create({
    // NOTE(review): 'gpt-4-vision-preview' is a deprecated model id; migrate
    // to a current vision-capable model (e.g. gpt-4o) when feasible.
    model: 'gpt-4-vision-preview',
    messages: formattedMessage,
    temperature: 0.3,
    stream: true,
    max_tokens: 300,
  });

  // Adapt the OpenAI SSE stream into a Response the browser can consume.
  const stream = OpenAIStream(res);

  return new StreamingTextResponse(stream);
}