import toast from 'react-hot-toast';

const WIPLogTypes = ['plans', 'tools', 'code'];
const AllLogTypes = [
  'plans',
  'tools',
  'code',
  'final_code',
  'final_error',
] as const;

export type ChunkBody = {
  type: (typeof AllLogTypes)[number];
  status: 'started' | 'completed' | 'failed' | 'running';
  timestamp?: string;
  payload:
    | Array<Record<string, string>> // PlansBody | ToolsBody
    | PrismaJson.FinalChatResult['payload'] // CodeBody & FinalCodeBody
    | PrismaJson.StructuredResult['error']; // ErrorBody
};

export type WIPChunkBodyGroup = ChunkBody & {
  duration?: number;
};
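// Illustrative only (the payload shape below is an assumption, not the actual
// backend contract): each line of the raw stream is a JSON-encoded ChunkBody,
// e.g.
//   {"type":"tools","status":"started","timestamp":"2024-01-01T00:00:00.000Z","payload":[{"name":"search"}]}
//   {"type":"tools","status":"completed","timestamp":"2024-01-01T00:00:02.000Z","payload":[{"name":"search"}]}
// After grouping, these two lines collapse into one WIPChunkBodyGroup that keeps
// the first timestamp and gets duration === 2000 (ms).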

/**
 * Parses the newline-delimited stream logs and groups consecutive chunks of
 * the same type.
 *
 * @param content - The raw stream log content, one JSON chunk per line.
 * @returns A tuple of the grouped work-in-progress sections, the final code
 * payload (if any), and the final error (if any).
 */
export const formatStreamLogs = (
  content: string | null | undefined,
): [
  WIPChunkBodyGroup[],
  PrismaJson.FinalChatResult['payload']?,
  PrismaJson.StructuredResult['error']?,
] => {
  if (!content) return [[], undefined];
  const streamLogs = content.split('\n').filter(log => !!log);

  // Hold back the last line: it may be an incomplete chunk still streaming in.
  const buffer = streamLogs.pop();
  const parsedStreamLogs: ChunkBody[] = [];
  try {
    streamLogs.forEach(streamLog =>
      parsedStreamLogs.push(JSON.parse(streamLog)),
    );
  } catch {
    toast.error('Error parsing stream logs');
    return [[], undefined];
  }

  if (buffer) {
    try {
      const lastLog = JSON.parse(buffer);
      parsedStreamLogs.push(lastLog);
    } catch {
      // The final line may be a partially streamed chunk; keep it out of the
      // parsed logs and surface it for debugging.
      console.log(buffer);
    }
  }

  // Merge consecutive logs of the same type, keeping the latest status and
  // payload but the first timestamp
  const groupedSections = parsedStreamLogs.reduce((acc, curr) => {
    const lastGroup = acc[acc.length - 1];
    if (
      acc.length > 0 &&
      lastGroup.type === curr.type &&
      curr.status !== 'started'
    ) {
      acc[acc.length - 1] = {
        ...curr,
        // always use the timestamp of the first log
        timestamp: lastGroup?.timestamp,
        // duration is the difference between the last log and the first log
        duration:
          lastGroup?.timestamp && curr.timestamp
            ? Date.parse(curr.timestamp) - Date.parse(lastGroup.timestamp)
            : undefined,
      };
    } else {
      acc.push(curr);
    }
    return acc;
  }, [] as WIPChunkBodyGroup[]);

  return [
    groupedSections.filter(section => WIPLogTypes.includes(section.type)),
    groupedSections.find(section => section.type === 'final_code')
      ?.payload as PrismaJson.FinalChatResult['payload'],
    groupedSections.find(section => section.type === 'final_error')
      ?.payload as PrismaJson.StructuredResult['error'],
  ];
};
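
// Usage sketch (illustrative; the log lines below are made up to match the
// ChunkBody type, not real stream output):
//
//   const raw = [
//     '{"type":"plans","status":"started","timestamp":"2024-01-01T00:00:00.000Z","payload":[]}',
//     '{"type":"plans","status":"completed","timestamp":"2024-01-01T00:00:01.500Z","payload":[{"step":"outline"}]}',
//   ].join('\n');
//
//   const [sections, finalCode, finalError] = formatStreamLogs(raw);
//   // sections        => one merged "plans" group with duration === 1500
//   // finalCode/Error => undefined, since no final_code / final_error chunk arrived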