File size: 2,222 Bytes
65ee86e
 
 
 
 
 
 
7a5d912
 
 
1896b82
7249a2e
 
 
 
 
1896b82
65ee86e
 
 
7249a2e
7a5d912
 
 
 
 
7249a2e
7a5d912
 
 
 
 
1896b82
7249a2e
 
65ee86e
 
 
 
1896b82
 
 
 
 
 
 
 
7a5d912
1896b82
65ee86e
7249a2e
65ee86e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1896b82
 
65ee86e
7249a2e
 
 
 
 
65ee86e
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
import sbd from "sbd"
import { format } from "date-fns"

import { createLlamaPrompt } from "@/lib/createLlamaPrompt"

import { predict } from "./predict"

// Mutable module-level state shared across calls to `think`.

/** Last scene description received via an observation-only event. */
const context = {
  lastObservedScene: ""
}

/** One turn of the rolling conversation fed back into the prompt. */
type ChatMessage = {
  role: string;
  content: string;
}

/** Rolling user/assistant turn history (bounded inside `think`). */
const internalHistory: ChatMessage[] = []

// Maximum number of turns kept in the rolling history.
const MAX_HISTORY = 10

// Drop oldest turns until the history fits the cap. The original code
// only trimmed on user turns, so assistant pushes could grow past 10.
const trimHistory = (): void => {
  while (internalHistory.length > MAX_HISTORY) {
    internalHistory.shift()
  }
}

/**
 * Feed an event into Alice's conversational loop.
 *
 * @param event - What was seen or heard. Must be non-empty.
 * @param needAnswer - When false, the event only updates the observed
 *   scene (`context.lastObservedScene`) and `""` is returned; when true,
 *   the event is appended to the history and a reply is generated.
 * @returns The assistant's reply, truncated to at most two sentences.
 * @throws If `event` is empty, or if both prediction attempts fail.
 */
export const think = async (event: string, needAnswer: boolean): Promise<string> => {
  if (!event) {
    throw new Error("missing event")
  }

  if (!needAnswer) {
    // Observation only: remember the scene, no reply expected.
    context.lastObservedScene = event
    return ""
  }

  internalHistory.push({
    role: "user",
    content: event,
  })
  trimHistory()

  const prompt = createLlamaPrompt([
    {
      role: "system",
      content: [
        // Literal text in a date-fns pattern must be single-quoted:
        // the unescaped "at" was parsed as the `a` (AM/PM) and `t`
        // (Unix timestamp) tokens, garbling the rendered date.
        `Today's date is ${format(new Date(), "yyyy-MM-dd 'at' HH:mm (d)")}`,
        `You are Alice, a very friendly 25yo woman, curious about the world`,
        `Your life goal is to help people and interact them as a natural way`,
        `You are going to see and hear various things, and you need to act in a very natural way`,
        `If you see someone through your eyes, you need to interact with them`,
        `You should be ongoing and open, ask questions, be curious, do jokes etc`,
        `sometimes you have trouble hearing, if you don't understand just ignore and say nothing`,
        `You like to answer in just one sentence`,
        context.lastObservedScene ? `You are currently talking and interacting with ${context.lastObservedScene}` : '',
      ].filter(item => item).join(". ")
    },
    ...internalHistory,
  ])

  let result = ""
  try {
    result = await predict(prompt)
    if (!result.trim().length) {
      // An empty completion counts as a failure so we fall into the retry.
      throw new Error("no response")
    }
  } catch (firstErr) {
    // Retry once with a slightly perturbed prompt to nudge the model.
    console.error(`prediction of the response failed (${firstErr}), retrying..`)
    try {
      result = await predict(prompt + ".")
    } catch (err) {
      console.error(`prediction of the response failed again!`)
      throw new Error(`failed to generate the response ${err}`)
    }
  }

  // llama-2 is too chatty, let's keep 2 sentences at most
  const sentences = sbd.sentences(result).slice(0, 2).join(" ").trim()

  internalHistory.push({
    role: "assistant",
    content: sentences,
  })
  trimHistory()

  return sentences
}