import { loadQAStuffChain } from "langchain/chains";
import { Document } from "langchain/document";
import { pull } from "langchain/hub";
import { AgentExecutor, createOpenAIToolsAgent } from "langchain/agents";
import { Calculator } from "@langchain/community/tools/calculator";
import { ChatOpenAI } from "@langchain/openai";
import type { ChatPromptTemplate } from "@langchain/core/prompts";

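// Configuration is read from the environment, with defaults that target an
// OpenAI-compatible server such as LocalAI (the `api` host looks like a
// docker-compose service name); the API key is only a placeholder.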
const pathToLocalAI = process.env['OPENAI_API_BASE'] || 'http://api:8080/v1';
const fakeApiKey = process.env['OPENAI_API_KEY'] || '-';
const modelName = process.env['MODEL_NAME'] || 'gpt-3.5-turbo';

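// Builds a ChatOpenAI client pointed at the local OpenAI-compatible endpoint,
// with a system prefix message so every answer comes back in pirate speak.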
function getModel(): ChatOpenAI {
  return new ChatOpenAI({
    prefixMessages: [
      {
        role: "system",
        content: "You are a helpful assistant that answers in pirate language",
      },
    ],
    modelName: modelName,
    maxTokens: 50,
    openAIApiKey: fakeApiKey,
    maxRetries: 2
  }, {
    basePath: pathToLocalAI,
    apiKey: fakeApiKey,
  });
}

// Minimal example.
export const run = async () => {
  const model = getModel();
  console.log(`about to model.invoke at ${new Date().toUTCString()}`);
  const res = await model.invoke(
    "What would be a good company name a company that makes colorful socks?"
  );
  console.log(`${new Date().toUTCString()}`);
  console.log({ res });
};

await run();

// This example uses the `StuffDocumentsChain`
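// It stuffs all of the input documents into a single prompt and asks the
// model the question over that combined context.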
export const run2 = async () => {
  const model = getModel();
  const chainA = loadQAStuffChain(model);
  const docs = [
    new Document({ pageContent: "Harrison went to Harvard." }),
    new Document({ pageContent: "Ankush went to Princeton." }),
  ];
  const resA = await chainA.invoke({
    input_documents: docs,
    question: "Where did Harrison go to college?",
  });
  console.log({ resA });
};

await run2();

// Quickly thrown together example of using tools + agents.
// This seems like it should work, but it doesn't yet.
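// Note: createOpenAIToolsAgent relies on OpenAI-style tool calling, so the
// backing model/server must support that API for the agent to work.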
export const toolAgentTest = async () => {
  const model = getModel();

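  // Pull the standard openai-tools-agent prompt from the LangChain Hub.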
  const prompt = await pull<ChatPromptTemplate>("hwchase17/openai-tools-agent");

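  // A single Calculator tool gives the agent a way to do arithmetic.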
  const tools = [new Calculator()];

  const agent = await createOpenAIToolsAgent({
    llm: model,
    tools: tools,
    prompt: prompt
  });

  console.log("Loaded agent.");

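  // The executor runs the agent loop: call the model, execute any requested
  // tools, and feed the results back until a final answer is produced.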
  const agentExecutor = new AgentExecutor({
    agent,
    tools,
  });

  const input = `What is the value of (500 * 2) + 350 - 13?`;

  console.log(`Executing with input "${input}"...`);

  const result = await agentExecutor.invoke({ input });

  console.log(`Got output ${result.output}`);
};

await toolAgentTest();