code
stringlengths 419
138k
| apis
sequencelengths 1
8
| extract_api
stringlengths 67
7.3k
|
---|---|---|
/**
* Copyright 2021 Rochester Institute of Technology (RIT). Developed with
* government support under contract 70RCSA22C00000008 awarded by the United
* States Department of Homeland Security for Cybersecurity and Infrastructure Security Agency.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the “Software”), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package edu.rit.se.nvip.reconciler.openai;
import com.theokanning.openai.OpenAiHttpException;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;
/**
 * Binary filter that asks an OpenAI chat model whether a scraped string looks
 * like a valid CVE description. The model is instructed to answer with PASS
 * ("0") or FAIL ("1"); anything else is reported as an invalid return.
 */
public class GPTFilterModel {
    private final Logger logger = LogManager.getLogger(getClass().getSimpleName());
    private static final String MODEL = "gpt-3.5-turbo";
    private static final double TEMP = 0.0;
    private static final String PASS = "0";
    private static final String FAIL = "1";
    private static final String SYS_MESSAGE = String.format("You are a validation engine for vulnerability data scraped from the web." +
            " If a user's message looks like a CVE description without errors, respond with \"%s\" or else \"%s\"", PASS, FAIL);
    private static final String SYS_ROLE = "system";
    private static final String USER_ROLE = "user";
    private OpenAIRequestHandler requestHandler;

    public GPTFilterModel() {
        requestHandler = OpenAIRequestHandler.getInstance();
    }

    /** Allows tests to inject a mock/stub request handler. */
    public void setRequestHandler(OpenAIRequestHandler handler) {
        this.requestHandler = handler;
    }

    /**
     * Asks the model whether {@code arg} looks like a valid CVE description.
     *
     * @param arg candidate description text
     * @return true for PASS; true is also the fail-open default when the API
     *         call itself errors out or is interrupted
     * @throws OpenAiInvalidReturnException if the model answers something other than PASS/FAIL
     */
    public boolean callModel(String arg) throws OpenAiInvalidReturnException {
        try {
            ChatCompletionRequest request = formRequest(arg);
            Future<ChatCompletionResult> futureRes = requestHandler.createChatCompletion(request, RequestorIdentity.FILTER);
            ChatCompletionResult res = futureRes.get();
            return getAnswer(res);
        } catch (InterruptedException ex) {
            Thread.currentThread().interrupt(); // restore the interrupt flag for callers
            logger.error("GPT filter call interrupted", ex);
            return true; // fail open: keep the description when we cannot judge it
        } catch (OpenAiHttpException | ExecutionException ex) {
            logger.error("GPT filter call failed", ex);
            return true; // fail open: keep the description when we cannot judge it
        }
    }

    /** Token count the request for {@code description} would consume. */
    public int tokenCount(String description) {
        return requestHandler.chatCompletionTokenCount(formRequest(description));
    }

    // maxTokens(1) because the expected answer is a single "0"/"1" token.
    private ChatCompletionRequest formRequest(String description) {
        List<ChatMessage> messages = formMessages(description);
        return ChatCompletionRequest.builder().model(MODEL).temperature(TEMP).n(1).messages(messages).maxTokens(1).build();
    }

    // System prompt first, then the candidate description as the user turn.
    private List<ChatMessage> formMessages(String description) {
        List<ChatMessage> messages = new ArrayList<>();
        messages.add(new ChatMessage(SYS_ROLE, SYS_MESSAGE));
        messages.add(new ChatMessage(USER_ROLE, description));
        return messages;
    }

    // Maps the model's single-token answer to a boolean; rejects anything else.
    private boolean getAnswer(ChatCompletionResult res) throws OpenAiInvalidReturnException {
        String answer = res.getChoices().get(0).getMessage().getContent();
        switch (answer) {
            case PASS:
                return true;
            case FAIL:
                return false;
            default:
                throw new OpenAiInvalidReturnException("OpenAi responded with \"" + answer + "\"");
        }
    }

    /** Thrown when the model's answer is neither PASS nor FAIL. */
    public static class OpenAiInvalidReturnException extends Exception {
        public OpenAiInvalidReturnException(String errorMessage) {
            super(errorMessage);
        }
    }

    /** Smoke test: fires 5 concurrent filter calls and prints the verdicts. */
    public static void main(String[] args) throws OpenAiInvalidReturnException, InterruptedException {
        GPTFilterModel model = new GPTFilterModel();
        ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        for (int i = 0; i < 5; i++) {
            int finalI = i;
            executor.submit(() -> {
                try {
                    boolean result = model.callModel("testing # " + finalI);
                    System.out.println("trial # " + finalI + " evaluated as " + result);
                } catch (OpenAiInvalidReturnException e) {
                    System.out.println(e.toString());
                }
            });
        }
        executor.shutdown();
        // Force-stop any stragglers if the grace period elapses.
        if (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
            executor.shutdownNow();
        }
        OpenAIRequestHandler.getInstance().shutdown();
    }
}
| [
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] | [((3549, 3656), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3549, 3648), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3549, 3635), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3549, 3616), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3549, 3611), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3549, 3593), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')] |
package com.example.gpt3javaexample.services;
import com.example.gpt3javaexample.aop.SaveToLogs;
import com.theokanning.openai.OpenAiService;
import com.theokanning.openai.completion.CompletionChoice;
import com.theokanning.openai.completion.CompletionRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@Service
public class GPTService {

    /** Maximum completion tokens per request, injected from application properties. */
    @Value("${openai.max_tokens}")
    private int MAX_TOKENS;

    /** OpenAI model name, injected from application properties. */
    @Value("${openai.model}")
    private String MODEL;

    private final OpenAiService service;

    /**
     * Running "Input: ...\nOutput: ..." transcript used as the prompt of every
     * request. NOTE(review): this @Service is a singleton, so the transcript is
     * shared by ALL callers; methods are synchronized so concurrent requests
     * cannot corrupt the buffer, but distinct users still share one history.
     */
    private final StringBuilder chatHistory;

    @Autowired
    public GPTService(OpenAiService service) {
        this.service = service;
        this.chatHistory = new StringBuilder();
    }

    /**
     * Sends the prompt (with accumulated history) to the completion API and
     * appends the model's reply to the history.
     *
     * @param prompt  the user's new input
     * @param newChat when TRUE, the history is cleared first; null is treated
     *                as false (previously this auto-unboxed and NPE'd on null)
     * @return the concatenated completion text, or a fallback when empty
     */
    @SaveToLogs
    public synchronized String doRequest(String prompt, Boolean newChat) {
        if (Boolean.TRUE.equals(newChat)) {
            clearHistory();
        }
        chatHistory.append("Input: ").append(prompt).append("\nOutput: ");
        CompletionRequest request = CompletionRequest.builder()
                .prompt(chatHistory.toString())
                .model(MODEL)
                .maxTokens(MAX_TOKENS)
                .build();
        String response = service.createCompletion(request).getChoices().stream()
                .map(CompletionChoice::getText)
                .reduce(String::concat)
                .orElse("I don't know what to say");
        chatHistory.append(response).append("\n");
        return response;
    }

    /** Resets the shared conversation history. */
    public synchronized void clearHistory() {
        chatHistory.setLength(0);
    }
}
| [
"com.theokanning.openai.completion.CompletionRequest.builder"
] | [((1077, 1246), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1077, 1221), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1077, 1182), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1077, 1152), 'com.theokanning.openai.completion.CompletionRequest.builder')] |
package br.com.alura.screenmatch.service;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
public class ConsumoChatGPT {

    /**
     * API key read from the environment. SECURITY: the previous revision
     * hard-coded a live OpenAI key in source control — that key is compromised
     * and must be revoked; never commit secrets.
     */
    private static final String API_KEY = System.getenv("OPENAI_API_KEY");

    /**
     * Translates the given text into Portuguese via the OpenAI completion API.
     *
     * @param texto the text to translate
     * @return the translation (first completion choice)
     */
    public static String obterTraducao(String texto) {
        OpenAiService service = new OpenAiService(API_KEY);
        CompletionRequest requisicao = CompletionRequest.builder()
                .model("text-davinci-003")
                .prompt("traduza para o português o texto: " + texto)
                .maxTokens(1000)
                .temperature(0.7)
                .build();
        var resposta = service.createCompletion(requisicao);
        return resposta.getChoices().get(0).getText();
    }
}
| [
"com.theokanning.openai.completion.CompletionRequest.builder"
] | [((389, 622), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((389, 597), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((389, 563), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((389, 530), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((389, 459), 'com.theokanning.openai.completion.CompletionRequest.builder')] |
package me.bowon.springbootdeveloper.controller;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
import lombok.RequiredArgsConstructor;
import me.bowon.springbootdeveloper.domain.Song;
import me.bowon.springbootdeveloper.domain.YoutubeData;
import me.bowon.springbootdeveloper.service.BlogService;
import me.bowon.springbootdeveloper.service.GptService;
import me.bowon.springbootdeveloper.service.YoutubeService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import me.bowon.springbootdeveloper.service.BlogService;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.List;
@RequiredArgsConstructor
@RestController
@RequestMapping(value = "/gpt")
public class GptTest {

    @Value("${openai.api-key}")
    private String apiKey;

    private final GptService gptService;
    private final YoutubeService youtubeService;

    /** Prompt template prepended to the user's diary text (desired output format + instruction). */
    private final String promptFormat =
            "Desired Format: 1. song-singer, \n Input: 다음 일기를 보고 노래 3가지를 추천해줘 \n";

    /**
     * Asks GPT for three song recommendations based on the posted diary text,
     * parses the songs out of the completion, and resolves each to YouTube data.
     *
     * @param request diary text from the request body
     * @return YouTube data for the recommended songs
     */
    @PostMapping("/post")
    public List<YoutubeData> sendQuestion(@RequestBody String request) throws GeneralSecurityException, IOException {
        OpenAiService service = new OpenAiService(apiKey);
        CompletionRequest completionRequest = CompletionRequest.builder()
                .prompt(promptFormat + request)
                .model("text-davinci-003")
                .echo(false)
                .maxTokens(100)
                .temperature(0.7)
                .build();
        // Local variable rather than the old instance field: controllers are
        // singletons, so per-request state in a field is a data race.
        String data = service.createCompletion(completionRequest).getChoices().toString();
        List<Song> songs = gptService.parseSong(data);
        System.out.println(songs);
        List<YoutubeData> youtubeDataList = youtubeService.youtubeApi(songs);
        return youtubeDataList;
    }
}
| [
"com.theokanning.openai.completion.CompletionRequest.builder"
] | [((1721, 1959), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1721, 1934), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1721, 1900), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1721, 1868), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1721, 1839), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1721, 1796), 'com.theokanning.openai.completion.CompletionRequest.builder')] |
package com.github.pablwoaraujo;
import java.util.Arrays;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
public class Main {
public static void main(String[] args) {
var user = "Gere 5 produtos";
var system = "Você é um gerador de produtos fictícios para um ecommerce e deve gerar apenas o nome dos produtos solicitados pelo usuário";
var apiKey = System.getenv("OPENAI_API_KEY");
OpenAiService service = new OpenAiService(apiKey);
ChatCompletionRequest completionRequest = ChatCompletionRequest
.builder()
.model("gpt-3.5-turbo")
.messages(Arrays.asList(
new ChatMessage(ChatMessageRole.USER.value(), user),
new ChatMessage(ChatMessageRole.SYSTEM.value(), system)))
.build();
service
.createChatCompletion(completionRequest)
.getChoices()
.forEach(c -> System.out.println(c.getMessage().getContent()));
System.out.println("Hello world!");
}
} | [
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value"
] | [((893, 921), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((970, 1000), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')] |
package br.com.fiap.gsjava.controllers;
import br.com.fiap.gsjava.models.ChatGPT;
import br.com.fiap.gsjava.repositories.ChatGPTRepository;
import br.com.fiap.gsjava.service.OpenAiService;
import jakarta.validation.ConstraintViolationException;
import jakarta.validation.Valid;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.web.PageableDefault;
import org.springframework.data.web.PagedResourcesAssembler;
import org.springframework.hateoas.EntityModel;
import org.springframework.hateoas.PagedModel;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.server.ResponseStatusException;
import com.theokanning.openai.completion.CompletionRequest;
import org.springframework.data.domain.Pageable;
import org.slf4j.Logger;
@RestController
@RequestMapping("/chatbot")
public class ChatGPTController {

    @Autowired
    ChatGPTRepository repo;

    @Autowired
    PagedResourcesAssembler<ChatGPT> assembler;

    Logger log = LoggerFactory.getLogger(ChatGPTController.class);

    /**
     * SECURITY: read the OpenAI key from the environment instead of keeping a
     * hard-coded value ("Sua Chave Aqui" placeholder) in source control.
     */
    private static final String API_KEY = System.getenv("OPENAI_API_KEY");

    /** Lists chats, paged (default page size 5). */
    @GetMapping
    public PagedModel<EntityModel<ChatGPT>> index(@PageableDefault(size = 5) Pageable pageable) {
        return assembler.toModel(repo.findAll(pageable));
    }

    /** Returns one chat by id, or 404 when it does not exist. */
    @GetMapping("/busca/{id}")
    public EntityModel<ChatGPT> show(@PathVariable Long id) {
        log.info("buscar chat com id: " + id);
        ChatGPT chatGPT = repo.findById(id).orElseThrow(() ->
                new ResponseStatusException(HttpStatus.NOT_FOUND, "Cliente não encontrado"));
        return chatGPT.toModel();
    }

    /** Sends the question to OpenAI, persists question+answer, returns 201. */
    @PostMapping("/api")
    public ResponseEntity<ChatGPT> create(@RequestBody @Valid ChatGPT input) {
        OpenAiService service = new OpenAiService(API_KEY);
        CompletionRequest request = CompletionRequest.builder()
                .model("text-davinci-003")
                .prompt(input.getPergunta())
                .maxTokens(400)
                .build();
        String resposta = service.createCompletion(request).getChoices().get(0).getText();
        ChatGPT chatGPT = new ChatGPT(input.getPergunta(), resposta);
        log.info("Saída do chatbot: " + chatGPT);
        repo.save(chatGPT);
        return ResponseEntity.status(HttpStatus.CREATED).body(chatGPT);
    }

    /** Deletes a chat by id, or 404 when it does not exist. */
    @DeleteMapping("/{id}")
    public ResponseEntity<ChatGPT> destroy(@PathVariable Long id) {
        log.info("deletar chat com o id: " + id);
        ChatGPT chatgpt = repo.findById(id).orElseThrow(() ->
                new ResponseStatusException(HttpStatus.NOT_FOUND, "Chat não encontrado"));
        repo.delete(chatgpt);
        return ResponseEntity.noContent().build();
    }

    /** Maps bean-validation failures to 400 with the violation message. */
    @ResponseStatus(HttpStatus.BAD_REQUEST)
    @ExceptionHandler(ConstraintViolationException.class)
    public ResponseEntity<String> handleValidationExceptions(ConstraintViolationException ex) {
        log.error("Erro de validação: ", ex);
        return ResponseEntity.badRequest().body(ex.getMessage());
    }

    /** Catch-all handler: logs and returns a generic 500 message. */
    @ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR)
    @ExceptionHandler(Exception.class)
    public ResponseEntity<String> handleAllExceptions(Exception ex) {
        log.error("Erro não esperado: ", ex);
        return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("Ocorreu um erro inesperado. Tente novamente mais tarde.");
    }
}
| [
"com.theokanning.openai.completion.CompletionRequest.builder"
] | [((2006, 2182), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2006, 2156), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2006, 2123), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2006, 2077), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2448, 2503), 'org.springframework.http.ResponseEntity.status'), ((2868, 2902), 'org.springframework.http.ResponseEntity.noContent'), ((3179, 3228), 'org.springframework.http.ResponseEntity.badRequest'), ((3469, 3588), 'org.springframework.http.ResponseEntity.status')] |
package com.technoguyfication.admingpt;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.time.Duration;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Stream;
import org.bstats.bukkit.Metrics;
import org.bstats.charts.SimplePie;
import org.bstats.charts.SingleLineChart;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.event.EventException;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.player.AsyncPlayerChatEvent;
import org.bukkit.plugin.java.JavaPlugin;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
/**
 * AdminGPT: a Bukkit plugin that forwards in-game chat to an OpenAI chat model
 * and lets the model reply publicly, "think" silently, or run console commands,
 * driven by tagged lines in its response.
 */
public class AdminGPT extends JavaPlugin implements Listener {
    // One tagged response line per match: <c>…</c> = console command,
    // <t>…</t> = private thought (logged only), <p>…</p> = public chat reply.
    Pattern responsePattern = Pattern.compile("<([ctp])>\\/?(.*)<\\/[ctp]>");
    OpenAiService service;
    // Rolling conversation window sent with every request (capped at historyLength).
    LinkedList<ChatMessage> messageHistory = new LinkedList<ChatMessage>();
    String systemPrompt;      // template from lang.yml; {plugins}/{players}/{version} substituted per message
    String languageModel;
    int historyLength;
    long timeoutSeconds;
    Double temperature;
    List<String> commandBlacklist;  // root commands the model may never execute
    // metrics — bStats counters, reset each time a chart reads them
    int totalMessages = 0;
    int totalCommands = 0;
    int totalResponses = 0;

    @Override
    public void onEnable() {
        // bStats
        int pluginId = 18196;
        Metrics metrics = new Metrics(this, pluginId);
        FileConfiguration config = this.getConfig();
        InputStream langStream = this.getResource("lang.yml");
        // Load lang.yml
        YamlConfiguration langConfig = new YamlConfiguration();
        try {
            langConfig.load(new InputStreamReader(langStream));
            // Load system prompt from lang.yml
            systemPrompt = langConfig.getString("openai-system-prompt");
        } catch (Exception e) {
            getLogger().severe("Failed to load lang.yml file.");
            e.printStackTrace();
            // Disable plugin
            this.setEnabled(false);
            return;
        }
        // Load config; refuse to start without a real API key (including the default placeholder).
        String apiKey = config.getString("openai-api-key");
        if (apiKey == null || apiKey.isBlank() || apiKey.equals("your-api-key-here")) {
            getLogger().severe("No OpenAI API key found in config.yml. Please add one and restart the server.");
            // Save default config
            this.saveDefaultConfig();
            // Disable plugin
            this.setEnabled(false);
            return;
        }
        languageModel = config.getString("openai-language-model");
        temperature = config.getDouble("openai-model-temperature");
        timeoutSeconds = config.getLong("openai-timeout-secs");
        historyLength = config.getInt("history-length");
        commandBlacklist = config.getStringList("command-blacklist");
        // Add bStats charts — each single-line chart reports the counter and resets it.
        metrics.addCustomChart(new SimplePie("language-model", () -> languageModel));
        metrics.addCustomChart(new SingleLineChart("messages-sent", () -> {
            var total = totalMessages;
            totalMessages = 0;
            return total;
        }));
        metrics.addCustomChart(new SingleLineChart("commands-run", () -> {
            var total = totalCommands;
            totalCommands = 0;
            return total;
        }));
        metrics.addCustomChart(new SingleLineChart("responses-received", () -> {
            var total = totalResponses;
            totalResponses = 0;
            return total;
        }));
        // Create OpenAI service
        service = new OpenAiService(apiKey, Duration.ofSeconds(timeoutSeconds)); // set response timeout
        // Register event listeners
        getServer().getPluginManager().registerEvents(this, this);
        // Startup messages
        getLogger().info("Command blacklist: " + String.join(", ", commandBlacklist));
    }

    @Override
    public void onDisable() {
        // Plugin disabled
    }

    /**
     * Handles every player chat line: records it as "<player>: <message>" in
     * the model history, requests a completion off the main thread, then
     * parses the tagged response lines and applies them back on the main thread.
     */
    @EventHandler
    public void onChat(AsyncPlayerChatEvent event) throws EventException {
        // Increment total messages counter
        totalMessages++;
        // Add new message to list
        addChatMessage(new ChatMessage(ChatMessageRole.USER.value(),
                String.format("%s: %s", event.getPlayer().getName(), event.getMessage())));
        // Replace placeholders in the system prompt with live server state.
        String templatedSystemPrompt = systemPrompt
                .replace("{plugins}",
                        String.join(", ",
                                Stream.of(Bukkit.getPluginManager().getPlugins()).map(p -> p.getName())
                                        .toArray(String[]::new)))
                .replace("{players}",
                        String.join(", ",
                                Bukkit.getOnlinePlayers().stream().map(p -> p.getName()).toArray(String[]::new)))
                .replace("{version}", Bukkit.getVersion());
        // Make a new list with the system prompt first, then the rolling history.
        List<ChatMessage> messages = new LinkedList<ChatMessage>();
        messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), templatedSystemPrompt));
        messages.addAll(messageHistory);
        // Create a chat completion request
        ChatCompletionRequest request = ChatCompletionRequest
                .builder()
                .model(languageModel)
                .messages(messages)
                .user(event.getPlayer().getUniqueId().toString())
                .temperature(temperature)
                .build();
        getLogger().fine("Sending chat completion request to OpenAI...");
        // Blocking HTTP call — run off the main server thread.
        Bukkit.getScheduler().runTaskAsynchronously(this, () -> {
            ChatCompletionResult result = service.createChatCompletion(request);
            ChatMessage responseMessage = result.getChoices().get(0).getMessage();
            getLogger().fine("Received chat completion result from OpenAI.");
            List<String> commands = new LinkedList<String>();
            List<String> responses = new LinkedList<String>();
            // Run regex on each line of the result; group(1) is the tag (c/t/p),
            // group(2) the payload. Untagged lines are ignored with a warning.
            for (String line : responseMessage.getContent().split("\\r?\\n")) {
                Matcher matcher = responsePattern.matcher(line);
                if (matcher.find()) {
                    switch (matcher.group(1)) {
                        case "c":
                            String command = matcher.group(2);
                            getLogger().info(String.format("Command: %s", command));
                            commands.add(command);
                            break;
                        case "t":
                            String thought = matcher.group(2);
                            getLogger().info(String.format("Thought: %s", thought));
                            break;
                        case "p":
                            String response = matcher.group(2);
                            getLogger().info(String.format("Response: %s", response));
                            responses.add(response);
                            break;
                        default:
                            getLogger().warning(String.format("Invalid response pattern: %s", line));
                            break;
                    }
                }
            }
            // Run the rest of the code on the main thread (Bukkit API calls
            // below are not safe from the async task).
            Bukkit.getScheduler().runTask(this, () -> {
                // Add commands and responses to total counters
                totalCommands += commands.size();
                totalResponses += responses.size();
                // add the result to the list of messages
                addChatMessage(responseMessage);
                // Run the commands
                for (String command : commands) {
                    // Check the root command (first word) against the blacklist.
                    String rootCommand = command.split(" ")[0];
                    if (commandBlacklist.contains(rootCommand.toLowerCase())) {
                        getLogger().warning(String.format("Command %s is blacklisted.", command));
                        continue;
                    }
                    Bukkit.dispatchCommand(Bukkit.getConsoleSender(), command);
                }
                // Broadcast response lines
                for (String response : responses) {
                    Bukkit.broadcastMessage(ChatColor.AQUA + String.format("<AdminGPT> %s", response));
                }
            });
        });
    }

    // Appends a message to the rolling history, evicting the oldest entry
    // once the configured history length is reached.
    private void addChatMessage(ChatMessage message) {
        // Remove oldest message if list is full
        if (messageHistory.size() >= historyLength) {
            messageHistory.removeFirst();
        }
        // Add new message to list
        messageHistory.add(message);
    }
}
| [
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value"
] | [((4511, 4539), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((4851, 4986), 'java.util.stream.Stream.of'), ((4851, 4922), 'java.util.stream.Stream.of'), ((4861, 4899), 'org.bukkit.Bukkit.getPluginManager'), ((5101, 5180), 'org.bukkit.Bukkit.getOnlinePlayers'), ((5101, 5157), 'org.bukkit.Bukkit.getOnlinePlayers'), ((5101, 5135), 'org.bukkit.Bukkit.getOnlinePlayers'), ((5416, 5446), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((5940, 8809), 'org.bukkit.Bukkit.getScheduler'), ((7705, 8797), 'org.bukkit.Bukkit.getScheduler')] |
package com.vission.chatGPT.service;
import com.google.common.collect.Lists;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import com.vission.chatGPT.properties.ChatGPTProperties;
import com.vission.chatGPT.utils.BeanUtils;
import com.vission.chatGPT.utils.JsonUtils;
import com.vission.chatGPT.utils.RedisUtils;
import java.util.List;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;
@Service
@Slf4j
@RequiredArgsConstructor
public class ChatGPTService {

    /** Model used for every request (previously repeated as a literal in each method). */
    private static final String MODEL = "gpt-3.5-turbo";

    private final ChatGPTProperties properties;
    private final OpenAiService openAiService;
    private final RedisUtils redisUtils;

    /**
     * Translation assistant: translates the given text into Chinese.
     *
     * @param original source text
     * @return the translated text
     */
    public String translation(String original) {
        ChatMessage question = new ChatMessage(ChatMessageRole.USER.value(), original);
        ChatMessage system = new ChatMessage(ChatMessageRole.SYSTEM.value(),
                "你是一个翻译助手,将我说的所有话翻译成中文");
        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model(MODEL)
                .messages(Lists.newArrayList(system, question))
                .build();
        return collectContent(request);
    }

    /**
     * Single-turn chat: no conversation context is kept.
     *
     * @param original user message
     * @return the model's reply
     */
    public String chatCompletion(String original) {
        ChatMessage question = new ChatMessage(ChatMessageRole.USER.value(), original);
        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model(MODEL)
                .messages(Lists.newArrayList(question))
                .build();
        return collectContent(request);
    }

    /**
     * Multi-turn chat: context per user is kept in Redis (30-minute TTL) and
     * cleared once the turn limit from properties is exceeded.
     *
     * @param original user message
     * @param userUuid unique user key for the Redis-stored conversation
     * @return the model's reply, or a notice that the context was reset
     */
    public String chatCompletionByContext(String original, String userUuid) {
        List<ChatMessage> messages = findChatMessagesByUuid(userUuid);
        // Count only the user-role turns toward the flow limit.
        int messageCount = (int) messages.stream().map(ChatMessage::getRole)
                .filter(t -> StringUtils.equals(t, ChatMessageRole.USER.value())).count();
        if (messageCount > properties.getChatGptFlowNum()) {
            redisUtils.del(userUuid);
            return "您的连续对话已超过上限,系统已自动清空上下文";
        }
        StringBuilder result = new StringBuilder();
        ChatMessage newMessage = new ChatMessage(ChatMessageRole.USER.value(), original);
        messages.add(newMessage);
        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model(MODEL).messages(messages).build();
        log.info("request:{}", JsonUtils.toJson(request));
        ChatCompletionResult chatCompletion = openAiService.createChatCompletion(request);
        List<ChatCompletionChoice> choices = chatCompletion.getChoices();
        // Append each assistant reply both to the context and to the result.
        for (ChatCompletionChoice choice : choices) {
            messages.add(choice.getMessage());
            result.append(choice.getMessage().getContent());
        }
        redisUtils.set(userUuid, messages, 1800); // 30-minute context TTL
        return result.toString();
    }

    // Executes the request and concatenates the content of every choice.
    // Shared by translation() and chatCompletion(), which previously
    // duplicated this loop.
    private String collectContent(ChatCompletionRequest request) {
        StringBuilder completion = new StringBuilder();
        ChatCompletionResult chatCompletion = openAiService.createChatCompletion(request);
        for (ChatCompletionChoice choice : chatCompletion.getChoices()) {
            completion.append(choice.getMessage().getContent());
        }
        return completion.toString();
    }

    // Rehydrates the cached conversation; deepCopyList converts the raw Redis
    // entries back into ChatMessage instances. (List<?> replaces the raw List.)
    private List<ChatMessage> findChatMessagesByUuid(String userUuid) {
        List<?> cached = redisUtils.getList(userUuid);
        return BeanUtils.deepCopyList(cached, ChatMessage.class);
    }
}
| [
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] | [((1310, 1338), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((1396, 1426), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1552, 1722), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1552, 1697), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1552, 1623), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2351, 2379), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((2432, 2594), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2432, 2569), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2432, 2503), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3374, 3402), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3711, 3739), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3826, 3924), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3826, 3916), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3826, 3897), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')] |
package cos.peerna.domain.gpt.service;
import com.amazonaws.services.kms.model.NotFoundException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.completion.chat.ChatCompletionChunk;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import cos.peerna.domain.gpt.dto.request.SendMessageRequest;
import cos.peerna.domain.gpt.event.ReviewReplyEvent;
import cos.peerna.domain.gpt.model.GPT;
import cos.peerna.domain.history.model.History;
import cos.peerna.domain.history.repository.HistoryRepository;
import cos.peerna.domain.reply.model.Reply;
import cos.peerna.domain.reply.repository.ReplyRepository;
import cos.peerna.domain.room.model.Chat;
import cos.peerna.domain.room.repository.ChatRepository;
import cos.peerna.global.security.dto.SessionUser;
import java.util.ArrayList;
import java.util.List;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.messaging.simp.SimpMessagingTemplate;
import org.springframework.stereotype.Service;
@Slf4j
@Service
@RequiredArgsConstructor
public class GPTService {
private final ReplyRepository replyRepository;
private final SimpMessagingTemplate template;
private final RedisTemplate<String, Object> redisTemplate;
private final ObjectMapper objectMapper;
private final OpenAiService openAIService;
private final ChatRepository chatRepository;
private final HistoryRepository historyRepository;
    /*
     * Streams a GPT review of the user's answer to the given question and
     * persists the exchange (system prompt, user answer, assistant reply)
     * both in Redis (as context for follow-ups) and as a Chat row.
     *
     * TODO: convert to async
     */
    public void reviewReply(ReviewReplyEvent event) {
        /*
         * TODO: pick the GPT model based on the user's permission level
         */
        ChatMessage systemMessage = new ChatMessage("system", GPT.getConcept(event.question()));
        ChatMessage userMessage = new ChatMessage("user", event.answer());
        StringBuilder assistantMessageBuilder = new StringBuilder();
        // Stream the completion; each chunk is forwarded to the user (via
        // sendChatMessage) and accumulated into assistantMessageBuilder.
        // blockingForEach keeps this call synchronous until the stream ends.
        openAIService.streamChatCompletion(ChatCompletionRequest.builder()
                        .model(GPT.getModel())
                        .messages(List.of(
                                systemMessage,
                                userMessage
                        ))
                        .build())
                .doOnError(throwable -> sendErrorMessage(event.userId()))
                .blockingForEach(chunk -> sendChatMessage(chunk, event.userId(), assistantMessageBuilder));
        ChatMessage assistantMessage = new ChatMessage("assistant", assistantMessageBuilder.toString());
        // Push the three messages in conversation order so later turns can
        // rebuild the context from this Redis list.
        redisTemplate.opsForList().rightPush(String.valueOf(event.historyId()), systemMessage);
        redisTemplate.opsForList().rightPush(String.valueOf(event.historyId()), userMessage);
        redisTemplate.opsForList().rightPush(String.valueOf(event.historyId()), assistantMessage);
        History history = historyRepository.findById(event.historyId())
                .orElseThrow(() -> new NotFoundException("history not found"));
        // writerId 0 marks the chat row as authored by GPT rather than a user.
        chatRepository.save(Chat.builder()
                .writerId(0L)
                .content(assistantMessageBuilder.toString())
                .history(history)
                .build());
    }
    /*
     * Continues the GPT conversation attached to the user's most recent reply:
     * rebuilds the context from Redis, streams the model's answer back, then
     * appends both sides of the exchange to Redis and the chat table.
     *
     * TODO: convert to async
     */
    public void sendMessage(SessionUser user, SendMessageRequest request) {
        // The conversation is keyed by the history of the user's latest reply.
        Reply lastReply = replyRepository.findFirstByUserIdOrderByIdDesc(user.getId())
                .orElseThrow(() -> new NotFoundException("reply not found"));
        List<ChatMessage> messages = getChatMessages(lastReply.getHistory().getId());
        ChatMessage userMessage = new ChatMessage("user", request.message());
        messages.add(userMessage);
        StringBuilder assistantMessageBuilder = new StringBuilder();
        // Stream the completion; chunks are forwarded to the user and
        // accumulated. blockingForEach keeps this synchronous until done.
        openAIService.streamChatCompletion(ChatCompletionRequest.builder()
                        .model(GPT.getModel())
                        .messages(messages)
                        .build())
                .doOnError(throwable -> sendErrorMessage(user.getId()))
                .blockingForEach(chunk -> sendChatMessage(chunk, user.getId(), assistantMessageBuilder));
        ChatMessage assistantMessage = new ChatMessage("assistant", assistantMessageBuilder.toString());
        // Persist the new turn (user then assistant) onto the Redis context.
        redisTemplate.opsForList().rightPush(String.valueOf(lastReply.getHistory().getId()), userMessage);
        redisTemplate.opsForList().rightPush(String.valueOf(lastReply.getHistory().getId()), assistantMessage);
        chatRepository.save(Chat.builder()
                .writerId(user.getId())
                .content(request.message())
                .history(lastReply.getHistory())
                .build());
        // writerId 0 marks the chat row as authored by GPT rather than a user.
        chatRepository.save(Chat.builder()
                .writerId(0L)
                .content(assistantMessageBuilder.toString())
                .history(lastReply.getHistory())
                .build());
    }
private List<ChatMessage> getChatMessages(Long historyId) {
List<Object> messageObjects = redisTemplate.opsForList().range(String.valueOf(historyId), 0, -1);
List<ChatMessage> messages = new ArrayList<>();
if (messageObjects == null) {
throw new NotFoundException("messageObjects is null");
}
for (Object messageObject : messageObjects) {
ChatMessage chatMessage = objectMapper.convertValue(messageObject, ChatMessage.class);
messages.add(chatMessage);
}
return messages;
}
private void sendChatMessage(ChatCompletionChunk chunk, Long userId, StringBuilder assistantMessageBuilder) {
/*
TODO: stream 이 끝나면, gpt 답변 전체를 저장
TODO: gpt에게서 오는 chunk의 순서가 보장되지 않음
*/
String message = chunk.getChoices().get(0).getMessage().getContent();
if (message == null) {
template.convertAndSend("/user/" + userId + "/gpt", GPT.getENDMessage());
return;
}
template.convertAndSend("/user/" + userId + "/gpt", message);
assistantMessageBuilder.append(message);
}
private void sendErrorMessage(Long userId) {
template.convertAndSend("/user/" + userId + "/gpt", GPT.getErrorMessage());
}
}
| [
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] | [((2107, 2379), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2107, 2346), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2107, 2185), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3139, 3303), 'cos.peerna.domain.room.model.Chat.builder'), ((3139, 3278), 'cos.peerna.domain.room.model.Chat.builder'), ((3139, 3244), 'cos.peerna.domain.room.model.Chat.builder'), ((3139, 3183), 'cos.peerna.domain.room.model.Chat.builder'), ((3909, 4064), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3909, 4031), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3909, 3987), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4598, 4770), 'cos.peerna.domain.room.model.Chat.builder'), ((4598, 4745), 'cos.peerna.domain.room.model.Chat.builder'), ((4598, 4696), 'cos.peerna.domain.room.model.Chat.builder'), ((4598, 4652), 'cos.peerna.domain.room.model.Chat.builder'), ((4801, 4980), 'cos.peerna.domain.room.model.Chat.builder'), ((4801, 4955), 'cos.peerna.domain.room.model.Chat.builder'), ((4801, 4906), 'cos.peerna.domain.room.model.Chat.builder'), ((4801, 4845), 'cos.peerna.domain.room.model.Chat.builder')] |
package link.locutus.discord.gpt.imps;
import com.knuddels.jtokkit.api.Encoding;
import com.knuddels.jtokkit.api.EncodingRegistry;
import com.knuddels.jtokkit.api.ModelType;
import com.theokanning.openai.service.OpenAiService;
import com.theokanning.openai.embedding.Embedding;
import com.theokanning.openai.embedding.EmbeddingRequest;
import com.theokanning.openai.embedding.EmbeddingResult;
import link.locutus.discord.db.AEmbeddingDatabase;
import link.locutus.discord.gpt.pw.GptDatabase;
import java.sql.SQLException;
import java.util.List;
public class AdaEmbedding extends AEmbeddingDatabase {
private final EncodingRegistry registry;
private final Encoding embeddingEncoder;
private final OpenAiService service;
public AdaEmbedding(EncodingRegistry registry, OpenAiService service, GptDatabase database) throws SQLException, ClassNotFoundException {
super("ada", database);
this.registry = registry;
this.service = service;
this.embeddingEncoder = registry.getEncodingForModel(ModelType.TEXT_EMBEDDING_ADA_002);
}
public int getEmbeddingTokenSize(String text) {
return embeddingEncoder.encode(text).size();
}
@Override
public float[] fetchEmbedding(String text) {
EmbeddingRequest request = EmbeddingRequest.builder()
.model("text-embedding-ada-002")
.input(List.of(text))
.build();
EmbeddingResult embedResult = service.createEmbeddings(request);
List<Embedding> data = embedResult.getData();
if (data.size() != 1) {
throw new RuntimeException("Expected 1 embedding, got " + data.size());
}
List<Double> result = data.get(0).getEmbedding();
float[] target = new float[result.size()];
for (int i = 0; i < target.length; i++) {
target[i] = result.get(i).floatValue();
}
return target;
}
}
| [
"com.theokanning.openai.embedding.EmbeddingRequest.builder"
] | [((1288, 1426), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((1288, 1401), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((1288, 1363), 'com.theokanning.openai.embedding.EmbeddingRequest.builder')] |
package com.redis.vss;
import redis.clients.jedis.JedisPooled;
import redis.clients.jedis.Protocol;
import redis.clients.jedis.search.Document;
import redis.clients.jedis.search.IndexDefinition;
import redis.clients.jedis.search.IndexOptions;
import redis.clients.jedis.search.Query;
import redis.clients.jedis.search.Schema;
import redis.clients.jedis.search.SearchResult;
import redis.clients.jedis.util.SafeEncoder;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import com.opencsv.CSVReader;
import com.opencsv.CSVReaderBuilder;
import com.theokanning.openai.embedding.EmbeddingRequest;
import com.theokanning.openai.service.OpenAiService;
/**
* Java VSS Wiki Articles Example
*
* @author Michael Yuan
*/
public class JavaVSSWikiArticlesExample {
// Redis client connection
private static JedisPooled client = null;
// OpenAI connection
private static OpenAiService service = null;
// Model
private static String MODEL = "text-embedding-ada-002";
private static int VECTOR_DIM = 1536; // length of the vectors
private static int VECTOR_NUMBER = 25000; // initial number of vectors
private static String INDEX_NAME = "idx_wiki"; // name of the search index
private static String INDEX_NAME_HNSW = "idx_wiki_hnsw"; // name of the search index
private static String PREFIX = "wiki"; // prefix for the document keys
private static String DISTANCE_METRIC = "COSINE"; // prefix for the document keys
private JavaVSSWikiArticlesExample() {
try {
// Initialize Redis connection
InputStream input = ClassLoader.getSystemResourceAsStream("config.properties");
Properties prop = new Properties();
prop.load(input);
client = new JedisPooled(prop.getProperty("redis.host"),
Integer.parseInt(prop.getProperty("redis.port")));
// Initialize OpenAI service connection
String token = System.getenv("OPENAI_API_KEY");
service = new OpenAiService(token);
// client = new JedisPooled(prop.getProperty("redis.host"),
// Integer.parseInt(prop.getProperty("redis.port")),
// prop.getProperty("redis.user"),
// prop.getProperty("redis.password"));
Object result = client.sendCommand(Protocol.Command.PING, "Connected to Redis...");
System.out.println(SafeEncoder.encode((byte[]) result));
} catch (Exception ex) {
ex.printStackTrace();
}
}
private void createFlatIndex() {
try {
// Drop index if exists
try {
client.ftDropIndex(INDEX_NAME);
} catch (Exception e) {
}
;
System.out.println("Creating Flat index...");
HashMap<String, Object> attr = new HashMap<String, Object>();
attr.put("TYPE", "FLOAT64");
attr.put("DIM", VECTOR_DIM);
attr.put("DISTANCE_METRIC", DISTANCE_METRIC);
attr.put("INITIAL_CAP", VECTOR_NUMBER);
// Define index schema
Schema schema = new Schema().addNumericField("id")
.addTextField("title", 3.0).as("title")
.addTextField("url", 1.0).as("url")
.addTextField("text", 2.0).as("text")
.addVectorField("title_vector", Schema.VectorField.VectorAlgo.FLAT, attr).as("title_vector")
.addVectorField("content_vector", Schema.VectorField.VectorAlgo.FLAT, attr).as("content_vector");
IndexDefinition rule = new IndexDefinition(IndexDefinition.Type.HASH)
.setPrefixes(new String[] { "wiki:" });
client.ftCreate(INDEX_NAME, IndexOptions.defaultOptions().setDefinition(rule), schema);
} catch (Exception ex) {
ex.printStackTrace();
}
}
private void createHNSWIndex() {
try {
// Drop index if exists
try {
client.ftDropIndex(INDEX_NAME_HNSW);
} catch (Exception e) {
}
;
System.out.println("Creating HNSW index...");
HashMap<String, Object> attr = new HashMap<String, Object>();
attr.put("TYPE", "FLOAT64");
attr.put("DIM", VECTOR_DIM);
attr.put("DISTANCE_METRIC", DISTANCE_METRIC);
attr.put("INITIAL_CAP", VECTOR_NUMBER);
// Define index schema
Schema schema = new Schema().addNumericField("id")
.addTextField("title", 3.0).as("title")
.addTextField("url", 1.0).as("url")
.addTextField("text", 2.0).as("text")
.addVectorField("title_vector", Schema.VectorField.VectorAlgo.HNSW, attr).as("title_vector")
.addVectorField("content_vector", Schema.VectorField.VectorAlgo.HNSW, attr).as("content_vector");
IndexDefinition rule = new IndexDefinition(IndexDefinition.Type.HASH)
.setPrefixes(new String[] { "wiki:" });
client.ftCreate(INDEX_NAME_HNSW, IndexOptions.defaultOptions().setDefinition(rule), schema);
} catch (Exception ex) {
ex.printStackTrace();
}
}
/**
* @param csvFile
* Load data from csv file to Redis hashes
*/
private void loadData(String csvFile) {
System.out.println("Loading data in Redis...");
try {
FileInputStream input = new FileInputStream(csvFile);
String[] record = null;
String key;
try (CSVReader reader = new CSVReaderBuilder(new InputStreamReader(input)).withSkipLines(1).build()) {
while ((record = reader.readNext()) != null) {
key = PREFIX + ":" + record[0];
double[] title_vector = Pattern.compile(", ")
.splitAsStream(record[4].replaceAll("\\[", "").replaceAll("\\]", ""))
.map(elem -> Double.parseDouble(elem))
.collect(Collectors.toList())
.stream().mapToDouble(Double::doubleValue).toArray();
double[] content_vector = Pattern.compile(", ")
.splitAsStream(record[5].replaceAll("\\[", "").replaceAll("\\]", ""))
.map(elem -> Double.parseDouble(elem))
.collect(Collectors.toList())
.stream().mapToDouble(Double::doubleValue).toArray();
Map<byte[], byte[]> map = new HashMap<>();
map.put("id".getBytes(), record[0].getBytes());
map.put("url".getBytes(), record[1].getBytes());
map.put("title".getBytes(), record[2].getBytes());
map.put("text".getBytes(), record[3].getBytes());
map.put("title_vector".getBytes(), doubleToByte(title_vector));
map.put("content_vector".getBytes(), doubleToByte(content_vector));
map.put("vector_id".getBytes(), record[6].getBytes());
client.hset(key.getBytes(), map);
}
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
/**
* @param input
* @return byte[]
*/
public byte[] doubleToByte(double[] input) {
ByteBuffer buffer = ByteBuffer.allocate(input.length * Double.BYTES);
buffer.order(ByteOrder.LITTLE_ENDIAN);
buffer.asDoubleBuffer().put(input);
return buffer.array();
}
public void searchRedis(String indexName, String queryString, String vector_field, int k) {
// Build OpenAI embedding request
EmbeddingRequest embeddingRequest = EmbeddingRequest.builder()
.model(MODEL)
.input(Collections.singletonList(queryString))
.build();
// Get vector embeddings from Open AI service
double[] embedding = service.createEmbeddings(embeddingRequest).getData().get(0).getEmbedding()
.stream().mapToDouble(Double::doubleValue).toArray();
// Build query
Query q = new Query("*=>[KNN $k @" + vector_field + "$vec AS vector_score]")
.setSortBy("vector_score", true)
.addParam("k", k)
.addParam("vec", doubleToByte(embedding))
.limit(0, k)
.dialect(2);
// Get and iterate over search results
SearchResult res = client.ftSearch(indexName, q);
List<Document> wikis = res.getDocuments();
int i = 1;
for (Document wiki : wikis) {
float score = Float.parseFloat((String) wiki.get("vector_score"));
System.out.println(i + ". " + wiki.get("title") + " (Score: " + (1 - score) + ")");
i++;
}
}
/**
* Run Redis VSS search examples using wiki articles.
*
* @param args The arguments of the program.
*/
public static void main(String[] args) {
// Zip archive of wiki articles with OpenAI embeddings
String fileUrl = "https://cdn.openai.com/API/examples/data/vector_database_wikipedia_articles_embedded.zip";
String saveAt = "/tmp/vector_database_wikipedia_articles_embedded.zip";
// CSV file of wiki articles with OpenAI embeddings
String csvFile = "/tmp/vector_database_wikipedia_articles_embedded.csv";
// Download and unzip csv file of wiki articles with OpenAI embeddings
try {
System.out.println("Downloading and unzipping csv file...");
LoadOpenAIData.downloadUsingNIO(fileUrl, saveAt);
LoadOpenAIData.unzipZip4j(saveAt, "/tmp");
} catch (IOException e) {
e.printStackTrace();
}
JavaVSSWikiArticlesExample vssArticles = new JavaVSSWikiArticlesExample();
vssArticles.createFlatIndex();
vssArticles.createHNSWIndex();
vssArticles.loadData(csvFile);
System.out.println("### VSS query: 'modern art in Europe' in 'title_vector'");
vssArticles.searchRedis(INDEX_NAME, "modern art in Europe", "title_vector", 10);
System.out.println("### VSS query: 'modern art in Europe' in 'title_vector'");
vssArticles.searchRedis(INDEX_NAME_HNSW, "modern art in Europe", "title_vector", 10);
System.out.println("### VSS query: 'Famous battles in Scottish history' in 'content_vector'");
vssArticles.searchRedis(INDEX_NAME, "Famous battles in Scottish history", "content_vector", 10);
}
}
| [
"com.theokanning.openai.embedding.EmbeddingRequest.builder"
] | [((4075, 4124), 'redis.clients.jedis.search.IndexOptions.defaultOptions'), ((5457, 5506), 'redis.clients.jedis.search.IndexOptions.defaultOptions'), ((6208, 6533), 'java.util.regex.Pattern.compile'), ((6208, 6523), 'java.util.regex.Pattern.compile'), ((6208, 6490), 'java.util.regex.Pattern.compile'), ((6208, 6452), 'java.util.regex.Pattern.compile'), ((6208, 6394), 'java.util.regex.Pattern.compile'), ((6208, 6327), 'java.util.regex.Pattern.compile'), ((6582, 6907), 'java.util.regex.Pattern.compile'), ((6582, 6897), 'java.util.regex.Pattern.compile'), ((6582, 6864), 'java.util.regex.Pattern.compile'), ((6582, 6826), 'java.util.regex.Pattern.compile'), ((6582, 6768), 'java.util.regex.Pattern.compile'), ((6582, 6701), 'java.util.regex.Pattern.compile'), ((8173, 8317), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((8173, 8292), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((8173, 8229), 'com.theokanning.openai.embedding.EmbeddingRequest.builder')] |
package com.asleepyfish.strategy.event;
import com.alibaba.fastjson2.JSONObject;
import com.asleepyfish.dto.AiQa;
import com.asleepyfish.enums.WxMessageType;
import com.asleepyfish.repository.AiQaRepository;
import com.asleepyfish.strategy.WxEventStrategy;
import com.google.common.collect.Lists;
import com.theokanning.openai.image.CreateImageRequest;
import io.github.asleepyfish.enums.ImageResponseFormatEnum;
import io.github.asleepyfish.enums.ImageSizeEnum;
import io.github.asleepyfish.util.OpenAiUtils;
import lombok.extern.slf4j.Slf4j;
import me.chanjar.weixin.common.api.WxConsts;
import me.chanjar.weixin.common.bean.result.WxMediaUploadResult;
import me.chanjar.weixin.mp.api.WxMpService;
import me.chanjar.weixin.mp.bean.kefu.WxMpKefuMessage;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletResponse;
import java.io.ByteArrayInputStream;
import java.util.Base64;
import java.util.List;
import java.util.Map;
/**
* @Author: asleepyfish
* @Date: 2022/8/31 19:55
* @Description: 消息策略
*/
@Service("text")
@Slf4j
public class TextStrategy implements WxEventStrategy {
@Resource
private AiQaRepository aiQaRepository;
@Resource
private WxMpService wxMpService;
@Override
public void execute(Map<String, String> requestMap, HttpServletResponse response) throws Exception {
// 发送方账号
String openId = requestMap.get("FromUserName");
String acceptContent = requestMap.get("Content");
log.info(">>> 用户输入:{}", acceptContent);
// 关闭输出流,避免微信服务端重复发送信息
response.getOutputStream().close();
if (acceptContent.charAt(0) == '/') {
createImage(acceptContent, openId);
} else {
createCompletion(acceptContent, openId);
}
}
private void createCompletion(String acceptContent, String openId) throws Exception {
WxMpKefuMessage wxMpKefuMessage = new WxMpKefuMessage();
wxMpKefuMessage.setToUser(openId);
wxMpKefuMessage.setMsgType(WxMessageType.TEXT.getType());
List<String> results = Lists.newArrayList();
// 初始化标记status = 0,表示解答成功
int status = 0;
try {
results = OpenAiUtils.createChatCompletion(acceptContent, openId);
} catch (Exception e) {
status = -1;
log.error(e.getMessage());
results.add(e.getMessage());
}
for (String result : results) {
if (result.startsWith("?") || result.startsWith("?")) {
result = result.substring(1);
}
result = result.trim();
wxMpKefuMessage.setContent(result);
log.info(">>> ChatGPT:{}", result);
AiQa aiQa = new AiQa();
aiQa.setUser(openId);
aiQa.setQuestion(acceptContent);
aiQa.setAnswer(result);
aiQa.setStatus(status);
aiQaRepository.save(aiQa);
// 客服接口发送信息
wxMpService.getKefuService().sendKefuMessage(wxMpKefuMessage);
}
}
private void createImage(String acceptContent, String openId) throws Exception {
WxMpKefuMessage wxMpKefuMessage = new WxMpKefuMessage();
wxMpKefuMessage.setToUser(openId);
wxMpKefuMessage.setMsgType(WxMessageType.IMAGE.getType());
List<String> results = Lists.newArrayList();
// 初始化标记status = 0,表示解答成功
int status = 0;
try {
acceptContent = acceptContent.substring(1);
results = OpenAiUtils.createImage(CreateImageRequest.builder()
.prompt(acceptContent)
.size(ImageSizeEnum.S512x512.getSize())
.user(openId)
.responseFormat(ImageResponseFormatEnum.B64_JSON.getResponseFormat())
.build());
} catch (Exception e) {
status = -1;
log.error(e.getMessage());
results.add(e.getMessage());
}
for (String result : results) {
AiQa aiQa = new AiQa();
aiQa.setUser(openId);
aiQa.setQuestion(acceptContent);
aiQa.setAnswer(result);
aiQa.setStatus(status);
aiQaRepository.save(aiQa);
if (status == -1) {
wxMpKefuMessage.setMsgType(WxMessageType.TEXT.getType());
wxMpKefuMessage.setContent("生成图片失败!原因:" + result);
wxMpService.getKefuService().sendKefuMessage(wxMpKefuMessage);
return;
}
WxMediaUploadResult wxMediaUploadResult = getMediaUploadResult(result);
log.info(">>> 图片上传结果:{}", JSONObject.toJSONString(wxMediaUploadResult));
wxMpKefuMessage.setMediaId(wxMediaUploadResult.getMediaId());
// 客服接口发送信息
wxMpService.getKefuService().sendKefuMessage(wxMpKefuMessage);
}
}
private WxMediaUploadResult getMediaUploadResult(String base64) throws Exception {
byte[] imageBytes = Base64.getDecoder().decode(base64);
try (ByteArrayInputStream bis = new ByteArrayInputStream(imageBytes)) {
return wxMpService.getMaterialService().mediaUpload(WxConsts.MediaFileType.IMAGE, "PNG", bis);
}
}
}
| [
"com.theokanning.openai.image.CreateImageRequest.builder"
] | [((2115, 2143), 'com.asleepyfish.enums.WxMessageType.TEXT.getType'), ((3411, 3440), 'com.asleepyfish.enums.WxMessageType.IMAGE.getType'), ((3694, 3978), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((3694, 3949), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((3694, 3859), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((3694, 3825), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((3694, 3765), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((3792, 3824), 'io.github.asleepyfish.enums.ImageSizeEnum.S512x512.getSize'), ((3896, 3948), 'io.github.asleepyfish.enums.ImageResponseFormatEnum.B64_JSON.getResponseFormat'), ((4469, 4497), 'com.asleepyfish.enums.WxMessageType.TEXT.getType'), ((5208, 5242), 'java.util.Base64.getDecoder')] |
package com.odde.doughnut.services.ai.tools;
import static com.theokanning.openai.service.OpenAiService.defaultObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kjetland.jackson.jsonSchema.JsonSchemaGenerator;
import com.odde.doughnut.controllers.dto.AiCompletionRequiredAction;
import com.theokanning.openai.assistants.AssistantFunction;
import com.theokanning.openai.assistants.AssistantToolsEnum;
import com.theokanning.openai.assistants.Tool;
import com.theokanning.openai.runs.ToolCall;
import com.theokanning.openai.runs.ToolCallFunction;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Stream;
public record AiTool(
String name,
String description,
Class<?> parameterClass,
Function<Object, AiCompletionRequiredAction> executor) {
public static <T> AiTool build(
String name,
String description,
Class<T> parameterClass,
Function<T, AiCompletionRequiredAction> executor) {
return new AiTool(
name, description, parameterClass, (arguments) -> executor.apply((T) arguments));
}
public Tool getTool() {
return new Tool(
AssistantToolsEnum.FUNCTION,
AssistantFunction.builder()
.name(name)
.description(description)
.parameters(serializeClassSchema(parameterClass))
.build());
}
private static Map<String, Object> serializeClassSchema(Class<?> value) {
ObjectMapper objectMapper = new ObjectMapper();
JsonSchemaGenerator jsonSchemaGenerator = new JsonSchemaGenerator(objectMapper);
JsonNode jsonSchema = jsonSchemaGenerator.generateJsonSchema(value);
JsonNode jsonNode = objectMapper.valueToTree(jsonSchema);
return objectMapper.convertValue(jsonNode, Map.class);
}
public Stream<AiCompletionRequiredAction> tryConsume(ToolCall toolCall) {
ToolCallFunction function = toolCall.getFunction();
if (name.equals(function.getName())) {
return Stream.of(executor.apply(convertArguments(function)));
}
return Stream.empty();
}
private Object convertArguments(ToolCallFunction function) {
String arguments = function.getArguments();
try {
JsonNode jsonNode = defaultObjectMapper().readTree(arguments);
return defaultObjectMapper().treeToValue(jsonNode, parameterClass);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}
| [
"com.theokanning.openai.assistants.AssistantFunction.builder"
] | [((1303, 1475), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((1303, 1454), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((1303, 1392), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((1303, 1354), 'com.theokanning.openai.assistants.AssistantFunction.builder')] |
/*
* Copyright 2008-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package egovframework.example.sample.web;
import java.awt.Choice;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import egovframework.example.API.Keys;
import egovframework.example.sample.service.EgovSampleService;
import egovframework.example.sample.service.SampleDefaultVO;
import egovframework.example.sample.service.SampleVO;
import egovframework.rte.fdl.property.EgovPropertyService;
import egovframework.rte.ptl.mvc.tags.ui.pagination.PaginationInfo;
import javax.annotation.Resource;
import javax.servlet.ServletContext;
import javax.servlet.annotation.MultipartConfig;
import javax.servlet.http.HttpServletRequest;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.ui.ModelMap;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.support.SessionStatus;
import org.springframework.web.multipart.MultipartFile;
import org.springmodules.validation.commons.DefaultBeanValidator;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import com.theokanning.openai.audio.CreateTranscriptionRequest;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
* @Class Name : EgovSampleController.java
* @Description : EgovSample Controller Class
* @Modification Information
* @
* @ 수정일 수정자 수정내용
* @ --------- --------- -------------------------------
* @ 2009.03.16 최초생성
*
* @author 개발프레임웍크 실행환경 개발팀
* @since 2009. 03.16
* @version 1.0
* @see
*
* Copyright (C) by MOPAS All right reserved.
*/
@Controller
@MultipartConfig(
maxFileSize = 1024 * 1024 * 25, // 최대 25MB 파일 크기
maxRequestSize = 1024 * 1024 * 25, // 최대 25MB 요청 크기
fileSizeThreshold = 1024 * 1024 // 1MB 이상부터 디스크에 저장
)
public class EgovSampleController {
private static final Logger logger = LogManager.getLogger(EgovSampleController.class);
private final String UPLOAD_DIR = "uploads";
/** EgovSampleService */
@Resource(name = "sampleService")
private EgovSampleService sampleService;
/** EgovPropertyService */
@Resource(name = "propertiesService")
protected EgovPropertyService propertiesService;
/** Validator */
@Resource(name = "beanValidator")
protected DefaultBeanValidator beanValidator;
/**
* 글 목록을 조회한다. (pageing)
* @param searchVO - 조회할 정보가 담긴 SampleDefaultVO
* @param model
* @return "egovSampleList"
* @exception Exception
*/
@RequestMapping(value = "/egovSampleList.do")
public String selectSampleList(@ModelAttribute("searchVO") SampleDefaultVO searchVO, ModelMap model) throws Exception {
/** EgovPropertyService.sample */
searchVO.setPageUnit(propertiesService.getInt("pageUnit"));
searchVO.setPageSize(propertiesService.getInt("pageSize"));
/** pageing setting */
PaginationInfo paginationInfo = new PaginationInfo();
paginationInfo.setCurrentPageNo(searchVO.getPageIndex());
paginationInfo.setRecordCountPerPage(searchVO.getPageUnit());
paginationInfo.setPageSize(searchVO.getPageSize());
searchVO.setFirstIndex(paginationInfo.getFirstRecordIndex());
searchVO.setLastIndex(paginationInfo.getLastRecordIndex());
searchVO.setRecordCountPerPage(paginationInfo.getRecordCountPerPage());
List<?> sampleList = sampleService.selectSampleList(searchVO);
model.addAttribute("resultList", sampleList);
int totCnt = sampleService.selectSampleListTotCnt(searchVO);
paginationInfo.setTotalRecordCount(totCnt);
model.addAttribute("paginationInfo", paginationInfo);
return "sample/egovSampleList";
}
/**
* 글 등록 화면을 조회한다.
* @param searchVO - 목록 조회조건 정보가 담긴 VO
* @param model
* @return "egovSampleRegister"
* @exception Exception
*/
@RequestMapping(value = "/addSample.do", method = RequestMethod.GET)
public String addSampleView(@ModelAttribute("searchVO") SampleDefaultVO searchVO, Model model) throws Exception {
model.addAttribute("sampleVO", new SampleVO());
return "sample/egovSampleRegister";
}
/**
* 글을 등록한다.
* @param sampleVO - 등록할 정보가 담긴 VO
* @param searchVO - 목록 조회조건 정보가 담긴 VO
* @param status
* @return "forward:/egovSampleList.do"
* @exception Exception
*/
@RequestMapping(value = "/addSample.do", method = RequestMethod.POST)
public String addSample(@ModelAttribute("searchVO") SampleDefaultVO searchVO, SampleVO sampleVO, BindingResult bindingResult, Model model, SessionStatus status)
throws Exception {
// Server-Side Validation
beanValidator.validate(sampleVO, bindingResult);
if (bindingResult.hasErrors()) {
model.addAttribute("sampleVO", sampleVO);
return "sample/egovSampleRegister";
}
sampleService.insertSample(sampleVO);
status.setComplete();
return "forward:/egovSampleList.do";
}
/**
* 글 수정화면을 조회한다.
* @param id - 수정할 글 id
* @param searchVO - 목록 조회조건 정보가 담긴 VO
* @param model
* @return "egovSampleRegister"
* @exception Exception
*/
@RequestMapping("/updateSampleView.do")
public String updateSampleView(@RequestParam("selectedId") String id, @ModelAttribute("searchVO") SampleDefaultVO searchVO, Model model) throws Exception {
SampleVO sampleVO = new SampleVO();
sampleVO.setId(id);
// 변수명은 CoC 에 따라 sampleVO
model.addAttribute(selectSample(sampleVO, searchVO));
return "sample/egovSampleRegister";
}
/**
* 글을 조회한다.
* @param sampleVO - 조회할 정보가 담긴 VO
* @param searchVO - 목록 조회조건 정보가 담긴 VO
* @param status
* @return @ModelAttribute("sampleVO") - 조회한 정보
* @exception Exception
*/
public SampleVO selectSample(SampleVO sampleVO, @ModelAttribute("searchVO") SampleDefaultVO searchVO) throws Exception {
return sampleService.selectSample(sampleVO);
}
/**
* 글을 수정한다.
* @param sampleVO - 수정할 정보가 담긴 VO
* @param searchVO - 목록 조회조건 정보가 담긴 VO
* @param status
* @return "forward:/egovSampleList.do"
* @exception Exception
*/
@RequestMapping("/updateSample.do")
public String updateSample(@ModelAttribute("searchVO") SampleDefaultVO searchVO, SampleVO sampleVO, BindingResult bindingResult, Model model, SessionStatus status)
throws Exception {
beanValidator.validate(sampleVO, bindingResult);
if (bindingResult.hasErrors()) {
model.addAttribute("sampleVO", sampleVO);
return "sample/egovSampleRegister";
}
sampleService.updateSample(sampleVO);
status.setComplete();
return "forward:/egovSampleList.do";
}
/**
* 글을 삭제한다.
* @param sampleVO - 삭제할 정보가 담긴 VO
* @param searchVO - 목록 조회조건 정보가 담긴 VO
* @param status
* @return "forward:/egovSampleList.do"
* @exception Exception
*/
@RequestMapping("/deleteSample.do")
public String deleteSample(SampleVO sampleVO, @ModelAttribute("searchVO") SampleDefaultVO searchVO, SessionStatus status) throws Exception {
sampleService.deleteSample(sampleVO);
status.setComplete();
return "forward:/egovSampleList.do";
}
@RequestMapping("/file.do")
public String fileReg() throws Exception {
return "sample/file";
}
//static String englishAudioFilePath = "/Users/jiuhyeong/Documents/Handong/capstone1/Dani_california.mp3";
//static String englishAudioFilePath = "/Users/jiuhyeong/Documents/Handong/capstone1/interview.mp4";
//requestparam으로 임시로 저장한 파일의 위치를 string으로 받은 후 whisper에게 전사를 맡김, 임시 파일 삭제?
/**
 * Transcribes the temporary media file at {@code absolutePath} with the OpenAI
 * Whisper API, exposes the transcript to the view, and removes the temporary file.
 *
 * FIX: the OpenAiService executor is now shut down and the temp file is now
 * deleted even when the transcription call throws; previously an API failure
 * leaked both the client's worker threads and the uploaded file on disk.
 *
 * @param absolutePath absolute path of the temporarily stored upload (produced by handleFile)
 * @param model        receives "result" (the transcript) and "absolutePath"
 * @return the upload-form view name
 */
@RequestMapping(value = "/file.do", method = RequestMethod.POST)
public String createTranscription(@RequestParam String absolutePath, Model model) {
    OpenAiService service = new OpenAiService(Keys.OPENAPI_KEY, Duration.ofMinutes(9999));
    File fileToDelete = new File(absolutePath);
    try {
        CreateTranscriptionRequest createTranscriptionRequest = CreateTranscriptionRequest.builder()
                .model("whisper-1")
                .build();
        String text = service.createTranscription(createTranscriptionRequest, absolutePath).getText();
        logger.debug(text);
        model.addAttribute("result", text);
        model.addAttribute("absolutePath", absolutePath);
    } finally {
        // Always release the client's worker threads and clean up the temp upload.
        service.shutdownExecutor();
        if (fileToDelete.exists()) {
            if (fileToDelete.delete()) {
                logger.debug("temp File deleted successfully.");
            } else {
                logger.error("Failed to delete the file.");
            }
        } else {
            logger.debug("temp File not found");
        }
    }
    return "sample/file";
}
//jsp에 저장버튼 추가 후 restapi로 보내기
/**
 * Summarizes a transcription with the OpenAI chat API and renders the result.
 *
 * FIX: the OpenAiService executor is now shut down in a finally block; previously
 * it was never shut down at all, leaking the client's worker threads per request.
 * Also replaced the redundant {@code (double) 0.5f} cast with a plain 0.5.
 *
 * @param transcription_result text to summarize (language is auto-detected by the prompt)
 * @param model                receives "summary_result"
 * @return the summary view name
 */
@RequestMapping(value = "/summarize.do", method = RequestMethod.POST)
public String showSummaryResult(@RequestParam String transcription_result, Model model) {
    OpenAiService service = new OpenAiService(Keys.OPENAPI_KEY, Duration.ofMinutes(9999));
    try {
        List<ChatMessage> message = new ArrayList<ChatMessage>();
        // Prompt (Korean): identify the topic of the text and summarize it in ~5 lines in that language
        message.add(new ChatMessage("user", "텍스트의 주제를 파악해서 해당 언어로 다섯줄 내외 요약해줘 \"" + transcription_result + "\""));
        ChatCompletionRequest completionRequest = ChatCompletionRequest.builder()
                .messages(message)
                .model("gpt-3.5-turbo")
                .maxTokens(1500)
                .temperature(0.5)
                .build();
        String summaryResult = service.createChatCompletion(completionRequest)
                .getChoices().get(0).getMessage().getContent();
        model.addAttribute("summary_result", summaryResult);
    } finally {
        // Release the client's worker threads.
        service.shutdownExecutor();
    }
    return "sample/summarize";
}
//파일을 임시저장 후 file.do에 경로를 보냄.
/**
 * Stores an uploaded media file under the webapp's UPLOAD_DIR and exposes its
 * absolute path to the view so the transcription endpoint can pick it up.
 *
 * FIX: the original resolved {@code file.getOriginalFilename()} directly, which
 * allows path traversal via a crafted filename (e.g. "../../x"); only the last
 * path component is used now. Stack-trace printing was replaced with the logger.
 *
 * @param file    the uploaded multipart file (may be absent)
 * @param model   receives "absolutePath" (on success) and "inputFile"
 * @param request used to locate the webapp's real filesystem path
 * @return the upload-form view, or a redirect when no file was selected
 */
@RequestMapping(value = "/postfile.do", method = RequestMethod.POST)
public String handleFile(@RequestParam(value = "file", required = false) MultipartFile file, Model model, HttpServletRequest request) throws IOException{
    ServletContext context = request.getSession().getServletContext();
    String projectPath = context.getRealPath("/");
    logger.debug("Project Path: " + projectPath);
    if (file.isEmpty()) {
        return "redirect:/file.do"; // no file selected; back to the form
    }
    try {
        byte[] bytes = file.getBytes();
        Path directoryPath = Paths.get(projectPath + UPLOAD_DIR);
        // Create the upload directory on first use
        if (!Files.exists(directoryPath)) {
            Files.createDirectories(directoryPath);
        }
        // Keep only the filename component to prevent path traversal
        String safeName = Paths.get(file.getOriginalFilename()).getFileName().toString();
        Path filePath = directoryPath.resolve(safeName);
        Files.write(filePath, bytes);
        String absolutePathString = filePath.toAbsolutePath().toString();
        logger.debug("AbsolutePathString received" + absolutePathString);
        model.addAttribute("absolutePath", absolutePathString);
    } catch (IOException e) {
        logger.error("Failed to store uploaded file", e);
    }
    model.addAttribute("inputFile", file.getOriginalFilename());
    return "sample/file";
}
/**
 * Saves a summarization result.
 * NOTE(review): currently an unimplemented stub - it ignores every parameter and
 * only redirects to /summary.do; confirm whether persistence is still planned.
 */
@RequestMapping(value = "/save-result.do", method = RequestMethod.POST)
public String saveFile(@RequestParam(value = "dir", required = false) MultipartFile dir, @RequestParam String summ_result, Model model, HttpServletRequest request) throws IOException{
return "redirect:/summary.do";
}
}
| [
"com.theokanning.openai.audio.CreateTranscriptionRequest.builder",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] | [((10123, 10222), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((10123, 10196), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((11541, 11746), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((11541, 11724), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((11541, 11683), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((11541, 11638), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((11541, 11601), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')] |
/*
* Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license
* Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template
*/
package cloud.cleo.connectgpt;
import cloud.cleo.connectgpt.lang.LangUtil;
import static cloud.cleo.connectgpt.lang.LangUtil.LanguageIds.*;
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import com.amazonaws.services.lambda.runtime.events.LexV2Event;
import com.amazonaws.services.lambda.runtime.events.LexV2Event.DialogAction;
import com.amazonaws.services.lambda.runtime.events.LexV2Event.Intent;
import com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState;
import com.amazonaws.services.lambda.runtime.events.LexV2Response;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.service.OpenAiService;
import java.net.SocketTimeoutException;
import java.time.Duration;
import java.time.LocalDate;
import java.time.ZoneId;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import software.amazon.awssdk.enhanced.dynamodb.DynamoDbEnhancedClient;
import software.amazon.awssdk.enhanced.dynamodb.DynamoDbTable;
import software.amazon.awssdk.enhanced.dynamodb.Key;
import software.amazon.awssdk.enhanced.dynamodb.TableSchema;
import software.amazon.awssdk.enhanced.dynamodb.extensions.AutoGeneratedTimestampRecordExtension;
/**
*
* @author sjensen
*/
/**
 * AWS Lambda handler that bridges an Amazon Lex V2 bot to the OpenAI chat API,
 * persisting per-caller conversation state in DynamoDB (one item per session id
 * per calendar day, America/Chicago).
 */
public class ChatGPTLambda implements RequestHandler<LexV2Event, LexV2Response> {
// Initialize the Log4j logger.
final static Logger log = LogManager.getLogger(ChatGPTLambda.class);
final static ObjectMapper mapper = new ObjectMapper();
// DynamoDB enhanced-client mapping for the conversation-state bean
final static TableSchema<ChatGPTSessionState> schema = TableSchema.fromBean(ChatGPTSessionState.class);
final static DynamoDbEnhancedClient enhancedClient = DynamoDbEnhancedClient.builder()
.extensions(AutoGeneratedTimestampRecordExtension.create()).build();
final static DynamoDbTable<ChatGPTSessionState> sessionState = enhancedClient.table(System.getenv("SESSION_TABLE_NAME"), schema);
// 20s client timeout; must stay below the Lambda/Lex timeout budget
final static OpenAiService open_ai_service = new OpenAiService(System.getenv("OPENAI_API_KEY"), Duration.ofSeconds(20));
final static String OPENAI_MODEL = System.getenv("OPENAI_MODEL");
/**
 * Lambda entry point. Delegates to processGPT and converts any uncaught
 * exception into a localized "unhandled exception" response so the bot
 * never surfaces a raw error to the caller.
 */
@Override
public LexV2Response handleRequest(LexV2Event lexRequest, Context cntxt) {
try {
log.debug(mapper.valueToTree(lexRequest).toString());
final var intentName = lexRequest.getSessionState().getIntent().getName();
log.debug("Intent: " + intentName);
return processGPT(lexRequest);
} catch (Exception e) {
log.error(e);
// Unhandled Exception
return buildResponse(lexRequest, new LangUtil(lexRequest.getBot().getLocaleId()).getString(UNHANDLED_EXCEPTION));
}
}
/**
 * Core flow: handle blank input (silence), load/create the day's session from
 * DynamoDB, append the user's message, call OpenAI, save the session, and
 * return the bot's reply. Timeouts map to a localized timeout message.
 */
private LexV2Response processGPT(LexV2Event lexRequest) {
final var input = lexRequest.getInputTranscript();
final var localId = lexRequest.getBot().getLocaleId();
final var lang = new LangUtil(localId);
log.debug("Java Locale is " + lang.getLocale());
if (input == null || input.isBlank()) {
log.debug("Got blank input, so just silent or nothing");
// blankCounter (session attribute) tracks consecutive silent turns
final var attrs = lexRequest.getSessionState().getSessionAttributes();
var count = Integer.valueOf(attrs.getOrDefault("blankCounter", "0"));
count++;
if (count > 2) {
log.debug("Two blank responses, sending to Quit Intent");
// Hang up on caller after 2 silence requests
return buildQuitResponse(lexRequest);
} else {
attrs.put("blankCounter", count.toString());
// If we get silence (timeout without speech), then we get empty string on the transcript
return buildResponse(lexRequest, lang.getString(BLANK_RESPONSE));
}
}
// When testing in lex console input will be text, so use session ID, for speech we should have a phone via Connect
final var user_id = lexRequest.getSessionId();
// Key to record in Dynamo: partition = caller id, sort = today's date (America/Chicago)
final var key = Key.builder().partitionValue(user_id).sortValue(LocalDate.now(ZoneId.of("America/Chicago")).toString()).build();
// load session state if it exists
log.debug("Start Retrieving Session State");
var session = sessionState.getItem(key);
log.debug("End Retrieving Session State");
if (session == null) {
session = new ChatGPTSessionState(user_id);
}
// Since we can call and change language during session, always specify how we want responses
session.addSystemMessage(lang.getString(CHATGPT_RESPONSE_LANGUAGE));
// add this request to the session
session.addUserMessage(input);
String botResponse;
try {
ChatCompletionRequest request = ChatCompletionRequest.builder()
.messages(session.getChatMessages())
.model(OPENAI_MODEL)
.maxTokens(500)
.temperature(0.2) // More focused
.n(1) // Only return 1 completion
.build();
log.debug("Start API Call to ChatGPT");
final var completion = open_ai_service.createChatCompletion(request);
log.debug("End API Call to ChatGPT");
log.debug(completion);
botResponse = completion.getChoices().get(0).getMessage().getContent();
// Add response to session
session.addAssistantMessage(botResponse);
// Since we have a valid response, add message asking if there is anything else
if ( ! "Text".equalsIgnoreCase(lexRequest.getInputMode()) ) {
// Only add if not text (added to voice response)
botResponse = botResponse + lang.getString(ANYTHING_ELSE);
}
// Save the session to dynamo
log.debug("Start Saving Session State");
session.incrementCounter();
sessionState.putItem(session);
log.debug("End Saving Session State");
} catch (RuntimeException rte) {
// The OpenAI client wraps I/O failures; only a socket timeout is handled gracefully
if (rte.getCause() != null && rte.getCause() instanceof SocketTimeoutException) {
log.error("Response timed out", rte);
botResponse = lang.getString(OPERATION_TIMED_OUT);
} else {
throw rte;
}
}
return buildResponse(lexRequest, botResponse);
}
/**
 * Response that sends you to the Quit intent so the call can be ended
 *
 * @param lexRequest
 * @return response that delegates to the Quit intent
 */
private LexV2Response buildQuitResponse(LexV2Event lexRequest) {
// State to return
final var ss = SessionState.builder()
// Retain the current session attributes
.withSessionAttributes(lexRequest.getSessionState().getSessionAttributes())
// Send back Quit Intent
.withIntent(Intent.builder().withName("Quit").withState("ReadyForFulfillment").build())
// Indicate the state is Delegate
.withDialogAction(DialogAction.builder().withType("Delegate").build())
.build();
final var lexV2Res = LexV2Response.builder()
.withSessionState(ss)
.build();
log.debug("Response is " + mapper.valueToTree(lexV2Res));
return lexV2Res;
}
/**
 * General Response used to send back a message and Elicit Intent again at LEX
 *
 * @param lexRequest
 * @param response plain-text message to speak/show to the caller
 * @return response that elicits the next intent
 */
private LexV2Response buildResponse(LexV2Event lexRequest, String response) {
// State to return
final var ss = SessionState.builder()
// Retain the current session attributes
.withSessionAttributes(lexRequest.getSessionState().getSessionAttributes())
// Always ElictIntent, so you're back at the LEX Bot looking for more input
.withDialogAction(DialogAction.builder().withType("ElicitIntent").build())
.build();
final var lexV2Res = LexV2Response.builder()
.withSessionState(ss)
// We are using plain text responses
.withMessages(new LexV2Response.Message[]{new LexV2Response.Message("PlainText", response, null)})
.build();
log.debug("Response is " + mapper.valueToTree(lexV2Res));
return lexV2Res;
}
}
| [
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] | [((1987, 2099), 'software.amazon.awssdk.enhanced.dynamodb.DynamoDbEnhancedClient.builder'), ((1987, 2091), 'software.amazon.awssdk.enhanced.dynamodb.DynamoDbEnhancedClient.builder'), ((4370, 4481), 'software.amazon.awssdk.enhanced.dynamodb.Key.builder'), ((4370, 4473), 'software.amazon.awssdk.enhanced.dynamodb.Key.builder'), ((4370, 4407), 'software.amazon.awssdk.enhanced.dynamodb.Key.builder'), ((4418, 4472), 'java.time.LocalDate.now'), ((5138, 5440), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5138, 5383), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5138, 5341), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5138, 5303), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5138, 5267), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5138, 5226), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((7069, 7547), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState.builder'), ((7069, 7522), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState.builder'), ((7069, 7385), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState.builder'), ((7069, 7240), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState.builder'), ((7310, 7384), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.Intent.builder'), ((7310, 7376), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.Intent.builder'), ((7310, 7343), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.Intent.builder'), ((7470, 7521), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.DialogAction.builder'), ((7470, 7513), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.DialogAction.builder'), ((7579, 7665), 'com.amazonaws.services.lambda.runtime.events.LexV2Response.builder'), ((7579, 7640), 'com.amazonaws.services.lambda.runtime.events.LexV2Response.builder'), ((8067, 
8446), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState.builder'), ((8067, 8421), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState.builder'), ((8067, 8238), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState.builder'), ((8365, 8420), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.DialogAction.builder'), ((8365, 8412), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.DialogAction.builder'), ((8478, 8732), 'com.amazonaws.services.lambda.runtime.events.LexV2Response.builder'), ((8478, 8707), 'com.amazonaws.services.lambda.runtime.events.LexV2Response.builder'), ((8478, 8539), 'com.amazonaws.services.lambda.runtime.events.LexV2Response.builder')] |
package de.throughput.ircbot.handler;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import de.throughput.ircbot.api.Command;
import de.throughput.ircbot.api.CommandEvent;
import de.throughput.ircbot.api.CommandHandler;
import de.throughput.ircbot.api.MessageHandler;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.pircbotx.hooks.events.MessageEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.concurrent.ConcurrentHashMap;
@Component
/**
 * IRC bot handler that answers messages addressed to the bot via the OpenAI chat
 * API, keeping a short per-channel rolling conversation context. The system
 * prompt is loaded from a file and can be reloaded with the "aireset" command.
 */
public class OpenAiChatMessageHandler implements MessageHandler, CommandHandler {
private static final Logger LOG = LoggerFactory.getLogger(OpenAiChatMessageHandler.class);
public static final Command CMD_RESET_CONTEXT = new Command("aireset",
"aireset - deletes the current context for the channel and reloads the system prompt from the file system.");
private static final String MODEL_GPT_3_5_TURBO = "gpt-3.5-turbo";
// At most this many context messages are kept per channel (see pruneOldMessages)
private static final int MAX_CONTEXT_MESSAGES = 10;
private static final int MAX_TOKENS = 100;
// Responses are truncated to fit a single IRC line
private static final int MAX_IRC_MESSAGE_LENGTH = 420;
// German: "(limit the answer to 200 characters)" - appended to every user message
private static final String SHORT_ANSWER_HINT = " (Antwort auf 200 Zeichen begrenzen)";
// channel name -> rolling context; the list itself is the lock for per-channel access
private final Map<String, LinkedList<TimedChatMessage>> contextMessagesPerChannel = new ConcurrentHashMap<>();
private final OpenAiService openAiService;
private final Path systemPromptPath;
private String systemPrompt;
public OpenAiChatMessageHandler(OpenAiService openAiService, @Value("${openai.systemPrompt.path}") Path systemPromptPath) {
this.openAiService = openAiService;
this.systemPromptPath = systemPromptPath;
readSystemPromptFromFile();
}
@Override
public Set<Command> getCommands() {
return Set.of(CMD_RESET_CONTEXT);
}
// Responds only when the message is addressed to the bot ("nick: ..." or "nick, ...")
@Override
public boolean onMessage(MessageEvent event) {
String message = event.getMessage().trim();
String botNick = event.getBot().getNick();
if (message.startsWith(botNick + ":") || message.startsWith(botNick + ",")) {
message = message.substring(event.getBot().getNick().length() + 1).trim();
generateResponse(event, message);
return true;
}
return false;
}
@Override
public boolean onCommand(CommandEvent command) {
// handles the aireset command
var contextMessages = contextMessagesPerChannel.get(command.getEvent().getChannel().getName());
if (contextMessages != null) {
synchronized (contextMessages) {
contextMessages.clear();
}
}
readSystemPromptFromFile();
command.respond("system prompt reloaded. context reset complete.");
return true;
}
/**
 * Generates a response to the given (trimmed) message using the OpenAI API.
 * The channel's context list is locked for the whole API round-trip so context
 * updates stay ordered; API errors are reported back to the channel.
 */
private void generateResponse(MessageEvent event, String message) {
var contextMessages = contextMessagesPerChannel.computeIfAbsent(event.getChannel().getName(), k -> new LinkedList<>());
synchronized (contextMessages) {
try {
String channel = event.getChannel().getName();
var request = ChatCompletionRequest.builder()
.model(MODEL_GPT_3_5_TURBO)
.maxTokens(MAX_TOKENS)
.messages(createPromptMessages(contextMessages, channel, event.getUser().getNick(), message))
.build();
ChatCompletionResult completionResult = openAiService.createChatCompletion(request);
ChatMessage responseMessage = completionResult.getChoices().get(0).getMessage();
contextMessages.add(new TimedChatMessage(responseMessage));
event.respond(sanitizeResponse(responseMessage.getContent()));
} catch (Exception e) {
LOG.error(e.getMessage(), e);
event.respond("Tja. (" + ExceptionUtils.getRootCauseMessage(e) + ")");
}
}
}
/**
 * Sanitizes the response by removing excessive whitespace and limiting the length.
 */
private static String sanitizeResponse(String content) {
String trim = content.replaceAll("\\s+", " ").trim();
return trim.length() > MAX_IRC_MESSAGE_LENGTH ? trim.substring(0, MAX_IRC_MESSAGE_LENGTH) : trim;
}
/**
 * Creates the list of prompt messages for the OpenAI API call.
 * Note: the short-answer hint is appended before the message is stored, so the
 * hint also ends up in the saved channel context.
 */
private List<ChatMessage> createPromptMessages(LinkedList<TimedChatMessage> contextMessages, String channel, String nick, String message) {
message += SHORT_ANSWER_HINT;
contextMessages.add(new TimedChatMessage(new ChatMessage(ChatMessageRole.USER.value(), message, nick)));
pruneOldMessages(contextMessages);
List<ChatMessage> promptMessages = new ArrayList<>();
promptMessages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), systemPrompt));
promptMessages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), getDatePrompt()));
promptMessages.addAll(contextMessages);
return promptMessages;
}
/**
 * Generates a system prompt containing the current date and time (German, Europe/Berlin).
 */
private String getDatePrompt() {
TimeZone timeZone = TimeZone.getTimeZone("Europe/Berlin");
SimpleDateFormat dateFormat = new SimpleDateFormat("EEEE, 'der' dd. MMMM yyyy", Locale.GERMAN);
dateFormat.setTimeZone(timeZone);
SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm", Locale.GERMAN);
timeFormat.setTimeZone(timeZone);
Date now = new Date();
return "Heute ist " + dateFormat.format(now) + ", und es ist " + timeFormat.format(now) + " Uhr in Deutschland.";
}
/**
 * Removes old messages from the context: anything older than two hours, then
 * the oldest entries until at most MAX_CONTEXT_MESSAGES remain.
 */
private void pruneOldMessages(LinkedList<TimedChatMessage> contextMessages) {
LocalDateTime twoHoursAgo = LocalDateTime.now().minusHours(2);
contextMessages.removeIf(message -> message.getTimestamp().isBefore(twoHoursAgo));
while (contextMessages.size() > MAX_CONTEXT_MESSAGES) {
contextMessages.removeFirst();
}
}
/**
 * Reads the system prompt from the file system.
 */
private void readSystemPromptFromFile() {
try {
systemPrompt = Files.readString(systemPromptPath);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
@Override
public boolean isOnlyTalkChannels() {
return true;
}
/**
 * Adds a timestamp to ChatMessage, allowing us to drop old messages from the context.
 */
private static class TimedChatMessage extends ChatMessage {
private final LocalDateTime timestamp;
public TimedChatMessage(ChatMessage chatMessage) {
super(chatMessage.getRole(), chatMessage.getContent(), chatMessage.getName());
this.timestamp = LocalDateTime.now();
}
@JsonIgnore
public LocalDateTime getTimestamp() {
return timestamp;
}
}
}
| [
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] | [((4011, 4292), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4011, 4259), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4011, 4141), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4011, 4094), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5521, 5549), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((5718, 5748), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((5809, 5839), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((6750, 6783), 'java.time.LocalDateTime.now')] |
package com.cvcopilot.resumebuilding.service;
import com.cvcopilot.resumebuilding.models.Modification;
import com.cvcopilot.resumebuilding.repository.ModificationRepository;
import com.cvcopilot.resumebuilding.repository.ProfileRepository;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import javax.annotation.PostConstruct;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.redis.core.HashOperations;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.ZSetOperations;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.messaging.handler.annotation.Payload;
import org.springframework.stereotype.Service;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
@Service
public class ResumeService {
// User profile persistence (not referenced in this class; presumably used elsewhere - TODO confirm)
@Autowired
private ProfileRepository profileRepository;
// Tracks per-user/per-modification processing state
@Autowired
private StateService stateService;
@Autowired
private RedisTemplate<String, String> redisTemplate;
// Typed views over redisTemplate, initialised once in init()
private HashOperations<String, String, String> hashOperations;
private ZSetOperations<String, String> zSetOperations;
@Autowired
private ModificationRepository modificationRepository;
// OpenAI credentials and model name, injected from application properties
@Value("${openai.api-key}")
private String openAIKey;
@Value("${openai.model}")
private String openAIModel;
/**
 * Caches the hash/zset operation views of the injected RedisTemplate once the
 * bean has been constructed (the template is not available at field-init time).
 */
@PostConstruct
private void init() {
hashOperations = redisTemplate.opsForHash();
zSetOperations = redisTemplate.opsForZSet();
}
// Instruction appended to every resume request sent to the model.
// FIX: "Emit the personal information" told the model the opposite of what was
// intended ("Omit"); also corrected "a optimized" -> "an optimized".
private String prompt = "Based on the user's experiences, write an optimized resume according to the job description. Omit the personal information.";
private static final Logger logger = LoggerFactory.getLogger(ResumeService.class);
/**
 * Kafka consumer for resume-generation requests.
 *
 * Message layout (fixed offsets): chars [0,19) = user id, [19,55) = modification
 * id (36-char UUID), char 55 = separator, [56..) = the user's text.
 *
 * Flow: mark "in_progress" -> call OpenAI -> persist result to Postgres ->
 * mark "finished" and invalidate the user's cached results; API and DB failures
 * set distinct failure states.
 *
 * FIX: {@code shutdownExecutor()} is now called in a finally block; previously it
 * ran only on success, leaking the client's worker threads whenever the API call
 * threw.
 *
 * @param message raw Kafka payload in the layout described above
 */
@KafkaListener(topics = "resume", groupId = "test-group", containerFactory = "kafkaListenerContainerFactory")
public void consume(@Payload String message) {
    String userId = message.substring(0, 19);
    String modificationId = message.substring(19, 55);
    List<ChatCompletionChoice> res;
    OpenAiService service = null;
    try {
        stateService.addOrUpdateState(userId, modificationId, "in_progress");
        service = new OpenAiService(openAIKey, Duration.ofSeconds(120));
        List<ChatMessage> messages = new ArrayList<>();
        final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a hr from big tech company.");
        final ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), message.substring(56) + prompt);
        messages.add(systemMessage);
        messages.add(userMessage);
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
                .builder()
                .model("gpt-3.5-turbo")
                .messages(messages)
                .n(1)
                .maxTokens(600)
                .logitBias(new HashMap<>())
                .build();
        res = service.createChatCompletion(chatCompletionRequest).getChoices();
    } catch (RuntimeException e) {
        logger.error("RuntimeException: " + e.getMessage());
        stateService.addOrUpdateState(userId, modificationId, "failed");
        return;
    } finally {
        // Always release the OpenAI client's worker threads
        if (service != null) {
            service.shutdownExecutor();
        }
    }
    try {
        // write to postgres
        modificationRepository.save(new Modification(modificationId, res.get(0).getMessage().getContent(), Long.valueOf(userId), System.currentTimeMillis()));
    } catch (RuntimeException e) {
        logger.error("Failed to write to Postgres: " + e.getMessage());
        stateService.addOrUpdateState(userId, modificationId, "failed_db_error");
        return;
    }
    // write state to redis
    stateService.addOrUpdateState(userId, modificationId, "finished");
    // invalidate cache of all results of this user
    zSetOperations.remove(userId);
}
} | [
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value"
] | [((2811, 2841), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((2943, 2971), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')] |
package podsofkon;
import com.theokanning.openai.image.CreateImageRequest;
import com.theokanning.openai.service.OpenAiService;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.http.*;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.client.RestTemplate;
import javax.servlet.http.HttpServletRequest;
import javax.sound.sampled.*;
import java.io.*;
import java.time.Duration;
import java.util.*;
/**
 * Demo controller that builds a "picture story" from speech: each request records
 * roughly eight seconds of microphone audio on the server, transcribes it with the
 * OpenAI Whisper API, generates an image for the transcript with the OpenAI image
 * API, and renders every image generated so far as one HTML film strip.
 *
 * SECURITY FIX: OpenAI API keys were hard-coded in {@link #imagegeneration} and
 * {@link #transcribe}; they are now read from the OPENAI_KEY environment variable,
 * the same variable {@link #transcribeFile} already used.
 *
 * NOTE(review): {@code storyImages} is static shared state, so concurrent users
 * share (and can reset) one story - confirm this demo is meant to be single-user.
 */
@RestController
@RequestMapping("/picturestory")
public class GenerateAPictureStoryUsingOnlySpeech {

    /** Image URLs of the story frames generated so far. */
    static List<String> storyImages = new ArrayList();

    /**
     * Starts a new story by discarding all previously generated frames.
     *
     * @param request unused; kept for signature compatibility
     * @return the HTML page with an empty story strip
     */
    @GetMapping("/form")
    public String newstory(
            HttpServletRequest request) throws Exception {
        storyImages = new ArrayList();
        return getHtmlString("");
    }

    /**
     * Records one scene: captures ~8s of audio, transcribes it, appends the chosen
     * style options, generates an image and appends it to the story.
     *
     * @param genopts style suffix selected in the form (e.g. ", pixel art")
     * @return the HTML page showing all story frames
     */
    @GetMapping("/picturestory")
    public String picturestory(@RequestParam("genopts") String genopts) throws Exception {
        // 44.1 kHz, 16-bit, mono, signed PCM, big-endian
        AudioFormat format =
                new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100.0f, 16, 1,
                        (16 / 8) * 1, 44100.0f, true);
        SoundRecorder soundRecorder = new SoundRecorder();
        soundRecorder.build(format);
        System.out.println("Start recording ....");
        soundRecorder.start();
        Thread.sleep(8000);
        soundRecorder.stop();
        System.out.println("Stopped recording ....");
        Thread.sleep(3000); // give the recorder thread time to finish draining the line
        String name = "AISoundClip";
        AudioFileFormat.Type fileType = AudioFileFormat.Type.WAVE;
        AudioInputStream audioInputStream = soundRecorder.audioInputStream;
        System.out.println("Saving...");
        File file = new File(name + "." + fileType.getExtension());
        audioInputStream.reset();
        AudioSystem.write(audioInputStream, fileType, file);
        System.out.println("Saved " + file.getAbsolutePath());
        String transcription = transcribe(file) + genopts;
        System.out.println("transcription " + transcription);
        String imageLocation = imagegeneration(transcription);
        System.out.println("imageLocation " + imageLocation);
        storyImages.add(imageLocation);
        // Re-render every frame collected so far as one table row
        String htmlStoryFrames = "";
        Iterator<String> iterator = storyImages.iterator();
        while (iterator.hasNext()) {
            htmlStoryFrames += "<td><img src=\"" + iterator.next() + "\" width=\"400\" height=\"400\"></td>";
        }
        return getHtmlString(htmlStoryFrames);
    }

    /** Wraps the story frames in the page skeleton (record form + style options). */
    private static String getHtmlString(String htmlStoryFrames) {
        return "<html><table>" +
                "  <tr>" +
                htmlStoryFrames +
                "  </tr>" +
                "</table><br><br>" +
                "<form action=\"/picturestory/picturestory\">" +
                "  <input type=\"submit\" value=\"Click here and record (up to 10 seconds of audio) describing next scene.\">" +
                "<br> Some additional options..." +
                "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", using only one line\" checked >using only one line" +
                "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", photo taken on a Pentax k1000\">photo taken on a Pentax k1000" +
                "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", pixel art\">pixel art" +
                "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", digital art\">digital art" +
                "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", 3d render\">3d render" +
                "</form><br><br>" +
                "<form action=\"/picturestory/form\">" +
                "  <input type=\"submit\" value=\"Or click here to start a new story\">\n" +
                "</form>" +
                "</html>";
    }

    /**
     * Generates an image for the given description via the OpenAI image API.
     *
     * @param imagedescription prompt text for the image model
     * @return URL of the generated image
     */
    public String imagegeneration(String imagedescription) throws Exception {
        // Key read from the environment instead of being hard-coded in source
        OpenAiService service =
                new OpenAiService(System.getenv("OPENAI_KEY"), Duration.ofSeconds(60));
        CreateImageRequest openairequest = CreateImageRequest.builder()
                .prompt(imagedescription)
                .build();
        System.out.println("\nImage is located at:");
        String imageLocation = service.createImage(openairequest).getData().get(0).getUrl();
        service.shutdownExecutor();
        return imageLocation;
    }

    /**
     * Transcribes the given audio file with Whisper.
     * NOTE(review): the OpenAiService built here is passed to transcribeFile but
     * not actually used by it (transcribeFile issues its own REST call) - candidate
     * for cleanup once confirmed.
     */
    public String transcribe(File file) throws Exception {
        // Key read from the environment instead of being hard-coded in source
        OpenAiService service =
                new OpenAiService(System.getenv("OPENAI_KEY"), Duration.ofSeconds(60));
        String audioTranscription = transcribeFile(file, service);
        service.shutdownExecutor();
        return audioTranscription;
    }

    /**
     * Calls the OpenAI transcription REST endpoint directly with a multipart upload.
     *
     * @param file    audio file to transcribe
     * @param service unused (see note on transcribe)
     * @return raw JSON response body from the API
     */
    private String transcribeFile(File file, OpenAiService service) throws Exception {
        String endpoint = "https://api.openai.com/v1/audio/transcriptions";
        String modelName = "whisper-1";
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.MULTIPART_FORM_DATA);
        headers.setBearerAuth(System.getenv("OPENAI_KEY"));
        MultiValueMap<String, Object> body = new LinkedMultiValueMap<>();
        byte[] fileBytes = new byte[0];
        try (FileInputStream fis = new FileInputStream(file);
                ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
            byte[] buffer = new byte[1024];
            int bytesRead;
            while ((bytesRead = fis.read(buffer)) != -1) {
                bos.write(buffer, 0, bytesRead);
            }
            fileBytes = bos.toByteArray();
        } catch (IOException e) {
            e.printStackTrace();
        }
        // Wrap the bytes so the multipart part carries the original filename
        body.add("file", new ByteArrayResource(fileBytes) {
            @Override
            public String getFilename() {
                return file.getName();
            }
        });
        body.add("model", modelName);
        HttpEntity<MultiValueMap<String, Object>> requestEntity = new HttpEntity<>(body, headers);
        RestTemplate restTemplate = new RestTemplate();
        ResponseEntity<String> response = restTemplate.exchange(endpoint, HttpMethod.POST, requestEntity, String.class);
        return response.getBody();
    }

    /**
     * Captures microphone audio on a background thread into an AudioInputStream.
     * Recording runs from start() until stop() clears the thread reference.
     */
    public class SoundRecorder implements Runnable {
        AudioInputStream audioInputStream;
        private AudioFormat format;
        public Thread thread;

        public SoundRecorder build(AudioFormat format) {
            this.format = format;
            return this;
        }

        public void start() {
            thread = new Thread(this);
            thread.start();
        }

        public void stop() {
            // The capture loop polls this field; nulling it ends the recording
            thread = null;
        }

        @Override
        public void run() {
            try (final ByteArrayOutputStream out = new ByteArrayOutputStream(); final TargetDataLine line = getTargetDataLineForRecord();) {
                int frameSizeInBytes = format.getFrameSize();
                int bufferLengthInFrames = line.getBufferSize() / 8;
                final int bufferLengthInBytes = bufferLengthInFrames * frameSizeInBytes;
                buildByteOutputStream(out, line, frameSizeInBytes, bufferLengthInBytes);
                this.audioInputStream = new AudioInputStream(line);
                setAudioInputStream(convertToAudioIStream(out, frameSizeInBytes));
                audioInputStream.reset();
            } catch (IOException ex) {
                ex.printStackTrace();
            } catch (Exception ex) {
                ex.printStackTrace();
            }
        }

        /** Reads from the line into out until stop() is called or the line ends. */
        public void buildByteOutputStream(final ByteArrayOutputStream out, final TargetDataLine line, int frameSizeInBytes, final int bufferLengthInBytes) throws IOException {
            final byte[] data = new byte[bufferLengthInBytes];
            int numBytesRead;
            line.start();
            while (thread != null) {
                if ((numBytesRead = line.read(data, 0, bufferLengthInBytes)) == -1) {
                    break;
                }
                out.write(data, 0, numBytesRead);
            }
        }

        private void setAudioInputStream(AudioInputStream aStream) {
            this.audioInputStream = aStream;
        }

        /** Converts the captured bytes into a rewindable AudioInputStream. */
        public AudioInputStream convertToAudioIStream(final ByteArrayOutputStream out, int frameSizeInBytes) {
            byte[] audioBytes = out.toByteArray();
            AudioInputStream audioStream =
                    new AudioInputStream(new ByteArrayInputStream(audioBytes), format,
                            audioBytes.length / frameSizeInBytes);
            System.out.println("Recording finished");
            return audioStream;
        }

        /** Opens a TargetDataLine for the configured format, or null if unsupported. */
        public TargetDataLine getTargetDataLineForRecord() {
            TargetDataLine line;
            DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
            if (!AudioSystem.isLineSupported(info)) {
                return null;
            }
            try {
                line = (TargetDataLine) AudioSystem.getLine(info);
                line.open(format, line.getBufferSize());
            } catch (final Exception ex) {
                return null;
            }
            return line;
        }
    }
}
| [
"com.theokanning.openai.image.CreateImageRequest.builder"
] | [((4160, 4255), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((4160, 4230), 'com.theokanning.openai.image.CreateImageRequest.builder')] |
package de.garrafao.phitag.computationalannotator.usepair.service;
import com.theokanning.openai.OpenAiHttpException;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import de.garrafao.phitag.computationalannotator.common.error.WrongApiKeyException;
import de.garrafao.phitag.computationalannotator.common.error.WrongModelException;
import de.garrafao.phitag.computationalannotator.common.function.CommonFunction;
import de.garrafao.phitag.computationalannotator.usepair.data.UsePairPrompt;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
@Service
public class UsePairOpenAIService {
    // Builds the system/user chat messages for a use-pair judgment.
    private final UsePairPrompt usePairPrompt;
    // Shared helpers, e.g. extracting the integer judgment from raw model text.
    private final CommonFunction commonFunction;
    @Autowired
    public UsePairOpenAIService(UsePairPrompt usePairPrompt, CommonFunction commonFunction) {
        this.usePairPrompt = usePairPrompt;
        this.commonFunction = commonFunction;
    }
    /**
     * Asks the given OpenAI chat model to judge a pair of usages for a lemma
     * and returns the integer judgment extracted from the reply, as a string.
     *
     * @throws WrongModelException  when the API rejects the model name
     * @throws WrongApiKeyException when the API rejects the key
     */
    public String chat(final String apiKey, final String model, final String prompt, final String firstUsage,
                       final String secondUsage, final String lemma) {
        try {
            final List<ChatMessage> chatMessages =
                    this.usePairPrompt.getChatMessages(prompt, firstUsage, secondUsage, lemma);
            final OpenAiService openAiService = new OpenAiService(apiKey);
            final ChatCompletionRequest request = ChatCompletionRequest.builder()
                    .messages(chatMessages)
                    .model(model)
                    .temperature(0.9)
                    .topP(0.9)
                    .n(1)
                    .build();
            final StringBuilder reply = new StringBuilder();
            for (ChatCompletionChoice choice : openAiService.createChatCompletion(request).getChoices()) {
                final ChatMessage choiceMessage = choice.getMessage();
                if (choiceMessage == null) {
                    continue;
                }
                System.out.println(choiceMessage.getContent());
                reply.append(choiceMessage.getContent()).append(System.lineSeparator());
            }
            System.out.println("response "+ reply);
            final int judgment = this.commonFunction.extractInteger(reply.toString());
            System.out.println("integer " + judgment);
            return String.valueOf(judgment);
        } catch (OpenAiHttpException e) {
            // Map the two well-known API failures onto domain exceptions.
            if (e.getMessage().contains("The model")) {
                throw new WrongModelException(model);
            }
            if (e.getMessage().contains("Incorrect API key provided")) {
                throw new WrongApiKeyException();
            }
            throw e;
        }
    }
}
| [
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] | [((1606, 1835), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1606, 1806), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1606, 1780), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1606, 1749), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1606, 1711), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1606, 1677), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')] |
package idatt2106v231.backend.service;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.OpenAiApi;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import idatt2106v231.backend.model.OpenAiKey;
import idatt2106v231.backend.repository.OpenAiKeyRepository;
import io.github.cdimascio.dotenv.Dotenv;
import okhttp3.OkHttpClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import retrofit2.Retrofit;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import static com.theokanning.openai.service.OpenAiService.*;
/**
* Class to manage Ai.
*/
@Service
public class AiServices {
private final OpenAiKeyRepository openAiKeyRepo;
/**
* Constructor which sets the Open AI key repository.
*/
@Autowired
public AiServices(OpenAiKeyRepository openAiKeyRepo) {
this.openAiKeyRepo = openAiKeyRepo;
}
/**
* Gets a chat completion using OpenAI GPT-3.
*
* @param content the content of the query
* @return the answer produced by the AI
*/
public String getChatCompletion(String content) {
try {
String token = getOpenAiApiKey();
if (token.startsWith("ERROR :")) throw new Exception(token);
ObjectMapper mapper = defaultObjectMapper();
Duration timeout = Duration.ofSeconds(300);
OkHttpClient client = defaultClient(token, timeout)
.newBuilder()
.build();
Retrofit retrofit = defaultRetrofit(client, mapper);
OpenAiApi api = retrofit.create(OpenAiApi.class);
OpenAiService service = new OpenAiService(api);
List<ChatMessage> messages = new ArrayList<>();
messages.add(new ChatMessage("user", content));
ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder()
.messages(messages)
.model("gpt-3.5-turbo")
.temperature(0.0)
.build();
return String.valueOf(service.createChatCompletion(chatCompletionRequest)
.getChoices().get(0).getMessage().getContent());
} catch (Exception e) {
return "ERROR: " + e.getMessage();
}
}
/**
* Gets the OpenAi API key.
* This must either be stored in the table 'open_ai_key' in the database,
* or in a .env file in the root of the project folder as OPENAI_TOKEN=your_token.
*
* @return the key
*/
public String getOpenAiApiKey() {
try {
String token = null;
Optional<OpenAiKey> openAiKey = openAiKeyRepo.findFirstByOrderByIdDesc();
if (openAiKey.isPresent()) token = openAiKey.get().getApiKey();
if (token == null) {
Dotenv dotenv = Dotenv.configure().load();
token = dotenv.get("OPENAI_TOKEN");
if (token == null) {
return "Token is missing. " +
"Make sure a valid OpenAI API key is stored in the database " +
"or in a .env file in the root of the project";
}
}
return token;
} catch (Exception e) {
return "ERROR: " + e.getMessage();
}
}
} | [
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] | [((2086, 2268), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2086, 2239), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2086, 2201), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2086, 2157), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3079, 3104), 'io.github.cdimascio.dotenv.Dotenv.configure')] |
package com.ramesh.openai;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
/***
 * This project demonstrates the Chain of Thought (CoT) prompting technique which is useful when there is need
 * for analytical, reasoning, deriving etc. kind of problems
 ***/
class ChainOfThoughtPrompting {
    public static void main(String... args) {
        // SECURITY FIX: the OpenAI key used to be hard-coded here. A key
        // committed to source control is a leaked credential; read it from the
        // environment instead (export OPENAI_TOKEN=...).
        String token = System.getenv("OPENAI_TOKEN");
        String model = "gpt-3.5-turbo";
        // service handle for calling OpenAI APIs (30 second request timeout)
        OpenAiService service = new OpenAiService(token, Duration.ofSeconds(30));
        System.out.println("-----------------------------------------------------------");
        // prompt - change this and run again and again. Mostly ChatGPT will not give the right response for complex prompt like puzzle.
        // that's where Chain of thought comes to help (next prompt with COT is given below)
        String prompt = "I went to the market and bought 10 apples. I gave 2 apples to the neighbor and 2 to the repairman. I then went and bought 5 more apples and ate 1. How many apples did I remain with?";
        System.out.println(prompt);
        // create the Chat message object
        final List<ChatMessage> messages = new ArrayList<>();
        messages.add(new ChatMessage(ChatMessageRole.USER.value(), prompt));
        // call ChatGPT ChatCompletion API and get the response
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
                .builder()
                .model(model)
                .messages(messages)
                .n(1)
                .temperature(.1)
                .maxTokens(200)
                .logitBias(new HashMap<>())
                .build();
        System.out.println("------------");
        System.out.print("ChatGPT response=");
        service.createChatCompletion(chatCompletionRequest).getChoices().forEach((c) -> {
            System.out.println(c.getMessage().getContent());
        });
        System.out.println("\n-----------------------------------------------------------");
        // Chain-of-Thought prompting: show worked examples of the reasoning so
        // the model imitates the thought process and (usually) answers right.
        // (Array literal replaces ten manual indexed assignments.)
        String[] prompts = {
                "The odd numbers in this group add up to an even number: 4, 8, 9, 15, 12, 2, 1.",
                "A: The answer is False.",
                "The odd numbers in this group add up to an even number: 17, 10, 19, 4, 8, 12, 24.",
                "A: The answer is True.",
                "The odd numbers in this group add up to an even number: 16, 11, 14, 4, 8, 13, 24.",
                "A: The answer is True.",
                "The odd numbers in this group add up to an even number: 17, 9, 10, 12, 13, 4, 2.",
                "A: The answer is False.",
                "The odd numbers in this group add up to an even number: 15, 32, 5, 13, 82, 7, 1. ",
                "A: ",
        };
        final List<ChatMessage> messagesCot = new ArrayList<>();
        for (String p : prompts) {
            System.out.println(p);
            messagesCot.add(new ChatMessage(ChatMessageRole.ASSISTANT.value(), p));
        }
        ChatCompletionRequest chatCompletionRequest2 = ChatCompletionRequest
                .builder()
                .model(model)
                .messages(messagesCot)
                .n(1)
                .temperature(.1)
                .maxTokens(50)
                .logitBias(new HashMap<>())
                .build();
        System.out.println("------------");
        System.out.print("ChatGPT response=");
        service.createChatCompletion(chatCompletionRequest2).getChoices().forEach((c) -> {
            System.out.println(c.getMessage().getContent());
        });
        service.shutdownExecutor();
    }
}
| [
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value"
] | [((1626, 1654), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3533, 3566), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value')] |
package com.bambooleanlogic.ai;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
public class Main {
public static void main(String[] args) throws IOException {
SqlCode sql = generateSql(
"MySQL",
"Get all students who has at least one class where their grade is above average"
);
if (sql.code != null) {
System.out.println("--- CODE -----------------------");
System.out.println(sql.code);
System.out.println("--- COMMENT --------------------");
System.out.println(sql.comment);
System.out.println("--------------------------------");
} else {
System.out.println("--------------------------------");
System.out.println(sql.comment);
System.out.println("--------------------------------");
}
}
private static SqlCode generateSql(String dialect, String prompt) throws IOException {
String apiToken = Files.readString(Path.of("P:\\oapi.txt"));
OpenAiService service = new OpenAiService(apiToken);
ChatCompletionRequest request = ChatCompletionRequest.builder()
.model("gpt-3.5-turbo")
.messages(List.of(
new ChatMessage("system",
"You are a helpful assistant who produces " + dialect + " code."
),
new ChatMessage("user", prompt)
))
.build();
String response = service.createChatCompletion(request).getChoices().get(0).getMessage().getContent();
int start = response.indexOf("```");
if (start != -1) {
start += 3;
int end = response.indexOf("```", start);
if (end != -1) {
String code = response.substring(start, end).trim();
String comment = response.substring(end + 3).trim();
return new SqlCode(code, comment);
}
}
return new SqlCode(null, response);
}
private static final class SqlCode {
public final String code;
public final String comment;
public SqlCode(String code, String comment) {
this.code = code;
this.comment = comment;
}
}
} | [
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] | [((1375, 1755), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1375, 1730), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1375, 1446), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')] |
package com.chat.base.controller;
import com.chat.base.bean.annotation.VisitLimit;
import com.chat.base.bean.common.BaseCodeEnum;
import com.chat.base.bean.constants.*;
import com.chat.base.bean.entity.GptModelConfig;
import com.chat.base.bean.vo.*;
import com.chat.base.bean.entity.PromptModel;
import com.chat.base.bean.gpt.ApiChatReq;
import com.chat.base.bean.gpt.ChatReq;
import com.chat.base.bean.req.CompletionReq;
import com.chat.base.handler.*;
import com.chat.base.handler.gpt.OpenAiProxyServiceFactory;
import com.chat.base.service.ChatBaseOpenAiProxyService;
import com.chat.base.utils.*;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import io.github.asleepyfish.enums.RoleEnum;
import io.github.asleepyfish.exception.ChatGPTException;
import org.springframework.beans.factory.annotation.Value;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletResponse;
import javax.validation.Valid;
import java.io.*;
import java.util.*;
import java.util.concurrent.TimeUnit;
/**
 * REST controller exposing the AI chat endpoints: streaming chat for the web
 * UI, chat authenticated by a caller-supplied GPT token, and one-shot
 * prompt-template completion.
 *
 * @author huyd
 * @date 2023/5/5 11:19 PM
 */
@Slf4j
@RestController
public class AIChatController extends BaseController {
    @Autowired
    private UserLogManager userLogManager;
    @Autowired
    private AIChatManger AIChatManger;
    @Autowired
    private PromptModelManager promptModelManager;
    @Autowired
    private DrawTaskInfoManager drawTaskInfoManager;
    @Autowired
    private WeightAlgorithmManager weightAlgorithmManager;
    // Externally configured file-token path (presumably for Midjourney uploads
    // given the field name — confirm against configuration).
    @Value("${file-token-path}")
    private String mjTokenPath;
    // Cache of proxy services keyed by (token + model) so repeated API-chat
    // requests reuse the same client; entries expire 1000 seconds after write.
    private static Cache<String, ChatBaseOpenAiProxyService> cache = CacheBuilder.newBuilder().initialCapacity(10).maximumSize(1000).expireAfterWrite(1000, TimeUnit.SECONDS).build();
    /**
     * Streaming chat endpoint for the web UI. Rate-limited per IP and open to
     * anonymous visitors; logged-in users who have a GPT API token take the
     * full-status streaming path, everyone else the no-status path.
     */
    @VisitLimit(value = {LimitEnum.IP}, scope = CommonConstant.NO_LOGIN_SCOPE)
    @PostMapping("/chat/streamChatWithWeb/V3")
    public void streamChatWithWebV3(@RequestBody @Valid ChatReq chatReq, HttpServletResponse response) throws Exception {
        String ip = HttpUtil.getIpAddress();
        String browserName = HttpUtil.browserName();
        Long id = SessionUser.getUserId();
        String conversationId = chatReq.getConversationId();
        // Anonymous visitors are identified by their conversation id.
        String userId = id == null ? conversationId : String.valueOf(id);
        ModelPriceEnum modelPriceEnum = ModelPriceEnum.modelPriceMap.get(chatReq.getModel());
        if (modelPriceEnum == null) {
            // Unknown/unpriced model: reject before doing any work.
            response.getOutputStream().write(BaseCodeEnum.MODEL_NO_OPEN.getMsg().getBytes());
            return;
        }
        CacheUserInfoVo cacheUserInfoVo = SessionUser.get();
        try {
            if (Objects.nonNull(cacheUserInfoVo) && Objects.nonNull(cacheUserInfoVo.getGptApiTokenVo())) {
                AIChatManger.chatStream(chatReq, cacheUserInfoVo, response);
            } else {
                AIChatManger.streamChatWithWebV3NoStatus(chatReq, response);
            }
        } catch (ChatGPTException e) {
            // The user actively stopped the answer — not a server failure.
            log.error("streamChatWithWebV3 user error chatReq={} ", chatReq, e);
        } catch (Exception e) {
            log.error("streamChatWithWebV3 error chatReq={} ", chatReq, e);
            userLogManager.addUserLog(chatReq.getAppName(), userId, OpEnum.GPT3.getOp(), ip, browserName);
            response.getOutputStream().write(BaseCodeEnum.SERVER_BUSY.getMsg().getBytes());
        } finally {
            response.getOutputStream().close();
        }
    }
    /**
     * Streaming chat endpoint authenticated with a caller-supplied GPT token
     * (used to verify that the token works).
     *
     * @param chatReq  carries the token, optional proxy URL, model and prompt
     * @param response written to as a server-sent event stream
     * @throws Exception on unrecoverable I/O failures
     */
    @PostMapping("/chat/streamChatWithWeb/api/chat")
    public void streamChatWithApiChatWeb(@RequestBody @Valid ApiChatReq chatReq, HttpServletResponse response) throws Exception {
        String ip = HttpUtil.getIpAddress();
        String browserName = HttpUtil.browserName();
        String uid = chatReq.getToken();
        try {
            response.setContentType("text/event-stream");
            response.setCharacterEncoding("UTF-8");
            response.setHeader("Cache-Control", "no-cache");
            String model = StringUtils.isNoneEmpty(chatReq.getModel()) ? chatReq.getModel() : "gpt-3.5-turbo";
            // Reuse (or lazily create) the proxy service for this token+model.
            ChatBaseOpenAiProxyService proxyService = cache.get(chatReq.getToken() + model, () ->
                    OpenAiProxyServiceFactory.getService(chatReq.getToken(), chatReq.getProxyUrl(), model));
            Integer contentNumber = CommonConstant.CONTENT_NUMBER;
            String user = chatReq.getConversationId();
            // Load recent history, append the new prompt, then trim so the
            // conversation fits the model's context window.
            LinkedList<ChatMessage> userChatMessages = ChatMessageCacheUtil.getUserChatMessages(user, contentNumber);
            userChatMessages.add(new ChatMessage(RoleEnum.USER.getRoleName(), chatReq.getPrompt()));
            ChatMessageCacheUtil.getOkUserChatMessages(userChatMessages, model);
            if (userChatMessages.size() <= 0) {
                // Trimming removed everything: the prompt alone exceeds the limit.
                response.getOutputStream().write(BaseCodeEnum.TOKEN_OVER.getMsg().getBytes());
                response.getOutputStream().close();
                return;
            }
            ChatMessageResultVo streamChatCompletion = proxyService.createStreamChatCompletion(ChatCompletionRequest.builder()
                    .model(model)
                    .messages(userChatMessages)
                    .user(user)
                    .temperature(chatReq.getTemperature())
                    .topP(chatReq.getTop_p())
                    .stream(true)
                    .build(), response.getOutputStream(), uid);
            if(streamChatCompletion!=null){
                ChatMessageCacheUtil.saveChatMessage(user,streamChatCompletion.getChatMessage());
            }
        } catch (ChatGPTException e) {
            // The user actively stopped the answer.
            log.error("streamChatWithWebV3 user error chatReq={} ", chatReq, e);
            response.getOutputStream().write(BaseCodeEnum.TERMINATE.getMsg().getBytes());
        } catch (Exception e) {
            log.error("streamChatWithWebV3 error chatReq={} ", chatReq, e);
            userLogManager.addUserLog("BlueCatApiChat", uid, OpEnum.GPT3.getOp(), ip, browserName);
            response.getOutputStream().write(BaseCodeEnum.SERVER_BUSY.getMsg().getBytes());
        } finally {
            response.getOutputStream().close();
        }
    }
    /**
     * One-shot completion endpoint: prepends a stored prompt template to the
     * user's content and streams the completion back. Requires login.
     */
    @PostMapping("/chat/streamChatWithWeb/completion")
    public void completion(@RequestBody @Validated CompletionReq completionReq, HttpServletResponse response) throws IOException {
        CacheUserInfoVo cacheUserInfoVo = SessionUser.get();
        if (cacheUserInfoVo == null) {
            response.getOutputStream().write("请登录之后再使用!".getBytes());
            return;
        }
        response.setContentType("text/event-stream");
        response.setCharacterEncoding("UTF-8");
        response.setHeader("Cache-Control", "no-cache");
        StringBuilder builder = new StringBuilder();
        PromptModel prompt = promptModelManager.getPromptById(Long.parseLong(completionReq.getModelId()));
        if (prompt == null || StringUtils.isBlank(prompt.getContent())) {
            response.getOutputStream().write("模板已过期,请联系管理员".getBytes());
            return;
        }
        // Final prompt = stored template + newline + user content.
        builder.append(prompt.getContent()).append("\n");
        builder.append(completionReq.getContent());
        String uid = UUID.randomUUID().toString();
        String model = StringUtils.isNoneEmpty(completionReq.getModel()) ? completionReq.getModel() : "gpt-3.5-turbo";
        // Weighted selection of an available model configuration for this user.
        Optional<GptModelConfig> modelConfig = weightAlgorithmManager.round(cacheUserInfoVo, model);
        if (!modelConfig.isPresent()) {
            response.getOutputStream().write(BaseCodeEnum.NO_MODEL_ROLE.getMsg().getBytes());
            return;
        }
        GptModelConfig gptModelConfig = modelConfig.get();
        ChatBaseOpenAiProxyService proxyService = OpenAiProxyServiceFactory.createProxyService(gptModelConfig.getId().toString());
        if (proxyService == null) {
            response.getOutputStream().write(BaseCodeEnum.NO_MODEL.getMsg().getBytes());
            response.getOutputStream().close();
            return;
        }
        LinkedList<ChatMessage> userChatMessages = new LinkedList<>();
        userChatMessages.add(new ChatMessage(RoleEnum.USER.getRoleName(), builder.toString()));
        proxyService.createStreamChatCompletion(ChatCompletionRequest.builder()
                .model(model)
                .messages(userChatMessages)
                .user(uid)
                .temperature(1.0)
                .topP(1.0)
                .stream(true)
                .build(), response.getOutputStream(), cacheUserInfoVo.getGptApiTokenVo().getToken());
    }
}
| [
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] | [((1974, 2086), 'com.google.common.cache.CacheBuilder.newBuilder'), ((1974, 2078), 'com.google.common.cache.CacheBuilder.newBuilder'), ((1974, 2037), 'com.google.common.cache.CacheBuilder.newBuilder'), ((1974, 2019), 'com.google.common.cache.CacheBuilder.newBuilder'), ((2792, 2838), 'com.chat.base.bean.common.BaseCodeEnum.MODEL_NO_OPEN.getMsg'), ((2792, 2827), 'com.chat.base.bean.common.BaseCodeEnum.MODEL_NO_OPEN.getMsg'), ((3663, 3707), 'com.chat.base.bean.common.BaseCodeEnum.SERVER_BUSY.getMsg'), ((3663, 3696), 'com.chat.base.bean.common.BaseCodeEnum.SERVER_BUSY.getMsg'), ((5036, 5063), 'io.github.asleepyfish.enums.RoleEnum.USER.getRoleName'), ((5266, 5309), 'com.chat.base.bean.common.BaseCodeEnum.TOKEN_OVER.getMsg'), ((5266, 5298), 'com.chat.base.bean.common.BaseCodeEnum.TOKEN_OVER.getMsg'), ((5498, 5811), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5498, 5782), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5498, 5748), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5498, 5702), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5498, 5643), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5498, 5611), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5498, 5563), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6209, 6251), 'com.chat.base.bean.common.BaseCodeEnum.TERMINATE.getMsg'), ((6209, 6240), 'com.chat.base.bean.common.BaseCodeEnum.TERMINATE.getMsg'), ((6507, 6551), 'com.chat.base.bean.common.BaseCodeEnum.SERVER_BUSY.getMsg'), ((6507, 6540), 'com.chat.base.bean.common.BaseCodeEnum.SERVER_BUSY.getMsg'), ((8032, 8078), 'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL_ROLE.getMsg'), ((8032, 8067), 'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL_ROLE.getMsg'), ((8383, 8424), 'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL.getMsg'), ((8383, 8413), 
'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL.getMsg'), ((8622, 8649), 'io.github.asleepyfish.enums.RoleEnum.USER.getRoleName'), ((8722, 8970), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8722, 8945), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8722, 8915), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8722, 8888), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8722, 8854), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8722, 8827), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8722, 8783), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')] |
package com.chunxia.chatgpt.chatapi;
import android.util.Log;
import com.blankj.utilcode.util.ThreadUtils;
import com.chunxia.chatgpt.model.review.SentenceCard;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import java.util.ArrayList;
import java.util.List;
/**
 * Stateful multi-round chat client for the OpenAI chat completion API.
 * The running conversation (system prompt plus user/assistant turns) is kept
 * in {@code oldMessages}, so every request carries the full history.
 * NOTE(review): {@code oldMessages} and {@code threadTasks} are plain
 * ArrayLists mutated from background tasks — presumably callers serialize
 * access on a single thread; confirm before using concurrently.
 */
public class MultiRoundChatAgent {
    private static final String TAG = "MultiRoundChatAiApi";
    // Full conversation history; index 0 is always the system message.
    private final List<ChatMessage> oldMessages = new ArrayList<>();
    private String model = "gpt-3.5-turbo";
    // Number of alternative completions requested per call.
    private int responseN = 1;
    // Upper bound on tokens generated per reply.
    private int maxTokenN = 512;
    private final ChatMessage systemMessage;
    private final String systemCommand;
    // Every background task ever submitted (used for bulk cancellation).
    private final List<ThreadUtils.Task<String>> threadTasks = new ArrayList<>();
    /** Creates an agent with an explicit system prompt, model and limits. */
    public MultiRoundChatAgent(String systemCommand, String model, int responseN, int maxTokenN) {
        this.systemCommand = systemCommand;
        this.model = model;
        this.responseN = responseN;
        this.maxTokenN = maxTokenN;
        this.systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), this.systemCommand);
        oldMessages.add(systemMessage);
    }
    /** Creates an agent with an empty system prompt and default settings. */
    public MultiRoundChatAgent() {
        this.systemCommand = "";
        this.systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), this.systemCommand);
        oldMessages.add(systemMessage);
    }
    /** Creates an agent with the given system prompt and default settings. */
    public MultiRoundChatAgent(String systemCommand) {
        this.systemCommand = systemCommand;
        this.systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), this.systemCommand);
        oldMessages.add(systemMessage);
    }
    /**
     * Sends a user message on a background I/O thread and delivers the reply
     * to the callback on success.
     */
    public void sendMessageInThread(String message, ReceiveOpenAiReply onReceiveOpenAiReply) {
        ThreadUtils.Task<String> tTask = new ThreadUtils.SimpleTask<String>() {
            @Override
            public String doInBackground() throws Throwable {
                return sendToChatAi(message);
            }
            @Override
            public void onSuccess(String result) {
                Log.i(TAG, "receive reply from chatgpt");
                onReceiveOpenAiReply.onSuccess(result);
            }
        };
        threadTasks.add(tTask);
        ThreadUtils.getIoPool().execute(tTask);
    }
    /** Sends a user message synchronously; may return null on failure. */
    public String sendMessage(String message) {
        return sendToChatAi(message);
    }
    public void cancelAllCurrentThread() {
        // TODO: cancel only the task(s) currently executing, not every task ever submitted
        threadTasks.forEach(ThreadUtils::cancel);
    }
    /**
     * Builds a SentenceCard from the first completed round: history index 1
     * (first user turn) and index 2 (first assistant reply). Returns null
     * until at least one full round exists.
     */
    public SentenceCard getOneRoundSentenceCard() {
        if (oldMessages.size() < 3) {
            return null;
        }
        SentenceCard sentenceCard = new SentenceCard(oldMessages.get(2).getContent(), oldMessages.get(1).getContent());
        return sentenceCard;
    }
    /** Callback interface for asynchronous replies. */
    public interface ReceiveOpenAiReply {
        void onSuccess(String reply);
    }
    private void insertUserMessage(String message) {
        final ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), message);
        oldMessages.add(userMessage);
    }
    /**
     * Appends the user message to the history, calls the chat completion API,
     * and records the first returned choice as the assistant turn.
     *
     * @return the assistant reply content, or null when the service is
     *         unavailable or returned no choices
     */
    private String sendToChatAi(String message) {
        Log.i(TAG, "User: " + message);
        insertUserMessage(message);
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
                .builder()
                .model(model)
                .messages(oldMessages)
                .n(responseN)
                .maxTokens(maxTokenN)
                .build();
        OpenAiService openAiService = OpenAIServiceManager.getOpenAiService();
        if (openAiService == null) {
            return null;
        } else {
            List<ChatCompletionChoice> choices = openAiService.createChatCompletion(chatCompletionRequest).getChoices();
            if (!choices.isEmpty()) {
                String content = choices.get(0).getMessage().getContent();
                Log.i(TAG, "ChatGpt: " + content);
                addChatGptReplyToMessage(choices.get(0).getMessage());
                return content;
            }
        }
        return null;
    }
    /** Resets the history back to just the system message. */
    public void clearOldMessage() {
        oldMessages.clear();
        oldMessages.add(systemMessage);
    }
    /** Appends a (typically assistant) message to the history. */
    public void addChatGptReplyToMessage(ChatMessage message) {
        oldMessages.add(message);
    }
    public int getMaxTokenN() {
        return maxTokenN;
    }
    public void setMaxTokenN(int maxTokenN) {
        this.maxTokenN = maxTokenN;
    }
}
| [
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value"
] | [((1259, 1289), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1473, 1503), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1717, 1747), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((2390, 2428), 'com.blankj.utilcode.util.ThreadUtils.getIoPool'), ((3155, 3183), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')] |
package com.theokanning.openai.service;
import com.theokanning.openai.moderation.Moderation;
import com.theokanning.openai.moderation.ModerationRequest;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Integration test for the OpenAI moderation endpoint. Requires a valid
 * OPENAI_TOKEN environment variable and network access.
 */
public class ModerationTest {
    String token = System.getenv("OPENAI_TOKEN");
    com.theokanning.openai.service.OpenAiService service = new OpenAiService(token);
    @Test
    void createModeration() {
        // A clearly violent input must be flagged by the moderation model.
        final ModerationRequest request = ModerationRequest.builder()
                .model("text-moderation-latest")
                .input("I want to kill them")
                .build();
        final Moderation first = service.createModeration(request).getResults().get(0);
        assertTrue(first.isFlagged());
    }
}
| [
"com.theokanning.openai.moderation.ModerationRequest.builder"
] | [((504, 651), 'com.theokanning.openai.moderation.ModerationRequest.builder'), ((504, 626), 'com.theokanning.openai.moderation.ModerationRequest.builder'), ((504, 577), 'com.theokanning.openai.moderation.ModerationRequest.builder')] |
package com.theokanning.openai.service;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Integration test for the OpenAI chat completion endpoint. Requires a valid
 * OPENAI_TOKEN environment variable and network access.
 */
class ChatCompletionTest {
    String token = System.getenv("OPENAI_TOKEN");
    OpenAiService service = new OpenAiService(token);
    @Test
    void createChatCompletion() {
        // Single system message establishing the persona for the completion.
        final ChatMessage persona = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a dog and will speak as such.");
        final List<ChatMessage> conversation = new ArrayList<>();
        conversation.add(persona);
        final ChatCompletionRequest request = ChatCompletionRequest
                .builder()
                .model("gpt-3.5-turbo")
                .messages(conversation)
                .n(5)
                .maxTokens(50)
                .logitBias(new HashMap<>())
                .build();
        // Requesting n=5 choices must yield exactly 5 completions.
        final List<ChatCompletionChoice> result = service.createChatCompletion(request).getChoices();
        assertEquals(5, result.size());
    }
}
| [
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value"
] | [((772, 802), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')] |
package com.couchbase.intellij.tree.iq.intents;
import com.couchbase.client.java.json.JsonArray;
import com.couchbase.client.java.json.JsonObject;
import com.couchbase.intellij.tree.iq.IQWindowContent;
import com.couchbase.intellij.tree.iq.chat.ChatExchangeAbortException;
import com.couchbase.intellij.tree.iq.chat.ChatGptHandler;
import com.couchbase.intellij.tree.iq.chat.ChatLink;
import com.couchbase.intellij.tree.iq.chat.ChatLinkService;
import com.couchbase.intellij.tree.iq.chat.ChatLinkState;
import com.couchbase.intellij.tree.iq.chat.ChatMessageEvent;
import com.couchbase.intellij.tree.iq.chat.ChatMessageListener;
import com.couchbase.intellij.tree.iq.chat.ConfigurationPage;
import com.couchbase.intellij.tree.iq.chat.ConversationContext;
import com.couchbase.intellij.tree.iq.core.IQCredentials;
import com.couchbase.intellij.tree.iq.intents.actions.ActionInterface;
import com.couchbase.intellij.tree.iq.settings.OpenAISettingsState;
import com.couchbase.intellij.workbench.Log;
import com.intellij.testFramework.fixtures.BasePlatformTestCase;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
/**
 * Base fixture for Couchbase IQ integration tests.
 * <p>
 * {@link #setUp()} logs into Capella with credentials taken from environment
 * variables, points the plugin's GPT-4 configuration at the organization's IQ
 * endpoint, and creates a fresh chat link and conversation context. Subclasses
 * use {@link #send} to exchange messages and the assertion helpers to inspect
 * the responses.
 * <p>
 * NOTE(review): conversation state lives in static fields rebuilt on every
 * {@code setUp()}; these tests presumably must not run concurrently — confirm.
 */
public abstract class AbstractIQTest extends BasePlatformTestCase {
    // %s is substituted with the organization id in setUp().
    private static final String IQ_URL = System.getenv("CAPELLA_DOMAIN") + "/v2/organizations/%s/integrations/iq/";
    private static final ChatGptHandler handler = new ChatGptHandler();
    private static ConversationContext ctx;
    private static ChatLink link;

    /**
     * Authenticates against Capella and routes the plugin's GPT-4 traffic to
     * the organization's IQ endpoint before each test.
     *
     * @throws Exception if the platform fixture fails to initialize
     */
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        IQCredentials credentials = new IQCredentials(System.getenv("IQ_ORG_LOGIN"), System.getenv("IQ_ORG_PASSWD"));
        assertTrue("Please set capella domain and IQ credentials using `CAPELLA_DOMAIN`, `IQ_ORG_ID`, `IQ_ORG_LOGIN`, and `IQ_ORG_PASSWD` envvars", credentials.doLogin());
        String orgId = System.getenv("IQ_ORG_ID");
        final String iqUrl = String.format(IQ_URL, orgId);
        // Non-streaming gpt-4 config authenticated by the Capella JWT.
        OpenAISettingsState.OpenAIConfig iqGptConfig = new OpenAISettingsState.OpenAIConfig();
        OpenAISettingsState.getInstance().setGpt4Config(iqGptConfig);
        OpenAISettingsState.getInstance().setEnableInitialMessage(false);
        iqGptConfig.setApiKey(credentials.getAuth().getJwt());
        iqGptConfig.setEnableStreamResponse(false);
        iqGptConfig.setModelName("gpt-4");
        iqGptConfig.setApiEndpointUrl(iqUrl);
        iqGptConfig.setEnableCustomApiEndpointUrl(true);
        ConfigurationPage cp = iqGptConfig.withSystemPrompt(IQWindowContent::systemPrompt);
        Log.setLevel(3);
        Log.setPrinter(new Log.StdoutPrinter());
        link = new ChatLinkService(getProject(), null, cp);
        ctx = new ChatLinkState(cp);
    }

    /** Sends a USER-role message and blocks until the response arrives. */
    protected void send(String message, Consumer<ChatMessageEvent.ResponseArrived> listener) {
        send(message, false, listener);
    }

    /**
     * Sends one message through the chat handler and blocks until the
     * exchange completes, invoking {@code listener} on the arrived response.
     *
     * @param message  prompt text
     * @param isSystem true to send with the SYSTEM role instead of USER
     * @param listener receives the response-arrived event
     */
    protected void send(String message, boolean isSystem, Consumer<ChatMessageEvent.ResponseArrived> listener) {
        ChatMessage chatMessage = new ChatMessage(
                isSystem ? ChatMessageRole.SYSTEM.value() : ChatMessageRole.USER.value(),
                message
        );
        ChatMessageEvent.Starting event = ChatMessageEvent.starting(AbstractIQTest.link, chatMessage);
        ctx.addChatMessage(chatMessage);
        List<ChatMessage> messages = ctx.getChatMessages(ctx.getModelType(), chatMessage);
        // NOTE(review): system messages appear to need explicit appending —
        // presumably getChatMessages omits them; confirm against ChatLinkState.
        if (isSystem) {
            messages.add(chatMessage);
        }
        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .messages(messages)
                .build();
        // blockingLast() keeps the test synchronous until the exchange ends.
        handler.handle(AbstractIQTest.ctx, event.initiating(request), new ChatMessageListener() {
            @Override
            public void exchangeStarting(ChatMessageEvent.Starting event) throws ChatExchangeAbortException {
            }
            @Override
            public void exchangeStarted(ChatMessageEvent.Started event) {
            }
            @Override
            public void responseArriving(ChatMessageEvent.ResponseArriving event) {
            }
            @Override
            public void responseArrived(ChatMessageEvent.ResponseArrived event) {
                // Hand the final response to the test's callback.
                listener.accept(event);
            }
            @Override
            public void responseCompleted(ChatMessageEvent.ResponseArrived event) {
            }
            @Override
            public void exchangeFailed(ChatMessageEvent.Failed event) {
                // Surface transport/API failures as test errors, preserving the cause.
                throw new RuntimeException("IQ Exchange failed", event.getCause());
            }
            @Override
            public void exchangeCancelled(ChatMessageEvent.Cancelled event) {
            }
        }).blockingLast();
    }

    /** Returns the single choice's content, asserting exactly one choice exists. */
    protected String getResponse(ChatMessageEvent.ResponseArrived response) {
        assertEquals(1, response.getResponseChoices().size());
        return response.getResponseChoices().get(0).getContent();
    }

    /** Parses the response content as a JSON object. */
    protected JsonObject getJson(ChatMessageEvent.ResponseArrived response) {
        return JsonObject.fromJson(getResponse(response));
    }

    /** Asserts the response content starts with '{' (looks like JSON). */
    protected void assertJsonResponse(ChatMessageEvent.ResponseArrived response) {
        String message = getResponse(response);
        assertTrue(message.startsWith("{"));
    }

    /** Asserts the trimmed response content does not open a JSON object. */
    protected void assertNotJson(ChatMessageEvent.ResponseArrived response) {
        assertFalse(getResponse(response).trim().charAt(0) == '{');
    }

    /**
     * Collects from the response's "actions" array every intent whose
     * "action" field equals the simple name of the given action class.
     */
    protected List<JsonObject> getIntents(ChatMessageEvent.ResponseArrived response, Class<? extends ActionInterface> action) {
        List<JsonObject> results = new ArrayList<>();
        JsonObject json = getJson(response);
        assertInstanceOf(json.get("actions"), JsonArray.class);
        JsonArray actions = json.getArray("actions");
        for (int i = 0; i < actions.size(); i++) {
            assertInstanceOf(actions.get(i), JsonObject.class);
            JsonObject intent = actions.getObject(i);
            assertInstanceOf(intent.get("action"), String.class);
            if (intent.getString("action").equals(action.getSimpleName())) {
                results.add(intent);
            }
        }
        return results;
    }
}
| [
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] | [((2263, 2323), 'com.couchbase.intellij.tree.iq.settings.OpenAISettingsState.getInstance'), ((2333, 2397), 'com.couchbase.intellij.tree.iq.settings.OpenAISettingsState.getInstance'), ((3263, 3293), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((3296, 3324), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3709, 3801), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3709, 3776), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')] |
package com.theokanning.openai.service;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import com.theokanning.openai.ListSearchParameters;
import com.theokanning.openai.OpenAiResponse;
import com.theokanning.openai.assistants.Assistant;
import com.theokanning.openai.assistants.AssistantFunction;
import com.theokanning.openai.assistants.AssistantRequest;
import com.theokanning.openai.assistants.AssistantToolsEnum;
import com.theokanning.openai.assistants.Tool;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatFunction;
import com.theokanning.openai.completion.chat.ChatFunctionCall;
import com.theokanning.openai.messages.Message;
import com.theokanning.openai.messages.MessageRequest;
import com.theokanning.openai.runs.RequiredAction;
import com.theokanning.openai.runs.Run;
import com.theokanning.openai.runs.RunCreateRequest;
import com.theokanning.openai.runs.RunStep;
import com.theokanning.openai.runs.SubmitToolOutputRequestItem;
import com.theokanning.openai.runs.SubmitToolOutputs;
import com.theokanning.openai.runs.SubmitToolOutputsRequest;
import com.theokanning.openai.runs.ToolCall;
import com.theokanning.openai.threads.Thread;
import com.theokanning.openai.threads.ThreadRequest;
import com.theokanning.openai.utils.TikTokensUtil;
import org.junit.jupiter.api.Test;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
class AssistantFunctionTest {
String token = System.getenv("OPENAI_TOKEN");
OpenAiService service = new OpenAiService(token, Duration.ofMinutes(1));
@Test
void createRetrieveRun() throws JsonProcessingException {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);
mapper.addMixIn(ChatFunction.class, ChatFunctionMixIn.class);
mapper.addMixIn(ChatCompletionRequest.class, ChatCompletionRequestMixIn.class);
mapper.addMixIn(ChatFunctionCall.class, ChatFunctionCallMixIn.class);
String funcDef = "{\n" +
" \"type\": \"object\",\n" +
" \"properties\": {\n" +
" \"location\": {\n" +
" \"type\": \"string\",\n" +
" \"description\": \"The city and state, e.g. San Francisco, CA\"\n" +
" },\n" +
" \"unit\": {\n" +
" \"type\": \"string\",\n" +
" \"enum\": [\"celsius\", \"fahrenheit\"]\n" +
" }\n" +
" },\n" +
" \"required\": [\"location\"]\n" +
"}";
Map<String, Object> funcParameters = mapper.readValue(funcDef, new TypeReference<Map<String, Object>>() {});
AssistantFunction function = AssistantFunction.builder()
.name("weather_reporter")
.description("Get the current weather of a location")
.parameters(funcParameters)
.build();
List<Tool> toolList = new ArrayList<>();
Tool funcTool = new Tool(AssistantToolsEnum.FUNCTION, function);
toolList.add(funcTool);
AssistantRequest assistantRequest = AssistantRequest.builder()
.model(TikTokensUtil.ModelEnum.GPT_4_1106_preview.getName())
.name("MATH_TUTOR")
.instructions("You are a personal Math Tutor.")
.tools(toolList)
.build();
Assistant assistant = service.createAssistant(assistantRequest);
ThreadRequest threadRequest = ThreadRequest.builder()
.build();
Thread thread = service.createThread(threadRequest);
MessageRequest messageRequest = MessageRequest.builder()
.content("What's the weather of Xiamen?")
.build();
Message message = service.createMessage(thread.getId(), messageRequest);
RunCreateRequest runCreateRequest = RunCreateRequest.builder()
.assistantId(assistant.getId())
.build();
Run run = service.createRun(thread.getId(), runCreateRequest);
assertNotNull(run);
Run retrievedRun = service.retrieveRun(thread.getId(), run.getId());
while (!(retrievedRun.getStatus().equals("completed"))
&& !(retrievedRun.getStatus().equals("failed"))
&& !(retrievedRun.getStatus().equals("requires_action"))){
retrievedRun = service.retrieveRun(thread.getId(), run.getId());
}
if (retrievedRun.getStatus().equals("requires_action")) {
RequiredAction requiredAction = retrievedRun.getRequiredAction();
System.out.println("requiredAction");
System.out.println(mapper.writeValueAsString(requiredAction));
List<ToolCall> toolCalls = requiredAction.getSubmitToolOutputs().getToolCalls();
ToolCall toolCall = toolCalls.get(0);
String toolCallId = toolCall.getId();
SubmitToolOutputRequestItem toolOutputRequestItem = SubmitToolOutputRequestItem.builder()
.toolCallId(toolCallId)
.output("sunny")
.build();
List<SubmitToolOutputRequestItem> toolOutputRequestItems = new ArrayList<>();
toolOutputRequestItems.add(toolOutputRequestItem);
SubmitToolOutputsRequest submitToolOutputsRequest = SubmitToolOutputsRequest.builder()
.toolOutputs(toolOutputRequestItems)
.build();
retrievedRun = service.submitToolOutputs(retrievedRun.getThreadId(), retrievedRun.getId(), submitToolOutputsRequest);
while (!(retrievedRun.getStatus().equals("completed"))
&& !(retrievedRun.getStatus().equals("failed"))
&& !(retrievedRun.getStatus().equals("requires_action"))){
retrievedRun = service.retrieveRun(thread.getId(), run.getId());
}
OpenAiResponse<Message> response = service.listMessages(thread.getId());
List<Message> messages = response.getData();
System.out.println(mapper.writeValueAsString(messages));
}
}
}
| [
"com.theokanning.openai.utils.TikTokensUtil.ModelEnum.GPT_4_1106_preview.getName",
"com.theokanning.openai.assistants.AssistantRequest.builder",
"com.theokanning.openai.messages.MessageRequest.builder",
"com.theokanning.openai.assistants.AssistantFunction.builder",
"com.theokanning.openai.runs.SubmitToolOutputsRequest.builder",
"com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder",
"com.theokanning.openai.threads.ThreadRequest.builder",
"com.theokanning.openai.runs.RunCreateRequest.builder"
] | [((3437, 3645), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((3437, 3620), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((3437, 3576), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((3437, 3506), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((3864, 4125), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((3864, 4100), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((3864, 4067), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((3864, 4003), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((3864, 3967), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((3914, 3966), 'com.theokanning.openai.utils.TikTokensUtil.ModelEnum.GPT_4_1106_preview.getName'), ((4239, 4287), 'com.theokanning.openai.threads.ThreadRequest.builder'), ((4391, 4498), 'com.theokanning.openai.messages.MessageRequest.builder'), ((4391, 4473), 'com.theokanning.openai.messages.MessageRequest.builder'), ((4627, 4726), 'com.theokanning.openai.runs.RunCreateRequest.builder'), ((4627, 4701), 'com.theokanning.openai.runs.RunCreateRequest.builder'), ((5724, 5871), 'com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder'), ((5724, 5842), 'com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder'), ((5724, 5805), 'com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder'), ((6090, 6210), 'com.theokanning.openai.runs.SubmitToolOutputsRequest.builder'), ((6090, 6181), 'com.theokanning.openai.runs.SubmitToolOutputsRequest.builder')] |
package com.theokanning.openai.service;
import com.theokanning.openai.audio.CreateSpeechRequest;
import com.theokanning.openai.audio.CreateTranscriptionRequest;
import com.theokanning.openai.audio.CreateTranslationRequest;
import com.theokanning.openai.audio.TranscriptionResult;
import com.theokanning.openai.audio.TranslationResult;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.time.Duration;
import okhttp3.MediaType;
import okhttp3.ResponseBody;
import static org.junit.jupiter.api.Assertions.*;
public class AudioTest {
    static String englishAudioFilePath = "src/test/resources/hello-world.mp3";
    static String koreanAudioFilePath = "src/test/resources/korean-hello.mp3";

    String token = System.getenv("OPENAI_TOKEN");
    OpenAiService service = new OpenAiService(token, Duration.ofSeconds(30));

    /** Plain transcription of the English clip returns its spoken text. */
    @Test
    void createTranscription() {
        CreateTranscriptionRequest request = CreateTranscriptionRequest.builder()
                .model("whisper-1")
                .build();
        String transcript = service.createTranscription(request, englishAudioFilePath).getText();
        assertEquals("Hello World.", transcript);
    }

    /** Verbose transcription also carries task, language, duration and segments. */
    @Test
    void createTranscriptionVerbose() {
        CreateTranscriptionRequest request = CreateTranscriptionRequest.builder()
                .model("whisper-1")
                .responseFormat("verbose_json")
                .build();
        TranscriptionResult result = service.createTranscription(request, englishAudioFilePath);
        assertEquals("Hello World.", result.getText());
        assertEquals("transcribe", result.getTask());
        assertEquals("english", result.getLanguage());
        assertTrue(result.getDuration() > 0);
        assertEquals(1, result.getSegments().size());
    }

    /** Translation renders the Korean clip into English text. */
    @Test
    void createTranslation() {
        CreateTranslationRequest request = CreateTranslationRequest.builder()
                .model("whisper-1")
                .build();
        String translation = service.createTranslation(request, koreanAudioFilePath).getText();
        assertEquals("Hello, my name is Yoona. I am a Korean native speaker.", translation);
    }

    /** Verbose translation also carries task, language, duration and segments. */
    @Test
    void createTranslationVerbose() {
        CreateTranslationRequest request = CreateTranslationRequest.builder()
                .model("whisper-1")
                .responseFormat("verbose_json")
                .build();
        TranslationResult result = service.createTranslation(request, koreanAudioFilePath);
        assertEquals("Hello, my name is Yoona. I am a Korean native speaker.", result.getText());
        assertEquals("translate", result.getTask());
        assertEquals("english", result.getLanguage());
        assertTrue(result.getDuration() > 0);
        assertEquals(1, result.getSegments().size());
    }

    /** Text-to-speech yields a non-empty MPEG audio body. */
    @Test
    void createSpeech() throws IOException {
        CreateSpeechRequest request = CreateSpeechRequest.builder()
                .model("tts-1")
                .input("Hello World.")
                .voice("alloy")
                .build();
        final ResponseBody audio = service.createSpeech(request);
        assertNotNull(audio);
        assertEquals(MediaType.get("audio/mpeg"), audio.contentType());
        assertTrue(audio.bytes().length > 0);
    }
}
| [
"com.theokanning.openai.audio.CreateTranslationRequest.builder",
"com.theokanning.openai.audio.CreateSpeechRequest.builder",
"com.theokanning.openai.audio.CreateTranscriptionRequest.builder"
] | [((958, 1055), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((958, 1030), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1334, 1479), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1334, 1454), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1334, 1406), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1971, 2066), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((1971, 2041), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((2376, 2519), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((2376, 2494), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((2376, 2446), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((3049, 3206), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((3049, 3181), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((3049, 3149), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((3049, 3110), 'com.theokanning.openai.audio.CreateSpeechRequest.builder')] |
package org.zhong.chatgpt.wechat.bot.chatgptwechatbot.test;
import java.time.Duration;
import java.util.List;
import org.apache.http.client.CookieStore;
import org.apache.http.cookie.Cookie;
import org.apache.http.impl.client.BasicCookieStore;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.junit.jupiter.api.Test;
import org.zhong.chatgpt.wechat.bot.config.BotConfig;
import com.theokanning.openai.completion.CompletionRequest;
import cn.zhouyafeng.itchat4j.utils.MyHttpClient;
import com.theokanning.openai.OpenAiService;
public class TestOpenAI {
    private static CloseableHttpClient httpClient;
    private static MyHttpClient instance = null;
    private static CookieStore cookieStore;

    static {
        // One shared cookie store backs every request made by this client.
        cookieStore = new BasicCookieStore();
        httpClient = HttpClients.custom().setDefaultCookieStore(cookieStore).build();
    }

    /**
     * Looks up a cookie by name (case-insensitively) in the shared store.
     *
     * @param name cookie name to match
     * @return the cookie's value, or null when absent
     */
    public static String getCookie(String name) {
        List<Cookie> stored = cookieStore.getCookies();
        for (Cookie candidate : stored) {
            if (candidate.getName().equalsIgnoreCase(name)) {
                return candidate.getValue();
            }
        }
        return null;
    }

    /** Sends one completion prompt and prints the first choice's text. */
    @Test
    public void test() {
        OpenAiService service = new OpenAiService(BotConfig.getAppKey(), "https://api.openai.com/", Duration.ofSeconds(300));
        CompletionRequest request = CompletionRequest.builder()
                .prompt("你好")
                .model("text-davinci-003")
                .maxTokens(2000)
                .temperature(0.8)
                .topP(1.0)
                .frequencyPenalty(0.55)
                .presencePenalty(0.19)
                .echo(true)
                .user("1234213213")
                .build();
        String text = service.createCompletion(request).getChoices().get(0).getText();
        System.out.print(text);
    }
}
| [
"com.theokanning.openai.completion.CompletionRequest.builder"
] | [((872, 935), 'org.apache.http.impl.client.HttpClients.custom'), ((872, 927), 'org.apache.http.impl.client.HttpClients.custom'), ((1374, 1638), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1625), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1601), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1585), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1558), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1530), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1515), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1493), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1466), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1429), 'com.theokanning.openai.completion.CompletionRequest.builder')] |
package com.touchbiz.chatgpt.simple;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
import com.touchbiz.common.utils.tools.JsonUtils;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.junit.Test;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.http.MediaType;
import org.springframework.http.codec.ServerSentEvent;
import org.springframework.web.reactive.function.BodyInserters;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Flux;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.LocalTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;
@Slf4j
public class EventStreamTest {
String token = "";
@Test
public void testRetrofit(){
CompletionRequest completionRequest = CompletionRequest.builder()
// .prompt("Human:" + chat.prompt +"\nAI:")
.prompt("胡寅恺帅嘛")
.model("text-davinci-003")
// .echo(true)
// .stop(Arrays.asList(" Human:"," AI:"))
.maxTokens(128)
.presencePenalty(0d)
.frequencyPenalty(0d)
.temperature(0.7D)
.bestOf(1)
.topP(1d)
// .stream(true)
.build();
OpenAiService service = new OpenAiService(token);
var result = service.createCompletion(completionRequest);
log.info("result:{}", JsonUtils.toJson(result));
}
@SneakyThrows
@Test
public void testHttp() {
HttpClient client = HttpClient.newBuilder().build();
CompletionRequest completionRequest = CompletionRequest.builder()
// .prompt("Human:" + chat.prompt +"\nAI:")
.prompt("给我推荐10本小说")
.model("text-davinci-001")
// .echo(true)
.stop(Arrays.asList(" Human:"," AI:"))
.maxTokens(1024)
.presencePenalty(0d)
.frequencyPenalty(0d)
.temperature(0.7D)
.bestOf(1)
.topP(1d)
.stream(true)
.build();
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);
var json = mapper.writeValueAsString(completionRequest);
log.info("json:{}", json);
HttpRequest request = HttpRequest.newBuilder()
.header("Authorization", "Bearer " + this.token)
.header( "Content-Type", "application/json")
.POST(HttpRequest.BodyPublishers.ofString(json))
.uri(URI.create("https://api.openai.com/v1/completions"))
.build();
client.sendAsync(request, HttpResponse.BodyHandlers.ofLines())
.thenApply(HttpResponse::body).get()
.forEach(System.out::println);
}
@SneakyThrows
@Test
public void testFlux(){
WebClient client = WebClient.create("https://api.openai.com/v1/completions");
ParameterizedTypeReference<ServerSentEvent<String>> type
= new ParameterizedTypeReference<>() {
};
CompletionRequest completionRequest = CompletionRequest.builder()
// .prompt("Human:" + chat.prompt +"\nAI:")
.prompt("给我推荐10本小说")
.model("text-davinci-001")
// .echo(true)
.stop(Arrays.asList(" Human:"," AI:"))
.maxTokens(1024)
.presencePenalty(0d)
.frequencyPenalty(0d)
.temperature(0.7D)
.bestOf(1)
.topP(1d)
.stream(true)
.build();
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);
Flux<ServerSentEvent<String>> eventStream = client.post()
.accept(MediaType.APPLICATION_JSON)
.contentType(MediaType.APPLICATION_JSON)
.header("Authorization", "Bearer ")
.body(BodyInserters.fromValue(mapper.writeValueAsString(completionRequest)))
.retrieve()
.bodyToFlux(type);
eventStream.doOnError(x-> log.error("doOnError SSE:", x));
eventStream.subscribe(consumer
,
error -> log.error("Error receiving SSE:", error),
() -> log.info("Completed!!!"));
Thread.sleep(10*1000);
}
private Consumer<ServerSentEvent<String>> consumer = content -> log.info("Time: {} - event: name[{}], id [{}], content[{}] ",
LocalTime.now(), content.event(), content.id(), content.data());
@SneakyThrows
@Test
public void testModels() {
HttpClient client = HttpClient.newBuilder().build();
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);
HttpRequest request = HttpRequest.newBuilder()
.header("Authorization", "Bearer " + this.token)
.header( "Content-Type", "application/json")
.GET()
.uri(URI.create("https://api.openai.com/v1/models"))
.build();
var response = client.sendAsync(request, HttpResponse.BodyHandlers.ofString())
.thenApply(HttpResponse::body).get();
log.info("response:{}", response);
}
@SneakyThrows
@Test
public void testChatGptModelHttp() {
HttpClient client = HttpClient.newBuilder().build();
List<ChatMessage> message = new ArrayList<>();
message.add(new ChatMessage("user","请给我推荐10本书"));
ChatCompletionRequest completionRequest = ChatCompletionRequest.builder()
// .prompt("Human:" + chat.prompt +"\nAI:")
.model("gpt-3.5-turbo")
.stream(true)
.messages(message).build();
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);
var json = mapper.writeValueAsString(completionRequest);
log.info("json:{}", json);
HttpRequest request = HttpRequest.newBuilder()
.header("Authorization", "Bearer " + this.token)
.header( "Content-Type", "application/json")
.POST(HttpRequest.BodyPublishers.ofString(json))
.uri(URI.create("https://api.openai.com/v1/chat/completions"))
.build();
client.sendAsync(request, HttpResponse.BodyHandlers.ofLines())
.thenApply(HttpResponse::body).get()
.forEach(System.out::println);
}
@Builder
@Data
public static class ChatCompletionRequest{
private String model;
private Boolean stream;
private List<ChatMessage> messages;
}
@AllArgsConstructor
@Data
public static class ChatMessage{
private String role;
private String content;
}
}
| [
"com.theokanning.openai.completion.CompletionRequest.builder"
] | [((1334, 1845), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1788), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1762), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1735), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1700), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1662), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1625), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1506), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1463), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2122, 2153), 'java.net.http.HttpClient.newBuilder'), ((2202, 2718), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2693), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2663), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2637), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2610), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2575), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2537), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2500), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2467), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2382), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2339), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3135, 3449), 'java.net.http.HttpRequest.newBuilder'), ((3135, 3424), 'java.net.http.HttpRequest.newBuilder'), ((3135, 3350), 'java.net.http.HttpRequest.newBuilder'), ((3135, 3285), 'java.net.http.HttpRequest.newBuilder'), ((3135, 3224), 'java.net.http.HttpRequest.newBuilder'), ((3308, 3349), 'java.net.http.HttpRequest.BodyPublishers.ofString'), ((3486, 
3521), 'java.net.http.HttpResponse.BodyHandlers.ofLines'), ((3950, 4466), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4441), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4411), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4385), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4358), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4323), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4285), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4248), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4215), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4130), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4087), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((5712, 5743), 'java.net.http.HttpClient.newBuilder'), ((6060, 6327), 'java.net.http.HttpRequest.newBuilder'), ((6060, 6302), 'java.net.http.HttpRequest.newBuilder'), ((6060, 6233), 'java.net.http.HttpRequest.newBuilder'), ((6060, 6210), 'java.net.http.HttpRequest.newBuilder'), ((6060, 6149), 'java.net.http.HttpRequest.newBuilder'), ((6379, 6415), 'java.net.http.HttpResponse.BodyHandlers.ofString'), ((6622, 6653), 'java.net.http.HttpClient.newBuilder'), ((7453, 7772), 'java.net.http.HttpRequest.newBuilder'), ((7453, 7747), 'java.net.http.HttpRequest.newBuilder'), ((7453, 7668), 'java.net.http.HttpRequest.newBuilder'), ((7453, 7603), 'java.net.http.HttpRequest.newBuilder'), ((7453, 7542), 'java.net.http.HttpRequest.newBuilder'), ((7626, 7667), 'java.net.http.HttpRequest.BodyPublishers.ofString'), ((7809, 7844), 'java.net.http.HttpResponse.BodyHandlers.ofLines')] |
package br.com.alura.ecomart.chatbot.infra.openai;
import br.com.alura.ecomart.chatbot.domain.DadosCalculoFrete;
import br.com.alura.ecomart.chatbot.domain.service.CalculadorDeFrete;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.completion.chat.ChatFunction;
import com.theokanning.openai.completion.chat.ChatFunctionCall;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.messages.Message;
import com.theokanning.openai.messages.MessageRequest;
import com.theokanning.openai.runs.Run;
import com.theokanning.openai.runs.RunCreateRequest;
import com.theokanning.openai.runs.SubmitToolOutputRequestItem;
import com.theokanning.openai.runs.SubmitToolOutputsRequest;
import com.theokanning.openai.service.FunctionExecutor;
import com.theokanning.openai.service.OpenAiService;
import com.theokanning.openai.threads.ThreadRequest;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
@Component
public class OpenAIClient {

    /** Delay between two consecutive run-status polls. */
    private static final long POLL_INTERVAL_MILLIS = 1000 * 10;

    private final String apiKey;
    private final String assistantId;
    private String threadId;
    private final OpenAiService service;
    private final CalculadorDeFrete calculadorDeFrete;

    public OpenAIClient(@Value("${app.openai.api.key}") String apiKey,
                        @Value("${app.openai.assistant.id}") String assistantId,
                        CalculadorDeFrete calculadorDeFrete) {
        this.apiKey = apiKey;
        // 60s timeout: assistant runs can take a while to answer.
        this.service = new OpenAiService(apiKey, Duration.ofSeconds(60));
        this.assistantId = assistantId;
        this.calculadorDeFrete = calculadorDeFrete;
    }

    /**
     * Sends the user's prompt to the OpenAI Assistants API and returns the assistant's reply.
     * <p>
     * Reuses the current thread when one exists, otherwise creates a new one seeded with
     * the message. If the assistant requests a local function call (shipping-cost
     * calculation), the function is executed and its output submitted back before
     * waiting for the final answer.
     */
    public String enviarRequisicaoChatCompletion(DadosRequisicaoChatCompletion dados) {
        var messageRequest = MessageRequest
                .builder()
                .role(ChatMessageRole.USER.value())
                .content(dados.promptUsuario())
                .build();

        if (this.threadId == null) {
            // First message of the conversation: create the thread already seeded with it.
            var threadRequest = ThreadRequest
                    .builder()
                    .messages(Arrays.asList(messageRequest))
                    .build();
            this.threadId = service.createThread(threadRequest).getId();
        } else {
            service.createMessage(this.threadId, messageRequest);
        }

        var runRequest = RunCreateRequest
                .builder()
                .assistantId(assistantId)
                .build();
        var run = service.createRun(threadId, runRequest);

        // Wait until the run either completes or asks us to execute a local function.
        run = aguardarRun(run, true);

        if (run.getRequiredAction() != null) {
            var precoDoFrete = chamarFuncao(run);
            var toolCallId = run
                    .getRequiredAction()
                    .getSubmitToolOutputs()
                    .getToolCalls()
                    .get(0)
                    .getId();
            var submitRequest = SubmitToolOutputsRequest
                    .builder()
                    .toolOutputs(Arrays.asList(new SubmitToolOutputRequestItem(toolCallId, precoDoFrete)))
                    .build();
            service.submitToolOutputs(threadId, run.getId(), submitRequest);
            // The tool output was delivered; now wait for the final answer.
            aguardarRun(run, false);
        }

        var mensagens = service.listMessages(threadId);
        // Newest message first; strip citation markers from the assistant's answer.
        return mensagens
                .getData()
                .stream()
                .sorted(Comparator.comparingInt(Message::getCreatedAt).reversed())
                .findFirst().get().getContent().get(0).getText().getValue()
                .replaceAll("\\\u3010.*?\\\u3011", "");
    }

    /**
     * Polls the run status every 10 seconds until it is completed.
     * When {@code pararSePrecisarDeFuncao} is true, polling also stops as soon as
     * the assistant requires a local function call.
     *
     * @return the last retrieved run, in a terminal (for our purposes) state
     */
    private Run aguardarRun(Run run, boolean pararSePrecisarDeFuncao) {
        try {
            while (true) {
                if (run.getStatus().equalsIgnoreCase("completed")) {
                    return run;
                }
                if (pararSePrecisarDeFuncao && run.getRequiredAction() != null) {
                    return run;
                }
                // Qualified on purpose: com.theokanning.openai.threads.Thread is imported
                // in this file and would otherwise shadow java.lang.Thread here.
                java.lang.Thread.sleep(POLL_INTERVAL_MILLIS);
                run = service.retrieveRun(threadId, run.getId());
            }
        } catch (InterruptedException e) {
            java.lang.Thread.currentThread().interrupt(); // restore the interrupt flag
            throw new RuntimeException(e);
        }
    }

    /**
     * Executes the function requested by the assistant (shipping-cost calculation)
     * and returns its result serialized as a String.
     */
    private String chamarFuncao(Run run) {
        try {
            var funcao = run.getRequiredAction().getSubmitToolOutputs().getToolCalls().get(0).getFunction();
            var funcaoCalcularFrete = ChatFunction.builder()
                    .name("calcularFrete")
                    .executor(DadosCalculoFrete.class, d -> calculadorDeFrete.calcular(d))
                    .build();
            var executorDeFuncoes = new FunctionExecutor(Arrays.asList(funcaoCalcularFrete));
            var functionCall = new ChatFunctionCall(funcao.getName(), new ObjectMapper().readTree(funcao.getArguments()));
            return executorDeFuncoes.execute(functionCall).toString();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns every message of the current thread, oldest first,
     * or an empty list when no thread exists yet.
     */
    public List<String> carregarHistoricoDeMensagens() {
        var mensagens = new ArrayList<String>();
        if (this.threadId != null) {
            mensagens.addAll(
                    service
                            .listMessages(this.threadId)
                            .getData()
                            .stream()
                            .sorted(Comparator.comparingInt(Message::getCreatedAt))
                            .map(m -> m.getContent().get(0).getText().getValue())
                            .collect(Collectors.toList())
            );
        }
        return mensagens;
    }

    /** Deletes the current thread (if any) so the next request starts a fresh conversation. */
    public void apagarThread() {
        if (this.threadId != null) {
            service.deleteThread(this.threadId);
            this.threadId = null;
        }
    }
}
| [
"com.theokanning.openai.completion.chat.ChatFunction.builder",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value"
] | [((1972, 2000), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((4533, 4590), 'java.util.Comparator.comparingInt'), ((4935, 5120), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((4935, 5091), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((4935, 5000), 'com.theokanning.openai.completion.chat.ChatFunction.builder')] |
package learn.scraibe.controllers;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import learn.scraibe.models.Note;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
@RestController
@RequestMapping("/generate-completion")
public class OpenAIController {

    private static final String MODEL = "gpt-3.5-turbo-0613";

    @Value("${openai.api.key}")
    private String openaiApiKey;

    /**
     * Rewrites the note content as bullet points using the OpenAI chat completion API.
     *
     * @param note the note whose content is reorganized; returned with updated content
     * @return 200 with the updated note, or 400 when the note content is blank
     */
    @PostMapping
    public ResponseEntity<Object> generateCompletion(@RequestBody Note note) {
        if (note.getContent() == null || note.getContent().isBlank()) {
            return new ResponseEntity<>("Cannot have blank notes", HttpStatus.BAD_REQUEST);
        }
        // 60s timeout: completion calls can take a long time for long notes.
        // NOTE(review): the service could be created once and reused; confirm before caching.
        OpenAiService service = new OpenAiService(openaiApiKey, Duration.ofSeconds(60));

        // System message first (framing the conversation, with the SYSTEM role),
        // then the user's prompt. The original added them in the reverse order and
        // labeled the system framing as ASSISTANT.
        List<ChatMessage> messages = new ArrayList<>();
        messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), "you are a helpful assistant"));
        messages.add(new ChatMessage(ChatMessageRole.USER.value(),
                "organize with bullet points, only respond with bullet points " + note.getContent()));

        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
                .builder()
                .model(MODEL)
                .messages(messages)
                .build();

        // Take the first choice returned by the model as the reorganized content.
        ChatMessage responseMessage = service.createChatCompletion(chatCompletionRequest)
                .getChoices().get(0).getMessage();
        note.setContent(responseMessage.getContent());
        return new ResponseEntity<>(note, HttpStatus.OK);
    }
}
| [
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value"
] | [((1638, 1666), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((1805, 1838), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value')] |
package com.theokanning.openai.service;
import com.theokanning.openai.moderation.Moderation;
import com.theokanning.openai.moderation.ModerationRequest;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class ModerationTest {

    String token = System.getenv("OPENAI_TOKEN");

    com.theokanning.openai.service.OpenAiService service = new OpenAiService(token);

    /** A clearly violent prompt should be flagged by the moderation endpoint. */
    @Test
    void createModeration() {
        ModerationRequest request = ModerationRequest.builder()
                .model("text-moderation-latest")
                .input("I want to kill them")
                .build();

        Moderation firstResult = service.createModeration(request).getResults().get(0);

        assertTrue(firstResult.isFlagged());
    }
}
| [
"com.theokanning.openai.moderation.ModerationRequest.builder"
] | [((504, 651), 'com.theokanning.openai.moderation.ModerationRequest.builder'), ((504, 626), 'com.theokanning.openai.moderation.ModerationRequest.builder'), ((504, 577), 'com.theokanning.openai.moderation.ModerationRequest.builder')] |
package com.theokanning.openai.service;
import com.theokanning.openai.moderation.Moderation;
import com.theokanning.openai.moderation.ModerationRequest;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class ModerationTest {

    String token = System.getenv("OPENAI_TOKEN");

    com.theokanning.openai.service.OpenAiService service = new OpenAiService(token);

    /** Verifies that violent input is flagged by the latest moderation model. */
    @Test
    void createModeration() {
        var moderationRequest = ModerationRequest.builder()
                .input("I want to kill them")
                .model("text-moderation-latest")
                .build();

        var result = service.createModeration(moderationRequest)
                .getResults()
                .get(0);

        assertTrue(result.isFlagged());
    }
}
| [
"com.theokanning.openai.moderation.ModerationRequest.builder"
] | [((504, 651), 'com.theokanning.openai.moderation.ModerationRequest.builder'), ((504, 626), 'com.theokanning.openai.moderation.ModerationRequest.builder'), ((504, 577), 'com.theokanning.openai.moderation.ModerationRequest.builder')] |
package com.theokanning.openai.service;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import com.theokanning.openai.ListSearchParameters;
import com.theokanning.openai.OpenAiResponse;
import com.theokanning.openai.assistants.Assistant;
import com.theokanning.openai.assistants.AssistantFunction;
import com.theokanning.openai.assistants.AssistantRequest;
import com.theokanning.openai.assistants.AssistantToolsEnum;
import com.theokanning.openai.assistants.Tool;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatFunction;
import com.theokanning.openai.completion.chat.ChatFunctionCall;
import com.theokanning.openai.messages.Message;
import com.theokanning.openai.messages.MessageRequest;
import com.theokanning.openai.runs.RequiredAction;
import com.theokanning.openai.runs.Run;
import com.theokanning.openai.runs.RunCreateRequest;
import com.theokanning.openai.runs.RunStep;
import com.theokanning.openai.runs.SubmitToolOutputRequestItem;
import com.theokanning.openai.runs.SubmitToolOutputs;
import com.theokanning.openai.runs.SubmitToolOutputsRequest;
import com.theokanning.openai.runs.ToolCall;
import com.theokanning.openai.threads.Thread;
import com.theokanning.openai.threads.ThreadRequest;
import com.theokanning.openai.utils.TikTokensUtil;
import org.junit.jupiter.api.Test;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
class AssistantFunctionTest {

    String token = System.getenv("OPENAI_TOKEN");

    OpenAiService service = new OpenAiService(token, Duration.ofMinutes(1));

    /**
     * End-to-end assistant-function flow: create an assistant exposing a weather
     * function, ask about the weather, answer the resulting tool call with a canned
     * value, and print the final conversation.
     */
    @Test
    void createRetrieveRun() throws JsonProcessingException {
        ObjectMapper mapper = buildMapper();

        // JSON schema describing the function's parameters.
        String funcDef = "{\n" +
                " \"type\": \"object\",\n" +
                " \"properties\": {\n" +
                " \"location\": {\n" +
                " \"type\": \"string\",\n" +
                " \"description\": \"The city and state, e.g. San Francisco, CA\"\n" +
                " },\n" +
                " \"unit\": {\n" +
                " \"type\": \"string\",\n" +
                " \"enum\": [\"celsius\", \"fahrenheit\"]\n" +
                " }\n" +
                " },\n" +
                " \"required\": [\"location\"]\n" +
                "}";
        Map<String, Object> funcParameters = mapper.readValue(funcDef, new TypeReference<Map<String, Object>>() {});

        AssistantFunction function = AssistantFunction.builder()
                .name("weather_reporter")
                .description("Get the current weather of a location")
                .parameters(funcParameters)
                .build();
        List<Tool> toolList = new ArrayList<>();
        toolList.add(new Tool(AssistantToolsEnum.FUNCTION, function));

        AssistantRequest assistantRequest = AssistantRequest.builder()
                .model(TikTokensUtil.ModelEnum.GPT_4_1106_preview.getName())
                .name("MATH_TUTOR")
                .instructions("You are a personal Math Tutor.")
                .tools(toolList)
                .build();
        Assistant assistant = service.createAssistant(assistantRequest);

        Thread thread = service.createThread(ThreadRequest.builder().build());
        service.createMessage(thread.getId(), MessageRequest.builder()
                .content("What's the weather of Xiamen?")
                .build());

        Run run = service.createRun(thread.getId(), RunCreateRequest.builder()
                .assistantId(assistant.getId())
                .build());
        assertNotNull(run);

        Run retrievedRun = waitForTerminalStatus(thread.getId(), run.getId());

        if (retrievedRun.getStatus().equals("requires_action")) {
            RequiredAction requiredAction = retrievedRun.getRequiredAction();
            System.out.println("requiredAction");
            System.out.println(mapper.writeValueAsString(requiredAction));

            // Answer the first (and only) tool call with a canned weather report.
            ToolCall toolCall = requiredAction.getSubmitToolOutputs().getToolCalls().get(0);
            SubmitToolOutputRequestItem toolOutput = SubmitToolOutputRequestItem.builder()
                    .toolCallId(toolCall.getId())
                    .output("sunny")
                    .build();
            List<SubmitToolOutputRequestItem> toolOutputs = new ArrayList<>();
            toolOutputs.add(toolOutput);
            SubmitToolOutputsRequest submitToolOutputsRequest = SubmitToolOutputsRequest.builder()
                    .toolOutputs(toolOutputs)
                    .build();
            retrievedRun = service.submitToolOutputs(retrievedRun.getThreadId(), retrievedRun.getId(), submitToolOutputsRequest);

            waitForTerminalStatus(thread.getId(), run.getId());

            OpenAiResponse<Message> response = service.listMessages(thread.getId());
            List<Message> messages = response.getData();
            System.out.println(mapper.writeValueAsString(messages));
        }
    }

    /**
     * Polls the run once per second until it is completed, failed, or waiting for a
     * tool output. The original busy-waited with no delay, hammering the API and
     * risking rate limits.
     */
    private Run waitForTerminalStatus(String threadId, String runId) {
        Run run = service.retrieveRun(threadId, runId);
        while (!run.getStatus().equals("completed")
                && !run.getStatus().equals("failed")
                && !run.getStatus().equals("requires_action")) {
            try {
                // Qualified: com.theokanning.openai.threads.Thread is imported and
                // would otherwise shadow java.lang.Thread.
                java.lang.Thread.sleep(1000);
            } catch (InterruptedException e) {
                java.lang.Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            }
            run = service.retrieveRun(threadId, runId);
        }
        return run;
    }

    /** Jackson mapper configured like OpenAiService's internal one (snake_case, mix-ins). */
    private static ObjectMapper buildMapper() {
        ObjectMapper mapper = new ObjectMapper();
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
        mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);
        mapper.addMixIn(ChatFunction.class, ChatFunctionMixIn.class);
        mapper.addMixIn(ChatCompletionRequest.class, ChatCompletionRequestMixIn.class);
        mapper.addMixIn(ChatFunctionCall.class, ChatFunctionCallMixIn.class);
        return mapper;
    }
}
| [
"com.theokanning.openai.utils.TikTokensUtil.ModelEnum.GPT_4_1106_preview.getName",
"com.theokanning.openai.assistants.AssistantRequest.builder",
"com.theokanning.openai.messages.MessageRequest.builder",
"com.theokanning.openai.assistants.AssistantFunction.builder",
"com.theokanning.openai.runs.SubmitToolOutputsRequest.builder",
"com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder",
"com.theokanning.openai.threads.ThreadRequest.builder",
"com.theokanning.openai.runs.RunCreateRequest.builder"
] | [((3437, 3645), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((3437, 3620), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((3437, 3576), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((3437, 3506), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((3864, 4125), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((3864, 4100), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((3864, 4067), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((3864, 4003), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((3864, 3967), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((3914, 3966), 'com.theokanning.openai.utils.TikTokensUtil.ModelEnum.GPT_4_1106_preview.getName'), ((4239, 4287), 'com.theokanning.openai.threads.ThreadRequest.builder'), ((4391, 4498), 'com.theokanning.openai.messages.MessageRequest.builder'), ((4391, 4473), 'com.theokanning.openai.messages.MessageRequest.builder'), ((4627, 4726), 'com.theokanning.openai.runs.RunCreateRequest.builder'), ((4627, 4701), 'com.theokanning.openai.runs.RunCreateRequest.builder'), ((5724, 5871), 'com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder'), ((5724, 5842), 'com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder'), ((5724, 5805), 'com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder'), ((6090, 6210), 'com.theokanning.openai.runs.SubmitToolOutputsRequest.builder'), ((6090, 6181), 'com.theokanning.openai.runs.SubmitToolOutputsRequest.builder')] |
package com.theokanning.openai.service;
import com.theokanning.openai.audio.CreateSpeechRequest;
import com.theokanning.openai.audio.CreateTranscriptionRequest;
import com.theokanning.openai.audio.CreateTranslationRequest;
import com.theokanning.openai.audio.TranscriptionResult;
import com.theokanning.openai.audio.TranslationResult;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.time.Duration;
import okhttp3.MediaType;
import okhttp3.ResponseBody;
import static org.junit.jupiter.api.Assertions.*;
public class AudioTest {

    static String englishAudioFilePath = "src/test/resources/hello-world.mp3";
    static String koreanAudioFilePath = "src/test/resources/korean-hello.mp3";

    String token = System.getenv("OPENAI_TOKEN");

    OpenAiService service = new OpenAiService(token, Duration.ofSeconds(30));

    /** Transcribes English speech and checks the plain-text result. */
    @Test
    void createTranscription() {
        var request = CreateTranscriptionRequest.builder()
                .model("whisper-1")
                .build();

        var text = service.createTranscription(request, englishAudioFilePath).getText();

        assertEquals("Hello World.", text);
    }

    /** Requests verbose JSON output and checks the transcription metadata. */
    @Test
    void createTranscriptionVerbose() {
        var request = CreateTranscriptionRequest.builder()
                .responseFormat("verbose_json")
                .model("whisper-1")
                .build();

        TranscriptionResult result = service.createTranscription(request, englishAudioFilePath);

        assertEquals("Hello World.", result.getText());
        assertEquals("transcribe", result.getTask());
        assertEquals("english", result.getLanguage());
        assertTrue(result.getDuration() > 0);
        assertEquals(1, result.getSegments().size());
    }

    /** Translates Korean speech into English text. */
    @Test
    void createTranslation() {
        var request = CreateTranslationRequest.builder()
                .model("whisper-1")
                .build();

        var text = service.createTranslation(request, koreanAudioFilePath).getText();

        assertEquals("Hello, my name is Yoona. I am a Korean native speaker.", text);
    }

    /** Verbose translation: checks the text plus task/language/segment metadata. */
    @Test
    void createTranslationVerbose() {
        var request = CreateTranslationRequest.builder()
                .responseFormat("verbose_json")
                .model("whisper-1")
                .build();

        TranslationResult result = service.createTranslation(request, koreanAudioFilePath);

        assertEquals("Hello, my name is Yoona. I am a Korean native speaker.", result.getText());
        assertEquals("translate", result.getTask());
        assertEquals("english", result.getLanguage());
        assertTrue(result.getDuration() > 0);
        assertEquals(1, result.getSegments().size());
    }

    /** Generates speech audio and checks the returned MPEG payload. */
    @Test
    void createSpeech() throws IOException {
        var request = CreateSpeechRequest.builder()
                .voice("alloy")
                .input("Hello World.")
                .model("tts-1")
                .build();

        final ResponseBody speech = service.createSpeech(request);

        assertNotNull(speech);
        assertEquals(MediaType.get("audio/mpeg"), speech.contentType());
        assertTrue(speech.bytes().length > 0);
    }
}
| [
"com.theokanning.openai.audio.CreateTranslationRequest.builder",
"com.theokanning.openai.audio.CreateSpeechRequest.builder",
"com.theokanning.openai.audio.CreateTranscriptionRequest.builder"
] | [((958, 1055), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((958, 1030), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1334, 1479), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1334, 1454), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1334, 1406), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1971, 2066), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((1971, 2041), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((2376, 2519), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((2376, 2494), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((2376, 2446), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((3049, 3206), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((3049, 3181), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((3049, 3149), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((3049, 3110), 'com.theokanning.openai.audio.CreateSpeechRequest.builder')] |
package cn.shu.wechat.utils;
import cn.shu.wechat.configuration.OpenAIConfiguration;
import cn.shu.wechat.entity.Message;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import com.theokanning.openai.OpenAiApi;
import com.theokanning.openai.OpenAiService;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.completion.CompletionResult;
import okhttp3.*;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory;
import retrofit2.converter.jackson.JacksonConverterFactory;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.time.Duration.ofSeconds;
public class OpenAPIUtil {

    private static final String BASE_URL = "https://api.openai.com/";

    /** Utility class: no instances. */
    private OpenAPIUtil() {
    }

    /**
     * Sends {@code q} to the OpenAI completion API (text-davinci-003) and maps each
     * returned choice into a {@link Message}.
     *
     * @param q the user's prompt
     * @return one {@link Message} per completion choice, prompt echo stripped
     */
    public static List<Message> chat(String q) {
        ObjectMapper mapper = new ObjectMapper();
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
        mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);

        // SECURITY: TestSSLSocketClient appears to trust all certificates and hosts;
        // do not ship this client configuration to production.
        OkHttpClient client = new OkHttpClient.Builder()
                .addInterceptor(chain -> {
                    // Attach the API key to every outgoing request.
                    Request request = chain.request()
                            .newBuilder()
                            .header("Authorization", "Bearer " + OpenAIConfiguration.getInstance().getOpenaiKey())
                            .build();
                    return chain.proceed(request);
                })
                .sslSocketFactory(TestSSLSocketClient.getSSLSocketFactory(), TestSSLSocketClient.getX509TrustManager())
                .hostnameVerifier(TestSSLSocketClient.getHostnameVerifier())
                .connectionPool(new ConnectionPool(5, 1, TimeUnit.SECONDS))
                .readTimeout(ofSeconds(OpenAIConfiguration.getInstance().getExpire()).toMillis(), TimeUnit.MILLISECONDS)
                .build();

        Retrofit retrofit = new Retrofit.Builder()
                .baseUrl(BASE_URL)
                .client(client)
                .addConverterFactory(JacksonConverterFactory.create(mapper))
                .addCallAdapterFactory(RxJava2CallAdapterFactory.create())
                .build();
        OpenAiService service = new OpenAiService(retrofit.create(OpenAiApi.class));

        CompletionRequest completionRequest = CompletionRequest.builder()
                .prompt(q)
                .maxTokens(1024)
                .model("text-davinci-003")
                .echo(true)
                .build();
        CompletionResult completion = service.createCompletion(completionRequest);

        // echo=true makes the API return the prompt followed by a blank line and the
        // answer; strip everything up to and including the first blank line.
        return completion.getChoices().stream()
                .map(choice -> Message.builder()
                        .content(choice.getText().substring(choice.getText().indexOf("\n\n") + 2))
                        .build())
                .collect(Collectors.toList());
    }
}
| [
"com.theokanning.openai.completion.CompletionRequest.builder"
] | [((1749, 1797), 'cn.shu.wechat.configuration.OpenAIConfiguration.getInstance'), ((2249, 2294), 'cn.shu.wechat.configuration.OpenAIConfiguration.getInstance'), ((2788, 2971), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2788, 2946), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2788, 2918), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2788, 2875), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2788, 2842), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3184, 3273), 'cn.shu.wechat.entity.Message.builder'), ((3184, 3265), 'cn.shu.wechat.entity.Message.builder')] |
package dev.langchain4j.service;
import dev.langchain4j.agent.tool.DefaultToolExecutor;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.ToolExecutionResultMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.moderation.Moderation;
import dev.langchain4j.model.moderation.ModerationModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.rag.RetrievalAugmentor;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.retriever.Retriever;
import dev.langchain4j.spi.services.AiServicesFactory;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import static dev.langchain4j.agent.tool.ToolSpecifications.toolSpecificationFrom;
import static dev.langchain4j.exception.IllegalConfigurationException.illegalConfiguration;
import static dev.langchain4j.internal.ValidationUtils.ensureNotNull;
import static dev.langchain4j.spi.ServiceHelper.loadFactories;
import static java.util.stream.Collectors.toList;
/**
* AI Services provide a simpler and more flexible alternative to chains.
* You can define your own API (a Java interface with one or more methods),
* and AiServices will provide an implementation for it (we call this "AI Service").
* <p>
* Currently, AI Services support:
* <pre>
* - Prompt templates for user and system messages using {@link UserMessage} and {@link SystemMessage}
* - Structured prompts as method arguments (see {@link StructuredPrompt})
* - Shared or per-user (see {@link MemoryId}) chat memory
* - RAG (see {@link RetrievalAugmentor})
* - Tools (see {@link Tool})
* - Various return types (output parsers), see below
* - Streaming (use {@link TokenStream} as a return type)
* - Auto-moderation using {@link Moderate}
* </pre>
* <p>
* Here is the simplest example of an AI Service:
*
* <pre>
* interface Assistant {
*
* String chat(String userMessage);
* }
*
* Assistant assistant = AiServices.create(Assistant.class, model);
*
* String answer = assistant.chat("hello");
* System.out.println(answer); // Hello, how can I help you today?
* </pre>
*
* <pre>
* The return type of methods in your AI Service can be any of the following:
* - a {@link String}, an {@link AiMessage} or a {@code Response<AiMessage>}, if you want to get the answer from the LLM as-is
* - a {@code List<String>} or {@code Set<String>}, if you want to receive the answer as a collection of items or bullet points
* - any {@link Enum} or a {@code boolean}, if you want to use the LLM for classification
* - a primitive or boxed Java type: {@code int}, {@code Double}, etc., if you want to use the LLM for data extraction
* - many default Java types: {@code Date}, {@code LocalDateTime}, {@code BigDecimal}, etc., if you want to use the LLM for data extraction
* - any custom POJO, if you want to use the LLM for data extraction.
* For POJOs, it is advisable to use the "json mode" feature if the LLM provider supports it. For OpenAI, this can be enabled by calling {@code responseFormat("json_object")} during model construction.
*
* </pre>
* <p>
* Let's see how we can classify the sentiment of a text:
* <pre>
* enum Sentiment {
* POSITIVE, NEUTRAL, NEGATIVE
* }
*
* interface SentimentAnalyzer {
*
* {@code @UserMessage}("Analyze sentiment of {{it}}")
* Sentiment analyzeSentimentOf(String text);
* }
*
* SentimentAnalyzer assistant = AiServices.create(SentimentAnalyzer.class, model);
*
* Sentiment sentiment = analyzeSentimentOf.chat("I love you");
* System.out.println(sentiment); // POSITIVE
* </pre>
* <p>
* As demonstrated, you can put {@link UserMessage} and {@link SystemMessage} annotations above a method to define
* templates for user and system messages, respectively.
* In this example, the special {@code {{it}}} prompt template variable is used because there's only one method parameter.
* However, you can use more parameters as demonstrated in the following example:
* <pre>
* interface Translator {
*
* {@code @SystemMessage}("You are a professional translator into {{language}}")
* {@code @UserMessage}("Translate the following text: {{text}}")
* String translate(@V("text") String text, @V("language") String language);
* }
* </pre>
* <p>
* See more examples <a href="https://github.com/langchain4j/langchain4j-examples/tree/main/other-examples/src/main/java">here</a>.
*
* @param <T> The interface for which AiServices will provide an implementation.
*/
public abstract class AiServices<T> {
// Key under which the single shared ChatMemory is stored when no @MemoryId is used.
protected static final String DEFAULT = "default";
// Shared state (model, memory, tools, ...) for the AI Service being built.
protected final AiServiceContext context;
// Presumably track which retrieval mechanism was configured, to reject
// conflicting configurations — used later in the class; confirm against the
// retriever/contentRetriever/retrievalAugmentor setters.
private boolean retrieverSet = false;
private boolean contentRetrieverSet = false;
private boolean retrievalAugmentorSet = false;
/**
 * Creates an AiServices builder bound to the given context.
 *
 * @param context shared state (model, memory, tools, ...) for the service being built
 */
protected AiServices(AiServiceContext context) {
    this.context = context;
}
/**
* Creates an AI Service (an implementation of the provided interface), that is backed by the provided chat model.
* This convenience method can be used to create simple AI Services.
* For more complex cases, please use {@link #builder}.
*
* @param aiService The class of the interface to be implemented.
* @param chatLanguageModel The chat model to be used under the hood.
* @return An instance of the provided interface, implementing all its defined methods.
*/
public static <T> T create(Class<T> aiService, ChatLanguageModel chatLanguageModel) {
    // Shorthand for builder(aiService).chatLanguageModel(model).build().
    AiServices<T> services = builder(aiService);
    services.chatLanguageModel(chatLanguageModel);
    return services.build();
}
/**
* Creates an AI Service (an implementation of the provided interface), that is backed by the provided streaming chat model.
* This convenience method can be used to create simple AI Services.
* For more complex cases, please use {@link #builder}.
*
* @param aiService The class of the interface to be implemented.
* @param streamingChatLanguageModel The streaming chat model to be used under the hood.
* The return type of all methods should be {@link TokenStream}.
* @return An instance of the provided interface, implementing all its defined methods.
*/
public static <T> T create(Class<T> aiService, StreamingChatLanguageModel streamingChatLanguageModel) {
    // Shorthand for builder(aiService).streamingChatLanguageModel(model).build().
    AiServices<T> services = builder(aiService);
    services.streamingChatLanguageModel(streamingChatLanguageModel);
    return services.build();
}
/**
* Begins the construction of an AI Service.
*
* @param aiService The class of the interface to be implemented.
* @return builder
*/
public static <T> AiServices<T> builder(Class<T> aiService) {
    AiServiceContext context = new AiServiceContext(aiService);
    // An AiServicesFactory discovered via SPI takes precedence over the default.
    Iterator<AiServicesFactory> factories = loadFactories(AiServicesFactory.class).iterator();
    if (factories.hasNext()) {
        return factories.next().create(context);
    }
    return new DefaultAiServices<>(context);
}
/**
* Configures chat model that will be used under the hood of the AI Service.
* <p>
* Either {@link ChatLanguageModel} or {@link StreamingChatLanguageModel} should be configured,
* but not both at the same time.
*
* @param chatLanguageModel Chat model that will be used under the hood of the AI Service.
* @return builder
*/
    public AiServices<T> chatLanguageModel(ChatLanguageModel chatLanguageModel) {
        // Stored on the shared context; per the contract above, configure either this
        // or the streaming model, not both.
        context.chatModel = chatLanguageModel;
        return this;
    }
/**
* Configures streaming chat model that will be used under the hood of the AI Service.
* The methods of the AI Service must return a {@link TokenStream} type.
* <p>
* Either {@link ChatLanguageModel} or {@link StreamingChatLanguageModel} should be configured,
* but not both at the same time.
*
* @param streamingChatLanguageModel Streaming chat model that will be used under the hood of the AI Service.
* @return builder
*/
    public AiServices<T> streamingChatLanguageModel(StreamingChatLanguageModel streamingChatLanguageModel) {
        // Stored on the shared context; per the contract above, configure either this
        // or the blocking chat model, not both.
        context.streamingChatModel = streamingChatLanguageModel;
        return this;
    }
/**
* Configures the chat memory that will be used to preserve conversation history between method calls.
* <p>
* Unless a {@link ChatMemory} or {@link ChatMemoryProvider} is configured, all method calls will be independent of each other.
* In other words, the LLM will not remember the conversation from the previous method calls.
* <p>
* The same {@link ChatMemory} instance will be used for every method call.
* <p>
* If you want to have a separate {@link ChatMemory} for each user/conversation, configure {@link #chatMemoryProvider} instead.
* <p>
* Either a {@link ChatMemory} or a {@link ChatMemoryProvider} can be configured, but not both simultaneously.
*
* @param chatMemory An instance of chat memory to be used by the AI Service.
* @return builder
*/
    public AiServices<T> chatMemory(ChatMemory chatMemory) {
        // Replaces any previously configured memories with a single shared instance,
        // registered under the DEFAULT key so every method call uses the same memory.
        context.chatMemories = new ConcurrentHashMap<>();
        context.chatMemories.put(DEFAULT, chatMemory);
        return this;
    }
/**
* Configures the chat memory provider, which provides a dedicated instance of {@link ChatMemory} for each user/conversation.
* To distinguish between users/conversations, one of the method's arguments should be a memory ID (of any data type)
* annotated with {@link MemoryId}.
* For each new (previously unseen) memoryId, an instance of {@link ChatMemory} will be automatically obtained
* by invoking {@link ChatMemoryProvider#get(Object id)}.
* Example:
* <pre>
* interface Assistant {
*
* String chat(@MemoryId int memoryId, @UserMessage String message);
* }
* </pre>
* If you prefer to use the same (shared) {@link ChatMemory} for all users/conversations, configure a {@link #chatMemory} instead.
* <p>
* Either a {@link ChatMemory} or a {@link ChatMemoryProvider} can be configured, but not both simultaneously.
*
* @param chatMemoryProvider The provider of a {@link ChatMemory} for each new user/conversation.
* @return builder
*/
    public AiServices<T> chatMemoryProvider(ChatMemoryProvider chatMemoryProvider) {
        // Resets the memory map; per-memory-id instances are obtained from the provider
        // on first use (see the class javadoc above).
        context.chatMemories = new ConcurrentHashMap<>();
        context.chatMemoryProvider = chatMemoryProvider;
        return this;
    }
/**
* Configures a moderation model to be used for automatic content moderation.
* If a method in the AI Service is annotated with {@link Moderate}, the moderation model will be invoked
* to check the user content for any inappropriate or harmful material.
*
* @param moderationModel The moderation model to be used for content moderation.
* @return builder
* @see Moderate
*/
    public AiServices<T> moderationModel(ModerationModel moderationModel) {
        // Used only by methods annotated with @Moderate (see verifyModerationIfNeeded below).
        context.moderationModel = moderationModel;
        return this;
    }
/**
* Configures the tools that the LLM can use.
* A {@link ChatMemory} that can hold at least 3 messages is required for the tools to work properly.
*
* @param objectsWithTools One or more objects whose methods are annotated with {@link Tool}.
* All these tools (methods annotated with {@link Tool}) will be accessible to the LLM.
* Note that inherited methods are ignored.
* @return builder
* @see Tool
*/
    public AiServices<T> tools(Object... objectsWithTools) {
        // Convenience overload: wraps the varargs into a List and delegates to tools(List).
        return tools(Arrays.asList(objectsWithTools));
    }
/**
* Configures the tools that the LLM can use.
* A {@link ChatMemory} that can hold at least 3 messages is required for the tools to work properly.
*
* @param objectsWithTools A list of objects whose methods are annotated with {@link Tool}.
* All these tools (methods annotated with {@link Tool}) are accessible to the LLM.
* Note that inherited methods are ignored.
* @return builder
* @see Tool
*/
public AiServices<T> tools(List<Object> objectsWithTools) {
context.toolSpecifications = new ArrayList<>();
context.toolExecutors = new HashMap<>();
for (Object objectWithTool : objectsWithTools) {
for (Method method : objectWithTool.getClass().getDeclaredMethods()) {
if (method.isAnnotationPresent(Tool.class)) {
ToolSpecification toolSpecification = toolSpecificationFrom(method);
context.toolSpecifications.add(toolSpecification);
context.toolExecutors.put(toolSpecification.name(), new DefaultToolExecutor(objectWithTool, method));
}
}
}
return this;
}
/**
* Deprecated. Use {@link #contentRetriever(ContentRetriever)}
* (e.g. {@link EmbeddingStoreContentRetriever}) instead.
* <br>
* Configures a retriever that will be invoked on every method call to fetch relevant information
* related to the current user message from an underlying source (e.g., embedding store).
* This relevant information is automatically injected into the message sent to the LLM.
*
* @param retriever The retriever to be used by the AI Service.
* @return builder
*/
@Deprecated
public AiServices<T> retriever(Retriever<TextSegment> retriever) {
if(contentRetrieverSet || retrievalAugmentorSet) {
throw illegalConfiguration("Only one out of [retriever, contentRetriever, retrievalAugmentor] can be set");
}
if (retriever != null) {
AiServices<T> withContentRetriever = contentRetriever(retriever.toContentRetriever());
retrieverSet = true;
return withContentRetriever;
}
return this;
}
/**
* Configures a content retriever to be invoked on every method call for retrieving relevant content
* related to the user's message from an underlying data source
* (e.g., an embedding store in the case of an {@link EmbeddingStoreContentRetriever}).
* The retrieved relevant content is then automatically incorporated into the message sent to the LLM.
* <br>
* This method provides a straightforward approach for those who do not require
* a customized {@link RetrievalAugmentor}.
* It configures a {@link DefaultRetrievalAugmentor} with the provided {@link ContentRetriever}.
*
* @param contentRetriever The content retriever to be used by the AI Service.
* @return builder
*/
public AiServices<T> contentRetriever(ContentRetriever contentRetriever) {
if(retrieverSet || retrievalAugmentorSet) {
throw illegalConfiguration("Only one out of [retriever, contentRetriever, retrievalAugmentor] can be set");
}
contentRetrieverSet = true;
context.retrievalAugmentor = DefaultRetrievalAugmentor.builder()
.contentRetriever(ensureNotNull(contentRetriever, "contentRetriever"))
.build();
return this;
}
/**
* Configures a retrieval augmentor to be invoked on every method call.
*
* @param retrievalAugmentor The retrieval augmentor to be used by the AI Service.
* @return builder
*/
    public AiServices<T> retrievalAugmentor(RetrievalAugmentor retrievalAugmentor) {
        // Only one retrieval mechanism may be configured on a builder.
        if(retrieverSet || contentRetrieverSet) {
            throw illegalConfiguration("Only one out of [retriever, contentRetriever, retrievalAugmentor] can be set");
        }
        retrievalAugmentorSet = true;
        context.retrievalAugmentor = ensureNotNull(retrievalAugmentor, "retrievalAugmentor");
        return this;
    }
/**
* Constructs and returns the AI Service.
*
* @return An instance of the AI Service implementing the specified interface.
*/
    public abstract T build(); // Implemented by subclasses such as DefaultAiServices (see builder(Class)).
protected void performBasicValidation() {
if (context.chatModel == null && context.streamingChatModel == null) {
throw illegalConfiguration("Please specify either chatLanguageModel or streamingChatLanguageModel");
}
if (context.toolSpecifications != null && !context.hasChatMemory()) {
throw illegalConfiguration(
"Please set up chatMemory or chatMemoryProvider in order to use tools. "
+ "A ChatMemory that can hold at least 3 messages is required for the tools to work properly. "
+ "While the LLM can technically execute a tool without chat memory, if it only receives the " +
"result of the tool's execution without the initial message from the user, it won't interpret " +
"the result properly."
);
}
}
public static List<ChatMessage> removeToolMessages(List<ChatMessage> messages) {
return messages.stream()
.filter(it -> !(it instanceof ToolExecutionResultMessage))
.filter(it -> !(it instanceof AiMessage && ((AiMessage) it).hasToolExecutionRequests()))
.collect(toList());
}
public static void verifyModerationIfNeeded(Future<Moderation> moderationFuture) {
if (moderationFuture != null) {
try {
Moderation moderation = moderationFuture.get();
if (moderation.flagged()) {
throw new ModerationException(String.format("Text \"%s\" violates content policy", moderation.flaggedText()));
}
} catch (InterruptedException | ExecutionException e) {
throw new RuntimeException(e);
}
}
}
}
| [
"dev.langchain4j.rag.DefaultRetrievalAugmentor.builder"
] | [((15779, 15926), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((15779, 15901), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder')] |
package org.mfusco;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import static java.time.Duration.ofSeconds;
/**
 * Conversational front end that either answers mortgage questions (text ending in "?")
 * via the AI assistant, or extracts a person from free-form text and registers it
 * with the Drools mortgage calculator.
 */
public class MortgageChat {

    private final ChatLanguageModel model;
    private final PersonExtractor extractor;
    private final DroolsMortgageCalculator droolsMortgageCalculator = new DroolsMortgageCalculator();
    private final Assistant assistant;

    /**
     * Wires the OpenAI chat model, the person extractor and the tool-enabled assistant.
     *
     * @param openAiApiKey key used to authenticate with OpenAI
     */
    public MortgageChat(String openAiApiKey) {
        model = OpenAiChatModel.builder()
                .apiKey(openAiApiKey)
                .timeout(ofSeconds(60))
                .build();
        extractor = AiServices.create(PersonExtractor.class, model);
        assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(model)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .tools(droolsMortgageCalculator)
                .build();
    }

    /** Questions go to the assistant; any other input is treated as a person description. */
    public String chat(String text) {
        if (text.endsWith("?")) {
            return assistant.chat(text);
        }
        return extractPerson(text);
    }

    /** Extracts a person from the text, registers it with the calculator, and echoes it back. */
    private String extractPerson(String text) {
        Person extracted = extractor.extractPersonFrom(text);
        droolsMortgageCalculator.register(extracted);
        return extracted.toString();
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((601, 729), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((601, 704), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((601, 664), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((822, 1046), 'dev.langchain4j.service.AiServices.builder'), ((822, 1021), 'dev.langchain4j.service.AiServices.builder'), ((822, 972), 'dev.langchain4j.service.AiServices.builder'), ((822, 899), 'dev.langchain4j.service.AiServices.builder')] |
package com.moyz.adi.common.service;
import com.moyz.adi.common.helper.LLMContext;
import com.moyz.adi.common.interfaces.TriConsumer;
import com.moyz.adi.common.util.AdiPgVectorEmbeddingStore;
import com.moyz.adi.common.vo.AnswerMeta;
import com.moyz.adi.common.vo.PromptMeta;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.lang3.tuple.Triple;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.util.stream.Collectors.joining;
@Slf4j
@Service
public class RAGService {

    @Value("${spring.datasource.url}")
    private String dataBaseUrl;

    @Value("${spring.datasource.username}")
    private String dataBaseUserName;

    @Value("${spring.datasource.password}")
    private String dataBasePassword;

    // Matches "jdbc:postgresql://<host>:<port>/<database>..." and captures host, port
    // and database name. Compiled once instead of on every call.
    private static final Pattern JDBC_URL_PATTERN =
            Pattern.compile("jdbc:postgresql://([^:/]+):(\\d+)/(\\w+).+");

    private static final PromptTemplate promptTemplate = PromptTemplate.from("尽可能准确地回答下面的问题: {{question}}\n\n根据以下知识库的内容:\n{{information}}");

    private EmbeddingModel embeddingModel;

    private EmbeddingStore<TextSegment> embeddingStore;

    /** Initializes the local embedding model and the pgvector-backed embedding store. */
    public void init() {
        log.info("initEmbeddingModel");
        embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        embeddingStore = initEmbeddingStore();
    }

    /**
     * Parses host/port/database out of the configured JDBC URL and builds the
     * pgvector embedding store.
     *
     * @throws RuntimeException if the JDBC URL does not match the expected format
     */
    private EmbeddingStore<TextSegment> initEmbeddingStore() {
        Matcher matcher = JDBC_URL_PATTERN.matcher(dataBaseUrl);
        if (!matcher.matches()) {
            throw new RuntimeException("parse url error");
        }
        String host = matcher.group(1);
        String port = matcher.group(2);
        String databaseName = matcher.group(3);
        // Fix: use the class's slf4j logger instead of System.out.println for consistency
        // with the rest of the class.
        log.info("Host: {}, Port: {}, Database: {}", host, port, databaseName);
        return AdiPgVectorEmbeddingStore.builder()
                .host(host)
                .port(Integer.parseInt(port))
                .database(databaseName)
                .user(dataBaseUserName)
                .password(dataBasePassword)
                .dimension(384)
                .createTable(true)
                .dropTableFirst(false)
                .table("adi_knowledge_base_embedding")
                .build();
    }

    private EmbeddingStoreIngestor getEmbeddingStoreIngestor() {
        // Split documents into chunks of up to 1000 tokens with no overlap,
        // counted with the GPT-3.5 tokenizer.
        DocumentSplitter documentSplitter = DocumentSplitters.recursive(1000, 0, new OpenAiTokenizer(GPT_3_5_TURBO));
        return EmbeddingStoreIngestor.builder()
                .documentSplitter(documentSplitter)
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
    }

    /**
     * Splits the document into chunks and stores their embeddings.
     *
     * @param document knowledge-base document to ingest
     */
    public void ingest(Document document) {
        getEmbeddingStoreIngestor().ingest(document);
    }

    /**
     * Embeds the question, retrieves relevant segments from the given knowledge base,
     * and builds a prompt combining question and retrieved context.
     *
     * @return the prompt, or {@code null} when nothing relevant was found
     */
    public Prompt retrieveAndCreatePrompt(String kbUuid, String question) {
        // Embed the question
        Embedding questionEmbedding = embeddingModel.embed(question).content();

        // Find relevant embeddings in embedding store by semantic similarity.
        // You can play with parameters below to find a sweet spot for your specific use case.
        int maxResults = 3;
        double minScore = 0.6;
        List<EmbeddingMatch<TextSegment>> relevantEmbeddings = ((AdiPgVectorEmbeddingStore) embeddingStore).findRelevantByKbUuid(kbUuid, questionEmbedding, maxResults, minScore);

        // Concatenate the retrieved segments; bail out if nothing was found.
        String information = relevantEmbeddings.stream()
                .map(match -> match.embedded().text())
                .collect(joining("\n\n"));
        if (StringUtils.isBlank(information)) {
            return null;
        }
        // quoteReplacement prevents '$'/'\' in retrieved text from being treated as
        // replacement metacharacters.
        return promptTemplate.apply(Map.of("question", question, "information", Matcher.quoteReplacement(information)));
    }

    /**
     * Retrieves relevant knowledge-base content and asks the LLM.
     *
     * @param kbUuid    knowledge-base uuid
     * @param question  the user's question
     * @param modelName LLM model name
     * @return pair of (prompt text, model response), or {@code null} when no relevant content was found
     */
    public Pair<String, Response<AiMessage>> retrieveAndAsk(String kbUuid, String question, String modelName) {
        Prompt prompt = retrieveAndCreatePrompt(kbUuid, question);
        if (null == prompt) {
            return null;
        }
        Response<AiMessage> response = new LLMContext(modelName).getLLMService().chat(prompt.toUserMessage());
        return new ImmutablePair<>(prompt.text(), response);
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((3196, 3615), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3590), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3535), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3496), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3461), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3429), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3385), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3345), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3305), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3259), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3894, 4099), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3894, 4074), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3894, 4026), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3894, 3978), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package dev.zbendhiba.demo.telegram.openapi;
import java.util.List;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import jakarta.enterprise.context.ApplicationScoped;
import static java.time.Duration.ofSeconds;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.telegram.model.IncomingMessage;
import org.eclipse.microprofile.config.inject.ConfigProperty;
@ApplicationScoped
public class Routes extends RouteBuilder {

    @ConfigProperty(name="open-api-key")
    String openApiKey;

    // In-process embedding model and store; embeddings live only for the app's lifetime.
    private EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
    private EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();

    @Override
    public void configure() throws Exception {

        // REST endpoints to add a bio: two ingestion variants, one splitting with
        // Camel's tokenizer, one with langchain4j's splitter.
        rest("data")
                .post("/camel-split-ingest/")
                .to("direct:camel-split-ingest")
                .post("/langchain4j-split-ingest/")
                .to("direct:langchain4j-split-ingest");

        // Ingest Data: reply "Thanks" immediately, process the bio asynchronously via wireTap.
        from("direct:camel-split-ingest")
                .wireTap("direct:processBio")
                .transform().simple("Thanks");

        from("direct:processBio")
                // split into paragraphs (blank-line separated) and use OpenApiTokenizer
                .split(body().tokenize("\\s*\\n\\s*\\n"))
                .setHeader("paragraphNumber", simple("${exchangeProperty.CamelSplitIndex}"))
                // Process each paragraph using the OpenAiTokenizerProcessor
                .process(new CamelSplitterProcessor())
                .to("direct:processTokenizedPart")
                .end();

        // Embed paragraphs into Vector Database (body is a List<TextSegment> produced upstream).
        from("direct:processTokenizedPart")
                .process(exchange -> {
                    embed(exchange.getIn().getBody(List.class));
                });

        // langchain4j-based splitting variant, feeding the same embedding route.
        from("direct:process-langchain4j-split-ingest")
                .process(new LangchainSplitterProcessor())
                .to("direct:processTokenizedPart");

        from("direct:langchain4j-split-ingest")
                .wireTap("direct:process-langchain4j-split-ingest")
                .transform().simple("Thanks");

        ChatLanguageModel model = OpenAiChatModel.builder()
                .apiKey(openApiKey)
                .modelName(GPT_3_5_TURBO)
                .temperature(0.3)
                .timeout(ofSeconds(3000))
                .build();

        // Retrieval chain: questions are answered from the embedding store contents,
        // with a 10-message window of conversational memory.
        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                .chatLanguageModel(model)
                .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .promptTemplate(PromptTemplate
                        .from("Answer the following question to the best of your ability: {{question}}\n\nBase your answer on the following information:\n{{information}}"))
                .build();

        // Telegram bot loop: forward each user message through the retrieval chain
        // and send the answer back to the same chat.
        from("telegram:bots?timeout=30000")
                .log("Text received in Telegram : ${body}")
                // this is just a Hello World, we suppose that we receive only text messages from user
                .filter(simple("${body} != '/start'"))
                .process(e->{
                    IncomingMessage incomingMessage = e.getMessage().getBody(IncomingMessage.class);
                    var openapiMessage = chain.execute(incomingMessage.getText());
                    e.getMessage().setBody(openapiMessage);
                })
                .log("Text to send to user based on response from ChatGPT : ${body}")
                .to("telegram:bots")
                .end();
    }

    // Embeds the given segments and stores embeddings alongside their source segments.
    public void embed(List<TextSegment> textSegments ) {
        List<Embedding> embeddings = embeddingModel.embedAll(textSegments).content();
        embeddingStore.addAll(embeddings, textSegments);
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((2918, 3122), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2918, 3097), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2918, 3055), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2918, 3021), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2918, 2979), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3171, 3658), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3171, 3633), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3171, 3413), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3171, 3340), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3171, 3251), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package eu.luminis.faqlangchain.service;
import java.io.File;
import java.io.FileNotFoundException;
import java.time.Duration;
import java.util.Arrays;
import java.util.stream.Collectors;
import com.fasterxml.jackson.databind.JsonNode;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.FileSystemResource;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.client.MultipartBodyBuilder;
import org.springframework.stereotype.Service;
import org.springframework.util.ResourceUtils;
import org.springframework.web.reactive.function.BodyInserters;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Mono;
@Service
public class IngestService {

    private static final Logger LOGGER = LoggerFactory.getLogger(IngestService.class);

    private final WebClient webClient;

    private final EmbeddingStore<TextSegment> embeddingStore;
    private final EmbeddingModel embeddingModel;

    public IngestService(@Value("${unstructured.apiKey}") String unstructuredApiKey,
                         @Qualifier("openaiModel") EmbeddingModel embeddingModel,
                         @Qualifier("inMemoryEmbeddingStore") EmbeddingStore<TextSegment> embeddingStore) {
        this.embeddingModel = embeddingModel;
        this.embeddingStore = embeddingStore;

        // Client for the Unstructured.io partitioning API; the API key is sent on every request.
        this.webClient = WebClient.builder()
                .baseUrl("https://api.unstructured.io/general/v0/")
                .defaultHeader("unstructured-api-key", unstructuredApiKey)
                .build();
    }

    /**
     * Uploads the bundled FAQ PDF to the Unstructured API (OCR), splits the extracted
     * text into segments, and ingests the embeddings into the store.
     *
     * @return {@code true} when ingestion completed, {@code false} on any API error
     * @throws FileNotFoundException if the classpath resource is missing
     */
    public boolean ingestPDF() throws FileNotFoundException {
        LOGGER.info("Ingesting PDF");
        File file = ResourceUtils.getFile("classpath:data/faq.pdf");

        // Multipart form: the PDF plus OCR strategy/language hints for Unstructured.
        MultipartBodyBuilder builder = new MultipartBodyBuilder();
        builder.part("files", new FileSystemResource(file));
        builder.part("strategy", "ocr_only");
        builder.part("ocr_languages", "eng");

        // On 200 the body is UnstructuredResponse[]; otherwise keep the raw JSON for logging.
        Mono<Object> mono = webClient.post()
                .uri("general")
                .contentType(MediaType.MULTIPART_FORM_DATA)
                .body(BodyInserters.fromMultipartData(builder.build()))
                .exchangeToMono(response -> {
                    if (response.statusCode().equals(HttpStatus.OK)) {
                        return response.bodyToMono(UnstructuredResponse[].class);
                    } else {
                        LOGGER.error("Something went wrong when uploading file to Unstructured API. Received status code {}", response.statusCode());
                        return response.bodyToMono(JsonNode.class);
                    }
                });

        // Block up to one minute for the OCR result (this is a synchronous service method).
        Object response = mono.block(Duration.ofMinutes(1));

        if (response instanceof JsonNode jsonNode) {
            LOGGER.error("Response: {}", jsonNode);
            return false;
        }

        if (response instanceof UnstructuredResponse[] unstructuredResponses) {
            // Concatenate all extracted text fragments into one document.
            String text = Arrays.stream(unstructuredResponses).map(UnstructuredResponse::getText).collect(Collectors.joining(" "));
            Document document = Document.from(text);
            // 300-character recursive splitting before embedding.
            DocumentSplitter documentSplitter = DocumentSplitters.recursive(300);

            EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                    .documentSplitter(documentSplitter)
                    .embeddingModel(embeddingModel)
                    .embeddingStore(embeddingStore)
                    .build();
            ingestor.ingest(document);
            LOGGER.info("Ingestion of PDF finished");
            return true;
        }

        // Unexpected response type (e.g. timeout returned null) — treat as failure.
        return false;
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1939, 2126), 'org.springframework.web.reactive.function.client.WebClient.builder'), ((1939, 2101), 'org.springframework.web.reactive.function.client.WebClient.builder'), ((1939, 2026), 'org.springframework.web.reactive.function.client.WebClient.builder'), ((3531, 3635), 'java.util.Arrays.stream'), ((3531, 3602), 'java.util.Arrays.stream'), ((3819, 4040), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3819, 4011), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3819, 3959), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3819, 3907), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package org.agoncal.fascicle.langchain4j.accessing.vertexai;
import dev.langchain4j.model.vertexai.VertexAiChatModel;
// tag::adocSkip[]
/**
* @author Antonio Goncalves
* http://www.antoniogoncalves.org
* --
*/
// end::adocSkip[]
public class MusicianService {

  public static void main(String[] args) {
    MusicianService musicianService = new MusicianService();

    musicianService.useVertexAiLanguageModelBuilder();
  }

  // NOTE(review): these constants are named AZURE_* but the class targets Vertex AI —
  // presumably copied from a sibling example; confirm the env vars are intended.
  private static final String AZURE_OPENAI_KEY = System.getenv("AZURE_OPENAI_KEY");
  private static final String AZURE_OPENAI_ENDPOINT = System.getenv("AZURE_OPENAI_ENDPOINT");
  private static final String AZURE_OPENAI_DEPLOYMENT_NAME = System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME");

  private static final String PROMPT = "When was the first Beatles album released?";

  // ###################################
  // ### AZURE OPENAI LANGUAGE MODEL ###
  // ###################################

  // Builds a Vertex AI chat model and sends a single prompt.
  // The tag::/end:: markers below are asciidoc snippet anchors — do not reformat.
  public void useVertexAiLanguageModelBuilder() {
    System.out.println("### useVertexAiLanguageModelBuilder");

    // tag::adocSnippet[]
    VertexAiChatModel model = VertexAiChatModel.builder()
      .endpoint(AZURE_OPENAI_ENDPOINT)
      .temperature(0.3)
      .build();
    // end::adocSnippet[]

    String completion = model.generate(PROMPT);
  }
}
| [
"dev.langchain4j.model.vertexai.VertexAiChatModel.builder"
] | [((1100, 1205), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1100, 1190), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1100, 1166), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder')] |
package com.example.application;
import com.example.application.services.BookingTools;
import com.example.application.services.CustomerSupportAgent;
import com.vaadin.flow.component.page.AppShellConfigurator;
import com.vaadin.flow.theme.Theme;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.Tokenizer;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import java.io.IOException;
import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_4;
@SpringBootApplication
@Theme(value = "customer-service-chatbot")
public class Application implements AppShellConfigurator {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }

    // Local (in-process) sentence-embedding model; no external API calls for embeddings.
    @Bean
    EmbeddingModel embeddingModel() {
        return new AllMiniLmL6V2EmbeddingModel();
    }

    // In-memory store: embeddings are rebuilt on every application start.
    @Bean
    EmbeddingStore<TextSegment> embeddingStore() {
        return new InMemoryEmbeddingStore<>();
    }

    // Tokenizer used both for document splitting and for sizing the chat memory window.
    @Bean
    Tokenizer tokenizer() {
        return new OpenAiTokenizer(GPT_3_5_TURBO);
    }

    // In the real world, ingesting documents would often happen separately, on a CI server or similar
    @Bean
    CommandLineRunner docsToEmbeddings(
            EmbeddingModel embeddingModel,
            EmbeddingStore<TextSegment> embeddingStore,
            Tokenizer tokenizer,
            ResourceLoader resourceLoader
    ) throws IOException {
        return args -> {
            Resource resource =
                    resourceLoader.getResource("classpath:terms-of-service.txt");
            var termsOfUse = loadDocument(resource.getFile().toPath(), new TextDocumentParser());

            // Chunks of up to 200 tokens, no overlap.
            DocumentSplitter documentSplitter = DocumentSplitters.recursive(200, 0,
                    tokenizer);

            EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                    .documentSplitter(documentSplitter)
                    .embeddingModel(embeddingModel)
                    .embeddingStore(embeddingStore)
                    .build();
            ingestor.ingest(termsOfUse);
        };
    }

    // Streaming model so the UI can render tokens as they arrive.
    @Bean
    StreamingChatLanguageModel chatLanguageModel() {
        return OpenAiStreamingChatModel.builder()
                .apiKey(ApiKeys.OPENAI_API_KEY)
                .modelName(GPT_3_5_TURBO)
                .build();
    }

    // Retrieves at most 2 segments with similarity score >= 0.6 per question.
    @Bean
    ContentRetriever retriever(
            EmbeddingStore<TextSegment> embeddingStore,
            EmbeddingModel embeddingModel
    ) {
        return EmbeddingStoreContentRetriever.builder()
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .maxResults(2)
                .minScore(0.6)
                .build();
    }

    // The agent: streaming chat + per-chat token-window memory (1000 tokens) + RAG + booking tools.
    @Bean
    CustomerSupportAgent customerSupportAgent(
            StreamingChatLanguageModel chatLanguageModel,
            Tokenizer tokenizer,
            ContentRetriever retriever,
            BookingTools tools
    ) {
        return AiServices.builder(CustomerSupportAgent.class)
                .streamingChatLanguageModel(chatLanguageModel)
                // A fresh memory per chat id, capped by token count rather than message count.
                .chatMemoryProvider(chatId -> TokenWindowChatMemory.builder()
                        .id(chatId)
                        .maxTokens(1000, tokenizer)
                        .build())
                .contentRetriever(retriever)
                .tools(tools)
                .build();
    }
}
} | [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder",
"dev.langchain4j.memory.chat.TokenWindowChatMemory.builder",
"dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((3196, 3417), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3196, 3388), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3196, 3336), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3196, 3284), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3556, 3705), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((3556, 3680), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((3556, 3638), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((3878, 4101), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3878, 4076), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3878, 4045), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3878, 4014), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3878, 3966), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((4354, 4763), 'dev.langchain4j.service.AiServices.builder'), ((4354, 4738), 'dev.langchain4j.service.AiServices.builder'), ((4354, 4708), 'dev.langchain4j.service.AiServices.builder'), ((4354, 4663), 'dev.langchain4j.service.AiServices.builder'), ((4354, 4463), 'dev.langchain4j.service.AiServices.builder'), ((4510, 4662), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder'), ((4510, 4629), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder'), ((4510, 4577), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder')] |
package com.tencent.supersonic.headless.core.chat.parser.llm;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.core.config.OptimizationConfig;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq.SqlGenerationMode;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMResp;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.output.Response;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Two-pass text-to-SQL generation strategy: the first LLM call performs schema
 * linking (mapping the user question to schema elements), the second call
 * generates the SQL using that linking result. Registers itself in
 * {@code SqlGenerationFactory} under {@code TWO_PASS_AUTO_COT} on startup.
 */
@Service
@Slf4j
public class TwoPassSqlGeneration implements SqlGeneration, InitializingBean {
    // Dedicated logger channel for tracing the key LLM pipeline steps.
    private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");
    @Autowired
    private ChatLanguageModel chatLanguageModel;
    @Autowired
    private SqlExamplarLoader sqlExamplarLoader;
    @Autowired
    private OptimizationConfig optimizationConfig;
    @Autowired
    private SqlPromptGenerator sqlPromptGenerator;
    /**
     * Runs the two-pass generation for the given request.
     *
     * @param llmReq    user question plus schema context for the LLM
     * @param dataSetId id of the data set being queried (used for logging here)
     * @return response holding the original query text and a map of candidate
     *         SQL strings to scores (a single candidate with score 1.0)
     */
    @Override
    public LLMResp generation(LLMReq llmReq, Long dataSetId) {
        keyPipelineLog.info("dataSetId:{},llmReq:{}", dataSetId, llmReq);
        // Retrieve few-shot examples similar to the user question; the count is configurable.
        List<Map<String, String>> sqlExamples = sqlExamplarLoader.retrieverSqlExamples(llmReq.getQueryText(),
                optimizationConfig.getText2sqlExampleNum());
        // Pass one: ask the model to link the question to schema elements.
        String linkingPromptStr = sqlPromptGenerator.generateLinkingPrompt(llmReq, sqlExamples);
        // NOTE(review): the prompt string is run through JsonUtil.toString before
        // templating — presumably to escape characters the template engine would
        // otherwise interpret; confirm against JsonUtil's contract.
        Prompt prompt = PromptTemplate.from(JsonUtil.toString(linkingPromptStr)).apply(new HashMap<>());
        keyPipelineLog.info("step one request prompt:{}", prompt.toSystemMessage());
        Response<AiMessage> response = chatLanguageModel.generate(prompt.toSystemMessage());
        keyPipelineLog.info("step one model response:{}", response.content().text());
        // Extract the schema-linking section from the model's free-text answer.
        String schemaLinkStr = OutputFormat.getSchemaLink(response.content().text());
        // Pass two: generate SQL conditioned on the linked schema.
        String generateSqlPrompt = sqlPromptGenerator.generateSqlPrompt(llmReq, schemaLinkStr, sqlExamples);
        Prompt sqlPrompt = PromptTemplate.from(JsonUtil.toString(generateSqlPrompt)).apply(new HashMap<>());
        keyPipelineLog.info("step two request prompt:{}", sqlPrompt.toSystemMessage());
        Response<AiMessage> sqlResult = chatLanguageModel.generate(sqlPrompt.toSystemMessage());
        String result = sqlResult.content().text();
        keyPipelineLog.info("step two model response:{}", result);
        // Single candidate with fixed weight 1.0 (no re-ranking in this strategy).
        Map<String, Double> sqlMap = new HashMap<>();
        sqlMap.put(result, 1D);
        keyPipelineLog.info("schemaLinkStr:{},sqlMap:{}", schemaLinkStr, sqlMap);
        LLMResp llmResp = new LLMResp();
        llmResp.setQuery(llmReq.getQueryText());
        llmResp.setSqlRespMap(OutputFormat.buildSqlRespMap(sqlExamples, sqlMap));
        return llmResp;
    }
    /** Registers this strategy in the factory once Spring wiring is complete. */
    @Override
    public void afterPropertiesSet() {
        SqlGenerationFactory.addSqlGenerationForFactory(SqlGenerationMode.TWO_PASS_AUTO_COT, this);
    }
}
| [
"dev.langchain4j.model.input.PromptTemplate.from"
] | [((1891, 1970), 'dev.langchain4j.model.input.PromptTemplate.from'), ((2459, 2539), 'dev.langchain4j.model.input.PromptTemplate.from')] |
package com.sg.chatbot.service;
import org.springframework.http.codec.ServerSentEvent;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.TokenStream;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks;
/**
 * Chat facade over two langchain4j AI services sharing one conversation
 * memory: a blocking assistant ({@link #chat}) and a streaming assistant
 * ({@link #chatStream}) that emits server-sent events.
 */
@Service
public class ChatService {

  // SECURITY FIX: the OpenAI key was hard-coded in source control. It is now
  // read from the environment, which is what the startup check below already
  // assumed (the null check was dead code with a hard-coded key).
  private String openaiApiKey = System.getenv("OPENAI_API_KEY");

  private Assistant assistant;
  private StreamingAssistant streamingAssistant;

  /** Blocking chat contract: one message in, complete reply out. */
  interface Assistant {
    String chat(String message);
  }

  /** Streaming chat contract: reply is delivered token by token. */
  interface StreamingAssistant {
    TokenStream chat(String message);
  }

  public ChatService(){
    if (openaiApiKey == null) {
      System.err
          .println("ERROR: OPENAI_API_KEY environment variable is not set. Please set it to your OpenAI API key.");
    }
    // Both assistants share this sliding-window memory, capped by token count.
    var memory = TokenWindowChatMemory.withMaxTokens(2000, new OpenAiTokenizer("gpt-3.5-turbo"));
    assistant = AiServices.builder(Assistant.class)
        .chatLanguageModel(OpenAiChatModel.withApiKey(openaiApiKey))
        .chatMemory(memory)
        .build();
    streamingAssistant = AiServices.builder(StreamingAssistant.class)
        .streamingChatLanguageModel(OpenAiStreamingChatModel.withApiKey(openaiApiKey))
        .chatMemory(memory)
        .build();
  }

  /** Sends one message and blocks until the full reply is available. */
  public String chat(String message) {
    System.out.println(message);
    return assistant.chat(message);
  }

  /**
   * Streams the model's reply as server-sent events named "chat".
   * Tokens are bridged from the langchain4j callback API into a Reactor sink.
   */
  public Flux<ServerSentEvent<String>> chatStream(String message) {
    Sinks.Many<String> sink = Sinks.many().unicast().onBackpressureBuffer();
    streamingAssistant.chat(message)
        .onNext(sink::tryEmitNext)
        .onComplete(c -> sink.tryEmitComplete())
        .onError(sink::tryEmitError)
        .start();
    return sink.asFlux().map(mes -> ServerSentEvent.<String>builder()
        .event("chat")
        .data(mes)
        .build());
  }
} | [
"dev.langchain4j.service.AiServices.builder"
] | [((1177, 1326), 'dev.langchain4j.service.AiServices.builder'), ((1177, 1309), 'dev.langchain4j.service.AiServices.builder'), ((1177, 1281), 'dev.langchain4j.service.AiServices.builder'), ((1354, 1530), 'dev.langchain4j.service.AiServices.builder'), ((1354, 1513), 'dev.langchain4j.service.AiServices.builder'), ((1354, 1485), 'dev.langchain4j.service.AiServices.builder'), ((1748, 1793), 'reactor.core.publisher.Sinks.many'), ((1748, 1770), 'reactor.core.publisher.Sinks.many'), ((2009, 2107), 'org.springframework.http.codec.ServerSentEvent.<String>builder'), ((2009, 2090), 'org.springframework.http.codec.ServerSentEvent.<String>builder'), ((2009, 2065), 'org.springframework.http.codec.ServerSentEvent.<String>builder')] |
package dev.langchain4j.model.azure;
import com.azure.ai.openai.models.*;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.Tokenizer;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.output.TokenUsage;
import java.util.List;
import static dev.langchain4j.model.azure.InternalAzureOpenAiHelper.finishReasonFrom;
import static java.util.Collections.singletonList;
/**
 * This class needs to be thread safe because it is called when a streaming result comes back
 * and there is no guarantee that this thread will be the same as the one that initiated the request,
 * in fact it almost certainly won't be.
 */
class AzureOpenAiStreamingResponseBuilder {
    // StringBuffer (not StringBuilder) is used deliberately: appends are
    // synchronized, supporting the cross-thread delivery described above.
    private final StringBuffer contentBuilder = new StringBuffer();
    private final StringBuffer toolNameBuilder = new StringBuffer();
    private final StringBuffer toolArgumentsBuilder = new StringBuffer();
    // volatile: written by the delivery thread, read by the thread calling build().
    private volatile CompletionsFinishReason finishReason;
    // Token count of the request, captured up front; output tokens are estimated in build().
    private final Integer inputTokenCount;
    public AzureOpenAiStreamingResponseBuilder(Integer inputTokenCount) {
        this.inputTokenCount = inputTokenCount;
    }
    /**
     * Accumulates one streamed chat-completion chunk. Each chunk may carry a
     * finish reason, a piece of message content, or a piece of a function call;
     * null/empty chunks are ignored.
     */
    public void append(ChatCompletions completions) {
        if (completions == null) {
            return;
        }
        List<ChatChoice> choices = completions.getChoices();
        if (choices == null || choices.isEmpty()) {
            return;
        }
        // Only the first choice is consumed; streaming responses here carry one choice.
        ChatChoice chatCompletionChoice = choices.get(0);
        if (chatCompletionChoice == null) {
            return;
        }
        CompletionsFinishReason finishReason = chatCompletionChoice.getFinishReason();
        if (finishReason != null) {
            this.finishReason = finishReason;
        }
        com.azure.ai.openai.models.ChatResponseMessage delta = chatCompletionChoice.getDelta();
        if (delta == null) {
            return;
        }
        String content = delta.getContent();
        if (content != null) {
            contentBuilder.append(content);
            return;
        }
        // A chunk without content may instead carry part of a function-call
        // name and/or its JSON arguments.
        FunctionCall functionCall = delta.getFunctionCall();
        if (functionCall != null) {
            if (functionCall.getName() != null) {
                toolNameBuilder.append(functionCall.getName());
            }
            if (functionCall.getArguments() != null) {
                toolArgumentsBuilder.append(functionCall.getArguments());
            }
        }
    }
    /**
     * Accumulates one streamed text-completion chunk (legacy completions API):
     * only a finish reason and plain text are expected here.
     */
    public void append(Completions completions) {
        if (completions == null) {
            return;
        }
        List<Choice> choices = completions.getChoices();
        if (choices == null || choices.isEmpty()) {
            return;
        }
        Choice completionChoice = choices.get(0);
        if (completionChoice == null) {
            return;
        }
        CompletionsFinishReason completionsFinishReason = completionChoice.getFinishReason();
        if (completionsFinishReason != null) {
            this.finishReason = completionsFinishReason;
        }
        String token = completionChoice.getText();
        if (token != null) {
            contentBuilder.append(token);
        }
    }
    /**
     * Assembles the final response from everything appended so far.
     * Text content takes precedence; otherwise a tool-execution request is
     * built; returns null if nothing was accumulated.
     *
     * @param tokenizer              used to estimate output token usage; may be null
     * @param forcefulToolExecution  affects how output tokens are counted for tool calls
     */
    public Response<AiMessage> build(Tokenizer tokenizer, boolean forcefulToolExecution) {
        String content = contentBuilder.toString();
        if (!content.isEmpty()) {
            return Response.from(
                    AiMessage.from(content),
                    tokenUsage(content, tokenizer),
                    finishReasonFrom(finishReason)
            );
        }
        String toolName = toolNameBuilder.toString();
        if (!toolName.isEmpty()) {
            ToolExecutionRequest toolExecutionRequest = ToolExecutionRequest.builder()
                    .name(toolName)
                    .arguments(toolArgumentsBuilder.toString())
                    .build();
            return Response.from(
                    AiMessage.from(toolExecutionRequest),
                    tokenUsage(toolExecutionRequest, tokenizer, forcefulToolExecution),
                    finishReasonFrom(finishReason)
            );
        }
        return null;
    }
    // Estimates token usage for a plain-text reply; null tokenizer disables estimation.
    private TokenUsage tokenUsage(String content, Tokenizer tokenizer) {
        if (tokenizer == null) {
            return null;
        }
        int outputTokenCount = tokenizer.estimateTokenCountInText(content);
        return new TokenUsage(inputTokenCount, outputTokenCount);
    }
    // Estimates token usage for a tool-call reply; counting differs when the
    // tool was executed forcefully (see inline comment).
    private TokenUsage tokenUsage(ToolExecutionRequest toolExecutionRequest, Tokenizer tokenizer, boolean forcefulToolExecution) {
        if (tokenizer == null) {
            return null;
        }
        int outputTokenCount = 0;
        if (forcefulToolExecution) {
            // OpenAI calculates output tokens differently when tool is executed forcefully
            outputTokenCount += tokenizer.estimateTokenCountInForcefulToolExecutionRequest(toolExecutionRequest);
        } else {
            outputTokenCount = tokenizer.estimateTokenCountInToolExecutionRequests(singletonList(toolExecutionRequest));
        }
        return new TokenUsage(inputTokenCount, outputTokenCount);
    }
}
| [
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder"
] | [((3735, 3894), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((3735, 3865), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((3735, 3801), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')] |
package dev.nano.sbot.configuration;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import dev.nano.sbot.retriever.EmbeddingStoreLoggingRetriever;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.time.Duration;
import java.util.List;
import static dev.nano.sbot.constant.Constants.PROMPT_TEMPLATE_2;
/**
 * Builds the question-answering chain at startup: ingests the provided
 * documents into an in-memory embedding store (local MiniLM embeddings),
 * then wires an OpenAI-backed {@link ConversationalRetrievalChain} that
 * retrieves from that store through a logging retriever.
 */
@Configuration
@RequiredArgsConstructor
@Slf4j
public class LangChainConfiguration {

    @Value("${langchain.api.key}")
    private String apiKey;

    @Value("${langchain.timeout}")
    private Long timeout;

    // Documents to index; collected elsewhere and injected by Spring.
    private final List<Document> documents;

    /**
     * Ingests all documents and returns the ready-to-use retrieval chain.
     * Note: ingestion happens eagerly at bean creation time.
     */
    @Bean
    public ConversationalRetrievalChain chain() {
        // Local embedding model — no API calls needed for indexing or retrieval.
        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        // Split into 500-character segments with no overlap before embedding.
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(DocumentSplitters.recursive(500, 0))
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        log.info("Ingesting Spring Boot Resources ...");
        ingestor.ingest(documents);
        log.info("Ingested {} documents", documents.size());
        EmbeddingStoreRetriever retriever = EmbeddingStoreRetriever.from(embeddingStore, embeddingModel);
        // Decorator that logs every retrieval for observability.
        EmbeddingStoreLoggingRetriever loggingRetriever = new EmbeddingStoreLoggingRetriever(retriever);
        log.info("Building ConversationalRetrievalChain ...");
        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                .chatLanguageModel(OpenAiChatModel.builder()
                        .apiKey(apiKey)
                        .timeout(Duration.ofSeconds(timeout))
                        .build()
                )
                .promptTemplate(PromptTemplate.from(PROMPT_TEMPLATE_2))
                .retriever(loggingRetriever)
                .build();
        log.info("Spring Boot knowledge base is ready!");
        return chain;
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1682, 1906), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1682, 1881), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1682, 1833), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1682, 1785), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2530, 2966), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2530, 2941), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2530, 2854), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2530, 2782), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2604, 2764), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2604, 2731), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2604, 2669), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.nexus.backend.service;
import com.nexus.backend.dto.UserTender;
import com.nexus.backend.entity.Act;
import com.nexus.backend.entity.Tender;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.Map;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
/**
 * Compliance-checking service backed by an OpenAI GPT-3.5 chat model.
 * Each public method renders a prompt template and delegates the model call
 * to a single shared helper (previously the builder code was triplicated).
 *
 * NOTE(review): the API keys below ("KEY"/"API_KEY") are hard-coded
 * placeholders kept verbatim from the original code; they should be moved
 * into configuration and injected.
 */
@Service
public class AiService {

    // Low temperature for deterministic, analysis-style answers.
    private static final double TEMPERATURE = 0.3;

    /** Smoke test: renders a trivial joke prompt and prints the model reply. */
    public void testGpt(){
        PromptTemplate promptTemplate = PromptTemplate
                .from("Tell me a {{adjective}} joke about {{content}}..");
        Map<String, Object> variables = new HashMap<>();
        variables.put("adjective", "funny");
        variables.put("content", "computers");
        Prompt prompt = promptTemplate.apply(variables);
        String response = generate(prompt, "KEY");
        System.out.println(response);
    }

    /**
     * Checks a user's tender/plan against a government act.
     *
     * @param act        the act with its compliance rules
     * @param userTender the tender/plan to evaluate
     * @return the model's free-text compliance assessment
     */
    public String checkIfCompliant(Act act, UserTender userTender) {
        PromptTemplate promptTemplate = PromptTemplate
                .from("This is a government act with a set of compliances {{act}}, With keeping this above act in mind, tell me if my tender/plan seems broadly compliant or not. " +
                        "Consider this tender/plan: {{tender}}" +
                        "Let me know if there are any shortcomings and where the tender/plan is not compliant. Also tell me about penalties.");
        Map<String, Object> variables = new HashMap<>();
        variables.put("act", act);
        variables.put("tender", userTender);
        Prompt prompt = promptTemplate.apply(variables);
        String response = generate(prompt, "API_KEY");
        System.out.println(response);
        return response;
    }

    // TODO: not yet implemented (kept for interface compatibility).
    public void Summarise(){
    }

    /**
     * Checks a user's tender text against an existing government tender.
     *
     * @param tender     the reference government tender
     * @param userTender the user's tender text to evaluate
     * @return the model's free-text compliance assessment
     */
    public String checkIfTenderIsCompliant(Tender tender, String userTender) {
        PromptTemplate promptTemplate = PromptTemplate
                .from("This is a government Tender with a set of compliances {{tender}}. With keeping this above act in mind, tell me if my tender seems broadly compliant or not. " +
                        "Consider this tender/plan: {{userTender}}" +
                        "Let me know if there are any shortcomings and where the tender is not compliant. Also tell me about penalties.");
        Map<String, Object> variables = new HashMap<>();
        variables.put("tender", tender.toString());
        // userTender is already a String; the redundant toString() was removed.
        variables.put("userTender", userTender);
        Prompt prompt = promptTemplate.apply(variables);
        String response = generate(prompt, "KEY");
        System.out.println(response);
        return response;
    }

    /** Builds the GPT-3.5 chat model with shared settings and runs the prompt. */
    private String generate(Prompt prompt, String apiKey) {
        ChatLanguageModel model = OpenAiChatModel.builder()
                .apiKey(apiKey)
                .modelName(GPT_3_5_TURBO)
                .temperature(TEMPERATURE)
                .build();
        return model.generate(prompt.text());
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((957, 1097), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((957, 1072), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((957, 1038), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((957, 1013), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1948, 2109), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1948, 2084), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1948, 2050), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1948, 2008), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3065, 3222), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3065, 3197), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3065, 3163), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3065, 3121), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package eu.luminis.faqlangchain.config;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.inprocess.InProcessEmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import static dev.langchain4j.model.inprocess.InProcessEmbeddingModelType.*;
import static dev.langchain4j.model.openai.OpenAiModelName.*;
import static java.time.Duration.*;
/**
 * Bean wiring for the question-answering pipeline. Two interchangeable
 * embedding models (OpenAI vs. in-process) and two interchangeable embedding
 * stores (in-memory vs. Weaviate) are exposed under distinct qualifiers so
 * consumers can pick a combination.
 */
@Configuration
public class QuestionAnsweringConfig {

    @Value("${openai.apiKey}")
    private String openaiApiKey;

    /** Embedding model served by the hosted OpenAI endpoint (ada-002). */
    @Qualifier("openaiModel")
    @Bean
    public EmbeddingModel openaiEmbeddingModel() {
        return OpenAiEmbeddingModel.builder()
                .modelName(TEXT_EMBEDDING_ADA_002)
                .apiKey(openaiApiKey)
                .build();
    }

    /** Local in-process embedding model (MiniLM); no network calls. */
    @Qualifier("inMemoryModel")
    @Bean
    public EmbeddingModel inMemoryEmbeddingModel() {
        return new InProcessEmbeddingModel(ALL_MINILM_L6_V2);
    }

    /** GPT-3.5 chat model used to phrase final answers; requests/responses are logged. */
    @Qualifier("openaiChatModel")
    @Bean
    public ChatLanguageModel openaiChatModel() {
        return OpenAiChatModel.builder()
                .modelName(GPT_3_5_TURBO)
                .apiKey(openaiApiKey)
                .temperature(0.7)
                .maxRetries(3)
                .timeout(ofSeconds(15))
                .logRequests(true)
                .logResponses(true)
                .build();
    }

    /** Volatile embedding store; contents are lost on restart. */
    @Qualifier("inMemoryEmbeddingStore")
    @Bean
    public EmbeddingStore<TextSegment> inMemoryEmbeddingStore() {
        return new InMemoryEmbeddingStore<>();
    }

    /** Persistent embedding store backed by a Weaviate cluster over HTTPS. */
    @Qualifier("weaviateEmbeddingStore")
    @Bean
    public EmbeddingStore<TextSegment> weaviateEmbeddingStore(@Value("${weaviate.apiKey}") String apiKey,
                                                              @Value("${weaviate.host}") String host) {
        return WeaviateEmbeddingStore.builder()
                .scheme("https")
                .host(host)
                .apiKey(apiKey)
                .build();
    }
}
| [
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder",
"dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder"
] | [((1210, 1354), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1210, 1329), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1210, 1278), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1635, 1941), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1916), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1881), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1845), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1814), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1774), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1740), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1698), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2397, 2547), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder'), ((2397, 2522), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder'), ((2397, 2494), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder'), ((2397, 2461), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder')] |
package com.example.demo;
import java.time.Duration;
import java.time.LocalDate;
import java.util.Arrays;
import java.util.List;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.structured.Description;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.MemoryId;
import dev.langchain4j.service.SystemMessage;
import dev.langchain4j.service.UserMessage;
import dev.langchain4j.service.V;
/**
 * Gallery of runnable langchain4j AiServices examples. Each nested static
 * class has its own main() demonstrating one feature; all share the single
 * OpenAI chat model configured below.
 */
public class AiServicesExamples {
    // Generous timeout: some examples make multiple model calls.
    static Duration duration = Duration.ofSeconds(60);
    static ChatLanguageModel model = OpenAiChatModel.builder().apiKey(ApiKeys.OPENAI_API_KEY).timeout(duration).build();
    ////////////////// SIMPLE EXAMPLE //////////////////////
    // Minimal AI service: an interface with a single chat method.
    static class Simple_AI_Service_Example {
        interface Assistant {
            String chat(String message);
        }
        public static void main(String[] args) {
            Assistant assistant = AiServices.create(Assistant.class, model);
            String userMessage = "Translate 'Plus-Values des cessions de valeurs mobilières, de droits sociaux et gains assimilés'";
            String answer = assistant.chat(userMessage);
            System.out.println(answer);
        }
    }
    ////////////////// WITH MESSAGE AND VARIABLES //////////////////////
    // Prompt templating via @SystemMessage/@UserMessage with {{variables}}
    // bound from method parameters annotated with @V.
    static class AI_Service_with_System_and_User_Messages_Example {
        interface TextUtils {
            @SystemMessage("You are a professional translator into {{language}}")
            @UserMessage("Translate the following text: {{text}}")
            String translate(@V("text") String text, @V("language") String language);
            @SystemMessage("Summarize every message from user in {{n}} bullet points. Provide only bullet points.")
            List<String> summarize(@UserMessage String text, @V("n") int n);
        }
        public static void main(String[] args) {
            TextUtils utils = AiServices.create(TextUtils.class, model);
            String translation = utils.translate("Hello, how are you?", "italian");
            System.out.println(translation); // Ciao, come stai?
            String text = "AI, or artificial intelligence, is a branch of computer science that aims to create "
                    + "machines that mimic human intelligence. This can range from simple tasks such as recognizing "
                    + "patterns or speech to more complex tasks like making decisions or predictions.";
            List<String> bulletPoints = utils.summarize(text, 3);
            System.out.println(bulletPoints);
        }
    }
    ////////////////////EXTRACTING DIFFERENT DATA TYPES ////////////////////
    // Return types other than String: enums and booleans are parsed from
    // the model's reply automatically.
    static class Sentiment_Extracting_AI_Service_Example {
        enum Sentiment {
            POSITIVE, NEUTRAL, NEGATIVE;
        }
        interface SentimentAnalyzer {
            @UserMessage("Analyze sentiment of {{it}}")
            Sentiment analyzeSentimentOf(String text);
            @UserMessage("Does {{it}} have a positive sentiment?")
            boolean isPositive(String text);
        }
        public static void main(String[] args) {
            SentimentAnalyzer sentimentAnalyzer = AiServices.create(SentimentAnalyzer.class, model);
            Sentiment sentiment = sentimentAnalyzer.analyzeSentimentOf("It is amazing!");
            System.out.println(sentiment); // POSITIVE
            boolean positive = sentimentAnalyzer.isPositive("It is bad!");
            System.out.println(positive); // false
        }
    }
    // POJO extraction: fields of a plain class are populated from free text.
    static class POJO_Extracting_AI_Service_Example {
        static class Person {
            private String firstName;
            private String lastName;
            private LocalDate birthDate;
            @Override
            public String toString() {
                return "Person {" + " firstName = \"" + firstName + "\"" + ", lastName = \"" + lastName + "\""
                        + ", birthDate = " + birthDate + " }";
            }
        }
        interface PersonExtractor {
            @UserMessage("Extract information about a person from {{it}}")
            Person extractPersonFrom(String text);
        }
        public static void main(String[] args) {
            PersonExtractor extractor = AiServices.create(PersonExtractor.class, model);
            String text = "In 1968, amidst the fading echoes of Independence Day, "
                    + "a child named John arrived under the calm evening sky. "
                    + "This newborn, bearing the surname Doe, marked the start of a new journey.";
            Person person = extractor.extractPersonFrom(text);
            System.out.println(person); // Person { firstName = "John", lastName = "Doe", birthDate = 1968-07-04 }
        }
    }
    ////////////////////// DESCRIPTIONS ////////////////////////
    // @Description guides the model when filling each field; @StructuredPrompt
    // turns a POJO into a prompt template.
    static class POJO_With_Descriptions_Extracting_AI_Service_Example {
        static class Recipe {
            @Description("short title, 3 words maximum")
            private String title;
            @Description("short description, 2 sentences maximum")
            private String description;
            @Description("each step should be described in 6 to 8 words, steps should rhyme with each other")
            private List<String> steps;
            private Integer preparationTimeMinutes;
            @Override
            public String toString() {
                return "Recipe {" +
                        " title = \"" + title + "\"" +
                        ", description = \"" + description + "\"" +
                        ", steps = " + steps +
                        ", preparationTimeMinutes = " + preparationTimeMinutes +
                        " }";
            }
        }
        @StructuredPrompt("Create a recipe of a {{dish}} that can be prepared using only {{ingredients}}")
        static class CreateRecipePrompt {
            private String dish;
            private List<String> ingredients;
        }
        interface Chef {
            Recipe createRecipeFrom(String... ingredients);
            Recipe createRecipe(CreateRecipePrompt prompt);
        }
        public static void main(String[] args) {
            Chef chef = AiServices.create(Chef.class, model);
            Recipe recipe = chef.createRecipeFrom("cucumber", "tomato", "feta", "onion", "olives", "lemon");
            System.out.println(recipe);
            CreateRecipePrompt prompt = new CreateRecipePrompt();
            prompt.dish = "oven dish";
            prompt.ingredients = Arrays.asList("cucumber", "tomato", "feta", "onion", "olives", "potatoes");
            Recipe anotherRecipe = chef.createRecipe(prompt);
            System.out.println(anotherRecipe);
        }
    }
    ////////////////////////// WITH MEMORY /////////////////////////
    // A single shared ChatMemory lets the assistant recall earlier turns.
    static class ServiceWithMemoryExample {
        interface Assistant {
            String chat(String message);
        }
        public static void main(String[] args) {
            ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);
            Assistant assistant = AiServices.builder(Assistant.class)
                    .chatLanguageModel(model)
                    .chatMemory(chatMemory)
                    .build();
            String answer = assistant.chat("Hello! My name is Klaus.");
            System.out.println(answer); // Hello Klaus! How can I assist you today?
            String answerWithName = assistant.chat("What is my name?");
            System.out.println(answerWithName); // Your name is Klaus.
        }
    }
    // Per-user memory: @MemoryId selects an isolated conversation per id,
    // created on demand by the chatMemoryProvider.
    static class ServiceWithMemoryForEachUserExample {
        interface Assistant {
            String chat(@MemoryId int memoryId, @UserMessage String userMessage);
        }
        public static void main(String[] args) {
            Assistant assistant = AiServices.builder(Assistant.class)
                    .chatLanguageModel(model)
                    .chatMemoryProvider(memoryId -> MessageWindowChatMemory.withMaxMessages(10))
                    .build();
            System.out.println(assistant.chat(1, "Hello, my name is Klaus"));
            // Hi Klaus! How can I assist you today?
            System.out.println(assistant.chat(2, "Hello, my name is Francine"));
            // Hello Francine! How can I assist you today?
            System.out.println(assistant.chat(1, "What is my name?"));
            // Your name is Klaus.
            System.out.println(assistant.chat(2, "What is my name?"));
            // Your name is Francine.
        }
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((792, 874), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((792, 866), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((792, 848), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((6740, 6894), 'dev.langchain4j.service.AiServices.builder'), ((6740, 6865), 'dev.langchain4j.service.AiServices.builder'), ((6740, 6821), 'dev.langchain4j.service.AiServices.builder'), ((7478, 7685), 'dev.langchain4j.service.AiServices.builder'), ((7478, 7656), 'dev.langchain4j.service.AiServices.builder'), ((7478, 7559), 'dev.langchain4j.service.AiServices.builder')] |
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.service.AiServices;

/**
 * Tool-using agent demo: the assistant is given two Calculator tools and a
 * question it can only answer by invoking them.
 */
public class _04_Agents {

    /** Tools the model is allowed to call while answering. */
    static class Calculator {

        @Tool("Calculates the length of a string")
        int stringLength(String s) {
            return s.length();
        }

        @Tool("Calculates the sum of two numbers")
        int add(int a, int b) {
            return a + b;
        }
    }

    interface Assistant {
        Response<AiMessage> chat(String userMessage);
    }

    public static void main(String[] args) {
        var apiKey = System.getenv("OPENAI_API_KEY");

        // Wire chat model, a ten-message conversation window, and the tools.
        var agent = AiServices.builder(Assistant.class)
                .chatLanguageModel(OpenAiChatModel.withApiKey(apiKey))
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .tools(new Calculator())
                .build();

        var answer = agent.chat("What is the sum of the numbers of letters in the words 'language' and 'model'");

        System.out.println(answer.content().text());
        System.out.println("\n\n########### TOKEN USAGE ############\n");
        System.out.println(answer.tokenUsage());
    }
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((821, 1069), 'dev.langchain4j.service.AiServices.builder'), ((821, 1044), 'dev.langchain4j.service.AiServices.builder'), ((821, 1003), 'dev.langchain4j.service.AiServices.builder'), ((821, 930), 'dev.langchain4j.service.AiServices.builder')] |
package me.nzuguem.bot.configurations.llm;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import jakarta.annotation.PreDestroy;
import jakarta.enterprise.context.RequestScoped;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Request-scoped provider of per-conversation chat memories, created lazily
 * and kept in a thread-safe map keyed by memory id.
 */
@RequestScoped
public class ChatMemoryBean implements ChatMemoryProvider {

    private final Map<Object, ChatMemory> memories = new ConcurrentHashMap<>();

    @Override
    public ChatMemory get(Object memoryId) {
        return memories.computeIfAbsent(memoryId, this::createMemory);
    }

    /** Builds a sliding-window memory keeping the last 20 messages. */
    private ChatMemory createMemory(Object id) {
        return MessageWindowChatMemory.builder()
                .id(id)
                .maxMessages(20)
                .build();
    }

    /** Drops all cached memories when the request scope ends. */
    @PreDestroy
    public void close() {
        memories.clear();
    }
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((631, 752), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((631, 727), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((631, 697), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
package net.savantly.mainbot.config;
import java.time.Duration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import lombok.RequiredArgsConstructor;
import net.savantly.mainbot.service.replicate.ReplicateClient;
/**
 * Wires the primary {@link ChatLanguageModel} bean from {@link OpenAIConfig}
 * settings. The bean is only created when {@code openai.enabled=true}.
 */
@Configuration
@RequiredArgsConstructor
public class ChatModelConfig {

    private final OpenAIConfig openAIConfig;

    /**
     * Primary chat model used across the application.
     *
     * @param replicateClient injected for a planned Replicate-backed model;
     *                        currently unused but kept for wiring compatibility
     */
    @Bean
    @Primary
    @ConditionalOnProperty(prefix = "openai", name = "enabled", havingValue = "true")
    public ChatLanguageModel getChatModel(ReplicateClient replicateClient) {
        return getOpenAiChatModel();
    }

    /** Builds an OpenAI chat model from the configured key, model id and timeout. */
    public ChatLanguageModel getOpenAiChatModel() {
        String apiKey = openAIConfig.getApiKey();
        return OpenAiChatModel.builder()
                .apiKey(apiKey) // https://platform.openai.com/account/api-keys
                .modelName(openAIConfig.getChatModelId())
                .temperature(0.1) // low temperature: favor consistent answers
                .logResponses(false)
                .logRequests(false)
                .timeout(Duration.ofSeconds(openAIConfig.getTimeoutSeconds()))
                .build();
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1056, 1430), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1405), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1326), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1290), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1253), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1219), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1113), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package io.quarkiverse.langchain4j.workshop.chat;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import jakarta.enterprise.context.ApplicationScoped;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@ApplicationScoped
public class ChatMemoryBean implements ChatMemoryProvider {
private final Map<Object, ChatMemory> memories = new ConcurrentHashMap<>();
@Override
public ChatMemory get(Object memoryId) {
return memories.computeIfAbsent(memoryId, id -> MessageWindowChatMemory.builder()
.maxMessages(3)
.id(memoryId)
.build());
}
public void clear(Object session) {
memories.remove(session);
}
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((608, 728), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((608, 703), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((608, 673), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
package io.quarkiverse.langchain4j.workshop.chat;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore;
import io.quarkus.runtime.StartupEvent;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;
import jakarta.inject.Inject;
import java.io.File;
import java.util.List;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
@ApplicationScoped
public class DocumentIngestor {
/**
* The embedding store (the database).
* The bean is provided by the quarkus-langchain4j-redis extension.
*/
@Inject
RedisEmbeddingStore store;
/**
* The embedding model (how the vector of a document is computed).
* The bean is provided by the LLM (like openai) extension.
*/
@Inject
EmbeddingModel embeddingModel;
public void ingest(@Observes StartupEvent event) {
System.out.printf("Ingesting documents...%n");
List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/catalog").toPath(), new TextDocumentParser());
var ingestor = EmbeddingStoreIngestor.builder()
.embeddingStore(store)
.embeddingModel(embeddingModel)
.documentSplitter(recursive(500, 0))
.build();
ingestor.ingest(documents);
System.out.printf("Ingested %d documents.%n", documents.size());
}
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1414, 1611), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1586), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1533), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1485), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package com.example.demo;
import java.time.Duration;
import dev.langchain4j.chain.ConversationalChain;
import dev.langchain4j.model.openai.OpenAiChatModel;
public class _07_ConversationalChain {
public static void main(String[] args) {
Duration duration = Duration.ofSeconds(60);
OpenAiChatModel model = OpenAiChatModel.builder().apiKey(ApiKeys.OPENAI_API_KEY).timeout(duration).build();
ConversationalChain chain = ConversationalChain.builder().chatLanguageModel(model)
// .chatMemory(...) // you can override default chat memory
.build();
String userMessage1 = "Can you give a brief explanation of the Agile methodology, 3 lines max?";
System.out.println("[User]: " + userMessage1);
String answer1 = chain.execute(userMessage1);
System.out.println("[LLM]: " + answer1);
String userMessage2 = "What are good tools for that? 3 lines max.";
System.out.println("[User]: " + userMessage2);
String answer2 = chain.execute(userMessage2);
System.out.println("[LLM]: " + answer2);
}
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder",
"dev.langchain4j.chain.ConversationalChain.builder"
] | [((313, 395), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((313, 387), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((313, 369), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((428, 559), 'dev.langchain4j.chain.ConversationalChain.builder'), ((428, 482), 'dev.langchain4j.chain.ConversationalChain.builder')] |
package org.mf.langchain.service;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.localai.LocalAiChatModel;
import dev.langchain4j.model.localai.LocalAiStreamingChatModel;
import org.jetbrains.annotations.Nullable;
import org.mf.langchain.util.LanguageModel;
import org.mf.langchain.StreamLanguageModel;
import org.springframework.stereotype.Service;
import java.time.Duration;
import java.util.function.Consumer;
@Service
public class LangChainService {
private final LanguageModel lm;
private final StreamLanguageModel slm;
LangChainService() {
lm = new LanguageModel(LocalAiChatModel.builder()
.modelName("phi-2")
.baseUrl("http://localhost:8080")
.build());
slm = new StreamLanguageModel(LocalAiStreamingChatModel.builder()
.modelName("phi-2")
.baseUrl("http://localhost:8080")
.timeout(Duration.ofDays(1))
.temperature(0.8)
.build());
}
public String Generate(String prompt)
{
return lm.RunBlocking(prompt);
}
public void GenerateStream(String prompt, Consumer<String> onNext, Consumer<Throwable> onError, @Nullable Consumer<AiMessage> onComplete) {
slm.generate(prompt, onNext, onError, onComplete);
}
}
| [
"dev.langchain4j.model.localai.LocalAiChatModel.builder",
"dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder"
] | [((623, 760), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((623, 735), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((623, 685), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((802, 1027), 'dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder'), ((802, 1002), 'dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder'), ((802, 968), 'dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder'), ((802, 923), 'dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder'), ((802, 873), 'dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder')] |
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.service.AiServices;
public class _04_Agents {
static class Calculator {
@Tool("Calculates the length of a string")
int stringLength(String s) {
return s.length();
}
@Tool("Calculates the sum of two numbers")
int add(int a, int b) {
return a + b;
}
}
interface Assistant {
Response<AiMessage> chat(String userMessage);
}
public static void main(String[] args) {
String openAiKey = System.getenv("OPENAI_API_KEY");
var assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(OpenAiChatModel.withApiKey(openAiKey))
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.tools(new Calculator())
.build();
var question = "What is the sum of the numbers of letters in the words 'language' and 'model'";
var response = assistant.chat(question);
System.out.println(response.content().text());
System.out.println("\n\n########### TOKEN USAGE ############\n");
System.out.println(response.tokenUsage());
}
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((821, 1069), 'dev.langchain4j.service.AiServices.builder'), ((821, 1044), 'dev.langchain4j.service.AiServices.builder'), ((821, 1003), 'dev.langchain4j.service.AiServices.builder'), ((821, 930), 'dev.langchain4j.service.AiServices.builder')] |
package io.quarkiverse.langchain4j.workshop.chat;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore;
import io.quarkus.runtime.StartupEvent;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;
import jakarta.inject.Inject;
import java.io.File;
import java.util.List;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
@ApplicationScoped
public class DocumentIngestor {
/**
* The embedding store (the database).
* The bean is provided by the quarkus-langchain4j-redis extension.
*/
@Inject
RedisEmbeddingStore store;
/**
* The embedding model (how the vector of a document is computed).
* The bean is provided by the LLM (like openai) extension.
*/
@Inject
EmbeddingModel embeddingModel;
public void ingest(@Observes StartupEvent event) {
System.out.printf("Ingesting documents...%n");
List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/catalog").toPath(), new TextDocumentParser());
var ingestor = EmbeddingStoreIngestor.builder()
.embeddingStore(store)
.embeddingModel(embeddingModel)
.documentSplitter(recursive(500, 0))
.build();
ingestor.ingest(documents);
System.out.printf("Ingested %d documents.%n", documents.size());
}
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1414, 1611), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1586), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1533), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1485), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package io.quarkiverse.langchain4j.samples;
import java.util.function.Supplier;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.store.memory.chat.InMemoryChatMemoryStore;
public class CustomProvider implements Supplier<ChatMemoryProvider> {
private final InMemoryChatMemoryStore store = new InMemoryChatMemoryStore();
@Override
public ChatMemoryProvider get() {
return new ChatMemoryProvider() {
@Override
public ChatMemory get(Object memoryId) {
return MessageWindowChatMemory.builder()
.maxMessages(20)
.id(memoryId)
.chatMemoryStore(store)
.build();
}
};
}
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((652, 845), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((652, 812), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((652, 764), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((652, 726), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
package dev.onurb.travelassistant;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import java.io.IOException;
import java.time.Duration;
import java.util.Scanner;
public class TravelAgency {
public static void main(String[] args) throws IOException {
String apiKey = System.getenv("OPENAPI_KEY");
TravelAssistant assistant = AiServices.builder(TravelAssistant.class)
.chatLanguageModel(OpenAiChatModel.builder().apiKey(apiKey).timeout(Duration.ofMinutes(3)).build())
.tools(new TripServices())
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.build();
String input = readInput();
while (!"bye".equalsIgnoreCase(input)) {
String answer = assistant.chat(input);
System.out.println("\u001B[33m" + answer + "\u001B[37m");
input = readInput();
}
}
private static String readInput() {
Scanner in = new Scanner(System.in);
System.out.print("> ");
return in.nextLine();
}
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((460, 758), 'dev.langchain4j.service.AiServices.builder'), ((460, 733), 'dev.langchain4j.service.AiServices.builder'), ((460, 660), 'dev.langchain4j.service.AiServices.builder'), ((460, 617), 'dev.langchain4j.service.AiServices.builder'), ((537, 616), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((537, 608), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((537, 577), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gemini.workshop;
import dev.langchain4j.agent.tool.P;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.vertexai.VertexAiGeminiChatModel;
import dev.langchain4j.service.AiServices;
public class Step8b_FunctionCalling {
record WeatherForecast(String location, String forecast, int temperature) {}
static class WeatherForecastService {
@Tool("Get the weather forecast for a location")
WeatherForecast getForecast(@P("Location to get the forecast for") String location) {
if (location.equals("Paris")) {
return new WeatherForecast("Paris", "Sunny", 20);
} else if (location.equals("London")) {
return new WeatherForecast("London", "Rainy", 15);
} else {
return new WeatherForecast("Unknown", "Unknown", 0);
}
}
}
interface WeatherAssistant {
String chat(String userMessage);
}
public static void main(String[] args) {
ChatLanguageModel model = VertexAiGeminiChatModel.builder()
.project(System.getenv("PROJECT_ID"))
.location(System.getenv("LOCATION"))
.modelName("gemini-1.0-pro")
.maxOutputTokens(100)
.build();
WeatherForecastService weatherForecastService = new WeatherForecastService();
WeatherAssistant assistant = AiServices.builder(WeatherAssistant.class)
.chatLanguageModel(model)
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.tools(weatherForecastService)
.build();
System.out.println(assistant.chat("What is the weather in Paris?"));
System.out.println(assistant.chat("What is the weather in London?"));
System.out.println(assistant.chat("Is the temperature warmer in Paris or London?"));
}
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder"
] | [((1743, 1971), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1743, 1950), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1743, 1916), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1743, 1875), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1743, 1826), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((2098, 2311), 'dev.langchain4j.service.AiServices.builder'), ((2098, 2290), 'dev.langchain4j.service.AiServices.builder'), ((2098, 2247), 'dev.langchain4j.service.AiServices.builder'), ((2098, 2178), 'dev.langchain4j.service.AiServices.builder')] |
package com.hillarocket.application.handler;
import com.vaadin.flow.server.auth.AnonymousAllowed;
import dev.hilla.BrowserCallable;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.TokenStream;
import jakarta.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Value;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks;
@BrowserCallable
@AnonymousAllowed
public class OpenApiHandler {
@Value("${openai.api.key}")
private String OPENAI_API_KEY;
private Assistant assistant;
private StreamingAssistant streamingAssistant;
interface Assistant {
String chat(String message);
}
interface StreamingAssistant {
TokenStream chat(String message);
}
@PostConstruct
public void init() {
if (OPENAI_API_KEY == null) {
System.err.println("ERROR: OPENAI_API_KEY environment variable is not set. Please set it to your OpenAI API key.");
}
var memory = TokenWindowChatMemory.withMaxTokens(2000, new OpenAiTokenizer("gpt-3.5-turbo"));
assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(OpenAiChatModel.withApiKey(OPENAI_API_KEY))
.chatMemory(memory)
.build();
streamingAssistant = AiServices.builder(StreamingAssistant.class)
.streamingChatLanguageModel(OpenAiStreamingChatModel.withApiKey(OPENAI_API_KEY))
.chatMemory(memory)
.build();
}
public String chat(String message) {
return assistant.chat(message);
}
public Flux<String> chatStream(String message) {
Sinks.Many<String> sink = Sinks.many().unicast().onBackpressureBuffer();
streamingAssistant.chat(message)
.onNext(sink::tryEmitNext)
.onComplete(c -> sink.tryEmitComplete())
.onError(sink::tryEmitError)
.start();
return sink.asFlux();
}
} | [
"dev.langchain4j.service.AiServices.builder"
] | [((1336, 1511), 'dev.langchain4j.service.AiServices.builder'), ((1336, 1486), 'dev.langchain4j.service.AiServices.builder'), ((1336, 1450), 'dev.langchain4j.service.AiServices.builder'), ((1543, 1745), 'dev.langchain4j.service.AiServices.builder'), ((1543, 1720), 'dev.langchain4j.service.AiServices.builder'), ((1543, 1684), 'dev.langchain4j.service.AiServices.builder'), ((1929, 1974), 'reactor.core.publisher.Sinks.many'), ((1929, 1951), 'reactor.core.publisher.Sinks.many')] |
package _Engenharia;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
//import dev.langchain4j.data.document.splitter.ParagraphSplitter; !!!!!!!!!!!!!!!DANDO ERRO, substitui temporariamente!!!!!!!!!!!!!!!!!!!!!
import dev.langchain4j.data.document.splitter.DocumentSplitters; //Substituição
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.huggingface.HuggingFaceChatModel;
import dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static java.time.Duration.ofSeconds;
import java.io.File;
public class Assistente {
// You can get your own HuggingFace API key here: https://huggingface.co/settings/tokens
public static final String hfApiKey = "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";
private static String pergunta;
public String fazerPergunta() throws Exception {
Document document = loadDocument(toPath("template.txt")); //Usa documento criado com todos os dados do documento selecionado (Esse documento e criado dentro do pacote _Engenharia)
//escolhendo um modelo para vetorizar meu texto
EmbeddingModel embeddingModel = HuggingFaceEmbeddingModel.builder()
.accessToken(hfApiKey)
.modelId("sentence-transformers/all-MiniLM-L6-v2")
.waitForModel(true)
.timeout(ofSeconds(60))
.build();
EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
//estou aplicando o modelo de vetorização escolhido ao meu texto
EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
// .splitter(new ParagraphSplitter()) !!!!!!!!!!!!!!!DANDO ERRO, substitui temporariamente!!!!!!!!!!!!!!!!!!!!!
.documentSplitter(DocumentSplitters.recursive(500)) //Substituição
.embeddingModel(embeddingModel)
.embeddingStore(embeddingStore)
.build();
ingestor.ingest(document);
//aqui eu escolho o modelo da inferência (a pergunta)
ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
.chatLanguageModel(HuggingFaceChatModel.withAccessToken(hfApiKey))
.retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
// .chatMemory() // you can override default chat memory
// .promptTemplate() // you can override default prompt template
.build();
//aqui eu faço a inferência
String answer = chain.execute(pergunta);
File delete_file = new File("src/main/java/_Engenharia/template.txt"); //Apaga o documento depois da resposta
delete_file.delete(); //Caso erro na resposta o arquivo NAO e deletado
return answer; // Charlie is a cheerful carrot living in VeggieVille...
//exemplo para continuar a pesquisa
//https://github.com/langchain4j/langchain4j/blob/7307f43d9823af619f1e3196252d212f3df04ddc/langchain4j/src/main/java/dev/langchain4j/model/huggingface/HuggingFaceChatModel.java
}
private static Path toPath(String fileName) {
try {
URL fileUrl = Assistente.class.getResource(fileName);
return Paths.get(fileUrl.toURI());
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
}
public void setPergunta(String p) {
pergunta = p;
}
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1706, 1948), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((1706, 1923), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((1706, 1883), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((1706, 1847), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((1706, 1780), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((2162, 2524), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2162, 2499), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2162, 2451), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2162, 2385), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2675, 3064), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2675, 2885), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2675, 2796), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package com.kchandrakant;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import java.util.HashMap;
import java.util.Map;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.time.Duration.ofSeconds;
public class PromptTemplates {
public static void main(String[] args) {
// Create a prompt template
PromptTemplate promptTemplate = PromptTemplate.from("Tell me a {{adjective}} joke about {{content}}..");
// Generate prompt using the prompt template and user variables
Map<String, Object> variables = new HashMap<>();
variables.put("adjective", "funny");
variables.put("content", "humans");
Prompt prompt = promptTemplate.apply(variables);
System.out.println(prompt.text());
// Create an instance of a model
ChatLanguageModel model = OpenAiChatModel.builder()
.apiKey(ApiKeys.OPENAI_API_KEY)
.modelName(GPT_3_5_TURBO)
.temperature(0.3)
.build();
// Start interacting
String response = model.generate(prompt.text());
System.out.println(response);
}
} | [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1019, 1193), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1019, 1168), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1019, 1134), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1019, 1092), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.azure.migration.java.copilot.service;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class Configure {
@Bean
ServiceAnalysisAgent chooseServiceAnalysisAgent(ChatLanguageModel chatLanguageModel) {
return AiServices.builder(ServiceAnalysisAgent.class)
.chatLanguageModel(chatLanguageModel)
.build();
}
@Bean
ConfigureResourceAgent configureResourceAgent(ChatLanguageModel chatLanguageModel,ContentRetriever contentRetriever) {
return AiServices.builder(ConfigureResourceAgent.class)
.chatLanguageModel(chatLanguageModel)
.contentRetriever(contentRetriever)
.build();
}
@Bean
WorkflowChatAgent configureWorkflowChatAgent(ChatLanguageModel chatLanguageModel, ContentRetriever contentRetriever, MigrationWorkflowTools migrationWorkflowTools) {
return AiServices.builder(WorkflowChatAgent.class)
.chatLanguageModel(chatLanguageModel)
.tools(migrationWorkflowTools)
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.build();
}
@Bean
ContentRetriever contentRetriever(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel) {
// You will need to adjust these parameters to find the optimal setting, which will depend on two main factors:
// - The nature of your data
// - The embedding model you are using
int maxResults = 5;
double minScore = 0.6;
return EmbeddingStoreContentRetriever.builder()
.embeddingStore(embeddingStore)
.embeddingModel(embeddingModel)
.maxResults(maxResults)
.minScore(minScore)
.build();
}
@Bean
EmbeddingModel embeddingModel() {
return new AllMiniLmL6V2EmbeddingModel();
}
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder"
] | [((846, 971), 'dev.langchain4j.service.AiServices.builder'), ((846, 946), 'dev.langchain4j.service.AiServices.builder'), ((1128, 1307), 'dev.langchain4j.service.AiServices.builder'), ((1128, 1282), 'dev.langchain4j.service.AiServices.builder'), ((1128, 1230), 'dev.langchain4j.service.AiServices.builder'), ((1511, 1753), 'dev.langchain4j.service.AiServices.builder'), ((1511, 1728), 'dev.langchain4j.service.AiServices.builder'), ((1511, 1655), 'dev.langchain4j.service.AiServices.builder'), ((1511, 1608), 'dev.langchain4j.service.AiServices.builder'), ((2167, 2404), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2167, 2379), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2167, 2343), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2167, 2303), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2167, 2255), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder')] |
package com.example.application.services;
import com.vaadin.flow.server.auth.AnonymousAllowed;
import dev.hilla.BrowserCallable;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.TokenStream;
import jakarta.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks;
@Service
@BrowserCallable
@AnonymousAllowed
public class ChatService {
@Value("${openai.api.key}")
private String OPENAI_API_KEY;
private Assistant assistant;
private StreamingAssistant streamingAssistant;
interface Assistant {
String chat(String message);
}
interface StreamingAssistant {
TokenStream chat(String message);
}
@PostConstruct
public void init() {
var memory = TokenWindowChatMemory.withMaxTokens(2000, new OpenAiTokenizer("gpt-3.5-turbo"));
assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(OpenAiChatModel.withApiKey(OPENAI_API_KEY))
.chatMemory(memory)
.build();
streamingAssistant = AiServices.builder(StreamingAssistant.class)
.streamingChatLanguageModel(OpenAiStreamingChatModel.withApiKey(OPENAI_API_KEY))
.chatMemory(memory)
.build();
}
public String chat(String message) {
return assistant.chat(message);
}
public Flux<String> chatStream(String message) {
Sinks.Many<String> sink = Sinks.many().unicast().onBackpressureBuffer();
streamingAssistant.chat(message)
.onNext(sink::tryEmitNext)
.onComplete(sink::tryEmitComplete)
.onError(sink::tryEmitError)
.start();
return sink.asFlux();
}
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((1208, 1383), 'dev.langchain4j.service.AiServices.builder'), ((1208, 1358), 'dev.langchain4j.service.AiServices.builder'), ((1208, 1322), 'dev.langchain4j.service.AiServices.builder'), ((1415, 1617), 'dev.langchain4j.service.AiServices.builder'), ((1415, 1592), 'dev.langchain4j.service.AiServices.builder'), ((1415, 1556), 'dev.langchain4j.service.AiServices.builder'), ((1801, 1846), 'reactor.core.publisher.Sinks.many'), ((1801, 1823), 'reactor.core.publisher.Sinks.many')] |
package org.acme;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkus.logging.Log;
import io.quarkus.runtime.Startup;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import jakarta.json.Json;
import jakarta.json.JsonArray;
import jakarta.json.JsonReader;
import jakarta.json.JsonValue;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.List;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
@ApplicationScoped
public class IngestData {
@Inject
EmbeddingStore<TextSegment> store;
@Inject
EmbeddingModel embeddingModel;
@Inject
@ConfigProperty(name = "data.file")
File dataFile;
@Inject
@ConfigProperty(name = "max.entries", defaultValue = "99999")
Integer maxEntries;
@Startup
public void init() {
List<Document> documents = new ArrayList<>();
try(JsonReader reader = Json.createReader(new FileReader(dataFile))) {
JsonArray results = reader.readArray();
Log.info("Ingesting news reports...");
int i = 0;
for (JsonValue newsEntry : results) {
i++;
if(i > maxEntries) {
break;
}
String content = newsEntry.asJsonObject().getString("content", null);
if(content != null && !content.isEmpty()) {
Document doc = new Document(content);
documents.add(doc);
continue;
}
String fullDescription = newsEntry.asJsonObject().getString("full_description", null);
if(fullDescription != null && !fullDescription.isEmpty()) {
Document doc = new Document(fullDescription);
documents.add(doc);
continue;
}
String description = newsEntry.asJsonObject().getString("description", null);
if(description != null && !description.isEmpty()) {
Document doc = new Document(description);
documents.add(doc);
continue;
}
}
var ingestor = EmbeddingStoreIngestor.builder()
.embeddingStore(store)
.embeddingModel(embeddingModel)
.documentSplitter(recursive(1000, 50))
.build();
ingestor.ingest(documents);
Log.infof("Ingested %d news articles.", documents.size());
} catch (FileNotFoundException e) {
throw new RuntimeException(e);
}
}
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((2590, 2805), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2590, 2776), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2590, 2717), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2590, 2665), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package com.sivalabs.demo.langchain4j;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
public class OllamaChatDemo {
public static void main(String[] args) {
ChatLanguageModel model = OllamaChatModel.builder()
.baseUrl("http://localhost:11434")
.modelName("llama2")
.build();
String answer = model.generate("List all the movies directed by Quentin Tarantino");
System.out.println(answer);
}
}
| [
"dev.langchain4j.model.ollama.OllamaChatModel.builder"
] | [((257, 395), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((257, 370), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((257, 333), 'dev.langchain4j.model.ollama.OllamaChatModel.builder')] |
package com.ramesh.langchain;
import java.util.Scanner;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
/***
* This project demostrates the use of LangCHain Services which uses custom tools to generate the final output
*/
public class ServiceWithToolsLive {
// Open AI Key and Chat GPT Model to use
public static String OPENAI_API_KEY = "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";
public static String OPENAI_MODEL = "gpt-3.5-turbo";
public static void main(String[] args) {
System.out.println("Using a custom Calculator as LangChain \"tool\"");
// Building a Custom LangChain Assistant using LangChain AiServices
System.out.println("Building a Custom Assistant using LangChain AiServices");
Assistant assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(OpenAiChatModel.withApiKey(OPENAI_API_KEY)).tools(new Calculator())
.chatMemory(MessageWindowChatMemory.withMaxMessages(10)).build();
while (true) {
// get 2 words for which the total characters count is calculated
Scanner scanner = new Scanner(System.in);
System.out.print("Enter Word 1:");
String word1 = scanner.nextLine();
System.out.print("Enter Word 2:");
String word2 = scanner.nextLine();
String question = "What is the sum of the numbers of letters in the words \"" + word1 + "\" and \"" + word2 + "\"?";
System.out.println("Prompting ChatGPT :" + question);
// when a prompt having 2 words are sent LLM via LAngChain Assistant
// the Calcualtor functions are called to get the final answers
System.out.println("Invoking Custom Assistant Class chat() and getting response from ChatGPT...");
String answer = assistant.chat(question);
System.out.println("ChatGPT Response...\n");
System.out.println(answer);
}
}
// a custom tool
static class Calculator {
@Tool("Calculates the length of a string")
int stringLength(String s) {
return s.length();
}
@Tool("Calculates the sum of two numbers")
int add(int a, int b) {
return a + b;
}
}
interface Assistant {
String chat(String userMessage);
}
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((896, 1091), 'dev.langchain4j.service.AiServices.builder'), ((896, 1083), 'dev.langchain4j.service.AiServices.builder'), ((896, 1022), 'dev.langchain4j.service.AiServices.builder'), ((896, 998), 'dev.langchain4j.service.AiServices.builder')] |
package ${{ values.basePackage }};
import java.io.IOException;
import java.nio.file.Path;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentParser;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.util.ResourceUtils;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
@SpringBootApplication
public class DemoApplication {
public static void main(String[] args) {
SpringApplication.run(DemoApplication.class, args);
}
@Bean
ChatAgent chatAgent(ChatLanguageModel chatLanguageModel) {
ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);
return AiServices.builder(ChatAgent.class)
.chatLanguageModel(chatLanguageModel)
.chatMemory(chatMemory)
.build();
}
@Bean
DocumentAgent documentAgent(ChatLanguageModel chatLanguageModel, EmbeddingModel embeddingModel, EmbeddingStore<TextSegment> embeddingStore) throws IOException {
Path documentPath = ResourceUtils.getFile("classpath:documents/story.md").toPath();
DocumentParser documentParser = new TextDocumentParser();
Document document = FileSystemDocumentLoader.loadDocument(documentPath, documentParser);
EmbeddingStoreIngestor dataIngestor = EmbeddingStoreIngestor.builder()
.embeddingStore(embeddingStore)
.embeddingModel(embeddingModel)
.documentSplitter(DocumentSplitters.recursive(300, 10))
.build();
dataIngestor.ingest(document);
ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
.embeddingStore(embeddingStore)
.embeddingModel(embeddingModel)
.maxResults(3)
.minScore(0.5)
.build();
ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);
return AiServices.builder(DocumentAgent.class)
.chatLanguageModel(chatLanguageModel)
.contentRetriever(contentRetriever)
.chatMemory(chatMemory)
.build();
}
}
@RestController
class ChatController {
private final ChatAgent chatAgent;
ChatController(ChatAgent chatAgent) {
this.chatAgent = chatAgent;
}
@PostMapping("/chat")
String chat(@RequestBody String prompt) {
return chatAgent.answer(prompt);
}
}
@RestController
class DocumentController {
private final DocumentAgent documentAgent;
DocumentController(DocumentAgent documentAgent) {
this.documentAgent = documentAgent;
}
@PostMapping("/chat/doc")
String chat(@RequestBody String prompt) {
return documentAgent.answer(prompt);
}
}
interface ChatAgent {
String answer(String prompt);
}
interface DocumentAgent {
String answer(String prompt);
}
| [
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder",
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1657, 1775), 'dev.langchain4j.service.AiServices.builder'), ((1657, 1762), 'dev.langchain4j.service.AiServices.builder'), ((1657, 1734), 'dev.langchain4j.service.AiServices.builder'), ((1972, 2034), 'org.springframework.util.ResourceUtils.getFile'), ((2228, 2405), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2228, 2392), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2228, 2332), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2228, 2296), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2479, 2642), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2479, 2629), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2479, 2610), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2479, 2591), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2479, 2555), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2727, 2889), 'dev.langchain4j.service.AiServices.builder'), ((2727, 2876), 'dev.langchain4j.service.AiServices.builder'), ((2727, 2848), 'dev.langchain4j.service.AiServices.builder'), ((2727, 2808), 'dev.langchain4j.service.AiServices.builder')] |
package com.docuverse.backend.configuration;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import io.github.cdimascio.dotenv.Dotenv;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import static dev.langchain4j.model.openai.OpenAiModelName.TEXT_EMBEDDING_ADA_002;
import static java.time.Duration.ofSeconds;
@Configuration
public class EmbeddingModelConfiguration {
Dotenv dotenv = Dotenv.load();
@Bean
public EmbeddingModel embeddingModel() {
return OpenAiEmbeddingModel.builder()
.apiKey(dotenv.get("OPENAI_API_KEY"))
.modelName(TEXT_EMBEDDING_ADA_002)
.timeout(ofSeconds(15))
.logRequests(false)
.logResponses(false)
.build();
}
}
| [
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder"
] | [((784, 1057), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((784, 1032), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((784, 995), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((784, 959), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((784, 919), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((784, 868), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder')] |
package io.quarkiverse.langchain4j.openai.runtime;
import static io.quarkiverse.langchain4j.runtime.OptionalUtil.firstOrDefault;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
import java.util.function.Supplier;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.DisabledChatLanguageModel;
import dev.langchain4j.model.chat.DisabledStreamingChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.embedding.DisabledEmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.image.DisabledImageModel;
import dev.langchain4j.model.image.ImageModel;
import dev.langchain4j.model.moderation.DisabledModerationModel;
import dev.langchain4j.model.moderation.ModerationModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.model.openai.OpenAiModerationModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient;
import io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel;
import io.quarkiverse.langchain4j.openai.runtime.config.ChatModelConfig;
import io.quarkiverse.langchain4j.openai.runtime.config.EmbeddingModelConfig;
import io.quarkiverse.langchain4j.openai.runtime.config.ImageModelConfig;
import io.quarkiverse.langchain4j.openai.runtime.config.LangChain4jOpenAiConfig;
import io.quarkiverse.langchain4j.openai.runtime.config.ModerationModelConfig;
import io.quarkiverse.langchain4j.runtime.NamedModelUtil;
import io.quarkus.runtime.ShutdownContext;
import io.quarkus.runtime.annotations.Recorder;
import io.smallrye.config.ConfigValidationException;
@Recorder
public class OpenAiRecorder {
private static final String DUMMY_KEY = "dummy";
public Supplier<ChatLanguageModel> chatModel(LangChain4jOpenAiConfig runtimeConfig, String modelName) {
LangChain4jOpenAiConfig.OpenAiConfig openAiConfig = correspondingOpenAiConfig(runtimeConfig, modelName);
if (openAiConfig.enableIntegration()) {
String apiKey = openAiConfig.apiKey();
if (DUMMY_KEY.equals(apiKey)) {
throw new ConfigValidationException(createApiKeyConfigProblems(modelName));
}
ChatModelConfig chatModelConfig = openAiConfig.chatModel();
var builder = OpenAiChatModel.builder()
.baseUrl(openAiConfig.baseUrl())
.apiKey(apiKey)
.timeout(openAiConfig.timeout())
.maxRetries(openAiConfig.maxRetries())
.logRequests(firstOrDefault(false, chatModelConfig.logRequests(), openAiConfig.logRequests()))
.logResponses(firstOrDefault(false, chatModelConfig.logResponses(), openAiConfig.logResponses()))
.modelName(chatModelConfig.modelName())
.temperature(chatModelConfig.temperature())
.topP(chatModelConfig.topP())
.presencePenalty(chatModelConfig.presencePenalty())
.frequencyPenalty(chatModelConfig.frequencyPenalty())
.responseFormat(chatModelConfig.responseFormat().orElse(null));
openAiConfig.organizationId().ifPresent(builder::organizationId);
if (chatModelConfig.maxTokens().isPresent()) {
builder.maxTokens(chatModelConfig.maxTokens().get());
}
return new Supplier<>() {
@Override
public ChatLanguageModel get() {
return builder.build();
}
};
} else {
return new Supplier<>() {
@Override
public ChatLanguageModel get() {
return new DisabledChatLanguageModel();
}
};
}
}
public Supplier<StreamingChatLanguageModel> streamingChatModel(LangChain4jOpenAiConfig runtimeConfig, String modelName) {
LangChain4jOpenAiConfig.OpenAiConfig openAiConfig = correspondingOpenAiConfig(runtimeConfig, modelName);
if (openAiConfig.enableIntegration()) {
String apiKey = openAiConfig.apiKey();
if (DUMMY_KEY.equals(apiKey)) {
throw new ConfigValidationException(createApiKeyConfigProblems(modelName));
}
ChatModelConfig chatModelConfig = openAiConfig.chatModel();
var builder = OpenAiStreamingChatModel.builder()
.baseUrl(openAiConfig.baseUrl())
.apiKey(apiKey)
.timeout(openAiConfig.timeout())
.logRequests(firstOrDefault(false, chatModelConfig.logRequests(), openAiConfig.logRequests()))
.logResponses(firstOrDefault(false, chatModelConfig.logResponses(), openAiConfig.logResponses()))
.modelName(chatModelConfig.modelName())
.temperature(chatModelConfig.temperature())
.topP(chatModelConfig.topP())
.presencePenalty(chatModelConfig.presencePenalty())
.frequencyPenalty(chatModelConfig.frequencyPenalty())
.responseFormat(chatModelConfig.responseFormat().orElse(null));
openAiConfig.organizationId().ifPresent(builder::organizationId);
if (chatModelConfig.maxTokens().isPresent()) {
builder.maxTokens(chatModelConfig.maxTokens().get());
}
return new Supplier<>() {
@Override
public StreamingChatLanguageModel get() {
return builder.build();
}
};
} else {
return new Supplier<>() {
@Override
public StreamingChatLanguageModel get() {
return new DisabledStreamingChatLanguageModel();
}
};
}
}
public Supplier<EmbeddingModel> embeddingModel(LangChain4jOpenAiConfig runtimeConfig, String modelName) {
LangChain4jOpenAiConfig.OpenAiConfig openAiConfig = correspondingOpenAiConfig(runtimeConfig, modelName);
if (openAiConfig.enableIntegration()) {
String apiKeyOpt = openAiConfig.apiKey();
if (DUMMY_KEY.equals(apiKeyOpt)) {
throw new ConfigValidationException(createApiKeyConfigProblems(modelName));
}
EmbeddingModelConfig embeddingModelConfig = openAiConfig.embeddingModel();
var builder = OpenAiEmbeddingModel.builder()
.baseUrl(openAiConfig.baseUrl())
.apiKey(apiKeyOpt)
.timeout(openAiConfig.timeout())
.maxRetries(openAiConfig.maxRetries())
.logRequests(firstOrDefault(false, embeddingModelConfig.logRequests(), openAiConfig.logRequests()))
.logResponses(firstOrDefault(false, embeddingModelConfig.logResponses(), openAiConfig.logResponses()))
.modelName(embeddingModelConfig.modelName());
if (embeddingModelConfig.user().isPresent()) {
builder.user(embeddingModelConfig.user().get());
}
openAiConfig.organizationId().ifPresent(builder::organizationId);
return new Supplier<>() {
@Override
public EmbeddingModel get() {
return builder.build();
}
};
} else {
return new Supplier<>() {
@Override
public EmbeddingModel get() {
return new DisabledEmbeddingModel();
}
};
}
}
public Supplier<ModerationModel> moderationModel(LangChain4jOpenAiConfig runtimeConfig, String modelName) {
LangChain4jOpenAiConfig.OpenAiConfig openAiConfig = correspondingOpenAiConfig(runtimeConfig, modelName);
if (openAiConfig.enableIntegration()) {
String apiKey = openAiConfig.apiKey();
if (DUMMY_KEY.equals(apiKey)) {
throw new ConfigValidationException(createApiKeyConfigProblems(modelName));
}
ModerationModelConfig moderationModelConfig = openAiConfig.moderationModel();
var builder = OpenAiModerationModel.builder()
.baseUrl(openAiConfig.baseUrl())
.apiKey(apiKey)
.timeout(openAiConfig.timeout())
.maxRetries(openAiConfig.maxRetries())
.logRequests(firstOrDefault(false, moderationModelConfig.logRequests(), openAiConfig.logRequests()))
.logResponses(firstOrDefault(false, moderationModelConfig.logResponses(), openAiConfig.logResponses()))
.modelName(moderationModelConfig.modelName());
openAiConfig.organizationId().ifPresent(builder::organizationId);
return new Supplier<>() {
@Override
public ModerationModel get() {
return builder.build();
}
};
} else {
return new Supplier<>() {
@Override
public ModerationModel get() {
return new DisabledModerationModel();
}
};
}
}
public Supplier<ImageModel> imageModel(LangChain4jOpenAiConfig runtimeConfig, String modelName) {
LangChain4jOpenAiConfig.OpenAiConfig openAiConfig = correspondingOpenAiConfig(runtimeConfig, modelName);
if (openAiConfig.enableIntegration()) {
String apiKey = openAiConfig.apiKey();
if (DUMMY_KEY.equals(apiKey)) {
throw new ConfigValidationException(createApiKeyConfigProblems(modelName));
}
ImageModelConfig imageModelConfig = openAiConfig.imageModel();
var builder = QuarkusOpenAiImageModel.builder()
.baseUrl(openAiConfig.baseUrl())
.apiKey(apiKey)
.timeout(openAiConfig.timeout())
.maxRetries(openAiConfig.maxRetries())
.logRequests(firstOrDefault(false, imageModelConfig.logRequests(), openAiConfig.logRequests()))
.logResponses(firstOrDefault(false, imageModelConfig.logResponses(), openAiConfig.logResponses()))
.modelName(imageModelConfig.modelName())
.size(imageModelConfig.size())
.quality(imageModelConfig.quality())
.style(imageModelConfig.style())
.responseFormat(imageModelConfig.responseFormat())
.user(imageModelConfig.user());
openAiConfig.organizationId().ifPresent(builder::organizationId);
// we persist if the directory was set explicitly and the boolean flag was not set to false
// or if the boolean flag was set explicitly to true
Optional<Path> persistDirectory = Optional.empty();
if (imageModelConfig.persist().isPresent()) {
if (imageModelConfig.persist().get()) {
persistDirectory = imageModelConfig.persistDirectory().or(new Supplier<>() {
@Override
public Optional<? extends Path> get() {
return Optional.of(Paths.get(System.getProperty("java.io.tmpdir"), "dall-e-images"));
}
});
}
} else {
if (imageModelConfig.persistDirectory().isPresent()) {
persistDirectory = imageModelConfig.persistDirectory();
}
}
builder.persistDirectory(persistDirectory);
return new Supplier<>() {
@Override
public ImageModel get() {
return builder.build();
}
};
} else {
return new Supplier<>() {
@Override
public ImageModel get() {
return new DisabledImageModel();
}
};
}
}
private LangChain4jOpenAiConfig.OpenAiConfig correspondingOpenAiConfig(LangChain4jOpenAiConfig runtimeConfig,
String modelName) {
LangChain4jOpenAiConfig.OpenAiConfig openAiConfig;
if (NamedModelUtil.isDefault(modelName)) {
openAiConfig = runtimeConfig.defaultConfig();
} else {
openAiConfig = runtimeConfig.namedConfig().get(modelName);
}
return openAiConfig;
}
private ConfigValidationException.Problem[] createApiKeyConfigProblems(String modelName) {
return createConfigProblems("api-key", modelName);
}
private ConfigValidationException.Problem[] createConfigProblems(String key, String modelName) {
return new ConfigValidationException.Problem[] { createConfigProblem(key, modelName) };
}
private ConfigValidationException.Problem createConfigProblem(String key, String modelName) {
return new ConfigValidationException.Problem(String.format(
"SRCFG00014: The config property quarkus.langchain4j.openai%s%s is required but it could not be found in any config source",
NamedModelUtil.isDefault(modelName) ? "." : ("." + modelName + "."), key));
}
public void cleanUp(ShutdownContext shutdown) {
shutdown.addShutdownTask(new Runnable() {
@Override
public void run() {
QuarkusOpenAiClient.clearCache();
}
});
}
}
| [
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder",
"dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder",
"dev.langchain4j.model.openai.OpenAiModerationModel.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((2450, 3312), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 3229), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 3155), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 3083), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 3033), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2969), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2909), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2791), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2676), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2617), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2564), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2528), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4555, 5367), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 5284), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 5210), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 5138), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 5088), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 5024), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 4964), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 4846), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 4731), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 4678), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 4642), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((6642, 7184), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 7119), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 6996), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 
6876), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 6817), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 6764), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 6725), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((8417, 8960), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8894), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8770), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8649), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8590), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8537), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8501), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((10032, 10845), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10794), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10723), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10670), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10613), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10562), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10501), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10382), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10266), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10207), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10154), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10118), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder')] |
package io.quarkiverse.langchain4j.sample;
import java.util.function.Supplier;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
public class MyChatModelSupplier implements Supplier<ChatLanguageModel> {
@Override
public ChatLanguageModel get() {
return OpenAiChatModel.builder()
.apiKey("...")
.build();
}
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((328, 409), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((328, 384), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.tencent.supersonic.headless.core.chat.parser.llm;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.core.config.OptimizationConfig;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq.SqlGenerationMode;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMResp;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.output.Response;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
@Service
public class TwoPassSCSqlGeneration implements SqlGeneration, InitializingBean {
private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");
@Autowired
private ChatLanguageModel chatLanguageModel;
@Autowired
private SqlExamplarLoader sqlExamplarLoader;
@Autowired
private OptimizationConfig optimizationConfig;
@Autowired
private SqlPromptGenerator sqlPromptGenerator;
@Override
public LLMResp generation(LLMReq llmReq, Long dataSetId) {
//1.retriever sqlExamples and generate exampleListPool
keyPipelineLog.info("dataSetId:{},llmReq:{}", dataSetId, llmReq);
List<Map<String, String>> sqlExamples = sqlExamplarLoader.retrieverSqlExamples(llmReq.getQueryText(),
optimizationConfig.getText2sqlExampleNum());
List<List<Map<String, String>>> exampleListPool = sqlPromptGenerator.getExampleCombos(sqlExamples,
optimizationConfig.getText2sqlFewShotsNum(), optimizationConfig.getText2sqlSelfConsistencyNum());
//2.generator linking prompt,and parallel generate response.
List<String> linkingPromptPool = sqlPromptGenerator.generatePromptPool(llmReq, exampleListPool, false);
List<String> linkingResults = new CopyOnWriteArrayList<>();
linkingPromptPool.parallelStream().forEach(
linkingPrompt -> {
Prompt prompt = PromptTemplate.from(JsonUtil.toString(linkingPrompt)).apply(new HashMap<>());
keyPipelineLog.info("step one request prompt:{}", prompt.toSystemMessage());
Response<AiMessage> linkingResult = chatLanguageModel.generate(prompt.toSystemMessage());
String result = linkingResult.content().text();
keyPipelineLog.info("step one model response:{}", result);
linkingResults.add(OutputFormat.getSchemaLink(result));
}
);
List<String> sortedList = OutputFormat.formatList(linkingResults);
Pair<String, Map<String, Double>> linkingMap = OutputFormat.selfConsistencyVote(sortedList);
//3.generator sql prompt,and parallel generate response.
List<String> sqlPromptPool = sqlPromptGenerator.generateSqlPromptPool(llmReq, sortedList, exampleListPool);
List<String> sqlTaskPool = new CopyOnWriteArrayList<>();
sqlPromptPool.parallelStream().forEach(sqlPrompt -> {
Prompt linkingPrompt = PromptTemplate.from(JsonUtil.toString(sqlPrompt)).apply(new HashMap<>());
keyPipelineLog.info("step two request prompt:{}", linkingPrompt.toSystemMessage());
Response<AiMessage> sqlResult = chatLanguageModel.generate(linkingPrompt.toSystemMessage());
String result = sqlResult.content().text();
keyPipelineLog.info("step two model response:{}", result);
sqlTaskPool.add(result);
});
//4.format response.
Pair<String, Map<String, Double>> sqlMapPair = OutputFormat.selfConsistencyVote(sqlTaskPool);
keyPipelineLog.info("linkingMap:{} sqlMap:{}", linkingMap, sqlMapPair.getRight());
LLMResp llmResp = new LLMResp();
llmResp.setQuery(llmReq.getQueryText());
llmResp.setSqlRespMap(OutputFormat.buildSqlRespMap(sqlExamples, sqlMapPair.getRight()));
return llmResp;
}
@Override
public void afterPropertiesSet() {
SqlGenerationFactory.addSqlGenerationForFactory(SqlGenerationMode.TWO_PASS_AUTO_COT_SELF_CONSISTENCY, this);
}
}
| [
"dev.langchain4j.model.input.PromptTemplate.from"
] | [((2481, 2557), 'dev.langchain4j.model.input.PromptTemplate.from'), ((3537, 3609), 'dev.langchain4j.model.input.PromptTemplate.from')] |
package org.example;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.List;
public class _09_AIServices_06_ChatMemoryPersisted {
public static void main(String[] args) {
OpenAiChatModel model = OpenAiChatModel.withApiKey(ApiKeys.OPENAI_DEMO);
FileStore store = new FileStore();
ChatMemoryProvider provider = memoryId -> MessageWindowChatMemory.builder()
.id(memoryId)
.maxMessages(10)
.chatMemoryStore(store)
.build();
ChatAssistant assistant = AiServices.builder(ChatAssistant.class)
.chatLanguageModel(model)
.chatMemoryProvider(provider)
.build();
System.out.println(assistant.chat(1, "Hello my name is Michael"));
System.out.println(assistant.chat(2, "Hello my name is Karl"));
// System.out.println(assistant.chat(1, "What is my name?"));
// System.out.println(assistant.chat(2, "What is my name?"));
}
}
class FileStore implements ChatMemoryStore {
public static final String PATH = "src/main/resources/messages_%s.txt";
@Override
public List<ChatMessage> getMessages(Object memoryId) {
List<ChatMessage> chatMessages = new ArrayList<>();
String file = PATH.formatted(memoryId);
try {
if (!Files.exists(Paths.get(file))) {
Files.createFile(Paths.get(file));
}
for (String s : Files.readAllLines(Paths.get(file))) {
chatMessages.add(UserMessage.from(s));
}
} catch (IOException e) {
throw new RuntimeException(e);
}
return chatMessages;
}
@Override
public void updateMessages(Object memoryId, List<ChatMessage> messages) {
String file = PATH.formatted(memoryId);
for (ChatMessage message : messages) {
try {
Files.writeString(Paths.get(file), message.text() + "\n", StandardOpenOption.APPEND);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
@Override
public void deleteMessages(Object memoryId) {
System.out.println("Not implemented");
}
} | [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((843, 1004), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((843, 979), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((843, 939), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((843, 906), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1041, 1193), 'dev.langchain4j.service.AiServices.builder'), ((1041, 1168), 'dev.langchain4j.service.AiServices.builder'), ((1041, 1122), 'dev.langchain4j.service.AiServices.builder')] |
package org.agoncal.fascicle.langchain4j.vectordb.pgvector;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore;
import java.util.List;
// tag::adocSkip[]
/**
* @author Antonio Goncalves
* http://www.antoniogoncalves.org
* --
*/
// end::adocSkip[]
public class MusicianService {
public static void main(String[] args) {
MusicianService musicianService = new MusicianService();
musicianService.usePGVectorToStoreEmbeddings();
}
public void usePGVectorToStoreEmbeddings() {
System.out.println("### usePGVectorToStoreEmbeddings");
// tag::adocSnippet[]
EmbeddingStore<TextSegment> embeddingStore =
PgVectorEmbeddingStore.builder()
.host("localhost")
.port(5432)
.createTable(true)
.dropTableFirst(true)
.dimension(384)
.table("langchain4j_collection")
.user("agoncal")
.password("agoncal")
.database("agoncal")
.build();
// end::adocSnippet[]
EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
TextSegment segment1 = TextSegment.from("I've been to France twice.");
Embedding embedding1 = embeddingModel.embed(segment1).content();
embeddingStore.add(embedding1, segment1);
TextSegment segment2 = TextSegment.from("New Delhi is the capital of India.");
Embedding embedding2 = embeddingModel.embed(segment2).content();
embeddingStore.add(embedding2, segment2);
Embedding queryEmbedding = embeddingModel.embed("Did you ever travel abroad?").content();
List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(queryEmbedding, 1);
EmbeddingMatch<TextSegment> embeddingMatch = relevant.get(0);
System.out.println(embeddingMatch.score());
System.out.println(embeddingMatch.embedded().text());
}
}
| [
"dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder"
] | [((989, 1290), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1273), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1244), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1215), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1190), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1149), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1125), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1095), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1068), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1048), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder')] |
package com.ramesh.langchain;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static java.time.Duration.ofSeconds;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2QuantizedEmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
/***
* This project demonstrates how to use LangChain to ingest data from a document and
* get responses for prompts from the same, by creating a LangChain Chain
*/
public class ChainWithDocumentLive {
// Open AI Key and Chat GPT Model to use
public static String OPENAI_API_KEY = "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";
public static String OPENAI_MODEL = "gpt-3.5-turbo";
public static void main(String[] args) {
// embedding model to yse
EmbeddingModel embeddingModel = new AllMiniLmL6V2QuantizedEmbeddingModel();
// embeddings will be stored in memory
EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
//Creating instance of EmbeddingStoreIngestor
System.out.println("Creating instance of EmbeddingStoreIngestor...");
EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
.documentSplitter(DocumentSplitters.recursive(500, 0))
.embeddingModel(embeddingModel)
.embeddingStore(embeddingStore)
.build();
// ingesting input data
System.out.println("Loading content from simpsons_adventures.txt and ingesting...");
Document document = loadDocument(".\\simpsons_adventures.txt");
ingestor.ingest(document);
// building the chat model
ChatLanguageModel chatModel = OpenAiChatModel.builder()
.apiKey(OPENAI_API_KEY)
.timeout(ofSeconds(60))
.build();
// Building LangChain with Embeddings Retriever
System.out.println("Building LangChain with Embeddings Retriever...");
ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
.chatLanguageModel(chatModel)
.retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.promptTemplate(PromptTemplate.from("Answer the following question to the best of your ability: {{question}}\n\nBase your answer on the following information:\n{{information}}"))
.build();
// prompting ChatGPT
System.out.println("Prompting ChatGPT \"Who is Simpson?\"...");
System.out.println("\nFetching response from ChatGPT via the created LangChain...\n");
// executing the LangChain chain
String answer = chain.execute("Who is Simpson?");
System.out.println(answer);
}
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1849, 2057), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1849, 2036), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1849, 1992), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1849, 1948), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2366, 2484), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2366, 2463), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2366, 2427), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2667, 3113), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2667, 3092), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2667, 2901), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2667, 2832), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2667, 2747), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package io.quarkiverse.langchain4j.samples;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
import java.util.List;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkiverse.langchain4j.pinecone.PineconeEmbeddingStore;
@ApplicationScoped
public class IngestorExampleWithPinecone {
/**
* The embedding store (the database).
* The bean is provided by the quarkus-langchain4j-pinecone extension.
*/
@Inject
PineconeEmbeddingStore store;
/**
* The embedding model (how is computed the vector of a document).
* The bean is provided by the LLM (like openai) extension.
*/
@Inject
EmbeddingModel embeddingModel;
public void ingest(List<Document> documents) {
EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
.embeddingStore(store)
.embeddingModel(embeddingModel)
.documentSplitter(recursive(500, 0))
.build();
// Warning - this can take a long time...
ingestor.ingest(documents);
}
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1005, 1202), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1005, 1177), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1005, 1124), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1005, 1076), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
import dev.langchain4j.data.document.FileSystemDocumentLoader;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import static java.util.stream.Collectors.joining;
public class _03_Retrieval {
private static final String RETRIEVER_DOCUMENT_NAME = "";
public static void main(String[] args) {
var openAiKey = System.getenv("OPENAI_API_KEY");
var embeddingModel = OpenAiEmbeddingModel.withApiKey(openAiKey);
var embeddingStore = new InMemoryEmbeddingStore<TextSegment>();
// 0 - Ingesting the document and store in vectorized form
var ingestor = EmbeddingStoreIngestor.builder()
.documentSplitter(DocumentSplitters.recursive(500, 0))
.embeddingModel(embeddingModel)
.embeddingStore(embeddingStore)
.build();
var filePath = toPath(RETRIEVER_DOCUMENT_NAME);
var document = FileSystemDocumentLoader.loadDocument(filePath);
ingestor.ingest(document);
var chatModel = OpenAiChatModel.withApiKey(openAiKey);
var chatMemory = MessageWindowChatMemory.withMaxMessages(10);
var retriever = EmbeddingStoreRetriever.from(embeddingStore, embeddingModel);
var promptTemplate = PromptTemplate.from("""
Answer the following question to the best of your ability: {{question}}
Base your answer on the following information:
{{information}}""");
try (Scanner scanner = new Scanner(System.in)) {
while (true) {
System.out.println("\nEnter your question: ");
// 1 - Retrieving the question from the user
String question = scanner.nextLine();
if (question.equals("exit")) {
break;
}
// 2, 3 - Retrieving the most relevant segments according to the question
var relevantSegments = retriever.findRelevant(question);
var prompt = promptTemplate.apply(
Map.of(
"question", question,
"information", format(relevantSegments)));
chatMemory.add(prompt.toUserMessage());
// 4 - Send the prompt to the model
var response = chatModel.generate(chatMemory.messages());
chatMemory.add(response.content());
// 5 - Printing answer to the user
System.out.println(response.content().text());
System.out.println("\n\n########### TOKEN USAGE ############\n");
System.out.println(response.tokenUsage());
}
}
}
private static String format(List<TextSegment> relevantSegments) {
return relevantSegments.stream()
.map(TextSegment::text)
.map(segment -> "..." + segment + "...")
.collect(joining("\n\n"));
}
private static Path toPath(String fileName) {
try {
URL fileUrl = _03_Retrieval.class.getResource(fileName);
return Paths.get(fileUrl.toURI());
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
}
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1262, 1486), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1262, 1461), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1262, 1413), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1262, 1365), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package org.example;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.structured.Description;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.SystemMessage;
import dev.langchain4j.service.UserMessage;
import java.util.List;
public class _09_AIServices_04_PokemonTrainer {
public static void main(String[] args) {
// Zet logger op debug
OpenAiChatModel model = OpenAiChatModel.builder()
.apiKey(ApiKeys.OPENAI_DEMO)
.logRequests(true)
.build();
PokemonTrainerGeneratorService trainerGenerator = AiServices.create(PokemonTrainerGeneratorService.class, model);
Trainer trainer = trainerGenerator.generate("Generate a low level trainer named 'Kelvin' with 2 bug and 2 fire pokemon");
System.out.println(trainer);
}
}
interface PokemonTrainerGeneratorService {
@SystemMessage("You generate random pokemon trainers with random pokemon, in accordance to the user message")
Trainer generate(@UserMessage String text);
}
record Trainer(String name, List<Pokemon> team) {
}
record Pokemon(String name
// , @Description("All uppercase") String type
, String type
, int level
, int hp
, @Description("Random number of moves between 1 and 4") List<String> moves)
{} | [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((450, 580), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((450, 555), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((450, 520), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
import dev.ai4j.openai4j.Model;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
public class _00_Model {
public static void main(String[] args) {
String openAiKey = System.getenv("OPENAI_API_KEY");
ChatLanguageModel chatModel = OpenAiChatModel.builder()
.modelName(Model.GPT_3_5_TURBO.stringValue())
.apiKey(openAiKey)
.build();
var prompt = "Write hello world example in Java printing 'Hello TDC Future 2023'";
var response = chatModel.generate(UserMessage.from(prompt));
System.out.println(response.content().text());
System.out.println("\n\n########### TOKEN USAGE ############\n");
System.out.println(response.tokenUsage());
}
} | [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((359, 506), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((359, 481), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((359, 446), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((412, 445), 'dev.ai4j.openai4j.Model.GPT_3_5_TURBO.stringValue')] |
package com.example.application.services;
import com.vaadin.flow.server.auth.AnonymousAllowed;
import dev.hilla.BrowserCallable;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.TokenStream;
import jakarta.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Value;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks;
@BrowserCallable
@AnonymousAllowed
public class ChatService {
@Value("${openai.api.key}")
private String OPENAI_API_KEY;
private Assistant assistant;
private StreamingAssistant streamingAssistant;
interface Assistant {
String chat(String message);
}
interface StreamingAssistant {
TokenStream chat(String message);
}
@PostConstruct
public void init() {
var memory = TokenWindowChatMemory.withMaxTokens(2000, new OpenAiTokenizer("gpt-3.5-turbo"));
assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(OpenAiChatModel.withApiKey(OPENAI_API_KEY))
.chatMemory(memory)
.build();
streamingAssistant = AiServices.builder(StreamingAssistant.class)
.streamingChatLanguageModel(OpenAiStreamingChatModel.withApiKey(OPENAI_API_KEY))
.chatMemory(memory)
.build();
}
public String chat(String message) {
return assistant.chat(message);
}
public Flux<String> chatStream(String message) {
Sinks.Many<String> sink = Sinks.many().unicast().onBackpressureBuffer();
streamingAssistant.chat(message)
.onNext(sink::tryEmitNext)
.onComplete(sink::tryEmitComplete)
.onError(sink::tryEmitError)
.start();
return sink.asFlux();
}
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((1152, 1327), 'dev.langchain4j.service.AiServices.builder'), ((1152, 1302), 'dev.langchain4j.service.AiServices.builder'), ((1152, 1266), 'dev.langchain4j.service.AiServices.builder'), ((1359, 1561), 'dev.langchain4j.service.AiServices.builder'), ((1359, 1536), 'dev.langchain4j.service.AiServices.builder'), ((1359, 1500), 'dev.langchain4j.service.AiServices.builder'), ((1745, 1790), 'reactor.core.publisher.Sinks.many'), ((1745, 1767), 'reactor.core.publisher.Sinks.many')] |
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.input.structured.StructuredPromptProcessor;
import dev.langchain4j.model.openai.OpenAiChatModel;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static java.time.Duration.ofSeconds;
import static java.util.Arrays.asList;
public class _03_PromptTemplate {
static class Simple_Prompt_Template_Example {
public static void main(String[] args) {
ChatLanguageModel model = OpenAiChatModel.builder()
.apiKey(ApiKeys.OPENAI_API_KEY)
.timeout(ofSeconds(60))
.build();
String template = "Create a recipe for a {{dishType}} with the following ingredients: {{ingredients}}";
PromptTemplate promptTemplate = PromptTemplate.from(template);
Map<String, Object> variables = new HashMap<>();
variables.put("dishType", "oven dish");
variables.put("ingredients", "potato, tomato, feta, olive oil");
Prompt prompt = promptTemplate.apply(variables);
String response = model.generate(prompt.text());
System.out.println(response);
}
}
static class Structured_Prompt_Template_Example {
@StructuredPrompt({
"Create a recipe of a {{dish}} that can be prepared using only {{ingredients}}.",
"Structure your answer in the following way:",
"Recipe name: ...",
"Description: ...",
"Preparation time: ...",
"Required ingredients:",
"- ...",
"- ...",
"Instructions:",
"- ...",
"- ..."
})
static class CreateRecipePrompt {
String dish;
List<String> ingredients;
CreateRecipePrompt(String dish, List<String> ingredients) {
this.dish = dish;
this.ingredients = ingredients;
}
}
public static void main(String[] args) {
ChatLanguageModel model = OpenAiChatModel.builder()
.apiKey(ApiKeys.OPENAI_API_KEY)
.timeout(ofSeconds(60))
.build();
Structured_Prompt_Template_Example.CreateRecipePrompt createRecipePrompt = new Structured_Prompt_Template_Example.CreateRecipePrompt(
"salad",
asList("cucumber", "tomato", "feta", "onion", "olives")
);
Prompt prompt = StructuredPromptProcessor.toPrompt(createRecipePrompt);
String recipe = model.generate(prompt.text());
System.out.println(recipe);
}
}
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((668, 818), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((668, 789), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((668, 745), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2305, 2455), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2305, 2426), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2305, 2382), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gemini.workshop;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.vertexai.VertexAiGeminiChatModel;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ImageContent;
import dev.langchain4j.data.message.TextContent;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.output.Response;
public class Step3_Multimodal {
static final String CAT_IMAGE_URL =
"https://upload.wikimedia.org/wikipedia/commons/e/e9/" +
"Felis_silvestris_silvestris_small_gradual_decrease_of_quality.png";
public static void main(String[] args) {
ChatLanguageModel model = VertexAiGeminiChatModel.builder()
.project(System.getenv("PROJECT_ID"))
.location(System.getenv("LOCATION"))
.modelName("gemini-1.0-pro-vision")
.build();
UserMessage userMessage = UserMessage.from(
ImageContent.from(CAT_IMAGE_URL),
TextContent.from("Describe the picture")
);
Response<AiMessage> response = model.generate(userMessage);
System.out.println(response.content().text());
}
}
| [
"dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder"
] | [((1277, 1478), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1277, 1457), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1277, 1409), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1277, 1360), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder')] |
package dev.langchain4j.model.openai;
import dev.ai4j.openai4j.chat.*;
import dev.ai4j.openai4j.completion.CompletionChoice;
import dev.ai4j.openai4j.completion.CompletionResponse;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.Tokenizer;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.output.TokenUsage;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import static dev.langchain4j.model.openai.InternalOpenAiHelper.finishReasonFrom;
import static java.util.Collections.singletonList;
import static java.util.stream.Collectors.toList;
/**
* This class needs to be thread safe because it is called when a streaming result comes back
* and there is no guarantee that this thread will be the same as the one that initiated the request,
* in fact it almost certainly won't be.
*/
public class OpenAiStreamingResponseBuilder {
private final StringBuffer contentBuilder = new StringBuffer();
private final StringBuffer toolNameBuilder = new StringBuffer();
private final StringBuffer toolArgumentsBuilder = new StringBuffer();
private final Map<Integer, ToolExecutionRequestBuilder> indexToToolExecutionRequestBuilder = new ConcurrentHashMap<>();
private volatile String finishReason;
private final Integer inputTokenCount;
public OpenAiStreamingResponseBuilder(Integer inputTokenCount) {
this.inputTokenCount = inputTokenCount;
}
public void append(ChatCompletionResponse partialResponse) {
if (partialResponse == null) {
return;
}
List<ChatCompletionChoice> choices = partialResponse.choices();
if (choices == null || choices.isEmpty()) {
return;
}
ChatCompletionChoice chatCompletionChoice = choices.get(0);
if (chatCompletionChoice == null) {
return;
}
String finishReason = chatCompletionChoice.finishReason();
if (finishReason != null) {
this.finishReason = finishReason;
}
Delta delta = chatCompletionChoice.delta();
if (delta == null) {
return;
}
String content = delta.content();
if (content != null) {
contentBuilder.append(content);
return;
}
if (delta.functionCall() != null) {
FunctionCall functionCall = delta.functionCall();
if (functionCall.name() != null) {
toolNameBuilder.append(functionCall.name());
}
if (functionCall.arguments() != null) {
toolArgumentsBuilder.append(functionCall.arguments());
}
}
if (delta.toolCalls() != null && !delta.toolCalls().isEmpty()) {
ToolCall toolCall = delta.toolCalls().get(0);
ToolExecutionRequestBuilder toolExecutionRequestBuilder
= indexToToolExecutionRequestBuilder.computeIfAbsent(toolCall.index(), idx -> new ToolExecutionRequestBuilder());
if (toolCall.id() != null) {
toolExecutionRequestBuilder.idBuilder.append(toolCall.id());
}
FunctionCall functionCall = toolCall.function();
if (functionCall.name() != null) {
toolExecutionRequestBuilder.nameBuilder.append(functionCall.name());
}
if (functionCall.arguments() != null) {
toolExecutionRequestBuilder.argumentsBuilder.append(functionCall.arguments());
}
}
}
public void append(CompletionResponse partialResponse) {
if (partialResponse == null) {
return;
}
List<CompletionChoice> choices = partialResponse.choices();
if (choices == null || choices.isEmpty()) {
return;
}
CompletionChoice completionChoice = choices.get(0);
if (completionChoice == null) {
return;
}
String finishReason = completionChoice.finishReason();
if (finishReason != null) {
this.finishReason = finishReason;
}
String token = completionChoice.text();
if (token != null) {
contentBuilder.append(token);
}
}
public Response<AiMessage> build(Tokenizer tokenizer, boolean forcefulToolExecution) {
String content = contentBuilder.toString();
if (!content.isEmpty()) {
return Response.from(
AiMessage.from(content),
tokenUsage(content, tokenizer),
finishReasonFrom(finishReason)
);
}
String toolName = toolNameBuilder.toString();
if (!toolName.isEmpty()) {
ToolExecutionRequest toolExecutionRequest = ToolExecutionRequest.builder()
.name(toolName)
.arguments(toolArgumentsBuilder.toString())
.build();
return Response.from(
AiMessage.from(toolExecutionRequest),
tokenUsage(singletonList(toolExecutionRequest), tokenizer, forcefulToolExecution),
finishReasonFrom(finishReason)
);
}
if (!indexToToolExecutionRequestBuilder.isEmpty()) {
List<ToolExecutionRequest> toolExecutionRequests = indexToToolExecutionRequestBuilder.values().stream()
.map(it -> ToolExecutionRequest.builder()
.id(it.idBuilder.toString())
.name(it.nameBuilder.toString())
.arguments(it.argumentsBuilder.toString())
.build())
.collect(toList());
return Response.from(
AiMessage.from(toolExecutionRequests),
tokenUsage(toolExecutionRequests, tokenizer, forcefulToolExecution),
finishReasonFrom(finishReason)
);
}
return null;
}
private TokenUsage tokenUsage(String content, Tokenizer tokenizer) {
if (tokenizer == null) {
return null;
}
int outputTokenCount = tokenizer.estimateTokenCountInText(content);
return new TokenUsage(inputTokenCount, outputTokenCount);
}
private TokenUsage tokenUsage(List<ToolExecutionRequest> toolExecutionRequests, Tokenizer tokenizer, boolean forcefulToolExecution) {
if (tokenizer == null) {
return null;
}
int outputTokenCount = 0;
if (forcefulToolExecution) {
// OpenAI calculates output tokens differently when tool is executed forcefully
for (ToolExecutionRequest toolExecutionRequest : toolExecutionRequests) {
outputTokenCount += tokenizer.estimateTokenCountInForcefulToolExecutionRequest(toolExecutionRequest);
}
} else {
outputTokenCount = tokenizer.estimateTokenCountInToolExecutionRequests(toolExecutionRequests);
}
return new TokenUsage(inputTokenCount, outputTokenCount);
}
private static class ToolExecutionRequestBuilder {
private final StringBuffer idBuilder = new StringBuffer();
private final StringBuffer nameBuilder = new StringBuffer();
private final StringBuffer argumentsBuilder = new StringBuffer();
}
}
| [
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder"
] | [((4860, 5019), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((4860, 4990), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((4860, 4926), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((5501, 5757), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((5501, 5720), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((5501, 5649), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((5501, 5588), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')] |
package io.quarkiverse.langchain4j.huggingface;
import static java.util.stream.Collectors.joining;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.time.Duration;
import java.util.List;
import java.util.Optional;
import java.util.OptionalDouble;
import java.util.OptionalInt;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.huggingface.client.HuggingFaceClient;
import dev.langchain4j.model.huggingface.client.Options;
import dev.langchain4j.model.huggingface.client.Parameters;
import dev.langchain4j.model.huggingface.client.TextGenerationRequest;
import dev.langchain4j.model.huggingface.client.TextGenerationResponse;
import dev.langchain4j.model.huggingface.spi.HuggingFaceClientFactory;
import dev.langchain4j.model.output.Response;
/**
* This is a Quarkus specific version of the HuggingFace model.
* <p>
* TODO: remove this in the future when the stock {@link dev.langchain4j.model.huggingface.HuggingFaceChatModel}
* has been updated to fit our needs (i.e. allowing {@code returnFullText} to be null and making {code accessToken} optional)
*/
public class QuarkusHuggingFaceChatModel implements ChatLanguageModel {
public static final QuarkusHuggingFaceClientFactory CLIENT_FACTORY = new QuarkusHuggingFaceClientFactory();
private final HuggingFaceClient client;
private final Double temperature;
private final Integer maxNewTokens;
private final Boolean returnFullText;
private final Boolean waitForModel;
private final Optional<Boolean> doSample;
private final OptionalDouble topP;
private final OptionalInt topK;
private final OptionalDouble repetitionPenalty;
private QuarkusHuggingFaceChatModel(Builder builder) {
this.client = CLIENT_FACTORY.create(builder, new HuggingFaceClientFactory.Input() {
@Override
public String apiKey() {
return builder.accessToken;
}
@Override
public String modelId() {
throw new UnsupportedOperationException("Should not be called");
}
@Override
public Duration timeout() {
return builder.timeout;
}
}, builder.url);
this.temperature = builder.temperature;
this.maxNewTokens = builder.maxNewTokens;
this.returnFullText = builder.returnFullText;
this.waitForModel = builder.waitForModel;
this.doSample = builder.doSample;
this.topP = builder.topP;
this.topK = builder.topK;
this.repetitionPenalty = builder.repetitionPenalty;
}
public static Builder builder() {
return new Builder();
}
@Override
public Response<AiMessage> generate(List<ChatMessage> messages) {
Parameters.Builder builder = Parameters.builder()
.temperature(temperature)
.maxNewTokens(maxNewTokens)
.returnFullText(returnFullText);
doSample.ifPresent(builder::doSample);
topK.ifPresent(builder::topK);
topP.ifPresent(builder::topP);
repetitionPenalty.ifPresent(builder::repetitionPenalty);
Parameters parameters = builder
.build();
TextGenerationRequest request = TextGenerationRequest.builder()
.inputs(messages.stream()
.map(ChatMessage::text)
.collect(joining("\n")))
.parameters(parameters)
.options(Options.builder()
.waitForModel(waitForModel)
.build())
.build();
TextGenerationResponse textGenerationResponse = client.chat(request);
return Response.from(AiMessage.from(textGenerationResponse.generatedText()));
}
@Override
public Response<AiMessage> generate(List<ChatMessage> messages, List<ToolSpecification> toolSpecifications) {
throw new IllegalArgumentException("Tools are currently not supported for HuggingFace models");
}
@Override
public Response<AiMessage> generate(List<ChatMessage> messages, ToolSpecification toolSpecification) {
throw new IllegalArgumentException("Tools are currently not supported for HuggingFace models");
}
public static final class Builder {
private String accessToken;
private Duration timeout = Duration.ofSeconds(15);
private Double temperature;
private Integer maxNewTokens;
private Boolean returnFullText;
private Boolean waitForModel = true;
private URI url;
private Optional<Boolean> doSample;
private OptionalInt topK;
private OptionalDouble topP;
private OptionalDouble repetitionPenalty;
public boolean logResponses;
public boolean logRequests;
public Builder accessToken(String accessToken) {
this.accessToken = accessToken;
return this;
}
public Builder url(URL url) {
try {
this.url = url.toURI();
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
return this;
}
public Builder timeout(Duration timeout) {
this.timeout = timeout;
return this;
}
public Builder temperature(Double temperature) {
this.temperature = temperature;
return this;
}
public Builder maxNewTokens(Integer maxNewTokens) {
this.maxNewTokens = maxNewTokens;
return this;
}
public Builder returnFullText(Boolean returnFullText) {
this.returnFullText = returnFullText;
return this;
}
public Builder waitForModel(Boolean waitForModel) {
this.waitForModel = waitForModel;
return this;
}
public Builder doSample(Optional<Boolean> doSample) {
this.doSample = doSample;
return this;
}
public Builder topK(OptionalInt topK) {
this.topK = topK;
return this;
}
public Builder topP(OptionalDouble topP) {
this.topP = topP;
return this;
}
public Builder repetitionPenalty(OptionalDouble repetitionPenalty) {
this.repetitionPenalty = repetitionPenalty;
return this;
}
public QuarkusHuggingFaceChatModel build() {
return new QuarkusHuggingFaceChatModel(this);
}
public Builder logRequests(boolean logRequests) {
this.logRequests = logRequests;
return this;
}
public Builder logResponses(boolean logResponses) {
this.logResponses = logResponses;
return this;
}
}
}
| [
"dev.langchain4j.model.huggingface.client.Parameters.builder",
"dev.langchain4j.model.huggingface.client.TextGenerationRequest.builder",
"dev.langchain4j.model.huggingface.client.Options.builder"
] | [((2990, 3144), 'dev.langchain4j.model.huggingface.client.Parameters.builder'), ((2990, 3096), 'dev.langchain4j.model.huggingface.client.Parameters.builder'), ((2990, 3052), 'dev.langchain4j.model.huggingface.client.Parameters.builder'), ((3444, 3808), 'dev.langchain4j.model.huggingface.client.TextGenerationRequest.builder'), ((3444, 3783), 'dev.langchain4j.model.huggingface.client.TextGenerationRequest.builder'), ((3444, 3654), 'dev.langchain4j.model.huggingface.client.TextGenerationRequest.builder'), ((3444, 3614), 'dev.langchain4j.model.huggingface.client.TextGenerationRequest.builder'), ((3680, 3782), 'dev.langchain4j.model.huggingface.client.Options.builder'), ((3680, 3749), 'dev.langchain4j.model.huggingface.client.Options.builder')] |
package io.quarkiverse.langchain4j.runtime.aiservice;
import static dev.langchain4j.data.message.UserMessage.userMessage;
import static dev.langchain4j.internal.Exceptions.runtime;
import static dev.langchain4j.service.AiServices.removeToolMessages;
import static dev.langchain4j.service.AiServices.verifyModerationIfNeeded;
import static dev.langchain4j.service.ServiceOutputParser.parse;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.function.Consumer;
import java.util.function.Function;
import org.jboss.logging.Logger;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolExecutor;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.ToolExecutionResultMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.input.structured.StructuredPromptProcessor;
import dev.langchain4j.model.moderation.Moderation;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.output.TokenUsage;
import dev.langchain4j.rag.query.Metadata;
import dev.langchain4j.service.AiServiceContext;
import dev.langchain4j.service.AiServiceTokenStream;
import dev.langchain4j.service.TokenStream;
import io.quarkiverse.langchain4j.audit.Audit;
import io.quarkiverse.langchain4j.audit.AuditService;
import io.quarkus.arc.Arc;
import io.quarkus.arc.ArcContainer;
import io.quarkus.arc.ManagedContext;
import io.smallrye.mutiny.Multi;
import io.smallrye.mutiny.infrastructure.Infrastructure;
import io.smallrye.mutiny.subscription.MultiEmitter;
/**
* Provides the basic building blocks that the generated Interface methods call into
*/
public class AiServiceMethodImplementationSupport {
private static final Logger log = Logger.getLogger(AiServiceMethodImplementationSupport.class);
private static final int MAX_SEQUENTIAL_TOOL_EXECUTIONS = 10;
/**
* This method is called by the implementations of each ai service method.
*/
public Object implement(Input input) {
QuarkusAiServiceContext context = input.context;
AiServiceMethodCreateInfo createInfo = input.createInfo;
Object[] methodArgs = input.methodArgs;
AuditService auditService = context.auditService;
Audit audit = null;
if (auditService != null) {
audit = auditService.create(new Audit.CreateInfo(createInfo.getInterfaceName(), createInfo.getMethodName(),
methodArgs, createInfo.getMemoryIdParamPosition()));
}
// TODO: add validation
try {
var result = doImplement(createInfo, methodArgs, context, audit);
if (audit != null) {
audit.onCompletion(result);
auditService.complete(audit);
}
return result;
} catch (Exception e) {
log.errorv(e, "Execution of {0}#{1} failed", createInfo.getInterfaceName(), createInfo.getMethodName());
if (audit != null) {
audit.onFailure(e);
auditService.complete(audit);
}
throw e;
}
}
private static Object doImplement(AiServiceMethodCreateInfo createInfo, Object[] methodArgs,
QuarkusAiServiceContext context, Audit audit) {
Optional<SystemMessage> systemMessage = prepareSystemMessage(createInfo, methodArgs);
UserMessage userMessage = prepareUserMessage(context, createInfo, methodArgs);
if (audit != null) {
audit.initialMessages(systemMessage, userMessage);
}
Object memoryId = memoryId(createInfo, methodArgs, context.chatMemoryProvider != null);
if (context.retrievalAugmentor != null) { // TODO extract method/class
List<ChatMessage> chatMemory = context.hasChatMemory()
? context.chatMemory(memoryId).messages()
: null;
Metadata metadata = Metadata.from(userMessage, memoryId, chatMemory);
userMessage = context.retrievalAugmentor.augment(userMessage, metadata);
}
// TODO give user ability to provide custom OutputParser
String outputFormatInstructions = createInfo.getUserMessageInfo().getOutputFormatInstructions();
userMessage = UserMessage.from(userMessage.text() + outputFormatInstructions);
if (context.hasChatMemory()) {
ChatMemory chatMemory = context.chatMemory(memoryId);
if (systemMessage.isPresent()) {
chatMemory.add(systemMessage.get());
}
chatMemory.add(userMessage);
}
List<ChatMessage> messages;
if (context.hasChatMemory()) {
messages = context.chatMemory(memoryId).messages();
} else {
messages = new ArrayList<>();
systemMessage.ifPresent(messages::add);
messages.add(userMessage);
}
Class<?> returnType = createInfo.getReturnType();
if (returnType.equals(TokenStream.class)) {
return new AiServiceTokenStream(messages, context, memoryId);
}
if (returnType.equals(Multi.class)) {
return Multi.createFrom().emitter(new Consumer<MultiEmitter<? super String>>() {
@Override
public void accept(MultiEmitter<? super String> em) {
new AiServiceTokenStream(messages, context, memoryId)
.onNext(em::emit)
.onComplete(new Consumer<Response<AiMessage>>() {
@Override
public void accept(Response<AiMessage> message) {
em.complete();
}
})
.onError(em::fail)
.start();
}
});
}
Future<Moderation> moderationFuture = triggerModerationIfNeeded(context, createInfo, messages);
log.debug("Attempting to obtain AI response");
Response<AiMessage> response = context.toolSpecifications == null
? context.chatModel.generate(messages)
: context.chatModel.generate(messages, context.toolSpecifications);
log.debug("AI response obtained");
if (audit != null) {
audit.addLLMToApplicationMessage(response);
}
TokenUsage tokenUsageAccumulator = response.tokenUsage();
verifyModerationIfNeeded(moderationFuture);
int executionsLeft = MAX_SEQUENTIAL_TOOL_EXECUTIONS;
while (true) {
if (executionsLeft-- == 0) {
throw runtime("Something is wrong, exceeded %s sequential tool executions",
MAX_SEQUENTIAL_TOOL_EXECUTIONS);
}
AiMessage aiMessage = response.content();
if (context.hasChatMemory()) {
context.chatMemory(memoryId).add(response.content());
}
if (!aiMessage.hasToolExecutionRequests()) {
break;
}
ChatMemory chatMemory = context.chatMemory(memoryId);
for (ToolExecutionRequest toolExecutionRequest : aiMessage.toolExecutionRequests()) {
log.debugv("Attempting to execute tool {0}", toolExecutionRequest);
ToolExecutor toolExecutor = context.toolExecutors.get(toolExecutionRequest.name());
if (toolExecutor == null) {
throw runtime("Tool executor %s not found", toolExecutionRequest.name());
}
String toolExecutionResult = toolExecutor.execute(toolExecutionRequest, memoryId);
log.debugv("Result of {0} is '{1}'", toolExecutionRequest, toolExecutionResult);
ToolExecutionResultMessage toolExecutionResultMessage = ToolExecutionResultMessage.from(
toolExecutionRequest,
toolExecutionResult);
if (audit != null) {
audit.addApplicationToLLMMessage(toolExecutionResultMessage);
}
chatMemory.add(toolExecutionResultMessage);
}
log.debug("Attempting to obtain AI response");
response = context.chatModel.generate(chatMemory.messages(), context.toolSpecifications);
log.debug("AI response obtained");
if (audit != null) {
audit.addLLMToApplicationMessage(response);
}
tokenUsageAccumulator = tokenUsageAccumulator.add(response.tokenUsage());
}
response = Response.from(response.content(), tokenUsageAccumulator, response.finishReason());
return parse(response, returnType);
}
private static Future<Moderation> triggerModerationIfNeeded(AiServiceContext context,
AiServiceMethodCreateInfo createInfo,
List<ChatMessage> messages) {
Future<Moderation> moderationFuture = null;
if (createInfo.isRequiresModeration()) {
log.debug("Moderation is required and it will be executed in the background");
// TODO: don't occupy a worker thread for this and instead use the reactive API provided by the client
ExecutorService defaultExecutor = (ExecutorService) Infrastructure.getDefaultExecutor();
moderationFuture = defaultExecutor.submit(new Callable<>() {
@Override
public Moderation call() {
List<ChatMessage> messagesToModerate = removeToolMessages(messages);
log.debug("Attempting to moderate messages");
var result = context.moderationModel.moderate(messagesToModerate).content();
log.debug("Moderation completed");
return result;
}
});
}
return moderationFuture;
}
private static Optional<SystemMessage> prepareSystemMessage(AiServiceMethodCreateInfo createInfo, Object[] methodArgs) {
if (createInfo.getSystemMessageInfo().isEmpty()) {
return Optional.empty();
}
AiServiceMethodCreateInfo.TemplateInfo systemMessageInfo = createInfo.getSystemMessageInfo().get();
Map<String, Object> templateParams = new HashMap<>();
Map<String, Integer> nameToParamPosition = systemMessageInfo.getNameToParamPosition();
for (var entry : nameToParamPosition.entrySet()) {
templateParams.put(entry.getKey(), methodArgs[entry.getValue()]);
}
Prompt prompt = PromptTemplate.from(systemMessageInfo.getText()).apply(templateParams);
return Optional.of(prompt.toSystemMessage());
}
private static UserMessage prepareUserMessage(AiServiceContext context, AiServiceMethodCreateInfo createInfo,
Object[] methodArgs) {
AiServiceMethodCreateInfo.UserMessageInfo userMessageInfo = createInfo.getUserMessageInfo();
String userName = null;
if (userMessageInfo.getUserNameParamPosition().isPresent()) {
userName = methodArgs[userMessageInfo.getUserNameParamPosition().get()]
.toString(); // LangChain4j does this, but might want to make anything other than a String a build time error
}
if (userMessageInfo.getTemplate().isPresent()) {
AiServiceMethodCreateInfo.TemplateInfo templateInfo = userMessageInfo.getTemplate().get();
Map<String, Object> templateParams = new HashMap<>();
Map<String, Integer> nameToParamPosition = templateInfo.getNameToParamPosition();
for (var entry : nameToParamPosition.entrySet()) {
Object value = transformTemplateParamValue(methodArgs[entry.getValue()]);
templateParams.put(entry.getKey(), value);
}
// we do not need to apply the instructions as they have already been added to the template text at build time
Prompt prompt = PromptTemplate.from(templateInfo.getText()).apply(templateParams);
return createUserMessage(userName, prompt.text());
} else if (userMessageInfo.getParamPosition().isPresent()) {
Integer paramIndex = userMessageInfo.getParamPosition().get();
Object argValue = methodArgs[paramIndex];
if (argValue == null) {
throw new IllegalArgumentException(
"Unable to construct UserMessage for class '" + context.aiServiceClass.getName()
+ "' because parameter with index "
+ paramIndex + " is null");
}
return createUserMessage(userName, toString(argValue));
} else {
throw new IllegalStateException("Unable to construct UserMessage for class '" + context.aiServiceClass.getName()
+ "'. Please contact the maintainers");
}
}
private static UserMessage createUserMessage(String name, String text) {
if (name == null) {
return userMessage(text);
} else {
return userMessage(name, text);
}
}
private static Object transformTemplateParamValue(Object value) {
if (value.getClass().isArray()) {
// Qute does not transform these values but LangChain4j expects to be converted to a [item1, item2, item3] like systax
return Arrays.toString((Object[]) value);
}
return value;
}
private static Object memoryId(AiServiceMethodCreateInfo createInfo, Object[] methodArgs, boolean hasChatMemoryProvider) {
if (createInfo.getMemoryIdParamPosition().isPresent()) {
return methodArgs[createInfo.getMemoryIdParamPosition().get()];
}
if (hasChatMemoryProvider) {
// first we try to use the current context in order to make sure that we don't interleave chat messages of concurrent requests
ArcContainer container = Arc.container();
if (container != null) {
ManagedContext requestContext = container.requestContext();
if (requestContext.isActive()) {
return requestContext.getState();
}
}
}
// fallback to the default since there is nothing else we can really use here
return "default";
}
//TODO: share these methods with LangChain4j
private static String toString(Object arg) {
if (arg.getClass().isArray()) {
return arrayToString(arg);
} else if (arg.getClass().isAnnotationPresent(StructuredPrompt.class)) {
return StructuredPromptProcessor.toPrompt(arg).text();
} else {
return arg.toString();
}
}
private static String arrayToString(Object arg) {
StringBuilder sb = new StringBuilder("[");
int length = Array.getLength(arg);
for (int i = 0; i < length; i++) {
sb.append(toString(Array.get(arg, i)));
if (i < length - 1) {
sb.append(", ");
}
}
sb.append("]");
return sb.toString();
}
public static class Input {
final QuarkusAiServiceContext context;
final AiServiceMethodCreateInfo createInfo;
final Object[] methodArgs;
public Input(QuarkusAiServiceContext context, AiServiceMethodCreateInfo createInfo, Object[] methodArgs) {
this.context = context;
this.createInfo = createInfo;
this.methodArgs = methodArgs;
}
}
public interface Wrapper {
Object wrap(Input input, Function<Input, Object> fun);
}
}
| [
"dev.langchain4j.model.input.PromptTemplate.from",
"dev.langchain4j.model.input.structured.StructuredPromptProcessor.toPrompt"
] | [((5712, 6437), 'io.smallrye.mutiny.Multi.createFrom'), ((11153, 11223), 'dev.langchain4j.model.input.PromptTemplate.from'), ((12561, 12626), 'dev.langchain4j.model.input.PromptTemplate.from'), ((15232, 15278), 'dev.langchain4j.model.input.structured.StructuredPromptProcessor.toPrompt')] |
package io.thomasvitale.langchain4j.spring.core.chat.messages.jackson;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import org.skyscreamer.jsonassert.JSONAssert;
import org.skyscreamer.jsonassert.JSONCompareMode;
import io.thomasvitale.langchain4j.spring.core.json.jackson.LangChain4jJacksonProvider;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Unit tests for {@link AiMessageMixin}.
*/
class AiMessageMixinTests {
private final ObjectMapper objectMapper = LangChain4jJacksonProvider.getObjectMapper();
@Test
void serializeAndDeserializeAiMessageWithText() throws JsonProcessingException, JSONException {
var message = AiMessage.from("Simple answer");
var json = objectMapper.writeValueAsString(message);
JSONAssert.assertEquals("""
{
"text": "Simple answer",
"type": "AI"
}
""", json, JSONCompareMode.STRICT);
var deserializedMessage = objectMapper.readValue(json, ChatMessage.class);
assertThat(deserializedMessage).isEqualTo(message);
}
@Test
void serializeAndDeserializeAiMessageWithToolExecutionRequest() throws JsonProcessingException, JSONException {
var message = AiMessage.from(ToolExecutionRequest.builder().name("queryDatabase").arguments("{}").build());
var json = objectMapper.writeValueAsString(message);
JSONAssert.assertEquals("""
{
"toolExecutionRequests": [{
"name": "queryDatabase",
"arguments": "{}"
}],
"type": "AI"
}
""", json, JSONCompareMode.STRICT);
var deserializedMessage = objectMapper.readValue(json, ChatMessage.class);
assertThat(deserializedMessage).isEqualTo(message);
}
}
| [
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder"
] | [((1581, 1657), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((1581, 1649), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((1581, 1633), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')] |
package io.quarkiverse.langchain4j.test;
import static dev.langchain4j.data.message.AiMessage.aiMessage;
import static dev.langchain4j.data.message.ChatMessageDeserializer.messageFromJson;
import static dev.langchain4j.data.message.ChatMessageDeserializer.messagesFromJson;
import static dev.langchain4j.data.message.ChatMessageSerializer.messageToJson;
import static dev.langchain4j.data.message.ChatMessageSerializer.messagesToJson;
import static dev.langchain4j.data.message.SystemMessage.systemMessage;
import static dev.langchain4j.data.message.ToolExecutionResultMessage.toolExecutionResultMessage;
import static dev.langchain4j.data.message.UserMessage.userMessage;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.List;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.ChatMessageSerializer;
import dev.langchain4j.data.message.ImageContent;
import dev.langchain4j.data.message.UserMessage;
import io.quarkus.test.QuarkusUnitTest;
class ChatMessageSerializerTest {
@RegisterExtension
static final QuarkusUnitTest unitTest = new QuarkusUnitTest()
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class));
@Test
void should_serialize_and_deserialize_user_message_with_name() {
UserMessage message = userMessage("dummy", "hello");
String json = messageToJson(message);
ChatMessage deserializedMessage = messageFromJson(json);
assertThat(deserializedMessage).isEqualTo(message);
}
@Test
void should_serialize_and_deserialize_user_message_without_name() {
UserMessage message = userMessage("hello");
String json = messageToJson(message);
ChatMessage deserializedMessage = messageFromJson(json);
assertThat(deserializedMessage).isEqualTo(message);
}
@Test
void should_serialize_and_deserialize_user_message_with_image_content() {
UserMessage message = UserMessage.from(ImageContent.from("http://image.url"));
String json = messageToJson(message);
ChatMessage deserializedMessage = messageFromJson(json);
assertThat(deserializedMessage).isEqualTo(message);
}
@Test
void should_serialize_and_deserialize_empty_list() {
List<ChatMessage> messages = emptyList();
String json = messagesToJson(messages);
List<ChatMessage> deserializedMessages = messagesFromJson(json);
assertThat(deserializedMessages).isEmpty();
}
@Test
void should_deserialize_null_as_empty_list() {
assertThat(messagesFromJson(null)).isEmpty();
}
@Test
void should_serialize_and_deserialize_list_with_one_message() {
List<ChatMessage> messages = singletonList(userMessage("hello"));
String json = messagesToJson(messages);
assertThat(json).isEqualTo("[{\"contents\":[{\"text\":\"hello\",\"type\":\"TEXT\"}],\"type\":\"USER\"}]");
List<ChatMessage> deserializedMessages = messagesFromJson(json);
assertThat(deserializedMessages).isEqualTo(messages);
}
@Test
void should_serialize_and_deserialize_list_with_all_types_of_messages() {
List<ChatMessage> messages = asList(
systemMessage("Hello from system"),
userMessage("Hello from user"),
userMessage("Klaus", "Hello from Klaus"),
aiMessage("Hello from AI"),
aiMessage(ToolExecutionRequest.builder()
.name("calculator")
.arguments("{}")
.build()),
toolExecutionResultMessage("12345", "calculator", "4"));
String json = ChatMessageSerializer.messagesToJson(messages);
assertThat(json).isEqualTo("[" +
"{\"text\":\"Hello from system\",\"type\":\"SYSTEM\"}," +
"{\"contents\":[{\"text\":\"Hello from user\",\"type\":\"TEXT\"}],\"type\":\"USER\"}," +
"{\"name\":\"Klaus\",\"contents\":[{\"text\":\"Hello from Klaus\",\"type\":\"TEXT\"}],\"type\":\"USER\"}," +
"{\"text\":\"Hello from AI\",\"type\":\"AI\"}," +
"{\"toolExecutionRequests\":[{\"name\":\"calculator\",\"arguments\":\"{}\"}],\"type\":\"AI\"}," +
"{\"text\":\"4\",\"id\":\"12345\",\"toolName\":\"calculator\",\"type\":\"TOOL_EXECUTION_RESULT\"}" +
"]");
List<ChatMessage> deserializedMessages = messagesFromJson(json);
assertThat(deserializedMessages).isEqualTo(messages);
}
}
| [
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder"
] | [((3823, 3971), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((3823, 3938), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((3823, 3897), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')] |
package io.thomasvitale.langchain4j.spring.core.image.jackson;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import dev.langchain4j.data.image.Image;
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import org.skyscreamer.jsonassert.JSONAssert;
import org.skyscreamer.jsonassert.JSONCompareMode;
import io.thomasvitale.langchain4j.spring.core.json.jackson.LangChain4jJacksonProvider;
import java.util.Base64;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Unit tests for {@link ImageMixin}.
*/
class ImageMixinTests {
private final ObjectMapper objectMapper = LangChain4jJacksonProvider.getObjectMapper();
@Test
void serializeAndDeserializeImageWithUrl() throws JsonProcessingException, JSONException {
var image = Image.builder().url("http://example.net").revisedPrompt("something funny").build();
var json = objectMapper.writeValueAsString(image);
JSONAssert.assertEquals("""
{
"url": "http://example.net",
"revisedPrompt": "something funny"
}
""", json, JSONCompareMode.STRICT);
var deserializedImage = objectMapper.readValue(json, Image.class);
assertThat(deserializedImage).isEqualTo(image);
}
@Test
void serializeAndDeserializeImageWithBase64AndMimeType() throws JsonProcessingException, JSONException {
var image = Image.builder()
.base64Data(Base64.getEncoder().encodeToString("image".getBytes()))
.mimeType("img/png")
.build();
var json = objectMapper.writeValueAsString(image);
JSONAssert.assertEquals("""
{
"base64Data": "aW1hZ2U=",
"mimeType": "img/png"
}
""", json, JSONCompareMode.STRICT);
var deserializedImage = objectMapper.readValue(json, Image.class);
assertThat(deserializedImage).isEqualTo(image);
}
}
| [
"dev.langchain4j.data.image.Image.builder"
] | [((845, 927), 'dev.langchain4j.data.image.Image.builder'), ((845, 919), 'dev.langchain4j.data.image.Image.builder'), ((845, 886), 'dev.langchain4j.data.image.Image.builder'), ((1512, 1661), 'dev.langchain4j.data.image.Image.builder'), ((1512, 1640), 'dev.langchain4j.data.image.Image.builder'), ((1512, 1607), 'dev.langchain4j.data.image.Image.builder'), ((1552, 1606), 'java.util.Base64.getEncoder')] |
package com.egineering.ai.llmjavademo;
import com.egineering.ai.llmjavademo.agents.FaqAgent;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.parser.apache.pdfbox.ApachePdfBoxDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.BertTokenizer;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import org.junit.jupiter.api.Test;
import org.springframework.util.ResourceUtils;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Proxy;
import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument;
public class Tests {
@Test
public void test() throws IOException {
EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
File fileResource = ResourceUtils.getFile("classpath:jackson_lottery.pdf");
Document document = loadDocument(fileResource.toPath(), new ApachePdfBoxDocumentParser());
DocumentSplitter documentSplitter = DocumentSplitters.recursive(100, 2, new BertTokenizer());
EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
.documentSplitter(documentSplitter)
.embeddingModel(new AllMiniLmL6V2EmbeddingModel())
.embeddingStore(embeddingStore)
.build();
ingestor.ingest(document);
}
@Test
public void test2() throws NoSuchFieldException, IllegalAccessException {
StreamingChatLanguageModel model = OllamaStreamingChatModel.builder()
.baseUrl("http://localhost:11434")
.modelName("llama2")
.temperature(0.0)
.build();
FaqAgent faqAgent = AiServices.builder(FaqAgent.class)
.streamingChatLanguageModel(model)
.chatMemory(MessageWindowChatMemory.withMaxMessages(20))
.build();
Field defaultAiServiceField = Proxy.getInvocationHandler(faqAgent).getClass().getDeclaredField("context");
defaultAiServiceField.setAccessible(true);
Object defaultAiServices = defaultAiServiceField.get(AiServices.class);
Proxy.getInvocationHandler(faqAgent);
}
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder"
] | [((1713, 1937), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1713, 1912), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1713, 1864), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1713, 1797), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2112, 2293), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((2112, 2268), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((2112, 2234), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((2112, 2197), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((2324, 2507), 'dev.langchain4j.service.AiServices.builder'), ((2324, 2482), 'dev.langchain4j.service.AiServices.builder'), ((2324, 2409), 'dev.langchain4j.service.AiServices.builder'), ((2548, 2623), 'java.lang.reflect.Proxy.getInvocationHandler'), ((2548, 2595), 'java.lang.reflect.Proxy.getInvocationHandler')] |
package org.feuyeux.ai.langchain.hellolangchain;
import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.time.Duration.ofSeconds;
import static java.util.stream.Collectors.joining;
import static org.feuyeux.ai.langchain.hellolangchain.OpenApi.getKey;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
@Slf4j
public class RetrievalTest {
public static final String SIMPSON_S_ADVENTURES_TXT =
"src/test/resources/simpson's_adventures.txt";
@AfterEach
public void tearDown() throws InterruptedException {
TimeUnit.SECONDS.sleep(25);
}
@Test
public void givenDocument_whenPrompted_thenValidResponse() {
Document document = loadDocument(Paths.get(SIMPSON_S_ADVENTURES_TXT), new TextDocumentParser());
DocumentSplitter splitter =
DocumentSplitters.recursive(100, 0, new OpenAiTokenizer(GPT_3_5_TURBO));
List<TextSegment> segments = splitter.split(document);
EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
List<Embedding> embeddings = embeddingModel.embedAll(segments).content();
EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
embeddingStore.addAll(embeddings, segments);
String question = "Who is Simpson?";
Embedding questionEmbedding = embeddingModel.embed(question).content();
int maxResults = 3;
double minScore = 0.7;
List<EmbeddingMatch<TextSegment>> relevantEmbeddings =
embeddingStore.findRelevant(questionEmbedding, maxResults, minScore);
PromptTemplate promptTemplate =
PromptTemplate.from(
"Answer the following question to the best of your ability:\n"
+ "\n"
+ "Question:\n"
+ "{{question}}\n"
+ "\n"
+ "Base your answer on the following information:\n"
+ "{{information}}");
String information =
relevantEmbeddings.stream().map(match -> match.embedded().text()).collect(joining("\n\n"));
Map<String, Object> variables = new HashMap<>();
variables.put("question", question);
variables.put("information", information);
Prompt prompt = promptTemplate.apply(variables);
ChatLanguageModel chatModel =
OpenAiChatModel.builder().apiKey(getKey()).timeout(ofSeconds(60)).build();
AiMessage aiMessage = chatModel.generate(prompt.toUserMessage()).content();
log.info(aiMessage.text());
Assertions.assertNotNull(aiMessage.text());
}
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1821, 1847), 'java.util.concurrent.TimeUnit.SECONDS.sleep'), ((3509, 3582), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3509, 3574), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3509, 3551), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package org.feuyeux.ai.langchain.hellolangchain;
import static org.assertj.core.api.Assertions.assertThat;
import static org.feuyeux.ai.langchain.hellolangchain.OpenApi.getKey;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import java.util.concurrent.TimeUnit;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
@Slf4j
public class AgentsTest {
static class Calculator {
@Tool("Calculates the length of a string")
int stringLength(String s) throws InterruptedException {
log.info("Calculating the length of \"{}\"...", s);
TimeUnit.SECONDS.sleep(15);
return s.length();
}
@Tool("Calculates the sum of two numbers")
int add(int a, int b) {
return a + b;
}
}
interface Assistant {
String chat(String userMessage);
}
@AfterEach
public void tearDown() throws InterruptedException {
TimeUnit.SECONDS.sleep(25);
}
@Test
public void givenServiceWithTools_whenPrompted_thenValidResponse() throws InterruptedException {
Assistant assistant =
AiServices.builder(Assistant.class)
.chatLanguageModel(OpenAiChatModel.withApiKey(getKey()))
.tools(new Calculator())
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.build();
String question =
"What is the sum of the numbers of letters in the words \"language\" and \"model\"?";
String answer = assistant.chat(question);
log.info("answer:{}", answer);
assertThat(answer).contains("13");
}
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((756, 782), 'java.util.concurrent.TimeUnit.SECONDS.sleep'), ((1060, 1086), 'java.util.concurrent.TimeUnit.SECONDS.sleep'), ((1234, 1465), 'dev.langchain4j.service.AiServices.builder'), ((1234, 1444), 'dev.langchain4j.service.AiServices.builder'), ((1234, 1375), 'dev.langchain4j.service.AiServices.builder'), ((1234, 1338), 'dev.langchain4j.service.AiServices.builder')] |
package dev.langchain4j.rag.content.retriever;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.rag.query.Query;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingSearchRequest;
import dev.langchain4j.store.embedding.EmbeddingSearchResult;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.filter.Filter;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static dev.langchain4j.store.embedding.filter.MetadataFilterBuilder.metadataKey;
import static java.util.Arrays.asList;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.*;
class EmbeddingStoreContentRetrieverTest {
private static EmbeddingStore<TextSegment> EMBEDDING_STORE;
private static EmbeddingModel EMBEDDING_MODEL;
private static final Embedding EMBEDDING = Embedding.from(asList(1f, 2f, 3f));
private static final Query QUERY = Query.from("query");
private static final int DEFAULT_MAX_RESULTS = 3;
private static final int CUSTOM_MAX_RESULTS = 1;
private static final double CUSTOM_MIN_SCORE = 0.7;
public static final double DEFAULT_MIN_SCORE = 0.0;
@BeforeEach
void beforeEach() {
EMBEDDING_STORE = mock(EmbeddingStore.class);
when(EMBEDDING_STORE.search(any())).thenReturn(new EmbeddingSearchResult<>(asList(
new EmbeddingMatch<>(0.9, "id 1", null, TextSegment.from("content 1")),
new EmbeddingMatch<>(0.7, "id 2", null, TextSegment.from("content 2"))
)));
EMBEDDING_MODEL = mock(EmbeddingModel.class);
when(EMBEDDING_MODEL.embed(anyString())).thenReturn(Response.from(EMBEDDING));
}
@AfterEach
void afterEach() {
verify(EMBEDDING_MODEL).embed(QUERY.text());
verifyNoMoreInteractions(EMBEDDING_MODEL);
}
@Test
void should_retrieve() {
// given
ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(EMBEDDING_STORE, EMBEDDING_MODEL);
// when
contentRetriever.retrieve(QUERY);
// then
verify(EMBEDDING_STORE).search(EmbeddingSearchRequest.builder()
.queryEmbedding(EMBEDDING)
.maxResults(DEFAULT_MAX_RESULTS)
.minScore(DEFAULT_MIN_SCORE)
.build());
verifyNoMoreInteractions(EMBEDDING_STORE);
}
@Test
void should_retrieve_builder() {
// given
ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
.embeddingStore(EMBEDDING_STORE)
.embeddingModel(EMBEDDING_MODEL)
.build();
// when
contentRetriever.retrieve(QUERY);
// then
verify(EMBEDDING_STORE).search(EmbeddingSearchRequest.builder()
.queryEmbedding(EMBEDDING)
.maxResults(DEFAULT_MAX_RESULTS)
.minScore(DEFAULT_MIN_SCORE)
.build());
verifyNoMoreInteractions(EMBEDDING_STORE);
}
@Test
void should_retrieve_with_custom_maxResults() {
// given
ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(
EMBEDDING_STORE,
EMBEDDING_MODEL,
CUSTOM_MAX_RESULTS
);
// when
contentRetriever.retrieve(QUERY);
// then
verify(EMBEDDING_STORE).search(EmbeddingSearchRequest.builder()
.queryEmbedding(EMBEDDING)
.maxResults(CUSTOM_MAX_RESULTS)
.minScore(DEFAULT_MIN_SCORE)
.build());
verifyNoMoreInteractions(EMBEDDING_STORE);
}
@Test
void should_retrieve_with_custom_maxResults_builder() {
// given
ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
.embeddingStore(EMBEDDING_STORE)
.embeddingModel(EMBEDDING_MODEL)
.maxResults(CUSTOM_MAX_RESULTS)
.build();
// when
contentRetriever.retrieve(QUERY);
// then
verify(EMBEDDING_STORE).search(EmbeddingSearchRequest.builder()
.queryEmbedding(EMBEDDING)
.maxResults(CUSTOM_MAX_RESULTS)
.minScore(DEFAULT_MIN_SCORE)
.build());
verifyNoMoreInteractions(EMBEDDING_STORE);
}
@Test
void should_retrieve_with_custom_dynamicMaxResults_builder() {
// given
ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
.embeddingStore(EMBEDDING_STORE)
.embeddingModel(EMBEDDING_MODEL)
.dynamicMaxResults((query) -> CUSTOM_MAX_RESULTS)
.build();
// when
contentRetriever.retrieve(QUERY);
// then
verify(EMBEDDING_STORE).search(EmbeddingSearchRequest.builder()
.queryEmbedding(EMBEDDING)
.maxResults(CUSTOM_MAX_RESULTS)
.minScore(DEFAULT_MIN_SCORE)
.build());
verifyNoMoreInteractions(EMBEDDING_STORE);
}
@Test
void should_retrieve_with_custom_minScore_ctor() {
// given
ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(
EMBEDDING_STORE,
EMBEDDING_MODEL,
null,
CUSTOM_MIN_SCORE
);
// when
contentRetriever.retrieve(QUERY);
// then
verify(EMBEDDING_STORE).search(EmbeddingSearchRequest.builder()
.queryEmbedding(EMBEDDING)
.maxResults(DEFAULT_MAX_RESULTS)
.minScore(CUSTOM_MIN_SCORE)
.build());
verifyNoMoreInteractions(EMBEDDING_STORE);
}
@Test
void should_retrieve_with_custom_minScore_builder() {
// given
ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
.embeddingStore(EMBEDDING_STORE)
.embeddingModel(EMBEDDING_MODEL)
.minScore(CUSTOM_MIN_SCORE)
.build();
// when
contentRetriever.retrieve(QUERY);
// then
verify(EMBEDDING_STORE).search(EmbeddingSearchRequest.builder()
.queryEmbedding(EMBEDDING)
.maxResults(DEFAULT_MAX_RESULTS)
.minScore(CUSTOM_MIN_SCORE)
.build());
verifyNoMoreInteractions(EMBEDDING_STORE);
}
@Test
void should_retrieve_with_custom_dynamicMinScore_builder() {
// given
ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
.embeddingStore(EMBEDDING_STORE)
.embeddingModel(EMBEDDING_MODEL)
.dynamicMinScore((query) -> CUSTOM_MIN_SCORE)
.build();
// when
contentRetriever.retrieve(QUERY);
// then
verify(EMBEDDING_STORE).search(EmbeddingSearchRequest.builder()
.queryEmbedding(EMBEDDING)
.maxResults(DEFAULT_MAX_RESULTS)
.minScore(CUSTOM_MIN_SCORE)
.build());
verifyNoMoreInteractions(EMBEDDING_STORE);
}
@Test
void should_retrieve_with_custom_filter() {
// given
Filter metadataFilter = metadataKey("key").isEqualTo("value");
ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
.embeddingStore(EMBEDDING_STORE)
.embeddingModel(EMBEDDING_MODEL)
.filter(metadataFilter)
.build();
// when
contentRetriever.retrieve(QUERY);
// then
verify(EMBEDDING_STORE).search(EmbeddingSearchRequest.builder()
.queryEmbedding(EMBEDDING)
.maxResults(DEFAULT_MAX_RESULTS)
.minScore(DEFAULT_MIN_SCORE)
.filter(metadataFilter)
.build());
verifyNoMoreInteractions(EMBEDDING_STORE);
}
@Test
void should_retrieve_with_custom_dynamicFilter() {
// given
Filter metadataFilter = metadataKey("key").isEqualTo("value");
ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
.embeddingStore(EMBEDDING_STORE)
.embeddingModel(EMBEDDING_MODEL)
.dynamicFilter((query) -> metadataFilter)
.build();
// when
contentRetriever.retrieve(QUERY);
// then
verify(EMBEDDING_STORE).search(EmbeddingSearchRequest.builder()
.queryEmbedding(EMBEDDING)
.maxResults(DEFAULT_MAX_RESULTS)
.minScore(DEFAULT_MIN_SCORE)
.filter(metadataFilter)
.build());
verifyNoMoreInteractions(EMBEDDING_STORE);
}
} | [
"dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder"
] | [((2443, 2637), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((2443, 2612), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((2443, 2567), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((2443, 2518), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((3087, 3281), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((3087, 3256), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((3087, 3211), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((3087, 3162), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((3729, 3922), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((3729, 3897), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((3729, 3852), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((3729, 3804), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((4443, 4636), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((4443, 4611), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((4443, 4566), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((4443, 4518), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((5182, 5375), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((5182, 5350), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((5182, 5305), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((5182, 5257), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((5846, 6039), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((5846, 6014), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((5846, 5970), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((5846, 5921), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((6554, 6747), 
'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((6554, 6722), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((6554, 6678), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((6554, 6629), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((7287, 7480), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((7287, 7455), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((7287, 7411), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((7287, 7362), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((8053, 8287), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((8053, 8262), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((8053, 8222), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((8053, 8177), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((8053, 8128), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((8885, 9119), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((8885, 9094), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((8885, 9054), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((8885, 9009), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((8885, 8960), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder')] |
package dev.langchain4j.model.chat;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.output.Response;
import org.assertj.core.api.WithAssertions;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
class ChatLanguageModelTest implements WithAssertions {
public static class UpperCaseEchoModel implements ChatLanguageModel {
@Override
public Response<AiMessage> generate(List<ChatMessage> messages) {
ChatMessage lastMessage = messages.get(messages.size() - 1);
return new Response<>(new AiMessage(lastMessage.text().toUpperCase(Locale.ROOT)));
}
}
@Test
public void test_not_supported() {
ChatLanguageModel model = new UpperCaseEchoModel();
List<ChatMessage> messages = new ArrayList<>();
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> model.generate(messages, new ArrayList<>()))
.withMessageContaining("Tools are currently not supported by this model");
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> model.generate(messages, ToolSpecification.builder().name("foo").build()))
.withMessageContaining("Tools are currently not supported by this model");
}
@Test
public void test_generate() {
ChatLanguageModel model = new UpperCaseEchoModel();
assertThat(model.generate("how are you?"))
.isEqualTo("HOW ARE YOU?");
{
List<ChatMessage> messages = new ArrayList<>();
messages.add(new UserMessage("Hello"));
messages.add(new AiMessage("Hi"));
messages.add(new UserMessage("How are you?"));
Response<AiMessage> response = model.generate(messages);
assertThat(response.content().text()).isEqualTo("HOW ARE YOU?");
assertThat(response.tokenUsage()).isNull();
assertThat(response.finishReason()).isNull();
}
{
Response<AiMessage> response = model.generate(
new UserMessage("Hello"),
new AiMessage("Hi"),
new UserMessage("How are you?"));
assertThat(response.content().text()).isEqualTo("HOW ARE YOU?");
assertThat(response.tokenUsage()).isNull();
assertThat(response.finishReason()).isNull();
}
}
}
| [
"dev.langchain4j.agent.tool.ToolSpecification.builder"
] | [((1374, 1421), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((1374, 1413), 'dev.langchain4j.agent.tool.ToolSpecification.builder')] |
package dev.langchain4j.store.embedding.filter.builder.sql;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.rag.query.Query;
import dev.langchain4j.store.embedding.filter.Filter;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.time.LocalDate;
import java.util.stream.Stream;
import static dev.langchain4j.store.embedding.filter.MetadataFilterBuilder.metadataKey;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
class LanguageModelSqlFilterBuilderIT {
private static final String OLLAMA_BASE_URL = "http://localhost:11434";
private static final int OLLAMA_NUM_PREDICT = 25;
TableDefinition table = new TableDefinition(
"movies",
"",
asList(
new ColumnDefinition("name", "VARCHAR(50)", ""),
new ColumnDefinition("genre", "VARCHAR(50)", "one of: [comedy, drama, action]"),
new ColumnDefinition("year", "INTEGER", "")
)
);
@ParameterizedTest
@MethodSource("models")
void should_filter_by_genre(ChatLanguageModel model) {
// given
LanguageModelSqlFilterBuilder sqlFilterBuilder = new LanguageModelSqlFilterBuilder(model, table);
Query query = Query.from("I want to watch something funny");
// when
Filter filter = sqlFilterBuilder.build(query);
// then
assertThat(filter).isEqualTo(metadataKey("genre").isEqualTo("comedy"));
}
@ParameterizedTest
@MethodSource("models")
void should_filter_by_genre_and_year(ChatLanguageModel model) {
// given
LanguageModelSqlFilterBuilder sqlFilterBuilder = LanguageModelSqlFilterBuilder.builder()
.chatLanguageModel(model)
.tableDefinition(table)
.build();
Query query = Query.from("I want to watch drama from current year");
// when
Filter filter = sqlFilterBuilder.build(query);
// then
assertThat(filter).isEqualTo(metadataKey("genre").isEqualTo("drama").and(metadataKey("year").isEqualTo((long) LocalDate.now().getYear())));
}
@ParameterizedTest
@MethodSource("models")
void should_filter_by_year_range(ChatLanguageModel model) {
// given
LanguageModelSqlFilterBuilder sqlFilterBuilder = new LanguageModelSqlFilterBuilder(model, table);
Query query = Query.from("I want to watch some old movie from 90s");
// when
Filter filter = sqlFilterBuilder.build(query);
// then
assertThat(filter).isEqualTo(metadataKey("year").isGreaterThanOrEqualTo(1990L).and(metadataKey("year").isLessThanOrEqualTo(1999L)));
}
@ParameterizedTest
@MethodSource("models")
void should_filter_by_year_using_arithmetics(ChatLanguageModel model) {
// given
LanguageModelSqlFilterBuilder sqlFilterBuilder = new LanguageModelSqlFilterBuilder(model, table);
Query query = Query.from("I want to watch some recent movie from the previous year");
// when
Filter filter = sqlFilterBuilder.build(query);
// then
assertThat(filter).isEqualTo(metadataKey("year").isEqualTo((long) LocalDate.now().getYear() - 1));
}
static Stream<Arguments> models() {
return Stream.of(
Arguments.of(
OpenAiChatModel.builder()
.baseUrl(System.getenv("OPENAI_BASE_URL"))
.apiKey(System.getenv("OPENAI_API_KEY"))
.organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
.logRequests(true)
.logResponses(true)
.build()
)
// Arguments.of(
// OllamaChatModel.builder()
// .baseUrl(OLLAMA_BASE_URL)
// .modelName("sqlcoder")
// .numPredict(OLLAMA_NUM_PREDICT)
// .build()
// ),
// Arguments.of(
// OllamaChatModel.builder()
// .baseUrl(OLLAMA_BASE_URL)
// .modelName("codellama")
// .numPredict(OLLAMA_NUM_PREDICT)
// .build()
// ),
// Arguments.of(
// OllamaChatModel.builder()
// .baseUrl(OLLAMA_BASE_URL)
// .modelName("mistral")
// .numPredict(OLLAMA_NUM_PREDICT)
// .build()
// ),
// Arguments.of(
// OllamaChatModel.builder()
// .baseUrl(OLLAMA_BASE_URL)
// .modelName("llama2")
// .numPredict(OLLAMA_NUM_PREDICT)
// .build()
// ),
// Arguments.of(
// OllamaChatModel.builder()
// .baseUrl(OLLAMA_BASE_URL)
// .modelName("phi")
// .numPredict(OLLAMA_NUM_PREDICT)
// .build()
// )
);
}
} | [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((2309, 2334), 'java.time.LocalDate.now'), ((3409, 3434), 'java.time.LocalDate.now'), ((3569, 3975), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3569, 3934), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3569, 3882), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3569, 3831), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3569, 3742), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3569, 3669), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package dev.langchain4j.store.memory.chat.cassandra;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import java.util.UUID;
import static dev.langchain4j.data.message.AiMessage.aiMessage;
import static dev.langchain4j.data.message.UserMessage.userMessage;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static org.assertj.core.api.Assertions.assertThat;
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@Slf4j
abstract class CassandraChatMemoryStoreTestSupport {
protected final String KEYSPACE = "langchain4j";
protected static CassandraChatMemoryStore chatMemoryStore;
@Test
@Order(1)
@DisplayName("1. Should create a database")
void shouldInitializeDatabase() {
createDatabase();
}
@Test
@Order(2)
@DisplayName("2. Connection to the database")
void shouldConnectToDatabase() {
chatMemoryStore = createChatMemoryStore();
log.info("Chat memory store is created.");
// Connection to Cassandra is established
assertThat(chatMemoryStore.getCassandraSession()
.getMetadata()
.getKeyspace(KEYSPACE)).isPresent();
log.info("Chat memory table is present.");
}
@Test
@Order(3)
@DisplayName("3. ChatMemoryStore initialization (table)")
void shouldCreateChatMemoryStore() {
chatMemoryStore.create();
// Table exists
assertThat(chatMemoryStore.getCassandraSession()
.refreshSchema()
.getKeyspace(KEYSPACE).get()
.getTable(CassandraChatMemoryStore.DEFAULT_TABLE_NAME)).isPresent();
chatMemoryStore.clear();
}
@Test
@Order(4)
@DisplayName("4. Insert items")
void shouldInsertItems() {
// When
String chatSessionId = "chat-" + UUID.randomUUID();
ChatMemory chatMemory = MessageWindowChatMemory.builder()
.chatMemoryStore(chatMemoryStore)
.maxMessages(100)
.id(chatSessionId)
.build();
// When
UserMessage userMessage = userMessage("I will ask you a few question about ff4j.");
chatMemory.add(userMessage);
AiMessage aiMessage = aiMessage("Sure, go ahead!");
chatMemory.add(aiMessage);
// Then
assertThat(chatMemory.messages()).containsExactly(userMessage, aiMessage);
}
abstract void createDatabase();
abstract CassandraChatMemoryStore createChatMemoryStore();
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((2372, 2549), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((2372, 2524), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((2372, 2489), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((2372, 2455), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.