code: string (length 619 to 138k)
apis: sequence (length 1 to 8)
extract_api: string (length 79 to 7.3k)
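Each row that follows pairs a flattened Java source file (the code column) with the fully qualified openai-java API expressions detected in it (apis) and the character spans of those expressions (extract_api). As a minimal sketch of how a consumer might model one row, the records below use hypothetical type and field names that are not part of the dataset itself:

```java
import java.util.List;

// Hypothetical model of one dataset row; type and field names are illustrative only.
record ApiSpan(int start, int end, String api) {}

record ApiExtractionRow(
        String code,           // flattened Java source, 619 to 138k characters
        List<String> apis,     // distinct fully qualified API call chains, 1 to 8 per row
        List<ApiSpan> spans) { // extract_api: character offsets paired with the matched API name
}
```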
/** * Copyright 2021 Rochester Institute of Technology (RIT). Developed with * government support under contract 70RCSA22C00000008 awarded by the United * States Department of Homeland Security for Cybersecurity and Infrastructure Security Agency. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the “Software”), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package edu.rit.se.nvip.reconciler.openai; import com.theokanning.openai.OpenAiHttpException; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionResult; import com.theokanning.openai.completion.chat.ChatMessage; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.util.ArrayList; import java.util.List; import java.util.concurrent.*; public class GPTFilterModel { private final Logger logger = LogManager.getLogger(getClass().getSimpleName()); private static final String MODEL = "gpt-3.5-turbo"; private static final double TEMP = 0.0; private static final String PASS = "0"; private static final String FAIL = "1"; private static final String SYS_MESSAGE = String.format("You are a validation engine for vulnerability data scraped from the web." 
+ " If a user's message looks like a CVE description without errors, respond with \"%s\" or else \"%s\"", PASS, FAIL); private static final String SYS_ROLE = "system"; private static final String USER_ROLE = "user"; private OpenAIRequestHandler requestHandler; public GPTFilterModel() { requestHandler = OpenAIRequestHandler.getInstance(); } public void setRequestHandler(OpenAIRequestHandler handler) { this.requestHandler = handler; } public boolean callModel(String arg) throws OpenAiInvalidReturnException{ try { ChatCompletionRequest request = formRequest(arg); Future<ChatCompletionResult> futureRes = requestHandler.createChatCompletion(request, RequestorIdentity.FILTER); ChatCompletionResult res = futureRes.get(); return getAnswer(res); } catch (OpenAiHttpException | InterruptedException | ExecutionException ex) { logger.error(ex); return true; // need a default answer } } public int tokenCount(String description) { return requestHandler.chatCompletionTokenCount(formRequest(description)); } private ChatCompletionRequest formRequest(String description) { List<ChatMessage> messages = formMessages(description); return ChatCompletionRequest.builder().model(MODEL).temperature(TEMP).n(1).messages(messages).maxTokens(1).build(); } private List<ChatMessage> formMessages(String description) { List<ChatMessage> messages = new ArrayList<>(); messages.add(new ChatMessage(SYS_ROLE, SYS_MESSAGE)); messages.add(new ChatMessage(USER_ROLE, description)); return messages; } private boolean getAnswer(ChatCompletionResult res) throws OpenAiInvalidReturnException { String answer = res.getChoices().get(0).getMessage().getContent(); switch (answer) { case PASS: return true; case FAIL: return false; default: throw new OpenAiInvalidReturnException("OpenAi responded with \"" + answer + "\""); } } public static class OpenAiInvalidReturnException extends Exception { public OpenAiInvalidReturnException(String errorMessage) { super(errorMessage); } } public static void main(String[] args) throws OpenAiInvalidReturnException, InterruptedException { GPTFilterModel model = new GPTFilterModel(); ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()); int a = 0; for (int i = 0; i < 5; i++) { int finalI = i; executor.submit(() -> { try { boolean result = model.callModel("testing # " + finalI); System.out.println("trial # " + finalI + " evaluated as " + result); } catch (OpenAiInvalidReturnException e) { System.out.println(e.toString()); } }); } executor.shutdown(); boolean res = executor.awaitTermination(10, TimeUnit.SECONDS); OpenAIRequestHandler.getInstance().shutdown(); } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((3549, 3656), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3549, 3648), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3549, 3635), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3549, 3616), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3549, 3611), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3549, 3593), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
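The extract_api offsets appear to be character indices into the flattened code string, with several spans sharing a start position and ending at successive points of the same builder chain. A minimal sketch of recovering one span, assuming 0-based, end-exclusive offsets (the class and method names here are illustrative, not part of the dataset):

```java
public final class SpanSlicer {
    // Assumes extract_api offsets are 0-based, end-exclusive indices into the flattened code string.
    static String sliceSpan(String code, int start, int end) {
        return code.substring(start, end);
    }
}
```

Applied to the row above, sliceSpan(code, 3549, 3656) should return the full ChatCompletionRequest.builder()... .build() chain inside formRequest, while the shorter spans that share the same start offset appear to cover successive prefixes of that chain.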
package com.example.gpt3javaexample.services; import com.example.gpt3javaexample.aop.SaveToLogs; import com.theokanning.openai.OpenAiService; import com.theokanning.openai.completion.CompletionChoice; import com.theokanning.openai.completion.CompletionRequest; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; @Service public class GPTService { @Value("${openai.max_tokens}") private int MAX_TOKENS; @Value("${openai.model}") private String MODEL; private final OpenAiService service; private final StringBuilder chatHistory; @Autowired public GPTService(OpenAiService service) { this.service = service; this.chatHistory = new StringBuilder(); } @SaveToLogs public String doRequest(String prompt, Boolean newChat){ if (newChat){ clearHistory(); } chatHistory.append("Input: ").append(prompt).append("\nOutput: "); CompletionRequest request = CompletionRequest.builder() .prompt(chatHistory.toString()) .model(MODEL) .maxTokens(MAX_TOKENS) .build(); String response = service.createCompletion(request).getChoices().stream() .map(CompletionChoice::getText) .reduce(String::concat) .orElse("I don't know what to say"); chatHistory.append(response).append("\n"); return response; } public void clearHistory(){ chatHistory.delete(0, chatHistory.length()); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((1077, 1246), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1077, 1221), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1077, 1182), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1077, 1152), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package br.com.alura.screenmatch.service; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.service.OpenAiService; public class ConsumoChatGPT { public static String obterTraducao(String texto) { OpenAiService service = new OpenAiService("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"); CompletionRequest requisicao = CompletionRequest.builder() .model("text-davinci-003") .prompt("traduza para o português o texto: " + texto) .maxTokens(1000) .temperature(0.7) .build(); var resposta = service.createCompletion(requisicao); return resposta.getChoices().get(0).getText(); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((389, 622), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((389, 597), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((389, 563), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((389, 530), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((389, 459), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package me.bowon.springbootdeveloper.controller; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.service.OpenAiService; import lombok.RequiredArgsConstructor; import me.bowon.springbootdeveloper.domain.Song; import me.bowon.springbootdeveloper.domain.YoutubeData; import me.bowon.springbootdeveloper.service.BlogService; import me.bowon.springbootdeveloper.service.GptService; import me.bowon.springbootdeveloper.service.YoutubeService; import org.springframework.beans.factory.annotation.Value; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; import me.bowon.springbootdeveloper.service.BlogService; import java.io.IOException; import java.security.GeneralSecurityException; import java.util.ArrayList; import java.util.List; @RequiredArgsConstructor @RestController @RequestMapping(value = "/gpt") public class GptTest { @Value("${openai.api-key}") private String apiKey; private final GptService gptService; private final YoutubeService youtubeService; private final String promptFormat = // 프롬프트 양식 "Desired Format: 1. song-singer, \n Input: 다음 일기를 보고 노래 3가지를 추천해줘 \n"; private String data; @PostMapping("/post") public List<YoutubeData> sendQuestion(@RequestBody String request) throws GeneralSecurityException, IOException { OpenAiService service = new OpenAiService(apiKey); CompletionRequest completionRequest = CompletionRequest.builder() .prompt(promptFormat + request) .model("text-davinci-003") .echo(false) .maxTokens(100) .temperature(0.7) .build(); data = service.createCompletion(completionRequest).getChoices().toString(); List<Song> songs = gptService.parseSong(data); System.out.println(songs); List<YoutubeData> youtubeDataList = youtubeService.youtubeApi(songs); return youtubeDataList; } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((1721, 1959), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1721, 1934), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1721, 1900), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1721, 1868), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1721, 1839), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1721, 1796), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package com.github.pablwoaraujo; import java.util.Arrays; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; public class Main { public static void main(String[] args) { var user = "Gere 5 produtos"; var system = "Você é um gerador de produtos fictícios para um ecommerce e deve gerar apenas o nome dos produtos solicitados pelo usuário"; var apiKey = System.getenv("OPENAI_API_KEY"); OpenAiService service = new OpenAiService(apiKey); ChatCompletionRequest completionRequest = ChatCompletionRequest .builder() .model("gpt-3.5-turbo") .messages(Arrays.asList( new ChatMessage(ChatMessageRole.USER.value(), user), new ChatMessage(ChatMessageRole.SYSTEM.value(), system))) .build(); service .createChatCompletion(completionRequest) .getChoices() .forEach(c -> System.out.println(c.getMessage().getContent())); System.out.println("Hello world!"); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((893, 921), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((970, 1000), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]
package br.com.fiap.gsjava.controllers; import br.com.fiap.gsjava.models.ChatGPT; import br.com.fiap.gsjava.repositories.ChatGPTRepository; import br.com.fiap.gsjava.service.OpenAiService; import jakarta.validation.ConstraintViolationException; import jakarta.validation.Valid; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.web.PageableDefault; import org.springframework.data.web.PagedResourcesAssembler; import org.springframework.hateoas.EntityModel; import org.springframework.hateoas.PagedModel; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.*; import org.springframework.web.server.ResponseStatusException; import com.theokanning.openai.completion.CompletionRequest; import org.springframework.data.domain.Pageable; import org.slf4j.Logger; @RestController @RequestMapping("/chatbot") public class ChatGPTController { @Autowired ChatGPTRepository repo; @Autowired PagedResourcesAssembler<ChatGPT> assembler; Logger log = LoggerFactory.getLogger(ChatGPTController.class); private static final String API_KEY = "Sua Chave Aqui"; @GetMapping public PagedModel<EntityModel<ChatGPT>> index(@PageableDefault(size = 5) Pageable pageable) { return assembler.toModel(repo.findAll(pageable)); } @GetMapping("/busca/{id}") public EntityModel<ChatGPT> show(@PathVariable Long id) { log.info("buscar chat com id: " + id); ChatGPT chatGPT = repo.findById(id).orElseThrow(() -> new ResponseStatusException(HttpStatus.NOT_FOUND, "Cliente não encontrado")); return chatGPT.toModel(); } @PostMapping("/api") public ResponseEntity<ChatGPT> create(@RequestBody @Valid ChatGPT input) { OpenAiService service = new OpenAiService(API_KEY); CompletionRequest request = CompletionRequest.builder() .model("text-davinci-003") .prompt(input.getPergunta()) .maxTokens(400) .build(); String resposta = service.createCompletion(request).getChoices().get(0).getText(); ChatGPT chatGPT = new ChatGPT(input.getPergunta(), resposta); log.info("Saída do chatbot: " + chatGPT); repo.save(chatGPT); return ResponseEntity.status(HttpStatus.CREATED).body(chatGPT); } @DeleteMapping("/{id}") public ResponseEntity<ChatGPT>destroy(@PathVariable Long id) { log.info("deletar chat com o id: " + id); ChatGPT chatgpt = repo.findById(id).orElseThrow(() -> new ResponseStatusException(HttpStatus.NOT_FOUND, "Chat não encontrado"));; repo.delete(chatgpt); return ResponseEntity.noContent().build(); } @ResponseStatus(HttpStatus.BAD_REQUEST) @ExceptionHandler(ConstraintViolationException.class) public ResponseEntity<String> handleValidationExceptions(ConstraintViolationException ex) { log.error("Erro de validação: ", ex); return ResponseEntity.badRequest().body(ex.getMessage()); } @ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR) @ExceptionHandler(Exception.class) public ResponseEntity<String> handleAllExceptions(Exception ex) { log.error("Erro não esperado: ", ex); return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("Ocorreu um erro inesperado. Tente novamente mais tarde."); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((2006, 2182), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2006, 2156), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2006, 2123), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2006, 2077), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2448, 2503), 'org.springframework.http.ResponseEntity.status'), ((2868, 2902), 'org.springframework.http.ResponseEntity.noContent'), ((3179, 3228), 'org.springframework.http.ResponseEntity.badRequest'), ((3469, 3588), 'org.springframework.http.ResponseEntity.status')]
package com.technoguyfication.admingpt; import java.io.InputStream; import java.io.InputStreamReader; import java.time.Duration; import java.util.LinkedList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Stream; import org.bstats.bukkit.Metrics; import org.bstats.charts.SimplePie; import org.bstats.charts.SingleLineChart; import org.bukkit.Bukkit; import org.bukkit.ChatColor; import org.bukkit.configuration.file.FileConfiguration; import org.bukkit.configuration.file.YamlConfiguration; import org.bukkit.event.EventException; import org.bukkit.event.EventHandler; import org.bukkit.event.Listener; import org.bukkit.event.player.AsyncPlayerChatEvent; import org.bukkit.plugin.java.JavaPlugin; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionResult; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; public class AdminGPT extends JavaPlugin implements Listener { Pattern responsePattern = Pattern.compile("<([ctp])>\\/?(.*)<\\/[ctp]>"); OpenAiService service; LinkedList<ChatMessage> messageHistory = new LinkedList<ChatMessage>(); String systemPrompt; String languageModel; int historyLength; long timeoutSeconds; Double temperature; List<String> commandBlacklist; // metrics int totalMessages = 0; int totalCommands = 0; int totalResponses = 0; @Override public void onEnable() { // bStats int pluginId = 18196; Metrics metrics = new Metrics(this, pluginId); FileConfiguration config = this.getConfig(); InputStream langStream = this.getResource("lang.yml"); // Load lang.yml YamlConfiguration langConfig = new YamlConfiguration(); try { langConfig.load(new InputStreamReader(langStream)); // Load system prompt from lang.yml systemPrompt = langConfig.getString("openai-system-prompt"); } catch (Exception e) { getLogger().severe("Failed to load lang.yml file."); e.printStackTrace(); // Disable plugin this.setEnabled(false); return; } // Load config String apiKey = config.getString("openai-api-key"); if (apiKey == null || apiKey.isBlank() || apiKey.equals("your-api-key-here")) { getLogger().severe("No OpenAI API key found in config.yml. 
Please add one and restart the server."); // Save default config this.saveDefaultConfig(); // Disable plugin this.setEnabled(false); return; } languageModel = config.getString("openai-language-model"); temperature = config.getDouble("openai-model-temperature"); timeoutSeconds = config.getLong("openai-timeout-secs"); historyLength = config.getInt("history-length"); commandBlacklist = config.getStringList("command-blacklist"); // Add bStats charts metrics.addCustomChart(new SimplePie("language-model", () -> languageModel)); metrics.addCustomChart(new SingleLineChart("messages-sent", () -> { var total = totalMessages; totalMessages = 0; return total; })); metrics.addCustomChart(new SingleLineChart("commands-run", () -> { var total = totalCommands; totalCommands = 0; return total; })); metrics.addCustomChart(new SingleLineChart("responses-received", () -> { var total = totalResponses; totalResponses = 0; return total; })); // Create OpenAI service service = new OpenAiService(apiKey, Duration.ofSeconds(timeoutSeconds)); // set response timeout // Register event listeners getServer().getPluginManager().registerEvents(this, this); // Startup messages getLogger().info("Command blacklist: " + String.join(", ", commandBlacklist)); } @Override public void onDisable() { // Plugin disabled } @EventHandler public void onChat(AsyncPlayerChatEvent event) throws EventException { // Increment total messages counter totalMessages++; // Add new message to list addChatMessage(new ChatMessage(ChatMessageRole.USER.value(), String.format("%s: %s", event.getPlayer().getName(), event.getMessage()))); // Replace placeholders in the system prompt String templatedSystemPrompt = systemPrompt .replace("{plugins}", String.join(", ", Stream.of(Bukkit.getPluginManager().getPlugins()).map(p -> p.getName()) .toArray(String[]::new))) .replace("{players}", String.join(", ", Bukkit.getOnlinePlayers().stream().map(p -> p.getName()).toArray(String[]::new))) .replace("{version}", Bukkit.getVersion()); // Make a new list with the system prompt and all messages List<ChatMessage> messages = new LinkedList<ChatMessage>(); messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), templatedSystemPrompt)); messages.addAll(messageHistory); // Create a chat completion request ChatCompletionRequest request = ChatCompletionRequest .builder() .model(languageModel) .messages(messages) .user(event.getPlayer().getUniqueId().toString()) .temperature(temperature) .build(); getLogger().fine("Sending chat completion request to OpenAI..."); Bukkit.getScheduler().runTaskAsynchronously(this, () -> { ChatCompletionResult result = service.createChatCompletion(request); ChatMessage responseMessage = result.getChoices().get(0).getMessage(); getLogger().fine("Received chat completion result from OpenAI."); List<String> commands = new LinkedList<String>(); List<String> responses = new LinkedList<String>(); // Run regex on each line of the result for (String line : responseMessage.getContent().split("\\r?\\n")) { Matcher matcher = responsePattern.matcher(line); if (matcher.find()) { switch (matcher.group(1)) { case "c": String command = matcher.group(2); getLogger().info(String.format("Command: %s", command)); commands.add(command); break; case "t": String thought = matcher.group(2); getLogger().info(String.format("Thought: %s", thought)); break; case "p": String response = matcher.group(2); getLogger().info(String.format("Response: %s", response)); responses.add(response); break; default: getLogger().warning(String.format("Invalid response pattern: %s", 
line)); break; } } } // Run the rest of the code on the main thread Bukkit.getScheduler().runTask(this, () -> { // Add commands and responses to total counters totalCommands += commands.size(); totalResponses += responses.size(); // add the result to the list of messages addChatMessage(responseMessage); // Run the commands for (String command : commands) { // Check if command is blacklisted String rootCommand = command.split(" ")[0]; if (commandBlacklist.contains(rootCommand.toLowerCase())) { getLogger().warning(String.format("Command %s is blacklisted.", command)); continue; } Bukkit.dispatchCommand(Bukkit.getConsoleSender(), command); } // Broadcast response lines for (String response : responses) { Bukkit.broadcastMessage(ChatColor.AQUA + String.format("<AdminGPT> %s", response)); } }); }); } private void addChatMessage(ChatMessage message) { // Remove oldest message if list is full if (messageHistory.size() >= historyLength) { messageHistory.removeFirst(); } // Add new message to list messageHistory.add(message); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((4511, 4539), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((4851, 4986), 'java.util.stream.Stream.of'), ((4851, 4922), 'java.util.stream.Stream.of'), ((4861, 4899), 'org.bukkit.Bukkit.getPluginManager'), ((5101, 5180), 'org.bukkit.Bukkit.getOnlinePlayers'), ((5101, 5157), 'org.bukkit.Bukkit.getOnlinePlayers'), ((5101, 5135), 'org.bukkit.Bukkit.getOnlinePlayers'), ((5416, 5446), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((5940, 8809), 'org.bukkit.Bukkit.getScheduler'), ((7705, 8797), 'org.bukkit.Bukkit.getScheduler')]
package com.vission.chatGPT.service; import com.google.common.collect.Lists; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionResult; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import com.vission.chatGPT.properties.ChatGPTProperties; import com.vission.chatGPT.utils.BeanUtils; import com.vission.chatGPT.utils.JsonUtils; import com.vission.chatGPT.utils.RedisUtils; import java.util.List; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.springframework.stereotype.Service; @Service @Slf4j @RequiredArgsConstructor public class ChatGPTService { private final ChatGPTProperties properties; private final OpenAiService openAiService; private final RedisUtils redisUtils; /** * 翻译助手 * * @param original 原文 * @return 翻译结果 */ public String translation(String original) { StringBuilder completion = new StringBuilder(); ChatMessage newQuestionMessage = new ChatMessage(ChatMessageRole.USER.value(), original); ChatMessage system = new ChatMessage(ChatMessageRole.SYSTEM.value(), "你是一个翻译助手,将我说的所有话翻译成中文"); ChatCompletionRequest request = ChatCompletionRequest.builder() .model("gpt-3.5-turbo") .messages(Lists.newArrayList(system, newQuestionMessage)) .build(); ChatCompletionResult chatCompletion = openAiService.createChatCompletion(request); List<ChatCompletionChoice> choices = chatCompletion.getChoices(); for (ChatCompletionChoice choice : choices) { completion.append(choice.getMessage().getContent()); } return completion.toString(); } /** * 聊天 不会保存上下文聊天 * * @param original 原文 * @return 翻译结果 */ public String chatCompletion(String original) { StringBuilder completion = new StringBuilder(); ChatMessage newQuestionMessage = new ChatMessage(ChatMessageRole.USER.value(), original); ChatCompletionRequest request = ChatCompletionRequest.builder() .model("gpt-3.5-turbo") .messages(Lists.newArrayList(newQuestionMessage)) .build(); ChatCompletionResult chatCompletion = openAiService.createChatCompletion(request); List<ChatCompletionChoice> choices = chatCompletion.getChoices(); for (ChatCompletionChoice choice : choices) { completion.append(choice.getMessage().getContent()); } return completion.toString(); } /** * 聊天 会保存上下文聊天 * * @param original 原文 * @param userUuid 用户唯一标识 * @return 翻译结果 */ public String chatCompletionByContext(String original, String userUuid) { List<ChatMessage> messages = findChatMessagesByUuid(userUuid); int messageCount = (int) messages.stream().map(ChatMessage::getRole) .filter(t -> StringUtils.equals(t, ChatMessageRole.USER.value())).count(); if (messageCount > properties.getChatGptFlowNum()) { redisUtils.del(userUuid); return "您的连续对话已超过上限,系统已自动清空上下文"; } StringBuilder result = new StringBuilder(); ChatMessage newMessage = new ChatMessage(ChatMessageRole.USER.value(), original); messages.add(newMessage); ChatCompletionRequest request = ChatCompletionRequest.builder() .model("gpt-3.5-turbo").messages(messages).build(); ChatGPTService.log.info("request:{}", JsonUtils.toJson(request)); ChatCompletionResult chatCompletion = openAiService.createChatCompletion(request); List<ChatCompletionChoice> choices = chatCompletion.getChoices(); for (ChatCompletionChoice choice : choices) { messages.add(choice.getMessage()); 
result.append(choice.getMessage().getContent()); } redisUtils.set(userUuid, messages, 1800); return result.toString(); } private List<ChatMessage> findChatMessagesByUuid(String userUuid) { List result = redisUtils.getList(userUuid); return BeanUtils.deepCopyList(result, ChatMessage.class); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1310, 1338), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((1396, 1426), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1552, 1722), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1552, 1697), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1552, 1623), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2351, 2379), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((2432, 2594), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2432, 2569), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2432, 2503), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3374, 3402), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3711, 3739), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3826, 3924), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3826, 3916), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3826, 3897), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package cos.peerna.domain.gpt.service; import com.amazonaws.services.kms.model.NotFoundException; import com.fasterxml.jackson.databind.ObjectMapper; import com.theokanning.openai.completion.chat.ChatCompletionChunk; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import cos.peerna.domain.gpt.dto.request.SendMessageRequest; import cos.peerna.domain.gpt.event.ReviewReplyEvent; import cos.peerna.domain.gpt.model.GPT; import cos.peerna.domain.history.model.History; import cos.peerna.domain.history.repository.HistoryRepository; import cos.peerna.domain.reply.model.Reply; import cos.peerna.domain.reply.repository.ReplyRepository; import cos.peerna.domain.room.model.Chat; import cos.peerna.domain.room.repository.ChatRepository; import cos.peerna.global.security.dto.SessionUser; import java.util.ArrayList; import java.util.List; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.data.redis.core.RedisTemplate; import org.springframework.messaging.simp.SimpMessagingTemplate; import org.springframework.stereotype.Service; @Slf4j @Service @RequiredArgsConstructor public class GPTService { private final ReplyRepository replyRepository; private final SimpMessagingTemplate template; private final RedisTemplate<String, Object> redisTemplate; private final ObjectMapper objectMapper; private final OpenAiService openAIService; private final ChatRepository chatRepository; private final HistoryRepository historyRepository; /* TODO: Async 로 변경 */ public void reviewReply(ReviewReplyEvent event) { /* TODO: 사용자의 권한에 따른 gpt 모델 선택 */ ChatMessage systemMessage = new ChatMessage("system", GPT.getConcept(event.question())); ChatMessage userMessage = new ChatMessage("user", event.answer()); StringBuilder assistantMessageBuilder = new StringBuilder(); openAIService.streamChatCompletion(ChatCompletionRequest.builder() .model(GPT.getModel()) .messages(List.of( systemMessage, userMessage )) .build()) .doOnError(throwable -> sendErrorMessage(event.userId())) .blockingForEach(chunk -> sendChatMessage(chunk, event.userId(), assistantMessageBuilder)); ChatMessage assistantMessage = new ChatMessage("assistant", assistantMessageBuilder.toString()); redisTemplate.opsForList().rightPush(String.valueOf(event.historyId()), systemMessage); redisTemplate.opsForList().rightPush(String.valueOf(event.historyId()), userMessage); redisTemplate.opsForList().rightPush(String.valueOf(event.historyId()), assistantMessage); History history = historyRepository.findById(event.historyId()) .orElseThrow(() -> new NotFoundException("history not found")); chatRepository.save(Chat.builder() .writerId(0L) .content(assistantMessageBuilder.toString()) .history(history) .build()); } /* TODO: Async 로 변경 */ public void sendMessage(SessionUser user, SendMessageRequest request) { Reply lastReply = replyRepository.findFirstByUserIdOrderByIdDesc(user.getId()) .orElseThrow(() -> new NotFoundException("reply not found")); List<ChatMessage> messages = getChatMessages(lastReply.getHistory().getId()); ChatMessage userMessage = new ChatMessage("user", request.message()); messages.add(userMessage); StringBuilder assistantMessageBuilder = new StringBuilder(); openAIService.streamChatCompletion(ChatCompletionRequest.builder() .model(GPT.getModel()) .messages(messages) .build()) .doOnError(throwable -> sendErrorMessage(user.getId())) .blockingForEach(chunk -> sendChatMessage(chunk, 
user.getId(), assistantMessageBuilder)); ChatMessage assistantMessage = new ChatMessage("assistant", assistantMessageBuilder.toString()); redisTemplate.opsForList().rightPush(String.valueOf(lastReply.getHistory().getId()), userMessage); redisTemplate.opsForList().rightPush(String.valueOf(lastReply.getHistory().getId()), assistantMessage); chatRepository.save(Chat.builder() .writerId(user.getId()) .content(request.message()) .history(lastReply.getHistory()) .build()); chatRepository.save(Chat.builder() .writerId(0L) .content(assistantMessageBuilder.toString()) .history(lastReply.getHistory()) .build()); } private List<ChatMessage> getChatMessages(Long historyId) { List<Object> messageObjects = redisTemplate.opsForList().range(String.valueOf(historyId), 0, -1); List<ChatMessage> messages = new ArrayList<>(); if (messageObjects == null) { throw new NotFoundException("messageObjects is null"); } for (Object messageObject : messageObjects) { ChatMessage chatMessage = objectMapper.convertValue(messageObject, ChatMessage.class); messages.add(chatMessage); } return messages; } private void sendChatMessage(ChatCompletionChunk chunk, Long userId, StringBuilder assistantMessageBuilder) { /* TODO: stream 이 끝나면, gpt 답변 전체를 저장 TODO: gpt에게서 오는 chunk의 순서가 보장되지 않음 */ String message = chunk.getChoices().get(0).getMessage().getContent(); if (message == null) { template.convertAndSend("/user/" + userId + "/gpt", GPT.getENDMessage()); return; } template.convertAndSend("/user/" + userId + "/gpt", message); assistantMessageBuilder.append(message); } private void sendErrorMessage(Long userId) { template.convertAndSend("/user/" + userId + "/gpt", GPT.getErrorMessage()); } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((2107, 2379), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2107, 2346), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2107, 2185), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3139, 3303), 'cos.peerna.domain.room.model.Chat.builder'), ((3139, 3278), 'cos.peerna.domain.room.model.Chat.builder'), ((3139, 3244), 'cos.peerna.domain.room.model.Chat.builder'), ((3139, 3183), 'cos.peerna.domain.room.model.Chat.builder'), ((3909, 4064), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3909, 4031), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3909, 3987), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4598, 4770), 'cos.peerna.domain.room.model.Chat.builder'), ((4598, 4745), 'cos.peerna.domain.room.model.Chat.builder'), ((4598, 4696), 'cos.peerna.domain.room.model.Chat.builder'), ((4598, 4652), 'cos.peerna.domain.room.model.Chat.builder'), ((4801, 4980), 'cos.peerna.domain.room.model.Chat.builder'), ((4801, 4955), 'cos.peerna.domain.room.model.Chat.builder'), ((4801, 4906), 'cos.peerna.domain.room.model.Chat.builder'), ((4801, 4845), 'cos.peerna.domain.room.model.Chat.builder')]
package link.locutus.discord.gpt.imps; import com.knuddels.jtokkit.api.Encoding; import com.knuddels.jtokkit.api.EncodingRegistry; import com.knuddels.jtokkit.api.ModelType; import com.theokanning.openai.service.OpenAiService; import com.theokanning.openai.embedding.Embedding; import com.theokanning.openai.embedding.EmbeddingRequest; import com.theokanning.openai.embedding.EmbeddingResult; import link.locutus.discord.db.AEmbeddingDatabase; import link.locutus.discord.gpt.pw.GptDatabase; import java.sql.SQLException; import java.util.List; public class AdaEmbedding extends AEmbeddingDatabase { private final EncodingRegistry registry; private final Encoding embeddingEncoder; private final OpenAiService service; public AdaEmbedding(EncodingRegistry registry, OpenAiService service, GptDatabase database) throws SQLException, ClassNotFoundException { super("ada", database); this.registry = registry; this.service = service; this.embeddingEncoder = registry.getEncodingForModel(ModelType.TEXT_EMBEDDING_ADA_002); } public int getEmbeddingTokenSize(String text) { return embeddingEncoder.encode(text).size(); } @Override public float[] fetchEmbedding(String text) { EmbeddingRequest request = EmbeddingRequest.builder() .model("text-embedding-ada-002") .input(List.of(text)) .build(); EmbeddingResult embedResult = service.createEmbeddings(request); List<Embedding> data = embedResult.getData(); if (data.size() != 1) { throw new RuntimeException("Expected 1 embedding, got " + data.size()); } List<Double> result = data.get(0).getEmbedding(); float[] target = new float[result.size()]; for (int i = 0; i < target.length; i++) { target[i] = result.get(i).floatValue(); } return target; } }
[ "com.theokanning.openai.embedding.EmbeddingRequest.builder" ]
[((1288, 1426), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((1288, 1401), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((1288, 1363), 'com.theokanning.openai.embedding.EmbeddingRequest.builder')]
package com.redis.vss; import redis.clients.jedis.JedisPooled; import redis.clients.jedis.Protocol; import redis.clients.jedis.search.Document; import redis.clients.jedis.search.IndexDefinition; import redis.clients.jedis.search.IndexOptions; import redis.clients.jedis.search.Query; import redis.clients.jedis.search.Schema; import redis.clients.jedis.search.SearchResult; import redis.clients.jedis.util.SafeEncoder; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.regex.Pattern; import java.util.stream.Collectors; import com.opencsv.CSVReader; import com.opencsv.CSVReaderBuilder; import com.theokanning.openai.embedding.EmbeddingRequest; import com.theokanning.openai.service.OpenAiService; /** * Java VSS Wiki Articles Example * * @author Michael Yuan */ public class JavaVSSWikiArticlesExample { // Redis client connection private static JedisPooled client = null; // OpenAI connection private static OpenAiService service = null; // Model private static String MODEL = "text-embedding-ada-002"; private static int VECTOR_DIM = 1536; // length of the vectors private static int VECTOR_NUMBER = 25000; // initial number of vectors private static String INDEX_NAME = "idx_wiki"; // name of the search index private static String INDEX_NAME_HNSW = "idx_wiki_hnsw"; // name of the search index private static String PREFIX = "wiki"; // prefix for the document keys private static String DISTANCE_METRIC = "COSINE"; // prefix for the document keys private JavaVSSWikiArticlesExample() { try { // Initialize Redis connection InputStream input = ClassLoader.getSystemResourceAsStream("config.properties"); Properties prop = new Properties(); prop.load(input); client = new JedisPooled(prop.getProperty("redis.host"), Integer.parseInt(prop.getProperty("redis.port"))); // Initialize OpenAI service connection String token = System.getenv("OPENAI_API_KEY"); service = new OpenAiService(token); // client = new JedisPooled(prop.getProperty("redis.host"), // Integer.parseInt(prop.getProperty("redis.port")), // prop.getProperty("redis.user"), // prop.getProperty("redis.password")); Object result = client.sendCommand(Protocol.Command.PING, "Connected to Redis..."); System.out.println(SafeEncoder.encode((byte[]) result)); } catch (Exception ex) { ex.printStackTrace(); } } private void createFlatIndex() { try { // Drop index if exists try { client.ftDropIndex(INDEX_NAME); } catch (Exception e) { } ; System.out.println("Creating Flat index..."); HashMap<String, Object> attr = new HashMap<String, Object>(); attr.put("TYPE", "FLOAT64"); attr.put("DIM", VECTOR_DIM); attr.put("DISTANCE_METRIC", DISTANCE_METRIC); attr.put("INITIAL_CAP", VECTOR_NUMBER); // Define index schema Schema schema = new Schema().addNumericField("id") .addTextField("title", 3.0).as("title") .addTextField("url", 1.0).as("url") .addTextField("text", 2.0).as("text") .addVectorField("title_vector", Schema.VectorField.VectorAlgo.FLAT, attr).as("title_vector") .addVectorField("content_vector", Schema.VectorField.VectorAlgo.FLAT, attr).as("content_vector"); IndexDefinition rule = new IndexDefinition(IndexDefinition.Type.HASH) .setPrefixes(new String[] { "wiki:" }); client.ftCreate(INDEX_NAME, IndexOptions.defaultOptions().setDefinition(rule), schema); } catch (Exception ex) { ex.printStackTrace(); } 
} private void createHNSWIndex() { try { // Drop index if exists try { client.ftDropIndex(INDEX_NAME_HNSW); } catch (Exception e) { } ; System.out.println("Creating HNSW index..."); HashMap<String, Object> attr = new HashMap<String, Object>(); attr.put("TYPE", "FLOAT64"); attr.put("DIM", VECTOR_DIM); attr.put("DISTANCE_METRIC", DISTANCE_METRIC); attr.put("INITIAL_CAP", VECTOR_NUMBER); // Define index schema Schema schema = new Schema().addNumericField("id") .addTextField("title", 3.0).as("title") .addTextField("url", 1.0).as("url") .addTextField("text", 2.0).as("text") .addVectorField("title_vector", Schema.VectorField.VectorAlgo.HNSW, attr).as("title_vector") .addVectorField("content_vector", Schema.VectorField.VectorAlgo.HNSW, attr).as("content_vector"); IndexDefinition rule = new IndexDefinition(IndexDefinition.Type.HASH) .setPrefixes(new String[] { "wiki:" }); client.ftCreate(INDEX_NAME_HNSW, IndexOptions.defaultOptions().setDefinition(rule), schema); } catch (Exception ex) { ex.printStackTrace(); } } /** * @param csvFile * Load data from csv file to Redis hashes */ private void loadData(String csvFile) { System.out.println("Loading data in Redis..."); try { FileInputStream input = new FileInputStream(csvFile); String[] record = null; String key; try (CSVReader reader = new CSVReaderBuilder(new InputStreamReader(input)).withSkipLines(1).build()) { while ((record = reader.readNext()) != null) { key = PREFIX + ":" + record[0]; double[] title_vector = Pattern.compile(", ") .splitAsStream(record[4].replaceAll("\\[", "").replaceAll("\\]", "")) .map(elem -> Double.parseDouble(elem)) .collect(Collectors.toList()) .stream().mapToDouble(Double::doubleValue).toArray(); double[] content_vector = Pattern.compile(", ") .splitAsStream(record[5].replaceAll("\\[", "").replaceAll("\\]", "")) .map(elem -> Double.parseDouble(elem)) .collect(Collectors.toList()) .stream().mapToDouble(Double::doubleValue).toArray(); Map<byte[], byte[]> map = new HashMap<>(); map.put("id".getBytes(), record[0].getBytes()); map.put("url".getBytes(), record[1].getBytes()); map.put("title".getBytes(), record[2].getBytes()); map.put("text".getBytes(), record[3].getBytes()); map.put("title_vector".getBytes(), doubleToByte(title_vector)); map.put("content_vector".getBytes(), doubleToByte(content_vector)); map.put("vector_id".getBytes(), record[6].getBytes()); client.hset(key.getBytes(), map); } } } catch (Exception ex) { ex.printStackTrace(); } } /** * @param input * @return byte[] */ public byte[] doubleToByte(double[] input) { ByteBuffer buffer = ByteBuffer.allocate(input.length * Double.BYTES); buffer.order(ByteOrder.LITTLE_ENDIAN); buffer.asDoubleBuffer().put(input); return buffer.array(); } public void searchRedis(String indexName, String queryString, String vector_field, int k) { // Build OpenAI embedding request EmbeddingRequest embeddingRequest = EmbeddingRequest.builder() .model(MODEL) .input(Collections.singletonList(queryString)) .build(); // Get vector embeddings from Open AI service double[] embedding = service.createEmbeddings(embeddingRequest).getData().get(0).getEmbedding() .stream().mapToDouble(Double::doubleValue).toArray(); // Build query Query q = new Query("*=>[KNN $k @" + vector_field + "$vec AS vector_score]") .setSortBy("vector_score", true) .addParam("k", k) .addParam("vec", doubleToByte(embedding)) .limit(0, k) .dialect(2); // Get and iterate over search results SearchResult res = client.ftSearch(indexName, q); List<Document> wikis = res.getDocuments(); int i = 1; for (Document wiki : wikis) { float 
score = Float.parseFloat((String) wiki.get("vector_score")); System.out.println(i + ". " + wiki.get("title") + " (Score: " + (1 - score) + ")"); i++; } } /** * Run Redis VSS search examples using wiki articles. * * @param args The arguments of the program. */ public static void main(String[] args) { // Zip archive of wiki articles with OpenAI embeddings String fileUrl = "https://cdn.openai.com/API/examples/data/vector_database_wikipedia_articles_embedded.zip"; String saveAt = "/tmp/vector_database_wikipedia_articles_embedded.zip"; // CSV file of wiki articles with OpenAI embeddings String csvFile = "/tmp/vector_database_wikipedia_articles_embedded.csv"; // Download and unzip csv file of wiki articles with OpenAI embeddings try { System.out.println("Downloading and unzipping csv file..."); LoadOpenAIData.downloadUsingNIO(fileUrl, saveAt); LoadOpenAIData.unzipZip4j(saveAt, "/tmp"); } catch (IOException e) { e.printStackTrace(); } JavaVSSWikiArticlesExample vssArticles = new JavaVSSWikiArticlesExample(); vssArticles.createFlatIndex(); vssArticles.createHNSWIndex(); vssArticles.loadData(csvFile); System.out.println("### VSS query: 'modern art in Europe' in 'title_vector'"); vssArticles.searchRedis(INDEX_NAME, "modern art in Europe", "title_vector", 10); System.out.println("### VSS query: 'modern art in Europe' in 'title_vector'"); vssArticles.searchRedis(INDEX_NAME_HNSW, "modern art in Europe", "title_vector", 10); System.out.println("### VSS query: 'Famous battles in Scottish history' in 'content_vector'"); vssArticles.searchRedis(INDEX_NAME, "Famous battles in Scottish history", "content_vector", 10); } }
[ "com.theokanning.openai.embedding.EmbeddingRequest.builder" ]
[((4075, 4124), 'redis.clients.jedis.search.IndexOptions.defaultOptions'), ((5457, 5506), 'redis.clients.jedis.search.IndexOptions.defaultOptions'), ((6208, 6533), 'java.util.regex.Pattern.compile'), ((6208, 6523), 'java.util.regex.Pattern.compile'), ((6208, 6490), 'java.util.regex.Pattern.compile'), ((6208, 6452), 'java.util.regex.Pattern.compile'), ((6208, 6394), 'java.util.regex.Pattern.compile'), ((6208, 6327), 'java.util.regex.Pattern.compile'), ((6582, 6907), 'java.util.regex.Pattern.compile'), ((6582, 6897), 'java.util.regex.Pattern.compile'), ((6582, 6864), 'java.util.regex.Pattern.compile'), ((6582, 6826), 'java.util.regex.Pattern.compile'), ((6582, 6768), 'java.util.regex.Pattern.compile'), ((6582, 6701), 'java.util.regex.Pattern.compile'), ((8173, 8317), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((8173, 8292), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((8173, 8229), 'com.theokanning.openai.embedding.EmbeddingRequest.builder')]
package com.asleepyfish.strategy.event; import com.alibaba.fastjson2.JSONObject; import com.asleepyfish.dto.AiQa; import com.asleepyfish.enums.WxMessageType; import com.asleepyfish.repository.AiQaRepository; import com.asleepyfish.strategy.WxEventStrategy; import com.google.common.collect.Lists; import com.theokanning.openai.image.CreateImageRequest; import io.github.asleepyfish.enums.ImageResponseFormatEnum; import io.github.asleepyfish.enums.ImageSizeEnum; import io.github.asleepyfish.util.OpenAiUtils; import lombok.extern.slf4j.Slf4j; import me.chanjar.weixin.common.api.WxConsts; import me.chanjar.weixin.common.bean.result.WxMediaUploadResult; import me.chanjar.weixin.mp.api.WxMpService; import me.chanjar.weixin.mp.bean.kefu.WxMpKefuMessage; import org.springframework.stereotype.Service; import javax.annotation.Resource; import javax.servlet.http.HttpServletResponse; import java.io.ByteArrayInputStream; import java.util.Base64; import java.util.List; import java.util.Map; /** * @Author: asleepyfish * @Date: 2022/8/31 19:55 * @Description: 消息策略 */ @Service("text") @Slf4j public class TextStrategy implements WxEventStrategy { @Resource private AiQaRepository aiQaRepository; @Resource private WxMpService wxMpService; @Override public void execute(Map<String, String> requestMap, HttpServletResponse response) throws Exception { // 发送方账号 String openId = requestMap.get("FromUserName"); String acceptContent = requestMap.get("Content"); log.info(">>> 用户输入:{}", acceptContent); // 关闭输出流,避免微信服务端重复发送信息 response.getOutputStream().close(); if (acceptContent.charAt(0) == '/') { createImage(acceptContent, openId); } else { createCompletion(acceptContent, openId); } } private void createCompletion(String acceptContent, String openId) throws Exception { WxMpKefuMessage wxMpKefuMessage = new WxMpKefuMessage(); wxMpKefuMessage.setToUser(openId); wxMpKefuMessage.setMsgType(WxMessageType.TEXT.getType()); List<String> results = Lists.newArrayList(); // 初始化标记status = 0,表示解答成功 int status = 0; try { results = OpenAiUtils.createChatCompletion(acceptContent, openId); } catch (Exception e) { status = -1; log.error(e.getMessage()); results.add(e.getMessage()); } for (String result : results) { if (result.startsWith("?") || result.startsWith("?")) { result = result.substring(1); } result = result.trim(); wxMpKefuMessage.setContent(result); log.info(">>> ChatGPT:{}", result); AiQa aiQa = new AiQa(); aiQa.setUser(openId); aiQa.setQuestion(acceptContent); aiQa.setAnswer(result); aiQa.setStatus(status); aiQaRepository.save(aiQa); // 客服接口发送信息 wxMpService.getKefuService().sendKefuMessage(wxMpKefuMessage); } } private void createImage(String acceptContent, String openId) throws Exception { WxMpKefuMessage wxMpKefuMessage = new WxMpKefuMessage(); wxMpKefuMessage.setToUser(openId); wxMpKefuMessage.setMsgType(WxMessageType.IMAGE.getType()); List<String> results = Lists.newArrayList(); // 初始化标记status = 0,表示解答成功 int status = 0; try { acceptContent = acceptContent.substring(1); results = OpenAiUtils.createImage(CreateImageRequest.builder() .prompt(acceptContent) .size(ImageSizeEnum.S512x512.getSize()) .user(openId) .responseFormat(ImageResponseFormatEnum.B64_JSON.getResponseFormat()) .build()); } catch (Exception e) { status = -1; log.error(e.getMessage()); results.add(e.getMessage()); } for (String result : results) { AiQa aiQa = new AiQa(); aiQa.setUser(openId); aiQa.setQuestion(acceptContent); aiQa.setAnswer(result); aiQa.setStatus(status); aiQaRepository.save(aiQa); if (status == -1) { 
wxMpKefuMessage.setMsgType(WxMessageType.TEXT.getType()); wxMpKefuMessage.setContent("生成图片失败!原因:" + result); wxMpService.getKefuService().sendKefuMessage(wxMpKefuMessage); return; } WxMediaUploadResult wxMediaUploadResult = getMediaUploadResult(result); log.info(">>> 图片上传结果:{}", JSONObject.toJSONString(wxMediaUploadResult)); wxMpKefuMessage.setMediaId(wxMediaUploadResult.getMediaId()); // 客服接口发送信息 wxMpService.getKefuService().sendKefuMessage(wxMpKefuMessage); } } private WxMediaUploadResult getMediaUploadResult(String base64) throws Exception { byte[] imageBytes = Base64.getDecoder().decode(base64); try (ByteArrayInputStream bis = new ByteArrayInputStream(imageBytes)) { return wxMpService.getMaterialService().mediaUpload(WxConsts.MediaFileType.IMAGE, "PNG", bis); } } }
[ "com.theokanning.openai.image.CreateImageRequest.builder" ]
[((2115, 2143), 'com.asleepyfish.enums.WxMessageType.TEXT.getType'), ((3411, 3440), 'com.asleepyfish.enums.WxMessageType.IMAGE.getType'), ((3694, 3978), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((3694, 3949), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((3694, 3859), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((3694, 3825), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((3694, 3765), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((3792, 3824), 'io.github.asleepyfish.enums.ImageSizeEnum.S512x512.getSize'), ((3896, 3948), 'io.github.asleepyfish.enums.ImageResponseFormatEnum.B64_JSON.getResponseFormat'), ((4469, 4497), 'com.asleepyfish.enums.WxMessageType.TEXT.getType'), ((5208, 5242), 'java.util.Base64.getDecoder')]
package com.odde.doughnut.services.ai.tools; import static com.theokanning.openai.service.OpenAiService.defaultObjectMapper; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.kjetland.jackson.jsonSchema.JsonSchemaGenerator; import com.odde.doughnut.controllers.dto.AiCompletionRequiredAction; import com.theokanning.openai.assistants.AssistantFunction; import com.theokanning.openai.assistants.AssistantToolsEnum; import com.theokanning.openai.assistants.Tool; import com.theokanning.openai.runs.ToolCall; import com.theokanning.openai.runs.ToolCallFunction; import java.util.Map; import java.util.function.Function; import java.util.stream.Stream; public record AiTool( String name, String description, Class<?> parameterClass, Function<Object, AiCompletionRequiredAction> executor) { public static <T> AiTool build( String name, String description, Class<T> parameterClass, Function<T, AiCompletionRequiredAction> executor) { return new AiTool( name, description, parameterClass, (arguments) -> executor.apply((T) arguments)); } public Tool getTool() { return new Tool( AssistantToolsEnum.FUNCTION, AssistantFunction.builder() .name(name) .description(description) .parameters(serializeClassSchema(parameterClass)) .build()); } private static Map<String, Object> serializeClassSchema(Class<?> value) { ObjectMapper objectMapper = new ObjectMapper(); JsonSchemaGenerator jsonSchemaGenerator = new JsonSchemaGenerator(objectMapper); JsonNode jsonSchema = jsonSchemaGenerator.generateJsonSchema(value); JsonNode jsonNode = objectMapper.valueToTree(jsonSchema); return objectMapper.convertValue(jsonNode, Map.class); } public Stream<AiCompletionRequiredAction> tryConsume(ToolCall toolCall) { ToolCallFunction function = toolCall.getFunction(); if (name.equals(function.getName())) { return Stream.of(executor.apply(convertArguments(function))); } return Stream.empty(); } private Object convertArguments(ToolCallFunction function) { String arguments = function.getArguments(); try { JsonNode jsonNode = defaultObjectMapper().readTree(arguments); return defaultObjectMapper().treeToValue(jsonNode, parameterClass); } catch (JsonProcessingException e) { throw new RuntimeException(e); } } }
[ "com.theokanning.openai.assistants.AssistantFunction.builder" ]
[((1303, 1475), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((1303, 1454), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((1303, 1392), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((1303, 1354), 'com.theokanning.openai.assistants.AssistantFunction.builder')]
/* * Copyright 2008-2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package egovframework.example.sample.web; import java.awt.Choice; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import egovframework.example.API.Keys; import egovframework.example.sample.service.EgovSampleService; import egovframework.example.sample.service.SampleDefaultVO; import egovframework.example.sample.service.SampleVO; import egovframework.rte.fdl.property.EgovPropertyService; import egovframework.rte.ptl.mvc.tags.ui.pagination.PaginationInfo; import javax.annotation.Resource; import javax.servlet.ServletContext; import javax.servlet.annotation.MultipartConfig; import javax.servlet.http.HttpServletRequest; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.ui.ModelMap; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RequestPart; import org.springframework.web.bind.support.SessionStatus; import org.springframework.web.multipart.MultipartFile; import org.springmodules.validation.commons.DefaultBeanValidator; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; import com.theokanning.openai.audio.CreateTranscriptionRequest; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; /** * @Class Name : EgovSampleController.java * @Description : EgovSample Controller Class * @Modification Information * @ * @ 수정일 수정자 수정내용 * @ --------- --------- ------------------------------- * @ 2009.03.16 최초생성 * * @author 개발프레임웍크 실행환경 개발팀 * @since 2009. 03.16 * @version 1.0 * @see * * Copyright (C) by MOPAS All right reserved. 
*/ @Controller @MultipartConfig( maxFileSize = 1024 * 1024 * 25, // 최대 25MB 파일 크기 maxRequestSize = 1024 * 1024 * 25, // 최대 25MB 요청 크기 fileSizeThreshold = 1024 * 1024 // 1MB 이상부터 디스크에 저장 ) public class EgovSampleController { private static final Logger logger = LogManager.getLogger(EgovSampleController.class); private final String UPLOAD_DIR = "uploads"; /** EgovSampleService */ @Resource(name = "sampleService") private EgovSampleService sampleService; /** EgovPropertyService */ @Resource(name = "propertiesService") protected EgovPropertyService propertiesService; /** Validator */ @Resource(name = "beanValidator") protected DefaultBeanValidator beanValidator; /** * 글 목록을 조회한다. (pageing) * @param searchVO - 조회할 정보가 담긴 SampleDefaultVO * @param model * @return "egovSampleList" * @exception Exception */ @RequestMapping(value = "/egovSampleList.do") public String selectSampleList(@ModelAttribute("searchVO") SampleDefaultVO searchVO, ModelMap model) throws Exception { /** EgovPropertyService.sample */ searchVO.setPageUnit(propertiesService.getInt("pageUnit")); searchVO.setPageSize(propertiesService.getInt("pageSize")); /** pageing setting */ PaginationInfo paginationInfo = new PaginationInfo(); paginationInfo.setCurrentPageNo(searchVO.getPageIndex()); paginationInfo.setRecordCountPerPage(searchVO.getPageUnit()); paginationInfo.setPageSize(searchVO.getPageSize()); searchVO.setFirstIndex(paginationInfo.getFirstRecordIndex()); searchVO.setLastIndex(paginationInfo.getLastRecordIndex()); searchVO.setRecordCountPerPage(paginationInfo.getRecordCountPerPage()); List<?> sampleList = sampleService.selectSampleList(searchVO); model.addAttribute("resultList", sampleList); int totCnt = sampleService.selectSampleListTotCnt(searchVO); paginationInfo.setTotalRecordCount(totCnt); model.addAttribute("paginationInfo", paginationInfo); return "sample/egovSampleList"; } /** * 글 등록 화면을 조회한다. * @param searchVO - 목록 조회조건 정보가 담긴 VO * @param model * @return "egovSampleRegister" * @exception Exception */ @RequestMapping(value = "/addSample.do", method = RequestMethod.GET) public String addSampleView(@ModelAttribute("searchVO") SampleDefaultVO searchVO, Model model) throws Exception { model.addAttribute("sampleVO", new SampleVO()); return "sample/egovSampleRegister"; } /** * 글을 등록한다. * @param sampleVO - 등록할 정보가 담긴 VO * @param searchVO - 목록 조회조건 정보가 담긴 VO * @param status * @return "forward:/egovSampleList.do" * @exception Exception */ @RequestMapping(value = "/addSample.do", method = RequestMethod.POST) public String addSample(@ModelAttribute("searchVO") SampleDefaultVO searchVO, SampleVO sampleVO, BindingResult bindingResult, Model model, SessionStatus status) throws Exception { // Server-Side Validation beanValidator.validate(sampleVO, bindingResult); if (bindingResult.hasErrors()) { model.addAttribute("sampleVO", sampleVO); return "sample/egovSampleRegister"; } sampleService.insertSample(sampleVO); status.setComplete(); return "forward:/egovSampleList.do"; } /** * 글 수정화면을 조회한다. * @param id - 수정할 글 id * @param searchVO - 목록 조회조건 정보가 담긴 VO * @param model * @return "egovSampleRegister" * @exception Exception */ @RequestMapping("/updateSampleView.do") public String updateSampleView(@RequestParam("selectedId") String id, @ModelAttribute("searchVO") SampleDefaultVO searchVO, Model model) throws Exception { SampleVO sampleVO = new SampleVO(); sampleVO.setId(id); // 변수명은 CoC 에 따라 sampleVO model.addAttribute(selectSample(sampleVO, searchVO)); return "sample/egovSampleRegister"; } /** * 글을 조회한다. 
* @param sampleVO - 조회할 정보가 담긴 VO * @param searchVO - 목록 조회조건 정보가 담긴 VO * @param status * @return @ModelAttribute("sampleVO") - 조회한 정보 * @exception Exception */ public SampleVO selectSample(SampleVO sampleVO, @ModelAttribute("searchVO") SampleDefaultVO searchVO) throws Exception { return sampleService.selectSample(sampleVO); } /** * 글을 수정한다. * @param sampleVO - 수정할 정보가 담긴 VO * @param searchVO - 목록 조회조건 정보가 담긴 VO * @param status * @return "forward:/egovSampleList.do" * @exception Exception */ @RequestMapping("/updateSample.do") public String updateSample(@ModelAttribute("searchVO") SampleDefaultVO searchVO, SampleVO sampleVO, BindingResult bindingResult, Model model, SessionStatus status) throws Exception { beanValidator.validate(sampleVO, bindingResult); if (bindingResult.hasErrors()) { model.addAttribute("sampleVO", sampleVO); return "sample/egovSampleRegister"; } sampleService.updateSample(sampleVO); status.setComplete(); return "forward:/egovSampleList.do"; } /** * 글을 삭제한다. * @param sampleVO - 삭제할 정보가 담긴 VO * @param searchVO - 목록 조회조건 정보가 담긴 VO * @param status * @return "forward:/egovSampleList.do" * @exception Exception */ @RequestMapping("/deleteSample.do") public String deleteSample(SampleVO sampleVO, @ModelAttribute("searchVO") SampleDefaultVO searchVO, SessionStatus status) throws Exception { sampleService.deleteSample(sampleVO); status.setComplete(); return "forward:/egovSampleList.do"; } @RequestMapping("/file.do") public String fileReg() throws Exception { return "sample/file"; } //static String englishAudioFilePath = "/Users/jiuhyeong/Documents/Handong/capstone1/Dani_california.mp3"; //static String englishAudioFilePath = "/Users/jiuhyeong/Documents/Handong/capstone1/interview.mp4"; //requestparam으로 임시로 저장한 파일의 위치를 string으로 받은 후 whisper에게 전사를 맡김, 임시 파일 삭제? @RequestMapping(value = "/file.do", method = RequestMethod.POST) public String createTranscription(@RequestParam String absolutePath, Model model) { OpenAiService service = new OpenAiService(Keys.OPENAPI_KEY,Duration.ofMinutes(9999)); CreateTranscriptionRequest createTranscriptionRequest = CreateTranscriptionRequest.builder() .model("whisper-1") .build(); String text = service.createTranscription(createTranscriptionRequest, absolutePath).getText(); logger.debug(text); model.addAttribute("result", text); model.addAttribute("absolutePath", absolutePath); File fileToDelete = new File(absolutePath); if (fileToDelete.exists()) { if (fileToDelete.delete()) { logger.debug("temp File deleted successfully."); } else { logger.error("Failed to delete the file."); } } else { logger.debug("temp File not found"); } return "sample/file"; } //jsp에 저장버튼 추가 후 restapi로 보내기 @RequestMapping(value = "/summarize.do", method = RequestMethod.POST) public String showSummaryResult(@RequestParam String transcription_result, Model model) { OpenAiService service = new OpenAiService(Keys.OPENAPI_KEY,Duration.ofMinutes(9999)); List<ChatMessage> message = new ArrayList<ChatMessage>(); message.add(new ChatMessage("user", "텍스트의 주제를 파악해서 해당 언어로 다섯줄 내외 요약해줘 \""+transcription_result+"\"")); ChatCompletionRequest completionRequest = ChatCompletionRequest.builder() .messages(message) .model("gpt-3.5-turbo") .maxTokens(1500) .temperature((double) 0.5f) .build(); String summary_restult=service.createChatCompletion(completionRequest).getChoices().get(0).getMessage().getContent(); model.addAttribute("summary_result",summary_restult); return "sample/summarize"; } //파일을 임시저장 후 file.do에 경로를 보냄. 
@RequestMapping(value = "/postfile.do", method = RequestMethod.POST) public String handleFile(@RequestParam(value = "file", required = false) MultipartFile file, Model model, HttpServletRequest request) throws IOException{ ServletContext context = request.getSession().getServletContext(); String projectPath = context.getRealPath("/"); System.out.println("Project Path: " + projectPath); if (file.isEmpty()) { return "redirect:/file.do"; // 파일이 선택되지 않았을 경우 폼으로 리다이렉트 } try { byte[] bytes = file.getBytes(); Path directoryPath = Paths.get(projectPath+UPLOAD_DIR); // 디렉토리가 존재하지 않으면 생성 if (!Files.exists(directoryPath)) { Files.createDirectories(directoryPath); } Path filePath = directoryPath.resolve(file.getOriginalFilename()); Files.write(filePath, bytes); Path absolutePath = filePath.toAbsolutePath(); String absolutePathString = absolutePath.toString(); logger.debug("AbsolutePathString received"+absolutePathString); model.addAttribute("absolutePath", absolutePathString); } catch (IOException e) { e.printStackTrace(); } model.addAttribute("inputFile", file.getOriginalFilename()); return "sample/file"; } @RequestMapping(value = "/save-result.do", method = RequestMethod.POST) public String saveFile(@RequestParam(value = "dir", required = false) MultipartFile dir, @RequestParam String summ_result, Model model, HttpServletRequest request) throws IOException{ return "redirect:/summary.do"; } }
[ "com.theokanning.openai.audio.CreateTranscriptionRequest.builder", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((10123, 10222), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((10123, 10196), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((11541, 11746), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((11541, 11724), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((11541, 11683), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((11541, 11638), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((11541, 11601), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
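The two builder chains annotated above (CreateTranscriptionRequest.builder and ChatCompletionRequest.builder) follow the same pattern the row's controller uses. Below is a minimal, self-contained sketch of the transcription half, assuming the com.theokanning openai-java client is on the classpath and the key is read from an OPENAI_API_KEY environment variable; the key source, file path, and timeout are illustrative assumptions, not taken from the row.

import com.theokanning.openai.audio.CreateTranscriptionRequest;
import com.theokanning.openai.service.OpenAiService;

import java.time.Duration;

public class TranscriptionSketch {
    public static void main(String[] args) {
        // Audio uploads can be slow, so give the client a generous timeout.
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_API_KEY"), Duration.ofSeconds(120));

        // "whisper-1" mirrors the model used in the row above.
        CreateTranscriptionRequest request = CreateTranscriptionRequest.builder()
                .model("whisper-1")
                .build();

        // The service accepts a path to the audio file and returns the recognized text.
        String text = service.createTranscription(request, "/tmp/recording.wav").getText();
        System.out.println(text);

        // Release the client's executor so the JVM can exit promptly.
        service.shutdownExecutor();
    }
}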
/* * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license * Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template */ package cloud.cleo.connectgpt; import cloud.cleo.connectgpt.lang.LangUtil; import static cloud.cleo.connectgpt.lang.LangUtil.LanguageIds.*; import com.amazonaws.services.lambda.runtime.Context; import com.amazonaws.services.lambda.runtime.RequestHandler; import com.amazonaws.services.lambda.runtime.events.LexV2Event; import com.amazonaws.services.lambda.runtime.events.LexV2Event.DialogAction; import com.amazonaws.services.lambda.runtime.events.LexV2Event.Intent; import com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState; import com.amazonaws.services.lambda.runtime.events.LexV2Response; import com.fasterxml.jackson.databind.ObjectMapper; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.service.OpenAiService; import java.net.SocketTimeoutException; import java.time.Duration; import java.time.LocalDate; import java.time.ZoneId; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import software.amazon.awssdk.enhanced.dynamodb.DynamoDbEnhancedClient; import software.amazon.awssdk.enhanced.dynamodb.DynamoDbTable; import software.amazon.awssdk.enhanced.dynamodb.Key; import software.amazon.awssdk.enhanced.dynamodb.TableSchema; import software.amazon.awssdk.enhanced.dynamodb.extensions.AutoGeneratedTimestampRecordExtension; /** * * @author sjensen */ public class ChatGPTLambda implements RequestHandler<LexV2Event, LexV2Response> { // Initialize the Log4j logger. final static Logger log = LogManager.getLogger(ChatGPTLambda.class); final static ObjectMapper mapper = new ObjectMapper(); final static TableSchema<ChatGPTSessionState> schema = TableSchema.fromBean(ChatGPTSessionState.class); final static DynamoDbEnhancedClient enhancedClient = DynamoDbEnhancedClient.builder() .extensions(AutoGeneratedTimestampRecordExtension.create()).build(); final static DynamoDbTable<ChatGPTSessionState> sessionState = enhancedClient.table(System.getenv("SESSION_TABLE_NAME"), schema); final static OpenAiService open_ai_service = new OpenAiService(System.getenv("OPENAI_API_KEY"), Duration.ofSeconds(20)); final static String OPENAI_MODEL = System.getenv("OPENAI_MODEL"); @Override public LexV2Response handleRequest(LexV2Event lexRequest, Context cntxt) { try { log.debug(mapper.valueToTree(lexRequest).toString()); final var intentName = lexRequest.getSessionState().getIntent().getName(); log.debug("Intent: " + intentName); return processGPT(lexRequest); } catch (Exception e) { log.error(e); // Unhandled Exception return buildResponse(lexRequest, new LangUtil(lexRequest.getBot().getLocaleId()).getString(UNHANDLED_EXCEPTION)); } } private LexV2Response processGPT(LexV2Event lexRequest) { final var input = lexRequest.getInputTranscript(); final var localId = lexRequest.getBot().getLocaleId(); final var lang = new LangUtil(localId); log.debug("Java Locale is " + lang.getLocale()); if (input == null || input.isBlank()) { log.debug("Got blank input, so just silent or nothing"); final var attrs = lexRequest.getSessionState().getSessionAttributes(); var count = Integer.valueOf(attrs.getOrDefault("blankCounter", "0")); count++; if (count > 2) { log.debug("Two blank responses, sending to Quit Intent"); // Hang up on caller after 2 silience requests return buildQuitResponse(lexRequest); } else { attrs.put("blankCounter", count.toString()); 
// If we get slience (timeout without speech), then we get empty string on the transcript return buildResponse(lexRequest, lang.getString(BLANK_RESPONSE)); } } // When testing in lex console input will be text, so use session ID, for speech we shoud have a phone via Connect final var user_id = lexRequest.getSessionId(); // Key to record in Dynamo final var key = Key.builder().partitionValue(user_id).sortValue(LocalDate.now(ZoneId.of("America/Chicago")).toString()).build(); // load session state if it exists log.debug("Start Retreiving Session State"); var session = sessionState.getItem(key); log.debug("End Retreiving Session State"); if (session == null) { session = new ChatGPTSessionState(user_id); } // Since we can call and change language during session, always specifiy how we want responses session.addSystemMessage(lang.getString(CHATGPT_RESPONSE_LANGUAGE)); // add this request to the session session.addUserMessage(input); String botResponse; try { ChatCompletionRequest request = ChatCompletionRequest.builder() .messages(session.getChatMessages()) .model(OPENAI_MODEL) .maxTokens(500) .temperature(0.2) // More focused .n(1) // Only return 1 completion .build(); log.debug("Start API Call to ChatGPT"); final var completion = open_ai_service.createChatCompletion(request); log.debug("End API Call to ChatGPT"); log.debug(completion); botResponse = completion.getChoices().get(0).getMessage().getContent(); // Add response to session session.addAssistantMessage(botResponse); // Since we have a valid response, add message asking if there is anything else if ( ! "Text".equalsIgnoreCase(lexRequest.getInputMode()) ) { // Only add if not text (added to voice response) botResponse = botResponse + lang.getString(ANYTHING_ELSE); } // Save the session to dynamo log.debug("Start Saving Session State"); session.incrementCounter(); sessionState.putItem(session); log.debug("End Saving Session State"); } catch (RuntimeException rte) { if (rte.getCause() != null && rte.getCause() instanceof SocketTimeoutException) { log.error("Response timed out", rte); botResponse = lang.getString(OPERATION_TIMED_OUT); } else { throw rte; } } return buildResponse(lexRequest, botResponse); } /** * Response that sends you to the Quit intent so the call can be ended * * @param lexRequest * @param response * @return */ private LexV2Response buildQuitResponse(LexV2Event lexRequest) { // State to return final var ss = SessionState.builder() // Retain the current session attributes .withSessionAttributes(lexRequest.getSessionState().getSessionAttributes()) // Send back Quit Intent .withIntent(Intent.builder().withName("Quit").withState("ReadyForFulfillment").build()) // Indicate the state is Delegate .withDialogAction(DialogAction.builder().withType("Delegate").build()) .build(); final var lexV2Res = LexV2Response.builder() .withSessionState(ss) .build(); log.debug("Response is " + mapper.valueToTree(lexV2Res)); return lexV2Res; } /** * General Response used to send back a message and Elicit Intent again at LEX * * @param lexRequest * @param response * @return */ private LexV2Response buildResponse(LexV2Event lexRequest, String response) { // State to return final var ss = SessionState.builder() // Retain the current session attributes .withSessionAttributes(lexRequest.getSessionState().getSessionAttributes()) // Always ElictIntent, so you're back at the LEX Bot looking for more input .withDialogAction(DialogAction.builder().withType("ElicitIntent").build()) .build(); final var lexV2Res = LexV2Response.builder() 
.withSessionState(ss) // We are using plain text responses .withMessages(new LexV2Response.Message[]{new LexV2Response.Message("PlainText", response, null)}) .build(); log.debug("Response is " + mapper.valueToTree(lexV2Res)); return lexV2Res; } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1987, 2099), 'software.amazon.awssdk.enhanced.dynamodb.DynamoDbEnhancedClient.builder'), ((1987, 2091), 'software.amazon.awssdk.enhanced.dynamodb.DynamoDbEnhancedClient.builder'), ((4370, 4481), 'software.amazon.awssdk.enhanced.dynamodb.Key.builder'), ((4370, 4473), 'software.amazon.awssdk.enhanced.dynamodb.Key.builder'), ((4370, 4407), 'software.amazon.awssdk.enhanced.dynamodb.Key.builder'), ((4418, 4472), 'java.time.LocalDate.now'), ((5138, 5440), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5138, 5383), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5138, 5341), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5138, 5303), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5138, 5267), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5138, 5226), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((7069, 7547), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState.builder'), ((7069, 7522), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState.builder'), ((7069, 7385), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState.builder'), ((7069, 7240), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState.builder'), ((7310, 7384), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.Intent.builder'), ((7310, 7376), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.Intent.builder'), ((7310, 7343), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.Intent.builder'), ((7470, 7521), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.DialogAction.builder'), ((7470, 7513), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.DialogAction.builder'), ((7579, 7665), 'com.amazonaws.services.lambda.runtime.events.LexV2Response.builder'), ((7579, 7640), 'com.amazonaws.services.lambda.runtime.events.LexV2Response.builder'), ((8067, 8446), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState.builder'), ((8067, 8421), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState.builder'), ((8067, 8238), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.SessionState.builder'), ((8365, 8420), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.DialogAction.builder'), ((8365, 8412), 'com.amazonaws.services.lambda.runtime.events.LexV2Event.DialogAction.builder'), ((8478, 8732), 'com.amazonaws.services.lambda.runtime.events.LexV2Response.builder'), ((8478, 8707), 'com.amazonaws.services.lambda.runtime.events.LexV2Response.builder'), ((8478, 8539), 'com.amazonaws.services.lambda.runtime.events.LexV2Response.builder')]
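A stripped-down version of the chat-completion call the Lambda above makes, using the same builder fields (model, messages, maxTokens, temperature, n) and reading the first choice; the model name and environment variable are assumptions, not taken from the row.

import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;

import java.util.List;

public class ChatCompletionSketch {
    public static void main(String[] args) {
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_API_KEY"));

        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model("gpt-3.5-turbo")   // assumed model name
                .messages(List.of(new ChatMessage(ChatMessageRole.USER.value(), "Say hello in one word.")))
                .maxTokens(50)
                .temperature(0.2)         // low temperature for a focused answer, as in the row
                .n(1)                     // only one completion
                .build();

        // The first choice carries the assistant's reply.
        String reply = service.createChatCompletion(request)
                .getChoices().get(0).getMessage().getContent();
        System.out.println(reply);

        service.shutdownExecutor();
    }
}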
package de.throughput.ircbot.handler; import com.fasterxml.jackson.annotation.JsonIgnore; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionResult; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import de.throughput.ircbot.api.Command; import de.throughput.ircbot.api.CommandEvent; import de.throughput.ircbot.api.CommandHandler; import de.throughput.ircbot.api.MessageHandler; import org.apache.commons.lang3.exception.ExceptionUtils; import org.pircbotx.hooks.events.MessageEvent; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.Path; import java.text.SimpleDateFormat; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Date; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.TimeZone; import java.util.concurrent.ConcurrentHashMap; @Component public class OpenAiChatMessageHandler implements MessageHandler, CommandHandler { private static final Logger LOG = LoggerFactory.getLogger(OpenAiChatMessageHandler.class); public static final Command CMD_RESET_CONTEXT = new Command("aireset", "aireset - deletes the current context for the channel and reloads the system prompt from the file system."); private static final String MODEL_GPT_3_5_TURBO = "gpt-3.5-turbo"; private static final int MAX_CONTEXT_MESSAGES = 10; private static final int MAX_TOKENS = 100; private static final int MAX_IRC_MESSAGE_LENGTH = 420; private static final String SHORT_ANSWER_HINT = " (Antwort auf 200 Zeichen begrenzen)"; private final Map<String, LinkedList<TimedChatMessage>> contextMessagesPerChannel = new ConcurrentHashMap<>(); private final OpenAiService openAiService; private final Path systemPromptPath; private String systemPrompt; public OpenAiChatMessageHandler(OpenAiService openAiService, @Value("${openai.systemPrompt.path}") Path systemPromptPath) { this.openAiService = openAiService; this.systemPromptPath = systemPromptPath; readSystemPromptFromFile(); } @Override public Set<Command> getCommands() { return Set.of(CMD_RESET_CONTEXT); } @Override public boolean onMessage(MessageEvent event) { String message = event.getMessage().trim(); String botNick = event.getBot().getNick(); if (message.startsWith(botNick + ":") || message.startsWith(botNick + ",")) { message = message.substring(event.getBot().getNick().length() + 1).trim(); generateResponse(event, message); return true; } return false; } @Override public boolean onCommand(CommandEvent command) { // handles the aireset command var contextMessages = contextMessagesPerChannel.get(command.getEvent().getChannel().getName()); if (contextMessages != null) { synchronized (contextMessages) { contextMessages.clear(); } } readSystemPromptFromFile(); command.respond("system prompt reloaded. context reset complete."); return true; } /** * Generates a response to the given (trimmed) message using the OpenAI API. 
*/ private void generateResponse(MessageEvent event, String message) { var contextMessages = contextMessagesPerChannel.computeIfAbsent(event.getChannel().getName(), k -> new LinkedList<>()); synchronized (contextMessages) { try { String channel = event.getChannel().getName(); var request = ChatCompletionRequest.builder() .model(MODEL_GPT_3_5_TURBO) .maxTokens(MAX_TOKENS) .messages(createPromptMessages(contextMessages, channel, event.getUser().getNick(), message)) .build(); ChatCompletionResult completionResult = openAiService.createChatCompletion(request); ChatMessage responseMessage = completionResult.getChoices().get(0).getMessage(); contextMessages.add(new TimedChatMessage(responseMessage)); event.respond(sanitizeResponse(responseMessage.getContent())); } catch (Exception e) { LOG.error(e.getMessage(), e); event.respond("Tja. (" + ExceptionUtils.getRootCauseMessage(e) + ")"); } } } /** * Sanitizes the response by removing excessive whitespace and limiting the length. */ private static String sanitizeResponse(String content) { String trim = content.replaceAll("\\s+", " ").trim(); return trim.length() > MAX_IRC_MESSAGE_LENGTH ? trim.substring(0, MAX_IRC_MESSAGE_LENGTH) : trim; } /** * Creates the list of prompt messages for the OpenAI API call. */ private List<ChatMessage> createPromptMessages(LinkedList<TimedChatMessage> contextMessages, String channel, String nick, String message) { message += SHORT_ANSWER_HINT; contextMessages.add(new TimedChatMessage(new ChatMessage(ChatMessageRole.USER.value(), message, nick))); pruneOldMessages(contextMessages); List<ChatMessage> promptMessages = new ArrayList<>(); promptMessages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), systemPrompt)); promptMessages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), getDatePrompt())); promptMessages.addAll(contextMessages); return promptMessages; } /** * Generates a system prompt containing the current date and time. */ private String getDatePrompt() { TimeZone timeZone = TimeZone.getTimeZone("Europe/Berlin"); SimpleDateFormat dateFormat = new SimpleDateFormat("EEEE, 'der' dd. MMMM yyyy", Locale.GERMAN); dateFormat.setTimeZone(timeZone); SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm", Locale.GERMAN); timeFormat.setTimeZone(timeZone); Date now = new Date(); return "Heute ist " + dateFormat.format(now) + ", und es ist " + timeFormat.format(now) + " Uhr in Deutschland."; } /** * Removes old messages from the context. */ private void pruneOldMessages(LinkedList<TimedChatMessage> contextMessages) { LocalDateTime twoHoursAgo = LocalDateTime.now().minusHours(2); contextMessages.removeIf(message -> message.getTimestamp().isBefore(twoHoursAgo)); while (contextMessages.size() > MAX_CONTEXT_MESSAGES) { contextMessages.removeFirst(); } } /** * Reads the system prompt from the file system. */ private void readSystemPromptFromFile() { try { systemPrompt = Files.readString(systemPromptPath); } catch (IOException e) { throw new UncheckedIOException(e); } } @Override public boolean isOnlyTalkChannels() { return true; } /** * Adds a timestamp to ChatMessage, allowing us to drop old messages from the context. */ private static class TimedChatMessage extends ChatMessage { private final LocalDateTime timestamp; public TimedChatMessage(ChatMessage chatMessage) { super(chatMessage.getRole(), chatMessage.getContent(), chatMessage.getName()); this.timestamp = LocalDateTime.now(); } @JsonIgnore public LocalDateTime getTimestamp() { return timestamp; } } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((4011, 4292), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4011, 4259), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4011, 4141), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4011, 4094), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5521, 5549), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((5718, 5748), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((5809, 5839), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((6750, 6783), 'java.time.LocalDateTime.now')]
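The ChatMessageRole accessors annotated above are how this handler tags each message; here is a short sketch of assembling a system prompt plus a bounded sliding window of named user turns, the same shape the handler builds. The window size, prompt text, and class name are illustrative assumptions.

import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

public class ContextWindowSketch {
    private static final int MAX_CONTEXT_MESSAGES = 10;   // illustrative limit
    private final LinkedList<ChatMessage> context = new LinkedList<>();

    public List<ChatMessage> buildPrompt(String systemPrompt, String nick, String userText) {
        // ChatMessage accepts an optional name as the third argument, as in the row above.
        context.add(new ChatMessage(ChatMessageRole.USER.value(), userText, nick));

        // Drop the oldest turns once the window is full.
        while (context.size() > MAX_CONTEXT_MESSAGES) {
            context.removeFirst();
        }

        // The system prompt always leads, followed by the retained conversation context.
        List<ChatMessage> prompt = new ArrayList<>();
        prompt.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), systemPrompt));
        prompt.addAll(context);
        return prompt;
    }
}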
package com.cvcopilot.resumebuilding.service; import com.cvcopilot.resumebuilding.models.Modification; import com.cvcopilot.resumebuilding.repository.ModificationRepository; import com.cvcopilot.resumebuilding.repository.ProfileRepository; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import javax.annotation.PostConstruct; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.data.redis.core.HashOperations; import org.springframework.data.redis.core.RedisTemplate; import org.springframework.data.redis.core.ZSetOperations; import org.springframework.kafka.annotation.KafkaListener; import org.springframework.messaging.handler.annotation.Payload; import org.springframework.stereotype.Service; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; @Service public class ResumeService { @Autowired private ProfileRepository profileRepository; @Autowired private StateService stateService; @Autowired private RedisTemplate<String, String> redisTemplate; private HashOperations<String, String, String> hashOperations; private ZSetOperations<String, String> zSetOperations; @Autowired private ModificationRepository modificationRepository; @Value("${openai.api-key}") private String openAIKey; @Value("${openai.model}") private String openAIModel; @PostConstruct private void init() { hashOperations = redisTemplate.opsForHash(); zSetOperations = redisTemplate.opsForZSet(); } private String prompt = "Based on the user's experiences, write a optimized resume according to the job description. 
Emit the personal information."; private static final Logger logger = LoggerFactory.getLogger(ResumeService.class); @KafkaListener(topics = "resume", groupId = "test-group", containerFactory = "kafkaListenerContainerFactory") public void consume(@Payload String message) { String userId = message.substring(0, 19); String modificationId = message.substring(19, 55); List<ChatCompletionChoice> res; try { stateService.addOrUpdateState(userId, modificationId, "in_progress"); OpenAiService service = new OpenAiService(openAIKey, Duration.ofSeconds(120)); List<ChatMessage> messages = new ArrayList<>(); final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a hr from big tech company."); final ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), message.substring(56) + prompt); messages.add(systemMessage); messages.add(userMessage); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model("gpt-3.5-turbo") .messages(messages) .n(1) .maxTokens(600) .logitBias(new HashMap<>()) .build(); res = service.createChatCompletion(chatCompletionRequest).getChoices(); service.shutdownExecutor(); } catch (RuntimeException e) { logger.error("RuntimeException: " + e.getMessage()); stateService.addOrUpdateState(userId, modificationId, "failed"); return; } try { // write to postgres modificationRepository.save(new Modification(modificationId, res.get(0).getMessage().getContent(), Long.valueOf(userId), System.currentTimeMillis())); } catch (RuntimeException e) { logger.error("Failed to write to Postgres: " + e.getMessage()); stateService.addOrUpdateState(userId, modificationId, "failed_db_error"); return; } // write state to redis stateService.addOrUpdateState(userId, modificationId, "finished"); // invalidate cache of all results of this user zSetOperations.remove(userId); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((2811, 2841), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((2943, 2971), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
package podsofkon; import com.theokanning.openai.image.CreateImageRequest; import com.theokanning.openai.service.OpenAiService; import org.springframework.core.io.ByteArrayResource; import org.springframework.http.*; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; import org.springframework.web.bind.annotation.*; import org.springframework.web.client.RestTemplate; import javax.servlet.http.HttpServletRequest; import javax.sound.sampled.*; import java.io.*; import java.time.Duration; import java.util.*; @RestController @RequestMapping("/picturestory") public class GenerateAPictureStoryUsingOnlySpeech { static List<String> storyImages = new ArrayList(); @GetMapping("/form") public String newstory( HttpServletRequest request) throws Exception { storyImages = new ArrayList(); return getHtmlString(""); } @GetMapping("/picturestory") public String picturestory(@RequestParam("genopts") String genopts) throws Exception { AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100.0f, 16, 1, (16 / 8) * 1, 44100.0f, true); SoundRecorder soundRecorder = new SoundRecorder(); soundRecorder.build(format); System.out.println("Start recording ...."); soundRecorder.start(); Thread.sleep(8000); soundRecorder.stop(); System.out.println("Stopped recording ...."); Thread.sleep(3000); //give the process time String name = "AISoundClip"; AudioFileFormat.Type fileType = AudioFileFormat.Type.WAVE; AudioInputStream audioInputStream = soundRecorder.audioInputStream; System.out.println("Saving..."); File file = new File(name + "." + fileType.getExtension()); audioInputStream.reset(); AudioSystem.write(audioInputStream, fileType, file); System.out.println("Saved " + file.getAbsolutePath()); String transcription = transcribe(file) + genopts; System.out.println("transcription " + transcription); String imageLocation = imagegeneration(transcription); System.out.println("imageLocation " + imageLocation); storyImages.add(imageLocation); String htmlStoryFrames = ""; Iterator<String> iterator = storyImages.iterator(); while(iterator.hasNext()) { htmlStoryFrames += "<td><img src=\"" + iterator.next() +"\" width=\"400\" height=\"400\"></td>"; } return getHtmlString(htmlStoryFrames); } private static String getHtmlString(String htmlStoryFrames) { return "<html><table>" + " <tr>" + htmlStoryFrames + " </tr>" + "</table><br><br>" + "<form action=\"/picturestory/picturestory\">" + " <input type=\"submit\" value=\"Click here and record (up to 10 seconds of audio) describing next scene.\">" + "<br> Some additional options..." 
+ "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", using only one line\" checked >using only one line" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", photo taken on a Pentax k1000\">photo taken on a Pentax k1000" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", pixel art\">pixel art" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", digital art\">digital art" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", 3d render\">3d render" + "</form><br><br>" + "<form action=\"/picturestory/form\">" + " <input type=\"submit\" value=\"Or click here to start a new story\">\n" + "</form>" + "</html>"; } public String imagegeneration(String imagedescription) throws Exception { OpenAiService service = new OpenAiService("sk-sdf3HSWvb2HgV", Duration.ofSeconds(60)); CreateImageRequest openairequest = CreateImageRequest.builder() .prompt(imagedescription) .build(); System.out.println("\nImage is located at:"); String imageLocation = service.createImage(openairequest).getData().get(0).getUrl(); service.shutdownExecutor(); return imageLocation; } public String transcribe(File file) throws Exception { OpenAiService service = new OpenAiService("sk-nMVoZmUsOBjRasdfvb2HgV", Duration.ofSeconds(60)); String audioTranscription = transcribeFile(file, service); service.shutdownExecutor(); return audioTranscription; } private String transcribeFile(File file, OpenAiService service) throws Exception { String endpoint = "https://api.openai.com/v1/audio/transcriptions"; String modelName = "whisper-1"; HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.MULTIPART_FORM_DATA); headers.setBearerAuth(System.getenv("OPENAI_KEY")); MultiValueMap<String, Object> body = new LinkedMultiValueMap<>(); byte[] fileBytes = new byte[0]; try (FileInputStream fis = new FileInputStream(file); ByteArrayOutputStream bos = new ByteArrayOutputStream()) { byte[] buffer = new byte[1024]; int bytesRead; while ((bytesRead = fis.read(buffer)) != -1) { bos.write(buffer, 0, bytesRead); } fileBytes = bos.toByteArray(); } catch (IOException e) { e.printStackTrace(); } body.add("file", new ByteArrayResource(fileBytes) { @Override public String getFilename() { return file.getName(); } }); body.add("model", modelName); HttpEntity<MultiValueMap<String, Object>> requestEntity = new HttpEntity<>(body, headers); RestTemplate restTemplate = new RestTemplate(); ResponseEntity<String> response = restTemplate.exchange(endpoint, HttpMethod.POST, requestEntity, String.class); return response.getBody(); } public class SoundRecorder implements Runnable { AudioInputStream audioInputStream; private AudioFormat format; public Thread thread; public SoundRecorder build(AudioFormat format) { this.format = format; return this; } public void start() { thread = new Thread(this); thread.start(); } public void stop() { thread = null; } @Override public void run() { try (final ByteArrayOutputStream out = new ByteArrayOutputStream(); final TargetDataLine line = getTargetDataLineForRecord();) { int frameSizeInBytes = format.getFrameSize(); int bufferLengthInFrames = line.getBufferSize() / 8; final int bufferLengthInBytes = bufferLengthInFrames * frameSizeInBytes; buildByteOutputStream(out, line, frameSizeInBytes, bufferLengthInBytes); this.audioInputStream = new AudioInputStream(line); setAudioInputStream(convertToAudioIStream(out, frameSizeInBytes)); audioInputStream.reset(); } catch (IOException ex) { ex.printStackTrace(); } catch (Exception ex) { 
ex.printStackTrace(); } } public void buildByteOutputStream(final ByteArrayOutputStream out, final TargetDataLine line, int frameSizeInBytes, final int bufferLengthInBytes) throws IOException { final byte[] data = new byte[bufferLengthInBytes]; int numBytesRead; line.start(); while (thread != null) { if ((numBytesRead = line.read(data, 0, bufferLengthInBytes)) == -1) { break; } out.write(data, 0, numBytesRead); } } private void setAudioInputStream(AudioInputStream aStream) { this.audioInputStream = aStream; } public AudioInputStream convertToAudioIStream(final ByteArrayOutputStream out, int frameSizeInBytes) { byte[] audioBytes = out.toByteArray(); AudioInputStream audioStream = new AudioInputStream(new ByteArrayInputStream(audioBytes), format, audioBytes.length / frameSizeInBytes); System.out.println("Recording finished"); return audioStream; } public TargetDataLine getTargetDataLineForRecord() { TargetDataLine line; DataLine.Info info = new DataLine.Info(TargetDataLine.class, format); if (!AudioSystem.isLineSupported(info)) { return null; } try { line = (TargetDataLine) AudioSystem.getLine(info); line.open(format, line.getBufferSize()); } catch (final Exception ex) { return null; } return line; } } }
[ "com.theokanning.openai.image.CreateImageRequest.builder" ]
[((4160, 4255), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((4160, 4230), 'com.theokanning.openai.image.CreateImageRequest.builder')]
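A compact sketch of the CreateImageRequest.builder pattern annotated above. The prompt text and environment variable are assumptions, and the n/size fields are standard options on this client's image request builder rather than something the row itself sets.

import com.theokanning.openai.image.CreateImageRequest;
import com.theokanning.openai.service.OpenAiService;

import java.time.Duration;

public class ImageGenerationSketch {
    public static void main(String[] args) {
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_API_KEY"), Duration.ofSeconds(60));

        CreateImageRequest request = CreateImageRequest.builder()
                .prompt("a watercolor lighthouse at dusk")   // illustrative prompt
                .n(1)                                         // n/size assumed supported in this client version
                .size("512x512")
                .build();

        // The API returns hosted URLs; the first entry is the generated image.
        String url = service.createImage(request).getData().get(0).getUrl();
        System.out.println(url);

        service.shutdownExecutor();
    }
}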
package de.garrafao.phitag.computationalannotator.usepair.service; import com.theokanning.openai.OpenAiHttpException; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import de.garrafao.phitag.computationalannotator.common.error.WrongApiKeyException; import de.garrafao.phitag.computationalannotator.common.error.WrongModelException; import de.garrafao.phitag.computationalannotator.common.function.CommonFunction; import de.garrafao.phitag.computationalannotator.usepair.data.UsePairPrompt; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import java.util.List; @Service public class UsePairOpenAIService { private final UsePairPrompt usePairPrompt; private final CommonFunction commonFunction; @Autowired public UsePairOpenAIService(UsePairPrompt usePairPrompt, CommonFunction commonFunction) { this.usePairPrompt = usePairPrompt; this.commonFunction = commonFunction; } public String chat(final String apiKey, final String model, final String prompt, final String firstUsage, final String secondUsage, final String lemma) { try { List<ChatMessage> messages = this.usePairPrompt.getChatMessages(prompt, firstUsage, secondUsage, lemma); OpenAiService service = new OpenAiService(apiKey); ChatCompletionRequest completionRequest = ChatCompletionRequest.builder() .messages(messages) .model(model) .temperature(0.9) .topP(0.9) .n(1) .build(); List<ChatCompletionChoice> choices = service.createChatCompletion(completionRequest).getChoices(); StringBuilder returnString = new StringBuilder(); for (ChatCompletionChoice choice : choices) { ChatMessage message = choice.getMessage(); if (message != null) { System.out.println(message.getContent()); returnString.append(message.getContent()).append(System.lineSeparator()); } } System.out.println("response "+ returnString); int result = this.commonFunction.extractInteger(returnString.toString()); System.out.println("integer " + result); return String.valueOf(result); }catch (OpenAiHttpException e) { if (e.getMessage().contains("The model")) { throw new WrongModelException(model); } if (e.getMessage().contains("Incorrect API key provided")) { throw new WrongApiKeyException(); } throw e; } } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1606, 1835), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1606, 1806), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1606, 1780), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1606, 1749), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1606, 1711), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1606, 1677), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
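The service above distinguishes a bad key from a bad model name by inspecting OpenAiHttpException messages; a reduced sketch of that pattern follows. The message fragments it matches on come from the row; the wrapper exceptions and method name are illustrative assumptions.

import com.theokanning.openai.OpenAiHttpException;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;

import java.util.List;

public class ErrorHandlingSketch {
    public static String ask(String apiKey, String model, String question) {
        OpenAiService service = new OpenAiService(apiKey);
        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model(model)
                .messages(List.of(new ChatMessage("user", question)))
                .n(1)
                .build();
        try {
            return service.createChatCompletion(request)
                    .getChoices().get(0).getMessage().getContent();
        } catch (OpenAiHttpException e) {
            // The row above keys off these message fragments to report the failure cause.
            if (e.getMessage() != null && e.getMessage().contains("Incorrect API key provided")) {
                throw new IllegalArgumentException("Invalid OpenAI API key", e);
            }
            if (e.getMessage() != null && e.getMessage().contains("The model")) {
                throw new IllegalArgumentException("Unknown or unavailable model: " + model, e);
            }
            throw e;
        } finally {
            service.shutdownExecutor();
        }
    }
}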
package idatt2106v231.backend.service; import com.fasterxml.jackson.databind.ObjectMapper; import com.theokanning.openai.OpenAiApi; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import idatt2106v231.backend.model.OpenAiKey; import idatt2106v231.backend.repository.OpenAiKeyRepository; import io.github.cdimascio.dotenv.Dotenv; import okhttp3.OkHttpClient; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import retrofit2.Retrofit; import java.time.Duration; import java.util.ArrayList; import java.util.List; import java.util.Optional; import static com.theokanning.openai.service.OpenAiService.*; /** * Class to manage Ai. */ @Service public class AiServices { private final OpenAiKeyRepository openAiKeyRepo; /** * Constructor which sets the Open AI key repository. */ @Autowired public AiServices(OpenAiKeyRepository openAiKeyRepo) { this.openAiKeyRepo = openAiKeyRepo; } /** * Gets a chat completion using OpenAI GPT-3. * * @param content the content of the query * @return the answer produced by the AI */ public String getChatCompletion(String content) { try { String token = getOpenAiApiKey(); if (token.startsWith("ERROR :")) throw new Exception(token); ObjectMapper mapper = defaultObjectMapper(); Duration timeout = Duration.ofSeconds(300); OkHttpClient client = defaultClient(token, timeout) .newBuilder() .build(); Retrofit retrofit = defaultRetrofit(client, mapper); OpenAiApi api = retrofit.create(OpenAiApi.class); OpenAiService service = new OpenAiService(api); List<ChatMessage> messages = new ArrayList<>(); messages.add(new ChatMessage("user", content)); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .messages(messages) .model("gpt-3.5-turbo") .temperature(0.0) .build(); return String.valueOf(service.createChatCompletion(chatCompletionRequest) .getChoices().get(0).getMessage().getContent()); } catch (Exception e) { return "ERROR: " + e.getMessage(); } } /** * Gets the OpenAi API key. * This must either be stored in the table 'open_ai_key' in the database, * or in a .env file in the root of the project folder as OPENAI_TOKEN=your_token. * * @return the key */ public String getOpenAiApiKey() { try { String token = null; Optional<OpenAiKey> openAiKey = openAiKeyRepo.findFirstByOrderByIdDesc(); if (openAiKey.isPresent()) token = openAiKey.get().getApiKey(); if (token == null) { Dotenv dotenv = Dotenv.configure().load(); token = dotenv.get("OPENAI_TOKEN"); if (token == null) { return "Token is missing. " + "Make sure a valid OpenAI API key is stored in the database " + "or in a .env file in the root of the project"; } } return token; } catch (Exception e) { return "ERROR: " + e.getMessage(); } } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((2086, 2268), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2086, 2239), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2086, 2201), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2086, 2157), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3079, 3104), 'io.github.cdimascio.dotenv.Dotenv.configure')]
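The AiServices row builds its OpenAiService from the library's static defaultClient/defaultObjectMapper/defaultRetrofit helpers so the underlying OkHttp client can be customized. A minimal sketch of that wiring is below; the timeout value and environment variable are assumptions, and note that a service built from a bare OpenAiApi manages no executor of its own in the library versions I have seen.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.OpenAiApi;
import com.theokanning.openai.service.OpenAiService;
import okhttp3.OkHttpClient;
import retrofit2.Retrofit;

import java.time.Duration;

import static com.theokanning.openai.service.OpenAiService.defaultClient;
import static com.theokanning.openai.service.OpenAiService.defaultObjectMapper;
import static com.theokanning.openai.service.OpenAiService.defaultRetrofit;

public class CustomClientSketch {
    public static OpenAiService build(String token) {
        ObjectMapper mapper = defaultObjectMapper();
        // newBuilder() is the hook for proxies, interceptors, connection pools, etc.
        OkHttpClient client = defaultClient(token, Duration.ofSeconds(300))
                .newBuilder()
                .build();
        Retrofit retrofit = defaultRetrofit(client, mapper);
        OpenAiApi api = retrofit.create(OpenAiApi.class);
        return new OpenAiService(api);
    }

    public static void main(String[] args) {
        OpenAiService service = build(System.getenv("OPENAI_API_KEY"));
        System.out.println("client ready: " + (service != null));
    }
}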
package com.ramesh.openai; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; /*** * This project demonstrates the Chain of Thought (CoT) prompting technique which is useful when there is need * for analytical, reasoning, deriving etc. kind of problems ***/ class ChainOfThoughtPrompting { public static void main(String... args) { // Set the Open AI Token & Model String token = "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"; String model = "gpt-3.5-turbo"; // service handle for calling OpenAI APIs OpenAiService service = new OpenAiService(token, Duration.ofSeconds(30)); System.out.println("-----------------------------------------------------------"); // prompt - change this and run again and again. Mostly ChatGPT will not give the right response for complex prompt like puzzle. // that's where Chain of thought comes to help (next prompt with COT is given below) String prompt="I went to the market and bought 10 apples. I gave 2 apples to the neighbor and 2 to the repairman. I then went and bought 5 more apples and ate 1. How many apples did I remain with?"; System.out.println(prompt); // create the Chat message object final List<ChatMessage> messages = new ArrayList<>(); final ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), prompt); messages.add(userMessage); // call ChatGPT ChatCompletion API and get the response ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model(model) .messages(messages) .n(1) .temperature(.1) .maxTokens(200) .logitBias(new HashMap<>()) .build(); System.out.println("------------"); System.out.print("ChatGPT response="); service.createChatCompletion(chatCompletionRequest).getChoices().forEach((c) -> { System.out.println(c.getMessage().getContent()); }); System.out.println("\n-----------------------------------------------------------"); // Call ChatGPT Chat Completion with a CoT (Chain of THought) prompting technique // You will see that ChatGPT most likely will give the right answer. This is because in the prompt // the thinking process is given in the form of examples String[] prompts = new String[10]; prompts[0] = "The odd numbers in this group add up to an even number: 4, 8, 9, 15, 12, 2, 1."; prompts[1] = "A: The answer is False."; prompts[2] = "The odd numbers in this group add up to an even number: 17, 10, 19, 4, 8, 12, 24."; prompts[3] = "A: The answer is True."; prompts[4] = "The odd numbers in this group add up to an even number: 16, 11, 14, 4, 8, 13, 24."; prompts[5] = "A: The answer is True."; prompts[6] = "The odd numbers in this group add up to an even number: 17, 9, 10, 12, 13, 4, 2."; prompts[7] = "A: The answer is False."; prompts[8] = "The odd numbers in this group add up to an even number: 15, 32, 5, 13, 82, 7, 1. 
"; prompts[9] = "A: "; final List<ChatMessage> messages_cot = new ArrayList<>(); for (int i = 0; i < 10; i++) { System.out.println(prompts[i]); final ChatMessage assistantMessage = new ChatMessage(ChatMessageRole.ASSISTANT.value(), prompts[i]); messages_cot.add(assistantMessage); } ChatCompletionRequest chatCompletionRequest2 = ChatCompletionRequest .builder() .model(model) .messages(messages_cot) .n(1) .temperature(.1) .maxTokens(50) .logitBias(new HashMap<>()) .build(); System.out.println("------------"); System.out.print("ChatGPT response="); service.createChatCompletion(chatCompletionRequest2).getChoices().forEach((c) -> { System.out.println(c.getMessage().getContent()); }); service.shutdownExecutor(); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value" ]
[((1626, 1654), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3533, 3566), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value')]
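The row above seeds the chat with alternating example lines tagged with the ASSISTANT role to elicit chain-of-thought style answers. Below is a condensed sketch of assembling such a few-shot message list, with one worked example spelled out; the example content, model name, and environment variable are illustrative assumptions.

import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;

import java.util.ArrayList;
import java.util.List;

public class FewShotSketch {
    public static void main(String[] args) {
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_API_KEY"));

        // Worked examples go in as prior user/assistant turns; the final user turn is the real question.
        List<ChatMessage> messages = new ArrayList<>();
        messages.add(new ChatMessage(ChatMessageRole.USER.value(),
                "The odd numbers in this group add up to an even number: 4, 8, 9, 15, 12, 2, 1."));
        messages.add(new ChatMessage(ChatMessageRole.ASSISTANT.value(),
                "Adding the odd numbers (9, 15, 1) gives 25. The answer is False."));
        messages.add(new ChatMessage(ChatMessageRole.USER.value(),
                "The odd numbers in this group add up to an even number: 15, 32, 5, 13, 82, 7, 1."));

        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model("gpt-3.5-turbo")   // assumed model name
                .messages(messages)
                .temperature(0.1)
                .maxTokens(100)
                .n(1)
                .build();

        System.out.println(service.createChatCompletion(request)
                .getChoices().get(0).getMessage().getContent());
        service.shutdownExecutor();
    }
}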
package com.bambooleanlogic.ai; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.List; public class Main { public static void main(String[] args) throws IOException { SqlCode sql = generateSql( "MySQL", "Get all students who has at least one class where their grade is above average" ); if (sql.code != null) { System.out.println("--- CODE -----------------------"); System.out.println(sql.code); System.out.println("--- COMMENT --------------------"); System.out.println(sql.comment); System.out.println("--------------------------------"); } else { System.out.println("--------------------------------"); System.out.println(sql.comment); System.out.println("--------------------------------"); } } private static SqlCode generateSql(String dialect, String prompt) throws IOException { String apiToken = Files.readString(Path.of("P:\\oapi.txt")); OpenAiService service = new OpenAiService(apiToken); ChatCompletionRequest request = ChatCompletionRequest.builder() .model("gpt-3.5-turbo") .messages(List.of( new ChatMessage("system", "You are a helpful assistant who produces " + dialect + " code." ), new ChatMessage("user", prompt) )) .build(); String response = service.createChatCompletion(request).getChoices().get(0).getMessage().getContent(); int start = response.indexOf("```"); if (start != -1) { start += 3; int end = response.indexOf("```", start); if (end != -1) { String code = response.substring(start, end).trim(); String comment = response.substring(end + 3).trim(); return new SqlCode(code, comment); } } return new SqlCode(null, response); } private static final class SqlCode { public final String code; public final String comment; public SqlCode(String code, String comment) { this.code = code; this.comment = comment; } } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1375, 1755), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1375, 1730), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1375, 1446), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.chat.base.controller; import com.chat.base.bean.annotation.VisitLimit; import com.chat.base.bean.common.BaseCodeEnum; import com.chat.base.bean.constants.*; import com.chat.base.bean.entity.GptModelConfig; import com.chat.base.bean.vo.*; import com.chat.base.bean.entity.PromptModel; import com.chat.base.bean.gpt.ApiChatReq; import com.chat.base.bean.gpt.ChatReq; import com.chat.base.bean.req.CompletionReq; import com.chat.base.handler.*; import com.chat.base.handler.gpt.OpenAiProxyServiceFactory; import com.chat.base.service.ChatBaseOpenAiProxyService; import com.chat.base.utils.*; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import io.github.asleepyfish.enums.RoleEnum; import io.github.asleepyfish.exception.ChatGPTException; import org.springframework.beans.factory.annotation.Value; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.validation.annotation.Validated; import org.springframework.web.bind.annotation.*; import javax.servlet.http.HttpServletResponse; import javax.validation.Valid; import java.io.*; import java.util.*; import java.util.concurrent.TimeUnit; /** * @author huyd * @date 2023/5/5 11:19 PM */ @Slf4j @RestController public class AIChatController extends BaseController { @Autowired private UserLogManager userLogManager; @Autowired private AIChatManger AIChatManger; @Autowired private PromptModelManager promptModelManager; @Autowired private DrawTaskInfoManager drawTaskInfoManager; @Autowired private WeightAlgorithmManager weightAlgorithmManager; @Value("${file-token-path}") private String mjTokenPath; private static Cache<String, ChatBaseOpenAiProxyService> cache = CacheBuilder.newBuilder().initialCapacity(10).maximumSize(1000).expireAfterWrite(1000, TimeUnit.SECONDS).build(); @VisitLimit(value = {LimitEnum.IP}, scope = CommonConstant.NO_LOGIN_SCOPE) @PostMapping("/chat/streamChatWithWeb/V3") public void streamChatWithWebV3(@RequestBody @Valid ChatReq chatReq, HttpServletResponse response) throws Exception { String ip = HttpUtil.getIpAddress(); String browserName = HttpUtil.browserName(); Long id = SessionUser.getUserId(); String conversationId = chatReq.getConversationId(); String userId = id == null ? 
conversationId : String.valueOf(id); ModelPriceEnum modelPriceEnum = ModelPriceEnum.modelPriceMap.get(chatReq.getModel()); if (modelPriceEnum == null) { response.getOutputStream().write(BaseCodeEnum.MODEL_NO_OPEN.getMsg().getBytes()); return; } CacheUserInfoVo cacheUserInfoVo = SessionUser.get(); try { if (Objects.nonNull(cacheUserInfoVo) && Objects.nonNull(cacheUserInfoVo.getGptApiTokenVo())) { AIChatManger.chatStream(chatReq, cacheUserInfoVo, response); } else { AIChatManger.streamChatWithWebV3NoStatus(chatReq, response); } } catch (ChatGPTException e) { // 用户主动停掉回答 log.error("streamChatWithWebV3 user error chatReq={} ", chatReq, e); } catch (Exception e) { log.error("streamChatWithWebV3 error chatReq={} ", chatReq, e); userLogManager.addUserLog(chatReq.getAppName(), userId, OpEnum.GPT3.getOp(), ip, browserName); response.getOutputStream().write(BaseCodeEnum.SERVER_BUSY.getMsg().getBytes()); } finally { response.getOutputStream().close(); } } /** * 验证gpt的token效果 * * @param chatReq * @param response * @throws Exception */ @PostMapping("/chat/streamChatWithWeb/api/chat") public void streamChatWithApiChatWeb(@RequestBody @Valid ApiChatReq chatReq, HttpServletResponse response) throws Exception { String ip = HttpUtil.getIpAddress(); String browserName = HttpUtil.browserName(); String uid = chatReq.getToken(); try { response.setContentType("text/event-stream"); response.setCharacterEncoding("UTF-8"); response.setHeader("Cache-Control", "no-cache"); String model = StringUtils.isNoneEmpty(chatReq.getModel()) ? chatReq.getModel() : "gpt-3.5-turbo"; ChatBaseOpenAiProxyService proxyService = cache.get(chatReq.getToken() + model, () -> OpenAiProxyServiceFactory.getService(chatReq.getToken(), chatReq.getProxyUrl(), model)); Integer contentNumber = CommonConstant.CONTENT_NUMBER; String user = chatReq.getConversationId(); LinkedList<ChatMessage> userChatMessages = ChatMessageCacheUtil.getUserChatMessages(user, contentNumber); userChatMessages.add(new ChatMessage(RoleEnum.USER.getRoleName(), chatReq.getPrompt())); ChatMessageCacheUtil.getOkUserChatMessages(userChatMessages, model); if (userChatMessages.size() <= 0) { response.getOutputStream().write(BaseCodeEnum.TOKEN_OVER.getMsg().getBytes()); response.getOutputStream().close(); return; } ChatMessageResultVo streamChatCompletion = proxyService.createStreamChatCompletion(ChatCompletionRequest.builder() .model(model) .messages(userChatMessages) .user(user) .temperature(chatReq.getTemperature()) .topP(chatReq.getTop_p()) .stream(true) .build(), response.getOutputStream(), uid); if(streamChatCompletion!=null){ ChatMessageCacheUtil.saveChatMessage(user,streamChatCompletion.getChatMessage()); } } catch (ChatGPTException e) { // 用户主动停掉回答 log.error("streamChatWithWebV3 user error chatReq={} ", chatReq, e); response.getOutputStream().write(BaseCodeEnum.TERMINATE.getMsg().getBytes()); } catch (Exception e) { log.error("streamChatWithWebV3 error chatReq={} ", chatReq, e); userLogManager.addUserLog("BlueCatApiChat", uid, OpEnum.GPT3.getOp(), ip, browserName); response.getOutputStream().write(BaseCodeEnum.SERVER_BUSY.getMsg().getBytes()); } finally { response.getOutputStream().close(); } } @PostMapping("/chat/streamChatWithWeb/completion") public void completion(@RequestBody @Validated CompletionReq completionReq, HttpServletResponse response) throws IOException { CacheUserInfoVo cacheUserInfoVo = SessionUser.get(); if (cacheUserInfoVo == null) { response.getOutputStream().write("请登录之后再使用!".getBytes()); return; } response.setContentType("text/event-stream"); 
response.setCharacterEncoding("UTF-8"); response.setHeader("Cache-Control", "no-cache"); StringBuilder builder = new StringBuilder(); PromptModel prompt = promptModelManager.getPromptById(Long.parseLong(completionReq.getModelId())); if (prompt == null || StringUtils.isBlank(prompt.getContent())) { response.getOutputStream().write("模板已过期,请联系管理员".getBytes()); return; } builder.append(prompt.getContent()).append("\n"); builder.append(completionReq.getContent()); String uid = UUID.randomUUID().toString(); String model = StringUtils.isNoneEmpty(completionReq.getModel()) ? completionReq.getModel() : "gpt-3.5-turbo"; Optional<GptModelConfig> modelConfig = weightAlgorithmManager.round(cacheUserInfoVo, model); if (!modelConfig.isPresent()) { response.getOutputStream().write(BaseCodeEnum.NO_MODEL_ROLE.getMsg().getBytes()); return; } GptModelConfig gptModelConfig = modelConfig.get(); ChatBaseOpenAiProxyService proxyService = OpenAiProxyServiceFactory.createProxyService(gptModelConfig.getId().toString()); if (proxyService == null) { response.getOutputStream().write(BaseCodeEnum.NO_MODEL.getMsg().getBytes()); response.getOutputStream().close(); return; } LinkedList<ChatMessage> userChatMessages = new LinkedList<>(); userChatMessages.add(new ChatMessage(RoleEnum.USER.getRoleName(), builder.toString())); proxyService.createStreamChatCompletion(ChatCompletionRequest.builder() .model(model) .messages(userChatMessages) .user(uid) .temperature(1.0) .topP(1.0) .stream(true) .build(), response.getOutputStream(), cacheUserInfoVo.getGptApiTokenVo().getToken()); } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1974, 2086), 'com.google.common.cache.CacheBuilder.newBuilder'), ((1974, 2078), 'com.google.common.cache.CacheBuilder.newBuilder'), ((1974, 2037), 'com.google.common.cache.CacheBuilder.newBuilder'), ((1974, 2019), 'com.google.common.cache.CacheBuilder.newBuilder'), ((2792, 2838), 'com.chat.base.bean.common.BaseCodeEnum.MODEL_NO_OPEN.getMsg'), ((2792, 2827), 'com.chat.base.bean.common.BaseCodeEnum.MODEL_NO_OPEN.getMsg'), ((3663, 3707), 'com.chat.base.bean.common.BaseCodeEnum.SERVER_BUSY.getMsg'), ((3663, 3696), 'com.chat.base.bean.common.BaseCodeEnum.SERVER_BUSY.getMsg'), ((5036, 5063), 'io.github.asleepyfish.enums.RoleEnum.USER.getRoleName'), ((5266, 5309), 'com.chat.base.bean.common.BaseCodeEnum.TOKEN_OVER.getMsg'), ((5266, 5298), 'com.chat.base.bean.common.BaseCodeEnum.TOKEN_OVER.getMsg'), ((5498, 5811), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5498, 5782), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5498, 5748), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5498, 5702), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5498, 5643), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5498, 5611), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5498, 5563), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6209, 6251), 'com.chat.base.bean.common.BaseCodeEnum.TERMINATE.getMsg'), ((6209, 6240), 'com.chat.base.bean.common.BaseCodeEnum.TERMINATE.getMsg'), ((6507, 6551), 'com.chat.base.bean.common.BaseCodeEnum.SERVER_BUSY.getMsg'), ((6507, 6540), 'com.chat.base.bean.common.BaseCodeEnum.SERVER_BUSY.getMsg'), ((8032, 8078), 'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL_ROLE.getMsg'), ((8032, 8067), 'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL_ROLE.getMsg'), ((8383, 8424), 'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL.getMsg'), ((8383, 8413), 'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL.getMsg'), ((8622, 8649), 'io.github.asleepyfish.enums.RoleEnum.USER.getRoleName'), ((8722, 8970), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8722, 8945), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8722, 8915), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8722, 8888), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8722, 8854), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8722, 8827), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8722, 8783), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
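Note on the extract_api lists such as the one above: each pair appears to be a (start, end) character span into the record's code field, with one span per prefix of a chained builder call (the longest span covers the whole chain, shorter spans drop trailing calls). Below is a minimal sketch of resolving such a span back to source text, assuming the offsets are 0-based and end-exclusive and index the original, indented code string rather than the wrapped rendering shown here; the class ExtractApiSpan, its resolve method, and the tiny example string are illustrative and not part of any record.

// Hypothetical helper, not part of the dataset: recover the expression an
// extract_api span points at. Assumes (start, end) are 0-based, end-exclusive
// character offsets into the record's code string.
public final class ExtractApiSpan {
    public static String resolve(String code, int start, int end) {
        // Reject spans that fall outside the code string.
        if (start < 0 || end > code.length() || start >= end) {
            throw new IllegalArgumentException("span out of range: " + start + ".." + end);
        }
        return code.substring(start, end);
    }

    public static void main(String[] args) {
        // Illustrative stand-in for a record's code field.
        String code = "ChatCompletionRequest req = ChatCompletionRequest.builder().model(\"gpt-3.5-turbo\").build();";
        int start = code.indexOf("ChatCompletionRequest.builder()");
        int end = code.indexOf(".build()") + ".build()".length();
        // Prints the full builder chain, mirroring the longest span of a record.
        System.out.println(resolve(code, start, end));
    }
}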
package com.chunxia.chatgpt.chatapi; import android.util.Log; import com.blankj.utilcode.util.ThreadUtils; import com.chunxia.chatgpt.model.review.SentenceCard; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import java.util.ArrayList; import java.util.List; public class MultiRoundChatAgent { private static final String TAG = "MultiRoundChatAiApi"; private final List<ChatMessage> oldMessages = new ArrayList<>(); private String model = "gpt-3.5-turbo"; private int responseN = 1; private int maxTokenN = 512; private final ChatMessage systemMessage; private final String systemCommand; private final List<ThreadUtils.Task<String>> threadTasks = new ArrayList<>(); public MultiRoundChatAgent(String systemCommand, String model, int responseN, int maxTokenN) { this.systemCommand = systemCommand; this.model = model; this.responseN = responseN; this.maxTokenN = maxTokenN; this.systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), this.systemCommand); oldMessages.add(systemMessage); } public MultiRoundChatAgent() { this.systemCommand = ""; this.systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), this.systemCommand); oldMessages.add(systemMessage); } public MultiRoundChatAgent(String systemCommand) { this.systemCommand = systemCommand; this.systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), this.systemCommand); oldMessages.add(systemMessage); } public void sendMessageInThread(String message, ReceiveOpenAiReply onReceiveOpenAiReply) { ThreadUtils.Task<String> tTask = new ThreadUtils.SimpleTask<String>() { @Override public String doInBackground() throws Throwable { return sendToChatAi(message); } @Override public void onSuccess(String result) { Log.i(TAG, "receive reply from chatgpt"); onReceiveOpenAiReply.onSuccess(result); } }; threadTasks.add(tTask); ThreadUtils.getIoPool().execute(tTask); } public String sendMessage(String message) { return sendToChatAi(message); } public void cancelAllCurrentThread() { // todo 只取消当前正在执行的 threadTasks.forEach(ThreadUtils::cancel); } public SentenceCard getOneRoundSentenceCard() { if (oldMessages.size() < 3) { return null; } SentenceCard sentenceCard = new SentenceCard(oldMessages.get(2).getContent(), oldMessages.get(1).getContent()); return sentenceCard; } public interface ReceiveOpenAiReply { void onSuccess(String reply); } private void insertUserMessage(String message) { final ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), message); oldMessages.add(userMessage); } private String sendToChatAi(String message) { Log.i(TAG, "User: " + message); insertUserMessage(message); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model(model) .messages(oldMessages) .n(responseN) .maxTokens(maxTokenN) .build(); OpenAiService openAiService = OpenAIServiceManager.getOpenAiService(); if (openAiService == null) { return null; } else { List<ChatCompletionChoice> choices = openAiService.createChatCompletion(chatCompletionRequest).getChoices(); if (!choices.isEmpty()) { String content = choices.get(0).getMessage().getContent(); Log.i(TAG, "ChatGpt: " + content); addChatGptReplyToMessage(choices.get(0).getMessage()); return content; } } return null; } public void clearOldMessage() { oldMessages.clear(); oldMessages.add(systemMessage); } 
public void addChatGptReplyToMessage(ChatMessage message) { oldMessages.add(message); } public int getMaxTokenN() { return maxTokenN; } public void setMaxTokenN(int maxTokenN) { this.maxTokenN = maxTokenN; } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((1259, 1289), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1473, 1503), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1717, 1747), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((2390, 2428), 'com.blankj.utilcode.util.ThreadUtils.getIoPool'), ((3155, 3183), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
package com.theokanning.openai.service; import com.theokanning.openai.moderation.Moderation; import com.theokanning.openai.moderation.ModerationRequest; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertTrue; public class ModerationTest { String token = System.getenv("OPENAI_TOKEN"); com.theokanning.openai.service.OpenAiService service = new OpenAiService(token); @Test void createModeration() { ModerationRequest moderationRequest = ModerationRequest.builder() .input("I want to kill them") .model("text-moderation-latest") .build(); Moderation moderationScore = service.createModeration(moderationRequest).getResults().get(0); assertTrue(moderationScore.isFlagged()); } }
[ "com.theokanning.openai.moderation.ModerationRequest.builder" ]
[((504, 651), 'com.theokanning.openai.moderation.ModerationRequest.builder'), ((504, 626), 'com.theokanning.openai.moderation.ModerationRequest.builder'), ((504, 577), 'com.theokanning.openai.moderation.ModerationRequest.builder')]
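For the ModerationTest record just above, the three spans are nested prefixes of one chain: 577 - 504 = 73, 626 - 504 = 122, and 651 - 504 = 147 characters. Assuming the original source places each chained call on its own line with 16 spaces of indentation, those lengths line up exactly with ModerationRequest.builder() plus .input("I want to kill them"), then .model("text-moderation-latest"), then .build(). The small self-check below reproduces that arithmetic; the ModerationSpanCheck class and the 16-space indent are assumptions for illustration, not read from the dataset.

// Hypothetical self-check (Java 11+ for String.repeat): rebuild the builder
// chain from the ModerationTest record with an assumed 16-space indent and
// compare prefix lengths against the spans (504, 577), (504, 626), (504, 651).
public final class ModerationSpanCheck {
    public static void main(String[] args) {
        String indent = "\n" + " ".repeat(16); // assumed formatting of the original source
        String p1 = "ModerationRequest.builder()" + indent + ".input(\"I want to kill them\")";
        String p2 = p1 + indent + ".model(\"text-moderation-latest\")";
        String p3 = p2 + indent + ".build()";
        // Expected output: 73 122 147, i.e. 577-504, 626-504, 651-504.
        System.out.println(p1.length() + " " + p2.length() + " " + p3.length());
    }
}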
package com.theokanning.openai.service; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import static org.junit.jupiter.api.Assertions.assertEquals; class ChatCompletionTest { String token = System.getenv("OPENAI_TOKEN"); OpenAiService service = new OpenAiService(token); @Test void createChatCompletion() { final List<ChatMessage> messages = new ArrayList<>(); final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a dog and will speak as such."); messages.add(systemMessage); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model("gpt-3.5-turbo") .messages(messages) .n(5) .maxTokens(50) .logitBias(new HashMap<>()) .build(); List<ChatCompletionChoice> choices = service.createChatCompletion(chatCompletionRequest).getChoices(); assertEquals(5, choices.size()); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value" ]
[((772, 802), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]
package com.couchbase.intellij.tree.iq.intents; import com.couchbase.client.java.json.JsonArray; import com.couchbase.client.java.json.JsonObject; import com.couchbase.intellij.tree.iq.IQWindowContent; import com.couchbase.intellij.tree.iq.chat.ChatExchangeAbortException; import com.couchbase.intellij.tree.iq.chat.ChatGptHandler; import com.couchbase.intellij.tree.iq.chat.ChatLink; import com.couchbase.intellij.tree.iq.chat.ChatLinkService; import com.couchbase.intellij.tree.iq.chat.ChatLinkState; import com.couchbase.intellij.tree.iq.chat.ChatMessageEvent; import com.couchbase.intellij.tree.iq.chat.ChatMessageListener; import com.couchbase.intellij.tree.iq.chat.ConfigurationPage; import com.couchbase.intellij.tree.iq.chat.ConversationContext; import com.couchbase.intellij.tree.iq.core.IQCredentials; import com.couchbase.intellij.tree.iq.intents.actions.ActionInterface; import com.couchbase.intellij.tree.iq.settings.OpenAISettingsState; import com.couchbase.intellij.workbench.Log; import com.intellij.testFramework.fixtures.BasePlatformTestCase; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import java.util.ArrayList; import java.util.List; import java.util.function.Consumer; public abstract class AbstractIQTest extends BasePlatformTestCase { private static final String IQ_URL = System.getenv("CAPELLA_DOMAIN") + "/v2/organizations/%s/integrations/iq/"; private static final ChatGptHandler handler = new ChatGptHandler(); private static ConversationContext ctx; private static ChatLink link; @Override protected void setUp() throws Exception { super.setUp(); IQCredentials credentials = new IQCredentials(System.getenv("IQ_ORG_LOGIN"), System.getenv("IQ_ORG_PASSWD")); assertTrue("Please set capella domain and IQ credentials using `CAPELLA_DOMAIN`, `IQ_ORG_ID`, `IQ_ORG_LOGIN`, and `IQ_ORG_PASSWD` envvars", credentials.doLogin()); String orgId = System.getenv("IQ_ORG_ID"); final String iqUrl = String.format(IQ_URL, orgId); OpenAISettingsState.OpenAIConfig iqGptConfig = new OpenAISettingsState.OpenAIConfig(); OpenAISettingsState.getInstance().setGpt4Config(iqGptConfig); OpenAISettingsState.getInstance().setEnableInitialMessage(false); iqGptConfig.setApiKey(credentials.getAuth().getJwt()); iqGptConfig.setEnableStreamResponse(false); iqGptConfig.setModelName("gpt-4"); iqGptConfig.setApiEndpointUrl(iqUrl); iqGptConfig.setEnableCustomApiEndpointUrl(true); ConfigurationPage cp = iqGptConfig.withSystemPrompt(IQWindowContent::systemPrompt); Log.setLevel(3); Log.setPrinter(new Log.StdoutPrinter()); link = new ChatLinkService(getProject(), null, cp); ctx = new ChatLinkState(cp); } protected void send(String message, Consumer<ChatMessageEvent.ResponseArrived> listener) { send(message, false, listener); } protected void send(String message, boolean isSystem, Consumer<ChatMessageEvent.ResponseArrived> listener) { ChatMessage chatMessage = new ChatMessage( isSystem ? 
ChatMessageRole.SYSTEM.value() : ChatMessageRole.USER.value(), message ); ChatMessageEvent.Starting event = ChatMessageEvent.starting(AbstractIQTest.link, chatMessage); ctx.addChatMessage(chatMessage); List<ChatMessage> messages = ctx.getChatMessages(ctx.getModelType(), chatMessage); if (isSystem) { messages.add(chatMessage); } ChatCompletionRequest request = ChatCompletionRequest.builder() .messages(messages) .build(); handler.handle(AbstractIQTest.ctx, event.initiating(request), new ChatMessageListener() { @Override public void exchangeStarting(ChatMessageEvent.Starting event) throws ChatExchangeAbortException { } @Override public void exchangeStarted(ChatMessageEvent.Started event) { } @Override public void responseArriving(ChatMessageEvent.ResponseArriving event) { } @Override public void responseArrived(ChatMessageEvent.ResponseArrived event) { listener.accept(event); } @Override public void responseCompleted(ChatMessageEvent.ResponseArrived event) { } @Override public void exchangeFailed(ChatMessageEvent.Failed event) { throw new RuntimeException("IQ Exchange failed", event.getCause()); } @Override public void exchangeCancelled(ChatMessageEvent.Cancelled event) { } }).blockingLast(); } protected String getResponse(ChatMessageEvent.ResponseArrived response) { assertEquals(1, response.getResponseChoices().size()); return response.getResponseChoices().get(0).getContent(); } protected JsonObject getJson(ChatMessageEvent.ResponseArrived response) { return JsonObject.fromJson(getResponse(response)); } protected void assertJsonResponse(ChatMessageEvent.ResponseArrived response) { String message = getResponse(response); assertTrue(message.startsWith("{")); } protected void assertNotJson(ChatMessageEvent.ResponseArrived response) { assertFalse(getResponse(response).trim().charAt(0) == '{'); } protected List<JsonObject> getIntents(ChatMessageEvent.ResponseArrived response, Class<? extends ActionInterface> action) { List<JsonObject> results = new ArrayList<>(); JsonObject json = getJson(response); assertInstanceOf(json.get("actions"), JsonArray.class); JsonArray actions = json.getArray("actions"); for (int i = 0; i < actions.size(); i++) { assertInstanceOf(actions.get(i), JsonObject.class); JsonObject intent = actions.getObject(i); assertInstanceOf(intent.get("action"), String.class); if (intent.getString("action").equals(action.getSimpleName())) { results.add(intent); } } return results; } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((2263, 2323), 'com.couchbase.intellij.tree.iq.settings.OpenAISettingsState.getInstance'), ((2333, 2397), 'com.couchbase.intellij.tree.iq.settings.OpenAISettingsState.getInstance'), ((3263, 3293), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((3296, 3324), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3709, 3801), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3709, 3776), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.theokanning.openai.service; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.PropertyNamingStrategy; import com.theokanning.openai.ListSearchParameters; import com.theokanning.openai.OpenAiResponse; import com.theokanning.openai.assistants.Assistant; import com.theokanning.openai.assistants.AssistantFunction; import com.theokanning.openai.assistants.AssistantRequest; import com.theokanning.openai.assistants.AssistantToolsEnum; import com.theokanning.openai.assistants.Tool; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatFunction; import com.theokanning.openai.completion.chat.ChatFunctionCall; import com.theokanning.openai.messages.Message; import com.theokanning.openai.messages.MessageRequest; import com.theokanning.openai.runs.RequiredAction; import com.theokanning.openai.runs.Run; import com.theokanning.openai.runs.RunCreateRequest; import com.theokanning.openai.runs.RunStep; import com.theokanning.openai.runs.SubmitToolOutputRequestItem; import com.theokanning.openai.runs.SubmitToolOutputs; import com.theokanning.openai.runs.SubmitToolOutputsRequest; import com.theokanning.openai.runs.ToolCall; import com.theokanning.openai.threads.Thread; import com.theokanning.openai.threads.ThreadRequest; import com.theokanning.openai.utils.TikTokensUtil; import org.junit.jupiter.api.Test; import java.time.Duration; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; class AssistantFunctionTest { String token = System.getenv("OPENAI_TOKEN"); OpenAiService service = new OpenAiService(token, Duration.ofMinutes(1)); @Test void createRetrieveRun() throws JsonProcessingException { ObjectMapper mapper = new ObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE); mapper.addMixIn(ChatFunction.class, ChatFunctionMixIn.class); mapper.addMixIn(ChatCompletionRequest.class, ChatCompletionRequestMixIn.class); mapper.addMixIn(ChatFunctionCall.class, ChatFunctionCallMixIn.class); String funcDef = "{\n" + " \"type\": \"object\",\n" + " \"properties\": {\n" + " \"location\": {\n" + " \"type\": \"string\",\n" + " \"description\": \"The city and state, e.g. 
San Francisco, CA\"\n" + " },\n" + " \"unit\": {\n" + " \"type\": \"string\",\n" + " \"enum\": [\"celsius\", \"fahrenheit\"]\n" + " }\n" + " },\n" + " \"required\": [\"location\"]\n" + "}"; Map<String, Object> funcParameters = mapper.readValue(funcDef, new TypeReference<Map<String, Object>>() {}); AssistantFunction function = AssistantFunction.builder() .name("weather_reporter") .description("Get the current weather of a location") .parameters(funcParameters) .build(); List<Tool> toolList = new ArrayList<>(); Tool funcTool = new Tool(AssistantToolsEnum.FUNCTION, function); toolList.add(funcTool); AssistantRequest assistantRequest = AssistantRequest.builder() .model(TikTokensUtil.ModelEnum.GPT_4_1106_preview.getName()) .name("MATH_TUTOR") .instructions("You are a personal Math Tutor.") .tools(toolList) .build(); Assistant assistant = service.createAssistant(assistantRequest); ThreadRequest threadRequest = ThreadRequest.builder() .build(); Thread thread = service.createThread(threadRequest); MessageRequest messageRequest = MessageRequest.builder() .content("What's the weather of Xiamen?") .build(); Message message = service.createMessage(thread.getId(), messageRequest); RunCreateRequest runCreateRequest = RunCreateRequest.builder() .assistantId(assistant.getId()) .build(); Run run = service.createRun(thread.getId(), runCreateRequest); assertNotNull(run); Run retrievedRun = service.retrieveRun(thread.getId(), run.getId()); while (!(retrievedRun.getStatus().equals("completed")) && !(retrievedRun.getStatus().equals("failed")) && !(retrievedRun.getStatus().equals("requires_action"))){ retrievedRun = service.retrieveRun(thread.getId(), run.getId()); } if (retrievedRun.getStatus().equals("requires_action")) { RequiredAction requiredAction = retrievedRun.getRequiredAction(); System.out.println("requiredAction"); System.out.println(mapper.writeValueAsString(requiredAction)); List<ToolCall> toolCalls = requiredAction.getSubmitToolOutputs().getToolCalls(); ToolCall toolCall = toolCalls.get(0); String toolCallId = toolCall.getId(); SubmitToolOutputRequestItem toolOutputRequestItem = SubmitToolOutputRequestItem.builder() .toolCallId(toolCallId) .output("sunny") .build(); List<SubmitToolOutputRequestItem> toolOutputRequestItems = new ArrayList<>(); toolOutputRequestItems.add(toolOutputRequestItem); SubmitToolOutputsRequest submitToolOutputsRequest = SubmitToolOutputsRequest.builder() .toolOutputs(toolOutputRequestItems) .build(); retrievedRun = service.submitToolOutputs(retrievedRun.getThreadId(), retrievedRun.getId(), submitToolOutputsRequest); while (!(retrievedRun.getStatus().equals("completed")) && !(retrievedRun.getStatus().equals("failed")) && !(retrievedRun.getStatus().equals("requires_action"))){ retrievedRun = service.retrieveRun(thread.getId(), run.getId()); } OpenAiResponse<Message> response = service.listMessages(thread.getId()); List<Message> messages = response.getData(); System.out.println(mapper.writeValueAsString(messages)); } } }
[ "com.theokanning.openai.utils.TikTokensUtil.ModelEnum.GPT_4_1106_preview.getName", "com.theokanning.openai.assistants.AssistantRequest.builder", "com.theokanning.openai.messages.MessageRequest.builder", "com.theokanning.openai.assistants.AssistantFunction.builder", "com.theokanning.openai.runs.SubmitToolOutputsRequest.builder", "com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder", "com.theokanning.openai.threads.ThreadRequest.builder", "com.theokanning.openai.runs.RunCreateRequest.builder" ]
[((3437, 3645), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((3437, 3620), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((3437, 3576), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((3437, 3506), 'com.theokanning.openai.assistants.AssistantFunction.builder'), ((3864, 4125), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((3864, 4100), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((3864, 4067), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((3864, 4003), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((3864, 3967), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((3914, 3966), 'com.theokanning.openai.utils.TikTokensUtil.ModelEnum.GPT_4_1106_preview.getName'), ((4239, 4287), 'com.theokanning.openai.threads.ThreadRequest.builder'), ((4391, 4498), 'com.theokanning.openai.messages.MessageRequest.builder'), ((4391, 4473), 'com.theokanning.openai.messages.MessageRequest.builder'), ((4627, 4726), 'com.theokanning.openai.runs.RunCreateRequest.builder'), ((4627, 4701), 'com.theokanning.openai.runs.RunCreateRequest.builder'), ((5724, 5871), 'com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder'), ((5724, 5842), 'com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder'), ((5724, 5805), 'com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder'), ((6090, 6210), 'com.theokanning.openai.runs.SubmitToolOutputsRequest.builder'), ((6090, 6181), 'com.theokanning.openai.runs.SubmitToolOutputsRequest.builder')]
package com.theokanning.openai.service; import com.theokanning.openai.audio.CreateSpeechRequest; import com.theokanning.openai.audio.CreateTranscriptionRequest; import com.theokanning.openai.audio.CreateTranslationRequest; import com.theokanning.openai.audio.TranscriptionResult; import com.theokanning.openai.audio.TranslationResult; import org.junit.jupiter.api.Test; import java.io.IOException; import java.time.Duration; import okhttp3.MediaType; import okhttp3.ResponseBody; import static org.junit.jupiter.api.Assertions.*; public class AudioTest { static String englishAudioFilePath = "src/test/resources/hello-world.mp3"; static String koreanAudioFilePath = "src/test/resources/korean-hello.mp3"; String token = System.getenv("OPENAI_TOKEN"); OpenAiService service = new OpenAiService(token, Duration.ofSeconds(30)); @Test void createTranscription() { CreateTranscriptionRequest createTranscriptionRequest = CreateTranscriptionRequest.builder() .model("whisper-1") .build(); String text = service.createTranscription(createTranscriptionRequest, englishAudioFilePath).getText(); assertEquals("Hello World.", text); } @Test void createTranscriptionVerbose() { CreateTranscriptionRequest createTranscriptionRequest = CreateTranscriptionRequest.builder() .model("whisper-1") .responseFormat("verbose_json") .build(); TranscriptionResult result = service.createTranscription(createTranscriptionRequest, englishAudioFilePath); assertEquals("Hello World.", result.getText()); assertEquals("transcribe", result.getTask()); assertEquals("english", result.getLanguage()); assertTrue(result.getDuration() > 0); assertEquals(1, result.getSegments().size()); } @Test void createTranslation() { CreateTranslationRequest createTranslationRequest = CreateTranslationRequest.builder() .model("whisper-1") .build(); String text = service.createTranslation(createTranslationRequest, koreanAudioFilePath).getText(); assertEquals("Hello, my name is Yoona. I am a Korean native speaker.", text); } @Test void createTranslationVerbose() { CreateTranslationRequest createTranslationRequest = CreateTranslationRequest.builder() .model("whisper-1") .responseFormat("verbose_json") .build(); TranslationResult result = service.createTranslation(createTranslationRequest, koreanAudioFilePath); assertEquals("Hello, my name is Yoona. I am a Korean native speaker.", result.getText()); assertEquals("translate", result.getTask()); assertEquals("english", result.getLanguage()); assertTrue(result.getDuration() > 0); assertEquals(1, result.getSegments().size()); } @Test void createSpeech() throws IOException { CreateSpeechRequest createSpeechRequest = CreateSpeechRequest.builder() .model("tts-1") .input("Hello World.") .voice("alloy") .build(); final ResponseBody speech = service.createSpeech(createSpeechRequest); assertNotNull(speech); assertEquals(MediaType.get("audio/mpeg"), speech.contentType()); assertTrue(speech.bytes().length > 0); } }
[ "com.theokanning.openai.audio.CreateTranslationRequest.builder", "com.theokanning.openai.audio.CreateSpeechRequest.builder", "com.theokanning.openai.audio.CreateTranscriptionRequest.builder" ]
[((958, 1055), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((958, 1030), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1334, 1479), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1334, 1454), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1334, 1406), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1971, 2066), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((1971, 2041), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((2376, 2519), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((2376, 2494), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((2376, 2446), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((3049, 3206), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((3049, 3181), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((3049, 3149), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((3049, 3110), 'com.theokanning.openai.audio.CreateSpeechRequest.builder')]
package org.zhong.chatgpt.wechat.bot.chatgptwechatbot.test; import java.time.Duration; import java.util.List; import org.apache.http.client.CookieStore; import org.apache.http.cookie.Cookie; import org.apache.http.impl.client.BasicCookieStore; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.junit.jupiter.api.Test; import org.zhong.chatgpt.wechat.bot.config.BotConfig; import com.theokanning.openai.completion.CompletionRequest; import cn.zhouyafeng.itchat4j.utils.MyHttpClient; import com.theokanning.openai.OpenAiService; public class TestOpenAI { private static CloseableHttpClient httpClient; private static MyHttpClient instance = null; private static CookieStore cookieStore; static { cookieStore = new BasicCookieStore(); // 将CookieStore设置到httpClient中 httpClient = HttpClients.custom().setDefaultCookieStore(cookieStore).build(); } public static String getCookie(String name) { List<Cookie> cookies = cookieStore.getCookies(); for (Cookie cookie : cookies) { if (cookie.getName().equalsIgnoreCase(name)) { return cookie.getValue(); } } return null; } @Test public void test() { OpenAiService service = new OpenAiService(BotConfig.getAppKey(),"https://api.openai.com/", Duration.ofSeconds(300)); CompletionRequest completionRequest = CompletionRequest.builder() .prompt("你好") .model("text-davinci-003") .maxTokens(2000) .temperature(0.8) .topP(1.0) .frequencyPenalty(0.55) .presencePenalty(0.19) .echo(true) .user("1234213213") .build(); String text = service.createCompletion(completionRequest).getChoices().get(0).getText(); System.out.print(text); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((872, 935), 'org.apache.http.impl.client.HttpClients.custom'), ((872, 927), 'org.apache.http.impl.client.HttpClients.custom'), ((1374, 1638), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1625), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1601), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1585), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1558), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1530), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1515), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1493), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1466), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1374, 1429), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package com.touchbiz.chatgpt.simple; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.PropertyNamingStrategy; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.service.OpenAiService; import com.touchbiz.common.utils.tools.JsonUtils; import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.junit.Test; import org.springframework.core.ParameterizedTypeReference; import org.springframework.http.MediaType; import org.springframework.http.codec.ServerSentEvent; import org.springframework.web.reactive.function.BodyInserters; import org.springframework.web.reactive.function.client.WebClient; import reactor.core.publisher.Flux; import java.net.URI; import java.net.http.HttpClient; import java.net.http.HttpRequest; import java.net.http.HttpResponse; import java.time.LocalTime; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.function.Consumer; @Slf4j public class EventStreamTest { String token = ""; @Test public void testRetrofit(){ CompletionRequest completionRequest = CompletionRequest.builder() // .prompt("Human:" + chat.prompt +"\nAI:") .prompt("胡寅恺帅嘛") .model("text-davinci-003") // .echo(true) // .stop(Arrays.asList(" Human:"," AI:")) .maxTokens(128) .presencePenalty(0d) .frequencyPenalty(0d) .temperature(0.7D) .bestOf(1) .topP(1d) // .stream(true) .build(); OpenAiService service = new OpenAiService(token); var result = service.createCompletion(completionRequest); log.info("result:{}", JsonUtils.toJson(result)); } @SneakyThrows @Test public void testHttp() { HttpClient client = HttpClient.newBuilder().build(); CompletionRequest completionRequest = CompletionRequest.builder() // .prompt("Human:" + chat.prompt +"\nAI:") .prompt("给我推荐10本小说") .model("text-davinci-001") // .echo(true) .stop(Arrays.asList(" Human:"," AI:")) .maxTokens(1024) .presencePenalty(0d) .frequencyPenalty(0d) .temperature(0.7D) .bestOf(1) .topP(1d) .stream(true) .build(); ObjectMapper mapper = new ObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE); var json = mapper.writeValueAsString(completionRequest); log.info("json:{}", json); HttpRequest request = HttpRequest.newBuilder() .header("Authorization", "Bearer " + this.token) .header( "Content-Type", "application/json") .POST(HttpRequest.BodyPublishers.ofString(json)) .uri(URI.create("https://api.openai.com/v1/completions")) .build(); client.sendAsync(request, HttpResponse.BodyHandlers.ofLines()) .thenApply(HttpResponse::body).get() .forEach(System.out::println); } @SneakyThrows @Test public void testFlux(){ WebClient client = WebClient.create("https://api.openai.com/v1/completions"); ParameterizedTypeReference<ServerSentEvent<String>> type = new ParameterizedTypeReference<>() { }; CompletionRequest completionRequest = CompletionRequest.builder() // .prompt("Human:" + chat.prompt +"\nAI:") .prompt("给我推荐10本小说") .model("text-davinci-001") // .echo(true) .stop(Arrays.asList(" Human:"," AI:")) .maxTokens(1024) .presencePenalty(0d) .frequencyPenalty(0d) .temperature(0.7D) .bestOf(1) .topP(1d) .stream(true) .build(); ObjectMapper mapper = new ObjectMapper(); 
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE); Flux<ServerSentEvent<String>> eventStream = client.post() .accept(MediaType.APPLICATION_JSON) .contentType(MediaType.APPLICATION_JSON) .header("Authorization", "Bearer ") .body(BodyInserters.fromValue(mapper.writeValueAsString(completionRequest))) .retrieve() .bodyToFlux(type); eventStream.doOnError(x-> log.error("doOnError SSE:", x)); eventStream.subscribe(consumer , error -> log.error("Error receiving SSE:", error), () -> log.info("Completed!!!")); Thread.sleep(10*1000); } private Consumer<ServerSentEvent<String>> consumer = content -> log.info("Time: {} - event: name[{}], id [{}], content[{}] ", LocalTime.now(), content.event(), content.id(), content.data()); @SneakyThrows @Test public void testModels() { HttpClient client = HttpClient.newBuilder().build(); ObjectMapper mapper = new ObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE); HttpRequest request = HttpRequest.newBuilder() .header("Authorization", "Bearer " + this.token) .header( "Content-Type", "application/json") .GET() .uri(URI.create("https://api.openai.com/v1/models")) .build(); var response = client.sendAsync(request, HttpResponse.BodyHandlers.ofString()) .thenApply(HttpResponse::body).get(); log.info("response:{}", response); } @SneakyThrows @Test public void testChatGptModelHttp() { HttpClient client = HttpClient.newBuilder().build(); List<ChatMessage> message = new ArrayList<>(); message.add(new ChatMessage("user","请给我推荐10本书")); ChatCompletionRequest completionRequest = ChatCompletionRequest.builder() // .prompt("Human:" + chat.prompt +"\nAI:") .model("gpt-3.5-turbo") .stream(true) .messages(message).build(); ObjectMapper mapper = new ObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE); var json = mapper.writeValueAsString(completionRequest); log.info("json:{}", json); HttpRequest request = HttpRequest.newBuilder() .header("Authorization", "Bearer " + this.token) .header( "Content-Type", "application/json") .POST(HttpRequest.BodyPublishers.ofString(json)) .uri(URI.create("https://api.openai.com/v1/chat/completions")) .build(); client.sendAsync(request, HttpResponse.BodyHandlers.ofLines()) .thenApply(HttpResponse::body).get() .forEach(System.out::println); } @Builder @Data public static class ChatCompletionRequest{ private String model; private Boolean stream; private List<ChatMessage> messages; } @AllArgsConstructor @Data public static class ChatMessage{ private String role; private String content; } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((1334, 1845), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1788), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1762), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1735), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1700), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1662), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1625), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1506), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1334, 1463), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2122, 2153), 'java.net.http.HttpClient.newBuilder'), ((2202, 2718), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2693), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2663), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2637), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2610), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2575), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2537), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2500), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2467), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2382), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2202, 2339), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3135, 3449), 'java.net.http.HttpRequest.newBuilder'), ((3135, 3424), 'java.net.http.HttpRequest.newBuilder'), ((3135, 3350), 'java.net.http.HttpRequest.newBuilder'), ((3135, 3285), 'java.net.http.HttpRequest.newBuilder'), ((3135, 3224), 'java.net.http.HttpRequest.newBuilder'), ((3308, 3349), 'java.net.http.HttpRequest.BodyPublishers.ofString'), ((3486, 3521), 'java.net.http.HttpResponse.BodyHandlers.ofLines'), ((3950, 4466), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4441), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4411), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4385), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4358), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4323), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4285), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4248), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4215), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4130), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3950, 4087), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((5712, 5743), 'java.net.http.HttpClient.newBuilder'), ((6060, 6327), 'java.net.http.HttpRequest.newBuilder'), ((6060, 6302), 'java.net.http.HttpRequest.newBuilder'), ((6060, 6233), 'java.net.http.HttpRequest.newBuilder'), ((6060, 6210), 'java.net.http.HttpRequest.newBuilder'), ((6060, 6149), 'java.net.http.HttpRequest.newBuilder'), ((6379, 6415), 'java.net.http.HttpResponse.BodyHandlers.ofString'), ((6622, 6653), 'java.net.http.HttpClient.newBuilder'), ((7453, 7772), 'java.net.http.HttpRequest.newBuilder'), ((7453, 7747), 'java.net.http.HttpRequest.newBuilder'), ((7453, 7668), 'java.net.http.HttpRequest.newBuilder'), ((7453, 
7603), 'java.net.http.HttpRequest.newBuilder'), ((7453, 7542), 'java.net.http.HttpRequest.newBuilder'), ((7626, 7667), 'java.net.http.HttpRequest.BodyPublishers.ofString'), ((7809, 7844), 'java.net.http.HttpResponse.BodyHandlers.ofLines')]
package br.com.alura.ecomart.chatbot.infra.openai; import br.com.alura.ecomart.chatbot.domain.DadosCalculoFrete; import br.com.alura.ecomart.chatbot.domain.service.CalculadorDeFrete; import com.fasterxml.jackson.databind.ObjectMapper; import com.theokanning.openai.completion.chat.ChatFunction; import com.theokanning.openai.completion.chat.ChatFunctionCall; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.messages.Message; import com.theokanning.openai.messages.MessageRequest; import com.theokanning.openai.runs.Run; import com.theokanning.openai.runs.RunCreateRequest; import com.theokanning.openai.runs.SubmitToolOutputRequestItem; import com.theokanning.openai.runs.SubmitToolOutputsRequest; import com.theokanning.openai.service.FunctionExecutor; import com.theokanning.openai.service.OpenAiService; import com.theokanning.openai.threads.ThreadRequest; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; @Component public class OpenAIClient { private final String apiKey; private final String assistantId; private String threadId; private final OpenAiService service; private final CalculadorDeFrete calculadorDeFrete; public OpenAIClient(@Value("${app.openai.api.key}") String apiKey, @Value("${app.openai.assistant.id}") String assistantId, CalculadorDeFrete calculadorDeFrete) { this.apiKey = apiKey; this.service = new OpenAiService(apiKey, Duration.ofSeconds(60)); this.assistantId = assistantId; this.calculadorDeFrete = calculadorDeFrete; } public String enviarRequisicaoChatCompletion(DadosRequisicaoChatCompletion dados) { var messageRequest = MessageRequest .builder() .role(ChatMessageRole.USER.value()) .content(dados.promptUsuario()) .build(); if (this.threadId == null) { var threadRequest = ThreadRequest .builder() .messages(Arrays.asList(messageRequest)) .build(); var thread = service.createThread(threadRequest); this.threadId = thread.getId(); } else { service.createMessage(this.threadId, messageRequest); } var runRequest = RunCreateRequest .builder() .assistantId(assistantId) .build(); var run = service.createRun(threadId, runRequest); var concluido = false; var precisaChamarFuncao = false; try { while (!concluido && !precisaChamarFuncao) { Thread.sleep(1000 * 10); run = service.retrieveRun(threadId, run.getId()); concluido = run.getStatus().equalsIgnoreCase("completed"); precisaChamarFuncao = run.getRequiredAction() != null; } } catch (InterruptedException e) { throw new RuntimeException(e); } if (precisaChamarFuncao) { var precoDoFrete = chamarFuncao(run); var submitRequest = SubmitToolOutputsRequest .builder() .toolOutputs(Arrays.asList( new SubmitToolOutputRequestItem( run .getRequiredAction() .getSubmitToolOutputs() .getToolCalls() .get(0) .getId(), precoDoFrete) )) .build(); service.submitToolOutputs(threadId, run.getId(), submitRequest); try { while (!concluido) { Thread.sleep(1000 * 10); run = service.retrieveRun(threadId, run.getId()); concluido = run.getStatus().equalsIgnoreCase("completed"); } } catch (InterruptedException e) { throw new RuntimeException(e); } } var mensagens = service.listMessages(threadId); return mensagens .getData() .stream() .sorted(Comparator.comparingInt(Message::getCreatedAt).reversed()) .findFirst().get().getContent().get(0).getText().getValue() .replaceAll("\\\u3010.*?\\\u3011", ""); } 
private String chamarFuncao(Run run) { try { var funcao = run.getRequiredAction().getSubmitToolOutputs().getToolCalls().get(0).getFunction(); var funcaoCalcularFrete = ChatFunction.builder() .name("calcularFrete") .executor(DadosCalculoFrete.class, d -> calculadorDeFrete.calcular(d)) .build(); var executorDeFuncoes = new FunctionExecutor(Arrays.asList(funcaoCalcularFrete)); var functionCall = new ChatFunctionCall(funcao.getName(), new ObjectMapper().readTree(funcao.getArguments())); return executorDeFuncoes.execute(functionCall).toString(); } catch (Exception e) { throw new RuntimeException(e); } } public List<String> carregarHistoricoDeMensagens() { var mensagens = new ArrayList<String>(); if (this.threadId != null) { mensagens.addAll( service .listMessages(this.threadId) .getData() .stream() .sorted(Comparator.comparingInt(Message::getCreatedAt)) .map(m -> m.getContent().get(0).getText().getValue()) .collect(Collectors.toList()) ); } return mensagens; } public void apagarThread() { if (this.threadId != null) { service.deleteThread(this.threadId); this.threadId = null; } } }
[ "com.theokanning.openai.completion.chat.ChatFunction.builder", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((1972, 2000), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((4533, 4590), 'java.util.Comparator.comparingInt'), ((4935, 5120), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((4935, 5091), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((4935, 5000), 'com.theokanning.openai.completion.chat.ChatFunction.builder')]
package learn.scraibe.controllers; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionResult; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import learn.scraibe.models.Note; import org.springframework.beans.factory.annotation.Value; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; import java.time.Duration; import java.util.ArrayList; import java.util.List; @RestController @RequestMapping("/generate-completion") public class OpenAIController { @Value("${openai.api.key}") private String openaiApiKey; @PostMapping public ResponseEntity<Object> generateCompletion(@RequestBody Note note) { if(note.getContent() == null || note.getContent().isBlank()){ return new ResponseEntity<>("Cannot have blank notes", HttpStatus.BAD_REQUEST); } //create service that will route to OpenAI endpoint, provide key and timeout value incase openai takes a long time OpenAiService service = new OpenAiService(openaiApiKey, Duration.ofSeconds(60)); //set up messages and Roles List<ChatMessage> messages = new ArrayList<>(); ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), "organize with bullet points, only respond with bullet points "+ note.getContent()); ChatMessage systemMessage = new ChatMessage(ChatMessageRole.ASSISTANT.value(), "you are a helpful assistant"); messages.add(userMessage); messages.add((systemMessage)); // configure chatCompletionRequest object that will be sent over via the api ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model("gpt-3.5-turbo-0613") .messages(messages) .build(); //use service to make the request to OpenAI and then get the specific message to send back to the frontend. ChatMessage responseMessage = service.createChatCompletion(chatCompletionRequest).getChoices().get(0).getMessage(); note.setContent(responseMessage.getContent()); return new ResponseEntity<>(note, HttpStatus.OK); //TODO make a conditional statement based on the success of a response message, //one previous error occurred because the request timed out(openai took too long to send back a request) // but extending the duration seemed to solved the issue, just wondering what other issues to anticipate. } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value" ]
[((1638, 1666), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((1805, 1838), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value')]
package com.theokanning.openai.service;

import com.theokanning.openai.audio.CreateSpeechRequest;
import com.theokanning.openai.audio.CreateTranscriptionRequest;
import com.theokanning.openai.audio.CreateTranslationRequest;
import com.theokanning.openai.audio.TranscriptionResult;
import com.theokanning.openai.audio.TranslationResult;
import org.junit.jupiter.api.Test;

import java.io.IOException;
import java.time.Duration;

import okhttp3.MediaType;
import okhttp3.ResponseBody;

import static org.junit.jupiter.api.Assertions.*;

public class AudioTest {

    static String englishAudioFilePath = "src/test/resources/hello-world.mp3";
    static String koreanAudioFilePath = "src/test/resources/korean-hello.mp3";

    String token = System.getenv("OPENAI_TOKEN");
    OpenAiService service = new OpenAiService(token, Duration.ofSeconds(30));

    @Test
    void createTranscription() {
        CreateTranscriptionRequest createTranscriptionRequest = CreateTranscriptionRequest.builder()
                .model("whisper-1")
                .build();

        String text = service.createTranscription(createTranscriptionRequest, englishAudioFilePath).getText();

        assertEquals("Hello World.", text);
    }

    @Test
    void createTranscriptionVerbose() {
        CreateTranscriptionRequest createTranscriptionRequest = CreateTranscriptionRequest.builder()
                .model("whisper-1")
                .responseFormat("verbose_json")
                .build();

        TranscriptionResult result = service.createTranscription(createTranscriptionRequest, englishAudioFilePath);

        assertEquals("Hello World.", result.getText());
        assertEquals("transcribe", result.getTask());
        assertEquals("english", result.getLanguage());
        assertTrue(result.getDuration() > 0);
        assertEquals(1, result.getSegments().size());
    }

    @Test
    void createTranslation() {
        CreateTranslationRequest createTranslationRequest = CreateTranslationRequest.builder()
                .model("whisper-1")
                .build();

        String text = service.createTranslation(createTranslationRequest, koreanAudioFilePath).getText();

        assertEquals("Hello, my name is Yoona. I am a Korean native speaker.", text);
    }

    @Test
    void createTranslationVerbose() {
        CreateTranslationRequest createTranslationRequest = CreateTranslationRequest.builder()
                .model("whisper-1")
                .responseFormat("verbose_json")
                .build();

        TranslationResult result = service.createTranslation(createTranslationRequest, koreanAudioFilePath);

        assertEquals("Hello, my name is Yoona. I am a Korean native speaker.", result.getText());
        assertEquals("translate", result.getTask());
        assertEquals("english", result.getLanguage());
        assertTrue(result.getDuration() > 0);
        assertEquals(1, result.getSegments().size());
    }

    @Test
    void createSpeech() throws IOException {
        CreateSpeechRequest createSpeechRequest = CreateSpeechRequest.builder()
                .model("tts-1")
                .input("Hello World.")
                .voice("alloy")
                .build();

        final ResponseBody speech = service.createSpeech(createSpeechRequest);

        assertNotNull(speech);
        assertEquals(MediaType.get("audio/mpeg"), speech.contentType());
        assertTrue(speech.bytes().length > 0);
    }
}
[ "com.theokanning.openai.audio.CreateTranslationRequest.builder", "com.theokanning.openai.audio.CreateSpeechRequest.builder", "com.theokanning.openai.audio.CreateTranscriptionRequest.builder" ]
[((958, 1055), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((958, 1030), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1334, 1479), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1334, 1454), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1334, 1406), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((1971, 2066), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((1971, 2041), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((2376, 2519), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((2376, 2494), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((2376, 2446), 'com.theokanning.openai.audio.CreateTranslationRequest.builder'), ((3049, 3206), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((3049, 3181), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((3049, 3149), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((3049, 3110), 'com.theokanning.openai.audio.CreateSpeechRequest.builder')]
package cn.shu.wechat.utils;

import cn.shu.wechat.configuration.OpenAIConfiguration;
import cn.shu.wechat.entity.Message;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import com.theokanning.openai.OpenAiApi;
import com.theokanning.openai.OpenAiService;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.completion.CompletionResult;
import okhttp3.*;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory;
import retrofit2.converter.jackson.JacksonConverterFactory;

import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static java.time.Duration.ofSeconds;

public class OpenAPIUtil {

    private static final String BASE_URL = "https://api.openai.com/";

    public static List<Message> chat(String q) {
        ObjectMapper mapper = new ObjectMapper();
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
        mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);

        OkHttpClient client = new OkHttpClient.Builder()
                .addInterceptor(new Interceptor() {
                    @Override
                    public Response intercept(Chain chain) throws IOException {
                        Request request = chain.request()
                                .newBuilder()
                                .header("Authorization", "Bearer " + OpenAIConfiguration.getInstance().getOpenaiKey())
                                .build();
                        return chain.proceed(request);
                    }
                })
                .sslSocketFactory(TestSSLSocketClient.getSSLSocketFactory(), TestSSLSocketClient.getX509TrustManager())
                .hostnameVerifier(TestSSLSocketClient.getHostnameVerifier())
                .connectionPool(new ConnectionPool(5, 1, TimeUnit.SECONDS))
                .readTimeout(ofSeconds(OpenAIConfiguration.getInstance().getExpire()).toMillis(), TimeUnit.MILLISECONDS)
                .build();

        Retrofit retrofit = new Retrofit.Builder()
                .baseUrl(BASE_URL)
                .client(client)
                .addConverterFactory(JacksonConverterFactory.create(mapper))
                .addCallAdapterFactory(RxJava2CallAdapterFactory.create())
                .build();

        OpenAiService service = new OpenAiService(retrofit.create(OpenAiApi.class));

        CompletionRequest completionRequest = CompletionRequest.builder()
                .prompt(q)
                .maxTokens(1024)
                .model("text-davinci-003")
                .echo(true)
                .build();
        CompletionResult completion = service.createCompletion(completionRequest);

        Stream<Message> messageStream = completion.getChoices().stream()
                .map(e -> {
                    return Message.builder().content(e.getText().substring(e.getText().indexOf("\n\n") + 2)).build();
                });
        return messageStream.collect(Collectors.toList());
    }
}
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((1749, 1797), 'cn.shu.wechat.configuration.OpenAIConfiguration.getInstance'), ((2249, 2294), 'cn.shu.wechat.configuration.OpenAIConfiguration.getInstance'), ((2788, 2971), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2788, 2946), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2788, 2918), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2788, 2875), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2788, 2842), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3184, 3273), 'cn.shu.wechat.entity.Message.builder'), ((3184, 3265), 'cn.shu.wechat.entity.Message.builder')]