/*
* Copyright (c) 2023-2025, Agents-Flex ([email protected]).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.agentsflex.llm.moonshot;

import com.agentsflex.core.llm.ChatOptions;
import com.agentsflex.core.message.MessageStatus;
import com.agentsflex.core.parser.AiMessageParser;
import com.agentsflex.core.parser.FunctionMessageParser;
import com.agentsflex.core.parser.impl.DefaultAiMessageParser;
import com.agentsflex.core.parser.impl.DefaultFunctionMessageParser;
import com.agentsflex.core.prompt.DefaultPromptFormat;
import com.agentsflex.core.prompt.Prompt;
import com.agentsflex.core.prompt.PromptFormat;
import com.agentsflex.core.util.Maps;
import com.alibaba.fastjson.JSON;

public class MoonshotLlmUtil {

    private static final PromptFormat promptFormat = new DefaultPromptFormat();
    /**
     * Builds an AiMessageParser whose JSONPath expressions match Moonshot's
     * chat-completion responses, for either streaming or blocking calls.
     *
     * @param isStream whether the response arrives as streaming (SSE delta) chunks
     */
    public static AiMessageParser getAiMessageParser(Boolean isStream) {
        DefaultAiMessageParser aiMessageParser = new DefaultAiMessageParser();
        if (isStream) {
            // Streaming chunks carry the incremental text under "delta".
            aiMessageParser.setContentPath("$.choices[0].delta.content");
            aiMessageParser.setIndexPath("$.choices[0].index");
            aiMessageParser.setStatusPath("$.choices[0].finish_reason");
            aiMessageParser.setStatusParser(content -> parseMessageStatus((String) content));
            // Usage path for streaming chunks; "$." prefix added for consistency with the other paths.
            aiMessageParser.setTotalTokensPath("$.choices[0].usage.total_tokens");
        } else {
            // Blocking responses carry the full text under "message" and usage at the top level.
            aiMessageParser.setContentPath("$.choices[0].message.content");
            aiMessageParser.setIndexPath("$.choices[0].index");
            aiMessageParser.setStatusPath("$.choices[0].finish_reason");
            aiMessageParser.setStatusParser(content -> parseMessageStatus((String) content));
            aiMessageParser.setTotalTokensPath("$.usage.total_tokens");
        }
        return aiMessageParser;
    }
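
    // For reference (hand-written, abbreviated sample), the blocking response shape the
    // paths above are written against looks like:
    //
    //   {
    //     "choices": [
    //       { "index": 0,
    //         "finish_reason": "stop",
    //         "message": { "role": "assistant", "content": "Hello" } }
    //     ],
    //     "usage": { "total_tokens": 12 }
    //   }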
    /**
     * Builds a FunctionMessageParser that extracts the first tool call from a
     * Moonshot (OpenAI-compatible) response and parses its JSON-encoded arguments.
     */
    public static FunctionMessageParser getFunctionMessageParser() {
        DefaultFunctionMessageParser functionMessageParser = new DefaultFunctionMessageParser();
        functionMessageParser.setFunctionNamePath("$.choices[0].message.tool_calls[0].function.name");
        functionMessageParser.setFunctionArgsPath("$.choices[0].message.tool_calls[0].function.arguments");
        // "arguments" is delivered as a JSON string, so it is parsed into an object here.
        functionMessageParser.setFunctionArgsParser(JSON::parseObject);
        return functionMessageParser;
    }
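
    // Hand-written, abbreviated sample of the tool-call fragment those paths target
    // (note that "arguments" is itself a JSON string):
    //
    //   "message": {
    //     "tool_calls": [
    //       { "function": { "name": "get_weather",
    //                       "arguments": "{\"city\": \"Beijing\"}" } }
    //     ]
    //   }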
    /** Maps Moonshot's finish_reason to a MessageStatus: "stop" ends the message, anything else marks an intermediate chunk. */
    public static MessageStatus parseMessageStatus(String status) {
        return "stop".equals(status) ? MessageStatus.END : MessageStatus.MIDDLE;
    }
    /**
     * Converts the given Prompt into the request payload expected by the Moonshot
     * chat-completion API.
     *
     * @param prompt      the Prompt to convert, containing the conversation content
     * @param config      the Moonshot LLM configuration, e.g. which model to use
     * @param isStream    whether the payload should request a streaming response
     * @param chatOptions chat options such as temperature and max tokens
     * @return the payload as a JSON string, ready to be sent to the language model
     */
    public static String promptToPayload(Prompt<?> prompt, MoonshotLlmConfig config, Boolean isStream, ChatOptions chatOptions) {
        // Build the payload root: model, stream flag, chat options and the formatted prompt messages.
        Maps.Builder root = Maps.of("model", config.getModel())
            .put("stream", isStream)
            .put("temperature", chatOptions.getTemperature())
            .put("max_tokens", chatOptions.getMaxTokens())
            .put("messages", promptFormat.toMessagesJsonObject(prompt));
        // Serialize the payload structure to a JSON string.
        return JSON.toJSONString(root.build());
    }
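
    // Illustrative payload produced by promptToPayload; the concrete values below are
    // hypothetical, and "messages" is shown in the usual role/content form:
    //
    //   {
    //     "model": "moonshot-v1-8k",
    //     "stream": false,
    //     "temperature": 0.8,
    //     "max_tokens": 1024,
    //     "messages": [ { "role": "user", "content": "Hello" } ]
    //   }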
}