package fi.evolver.ai.spring.skill.mock;

import java.util.Objects;

import fi.evolver.ai.spring.Model;
import fi.evolver.ai.spring.chat.ChatApi;
import fi.evolver.ai.spring.chat.ChatResponse;
import fi.evolver.ai.spring.chat.prompt.ChatPrompt;
import fi.evolver.ai.spring.chat.prompt.ChatPrompt.Builder;
import fi.evolver.ai.spring.chat.prompt.Message;
import fi.evolver.ai.spring.json.JsonSpec;
import fi.evolver.ai.spring.skill.Skill;
import fi.evolver.ai.spring.skill.SkillException;
import io.swagger.v3.core.util.Json;
/**
 * A {@link Skill} that delegates its work to an LLM: the input parameters are
 * serialized to JSON and sent to the chat model together with a system prompt,
 * and the model's JSON response is parsed into the result type.
 */
public class LlmSkill<T, R> extends Skill<T, R> {

	private final JsonSpec<R> spec;
	private final ChatApi chatApi;
	private final Model model;
	private final String systemPrompt;


	public LlmSkill(Class<T> parameterType, Class<R> resultType, ChatApi chatApi, Model model, String systemPrompt) {
		super(parameterType, resultType);
		this.spec = JsonSpec.of(resultType);
		this.chatApi = Objects.requireNonNull(chatApi);
		this.model = Objects.requireNonNull(model);
		this.systemPrompt = Objects.requireNonNull(systemPrompt);
	}
	@Override
	public R apply(T parameters) throws SkillException {
		// Build a prompt that constrains the model's response to the result type's JSON schema
		// and passes the input parameters as pretty-printed JSON in the user message.
		Builder builder = ChatPrompt.builder(model)
				.setResponseFormat(spec)
				.add(Message.system(systemPrompt))
				.add(Message.user(Json.pretty(parameters)));

		ChatResponse response = chatApi.send(builder.build());

		// Fail fast if the model produced no textual content; otherwise parse it into the result type.
		String data = response.getTextContent().orElseThrow(() -> new SkillException("No content from LLM"));
		return spec.parse(data);
	}
}
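
// Minimal usage sketch (not part of the original source). The record types, the
// prompt text, and the way ChatApi and Model instances are obtained are all
// hypothetical placeholders for illustration only:
//
//   ChatApi chatApi = ...;   // e.g. injected by Spring
//   Model model = ...;       // the chat model configured for the application
//
//   record WeatherQuery(String city) {}
//   record WeatherReport(String city, double temperatureCelsius, String summary) {}
//
//   LlmSkill<WeatherQuery, WeatherReport> skill = new LlmSkill<>(
//           WeatherQuery.class, WeatherReport.class, chatApi, model,
//           "Answer with the current weather for the given city as JSON.");
//
//   WeatherReport report = skill.apply(new WeatherQuery("Helsinki"));  // may throw SkillException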