org.wildfly.extension.ai.chat.OllamaChatModelProviderServiceConfigurator

/*
 * Copyright The WildFly Authors
 * SPDX-License-Identifier: Apache-2.0
 */
package org.wildfly.extension.ai.chat;

import static org.wildfly.extension.ai.AIAttributeDefinitions.BASE_URL;
import static org.wildfly.extension.ai.AIAttributeDefinitions.CONNECT_TIMEOUT;
import static org.wildfly.extension.ai.AIAttributeDefinitions.LOG_REQUESTS;
import static org.wildfly.extension.ai.AIAttributeDefinitions.LOG_RESPONSES;
import static org.wildfly.extension.ai.AIAttributeDefinitions.MAX_RETRIES;
import static org.wildfly.extension.ai.AIAttributeDefinitions.MODEL_NAME;
import static org.wildfly.extension.ai.AIAttributeDefinitions.RESPONSE_FORMAT;
import static org.wildfly.extension.ai.AIAttributeDefinitions.TEMPERATURE;

import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
import java.time.Duration;
import java.util.function.Supplier;

import org.jboss.as.controller.OperationContext;
import org.jboss.as.controller.OperationFailedException;
import org.jboss.dmr.ModelNode;
import org.wildfly.extension.ai.AIAttributeDefinitions;

import org.wildfly.service.capture.ValueRegistry;
import org.wildfly.subsystem.service.ResourceServiceInstaller;

/**
 * Configures a service that provides an Ollama-backed {@code ChatLanguageModel}.
 */
public class OllamaChatModelProviderServiceConfigurator extends AbstractChatModelProviderServiceConfigurator {

    public OllamaChatModelProviderServiceConfigurator(ValueRegistry registry) {
        super(registry);
    }

    @Override
    public ResourceServiceInstaller configure(OperationContext context, ModelNode model) throws OperationFailedException {
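        // Resolve the Ollama connection, logging, and model attributes from the resource's management model.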
        Double temperature = TEMPERATURE.resolveModelAttribute(context, model).asDoubleOrNull();
        Long connectTimeOut = CONNECT_TIMEOUT.resolveModelAttribute(context, model).asLong();
        Boolean logRequests = LOG_REQUESTS.resolveModelAttribute(context, model).asBooleanOrNull();
        Boolean logResponses = LOG_RESPONSES.resolveModelAttribute(context, model).asBooleanOrNull();
        String baseUrl = BASE_URL.resolveModelAttribute(context, model).asString();
        Integer maxRetries = MAX_RETRIES.resolveModelAttribute(context, model).asIntOrNull();
        String modelName = MODEL_NAME.resolveModelAttribute(context, model).asString();
        boolean isJson = AIAttributeDefinitions.ResponseFormat.isJson(RESPONSE_FORMAT.resolveModelAttribute(context, model).asStringOrNull());
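        // Factory that builds the langchain4j OllamaChatModel on demand from the resolved attributes.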
        Supplier<ChatLanguageModel> factory = new Supplier<>() {
            @Override
            public ChatLanguageModel get() {
                OllamaChatModel.OllamaChatModelBuilder builder = OllamaChatModel.builder()
                        .baseUrl(baseUrl)
                        .logRequests(logRequests)
                        .logResponses(logResponses)
                        .maxRetries(maxRetries)
                        .temperature(temperature)
                        .timeout(Duration.ofMillis(connectTimeOut))
                        .modelName(modelName);
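                // Ask Ollama for JSON output when the resource's response-format resolves to JSON.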
                if (isJson) {
                    builder.format("json");
                }
                return builder.build();
            }
        };
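        // Install the provider service for the current resource address, backed by the factory.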
        return installService(context.getCurrentAddressValue(), factory);
    }
}
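
For comparison, the same builder calls can be exercised directly against langchain4j outside of WildFly. The following is a minimal sketch, not part of the subsystem: the base URL, model name, and tuning values are illustrative placeholders, and it assumes a langchain4j version in which ChatLanguageModel exposes the generate(String) convenience method. Only builder methods that already appear in the factory above are used.

import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.ollama.OllamaChatModel;

import java.time.Duration;

public class OllamaChatModelExample {

    public static void main(String[] args) {
        // Build an Ollama-backed ChatLanguageModel, mirroring the factory above.
        // All values below are illustrative placeholders, not subsystem defaults.
        ChatLanguageModel model = OllamaChatModel.builder()
                .baseUrl("http://localhost:11434")  // assumed local Ollama endpoint
                .modelName("llama3")                // any model already pulled into Ollama
                .temperature(0.7)
                .timeout(Duration.ofMillis(10_000)) // plays the role of CONNECT_TIMEOUT above
                .logRequests(true)
                .logResponses(true)
                .maxRetries(2)
                // .format("json")                  // add this, as the configurator does, when JSON output is required
                .build();

        System.out.println(model.generate("Say hello in one sentence."));
    }
}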