package io.quarkiverse.langchain4j.ollama.runtime.config;

import io.quarkus.runtime.annotations.ConfigGroup;
import io.smallrye.config.WithDefault;

@ConfigGroup
public interface ChatModelFixedRuntimeConfig {

    /**
     * Model to use. According to the Ollama docs, the default value is {@code llama3}.
     */
    @WithDefault("llama3")
    String modelId();
}
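
Usage note: this config group backs the fixed (build-time fixed at runtime) model setting of the quarkus-langchain4j-ollama extension. A minimal sketch of overriding the default in application.properties, assuming the property path quarkus.langchain4j.ollama.chat-model.model-id that the extension derives from modelId():

    # application.properties (sketch; property path assumed from the extension's config prefix)
    quarkus.langchain4j.ollama.chat-model.model-id=llama3.1

If the property is omitted, the @WithDefault annotation above means modelId() resolves to "llama3".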