com.github.tjake.jlama.net.openai.model.CreateChatCompletionStreamResponseChoicesInner (jlama-net)
Jlama: A modern LLM inference engine for Java
/*
* OpenAI API
* The OpenAI REST API. Please see https://platform.openai.com/docs/api-reference for more details.
*
* The version of the OpenAPI document: 2.1.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package com.github.tjake.jlama.net.openai.model;
import java.util.Objects;
import java.util.Map;
import java.util.HashMap;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.annotation.JsonValue;
import com.github.tjake.jlama.net.openai.model.ChatCompletionStreamResponseDelta;
import com.github.tjake.jlama.net.openai.model.CreateChatCompletionResponseChoicesInnerLogprobs;
import java.util.Arrays;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import jakarta.validation.constraints.*;
import jakarta.validation.Valid;
import org.hibernate.validator.constraints.*;
import com.github.tjake.jlama.net.openai.JSON;
/**
* CreateChatCompletionStreamResponseChoicesInner
*/
@JsonPropertyOrder({
CreateChatCompletionStreamResponseChoicesInner.JSON_PROPERTY_DELTA,
CreateChatCompletionStreamResponseChoicesInner.JSON_PROPERTY_LOGPROBS,
CreateChatCompletionStreamResponseChoicesInner.JSON_PROPERTY_FINISH_REASON,
CreateChatCompletionStreamResponseChoicesInner.JSON_PROPERTY_INDEX
})
@JsonTypeName("CreateChatCompletionStreamResponse_choices_inner")
@jakarta.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", comments = "Generator version: 7.7.0")
public class CreateChatCompletionStreamResponseChoicesInner {
public static final String JSON_PROPERTY_DELTA = "delta";
private ChatCompletionStreamResponseDelta delta;
public static final String JSON_PROPERTY_LOGPROBS = "logprobs";
private CreateChatCompletionResponseChoicesInnerLogprobs logprobs;
/**
* The reason the model stopped generating tokens. This will be `stop` if the model hit a natural stop point or a provided stop sequence, `length` if the maximum number of tokens specified in the request was reached, `content_filter` if content was omitted due to a flag from our content filters, `tool_calls` if the model called a tool, or `function_call` (deprecated) if the model called a function.
*/
public enum FinishReasonEnum {
STOP("stop"),
LENGTH("length"),
TOOL_CALLS("tool_calls"),
CONTENT_FILTER("content_filter"),
FUNCTION_CALL("function_call");
private String value;
FinishReasonEnum(String value) {
this.value = value;
}
@JsonValue
public String getValue() {
return value;
}
@Override
public String toString() {
return String.valueOf(value);
}
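    /**
     * Maps a raw JSON string onto the matching enum constant. Returns
     * {@code null} for unrecognized values instead of throwing, so an
     * unexpected finish_reason string does not break deserialization.
     */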
@JsonCreator
public static FinishReasonEnum fromValue(String value) {
for (FinishReasonEnum b : FinishReasonEnum.values()) {
if (b.value.equals(value)) {
return b;
}
}
return null;
}
}
public static final String JSON_PROPERTY_FINISH_REASON = "finish_reason";
private FinishReasonEnum finishReason;
public static final String JSON_PROPERTY_INDEX = "index";
private Integer index;
public CreateChatCompletionStreamResponseChoicesInner() {
}
public CreateChatCompletionStreamResponseChoicesInner delta(ChatCompletionStreamResponseDelta delta) {
this.delta = delta;
return this;
}
/**
* Get delta
* @return delta
*/
@jakarta.annotation.Nonnull
@NotNull
@Valid
@JsonProperty(JSON_PROPERTY_DELTA)
@JsonInclude(value = JsonInclude.Include.ALWAYS)
public ChatCompletionStreamResponseDelta getDelta() {
return delta;
}
@JsonProperty(JSON_PROPERTY_DELTA)
@JsonInclude(value = JsonInclude.Include.ALWAYS)
public void setDelta(ChatCompletionStreamResponseDelta delta) {
this.delta = delta;
}
public CreateChatCompletionStreamResponseChoicesInner logprobs(CreateChatCompletionResponseChoicesInnerLogprobs logprobs) {
this.logprobs = logprobs;
return this;
}
/**
* Get logprobs
* @return logprobs
*/
@jakarta.annotation.Nullable
@Valid
@JsonProperty(JSON_PROPERTY_LOGPROBS)
@JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
public CreateChatCompletionResponseChoicesInnerLogprobs getLogprobs() {
return logprobs;
}
@JsonProperty(JSON_PROPERTY_LOGPROBS)
@JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
public void setLogprobs(CreateChatCompletionResponseChoicesInnerLogprobs logprobs) {
this.logprobs = logprobs;
}
public CreateChatCompletionStreamResponseChoicesInner finishReason(FinishReasonEnum finishReason) {
this.finishReason = finishReason;
return this;
}
/**
* The reason the model stopped generating tokens. This will be `stop` if the model hit a natural stop point or a provided stop sequence, `length` if the maximum number of tokens specified in the request was reached, `content_filter` if content was omitted due to a flag from our content filters, `tool_calls` if the model called a tool, or `function_call` (deprecated) if the model called a function.
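   * In a streamed response this is typically {@code null} for every chunk before the final one for the choice.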
* @return finishReason
*/
@jakarta.annotation.Nullable
@NotNull
@JsonProperty(JSON_PROPERTY_FINISH_REASON)
@JsonInclude(value = JsonInclude.Include.ALWAYS)
public FinishReasonEnum getFinishReason() {
return finishReason;
}
@JsonProperty(JSON_PROPERTY_FINISH_REASON)
@JsonInclude(value = JsonInclude.Include.ALWAYS)
public void setFinishReason(FinishReasonEnum finishReason) {
this.finishReason = finishReason;
}
public CreateChatCompletionStreamResponseChoicesInner index(Integer index) {
this.index = index;
return this;
}
/**
* The index of the choice in the list of choices.
* @return index
*/
@jakarta.annotation.Nonnull
@NotNull
@JsonProperty(JSON_PROPERTY_INDEX)
@JsonInclude(value = JsonInclude.Include.ALWAYS)
public Integer getIndex() {
return index;
}
@JsonProperty(JSON_PROPERTY_INDEX)
@JsonInclude(value = JsonInclude.Include.ALWAYS)
public void setIndex(Integer index) {
this.index = index;
}
/**
* Return true if this CreateChatCompletionStreamResponse_choices_inner object is equal to o.
*/
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
CreateChatCompletionStreamResponseChoicesInner createChatCompletionStreamResponseChoicesInner = (CreateChatCompletionStreamResponseChoicesInner) o;
return Objects.equals(this.delta, createChatCompletionStreamResponseChoicesInner.delta) &&
Objects.equals(this.logprobs, createChatCompletionStreamResponseChoicesInner.logprobs) &&
Objects.equals(this.finishReason, createChatCompletionStreamResponseChoicesInner.finishReason) &&
Objects.equals(this.index, createChatCompletionStreamResponseChoicesInner.index);
}
@Override
public int hashCode() {
return Objects.hash(delta, logprobs, finishReason, index);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class CreateChatCompletionStreamResponseChoicesInner {\n");
sb.append(" delta: ").append(toIndentedString(delta)).append("\n");
sb.append(" logprobs: ").append(toIndentedString(logprobs)).append("\n");
sb.append(" finishReason: ").append(toIndentedString(finishReason)).append("\n");
sb.append(" index: ").append(toIndentedString(index)).append("\n");
sb.append("}");
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces
* (except the first line).
*/
private String toIndentedString(Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
}
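A minimal usage sketch for this generated model, assuming a plain Jackson ObjectMapper (the generated com.github.tjake.jlama.net.openai.JSON helper may wire up its own mapper) and that the companion ChatCompletionStreamResponseDelta model follows the same generated pattern; the class name StreamChoiceExample and the sample JSON are illustrative only:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.tjake.jlama.net.openai.model.ChatCompletionStreamResponseDelta;
import com.github.tjake.jlama.net.openai.model.CreateChatCompletionStreamResponseChoicesInner;

public class StreamChoiceExample {
    public static void main(String[] args) throws Exception {
        // Build a streamed choice with the fluent setters shown above.
        CreateChatCompletionStreamResponseChoicesInner choice =
                new CreateChatCompletionStreamResponseChoicesInner()
                        .delta(new ChatCompletionStreamResponseDelta())
                        .finishReason(CreateChatCompletionStreamResponseChoicesInner.FinishReasonEnum.STOP)
                        .index(0);

        ObjectMapper mapper = new ObjectMapper();

        // Serialize: the enum is written via its @JsonValue string, e.g. "stop".
        System.out.println(mapper.writeValueAsString(choice));

        // Deserialize an intermediate streamed chunk: finish_reason is still null here.
        CreateChatCompletionStreamResponseChoicesInner parsed = mapper.readValue(
                "{\"delta\":{\"content\":\"Hi\"},\"logprobs\":null,\"finish_reason\":null,\"index\":0}",
                CreateChatCompletionStreamResponseChoicesInner.class);
        System.out.println(parsed.getFinishReason()); // null until the final chunk
    }
}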