chatCompletion

fun Ollama.chatCompletion(model: ModelName, messages: List<Message>, stream: Boolean? = null, format: ResponseFormat? = null, keep_alive: String? = null, options: ModelOptions? = null): Result<Sequence<ChatCompletionResponse>, RemoteFailure> (source)
fun Ollama.chatCompletion(model: ModelName, message: Message, stream: Boolean? = null, format: ResponseFormat? = null, keep_alive: String? = null, options: ModelOptions? = null): Result<Sequence<ChatCompletionResponse>, RemoteFailure> (source)

See also