5 changes: 0 additions & 5 deletions langchain4j-kotlin/pom.xml
@@ -36,11 +36,6 @@
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
-        <dependency>
-            <groupId>org.jetbrains.kotlinx</groupId>
-            <artifactId>kotlinx-coroutines-test-jvm</artifactId>
-            <scope>test</scope>
-        </dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-open-ai</artifactId>
@@ -11,14 +11,14 @@ import java.util.Properties
* their renderers. The configurations are loaded from a properties file, and components are instantiated dynamically
* based on the class names specified in the properties.
*/
-object Configuration {
-    val properties: Properties = loadProperties()
+public object Configuration {
+    public val properties: Properties = loadProperties()

-    operator fun get(key: String): String = properties.getProperty(key)
+    public operator fun get(key: String): String = properties.getProperty(key)

-    val promptTemplateSource: PromptTemplateSource =
+    public val promptTemplateSource: PromptTemplateSource =
        createInstanceByName(this["prompt.template.source"])
-    val promptTemplateRenderer: TemplateRenderer =
+    public val promptTemplateRenderer: TemplateRenderer =
        createInstanceByName(this["prompt.template.renderer"])
}

@@ -7,7 +7,7 @@ package me.kpavlov.langchain4j.kotlin
* and its implementations to specify the input parameter for retrieving
* system messages.
*/
-typealias ChatMemoryId = Any
+public typealias ChatMemoryId = Any

/**
* Type alias for the name of a template.
@@ -16,7 +16,7 @@ typealias ChatMemoryId = Any
* of the codebase, providing a clearer and more specific meaning compared
* to using `String` directly.
*/
-typealias TemplateName = String
+public typealias TemplateName = String

/**
* Represents the content of a template.
@@ -25,7 +25,7 @@ typealias TemplateName = String
* which is expected to be in the form of a string. Various classes and functions that deal
* with templates will utilize this type alias to ensure consistency and clarity.
*/
-typealias TemplateContent = String
+public typealias TemplateContent = String

/**
* Type alias for a string representing the content of a prompt.
@@ -34,4 +34,4 @@ typealias TemplateContent = String
* by various functions and methods within the system that deal with
* generating and handling prompts.
*/
-typealias PromptContent = String
+public typealias PromptContent = String
@@ -8,14 +8,12 @@ import me.kpavlov.langchain4j.kotlin.model.chat.asReplyFlow
import java.lang.reflect.ParameterizedType
import java.lang.reflect.Type

-class TokenStreamToReplyFlowAdapter : TokenStreamAdapter {
+public class TokenStreamToReplyFlowAdapter : TokenStreamAdapter {
    override fun canAdaptTokenStreamTo(type: Type?): Boolean {
-        if (type is ParameterizedType) {
-            if (type.rawType === Flow::class.java) {
-                val typeArguments: Array<Type?> = type.actualTypeArguments
-                return typeArguments.size == 1 &&
-                    typeArguments[0] === StreamingChatLanguageModelReply::class.java
-            }
+        if (type is ParameterizedType && type.rawType === Flow::class.java) {
+            val typeArguments: Array<Type> = type.actualTypeArguments
+            return typeArguments.size == 1 &&
+                typeArguments[0] === StreamingChatLanguageModelReply::class.java
        }
        return false
    }
@@ -7,8 +7,8 @@ import me.kpavlov.langchain4j.kotlin.model.chat.asFlow
import java.lang.reflect.ParameterizedType
import java.lang.reflect.Type

-class TokenStreamToStringFlowAdapter : TokenStreamAdapter {
-    override fun canAdaptTokenStreamTo(type: Type?): Boolean {
+public class TokenStreamToStringFlowAdapter : TokenStreamAdapter {
+    public override fun canAdaptTokenStreamTo(type: Type?): Boolean {
if (type is ParameterizedType) {
if (type.rawType === Flow::class.java) {
val typeArguments: Array<Type?> = type.actualTypeArguments
@@ -18,5 +18,5 @@ class TokenStreamToStringFlowAdapter : TokenStreamAdapter {
return false
}

-    override fun adapt(tokenStream: TokenStream): Any = tokenStream.asFlow()
+    public override fun adapt(tokenStream: TokenStream): Any = tokenStream.asFlow()
}
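Note (not part of the diff): these TokenStreamAdapter implementations are what let an AI Service method return a Kotlin Flow. A minimal sketch of that usage, assuming the adapters are picked up through the ServiceLoader mechanism and using the standard AiServices builder; the Assistant interface and prompt are illustrative:

```kotlin
import dev.langchain4j.model.chat.StreamingChatLanguageModel
import dev.langchain4j.service.AiServices
import dev.langchain4j.service.UserMessage
import kotlinx.coroutines.flow.Flow

// Hypothetical AI Service: the Flow<String> return type is adapted by
// TokenStreamToStringFlowAdapter; Flow<StreamingChatLanguageModelReply>
// would be adapted by TokenStreamToReplyFlowAdapter.
interface Assistant {
    @UserMessage("Tell me a short story about {{it}}")
    fun tellMeAbout(topic: String): Flow<String>
}

// Wires the service to a streaming model via the AiServices builder.
fun assistant(model: StreamingChatLanguageModel): Assistant =
    AiServices
        .builder(Assistant::class.java)
        .streamingChatLanguageModel(model)
        .build()
```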
@@ -26,7 +26,7 @@ import me.kpavlov.langchain4j.kotlin.model.chat.request.chatRequest
* @see ChatRequest
* @see ChatResponse
*/
-suspend fun ChatLanguageModel.chatAsync(request: ChatRequest): ChatResponse =
+public suspend fun ChatLanguageModel.chatAsync(request: ChatRequest): ChatResponse =
    coroutineScope { this@chatAsync.chat(request) }

/**
@@ -53,7 +53,7 @@ suspend fun ChatLanguageModel.chatAsync(request: ChatRequest): ChatResponse =
* @see ChatRequest.Builder
* @see chatAsync
*/
-suspend fun ChatLanguageModel.chatAsync(requestBuilder: ChatRequest.Builder): ChatResponse =
+public suspend fun ChatLanguageModel.chatAsync(requestBuilder: ChatRequest.Builder): ChatResponse =
chatAsync(requestBuilder.build())

/**
@@ -79,7 +79,7 @@ suspend fun ChatLanguageModel.chatAsync(requestBuilder: ChatRequest.Builder): Ch
* associated metadata.
* @throws Exception if the chat request fails or encounters an error during execution.
*/
-suspend fun ChatLanguageModel.chatAsync(block: ChatRequestBuilder.() -> Unit): ChatResponse =
+public suspend fun ChatLanguageModel.chatAsync(block: ChatRequestBuilder.() -> Unit): ChatResponse =
chatAsync(chatRequest(block))

/**
@@ -109,5 +109,5 @@ suspend fun ChatLanguageModel.chatAsync(block: ChatRequestBuilder.() -> Unit): C
* @see ChatRequest.Builder
* @see ChatRequestBuilder
*/
-fun ChatLanguageModel.chat(requestBuilder: ChatRequest.Builder): ChatResponse =
+public fun ChatLanguageModel.chat(requestBuilder: ChatRequest.Builder): ChatResponse =
this.chat(requestBuilder.build())
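Note (not part of the diff): a minimal usage sketch of the chatAsync builder overload above. The extension's package follows the imports visible elsewhere in this diff; the model wiring, prompt, and response handling are illustrative:

```kotlin
import dev.langchain4j.data.message.SystemMessage
import dev.langchain4j.data.message.UserMessage
import dev.langchain4j.model.chat.ChatLanguageModel
import kotlinx.coroutines.runBlocking
import me.kpavlov.langchain4j.kotlin.model.chat.chatAsync

// Suspending call built with the ChatRequestBuilder DSL from this PR.
fun demo(model: ChatLanguageModel) = runBlocking {
    val response = model.chatAsync {
        messages(
            listOf(
                SystemMessage.from("You are a terse assistant."),
                UserMessage.from("Say hello."),
            ),
        )
    }
    println(response.aiMessage().text())
}
```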
@@ -20,7 +20,7 @@ private val logger = LoggerFactory.getLogger(StreamingChatLanguageModel::class.j
* Represents different types of replies that can be received from an AI language model during streaming.
* This sealed interface provides type-safe handling of both intermediate tokens and final completion responses.
*/
-sealed interface StreamingChatLanguageModelReply {
+public sealed interface StreamingChatLanguageModelReply {
/**
* Represents a partial response received from an AI language model during a streaming interaction.
*
@@ -31,7 +31,7 @@ sealed interface StreamingChatLanguageModelReply {
* @property token The string representation of the token generated as part of the streaming process.
* @see StreamingChatResponseHandler.onPartialResponse
*/
-    data class PartialResponse(
+    public data class PartialResponse(
val token: String,
) : StreamingChatLanguageModelReply

@@ -45,7 +45,7 @@ sealed interface StreamingChatLanguageModelReply {
* @property response The final chat response generated by the model.
* @see StreamingChatResponseHandler.onCompleteResponse
*/
-    data class CompleteResponse(
+    public data class CompleteResponse(
val response: ChatResponse,
) : StreamingChatLanguageModelReply

@@ -58,7 +58,7 @@ sealed interface StreamingChatLanguageModelReply {
* @property cause The underlying exception or error that caused the failure.
* @see StreamingChatResponseHandler.onError
*/
-    data class Error(
+    public data class Error(
val cause: Throwable,
) : StreamingChatLanguageModelReply
}
@@ -79,7 +79,7 @@ sealed interface StreamingChatLanguageModelReply {
* types of replies during the chat interaction, including partial responses,
* final responses, and errors.
*/
-fun StreamingChatLanguageModel.chatFlow(
+public fun StreamingChatLanguageModel.chatFlow(
block: ChatRequestBuilder.() -> Unit,
): Flow<StreamingChatLanguageModelReply> =
callbackFlow {
@@ -127,7 +127,7 @@ fun StreamingChatLanguageModel.chatFlow(
}
}

-fun TokenStream.asFlow(): Flow<String> =
+public fun TokenStream.asFlow(): Flow<String> =
flow {
callbackFlow {
onPartialResponse { trySend(it) }
@@ -138,7 +138,7 @@ fun TokenStream.asFlow(): Flow<String> =
}.buffer(Channel.UNLIMITED).collect(this)
}

-fun TokenStream.asReplyFlow(): Flow<StreamingChatLanguageModelReply> =
+public fun TokenStream.asReplyFlow(): Flow<StreamingChatLanguageModelReply> =
flow {
callbackFlow<StreamingChatLanguageModelReply> {
onPartialResponse { token ->
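Note (not part of the diff): collecting the chatFlow stream above could look like the sketch below. Package names follow the imports visible in this diff; the prompt and printing are illustrative:

```kotlin
import dev.langchain4j.data.message.UserMessage
import dev.langchain4j.model.chat.StreamingChatLanguageModel
import kotlinx.coroutines.runBlocking
import me.kpavlov.langchain4j.kotlin.model.chat.StreamingChatLanguageModelReply
import me.kpavlov.langchain4j.kotlin.model.chat.chatFlow

// Streams partial tokens, then a final response or an error.
fun streamDemo(model: StreamingChatLanguageModel) = runBlocking {
    model
        .chatFlow {
            message(UserMessage.from("Stream a short greeting."))
        }.collect { reply ->
            when (reply) {
                is StreamingChatLanguageModelReply.PartialResponse -> print(reply.token)
                is StreamingChatLanguageModelReply.CompleteResponse -> println()
                is StreamingChatLanguageModelReply.Error -> reply.cause.printStackTrace()
            }
        }
}
```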
@@ -17,7 +17,7 @@
* and/or parameters for the `ChatRequest`.
* @return A fully constructed `ChatRequest` instance based on the applied configurations.
*/
-fun chatRequest(block: ChatRequestBuilder.() -> Unit): ChatRequest {
+public fun chatRequest(block: ChatRequestBuilder.() -> Unit): ChatRequest {
val builder = ChatRequestBuilder()
builder.apply { block() }
return builder.build()
@@ -44,19 +44,19 @@
* @property responseFormat Specifies the format of the response, such as plain text or structured data.
*/
@Suppress("LongParameterList")
-open class ChatRequestParametersBuilder<B : DefaultChatRequestParameters.Builder<*>>(
-    val builder: B,
-    var modelName: String? = null,
-    var temperature: Double? = null,
-    var topP: Double? = null,
-    var topK: Int? = null,
-    var frequencyPenalty: Double? = null,
-    var presencePenalty: Double? = null,
-    var maxOutputTokens: Int? = null,
-    var stopSequences: List<String>? = null,
-    var toolSpecifications: List<ToolSpecification>? = null,
-    var toolChoice: ToolChoice? = null,
-    var responseFormat: ResponseFormat? = null,
+public open class ChatRequestParametersBuilder<B : DefaultChatRequestParameters.Builder<*>>(
+    public val builder: B,
+    public var modelName: String? = null,
+    public var temperature: Double? = null,
+    public var topP: Double? = null,
+    public var topK: Int? = null,
+    public var frequencyPenalty: Double? = null,
+    public var presencePenalty: Double? = null,
+    public var maxOutputTokens: Int? = null,
+    public var stopSequences: List<String>? = null,
+    public var toolSpecifications: List<ToolSpecification>? = null,
+    public var toolChoice: ToolChoice? = null,
+    public var responseFormat: ResponseFormat? = null,
)

/**
@@ -66,25 +66,26 @@
* This builder provides methods to add individual or multiple chat messages,
* as well as set request parameters for the generated `ChatRequest`.
*/
-open class ChatRequestBuilder(
-    var messages: MutableList<ChatMessage> = mutableListOf(),
-    var parameters: ChatRequestParameters? = null,
+public open class ChatRequestBuilder(
+    public var messages: MutableList<ChatMessage> = mutableListOf(),
+    public var parameters: ChatRequestParameters? = null,
) {
/**
* Adds a list of `ChatMessage` objects to the builder's messages collection.
*
* @param value The list of `ChatMessage` objects to be added to the builder.
* @return This builder instance for chaining other method calls.
*/
-    fun messages(value: List<ChatMessage>) = apply { this.messages.addAll(value) }
+    public fun messages(value: List<ChatMessage>): ChatRequestBuilder =
+        apply { this.messages.addAll(value) }

Codecov / codecov/patch warning: added line langchain4j-kotlin/src/main/kotlin/me/kpavlov/langchain4j/kotlin/model/chat/request/ChatRequestExtensions.kt#L80 was not covered by tests.

/**
* Adds a chat message to the messages list.
*
* @param value The chat message to be added.
* @return The current instance for method chaining.
*/
-    fun message(value: ChatMessage) = apply { this.messages.add(value) }
+    public fun message(value: ChatMessage): ChatRequestBuilder = apply { this.messages.add(value) }

Codecov / codecov/patch warning: added line langchain4j-kotlin/src/main/kotlin/me/kpavlov/langchain4j/kotlin/model/chat/request/ChatRequestExtensions.kt#L88 was not covered by tests.

/**
* Builds and returns a ChatRequest instance using the current state of messages and parameters.
@@ -106,7 +107,7 @@
* @param configurer A lambda with the builder as receiver to configure the chat request parameters.
*/
@JvmOverloads
-    fun <B : DefaultChatRequestParameters.Builder<*>> parameters(
+    public fun <B : DefaultChatRequestParameters.Builder<*>> parameters(
@Suppress("UNCHECKED_CAST")
builder: B = DefaultChatRequestParameters.builder() as B,
configurer: ChatRequestParametersBuilder<B>.() -> Unit,
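Note (not part of the diff): a sketch of the chatRequest DSL defined in this file, assuming type inference falls back to the default DefaultChatRequestParameters builder inside the parameters block; the model name and values are illustrative:

```kotlin
import dev.langchain4j.data.message.UserMessage
import dev.langchain4j.model.chat.request.ChatRequest
import me.kpavlov.langchain4j.kotlin.model.chat.request.chatRequest

// Builds a ChatRequest with the Kotlin DSL introduced in this file.
val request: ChatRequest =
    chatRequest {
        message(UserMessage.from("Summarize the changes in this PR."))
        parameters {
            modelName = "gpt-4o-mini"
            temperature = 0.1
            maxOutputTokens = 256
        }
    }
```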
@@ -10,7 +10,7 @@ import me.kpavlov.langchain4j.kotlin.TemplateContent
* incorporate variables or placeholders.
*/
public interface PromptTemplate {
-    fun content(): TemplateContent
+    public fun content(): TemplateContent
}

/**
@@ -25,5 +25,5 @@ public interface PromptTemplate {
public data class SimplePromptTemplate(
private val content: TemplateContent,
) : PromptTemplate {
-    override fun content(): TemplateContent = content
+    public override fun content(): TemplateContent = content
}
@@ -9,12 +9,12 @@ import me.kpavlov.langchain4j.kotlin.TemplateName
* a template name. The implementation of this interface will determine
* how and from where the templates are sourced.
*/
-interface PromptTemplateSource {
+public interface PromptTemplateSource {
/**
* Retrieves a prompt template based on the provided template name.
*
* @param name The name of the template to retrieve.
* @return The prompt template associated with the specified name, or null if no such template exists.
*/
-    fun getTemplate(name: TemplateName): PromptTemplate?
+    public fun getTemplate(name: TemplateName): PromptTemplate?
}
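Note (not part of the diff): a hypothetical in-memory implementation of PromptTemplateSource, assuming PromptTemplate, SimplePromptTemplate, and PromptTemplateSource sit in the me.kpavlov.langchain4j.kotlin.prompt package, as the imports in this diff suggest:

```kotlin
import me.kpavlov.langchain4j.kotlin.TemplateName
import me.kpavlov.langchain4j.kotlin.prompt.PromptTemplate
import me.kpavlov.langchain4j.kotlin.prompt.PromptTemplateSource
import me.kpavlov.langchain4j.kotlin.prompt.SimplePromptTemplate

// Looks templates up in a plain map; returns null for unknown names.
class InMemoryPromptTemplateSource(
    private val templates: Map<TemplateName, String>,
) : PromptTemplateSource {
    override fun getTemplate(name: TemplateName): PromptTemplate? =
        templates[name]?.let { SimplePromptTemplate(it) }
}
```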
@@ -16,8 +16,8 @@ private val logger = LoggerFactory.getLogger(RenderablePromptTemplate::class.jav
* @property content The content of the template.
* @property templateRenderer The renderer used for generating the final template string from the content and variables.
*/
-class RenderablePromptTemplate(
-    val name: TemplateName,
+public class RenderablePromptTemplate(
+    public val name: TemplateName,
private val content: TemplateContent,
private val templateRenderer: TemplateRenderer,
) : PromptTemplate,
@@ -8,14 +8,14 @@ import java.util.function.Function
* Interface for providing LLM system messages based on a given chat memory identifier.
*/
@FunctionalInterface
-interface SystemMessageProvider : Function<ChatMemoryId, PromptContent?> {
+public interface SystemMessageProvider : Function<ChatMemoryId, PromptContent?> {
/**
* Provides a system message based on the given chat memory identifier.
*
* @param chatMemoryID Identifier for the chat memory used to generate the system message.
* @return A system prompt string associated with the provided chat memory identifier, maybe `null`
*/
-    fun getSystemMessage(chatMemoryID: ChatMemoryId): PromptContent?
+    public fun getSystemMessage(chatMemoryID: ChatMemoryId): PromptContent?

/**
* Applies the given chat memory identifier to generate the corresponding system message.
@@ -24,5 +24,6 @@ interface SystemMessageProvider : Function<ChatMemoryId, PromptContent?> {
* @return The prompt content associated with the specified chat memory identifier,
* or `null` if no system message is available.
*/
-    override fun apply(chatMemoryID: ChatMemoryId): PromptContent? = getSystemMessage(chatMemoryID)
+    public override fun apply(chatMemoryID: ChatMemoryId): PromptContent? =
+        getSystemMessage(chatMemoryID)
}
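Note (not part of the diff): a hypothetical SystemMessageProvider that keys the system prompt off the chat memory id. The me.kpavlov.langchain4j.kotlin.service package for the interface is an assumption; ChatMemoryId and PromptContent are the type aliases changed earlier in this diff:

```kotlin
import me.kpavlov.langchain4j.kotlin.ChatMemoryId
import me.kpavlov.langchain4j.kotlin.PromptContent
// Package assumed; adjust to wherever SystemMessageProvider actually lives.
import me.kpavlov.langchain4j.kotlin.service.SystemMessageProvider

// Picks a system prompt per chat memory id; falls back to a default.
class PerMemorySystemMessageProvider : SystemMessageProvider {
    override fun getSystemMessage(chatMemoryID: ChatMemoryId): PromptContent =
        when (chatMemoryID) {
            "admin-session" -> "You are assisting an administrator. Be precise."
            else -> "You are a helpful assistant."
        }
}
```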
@@ -14,14 +14,14 @@ import me.kpavlov.langchain4j.kotlin.prompt.TemplateRenderer
* @property promptTemplateSource Source from which the prompt templates are fetched.
* @property promptTemplateRenderer Renderer used to render the content with specific variables.
*/
-open class TemplateSystemMessageProvider(
+public open class TemplateSystemMessageProvider(
private val templateName: TemplateName,
private val promptTemplateSource: PromptTemplateSource = Configuration.promptTemplateSource,
private val promptTemplateRenderer: TemplateRenderer = Configuration.promptTemplateRenderer,
) : SystemMessageProvider {
-    open fun templateName(): TemplateName = templateName
+    public open fun templateName(): TemplateName = templateName

-    constructor(
+    public constructor(
templateName: TemplateName,
) : this(
templateName = templateName,
@@ -7,7 +7,7 @@ import me.kpavlov.langchain4j.kotlin.data.document.loadAsync
import org.slf4j.Logger
import java.nio.file.Paths

-suspend fun loadDocument(
+public suspend fun loadDocument(
documentName: String,
logger: Logger,
): Document {
@@ -2,7 +2,7 @@ package me.kpavlov.langchain4j.kotlin

import me.kpavlov.aimocks.openai.MockOpenai

-object TestEnvironment : me.kpavlov.finchly.BaseTestEnvironment(
+internal object TestEnvironment : me.kpavlov.finchly.BaseTestEnvironment(
dotEnvFileDir = "../",
) {
val openaiApiKey = get("OPENAI_API_KEY", "demo")
@@ -30,7 +30,7 @@ import org.mockito.kotlin.doAnswer
import org.mockito.kotlin.whenever

@ExtendWith(MockitoExtension::class)
-class ServiceWithFlowTest {
+internal class ServiceWithFlowTest {
@Mock
private lateinit var model: StreamingChatLanguageModel
