
Commit 6334bcf

test(ollama): request options customization
Also expose baseUrl to allow custom clients.
1 parent 2ce8aab · commit 6334bcf
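The new test exercises two things at once: `OllamaClient` can be pointed at an arbitrary `baseUrl`, and per-request options can be tuned through the `requestBuilderAction` hook. Below is a minimal sketch of how a caller might wire this up outside the test fixture, assuming only the constructor parameters and option fields (`seed`, `numCtx`, `numPredict`) that the diff further down actually uses; the helper name `customOllamaExecutor` and the localhost URL are illustrative, not part of this commit.

```kotlin
import ai.koog.prompt.executor.llms.SingleLLMPromptExecutor
import ai.koog.prompt.executor.ollama.client.OllamaClient

// Hypothetical helper: build an executor against any reachable Ollama endpoint,
// tuning per-request options. Only the options shown in the test below are used.
fun customOllamaExecutor(baseUrl: String = "http://localhost:11434"): SingleLLMPromptExecutor {
    val client = OllamaClient(
        baseUrl = baseUrl,
        requestBuilderAction = { _, _ ->
            seed = 0        // fixed sampling seed for reproducible output
            numCtx = 100    // small context window
            numPredict = 10 // cap the number of generated tokens
        },
    )
    return SingleLLMPromptExecutor(client)
}
```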

File tree

2 files changed (+31 −2 lines)


integration-tests/src/jvmMain/kotlin/ai/koog/integration/tests/OllamaTestFixture.kt

Lines changed: 2 additions & 1 deletion
@@ -18,6 +18,7 @@ class OllamaTestFixture {
 
     private lateinit var ollamaContainer: GenericContainer<*>
 
+    lateinit var baseUrl: String
     lateinit var client: OllamaClient
     lateinit var executor: SingleLLMPromptExecutor
     val model = OllamaModels.Meta.LLAMA_3_2
@@ -31,7 +32,7 @@ class OllamaTestFixture {
 
         val host = ollamaContainer.host
         val port = ollamaContainer.getMappedPort(PORT)
-        val baseUrl = "http://$host:$port"
+        baseUrl = "http://$host:$port"
         waitForOllamaServer(baseUrl)
 
         client = OllamaClient(baseUrl)

integration-tests/src/jvmTest/kotlin/ai/koog/integration/tests/OllamaClientIntegrationTest.kt

Lines changed: 29 additions & 1 deletion
@@ -4,14 +4,17 @@ import ai.koog.agents.core.tools.ToolDescriptor
 import ai.koog.agents.core.tools.ToolParameterDescriptor
 import ai.koog.agents.core.tools.ToolParameterType
 import ai.koog.prompt.dsl.Prompt
+import ai.koog.prompt.executor.llms.SingleLLMPromptExecutor
 import ai.koog.prompt.executor.model.PromptExecutorExt.execute
+import ai.koog.prompt.executor.ollama.client.OllamaClient
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.flow
 import kotlinx.coroutines.test.runTest
 import kotlinx.serialization.Serializable
 import org.junit.jupiter.api.Disabled
 import org.junit.jupiter.api.extension.ExtendWith
 import kotlin.test.Test
+import kotlin.test.assertEquals
 import kotlin.test.assertTrue
 import kotlin.time.Duration.Companion.seconds
 
@@ -20,6 +23,7 @@ class OllamaClientIntegrationTest {
     companion object {
         @field:InjectOllamaTestFixture
         private lateinit var fixture: OllamaTestFixture
+        private val baseUrl get() = fixture.baseUrl
         private val executor get() = fixture.executor
         private val model get() = fixture.model
     }
@@ -37,6 +41,30 @@ class OllamaClientIntegrationTest {
         assertTrue(response.content.contains("Paris"), "Response should contain 'Paris'")
     }
 
+    @Test
+    fun `ollama_test execute simple prompt with options`() = runTest(timeout = 600.seconds) {
+        // Create a custom client
+        val client = OllamaClient(
+            baseUrl = baseUrl,
+            requestBuilderAction = { _, _ ->
+                seed = 0
+                numCtx = 100
+                numPredict = 10
+            },
+        )
+        val executor = SingleLLMPromptExecutor(client)
+
+        val prompt = Prompt.build("test-with-options") {
+            system("You are a helpful assistant.")
+            user("What is the capital of France?")
+        }
+
+        val response = executor.execute(prompt = prompt, model = model)
+
+        assertTrue(response.content.isNotEmpty(), "Response should not be empty")
+        assertEquals("The capital of France is Paris.", response.content)
+    }
+
     @Test
     fun `ollama_test execute tools with required parameters`() = runTest(timeout = 600.seconds) {
         val searchTool = ToolDescriptor(
@@ -607,4 +635,4 @@ class OllamaClientIntegrationTest {
             println()
         }
     }
-}
+}
