Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion settings.gradle.kts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
plugins {
id("org.gradle.toolchains.foojay-resolver-convention") version "0.8.0"
id("org.gradle.toolchains.foojay-resolver-convention") version "1.0.0"
}
rootProject.name = "coral-server"

1 change: 1 addition & 0 deletions src/main/kotlin/org/coralprotocol/coralserver/Main.kt
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ fun main(args: Array<String>) {
blockchainModule,
networkModule,
agentModule,
llmProxyModule,
sessionModule,
module {
single {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
package org.coralprotocol.coralserver.agent.registry

import kotlinx.serialization.Serializable

/**
 * Agent-declared LLM configuration, parsed from a registry agent definition.
 *
 * @property proxies proxy endpoint declarations for this agent; empty when the
 *   agent does not request any proxied LLM access. Count and contents are
 *   validated by the registry validation pass (validateLlm).
 */
@Serializable
data class AgentLlmConfig(
    val proxies: List<AgentLlmProxy> = emptyList()
)

/**
 * One LLM proxy endpoint requested by an agent.
 *
 * @property name identifier for this proxy entry; registry validation requires
 *   uppercase letters, digits, and underscores only, unique within the agent.
 * @property format provider/wire-format id (e.g. "openai", "anthropic",
 *   "openrouter"); must resolve to a known LlmProviderProfile.
 * @property model optional model name — NOTE(review): behavior when null
 *   (provider default? caller-chosen?) is decided by the proxy, not visible here.
 */
@Serializable
data class AgentLlmProxy(
    val name: String,
    val format: String,
    val model: String? = null
)
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ data class RegistryAgent(
val edition: Int = MAXIMUM_SUPPORTED_AGENT_VERSION,
val runtimes: LocalAgentRuntimes,
val options: Map<String, AgentOption> = mapOf(),
val llm: AgentLlmConfig? = null,
val marketplace: RegistryAgentMarketplaceSettings? = null,

@Transient
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import org.bitcoinj.core.Base58
import org.coralprotocol.coralserver.agent.registry.option.AgentOption
import org.coralprotocol.coralserver.agent.runtime.PrototypeRuntime
import org.coralprotocol.coralserver.agent.runtime.prototype.*
import org.coralprotocol.coralserver.llmproxy.LlmProviderProfile
import java.net.URI
import java.net.URISyntaxException

Expand Down Expand Up @@ -72,6 +73,12 @@ val AGENT_MARKETPLACE_PRICING_DESCRIPTION_LENGTH = 1..256
const val AGENT_MARKETPLACE_PRICING_MIN_MIN = 0.00
const val AGENT_MARKETPLACE_PRICING_MIN_MAX = 20.00

// [llm.proxies]
// Limits enforced by validateLlm() on agent-declared LLM proxy entries.
const val AGENT_LLM_PROXIES_MAX_ENTRIES = 16
val AGENT_LLM_PROXY_NAME_LENGTH = 1..32
// Uppercase letters, digits, underscore only — NOTE(review): presumably
// env-var-safe identifiers; confirm against how proxy names are consumed.
val AGENT_LLM_PROXY_NAME_PATTERN = "^[A-Z_0-9]+$".toRegex()
val AGENT_LLM_PROXY_MODEL_LENGTH = 1..128

// [marketplace.identities.erc8004]
const val AGENT_MARKETPLACE_ERC8004_ENDPOINTS_MAX_ENTRIES = 32
val AGENT_MARKETPLACE_ERC8004_ENDPOINTS_NAME_LENGTH = 1..32
Expand Down Expand Up @@ -583,11 +590,36 @@ private fun RegistryAgent.validateMarketplace() {
*
* @throws RegistryException if this registry agent contains any number of invalid values
*/
private fun RegistryAgent.validateLlm() {
    // The llm section is optional; nothing to check when absent.
    // (Renamed local to avoid shadowing the `llm` property.)
    val config = llm ?: return

    if (config.proxies.size > AGENT_LLM_PROXIES_MAX_ENTRIES)
        throw RegistryException("llm proxy count cannot exceed $AGENT_LLM_PROXIES_MAX_ENTRIES, was ${config.proxies.size}")

    // Proxy names must be unique across the whole list.
    val seenNames = mutableSetOf<String>()
    config.proxies.forEachIndexed { index, proxy ->
        validateStringLength("llm.proxies[$index].name", proxy.name, AGENT_LLM_PROXY_NAME_LENGTH)

        if (!proxy.name.matches(AGENT_LLM_PROXY_NAME_PATTERN))
            throw RegistryException("llm.proxies[$index].name (\"${proxy.name}\") must only contain uppercase alphanumeric or underscore characters")

        if (!seenNames.add(proxy.name))
            throw RegistryException("llm.proxies[$index].name (\"${proxy.name}\") is not unique")

        // Format must map to a known provider profile (case-insensitive lookup).
        if (LlmProviderProfile.fromId(proxy.format) == null)
            throw RegistryException("llm.proxies[$index].format (\"${proxy.format}\") is not a known format. Valid formats: ${LlmProviderProfile.entries.joinToString { it.providerId }}")

        // Model is optional; only length-checked when present.
        if (proxy.model != null)
            validateStringLength("llm.proxies[$index].model", proxy.model, AGENT_LLM_PROXY_MODEL_LENGTH)
    }
}

/**
 * Runs every validation pass over this registry agent.
 *
 * The sequence only affects which error surfaces first; all sections
 * must pass for the agent to be considered valid.
 *
 * @throws RegistryException if any section contains invalid values
 */
fun RegistryAgent.validate() {
    validateName()
    validateVersion()
    validateOptionalAgentInfo()
    validateRuntimes()
    validateOptions()
    validateLlm()
    validateMarketplace()
}
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,10 @@ data class UnresolvedRegistryAgent(
@Optional
val options: Map<String, AgentOption> = mapOf(),

@Description("LLM proxy configuration declaring which proxy endpoints this agent needs")
@Optional
val llm: AgentLlmConfig? = null,

@Description("Information for this agent relevant to it's potential listing on the marketplace")
@Optional
val marketplace: RegistryAgentMarketplaceSettings? = null
Expand Down Expand Up @@ -114,6 +118,7 @@ data class UnresolvedRegistryAgent(
info = agentInfo.resolve(context.registrySourceIdentifier),
runtimes = runtimes,
options = options,
llm = llm,
path = context.path,
marketplace = marketplace
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,12 @@ class ApplicationRuntimeContext(
return builder.build()
}

/**
 * Builds the per-agent LLM proxy URL: `<api base>/llm-proxy/<agent secret>`.
 */
fun getLlmProxyUrl(executionContext: SessionAgentExecutionContext, addressConsumer: AddressConsumer): Url =
    URLBuilder(getApiUrl(addressConsumer)).apply {
        // The agent's secret is embedded as a path segment —
        // NOTE(review): confirm it is acceptable for this to appear in URLs/logs.
        appendPathSegments("llm-proxy", executionContext.agent.secret)
    }.build()

fun getMcpUrl(
transport: McpTransportType,
executionContext: SessionAgentExecutionContext,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,9 @@ sealed interface PrototypeApiUrl {

@Serializable
@SerialName("proxy")
object Proxy : PrototypeApiUrl {
data object Proxy : PrototypeApiUrl {
override fun resolve(executionContext: SessionAgentExecutionContext): String {
TODO("Not yet implemented")
TODO("format changing soon")
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import ai.koog.prompt.executor.clients.openai.OpenAIModels
import ai.koog.prompt.executor.clients.openrouter.OpenRouterClientSettings
import ai.koog.prompt.executor.clients.openrouter.OpenRouterLLMClient
import ai.koog.prompt.executor.clients.openrouter.OpenRouterModels
import ai.koog.prompt.executor.llms.SingleLLMPromptExecutor
import ai.koog.prompt.executor.llms.MultiLLMPromptExecutor
import ai.koog.prompt.executor.model.PromptExecutor
import ai.koog.prompt.llm.LLModel
import dev.eav.tomlkt.TomlClassDiscriminator
Expand Down Expand Up @@ -60,15 +60,16 @@ sealed class PrototypeModelProvider {
override val name: PrototypeString,
override val url: PrototypeApiUrl? = null,
) : PrototypeModelProvider() {
override fun getExecutor(executionContext: SessionAgentExecutionContext): PromptExecutor =
SingleLLMPromptExecutor(
override fun getExecutor(executionContext: SessionAgentExecutionContext): PromptExecutor {
return MultiLLMPromptExecutor(
OpenAILLMClient(
apiKey = key.resolve(executionContext),
settings = if (url == null) OpenAIClientSettings() else OpenAIClientSettings(
baseUrl = url.resolve(executionContext)
)
)
)
}

override val modelClass: Any
get() = OpenAIModels.Chat
Expand All @@ -81,15 +82,16 @@ sealed class PrototypeModelProvider {
override val name: PrototypeString,
override val url: PrototypeApiUrl? = null,
) : PrototypeModelProvider() {
override fun getExecutor(executionContext: SessionAgentExecutionContext): PromptExecutor =
SingleLLMPromptExecutor(
override fun getExecutor(executionContext: SessionAgentExecutionContext): PromptExecutor {
return MultiLLMPromptExecutor(
AnthropicLLMClient(
apiKey = key.resolve(executionContext),
settings = if (url == null) AnthropicClientSettings() else AnthropicClientSettings(
baseUrl = url.resolve(executionContext)
)
)
)
}

override val modelClass: Any
get() = AnthropicModels
Expand All @@ -102,15 +104,16 @@ sealed class PrototypeModelProvider {
override val name: PrototypeString,
override val url: PrototypeApiUrl? = null,
) : PrototypeModelProvider() {
override fun getExecutor(executionContext: SessionAgentExecutionContext): PromptExecutor =
SingleLLMPromptExecutor(
override fun getExecutor(executionContext: SessionAgentExecutionContext): PromptExecutor {
return MultiLLMPromptExecutor(
OpenRouterLLMClient(
apiKey = key.resolve(executionContext),
settings = if (url == null) OpenRouterClientSettings() else OpenRouterClientSettings(
baseUrl = url.resolve(executionContext)
)
)
)
}

override val modelClass: Any
get() = OpenRouterModels
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
package org.coralprotocol.coralserver.config

/**
 * Server-side configuration for the LLM proxy, bound from the
 * `llm-proxy` section of the root config.
 */
data class LlmProxyConfig(
    // Whether the LLM proxy feature is active at all.
    val enabled: Boolean = true,
    // Timeout applied to each upstream LLM request.
    val requestTimeoutSeconds: Long = 300,
    // Retry attempts on failure; 0 disables retries —
    // NOTE(review): confirm whether this counts the initial attempt.
    val retryMaxAttempts: Int = 0,
    // Delay before the first retry; presumably grows toward retryMaxDelayMs
    // (backoff) — confirm in the proxy implementation.
    val retryInitialDelayMs: Long = 1000,
    // Upper bound on the delay between retries.
    val retryMaxDelayMs: Long = 10000,
    // Per-provider overrides, keyed by provider id (e.g. "openai").
    val providers: Map<String, LlmProxyProviderConfig> = emptyMap()
)

/**
 * Optional per-provider overrides for the LLM proxy. Every field is nullable;
 * a null value presumably falls back to the provider profile defaults or the
 * top-level LlmProxyConfig settings — NOTE(review): confirm fallback order
 * in the proxy implementation.
 */
data class LlmProxyProviderConfig(
    // API key for this provider.
    val apiKey: String? = null,
    // Override for the provider's default base URL.
    val baseUrl: String? = null,
    // Per-provider request timeout override.
    val timeoutSeconds: Long? = null
)
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,10 @@ data class RootConfig(
val loggingConfig: LoggingConfig = LoggingConfig(),

@param:ConfigAlias("console")
val consoleConfig: ConsoleConfig = ConsoleConfig()
val consoleConfig: ConsoleConfig = ConsoleConfig(),

@param:ConfigAlias("llm-proxy")
val llmProxyConfig: LlmProxyConfig = LlmProxyConfig()
) {
/**
* Calculates the address required to access the server for a given consumer.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable
import kotlinx.serialization.json.JsonClassDiscriminator
import org.coralprotocol.coralserver.agent.graph.UniqueAgentName
import org.coralprotocol.coralserver.llmproxy.LlmErrorKind
import org.coralprotocol.coralserver.session.*
import org.coralprotocol.coralserver.util.InstantSerializer
import org.coralprotocol.coralserver.util.utcTimeNow
Expand Down Expand Up @@ -77,4 +78,18 @@ sealed class SessionEvent {
@Serializable
@SerialName("docker_container_removed")
data class DockerContainerRemoved(val containerId: String) : SessionEvent()

/**
 * Session event emitted for an LLM call routed through the server's LLM
 * proxy, carrying usage and outcome telemetry.
 */
@Serializable
@SerialName("llm_proxy_call")
data class LlmProxyCall(
    val agentName: UniqueAgentName,
    // Provider id, e.g. "openai".
    val provider: String,
    // NOTE(review): presumably null when the model could not be determined
    // from the request/response — confirm in the proxy implementation.
    val model: String?,
    // Token counts are nullable; the upstream may not report usage.
    val inputTokens: Long?,
    val outputTokens: Long?,
    val durationMs: Long,
    val streaming: Boolean,
    val success: Boolean,
    // Failure classification; expected to be null on success.
    val errorKind: LlmErrorKind? = null
) : SessionEvent()
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
package org.coralprotocol.coralserver.llmproxy

import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable

/**
 * Coarse classification of an LLM proxy call failure. The serial names are
 * the wire values used in session events. The mapping from raw upstream
 * errors to these kinds happens in the proxy implementation (not visible
 * in this file).
 */
@Serializable
enum class LlmErrorKind {
    @SerialName("rate_limited") RATE_LIMITED,
    @SerialName("credentials") CREDENTIALS,
    @SerialName("upstream_health") UPSTREAM_HEALTH,
    @SerialName("request_error") REQUEST_ERROR,
    @SerialName("connectivity") CONNECTIVITY,
    @SerialName("response_too_large") RESPONSE_TOO_LARGE,
    @SerialName("unknown") UNKNOWN
}

/**
 * Outcome summary of one proxied LLM call.
 *
 * Token counts, status code and chunk count are nullable because the
 * upstream may not report them for every call.
 */
data class LlmCallResult(
    val provider: String,
    val model: String?,
    val inputTokens: Long? = null,
    val outputTokens: Long? = null,
    val durationMs: Long,
    val streaming: Boolean,
    val success: Boolean,
    val errorKind: LlmErrorKind? = null,
    val statusCode: Int? = null,
    val chunkCount: Int? = null,
) {
    /**
     * Renders token usage as a log-line suffix (leading space included), or
     * an empty string when neither count is known. Unknown counts render
     * as "?".
     */
    fun formatTokenInfo(): String = when {
        inputTokens == null && outputTokens == null -> ""
        else -> " tokens=${inputTokens ?: "?"}→${outputTokens ?: "?"}"
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
package org.coralprotocol.coralserver.llmproxy

/**
 * Static description of each upstream LLM provider the proxy supports.
 *
 * @property providerId lowercase id used in configuration and agent registry entries
 * @property defaultBaseUrl upstream API origin used when no override is configured
 * @property authStyle how the API key is attached to requests (bearer vs. custom header)
 * @property defaultHeaders headers always added to upstream requests
 * @property strategy provider-specific request/response handling —
 *   NOTE(review): strategy types are defined elsewhere; confirm their contract
 * @property sdkBaseUrlEnvVar env var name SDKs honour for base-url overrides,
 *   null when the provider's SDK has none — NOTE(review): consumer not visible here
 * @property sdkPathSuffix path appended when exposing an SDK-style base URL
 *   (e.g. "v1" for OpenAI)
 */
enum class LlmProviderProfile(
    val providerId: String,
    val defaultBaseUrl: String,
    val authStyle: AuthStyle,
    val defaultHeaders: Map<String, String>,
    val strategy: LlmProviderStrategy,
    val sdkBaseUrlEnvVar: String? = null,
    val sdkPathSuffix: String = ""
) {
    OPENAI(
        "openai", "https://api.openai.com", AuthStyle.Bearer, emptyMap(), OpenAIStrategy,
        sdkBaseUrlEnvVar = "OPENAI_BASE_URL", sdkPathSuffix = "v1"
    ),

    ANTHROPIC(
        "anthropic",
        "https://api.anthropic.com",
        AuthStyle.Custom("x-api-key"),
        // Anthropic requires an explicit API version header on every request.
        mapOf("anthropic-version" to "2023-06-01"),
        AnthropicStrategy,
        sdkBaseUrlEnvVar = "ANTHROPIC_BASE_URL"
    ),

    OPENROUTER(
        // OpenRouter speaks the OpenAI wire format, hence OpenAIStrategy.
        "openrouter", "https://openrouter.ai", AuthStyle.Bearer, emptyMap(), OpenAIStrategy,
        sdkBaseUrlEnvVar = "OPENROUTER_BASE_URL"
    );

    companion object {
        // Lookup table built once; ids are declared lowercase.
        private val byId = entries.associateBy { it.providerId }

        // Case-insensitive lookup: input is lowercased before the map access.
        fun fromId(id: String): LlmProviderProfile? = byId[id.lowercase()]
    }
}

/**
 * How an API key is presented to an upstream provider. NOTE(review): header
 * assembly happens in the proxy implementation — presumably [Bearer] maps to
 * `Authorization: Bearer <key>` and [Custom] puts the key in
 * [Custom.headerName]; confirm there.
 */
sealed class AuthStyle {
    data object Bearer : AuthStyle()
    data class Custom(val headerName: String) : AuthStyle()
}
Loading
Loading