Documentation
¶
Index ¶
- func AccText(msg *anthropic.BetaMessage) string
- func AnthropicMessageToLLMResponse(msg *anthropic.BetaMessage) (*model.LLMResponse, error)
- func EnsureUserContent(contents []*genai.Content) []*genai.Content
- func GenAI2XAIChatOptions(config *genai.GenerateContentConfig) xai.ChatOption
- func GenAIToAnthropicMessages(system *genai.Content, contents []*genai.Content) ([]anthropic.BetaTextBlockParam, []anthropic.BetaMessageParam, error)
- func GenAIToResponsesInput(contents []*genai.Content) ([]responses.ResponseInputItemUnionParam, error)
- func GenAIToolsToAnthropic(tools []*genai.Tool, cfg *genai.ToolConfig) ([]anthropic.BetaToolUnionParam, *anthropic.BetaToolChoiceUnionParam)
- func GenAIToolsToResponses(tools []*genai.Tool, cfg *genai.ToolConfig) (params []responses.ToolUnionParam, ...)
- func ModelName(defaultName string, req *model.LLMRequest) string
- func NormalizeRequest(req *model.LLMRequest, userAgent string) *genai.GenerateContentConfig
- func OpenAIResponseToLLM(resp *responses.Response, stopSequences []string) (*model.LLMResponse, error)
- func XAIResponseToLLM(resp *xai.Response) *model.LLMResponse
- type OpenAIStreamAggregator
- type XAIStreamAggregator
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func AccText ¶
func AccText(msg *anthropic.BetaMessage) string
AccText concatenates all text blocks from an Anthropic Beta message.
func AnthropicMessageToLLMResponse ¶
func AnthropicMessageToLLMResponse(msg *anthropic.BetaMessage) (*model.LLMResponse, error)
AnthropicMessageToLLMResponse converts an Anthropic beta message into an ADK LLM response.
func EnsureUserContent ¶
func EnsureUserContent(contents []*genai.Content) []*genai.Content
EnsureUserContent appends a user turn if the content list is empty or does not end with a user role.
func GenAI2XAIChatOptions ¶
func GenAI2XAIChatOptions(config *genai.GenerateContentConfig) xai.ChatOption
GenAI2XAIChatOptions builds a ChatOption that maps GenAI generation config into xAI request fields.
func GenAIToAnthropicMessages ¶
func GenAIToAnthropicMessages(system *genai.Content, contents []*genai.Content) ([]anthropic.BetaTextBlockParam, []anthropic.BetaMessageParam, error)
GenAIToAnthropicMessages converts the GenAI contents into Anthropic beta message parameters.
func GenAIToResponsesInput ¶
func GenAIToResponsesInput(contents []*genai.Content) ([]responses.ResponseInputItemUnionParam, error)
GenAIToResponsesInput converts GenAI content slices into OpenAI Responses input items.
func GenAIToolsToAnthropic ¶
func GenAIToolsToAnthropic(tools []*genai.Tool, cfg *genai.ToolConfig) ([]anthropic.BetaToolUnionParam, *anthropic.BetaToolChoiceUnionParam)
GenAIToolsToAnthropic converts GenAI tool declarations to Anthropic Beta tool definitions and choice hints.
func GenAIToolsToResponses ¶
func GenAIToolsToResponses(tools []*genai.Tool, cfg *genai.ToolConfig) (params []responses.ToolUnionParam, choiceOpt *responses.ResponseNewParamsToolChoiceUnion)
GenAIToolsToResponses maps GenAI tool declarations into Responses tool parameters and choice options.
func ModelName ¶
func ModelName(defaultName string, req *model.LLMRequest) string
ModelName returns the trimmed request model if set, otherwise the provided default.
func NormalizeRequest ¶
func NormalizeRequest(req *model.LLMRequest, userAgent string) *genai.GenerateContentConfig
NormalizeRequest ensures request config, HTTP headers, and user turn presence are set, then applies the provided user-agent.
func OpenAIResponseToLLM ¶
func OpenAIResponseToLLM(resp *responses.Response, stopSequences []string) (*model.LLMResponse, error)
OpenAIResponseToLLM converts an OpenAI Responses payload into a *model.LLMResponse, returning an error when the payload is nil or contains no output items.
func XAIResponseToLLM ¶
func XAIResponseToLLM(resp *xai.Response) *model.LLMResponse
XAIResponseToLLM converts an xAI response into an ADK LLMResponse, preserving usage and metadata.
Types ¶
type OpenAIStreamAggregator ¶
type OpenAIStreamAggregator struct {
// contains filtered or unexported fields
}
OpenAIStreamAggregator aggregates Responses streaming events into LLM responses.
func NewOpenAIStreamAggregator ¶
func NewOpenAIStreamAggregator(stopSequences []string) *OpenAIStreamAggregator
NewOpenAIStreamAggregator constructs a streaming aggregator for Responses events.
func (*OpenAIStreamAggregator) Err ¶
func (a *OpenAIStreamAggregator) Err() error
Err returns the terminal error captured during stream aggregation.
func (*OpenAIStreamAggregator) Final ¶
func (a *OpenAIStreamAggregator) Final() *model.LLMResponse
Final returns the terminal aggregated LLM response, or nil when nothing was accumulated.
func (*OpenAIStreamAggregator) Process ¶
func (a *OpenAIStreamAggregator) Process(event *responses.ResponseStreamEventUnion) []*model.LLMResponse
Process consumes a streaming event and emits any partial LLM responses produced by it.
type XAIStreamAggregator ¶
type XAIStreamAggregator struct {
// contains filtered or unexported fields
}
XAIStreamAggregator accumulates streaming xAI responses into coherent LLM responses.
func NewXAIStreamAggregator ¶
func NewXAIStreamAggregator() *XAIStreamAggregator
NewXAIStreamAggregator constructs a streaming aggregator for xAI responses.
func (*XAIStreamAggregator) Close ¶
func (s *XAIStreamAggregator) Close() *model.LLMResponse
Close returns the final aggregated LLM response and resets the aggregator state.