Skip to content

Commit 19db52f

Browse files
committed
Integrate Anthropic model support from PR google#233
This commit integrates the changes from google#233, which adds comprehensive support for Anthropic's Claude models via the model.LLM interface. Key features: - Support for both streaming and non-streaming generation - Integration with Vertex AI, Anthropic API, and AWS Bedrock providers - Tool/function calling with proper schema conversion - Handles multiple content types (text, images, code execution) - Comprehensive test coverage Changes include: - New model/anthropic package with core implementation - Request/response builders for API translation - Provider selection (vertex_ai, anthropic, aws_bedrock) - Dependencies: anthropic-sdk-go v1.17.0 and tidwall utilities All code has been formatted with go fmt, passes go vet checks, and all tests pass successfully. Original PR: google#233 Author: git-hulk <[email protected]>
1 parent 0a038b6 commit 19db52f

File tree

7 files changed

+1140
-0
lines changed

7 files changed

+1140
-0
lines changed

go.mod

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ require (
1515
)
1616

1717
require (
18+
github.com/anthropics/anthropic-sdk-go v1.17.0
1819
github.com/google/jsonschema-go v0.3.0
1920
github.com/modelcontextprotocol/go-sdk v0.7.0
2021
google.golang.org/grpc v1.76.0
@@ -73,6 +74,10 @@ require (
7374
github.com/jinzhu/now v1.1.5 // indirect
7475
github.com/mattn/go-sqlite3 v1.14.22 // indirect
7576
github.com/spf13/pflag v1.0.10 // indirect
77+
github.com/tidwall/gjson v1.18.0 // indirect
78+
github.com/tidwall/match v1.1.1 // indirect
79+
github.com/tidwall/pretty v1.2.1 // indirect
80+
github.com/tidwall/sjson v1.2.5 // indirect
7681
github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
7782
golang.org/x/crypto v0.43.0 // indirect
7883
golang.org/x/net v0.46.0 // indirect

go.sum

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,8 @@ github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapp
3030
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0/go.mod h1:cSgYe11MCNYunTnRXrKiR/tHc0eoKjICUuWpNZoVCOo=
3131
github.com/a2aproject/a2a-go v0.3.0 h1:mnfBEDJXShzEhXCmUbfZ9xo8sXfq2pCxemsY9uasvzg=
3232
github.com/a2aproject/a2a-go v0.3.0/go.mod h1:8C0O6lsfR7zWFEqVZz/+zWCoxe8gSWpknEpqm/Vgj3E=
33+
github.com/anthropics/anthropic-sdk-go v1.17.0 h1:BwK8ApcmaAUkvZTiQE0yi3R9XneEFskDIjLTmOAFZxQ=
34+
github.com/anthropics/anthropic-sdk-go v1.17.0/go.mod h1:WTz31rIUHUHqai2UslPpw5CwXrQP3geYBioRV4WOLvE=
3335
github.com/awalterschulze/gographviz v2.0.3+incompatible h1:9sVEXJBJLwGX7EQVhLm2elIKCm7P2YHFC8v6096G09E=
3436
github.com/awalterschulze/gographviz v2.0.3+incompatible/go.mod h1:GEV5wmg4YquNw7v1kkyoX9etIk8yVmXj+AkDHuuETHs=
3537
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
@@ -102,6 +104,16 @@ github.com/spiffe/go-spiffe/v2 v2.6.0 h1:l+DolpxNWYgruGQVV0xsfeya3CsC7m8iBzDnMps
102104
github.com/spiffe/go-spiffe/v2 v2.6.0/go.mod h1:gm2SeUoMZEtpnzPNs2Csc0D/gX33k1xIx7lEzqblHEs=
103105
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
104106
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
107+
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
108+
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
109+
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
110+
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
111+
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
112+
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
113+
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
114+
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
115+
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
116+
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
105117
github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4=
106118
github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4=
107119
go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64=

model/anthropic/anthropic.go

Lines changed: 222 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,222 @@
1+
// Copyright 2025 Google LLC
2+
//
3+
// Licensed under the Apache License, Version 2.0 (the "License");
4+
// you may not use this file except in compliance with the License.
5+
// You may obtain a copy of the License at
6+
//
7+
// http://www.apache.org/licenses/LICENSE-2.0
8+
//
9+
// Unless required by applicable law or agreed to in writing, software
10+
// distributed under the License is distributed on an "AS IS" BASIS,
11+
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
// See the License for the specific language governing permissions and
13+
// limitations under the License.
14+
15+
// Package anthropic implements the model.LLM interface backed by Claude
// models served via Vertex AI, the Anthropic API, or AWS Bedrock.
package anthropic
18+
19+
import (
20+
"context"
21+
"fmt"
22+
"iter"
23+
"os"
24+
25+
"github.com/anthropics/anthropic-sdk-go"
26+
"github.com/anthropics/anthropic-sdk-go/option"
27+
"github.com/anthropics/anthropic-sdk-go/packages/ssestream"
28+
"github.com/anthropics/anthropic-sdk-go/vertex"
29+
30+
"google.golang.org/adk/model"
31+
)
32+
33+
const (
	// envProjectID and envLocation name the environment variables consulted
	// when authenticating against Vertex AI (the default provider).
	envProjectID = "GOOGLE_CLOUD_PROJECT"
	envLocation  = "GOOGLE_CLOUD_LOCATION"

	// defaultMaxTokens is used when Config.MaxTokens is left zero.
	defaultMaxTokens = 8192
	// defaultOAuthScope is the OAuth scope requested for Vertex AI access.
	defaultOAuthScope = "https://www.googleapis.com/auth/cloud-platform"
)

// Supported values for Config.Provider.
const (
	// ProviderVertexAI accesses Claude models through Google Cloud Vertex AI.
	ProviderVertexAI = "vertex_ai"
	// ProviderAnthropic accesses the Anthropic API directly using an API key.
	ProviderAnthropic = "anthropic"
	// ProviderAWSBedrock accesses Claude models through AWS Bedrock.
	ProviderAWSBedrock = "aws_bedrock"
)
46+
47+
// Config controls how the Anthropic-backed model is initialized.
type Config struct {
	// Provider indicates which service is used to access the Anthropic models.
	// Supported values are "vertex_ai", "aws_bedrock", and "anthropic".
	// Default is "vertex_ai".
	Provider string
	// APIKey is the API key used to authenticate with the Anthropic API.
	// Only required when Provider is "anthropic".
	APIKey string
	// MaxTokens sets the maximum number of tokens the model can generate.
	// When zero, defaultMaxTokens is used.
	MaxTokens int64
	// ClientOptions are forwarded to the underlying Anthropic SDK client.
	ClientOptions []option.RequestOption
}
60+
61+
func (c *Config) applyDefaults() {
62+
if c.ClientOptions == nil {
63+
c.ClientOptions = []option.RequestOption{}
64+
}
65+
if c.MaxTokens == 0 {
66+
c.MaxTokens = defaultMaxTokens
67+
}
68+
if c.Provider == "" {
69+
c.Provider = ProviderVertexAI
70+
}
71+
}
72+
73+
// AnthropicModel implements model.LLM on top of the Anthropic SDK client.
type AnthropicModel struct {
	// client is the configured SDK client; which backend it talks to
	// (Vertex AI, Anthropic API, or AWS Bedrock) is fixed at construction.
	client anthropic.Client

	// name is the model identifier sent with every request.
	name string
	// maxTokens bounds the number of tokens generated per request.
	maxTokens int64
}
79+
80+
// NewModel returns [model.LLM] backed by the Anthropic API.
81+
func NewModel(ctx context.Context, modelName string, cfg *Config) (model.LLM, error) {
82+
if modelName == "" {
83+
return nil, fmt.Errorf("model name must be provided")
84+
}
85+
86+
if cfg == nil {
87+
cfg = &Config{}
88+
}
89+
cfg.applyDefaults()
90+
91+
opts := append([]option.RequestOption{}, cfg.ClientOptions...)
92+
93+
switch cfg.Provider {
94+
case ProviderAnthropic:
95+
if cfg.APIKey == "" {
96+
return nil, fmt.Errorf("API key must be provided to use Anthropic provider")
97+
}
98+
opts = append(opts, option.WithAPIKey(cfg.APIKey))
99+
case ProviderAWSBedrock:
100+
// Do nothing special for AWS Bedrock for now. User need to provide the client option
101+
// via `bedrock.WithConfig()` or `bedrock.WithLoadDefaultConfig()`.
102+
default:
103+
projectID := os.Getenv(envProjectID)
104+
location := os.Getenv(envLocation)
105+
if projectID == "" || location == "" {
106+
return nil, fmt.Errorf("GOOGLE_CLOUD_PROJECT and GOOGLE_CLOUD_LOCATION must be set to use Anthropic on Vertex")
107+
}
108+
opts = append(opts, vertex.WithGoogleAuth(ctx, location, projectID, defaultOAuthScope))
109+
}
110+
111+
return &AnthropicModel{
112+
name: modelName,
113+
maxTokens: cfg.MaxTokens,
114+
client: anthropic.NewClient(opts...),
115+
}, nil
116+
}
117+
118+
// Name returns the model identifier this instance was created with.
func (m *AnthropicModel) Name() string {
	return m.name
}
121+
122+
// GenerateContent issues a Messages.New call. When stream is true, the Anthropic
123+
// streaming API is used to emit partial responses as they arrive.
124+
func (m *AnthropicModel) GenerateContent(ctx context.Context, req *model.LLMRequest, stream bool) iter.Seq2[*model.LLMResponse, error] {
125+
if stream {
126+
return m.generateStream(ctx, req)
127+
}
128+
return func(yield func(*model.LLMResponse, error) bool) {
129+
resp, err := m.generate(ctx, req)
130+
if !yield(resp, err) {
131+
return
132+
}
133+
}
134+
}
135+
136+
func (m *AnthropicModel) generate(ctx context.Context, req *model.LLMRequest) (*model.LLMResponse, error) {
137+
if req == nil {
138+
return nil, fmt.Errorf("llm request must not be empty")
139+
}
140+
141+
requestBuilder := RequestBuilder{modelName: m.name, maxTokens: m.maxTokens}
142+
params, err := requestBuilder.FromLLMRequest(req)
143+
if err != nil {
144+
return nil, err
145+
}
146+
147+
msg, err := m.client.Messages.New(ctx, *params)
148+
if err != nil {
149+
return nil, fmt.Errorf("failed to send llm request to anthropic: %w", err)
150+
}
151+
152+
responseBuilder := ResponseBuilder{}
153+
return responseBuilder.FromMessage(msg)
154+
}
155+
156+
func (m *AnthropicModel) generateStream(ctx context.Context, req *model.LLMRequest) iter.Seq2[*model.LLMResponse, error] {
157+
return func(yield func(*model.LLMResponse, error) bool) {
158+
builder := RequestBuilder{modelName: m.name, maxTokens: m.maxTokens}
159+
params, err := builder.FromLLMRequest(req)
160+
if err != nil {
161+
yield(nil, err)
162+
return
163+
}
164+
165+
stream := m.client.Messages.NewStreaming(ctx, *params)
166+
for resp, err := range readStreamEvents(stream) {
167+
if !yield(resp, err) {
168+
return
169+
}
170+
}
171+
}
172+
}
173+
174+
// readStreamEvents converts a raw SSE stream of Anthropic message events into
// a sequence of model.LLMResponse values. Each event that carries displayable
// content is yielded as a partial response; when the message-stop event
// arrives, the accumulated message is converted into a final response with
// TurnComplete set. The stream is always closed before the iterator returns.
func readStreamEvents(stream *ssestream.Stream[anthropic.MessageStreamEventUnion]) iter.Seq2[*model.LLMResponse, error] {
	return func(yield func(*model.LLMResponse, error) bool) {
		if stream == nil {
			yield(nil, fmt.Errorf("the stream is empty"))
			return
		}
		// Release the underlying connection no matter how iteration ends.
		defer func() {
			_ = stream.Close()
		}()

		// Surface an error that occurred before iteration even started.
		if err := stream.Err(); err != nil {
			yield(nil, fmt.Errorf("got the stream error: %w", err))
			return
		}

		// message accumulates every event so the final response can be built
		// from the complete message when the stream signals message-stop.
		var message anthropic.Message
		for stream.Next() {
			event := stream.Current()
			if err := message.Accumulate(event); err != nil {
				yield(nil, fmt.Errorf("accumulate stream event error: %w", err))
				return
			}

			// parsePartialStreamEvent returns nil for events with nothing to
			// surface; only non-nil partials are yielded.
			partialResponse := parsePartialStreamEvent(event)
			if partialResponse != nil {
				if !yield(partialResponse, nil) {
					return
				}
			}

			// On message-stop, emit the aggregated message as the final
			// response and mark the turn as complete.
			if _, ok := event.AsAny().(anthropic.MessageStopEvent); ok {
				responseBuilder := ResponseBuilder{}
				finalResponse, err := responseBuilder.FromMessage(&message)
				if err != nil {
					yield(nil, err)
					return
				}
				finalResponse.TurnComplete = true
				if !yield(finalResponse, nil) {
					return
				}
			}
		}

		// Report any transport error that terminated the Next loop.
		if err := stream.Err(); err != nil {
			yield(nil, fmt.Errorf("got the stream error: %w", err))
		}
	}
}

0 commit comments

Comments
 (0)