llm provider supports the Anthropic API and Anthropic-compatible APIs

This commit is contained in:
MaysWind
2026-02-01 16:17:22 +08:00
parent 4177ac3d46
commit fa047bf303
8 changed files with 563 additions and 9 deletions
+25 -1
View File
@@ -169,7 +169,7 @@ transaction_from_ai_image_recognition = false
max_ai_recognition_picture_size = 10485760 max_ai_recognition_picture_size = 10485760
[llm_image_recognition] [llm_image_recognition]
# Large Language Model (LLM) provider for receipt image recognition, supports the following types: "openai", "openai_compatible", "openrouter", "ollama", "lm_studio", "google_ai" # Large Language Model (LLM) provider for receipt image recognition, supports the following types: "openai", "openai_compatible", "anthropic", "anthropic_compatible", "openrouter", "ollama", "lm_studio", "google_ai"
llm_provider = llm_provider =
# For "openai" llm provider only, OpenAI API secret key, please visit https://platform.openai.com/api-keys for more information # For "openai" llm provider only, OpenAI API secret key, please visit https://platform.openai.com/api-keys for more information
@@ -187,6 +187,30 @@ openai_compatible_api_key =
# For "openai_compatible" llm provider only, receipt image recognition model for creating transactions from images # For "openai_compatible" llm provider only, receipt image recognition model for creating transactions from images
openai_compatible_model_id = openai_compatible_model_id =
# For "anthropic" llm provider only, Anthropic API key, please visit https://platform.claude.com/settings/keys for more information
anthropic_api_key =
# For "anthropic" llm provider only, receipt image recognition model for creating transactions from images
anthropic_model_id =
# For "anthropic" llm provider only, maximum allowed number of generated tokens for creating transactions from images, default is 1024
anthropic_max_tokens = 1024
# For "anthropic_compatible" llm provider only, Anthropic compatible API base url, e.g. "https://api.anthropic.com/v1/"
anthropic_compatible_base_url =
# For "anthropic_compatible" llm provider only, Anthropic compatible API version, e.g. "2023-06-01". If the LLM service does not require API versioning, leave it blank
anthropic_compatible_api_version =
# For "anthropic_compatible" llm provider only, Anthropic compatible API secret key
anthropic_compatible_api_key =
# For "anthropic_compatible" llm provider only, receipt image recognition model for creating transactions from images
anthropic_compatible_model_id =
# For "anthropic_compatible" llm provider only, maximum allowed number of generated tokens for creating transactions from images, default is 1024
anthropic_compatible_max_tokens = 1024
# For "openrouter" llm provider only, OpenRouter API key, please visit https://openrouter.ai/settings/keys for more information # For "openrouter" llm provider only, OpenRouter API key, please visit https://openrouter.ai/settings/keys for more information
openrouter_api_key = openrouter_api_key =
@@ -5,6 +5,7 @@ import (
"github.com/mayswind/ezbookkeeping/pkg/errs" "github.com/mayswind/ezbookkeeping/pkg/errs"
"github.com/mayswind/ezbookkeeping/pkg/llm/data" "github.com/mayswind/ezbookkeeping/pkg/llm/data"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider" "github.com/mayswind/ezbookkeeping/pkg/llm/provider"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider/anthropic"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider/googleai" "github.com/mayswind/ezbookkeeping/pkg/llm/provider/googleai"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider/lmstudio" "github.com/mayswind/ezbookkeeping/pkg/llm/provider/lmstudio"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider/ollama" "github.com/mayswind/ezbookkeeping/pkg/llm/provider/ollama"
@@ -42,6 +43,10 @@ func initializeLargeLanguageModelProvider(llmConfig *settings.LLMConfig, enableR
return openai.NewOpenAILargeLanguageModelProvider(llmConfig, enableResponseLog), nil return openai.NewOpenAILargeLanguageModelProvider(llmConfig, enableResponseLog), nil
} else if llmConfig.LLMProvider == settings.OpenAICompatibleLLMProvider { } else if llmConfig.LLMProvider == settings.OpenAICompatibleLLMProvider {
return openai.NewOpenAICompatibleLargeLanguageModelProvider(llmConfig, enableResponseLog), nil return openai.NewOpenAICompatibleLargeLanguageModelProvider(llmConfig, enableResponseLog), nil
} else if llmConfig.LLMProvider == settings.AnthropicLLMProvider {
return anthropic.NewAnthropicLargeLanguageModelProvider(llmConfig, enableResponseLog), nil
} else if llmConfig.LLMProvider == settings.AnthropicCompatibleLLMProvider {
return anthropic.NewAnthropicCompatibleLargeLanguageModelProvider(llmConfig, enableResponseLog), nil
} else if llmConfig.LLMProvider == settings.OpenRouterLLMProvider { } else if llmConfig.LLMProvider == settings.OpenRouterLLMProvider {
return openai.NewOpenRouterLargeLanguageModelProvider(llmConfig, enableResponseLog), nil return openai.NewOpenRouterLargeLanguageModelProvider(llmConfig, enableResponseLog), nil
} else if llmConfig.LLMProvider == settings.OllamaLLMProvider { } else if llmConfig.LLMProvider == settings.OllamaLLMProvider {
@@ -0,0 +1,196 @@
package anthropic
import (
"bytes"
"encoding/base64"
"encoding/json"
"io"
"net/http"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/errs"
"github.com/mayswind/ezbookkeeping/pkg/llm/data"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider/common"
"github.com/mayswind/ezbookkeeping/pkg/log"
"github.com/mayswind/ezbookkeeping/pkg/settings"
)
// AnthropicMessagesAPIProvider defines the structure of Anthropic messages API provider.
// Implementations supply the endpoint-specific http request (URL and auth headers) plus
// the model parameters used by the common adapter when building a messages API call.
type AnthropicMessagesAPIProvider interface {
	// BuildMessagesHttpRequest returns the messages http request
	// (without a body; the adapter attaches the JSON payload afterwards)
	BuildMessagesHttpRequest(c core.Context, uid int64) (*http.Request, error)

	// GetModelID returns the model id
	GetModelID() string

	// GetMaxTokens returns the max tokens to generate
	GetMaxTokens() uint32
}
// CommonAnthropicMessagesAPILargeLanguageModelAdapter defines the structure of Anthropic common compatible large language model adapter based on messages api
type CommonAnthropicMessagesAPILargeLanguageModelAdapter struct {
	common.HttpLargeLanguageModelAdapter
	apiProvider AnthropicMessagesAPIProvider // supplies endpoint request, model id and max tokens
}
// AnthropicMessageRole defines the role of Anthropic message
type AnthropicMessageRole string

// Anthropic Message Roles
const (
	AnthropicMessageRoleUser AnthropicMessageRole = "user"
)

// AnthropicThinkingType defines the type of Anthropic thinking configuration
type AnthropicThinkingType string

// Anthropic Thinking Types
const (
	AnthropicThinkingTypeDisabled AnthropicThinkingType = "disabled"
)

// AnthropicMessagesRequest defines the structure of Anthropic messages request
type AnthropicMessagesRequest struct {
	Model     string                                       `json:"model"`
	MaxTokens uint32                                       `json:"max_tokens"`
	Stream    bool                                         `json:"stream"`
	System    string                                       `json:"system,omitempty"` // optional system prompt; omitted from JSON when empty
	Messages  []any                                        `json:"messages"`
	Thinking  *AnthropicMessagesRequestThinkingConfigParam `json:"thinking,omitempty"`
}

// AnthropicMessagesRequestMessage defines the structure of Anthropic messages request message;
// the content is either plain text or a list of image content blocks
type AnthropicMessagesRequestMessage[T string | []*AnthropicMessagesRequestImageBlockParam] struct {
	Role    AnthropicMessageRole `json:"role"`
	Content T                    `json:"content"`
}

// AnthropicMessagesRequestImageBlockParam defines the structure of Anthropic messages request image content block param
type AnthropicMessagesRequestImageBlockParam struct {
	Source *AnthropicMessagesRequestBase64ImageSource `json:"source"`
	Type   string                                     `json:"type"`
}

// AnthropicMessagesRequestBase64ImageSource defines the structure of Anthropic messages request base64 image source
type AnthropicMessagesRequestBase64ImageSource struct {
	Data      string `json:"data"` // base64-encoded image bytes
	MediaType string `json:"media_type"`
	Type      string `json:"type"`
}

// AnthropicMessagesRequestThinkingConfigParam defines the structure of Anthropic messages request thinking config param
type AnthropicMessagesRequestThinkingConfigParam struct {
	Type AnthropicThinkingType `json:"type"`
}

// AnthropicMessagesResponse defines the structure of Anthropic messages response
type AnthropicMessagesResponse struct {
	Content []*AnthropicMessagesResponseContentBlock `json:"content"`
}

// AnthropicMessagesResponseContentBlock defines the structure of Anthropic messages response content block
type AnthropicMessagesResponseContentBlock struct {
	Text *string `json:"text"` // nil when the content block carries no text field
}
// BuildTextualRequest returns the http request by Anthropic common compatible adapter.
// It builds the JSON payload, obtains the endpoint-specific request from the api
// provider and attaches the payload as the request body.
func (p *CommonAnthropicMessagesAPILargeLanguageModelAdapter) BuildTextualRequest(c core.Context, uid int64, request *data.LargeLanguageModelRequest, responseType data.LargeLanguageModelResponseFormat) (*http.Request, error) {
	requestBody, err := p.buildJsonRequestBody(c, uid, request, responseType)

	if err != nil {
		return nil, err
	}

	httpRequest, err := p.apiProvider.BuildMessagesHttpRequest(c, uid)

	if err != nil {
		return nil, err
	}

	httpRequest.Body = io.NopCloser(bytes.NewReader(requestBody))
	// set ContentLength and GetBody explicitly because the body is attached after
	// http.NewRequest; otherwise the transport would use chunked encoding and could
	// not replay the body on redirects/retries
	httpRequest.ContentLength = int64(len(requestBody))
	httpRequest.GetBody = func() (io.ReadCloser, error) {
		return io.NopCloser(bytes.NewReader(requestBody)), nil
	}
	httpRequest.Header.Set("Content-Type", "application/json")

	return httpRequest, nil
}
// ParseTextualResponse returns the textual response by Anthropic common compatible adapter.
// It unmarshals the messages API response body and extracts the first content block that
// carries text; an unparsable body or a response without any text block yields
// errs.ErrFailedToRequestRemoteApi.
func (p *CommonAnthropicMessagesAPILargeLanguageModelAdapter) ParseTextualResponse(c core.Context, uid int64, body []byte, responseType data.LargeLanguageModelResponseFormat) (*data.LargeLanguageModelTextualResponse, error) {
	messagesResponse := &AnthropicMessagesResponse{}
	err := json.Unmarshal(body, messagesResponse)

	if err != nil {
		log.Errorf(c, "[anthropic_common_compatible_large_language_model_adapter.ParseTextualResponse] failed to parse messages response for user \"uid:%d\", because %s", uid, err.Error())
		return nil, errs.ErrFailedToRequestRemoteApi
	}

	// scan for the first content block that actually carries text, so leading
	// non-text blocks in the response do not break parsing
	for _, contentBlock := range messagesResponse.Content {
		if contentBlock != nil && contentBlock.Text != nil {
			textualResponse := &data.LargeLanguageModelTextualResponse{
				Content: *contentBlock.Text,
			}

			return textualResponse, nil
		}
	}

	log.Errorf(c, "[anthropic_common_compatible_large_language_model_adapter.ParseTextualResponse] messages response is invalid for user \"uid:%d\"", uid)
	return nil, errs.ErrFailedToRequestRemoteApi
}
// buildJsonRequestBody assembles the Anthropic messages API JSON payload from the
// given large language model request; it fails with errs.ErrInvalidLLMModelId when
// no model id is configured.
func (p *CommonAnthropicMessagesAPILargeLanguageModelAdapter) buildJsonRequestBody(c core.Context, uid int64, request *data.LargeLanguageModelRequest, responseType data.LargeLanguageModelResponseFormat) ([]byte, error) {
	modelID := p.apiProvider.GetModelID()

	if modelID == "" {
		return nil, errs.ErrInvalidLLMModelId
	}

	// System uses omitempty, so assigning the (possibly empty) prompt directly
	// produces the same JSON as conditionally setting it
	messagesRequest := &AnthropicMessagesRequest{
		Model:     modelID,
		MaxTokens: p.apiProvider.GetMaxTokens(),
		Stream:    request.Stream,
		System:    request.SystemPrompt,
		Messages:  make([]any, 0, 1),
		Thinking: &AnthropicMessagesRequestThinkingConfigParam{
			Type: AnthropicThinkingTypeDisabled,
		},
	}

	if len(request.UserPrompt) > 0 {
		var userMessage any

		if request.UserPromptType == data.LARGE_LANGUAGE_MODEL_REQUEST_PROMPT_TYPE_IMAGE_URL {
			// image prompts are sent as a single base64 image content block
			userMessage = &AnthropicMessagesRequestMessage[[]*AnthropicMessagesRequestImageBlockParam]{
				Role: AnthropicMessageRoleUser,
				Content: []*AnthropicMessagesRequestImageBlockParam{
					{
						Type: "image",
						Source: &AnthropicMessagesRequestBase64ImageSource{
							Data:      base64.StdEncoding.EncodeToString(request.UserPrompt),
							MediaType: request.UserPromptContentType,
							Type:      "base64",
						},
					},
				},
			}
		} else {
			// textual prompts are sent as a plain string content
			userMessage = &AnthropicMessagesRequestMessage[string]{
				Role:    AnthropicMessageRoleUser,
				Content: string(request.UserPrompt),
			}
		}

		messagesRequest.Messages = append(messagesRequest.Messages, userMessage)
	}

	requestBodyBytes, err := json.Marshal(messagesRequest)

	if err != nil {
		log.Errorf(c, "[anthropic_common_compatible_large_language_model_adapter.buildJsonRequestBody] failed to marshal request body for user \"uid:%d\", because %s", uid, err.Error())
		return nil, errs.ErrOperationFailed
	}

	log.Debugf(c, "[anthropic_common_compatible_large_language_model_adapter.buildJsonRequestBody] request body is %s", requestBodyBytes)

	return requestBodyBytes, nil
}
// newCommonAnthropicMessagesAPILargeLanguageModelAdapter wraps the given messages API
// provider in the common http large language model provider.
func newCommonAnthropicMessagesAPILargeLanguageModelAdapter(llmConfig *settings.LLMConfig, enableResponseLog bool, apiProvider AnthropicMessagesAPIProvider) provider.LargeLanguageModelProvider {
	adapter := &CommonAnthropicMessagesAPILargeLanguageModelAdapter{
		apiProvider: apiProvider,
	}

	return common.NewCommonHttpLargeLanguageModelProvider(llmConfig, enableResponseLog, adapter)
}
@@ -0,0 +1,152 @@
package anthropic
import (
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/llm/data"
)
func TestCommonAnthropicMessagesAPILargeLanguageModelAdapter_buildJsonRequestBody_TextualUserPrompt(t *testing.T) {
	adapter := &CommonAnthropicMessagesAPILargeLanguageModelAdapter{
		apiProvider: &AnthropicOfficialMessagesAPIProvider{
			AnthropicModelID:   "test",
			AnthropicMaxTokens: 128,
		},
	}

	request := &data.LargeLanguageModelRequest{
		SystemPrompt: "You are a helpful assistant.",
		UserPrompt:   []byte("Hello, how are you?"),
	}

	actualBody, err := adapter.buildJsonRequestBody(core.NewNullContext(), 0, request, data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.Nil(t, err)

	// the body must be well-formed JSON
	var parsedBody map[string]any
	err = json.Unmarshal(actualBody, &parsedBody)
	assert.Nil(t, err)

	expectedBody := `{"model":"test","max_tokens":128,"stream":false,"system":"You are a helpful assistant.","messages":[{"role":"user","content":"Hello, how are you?"}],"thinking":{"type":"disabled"}}`
	assert.Equal(t, expectedBody, string(actualBody))
}
func TestCommonAnthropicMessagesAPILargeLanguageModelAdapter_buildJsonRequestBody_ImageUserPrompt(t *testing.T) {
	adapter := &CommonAnthropicMessagesAPILargeLanguageModelAdapter{
		apiProvider: &AnthropicOfficialMessagesAPIProvider{
			AnthropicModelID:   "test",
			AnthropicMaxTokens: 128,
		},
	}

	request := &data.LargeLanguageModelRequest{
		SystemPrompt:          "What's in this image?",
		UserPrompt:            []byte("fakedata"),
		UserPromptType:        data.LARGE_LANGUAGE_MODEL_REQUEST_PROMPT_TYPE_IMAGE_URL,
		UserPromptContentType: "image/png",
	}

	actualBody, err := adapter.buildJsonRequestBody(core.NewNullContext(), 0, request, data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.Nil(t, err)

	// the body must be well-formed JSON
	var parsedBody map[string]any
	err = json.Unmarshal(actualBody, &parsedBody)
	assert.Nil(t, err)

	expectedBody := `{"model":"test","max_tokens":128,"stream":false,"system":"What's in this image?","messages":[{"role":"user","content":[{"source":{"data":"ZmFrZWRhdGE=","media_type":"image/png","type":"base64"},"type":"image"}]}],"thinking":{"type":"disabled"}}`
	assert.Equal(t, expectedBody, string(actualBody))
}
func TestCommonAnthropicMessagesAPILargeLanguageModelAdapter_ParseTextualResponse_ValidJsonResponse(t *testing.T) {
	adapter := &CommonAnthropicMessagesAPILargeLanguageModelAdapter{
		apiProvider: &AnthropicOfficialMessagesAPIProvider{},
	}

	responseBody := []byte(`{
		"id": "test-123",
		"role": "assistant",
		"type": "message",
		"model": "test",
		"usage": {
			"input_tokens": 13,
			"output_tokens": 7
		},
		"content": [
			{
				"type": "text",
				"text": "This is a test response"
			}
		]
	}`)

	textualResponse, err := adapter.ParseTextualResponse(core.NewNullContext(), 0, responseBody, data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.Nil(t, err)
	assert.Equal(t, "This is a test response", textualResponse.Content)
}
func TestCommonAnthropicMessagesAPILargeLanguageModelAdapter_ParseTextualResponse_EmptyContentText(t *testing.T) {
	adapter := &CommonAnthropicMessagesAPILargeLanguageModelAdapter{
		apiProvider: &AnthropicOfficialMessagesAPIProvider{},
	}

	responseBody := []byte(`{
		"id": "test-123",
		"role": "assistant",
		"content": [
			{
				"type": "text",
				"text": ""
			}
		]
	}`)

	// an explicitly empty text field is still a valid (empty) textual response
	textualResponse, err := adapter.ParseTextualResponse(core.NewNullContext(), 0, responseBody, data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.Nil(t, err)
	assert.Equal(t, "", textualResponse.Content)
}
func TestCommonAnthropicMessagesAPILargeLanguageModelAdapter_ParseTextualResponse_EmptyContent(t *testing.T) {
	adapter := &CommonAnthropicMessagesAPILargeLanguageModelAdapter{
		apiProvider: &AnthropicOfficialMessagesAPIProvider{},
	}

	responseBody := []byte(`{
		"id": "test-123",
		"role": "assistant",
		"content": []
	}`)

	// a response without any content block cannot be parsed
	_, err := adapter.ParseTextualResponse(core.NewNullContext(), 0, responseBody, data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.EqualError(t, err, "failed to request third party api")
}
func TestCommonAnthropicMessagesAPILargeLanguageModelAdapter_ParseTextualResponse_NoContentText(t *testing.T) {
	adapter := &CommonAnthropicMessagesAPILargeLanguageModelAdapter{
		apiProvider: &AnthropicOfficialMessagesAPIProvider{},
	}

	responseBody := []byte(`{
		"id": "msg_123",
		"role": "assistant",
		"content": [
			{
				"type": "text"
			}
		]
	}`)

	// a content block without a "text" field cannot be parsed
	_, err := adapter.ParseTextualResponse(core.NewNullContext(), 0, responseBody, data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.EqualError(t, err, "failed to request third party api")
}
func TestCommonAnthropicMessagesAPILargeLanguageModelAdapter_ParseTextualResponse_InvalidJson(t *testing.T) {
	adapter := &CommonAnthropicMessagesAPILargeLanguageModelAdapter{
		apiProvider: &AnthropicOfficialMessagesAPIProvider{},
	}

	// a body that is not JSON at all must fail
	_, err := adapter.ParseTextualResponse(core.NewNullContext(), 0, []byte("error"), data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.EqualError(t, err, "failed to request third party api")
}
@@ -0,0 +1,72 @@
package anthropic
import (
"net/http"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider"
"github.com/mayswind/ezbookkeeping/pkg/settings"
)
// anthropicCompatibleMessagesPath is the messages api path appended to the configured base url
const anthropicCompatibleMessagesPath = "messages"

// AnthropicCompatibleMessagesAPIProvider defines the structure of Anthropic compatible messages API provider
type AnthropicCompatibleMessagesAPIProvider struct {
	AnthropicMessagesAPIProvider
	AnthropicCompatibleBaseURL    string // base url of the compatible service, e.g. "https://api.anthropic.com/v1/"
	AnthropicCompatibleAPIVersion string // optional "anthropic-version" header value; header omitted when empty
	AnthropicCompatibleAPIKey     string // optional "X-Api-Key" header value; header omitted when empty
	AnthropicCompatibleModelID    string // model id used for requests
	AnthropicCompatibleMaxTokens  uint32 // maximum number of tokens to generate
}
// BuildMessagesHttpRequest returns the messages http request by Anthropic compatible messages API provider.
// The request targets the configured base url plus the messages path and carries the optional
// "anthropic-version" and "X-Api-Key" headers; the body is attached by the caller.
func (p *AnthropicCompatibleMessagesAPIProvider) BuildMessagesHttpRequest(c core.Context, uid int64) (*http.Request, error) {
	// http.MethodPost instead of the bare "POST" literal for idiomatic method constants
	req, err := http.NewRequest(http.MethodPost, p.getFinalMessagesRequestUrl(), nil)

	if err != nil {
		return nil, err
	}

	if p.AnthropicCompatibleAPIVersion != "" {
		req.Header.Set("anthropic-version", p.AnthropicCompatibleAPIVersion)
	}

	if p.AnthropicCompatibleAPIKey != "" {
		req.Header.Set("X-Api-Key", p.AnthropicCompatibleAPIKey)
	}

	return req, nil
}
// GetModelID returns the model id of Anthropic compatible messages API provider
func (p *AnthropicCompatibleMessagesAPIProvider) GetModelID() string {
	return p.AnthropicCompatibleModelID
}

// GetMaxTokens returns the max tokens to generate of Anthropic compatible messages API provider
func (p *AnthropicCompatibleMessagesAPIProvider) GetMaxTokens() uint32 {
	return p.AnthropicCompatibleMaxTokens
}
// getFinalMessagesRequestUrl joins the configured base url and the messages api path,
// inserting a separating slash only when the base url does not already end with one.
func (p *AnthropicCompatibleMessagesAPIProvider) getFinalMessagesRequestUrl() string {
	url := p.AnthropicCompatibleBaseURL

	// the length guard prevents a panic on url[len(url)-1] when the base url is empty
	if len(url) == 0 || url[len(url)-1] != '/' {
		url += "/"
	}

	url += anthropicCompatibleMessagesPath

	return url
}
// NewAnthropicCompatibleLargeLanguageModelProvider creates a new Anthropic compatible large language model provider instance
func NewAnthropicCompatibleLargeLanguageModelProvider(llmConfig *settings.LLMConfig, enableResponseLog bool) provider.LargeLanguageModelProvider {
	apiProvider := &AnthropicCompatibleMessagesAPIProvider{
		AnthropicCompatibleBaseURL:    llmConfig.AnthropicCompatibleBaseURL,
		AnthropicCompatibleAPIVersion: llmConfig.AnthropicCompatibleAPIVersion,
		AnthropicCompatibleAPIKey:     llmConfig.AnthropicCompatibleAPIKey,
		AnthropicCompatibleModelID:    llmConfig.AnthropicCompatibleModelID,
		AnthropicCompatibleMaxTokens:  llmConfig.AnthropicCompatibleMaxTokens,
	}

	return newCommonAnthropicMessagesAPILargeLanguageModelAdapter(llmConfig, enableResponseLog, apiProvider)
}
@@ -0,0 +1,27 @@
package anthropic
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestAnthropicCompatibleMessagesAPIProvider_GetFinalRequestUrl(t *testing.T) {
	// table-driven check that the messages path is appended with exactly one slash
	testCases := []struct {
		baseURL     string
		expectedURL string
	}{
		{"https://api.example.com/v1/", "https://api.example.com/v1/messages"},
		{"https://api.example.com/v1", "https://api.example.com/v1/messages"},
		{"https://example.com/api", "https://example.com/api/messages"},
	}

	for _, testCase := range testCases {
		apiProvider := &AnthropicCompatibleMessagesAPIProvider{
			AnthropicCompatibleBaseURL: testCase.baseURL,
		}

		assert.Equal(t, testCase.expectedURL, apiProvider.getFinalMessagesRequestUrl())
	}
}
@@ -0,0 +1,53 @@
package anthropic
import (
"net/http"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider"
"github.com/mayswind/ezbookkeeping/pkg/settings"
)
// AnthropicOfficialMessagesAPIProvider defines the structure of Anthropic official messages API provider
type AnthropicOfficialMessagesAPIProvider struct {
	AnthropicMessagesAPIProvider
	AnthropicAPIKey    string // Anthropic API secret key, sent via the "X-Api-Key" header
	AnthropicModelID   string // model id used for requests
	AnthropicMaxTokens uint32 // maximum number of tokens to generate
}

// anthropicMessagesUrl is the fixed endpoint of the official Anthropic messages api
const anthropicMessagesUrl = "https://api.anthropic.com/v1/messages"

// anthropicAPIVersion is the api version sent via the "anthropic-version" header
const anthropicAPIVersion = "2023-06-01"
// BuildMessagesHttpRequest returns the messages http request by Anthropic official messages API provider.
// The request targets the fixed official endpoint and carries the mandatory
// "anthropic-version" and "X-Api-Key" headers; the body is attached by the caller.
func (p *AnthropicOfficialMessagesAPIProvider) BuildMessagesHttpRequest(c core.Context, uid int64) (*http.Request, error) {
	// http.MethodPost instead of the bare "POST" literal for idiomatic method constants
	req, err := http.NewRequest(http.MethodPost, anthropicMessagesUrl, nil)

	if err != nil {
		return nil, err
	}

	req.Header.Set("anthropic-version", anthropicAPIVersion)
	req.Header.Set("X-Api-Key", p.AnthropicAPIKey)

	return req, nil
}
// GetModelID returns the model id of Anthropic official messages API provider
func (p *AnthropicOfficialMessagesAPIProvider) GetModelID() string {
	return p.AnthropicModelID
}

// GetMaxTokens returns the max tokens to generate of Anthropic official messages API provider
func (p *AnthropicOfficialMessagesAPIProvider) GetMaxTokens() uint32 {
	return p.AnthropicMaxTokens
}
// NewAnthropicLargeLanguageModelProvider creates a new Anthropic large language model provider instance
func NewAnthropicLargeLanguageModelProvider(llmConfig *settings.LLMConfig, enableResponseLog bool) provider.LargeLanguageModelProvider {
	apiProvider := &AnthropicOfficialMessagesAPIProvider{
		AnthropicAPIKey:    llmConfig.AnthropicAPIKey,
		AnthropicModelID:   llmConfig.AnthropicModelID,
		AnthropicMaxTokens: llmConfig.AnthropicMaxTokens,
	}

	return newCommonAnthropicMessagesAPILargeLanguageModelAdapter(llmConfig, enableResponseLog, apiProvider)
}
+33 -8
View File
@@ -69,12 +69,14 @@ const (
) )
const ( const (
OpenAILLMProvider string = "openai" OpenAILLMProvider string = "openai"
OpenAICompatibleLLMProvider string = "openai_compatible" OpenAICompatibleLLMProvider string = "openai_compatible"
OpenRouterLLMProvider string = "openrouter" AnthropicLLMProvider string = "anthropic"
OllamaLLMProvider string = "ollama" AnthropicCompatibleLLMProvider string = "anthropic_compatible"
LMStudioLLMProvider string = "lm_studio" OpenRouterLLMProvider string = "openrouter"
GoogleAILLMProvider string = "google_ai" OllamaLLMProvider string = "ollama"
LMStudioLLMProvider string = "lm_studio"
GoogleAILLMProvider string = "google_ai"
) )
// Uuid generator types // Uuid generator types
@@ -162,8 +164,9 @@ const (
defaultWebDAVRequestTimeout uint32 = 10000 // 10 seconds defaultWebDAVRequestTimeout uint32 = 10000 // 10 seconds
defaultAIRecognitionPictureMaxSize uint32 = 10485760 // 10MB defaultAIRecognitionPictureMaxSize uint32 = 10485760 // 10MB
defaultLargeLanguageModelAPIRequestTimeout uint32 = 60000 // 60 seconds defaultAnthropicLargeLanguageModelAPIMaximumTokens uint32 = 1024
defaultLargeLanguageModelAPIRequestTimeout uint32 = 60000 // 60 seconds
defaultInMemoryDuplicateCheckerCleanupInterval uint32 = 60 // 1 minutes defaultInMemoryDuplicateCheckerCleanupInterval uint32 = 60 // 1 minutes
defaultDuplicateSubmissionsInterval uint32 = 300 // 5 minutes defaultDuplicateSubmissionsInterval uint32 = 300 // 5 minutes
@@ -245,6 +248,14 @@ type LLMConfig struct {
OpenAICompatibleBaseURL string OpenAICompatibleBaseURL string
OpenAICompatibleAPIKey string OpenAICompatibleAPIKey string
OpenAICompatibleModelID string OpenAICompatibleModelID string
AnthropicAPIKey string
AnthropicModelID string
AnthropicMaxTokens uint32
AnthropicCompatibleBaseURL string
AnthropicCompatibleAPIVersion string
AnthropicCompatibleAPIKey string
AnthropicCompatibleModelID string
AnthropicCompatibleMaxTokens uint32
OpenRouterAPIKey string OpenRouterAPIKey string
OpenRouterModelID string OpenRouterModelID string
OllamaServerURL string OllamaServerURL string
@@ -864,6 +875,10 @@ func loadLLMConfiguration(configFile *ini.File, sectionName string) (*LLMConfig,
llmConfig.LLMProvider = OpenAILLMProvider llmConfig.LLMProvider = OpenAILLMProvider
} else if llmProvider == OpenAICompatibleLLMProvider { } else if llmProvider == OpenAICompatibleLLMProvider {
llmConfig.LLMProvider = OpenAICompatibleLLMProvider llmConfig.LLMProvider = OpenAICompatibleLLMProvider
} else if llmProvider == AnthropicLLMProvider {
llmConfig.LLMProvider = AnthropicLLMProvider
} else if llmProvider == AnthropicCompatibleLLMProvider {
llmConfig.LLMProvider = AnthropicCompatibleLLMProvider
} else if llmProvider == OpenRouterLLMProvider { } else if llmProvider == OpenRouterLLMProvider {
llmConfig.LLMProvider = OpenRouterLLMProvider llmConfig.LLMProvider = OpenRouterLLMProvider
} else if llmProvider == OllamaLLMProvider { } else if llmProvider == OllamaLLMProvider {
@@ -883,6 +898,16 @@ func loadLLMConfiguration(configFile *ini.File, sectionName string) (*LLMConfig,
llmConfig.OpenAICompatibleAPIKey = getConfigItemStringValue(configFile, sectionName, "openai_compatible_api_key") llmConfig.OpenAICompatibleAPIKey = getConfigItemStringValue(configFile, sectionName, "openai_compatible_api_key")
llmConfig.OpenAICompatibleModelID = getConfigItemStringValue(configFile, sectionName, "openai_compatible_model_id") llmConfig.OpenAICompatibleModelID = getConfigItemStringValue(configFile, sectionName, "openai_compatible_model_id")
llmConfig.AnthropicAPIKey = getConfigItemStringValue(configFile, sectionName, "anthropic_api_key")
llmConfig.AnthropicModelID = getConfigItemStringValue(configFile, sectionName, "anthropic_model_id")
llmConfig.AnthropicMaxTokens = getConfigItemUint32Value(configFile, sectionName, "anthropic_max_tokens", defaultAnthropicLargeLanguageModelAPIMaximumTokens)
llmConfig.AnthropicCompatibleBaseURL = getConfigItemStringValue(configFile, sectionName, "anthropic_compatible_base_url")
llmConfig.AnthropicCompatibleAPIVersion = getConfigItemStringValue(configFile, sectionName, "anthropic_compatible_api_version")
llmConfig.AnthropicCompatibleAPIKey = getConfigItemStringValue(configFile, sectionName, "anthropic_compatible_api_key")
llmConfig.AnthropicCompatibleModelID = getConfigItemStringValue(configFile, sectionName, "anthropic_compatible_model_id")
llmConfig.AnthropicCompatibleMaxTokens = getConfigItemUint32Value(configFile, sectionName, "anthropic_compatible_max_tokens", defaultAnthropicLargeLanguageModelAPIMaximumTokens)
llmConfig.OpenRouterAPIKey = getConfigItemStringValue(configFile, sectionName, "openrouter_api_key") llmConfig.OpenRouterAPIKey = getConfigItemStringValue(configFile, sectionName, "openrouter_api_key")
llmConfig.OpenRouterModelID = getConfigItemStringValue(configFile, sectionName, "openrouter_model_id") llmConfig.OpenRouterModelID = getConfigItemStringValue(configFile, sectionName, "openrouter_model_id")