code refactor

This commit is contained in:
MaysWind
2025-09-23 00:25:37 +08:00
parent 5a9877588f
commit b967a214cb
15 changed files with 482 additions and 399 deletions
@@ -0,0 +1,102 @@
package common
import (
"crypto/tls"
"io"
"net/http"
"strings"
"time"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/errs"
"github.com/mayswind/ezbookkeeping/pkg/llm/data"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider"
"github.com/mayswind/ezbookkeeping/pkg/log"
"github.com/mayswind/ezbookkeeping/pkg/settings"
"github.com/mayswind/ezbookkeeping/pkg/utils"
)
// HttpLargeLanguageModelAdapter defines the structure of http large language model adapter.
// Implementations translate the provider-agnostic request/response entities into the
// concrete http api of one specific large language model provider.
type HttpLargeLanguageModelAdapter interface {
	// BuildTextualRequest returns the http request by the provider api definition
	BuildTextualRequest(c core.Context, uid int64, request *data.LargeLanguageModelRequest, responseType data.LargeLanguageModelResponseFormat) (*http.Request, error)

	// ParseTextualResponse returns the textual response entity by the provider api definition
	ParseTextualResponse(c core.Context, uid int64, body []byte, responseType data.LargeLanguageModelResponseFormat) (*data.LargeLanguageModelTextualResponse, error)
}
// CommonHttpLargeLanguageModelProvider defines the structure of common http large language model provider.
// It implements the provider interface by delegating request building and response
// parsing to the configured HttpLargeLanguageModelAdapter.
type CommonHttpLargeLanguageModelProvider struct {
	provider.LargeLanguageModelProvider
	adapter HttpLargeLanguageModelAdapter // builds requests and parses responses for a concrete provider
}
// GetJsonResponse returns the json response from the http large language model provider.
// Some models wrap the json payload in a markdown code fence (```json ... ``` or ``` ... ```);
// the fence is stripped before the response is returned.
// NOTE: the original doc comment claimed this was the "OpenAI common compatible" provider,
// but this type is the generic http provider shared by all adapters.
func (p *CommonHttpLargeLanguageModelProvider) GetJsonResponse(c core.Context, uid int64, currentLLMConfig *settings.LLMConfig, request *data.LargeLanguageModelRequest) (*data.LargeLanguageModelTextualResponse, error) {
	response, err := p.getTextualResponse(c, uid, currentLLMConfig, request, data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)

	if err != nil {
		return nil, err
	}

	content := response.Content

	// try the more specific fence first so "```json" is not left half-trimmed by the "```" case
	if strings.HasPrefix(content, "```json") && strings.HasSuffix(content, "```") {
		content = strings.TrimSuffix(strings.TrimPrefix(content, "```json"), "```")
	} else if strings.HasPrefix(content, "```") && strings.HasSuffix(content, "```") {
		content = strings.TrimSuffix(strings.TrimPrefix(content, "```"), "```")
	}

	response.Content = content

	return response, nil
}
// getTextualResponse sends the adapter-built http request to the large language model api
// and returns the parsed textual response. Proxy, TLS verification and timeout behavior
// come from the passed LLM configuration. All failures are logged and normalized to
// errs.ErrFailedToRequestRemoteApi.
func (p *CommonHttpLargeLanguageModelProvider) getTextualResponse(c core.Context, uid int64, currentLLMConfig *settings.LLMConfig, request *data.LargeLanguageModelRequest, responseType data.LargeLanguageModelResponseFormat) (*data.LargeLanguageModelTextualResponse, error) {
	transport := http.DefaultTransport.(*http.Transport).Clone()
	utils.SetProxyUrl(transport, currentLLMConfig.LargeLanguageModelAPIProxy)

	if currentLLMConfig.LargeLanguageModelAPISkipTLSVerify {
		transport.TLSClientConfig = &tls.Config{
			InsecureSkipVerify: true,
		}
	}

	client := &http.Client{
		Transport: transport,
		Timeout:   time.Duration(currentLLMConfig.LargeLanguageModelAPIRequestTimeout) * time.Millisecond,
	}

	httpRequest, err := p.adapter.BuildTextualRequest(c, uid, request, responseType)

	if err != nil {
		log.Errorf(c, "[common_http_large_language_model_provider.getTextualResponse] failed to build requests for user \"uid:%d\", because %s", uid, err.Error())
		return nil, errs.ErrFailedToRequestRemoteApi
	}

	httpRequest.Header.Set("User-Agent", settings.GetUserAgent())

	resp, err := client.Do(httpRequest)

	if err != nil {
		log.Errorf(c, "[common_http_large_language_model_provider.getTextualResponse] failed to request large language model api for user \"uid:%d\", because %s", uid, err.Error())
		return nil, errs.ErrFailedToRequestRemoteApi
	}

	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)

	// BUG FIX: the read error was previously ignored, so a truncated or aborted body
	// could be handed to the adapter's parser as if it were complete
	if err != nil {
		log.Errorf(c, "[common_http_large_language_model_provider.getTextualResponse] failed to read large language model api response for user \"uid:%d\", because %s", uid, err.Error())
		return nil, errs.ErrFailedToRequestRemoteApi
	}

	log.Debugf(c, "[common_http_large_language_model_provider.getTextualResponse] response is %s", body)

	if resp.StatusCode != 200 {
		log.Errorf(c, "[common_http_large_language_model_provider.getTextualResponse] failed to get large language model api response for user \"uid:%d\", because response code is %d", uid, resp.StatusCode)
		return nil, errs.ErrFailedToRequestRemoteApi
	}

	return p.adapter.ParseTextualResponse(c, uid, body, responseType)
}
// NewCommonHttpLargeLanguageModelProvider creates a http adapter based large language model provider instance
func NewCommonHttpLargeLanguageModelProvider(adapter HttpLargeLanguageModelAdapter) *CommonHttpLargeLanguageModelProvider {
	newProvider := &CommonHttpLargeLanguageModelProvider{}
	newProvider.adapter = adapter

	return newProvider
}
@@ -0,0 +1,13 @@
package provider
import (
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/llm/data"
"github.com/mayswind/ezbookkeeping/pkg/settings"
)
// LargeLanguageModelProvider defines the structure of large language model provider.
// It is the provider-agnostic entry point used by callers that need a model response.
type LargeLanguageModelProvider interface {
	// GetJsonResponse returns the json response from the large language model provider
	GetJsonResponse(c core.Context, uid int64, currentLLMConfig *settings.LLMConfig, request *data.LargeLanguageModelRequest) (*data.LargeLanguageModelTextualResponse, error)
}
@@ -0,0 +1,166 @@
package ollama
import (
"bytes"
"encoding/base64"
"encoding/json"
"net/http"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/errs"
"github.com/mayswind/ezbookkeeping/pkg/llm/data"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider/common"
"github.com/mayswind/ezbookkeeping/pkg/log"
"github.com/mayswind/ezbookkeeping/pkg/settings"
)
// ollamaChatCompletionsPath is the relative path of the Ollama chat api, appended to the configured server url
const ollamaChatCompletionsPath = "api/chat"

// OllamaLargeLanguageModelAdapter defines the structure of Ollama large language model adapter
type OllamaLargeLanguageModelAdapter struct {
	common.HttpLargeLanguageModelAdapter
	OllamaServerURL string // base url of the Ollama server, with or without a trailing slash
	OllamaModelID   string // model id sent in the chat request; an empty value is rejected when building the request body
}
// OllamaMessageRole defines the role of Ollama chat message
type OllamaMessageRole string

// Ollama chat message roles used by this adapter
const (
	OllamaMessageRoleSystem OllamaMessageRole = "system"
	OllamaMessageRoleUser   OllamaMessageRole = "user"
)
// OllamaChatRequest defines the structure of Ollama chat request
type OllamaChatRequest struct {
	Model    string                      `json:"model"`
	Stream   bool                        `json:"stream"`
	Messages []*OllamaChatRequestMessage `json:"messages"`
	Format   string                      `json:"format,omitempty"` // set to "json" to request a json-formatted answer
}

// OllamaChatRequestMessage defines the structure of Ollama chat request message
type OllamaChatRequestMessage struct {
	Role    OllamaMessageRole `json:"role"`
	Content string            `json:"content"`
	Images  []string          `json:"images,omitempty"` // base64-encoded image payloads for multimodal prompts
}

// OllamaChatResponse defines the structure of Ollama chat response
type OllamaChatResponse struct {
	Message *OllamaChatResponseMessage `json:"message"`
}

// OllamaChatResponseMessage defines the structure of Ollama chat response message
type OllamaChatResponseMessage struct {
	// Content is a pointer so a missing "content" field can be distinguished from an empty string
	Content *string `json:"content"`
}
// BuildTextualRequest returns the http request by Ollama large language model adapter
func (p *OllamaLargeLanguageModelAdapter) BuildTextualRequest(c core.Context, uid int64, request *data.LargeLanguageModelRequest, responseType data.LargeLanguageModelResponseFormat) (*http.Request, error) {
	body, err := p.buildJsonRequestBody(c, uid, request, responseType)

	if err != nil {
		return nil, err
	}

	req, err := http.NewRequest("POST", p.getOllamaRequestUrl(), bytes.NewReader(body))

	if err != nil {
		return nil, err
	}

	req.Header.Set("Content-Type", "application/json")

	return req, nil
}
// ParseTextualResponse returns the textual response by Ollama large language model adapter.
// It returns errs.ErrFailedToRequestRemoteApi when the body is not valid json or when the
// message content field is absent.
func (p *OllamaLargeLanguageModelAdapter) ParseTextualResponse(c core.Context, uid int64, body []byte, responseType data.LargeLanguageModelResponseFormat) (*data.LargeLanguageModelTextualResponse, error) {
	chatResponse := &OllamaChatResponse{}
	err := json.Unmarshal(body, chatResponse) // pass the pointer itself, not a pointer to the pointer

	if err != nil {
		log.Errorf(c, "[ollama_large_language_model_adapter.ParseTextualResponse] failed to parse chat response for user \"uid:%d\", because %s", uid, err.Error())
		return nil, errs.ErrFailedToRequestRemoteApi
	}

	// chatResponse itself can never be nil here (the previous "chatResponse == nil" check
	// was dead code), so only the nested fields need validating
	if chatResponse.Message == nil || chatResponse.Message.Content == nil {
		log.Errorf(c, "[ollama_large_language_model_adapter.ParseTextualResponse] chat response is invalid for user \"uid:%d\"", uid)
		return nil, errs.ErrFailedToRequestRemoteApi
	}

	textualResponse := &data.LargeLanguageModelTextualResponse{
		Content: *chatResponse.Message.Content,
	}

	return textualResponse, nil
}
// buildJsonRequestBody builds the json body of the Ollama chat request from the
// provider-agnostic request entity. A system message is added only when a system prompt
// is set; the user prompt becomes either a text message or a base64 image message.
func (p *OllamaLargeLanguageModelAdapter) buildJsonRequestBody(c core.Context, uid int64, request *data.LargeLanguageModelRequest, responseType data.LargeLanguageModelResponseFormat) ([]byte, error) {
	if p.OllamaModelID == "" {
		return nil, errs.ErrInvalidLLMModelId
	}

	messages := make([]*OllamaChatRequestMessage, 0, 2)

	if request.SystemPrompt != "" {
		messages = append(messages, &OllamaChatRequestMessage{
			Role:    OllamaMessageRoleSystem,
			Content: request.SystemPrompt,
		})
	}

	if len(request.UserPrompt) > 0 {
		userMessage := &OllamaChatRequestMessage{
			Role: OllamaMessageRoleUser,
		}

		if request.UserPromptType == data.LARGE_LANGUAGE_MODEL_REQUEST_PROMPT_TYPE_IMAGE_URL {
			// Ollama expects raw base64 image data (no data-url prefix)
			userMessage.Images = []string{base64.StdEncoding.EncodeToString(request.UserPrompt)}
		} else {
			userMessage.Content = string(request.UserPrompt)
		}

		messages = append(messages, userMessage)
	}

	chatRequest := &OllamaChatRequest{
		Model:    p.OllamaModelID,
		Stream:   request.Stream,
		Messages: messages,
	}

	if responseType == data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON {
		chatRequest.Format = "json"
	}

	requestBodyBytes, err := json.Marshal(chatRequest)

	if err != nil {
		log.Errorf(c, "[ollama_large_language_model_adapter.buildJsonRequestBody] failed to marshal request body for user \"uid:%d\", because %s", uid, err.Error())
		return nil, errs.ErrOperationFailed
	}

	log.Debugf(c, "[ollama_large_language_model_adapter.buildJsonRequestBody] request body is %s", requestBodyBytes)

	return requestBodyBytes, nil
}
// getOllamaRequestUrl returns the final chat api url built from the configured server url.
// A trailing slash is added when missing before appending the chat api path.
func (p *OllamaLargeLanguageModelAdapter) getOllamaRequestUrl() string {
	url := p.OllamaServerURL

	// BUG FIX: guard against an empty server url — url[len(url)-1] previously
	// panicked with an index-out-of-range when OllamaServerURL was ""
	if len(url) == 0 || url[len(url)-1] != '/' {
		url += "/"
	}

	return url + ollamaChatCompletionsPath
}
// NewOllamaLargeLanguageModelProvider creates a new Ollama large language model provider instance
func NewOllamaLargeLanguageModelProvider(llmConfig *settings.LLMConfig) provider.LargeLanguageModelProvider {
	adapter := &OllamaLargeLanguageModelAdapter{
		OllamaServerURL: llmConfig.OllamaServerURL,
		OllamaModelID:   llmConfig.OllamaModelID,
	}

	return common.NewCommonHttpLargeLanguageModelProvider(adapter)
}
@@ -0,0 +1,143 @@
package ollama
import (
"encoding/json"
"testing"
"github.com/mayswind/ezbookkeeping/pkg/llm/data"
"github.com/stretchr/testify/assert"
"github.com/mayswind/ezbookkeeping/pkg/core"
)
// TestOllamaLargeLanguageModelAdapter_buildJsonRequestBody_TextualUserPrompt verifies the
// exact json body built from a system prompt plus a plain text user prompt.
func TestOllamaLargeLanguageModelAdapter_buildJsonRequestBody_TextualUserPrompt(t *testing.T) {
	adapter := &OllamaLargeLanguageModelAdapter{
		OllamaModelID: "test",
	}

	request := &data.LargeLanguageModelRequest{
		SystemPrompt: "You are a helpful assistant.",
		UserPrompt:   []byte("Hello, how are you?"),
	}

	bodyBytes, err := adapter.buildJsonRequestBody(core.NewNullContext(), 0, request, data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.Nil(t, err)

	var body map[string]interface{}
	err = json.Unmarshal(bodyBytes, &body)
	assert.Nil(t, err)

	assert.Equal(t, "{\"model\":\"test\",\"stream\":false,\"messages\":[{\"role\":\"system\",\"content\":\"You are a helpful assistant.\"},{\"role\":\"user\",\"content\":\"Hello, how are you?\"}],\"format\":\"json\"}", string(bodyBytes))
}

// TestOllamaLargeLanguageModelAdapter_buildJsonRequestBody_ImageUserPrompt verifies that an
// image user prompt is base64-encoded into the "images" field with an empty text content.
func TestOllamaLargeLanguageModelAdapter_buildJsonRequestBody_ImageUserPrompt(t *testing.T) {
	adapter := &OllamaLargeLanguageModelAdapter{
		OllamaModelID: "test",
	}

	request := &data.LargeLanguageModelRequest{
		SystemPrompt:   "What's in this image?",
		UserPrompt:     []byte("fakedata"),
		UserPromptType: data.LARGE_LANGUAGE_MODEL_REQUEST_PROMPT_TYPE_IMAGE_URL,
	}

	bodyBytes, err := adapter.buildJsonRequestBody(core.NewNullContext(), 0, request, data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.Nil(t, err)

	var body map[string]interface{}
	err = json.Unmarshal(bodyBytes, &body)
	assert.Nil(t, err)

	assert.Equal(t, "{\"model\":\"test\",\"stream\":false,\"messages\":[{\"role\":\"system\",\"content\":\"What's in this image?\"},{\"role\":\"user\",\"content\":\"\",\"images\":[\"ZmFrZWRhdGE=\"]}],\"format\":\"json\"}", string(bodyBytes))
}

// TestOllamaLargeLanguageModelAdapter_ParseTextualResponse_ValidJsonResponse verifies that
// the message content is extracted from a well-formed chat response.
func TestOllamaLargeLanguageModelAdapter_ParseTextualResponse_ValidJsonResponse(t *testing.T) {
	adapter := &OllamaLargeLanguageModelAdapter{}
	response := `{
		"model": "test",
		"created_at": "2025-09-01T01:02:03.456789Z",
		"message": {
			"role": "assistant",
			"content": "This is a test response"
		}
	}`

	result, err := adapter.ParseTextualResponse(core.NewNullContext(), 0, []byte(response), data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.Nil(t, err)
	assert.Equal(t, "This is a test response", result.Content)
}

// TestOllamaLargeLanguageModelAdapter_ParseTextualResponse_EmptyResponse verifies that an
// empty (but present) content field parses successfully to an empty string.
func TestOllamaLargeLanguageModelAdapter_ParseTextualResponse_EmptyResponse(t *testing.T) {
	adapter := &OllamaLargeLanguageModelAdapter{}
	response := `{
		"model": "test",
		"created_at": "2025-09-01T01:02:03.456789Z",
		"message": {
			"role": "assistant",
			"content": ""
		}
	}`

	result, err := adapter.ParseTextualResponse(core.NewNullContext(), 0, []byte(response), data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.Nil(t, err)
	assert.Equal(t, "", result.Content)
}

// TestOllamaLargeLanguageModelAdapter_ParseTextualResponse_EmptyMessage verifies that a
// message object without any fields is rejected.
func TestOllamaLargeLanguageModelAdapter_ParseTextualResponse_EmptyMessage(t *testing.T) {
	adapter := &OllamaLargeLanguageModelAdapter{}
	response := `{
		"model": "test",
		"created_at": "2025-09-01T01:02:03.456789Z",
		"message": {}
	}`

	_, err := adapter.ParseTextualResponse(core.NewNullContext(), 0, []byte(response), data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.EqualError(t, err, "failed to request third party api")
}

// TestOllamaLargeLanguageModelAdapter_ParseTextualResponse_NoContentFieldInMessage verifies
// that a missing "content" field (as opposed to an empty one) is rejected.
func TestOllamaLargeLanguageModelAdapter_ParseTextualResponse_NoContentFieldInMessage(t *testing.T) {
	adapter := &OllamaLargeLanguageModelAdapter{}
	response := `{
		"model": "test",
		"created_at": "2025-09-01T01:02:03.456789Z",
		"message": {
			"role": "assistant"
		}
	}`

	_, err := adapter.ParseTextualResponse(core.NewNullContext(), 0, []byte(response), data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.EqualError(t, err, "failed to request third party api")
}

// TestOllamaLargeLanguageModelAdapter_ParseTextualResponse_InvalidJson verifies that a body
// which is not json at all is rejected.
func TestOllamaLargeLanguageModelAdapter_ParseTextualResponse_InvalidJson(t *testing.T) {
	adapter := &OllamaLargeLanguageModelAdapter{}
	response := "error"

	_, err := adapter.ParseTextualResponse(core.NewNullContext(), 0, []byte(response), data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.EqualError(t, err, "failed to request third party api")
}

// TestOllamaLargeLanguageModelAdapter_GetOllamaRequestUrl verifies url joining with and
// without a trailing slash on the configured server url.
func TestOllamaLargeLanguageModelAdapter_GetOllamaRequestUrl(t *testing.T) {
	adapter := &OllamaLargeLanguageModelAdapter{
		OllamaServerURL: "http://localhost:11434/",
	}
	url := adapter.getOllamaRequestUrl()
	assert.Equal(t, "http://localhost:11434/api/chat", url)

	adapter = &OllamaLargeLanguageModelAdapter{
		OllamaServerURL: "http://localhost:11434",
	}
	url = adapter.getOllamaRequestUrl()
	assert.Equal(t, "http://localhost:11434/api/chat", url)

	adapter = &OllamaLargeLanguageModelAdapter{
		OllamaServerURL: "http://example.com/ollama/",
	}
	url = adapter.getOllamaRequestUrl()
	assert.Equal(t, "http://example.com/ollama/api/chat", url)
}
@@ -0,0 +1,44 @@
package openai
import (
"net/http"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider"
"github.com/mayswind/ezbookkeeping/pkg/settings"
)
// OpenAIOfficialChatCompletionsAPIProvider defines the structure of OpenAI official chat completions API provider
type OpenAIOfficialChatCompletionsAPIProvider struct {
	OpenAIChatCompletionsAPIProvider
	OpenAIAPIKey  string // api key sent as a bearer token
	OpenAIModelID string // model id sent in the chat completions request
}

// openAIChatCompletionsUrl is the fixed endpoint of the official OpenAI chat completions api
const openAIChatCompletionsUrl = "https://api.openai.com/v1/chat/completions"
// BuildChatCompletionsHttpRequest returns the chat completions http request by OpenAI official chat completions API provider
func (p *OpenAIOfficialChatCompletionsAPIProvider) BuildChatCompletionsHttpRequest(c core.Context, uid int64) (*http.Request, error) {
	httpRequest, err := http.NewRequest("POST", openAIChatCompletionsUrl, nil)

	if err != nil {
		return nil, err
	}

	httpRequest.Header.Set("Authorization", "Bearer "+p.OpenAIAPIKey)

	return httpRequest, nil
}

// GetModelID returns the model id of OpenAI official chat completions API provider
func (p *OpenAIOfficialChatCompletionsAPIProvider) GetModelID() string {
	modelID := p.OpenAIModelID
	return modelID
}
// NewOpenAILargeLanguageModelProvider creates a new OpenAI large language model provider instance
func NewOpenAILargeLanguageModelProvider(llmConfig *settings.LLMConfig) provider.LargeLanguageModelProvider {
	apiProvider := &OpenAIOfficialChatCompletionsAPIProvider{
		OpenAIAPIKey:  llmConfig.OpenAIAPIKey,
		OpenAIModelID: llmConfig.OpenAIModelID,
	}

	return newCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter(apiProvider)
}
@@ -0,0 +1,219 @@
package openai
import (
"bytes"
"encoding/base64"
"encoding/json"
"io"
"net/http"
"github.com/invopop/jsonschema"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/errs"
"github.com/mayswind/ezbookkeeping/pkg/llm/data"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider/common"
"github.com/mayswind/ezbookkeeping/pkg/log"
)
// OpenAIChatCompletionsAPIProvider defines the structure of OpenAI chat completions API provider.
// Implementations supply the endpoint, authentication headers and model id for one concrete
// OpenAI-compatible service.
type OpenAIChatCompletionsAPIProvider interface {
	// BuildChatCompletionsHttpRequest returns the chat completions http request
	BuildChatCompletionsHttpRequest(c core.Context, uid int64) (*http.Request, error)

	// GetModelID returns the model id if supported, otherwise returns empty string
	GetModelID() string
}
// CommonOpenAIChatCompletionsAPILargeLanguageModelAdapter defines the structure of OpenAI common compatible large language model adapter based on chat completions api
type CommonOpenAIChatCompletionsAPILargeLanguageModelAdapter struct {
	common.HttpLargeLanguageModelAdapter
	apiProvider OpenAIChatCompletionsAPIProvider // supplies endpoint, auth headers and model id
}
// OpenAIMessageRole defines the role of OpenAI chat completions message
type OpenAIMessageRole string

// OpenAI Message Roles
const (
	OpenAIMessageRoleSystem OpenAIMessageRole = "system"
	OpenAIMessageRoleUser   OpenAIMessageRole = "user"
)

// OpenAIChatCompletionsRequestResponseFormatType defines the type of OpenAI chat completions request response format
type OpenAIChatCompletionsRequestResponseFormatType string

// OpenAI Chat Completions Request Response Format Types
const (
	OpenAIChatCompletionsRequestResponseFormatTypeJsonObject OpenAIChatCompletionsRequestResponseFormatType = "json_object" // free-form json output
	OpenAIChatCompletionsRequestResponseFormatTypeJsonSchema OpenAIChatCompletionsRequestResponseFormatType = "json_schema" // json output constrained by a schema
)
// OpenAIChatCompletionsRequest defines the structure of OpenAI chat completions request
type OpenAIChatCompletionsRequest struct {
	Model    string `json:"model"`
	Stream   bool   `json:"stream"`
	// Messages holds *OpenAIChatCompletionsRequestMessage values with either string
	// or image-content payloads, hence the []any element type
	Messages       []any                                       `json:"messages"`
	ResponseFormat *OpenAIChatCompletionsRequestResponseFormat `json:"response_format,omitempty"`
}

// OpenAIChatCompletionsRequestMessage defines the structure of OpenAI chat completions request message.
// The content is either plain text or a list of image contents, selected via the type parameter.
type OpenAIChatCompletionsRequestMessage[T string | []*OpenAIChatCompletionsRequestImageContent] struct {
	Role    OpenAIMessageRole `json:"role"`
	Content T                 `json:"content"`
}

// OpenAIChatCompletionsRequestImageContent defines the structure of OpenAI chat completions request image content
type OpenAIChatCompletionsRequestImageContent struct {
	Type     string                                `json:"type"` // always "image_url" for image contents
	ImageURL *OpenAIChatCompletionsRequestImageUrl `json:"image_url"`
}

// OpenAIChatCompletionsRequestResponseFormat defines the structure of OpenAI chat completions request response format
type OpenAIChatCompletionsRequestResponseFormat struct {
	Type       OpenAIChatCompletionsRequestResponseFormatType `json:"type"`
	JsonSchema *jsonschema.Schema                             `json:"json_schema,omitempty"`
}

// OpenAIChatCompletionsRequestImageUrl defines the structure of OpenAI image url
type OpenAIChatCompletionsRequestImageUrl struct {
	Url string `json:"url"` // a data url ("data:<mime>;base64,<payload>") or a remote url
}

// OpenAIChatCompletionsResponse defines the structure of OpenAI chat completions response
type OpenAIChatCompletionsResponse struct {
	Choices []*OpenAIChatCompletionsResponseChoice `json:"choices"`
}

// OpenAIChatCompletionsResponseChoice defines the structure of OpenAI chat completions response choice
type OpenAIChatCompletionsResponseChoice struct {
	Message *OpenAIChatCompletionsResponseMessage `json:"message"`
}

// OpenAIChatCompletionsResponseMessage defines the structure of OpenAI chat completions response message
type OpenAIChatCompletionsResponseMessage struct {
	// Content is a pointer so a missing "content" field can be distinguished from an empty string
	Content *string `json:"content"`
}
// BuildTextualRequest returns the http request by OpenAI common compatible adapter.
// The api provider supplies the endpoint and auth headers; this method attaches the
// serialized json body.
func (p *CommonOpenAIChatCompletionsAPILargeLanguageModelAdapter) BuildTextualRequest(c core.Context, uid int64, request *data.LargeLanguageModelRequest, responseType data.LargeLanguageModelResponseFormat) (*http.Request, error) {
	requestBody, err := p.buildJsonRequestBody(c, uid, request, responseType)

	if err != nil {
		return nil, err
	}

	httpRequest, err := p.apiProvider.BuildChatCompletionsHttpRequest(c, uid)

	if err != nil {
		return nil, err
	}

	// FIX: assigning Body alone leaves ContentLength at 0, so the request was sent
	// without a Content-Length header (chunked), which some api servers reject;
	// GetBody additionally lets the transport replay the body on redirects/retries
	httpRequest.Body = io.NopCloser(bytes.NewReader(requestBody))
	httpRequest.ContentLength = int64(len(requestBody))
	httpRequest.GetBody = func() (io.ReadCloser, error) {
		return io.NopCloser(bytes.NewReader(requestBody)), nil
	}

	httpRequest.Header.Set("Content-Type", "application/json")

	return httpRequest, nil
}
// ParseTextualResponse returns the textual response by OpenAI common compatible adapter.
// It returns errs.ErrFailedToRequestRemoteApi when the body is not valid json or when the
// first choice has no message content.
func (p *CommonOpenAIChatCompletionsAPILargeLanguageModelAdapter) ParseTextualResponse(c core.Context, uid int64, body []byte, responseType data.LargeLanguageModelResponseFormat) (*data.LargeLanguageModelTextualResponse, error) {
	chatCompletionsResponse := &OpenAIChatCompletionsResponse{}
	err := json.Unmarshal(body, chatCompletionsResponse) // pass the pointer itself, not a pointer to the pointer

	if err != nil {
		log.Errorf(c, "[openai_common_compatible_large_language_model_adapter.ParseTextualResponse] failed to parse chat completions response for user \"uid:%d\", because %s", uid, err.Error())
		return nil, errs.ErrFailedToRequestRemoteApi
	}

	// chatCompletionsResponse itself can never be nil here (the previous nil check was
	// dead code), and len() on a nil slice is 0, so these checks cover all invalid cases
	if len(chatCompletionsResponse.Choices) < 1 ||
		chatCompletionsResponse.Choices[0].Message == nil ||
		chatCompletionsResponse.Choices[0].Message.Content == nil {
		log.Errorf(c, "[openai_common_compatible_large_language_model_adapter.ParseTextualResponse] chat completions response is invalid for user \"uid:%d\"", uid)
		return nil, errs.ErrFailedToRequestRemoteApi
	}

	textualResponse := &data.LargeLanguageModelTextualResponse{
		Content: *chatCompletionsResponse.Choices[0].Message.Content,
	}

	return textualResponse, nil
}
// buildJsonRequestBody builds the json body of the chat completions request. A system
// message is added only when a system prompt is set; the user prompt becomes either a
// text message or a data-url image message. For json responses, a json schema response
// format is used when the request carries a response object type, otherwise plain
// json_object mode is requested.
func (p *CommonOpenAIChatCompletionsAPILargeLanguageModelAdapter) buildJsonRequestBody(c core.Context, uid int64, request *data.LargeLanguageModelRequest, responseType data.LargeLanguageModelResponseFormat) ([]byte, error) {
	modelID := p.apiProvider.GetModelID()

	if modelID == "" {
		return nil, errs.ErrInvalidLLMModelId
	}

	messages := make([]any, 0, 2)

	if request.SystemPrompt != "" {
		messages = append(messages, &OpenAIChatCompletionsRequestMessage[string]{
			Role:    OpenAIMessageRoleSystem,
			Content: request.SystemPrompt,
		})
	}

	if len(request.UserPrompt) > 0 {
		if request.UserPromptType == data.LARGE_LANGUAGE_MODEL_REQUEST_PROMPT_TYPE_IMAGE_URL {
			// embed the image as a data url so no external hosting is required
			dataUrl := "data:" + request.UserPromptContentType + ";base64," + base64.StdEncoding.EncodeToString(request.UserPrompt)

			messages = append(messages, &OpenAIChatCompletionsRequestMessage[[]*OpenAIChatCompletionsRequestImageContent]{
				Role: OpenAIMessageRoleUser,
				Content: []*OpenAIChatCompletionsRequestImageContent{
					{
						Type: "image_url",
						ImageURL: &OpenAIChatCompletionsRequestImageUrl{
							Url: dataUrl,
						},
					},
				},
			})
		} else {
			messages = append(messages, &OpenAIChatCompletionsRequestMessage[string]{
				Role:    OpenAIMessageRoleUser,
				Content: string(request.UserPrompt),
			})
		}
	}

	chatCompletionsRequest := &OpenAIChatCompletionsRequest{
		Model:    modelID,
		Stream:   request.Stream,
		Messages: messages,
	}

	if responseType == data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON {
		responseFormat := &OpenAIChatCompletionsRequestResponseFormat{
			Type: OpenAIChatCompletionsRequestResponseFormatTypeJsonObject,
		}

		if request.ResponseJsonObjectType != nil {
			schemeGenerator := jsonschema.Reflector{
				Anonymous:      true,
				DoNotReference: true,
				ExpandedStruct: true,
			}

			schema := schemeGenerator.ReflectFromType(request.ResponseJsonObjectType)
			schema.Version = "" // drop the "$schema" field; the api does not expect it

			responseFormat.Type = OpenAIChatCompletionsRequestResponseFormatTypeJsonSchema
			responseFormat.JsonSchema = schema
		}

		chatCompletionsRequest.ResponseFormat = responseFormat
	}

	requestBodyBytes, err := json.Marshal(chatCompletionsRequest)

	if err != nil {
		log.Errorf(c, "[openai_common_compatible_large_language_model_adapter.buildJsonRequestBody] failed to marshal request body for user \"uid:%d\", because %s", uid, err.Error())
		return nil, errs.ErrOperationFailed
	}

	log.Debugf(c, "[openai_common_compatible_large_language_model_adapter.buildJsonRequestBody] request body is %s", requestBodyBytes)

	return requestBodyBytes, nil
}
// newCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter wraps the given chat completions
// api provider into a common http large language model provider.
func newCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter(apiProvider OpenAIChatCompletionsAPIProvider) provider.LargeLanguageModelProvider {
	adapter := &CommonOpenAIChatCompletionsAPILargeLanguageModelAdapter{
		apiProvider: apiProvider,
	}

	return common.NewCommonHttpLargeLanguageModelProvider(adapter)
}
@@ -0,0 +1,163 @@
package openai
import (
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/llm/data"
)
// TestCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter_buildJsonRequestBody_TextualUserPrompt
// verifies the exact json body built from a system prompt plus a plain text user prompt.
func TestCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter_buildJsonRequestBody_TextualUserPrompt(t *testing.T) {
	adapter := &CommonOpenAIChatCompletionsAPILargeLanguageModelAdapter{
		apiProvider: &OpenAIOfficialChatCompletionsAPIProvider{
			OpenAIModelID: "test",
		},
	}

	request := &data.LargeLanguageModelRequest{
		SystemPrompt: "You are a helpful assistant.",
		UserPrompt:   []byte("Hello, how are you?"),
	}

	bodyBytes, err := adapter.buildJsonRequestBody(core.NewNullContext(), 0, request, data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.Nil(t, err)

	var body map[string]interface{}
	err = json.Unmarshal(bodyBytes, &body)
	assert.Nil(t, err)

	assert.Equal(t, "{\"model\":\"test\",\"stream\":false,\"messages\":[{\"role\":\"system\",\"content\":\"You are a helpful assistant.\"},{\"role\":\"user\",\"content\":\"Hello, how are you?\"}],\"response_format\":{\"type\":\"json_object\"}}", string(bodyBytes))
}

// TestCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter_buildJsonRequestBody_ImageUserPrompt
// verifies that an image user prompt is encoded as a base64 data url in the image content.
func TestCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter_buildJsonRequestBody_ImageUserPrompt(t *testing.T) {
	adapter := &CommonOpenAIChatCompletionsAPILargeLanguageModelAdapter{
		apiProvider: &OpenAIOfficialChatCompletionsAPIProvider{
			OpenAIModelID: "test",
		},
	}

	request := &data.LargeLanguageModelRequest{
		SystemPrompt:          "What's in this image?",
		UserPrompt:            []byte("fakedata"),
		UserPromptType:        data.LARGE_LANGUAGE_MODEL_REQUEST_PROMPT_TYPE_IMAGE_URL,
		UserPromptContentType: "image/png",
	}

	bodyBytes, err := adapter.buildJsonRequestBody(core.NewNullContext(), 0, request, data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.Nil(t, err)

	var body map[string]interface{}
	err = json.Unmarshal(bodyBytes, &body)
	assert.Nil(t, err)

	assert.Equal(t, "{\"model\":\"test\",\"stream\":false,\"messages\":[{\"role\":\"system\",\"content\":\"What's in this image?\"},{\"role\":\"user\",\"content\":[{\"type\":\"image_url\",\"image_url\":{\"url\":\"data:image/png;base64,ZmFrZWRhdGE=\"}}]}],\"response_format\":{\"type\":\"json_object\"}}", string(bodyBytes))
}

// TestCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter_ParseTextualResponse_ValidJsonResponse
// verifies that the first choice's message content is extracted from a well-formed response.
func TestCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter_ParseTextualResponse_ValidJsonResponse(t *testing.T) {
	adapter := &CommonOpenAIChatCompletionsAPILargeLanguageModelAdapter{
		apiProvider: &OpenAIOfficialChatCompletionsAPIProvider{},
	}
	response := `{
		"id": "test-123",
		"object": "chat.completion",
		"created": 1234567890,
		"model": "test",
		"usage": {
			"prompt_tokens": 13,
			"completion_tokens": 7,
			"total_tokens": 20
		},
		"choices": [
			{
				"finish_reason": "stop",
				"index": 0,
				"message": {
					"role": "assistant",
					"content": "This is a test response"
				}
			}
		]
	}`

	result, err := adapter.ParseTextualResponse(core.NewNullContext(), 0, []byte(response), data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.Nil(t, err)
	assert.Equal(t, "This is a test response", result.Content)
}

// TestCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter_ParseTextualResponse_EmptyResponse
// verifies that an empty (but present) content field parses successfully to an empty string.
func TestCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter_ParseTextualResponse_EmptyResponse(t *testing.T) {
	adapter := &CommonOpenAIChatCompletionsAPILargeLanguageModelAdapter{
		apiProvider: &OpenAIOfficialChatCompletionsAPIProvider{},
	}
	response := `{
		"id": "test-123",
		"object": "chat.completion",
		"choices": [
			{
				"finish_reason": "stop",
				"index": 0,
				"message": {
					"role": "assistant",
					"content": ""
				}
			}
		]
	}`

	result, err := adapter.ParseTextualResponse(core.NewNullContext(), 0, []byte(response), data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.Nil(t, err)
	assert.Equal(t, "", result.Content)
}

// TestCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter_ParseTextualResponse_EmptyChoices
// verifies that a response with an empty choices array is rejected.
func TestCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter_ParseTextualResponse_EmptyChoices(t *testing.T) {
	adapter := &CommonOpenAIChatCompletionsAPILargeLanguageModelAdapter{
		apiProvider: &OpenAIOfficialChatCompletionsAPIProvider{},
	}
	response := `{
		"id": "test-123",
		"object": "chat.completion",
		"choices": []
	}`

	_, err := adapter.ParseTextualResponse(core.NewNullContext(), 0, []byte(response), data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.EqualError(t, err, "failed to request third party api")
}

// TestCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter_ParseTextualResponse_NoChoiceContent
// verifies that a choice message without a "content" field is rejected.
func TestCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter_ParseTextualResponse_NoChoiceContent(t *testing.T) {
	adapter := &CommonOpenAIChatCompletionsAPILargeLanguageModelAdapter{
		apiProvider: &OpenAIOfficialChatCompletionsAPIProvider{},
	}
	response := `{
		"id": "chatcmpl-123",
		"object": "chat.completion",
		"choices": [
			{
				"finish_reason": "stop",
				"index": 0,
				"message": {
					"role": "assistant"
				}
			}
		]
	}`

	_, err := adapter.ParseTextualResponse(core.NewNullContext(), 0, []byte(response), data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.EqualError(t, err, "failed to request third party api")
}

// TestCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter_ParseTextualResponse_InvalidJson
// verifies that a body which is not json at all is rejected.
func TestCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter_ParseTextualResponse_InvalidJson(t *testing.T) {
	adapter := &CommonOpenAIChatCompletionsAPILargeLanguageModelAdapter{
		apiProvider: &OpenAIOfficialChatCompletionsAPIProvider{},
	}
	response := "error"

	_, err := adapter.ParseTextualResponse(core.NewNullContext(), 0, []byte(response), data.LARGE_LANGUAGE_MODEL_RESPONSE_FORMAT_JSON)
	assert.EqualError(t, err, "failed to request third party api")
}
@@ -0,0 +1,59 @@
package openai
import (
"net/http"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider"
"github.com/mayswind/ezbookkeeping/pkg/settings"
)
// openAICompatibleChatCompletionsPath is the relative chat completions path, appended to the configured base url
const openAICompatibleChatCompletionsPath = "chat/completions"

// OpenAICompatibleChatCompletionsAPIProvider defines the structure of OpenAI compatible chat completions API provider
type OpenAICompatibleChatCompletionsAPIProvider struct {
	OpenAIChatCompletionsAPIProvider
	OpenAICompatibleBaseURL string // base url of the OpenAI-compatible service, with or without a trailing slash
	OpenAICompatibleAPIKey  string // optional api key; when empty no Authorization header is sent
	OpenAICompatibleModelID string // model id sent in the chat completions request
}
// BuildChatCompletionsHttpRequest returns the chat completions http request by OpenAI compatible chat completions API provider
func (p *OpenAICompatibleChatCompletionsAPIProvider) BuildChatCompletionsHttpRequest(c core.Context, uid int64) (*http.Request, error) {
	httpRequest, err := http.NewRequest("POST", p.getFinalChatCompletionsRequestUrl(), nil)

	if err != nil {
		return nil, err
	}

	// the api key is optional for self-hosted compatible services
	if p.OpenAICompatibleAPIKey != "" {
		httpRequest.Header.Set("Authorization", "Bearer "+p.OpenAICompatibleAPIKey)
	}

	return httpRequest, nil
}

// GetModelID returns the model id of OpenAI compatible chat completions API provider
func (p *OpenAICompatibleChatCompletionsAPIProvider) GetModelID() string {
	modelID := p.OpenAICompatibleModelID
	return modelID
}
// getFinalChatCompletionsRequestUrl returns the chat completions api url built from the
// configured base url. A trailing slash is added when missing before appending the path.
func (p *OpenAICompatibleChatCompletionsAPIProvider) getFinalChatCompletionsRequestUrl() string {
	url := p.OpenAICompatibleBaseURL

	// BUG FIX: guard against an empty base url — url[len(url)-1] previously
	// panicked with an index-out-of-range when OpenAICompatibleBaseURL was ""
	if len(url) == 0 || url[len(url)-1] != '/' {
		url += "/"
	}

	return url + openAICompatibleChatCompletionsPath
}
// NewOpenAICompatibleLargeLanguageModelProvider creates a new OpenAI compatible large language model provider instance
func NewOpenAICompatibleLargeLanguageModelProvider(llmConfig *settings.LLMConfig) provider.LargeLanguageModelProvider {
	apiProvider := &OpenAICompatibleChatCompletionsAPIProvider{
		OpenAICompatibleBaseURL: llmConfig.OpenAICompatibleBaseURL,
		OpenAICompatibleAPIKey:  llmConfig.OpenAICompatibleAPIKey,
		OpenAICompatibleModelID: llmConfig.OpenAICompatibleModelID,
	}

	return newCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter(apiProvider)
}
@@ -0,0 +1,27 @@
package openai
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestOpenAICompatibleChatCompletionsAPIProvider_GetFinalRequestUrl verifies url joining
// with and without a trailing slash on the configured base url.
func TestOpenAICompatibleChatCompletionsAPIProvider_GetFinalRequestUrl(t *testing.T) {
	apiProvider := &OpenAICompatibleChatCompletionsAPIProvider{
		OpenAICompatibleBaseURL: "https://api.example.com/v1/",
	}
	url := apiProvider.getFinalChatCompletionsRequestUrl()
	assert.Equal(t, "https://api.example.com/v1/chat/completions", url)

	apiProvider = &OpenAICompatibleChatCompletionsAPIProvider{
		OpenAICompatibleBaseURL: "https://api.example.com/v1",
	}
	url = apiProvider.getFinalChatCompletionsRequestUrl()
	assert.Equal(t, "https://api.example.com/v1/chat/completions", url)

	apiProvider = &OpenAICompatibleChatCompletionsAPIProvider{
		OpenAICompatibleBaseURL: "https://example.com/api",
	}
	url = apiProvider.getFinalChatCompletionsRequestUrl()
	assert.Equal(t, "https://example.com/api/chat/completions", url)
}
@@ -0,0 +1,46 @@
package openai
import (
"net/http"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/llm/provider"
"github.com/mayswind/ezbookkeeping/pkg/settings"
)
// OpenRouterChatCompletionsAPIProvider defines the structure of OpenRouter chat completions API provider
type OpenRouterChatCompletionsAPIProvider struct {
	OpenAIChatCompletionsAPIProvider
	OpenRouterAPIKey  string // api key sent as a bearer token
	OpenRouterModelID string // model id sent in the chat completions request
}

// openRouterChatCompletionsUrl is the fixed endpoint of the OpenRouter chat completions api
const openRouterChatCompletionsUrl = "https://openrouter.ai/api/v1/chat/completions"
// BuildChatCompletionsHttpRequest returns the chat completions http request by OpenRouter chat completions API provider
func (p *OpenRouterChatCompletionsAPIProvider) BuildChatCompletionsHttpRequest(c core.Context, uid int64) (*http.Request, error) {
	httpRequest, err := http.NewRequest("POST", openRouterChatCompletionsUrl, nil)

	if err != nil {
		return nil, err
	}

	httpRequest.Header.Set("Authorization", "Bearer "+p.OpenRouterAPIKey)

	// OpenRouter-specific attribution headers
	httpRequest.Header.Set("HTTP-Referer", "https://ezbookkeeping.mayswind.net/")
	httpRequest.Header.Set("X-Title", "ezBookkeeping")

	return httpRequest, nil
}

// GetModelID returns the model id of OpenRouter chat completions API provider
func (p *OpenRouterChatCompletionsAPIProvider) GetModelID() string {
	modelID := p.OpenRouterModelID
	return modelID
}
// NewOpenRouterLargeLanguageModelProvider creates a new OpenRouter large language model provider instance
func NewOpenRouterLargeLanguageModelProvider(llmConfig *settings.LLMConfig) provider.LargeLanguageModelProvider {
	apiProvider := &OpenRouterChatCompletionsAPIProvider{
		OpenRouterAPIKey:  llmConfig.OpenRouterAPIKey,
		OpenRouterModelID: llmConfig.OpenRouterModelID,
	}

	return newCommonOpenAIChatCompletionsAPILargeLanguageModelAdapter(apiProvider)
}