diff --git a/cmd/webserver.go b/cmd/webserver.go index c43c8a45..90adfe2f 100644 --- a/cmd/webserver.go +++ b/cmd/webserver.go @@ -396,7 +396,7 @@ func startWebServer(c *core.CliContext) error { apiV1Route.POST("/transactions/delete.json", bindApi(api.Transactions.TransactionDeleteHandler)) if config.EnableDataImport { - apiV1Route.POST("/transactions/parse_dsv_file.json", bindApi(api.Transactions.TransactionParseImportDsvFileDataHandler)) + apiV1Route.POST("/transactions/parse_custom_file.json", bindApi(api.Transactions.TransactionParseImportCustomFileDataHandler)) apiV1Route.POST("/transactions/parse_import.json", bindApi(api.Transactions.TransactionParseImportFileHandler)) apiV1Route.POST("/transactions/import.json", bindApi(api.Transactions.TransactionImportHandler)) apiV1Route.GET("/transactions/import/process.json", bindApi(api.Transactions.TransactionImportProcessHandler)) diff --git a/pkg/api/transactions.go b/pkg/api/transactions.go index 90b44d56..8ec50fde 100644 --- a/pkg/api/transactions.go +++ b/pkg/api/transactions.go @@ -1401,13 +1401,13 @@ func (a *TransactionsApi) TransactionDeleteHandler(c *core.WebContext) (any, *er return true, nil } -// TransactionParseImportDsvFileDataHandler returns the parsed file data by request parameters for current user -func (a *TransactionsApi) TransactionParseImportDsvFileDataHandler(c *core.WebContext) (any, *errs.Error) { +// TransactionParseImportCustomFileDataHandler returns the parsed file data by request parameters for current user +func (a *TransactionsApi) TransactionParseImportCustomFileDataHandler(c *core.WebContext) (any, *errs.Error) { uid := c.GetCurrentUid() form, err := c.MultipartForm() if err != nil { - log.Errorf(c, "[transactions.TransactionParseImportDsvFileDataHandler] failed to get multi-part form data for user \"uid:%d\", because %s", uid, err.Error()) + log.Errorf(c, "[transactions.TransactionParseImportCustomFileDataHandler] failed to get multi-part form data for user \"uid:%d\", because %s", 
uid, err.Error()) return nil, errs.ErrParameterInvalid } @@ -1419,18 +1419,18 @@ func (a *TransactionsApi) TransactionParseImportDsvFileDataHandler(c *core.WebCo fileType := fileTypes[0] - if !converters.IsCustomDelimiterSeparatedValuesFileType(fileType) { + if !converters.IsCustomFileFormatFileType(fileType) { return nil, errs.Or(err, errs.ErrImportFileTypeNotSupported) } fileEncodings := form.Value["fileEncoding"] + fileEncoding := "" - if len(fileEncodings) < 1 || fileEncodings[0] == "" { - return nil, errs.ErrImportFileEncodingIsEmpty + if len(fileEncodings) > 0 { + fileEncoding = fileEncodings[0] } - fileEncoding := fileEncodings[0] - dataParser, err := converters.CreateNewDelimiterSeparatedValuesDataParser(fileType, fileEncoding) + dataParser, err := converters.CreateNewCustomFileFormatTransactionDataParser(fileType, fileEncoding) if err != nil { return nil, errs.Or(err, errs.ErrImportFileTypeNotSupported) @@ -1439,24 +1439,24 @@ func (a *TransactionsApi) TransactionParseImportDsvFileDataHandler(c *core.WebCo importFiles := form.File["file"] if len(importFiles) < 1 { - log.Warnf(c, "[transactions.TransactionParseImportDsvFileDataHandler] there is no import file in request for user \"uid:%d\"", uid) + log.Warnf(c, "[transactions.TransactionParseImportCustomFileDataHandler] there is no import file in request for user \"uid:%d\"", uid) return nil, errs.ErrNoFilesUpload } if importFiles[0].Size < 1 { - log.Warnf(c, "[transactions.TransactionParseImportDsvFileDataHandler] the size of import file in request is zero for user \"uid:%d\"", uid) + log.Warnf(c, "[transactions.TransactionParseImportCustomFileDataHandler] the size of import file in request is zero for user \"uid:%d\"", uid) return nil, errs.ErrUploadedFileEmpty } if importFiles[0].Size > int64(a.CurrentConfig().MaxImportFileSize) { - log.Warnf(c, "[transactions.TransactionParseImportDsvFileDataHandler] the upload file size \"%d\" exceeds the maximum size \"%d\" of import file for user \"uid:%d\"", 
importFiles[0].Size, a.CurrentConfig().MaxImportFileSize, uid) + log.Warnf(c, "[transactions.TransactionParseImportCustomFileDataHandler] the upload file size \"%d\" exceeds the maximum size \"%d\" of import file for user \"uid:%d\"", importFiles[0].Size, a.CurrentConfig().MaxImportFileSize, uid) return nil, errs.ErrExceedMaxUploadFileSize } importFile, err := importFiles[0].Open() if err != nil { - log.Errorf(c, "[transactions.TransactionParseImportDsvFileDataHandler] failed to get import file from request for user \"uid:%d\", because %s", uid, err.Error()) + log.Errorf(c, "[transactions.TransactionParseImportCustomFileDataHandler] failed to get import file from request for user \"uid:%d\", because %s", uid, err.Error()) return nil, errs.ErrOperationFailed } @@ -1464,14 +1464,14 @@ func (a *TransactionsApi) TransactionParseImportDsvFileDataHandler(c *core.WebCo fileData, err := io.ReadAll(importFile) if err != nil { - log.Errorf(c, "[transactions.TransactionParseImportDsvFileDataHandler] failed to read import file data for user \"uid:%d\", because %s", uid, err.Error()) + log.Errorf(c, "[transactions.TransactionParseImportCustomFileDataHandler] failed to read import file data for user \"uid:%d\", because %s", uid, err.Error()) return nil, errs.Or(err, errs.ErrOperationFailed) } - allLines, err := dataParser.ParseDsvFileLines(c, fileData) + allLines, err := dataParser.ParseDataLines(c, fileData) if err != nil { - log.Errorf(c, "[transactions.TransactionParseImportDsvFileDataHandler] failed to parse import file data for user \"uid:%d\", because %s", uid, err.Error()) + log.Errorf(c, "[transactions.TransactionParseImportCustomFileDataHandler] failed to parse import file data for user \"uid:%d\", because %s", uid, err.Error()) return nil, errs.Or(err, errs.ErrOperationFailed) } @@ -1514,15 +1514,14 @@ func (a *TransactionsApi) TransactionParseImportFileHandler(c *core.WebContext) var dataImporter converter.TransactionDataImporter - if 
converters.IsCustomDelimiterSeparatedValuesFileType(fileType) { + if converters.IsCustomFileFormatFileType(fileType) { fileEncodings := form.Value["fileEncoding"] + fileEncoding := "" - if len(fileEncodings) < 1 || fileEncodings[0] == "" { - return nil, errs.ErrImportFileEncodingIsEmpty + if len(fileEncodings) > 0 { + fileEncoding = fileEncodings[0] } - fileEncoding := fileEncodings[0] - columnMappings := form.Value["columnMapping"] if len(columnMappings) < 1 || columnMappings[0] == "" { @@ -1606,7 +1605,7 @@ func (a *TransactionsApi) TransactionParseImportFileHandler(c *core.WebContext) transactionTagSeparator = transactionTagSeparators[0] } - dataImporter, err = converters.CreateNewDelimiterSeparatedValuesDataImporter(fileType, fileEncoding, columnIndexMapping, transactionTypeNameMapping, hasHeaderLine, timeFormats[0], timezoneFormat, amountDecimalSeparator, amountDigitGroupingSymbol, geoLocationSeparator, geoLocationOrder, transactionTagSeparator) + dataImporter, err = converters.CreateNewCustomTransactionDataImporter(fileType, fileEncoding, columnIndexMapping, transactionTypeNameMapping, hasHeaderLine, timeFormats[0], timezoneFormat, amountDecimalSeparator, amountDigitGroupingSymbol, geoLocationSeparator, geoLocationOrder, transactionTagSeparator) } else { dataImporter, err = converters.GetTransactionDataImporter(fileType) } diff --git a/pkg/converters/custom/custom_data_parser.go b/pkg/converters/custom/custom_data_parser.go new file mode 100644 index 00000000..1b71fbef --- /dev/null +++ b/pkg/converters/custom/custom_data_parser.go @@ -0,0 +1,8 @@ +package custom + +import "github.com/mayswind/ezbookkeeping/pkg/core" + +// CustomTransactionDataParser represents the parser for custom transaction data files +type CustomTransactionDataParser interface { + ParseDataLines(ctx core.Context, data []byte) ([][]string, error) +} diff --git a/pkg/converters/dsv/custom_transaction_data_dsv_file_importer.go 
b/pkg/converters/custom/custom_transaction_data_dsv_file_importer.go similarity index 95% rename from pkg/converters/dsv/custom_transaction_data_dsv_file_importer.go rename to pkg/converters/custom/custom_transaction_data_dsv_file_importer.go index 49a629e2..81b37b99 100644 --- a/pkg/converters/dsv/custom_transaction_data_dsv_file_importer.go +++ b/pkg/converters/custom/custom_transaction_data_dsv_file_importer.go @@ -1,4 +1,4 @@ -package dsv +package custom import ( "bytes" @@ -94,10 +94,6 @@ var customTransactionTypeNameMapping = map[models.TransactionType]string{ models.TRANSACTION_TYPE_TRANSFER: utils.IntToString(int(models.TRANSACTION_TYPE_TRANSFER)), } -type CustomTransactionDataDsvFileParser interface { - ParseDsvFileLines(ctx core.Context, data []byte) ([][]string, error) -} - // customTransactionDataDsvFileImporter defines the structure of custom dsv importer for transaction data type customTransactionDataDsvFileImporter struct { fileEncoding encoding.Encoding @@ -114,8 +110,8 @@ type customTransactionDataDsvFileImporter struct { transactionTagSeparator string } -// ParseDsvFileLines returns the parsed file lines for specified the dsv file data -func (c *customTransactionDataDsvFileImporter) ParseDsvFileLines(ctx core.Context, data []byte) ([][]string, error) { +// ParseDataLines returns the parsed file lines for specified the dsv file data +func (c *customTransactionDataDsvFileImporter) ParseDataLines(ctx core.Context, data []byte) ([][]string, error) { reader := transform.NewReader(bytes.NewReader(data), c.fileEncoding.NewDecoder()) csvReader := csv.NewReader(reader) csvReader.Comma = c.separator @@ -131,7 +127,7 @@ func (c *customTransactionDataDsvFileImporter) ParseDsvFileLines(ctx core.Contex } if err != nil { - log.Errorf(ctx, "[custom_transaction_data_dsv_file_importer.ParseDsvFileLines] cannot parse dsv data, because %s", err.Error()) + log.Errorf(ctx, "[custom_transaction_data_dsv_file_importer.ParseDataLines] cannot parse dsv data, because %s", 
err.Error()) return nil, errs.ErrInvalidCSVFile } @@ -151,7 +147,7 @@ func (c *customTransactionDataDsvFileImporter) ParseDsvFileLines(ctx core.Contex // ParseImportedData returns the imported data by parsing the custom transaction dsv data func (c *customTransactionDataDsvFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezone *time.Location, additionalOptions converter.TransactionDataImporterOptions, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) { - allLines, err := c.ParseDsvFileLines(ctx, data) + allLines, err := c.ParseDataLines(ctx, data) if err != nil { return nil, nil, nil, nil, nil, nil, err @@ -170,14 +166,18 @@ func IsDelimiterSeparatedValuesFileType(fileType string) bool { return exists } -// CreateNewCustomTransactionDataDsvFileParser returns a new custom dsv parser for transaction data -func CreateNewCustomTransactionDataDsvFileParser(fileType string, fileEncoding string) (CustomTransactionDataDsvFileParser, error) { +// CreateNewCustomTransactionDataDsvFileParser returns a new custom transaction data parser +func CreateNewCustomTransactionDataDsvFileParser(fileType string, fileEncoding string) (CustomTransactionDataParser, error) { separator, exists := supportedFileTypeSeparators[fileType] if !exists { return nil, errs.ErrImportFileTypeNotSupported } + if fileEncoding == "" { + return nil, errs.ErrImportFileEncodingIsEmpty + } + enc, exists := supportedFileEncodings[fileEncoding] if !exists { @@ -198,6 +198,10 @@ func CreateNewCustomTransactionDataDsvFileImporter(fileType string, fileEncoding 
return nil, errs.ErrImportFileTypeNotSupported } + if fileEncoding == "" { + return nil, errs.ErrImportFileEncodingIsEmpty + } + enc, exists := supportedFileEncodings[fileEncoding] if !exists { diff --git a/pkg/converters/dsv/custom_transaction_data_dsv_file_importer_test.go b/pkg/converters/custom/custom_transaction_data_dsv_file_importer_test.go similarity index 99% rename from pkg/converters/dsv/custom_transaction_data_dsv_file_importer_test.go rename to pkg/converters/custom/custom_transaction_data_dsv_file_importer_test.go index ee7a6f25..2305558d 100644 --- a/pkg/converters/dsv/custom_transaction_data_dsv_file_importer_test.go +++ b/pkg/converters/custom/custom_transaction_data_dsv_file_importer_test.go @@ -1,4 +1,4 @@ -package dsv +package custom import ( "testing" @@ -25,13 +25,13 @@ func TestIsDelimiterSeparatedValuesFileType(t *testing.T) { assert.False(t, IsDelimiterSeparatedValuesFileType("ssv")) } -func TestCustomTransactionDataDsvFileParser_ParseDsvFileLines(t *testing.T) { +func TestCustomTransactionDataDsvFileParser_ParseDataLines(t *testing.T) { importer, err := CreateNewCustomTransactionDataDsvFileParser("custom_csv", "utf-8") assert.Nil(t, err) context := core.NewNullContext() - allLines, err := importer.ParseDsvFileLines(context, []byte( + allLines, err := importer.ParseDataLines(context, []byte( "2024-09-01 00:00:00,B,123.45\n"+ "2024-09-01 01:23:45,I,0.12\n")) assert.Nil(t, err) @@ -51,7 +51,7 @@ func TestCustomTransactionDataDsvFileParser_ParseDsvFileLines(t *testing.T) { importer, err = CreateNewCustomTransactionDataDsvFileParser("custom_tsv", "utf-8") assert.Nil(t, err) - allLines, err = importer.ParseDsvFileLines(context, []byte( + allLines, err = importer.ParseDataLines(context, []byte( "2024-09-01 12:34:56\tE\t1.00\n"+ "2024-09-01 23:59:59\tT\t0.05")) assert.Nil(t, err) @@ -71,7 +71,7 @@ func TestCustomTransactionDataDsvFileParser_ParseDsvFileLines(t *testing.T) { importer, err = CreateNewCustomTransactionDataDsvFileParser("custom_ssv", 
"utf-8") assert.Nil(t, err) - allLines, err = importer.ParseDsvFileLines(context, []byte( + allLines, err = importer.ParseDataLines(context, []byte( "2024-09-01 12:34:56;E;1.00\n"+ "2024-09-01 23:59:59;T;0.05")) assert.Nil(t, err) diff --git a/pkg/converters/custom/custom_transaction_data_excel_file_importer.go b/pkg/converters/custom/custom_transaction_data_excel_file_importer.go new file mode 100644 index 00000000..38d2f2b3 --- /dev/null +++ b/pkg/converters/custom/custom_transaction_data_excel_file_importer.go @@ -0,0 +1,137 @@ +package custom + +import ( + "strings" + "time" + + "github.com/mayswind/ezbookkeeping/pkg/converters/converter" + csvconverter "github.com/mayswind/ezbookkeeping/pkg/converters/csv" + "github.com/mayswind/ezbookkeeping/pkg/converters/datatable" + "github.com/mayswind/ezbookkeeping/pkg/converters/excel" + "github.com/mayswind/ezbookkeeping/pkg/core" + "github.com/mayswind/ezbookkeeping/pkg/errs" + "github.com/mayswind/ezbookkeeping/pkg/models" +) + +const customOOXMLExcelFileType = "custom_xlsx" +const customMSCFBExcelFileType = "custom_xls" + +// customTransactionDataExcelFileImporter defines the structure of custom excel importer for transaction data +type customTransactionDataExcelFileImporter struct { + fileType string + columnIndexMapping map[datatable.TransactionDataTableColumn]int + transactionTypeNameMapping map[string]models.TransactionType + hasHeaderLine bool + timeFormat string + timezoneFormat string + amountDecimalSeparator string + amountDigitGroupingSymbol string + geoLocationSeparator string + geoLocationOrder converter.TransactionGeoLocationOrder + transactionTagSeparator string +} + +// ParseDataLines returns the parsed file lines for specified the excel file data +func (c *customTransactionDataExcelFileImporter) ParseDataLines(ctx core.Context, data []byte) ([][]string, error) { + var excelDataTable datatable.BasicDataTable + var err error + + if c.fileType == customOOXMLExcelFileType { + excelDataTable, err = 
excel.CreateNewExcelOOXMLFileBasicDataTable(data, false) + } else if c.fileType == customMSCFBExcelFileType { + excelDataTable, err = excel.CreateNewExcelMSCFBFileBasicDataTable(data, false) + } else { + return nil, errs.ErrImportFileTypeNotSupported + } + + if err != nil { + return nil, err + } + + iterator := excelDataTable.DataRowIterator() + allLines := make([][]string, 0) + + for iterator.HasNext() { + row := iterator.Next() + items := make([]string, row.ColumnCount()) + + for i := 0; i < row.ColumnCount(); i++ { + items[i] = strings.Trim(row.GetData(i), " ") + } + + allLines = append(allLines, items) + } + + return allLines, nil +} + +// ParseImportedData returns the imported data by parsing the custom transaction excel data +func (c *customTransactionDataExcelFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezone *time.Location, additionalOptions converter.TransactionDataImporterOptions, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) { + allLines, err := c.ParseDataLines(ctx, data) + + if err != nil { + return nil, nil, nil, nil, nil, nil, err + } + + dataTable := csvconverter.CreateNewCustomCsvBasicDataTable(allLines, c.hasHeaderLine) + transactionDataTable := CreateNewCustomPlainTextDataTable(dataTable, c.columnIndexMapping, c.transactionTypeNameMapping, c.timeFormat, c.timezoneFormat, c.amountDecimalSeparator, c.amountDigitGroupingSymbol) + dataTableImporter := converter.CreateNewImporterWithTypeNameMapping(customTransactionTypeNameMapping, c.geoLocationSeparator, c.geoLocationOrder,
c.transactionTagSeparator) + + return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezone, additionalOptions, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap) +} + +// IsCustomExcelFileType returns whether the file type is the custom excel file type +func IsCustomExcelFileType(fileType string) bool { + return fileType == customOOXMLExcelFileType || fileType == customMSCFBExcelFileType +} + +// CreateNewCustomTransactionDataExcelFileParser returns a new custom transaction data parser +func CreateNewCustomTransactionDataExcelFileParser(fileType string) (CustomTransactionDataParser, error) { + if fileType != customOOXMLExcelFileType && fileType != customMSCFBExcelFileType { + return nil, errs.ErrImportFileTypeNotSupported + } + + return &customTransactionDataExcelFileImporter{ + fileType: fileType, + }, nil +} + +// CreateNewCustomTransactionDataExcelFileImporter returns a new custom excel importer for transaction data +func CreateNewCustomTransactionDataExcelFileImporter(fileType string, columnIndexMapping map[datatable.TransactionDataTableColumn]int, transactionTypeNameMapping map[string]models.TransactionType, hasHeaderLine bool, timeFormat string, timezoneFormat string, amountDecimalSeparator string, amountDigitGroupingSymbol string, geoLocationSeparator string, geoLocationOrder string, transactionTagSeparator string) (converter.TransactionDataImporter, error) { + if fileType != customOOXMLExcelFileType && fileType != customMSCFBExcelFileType { + return nil, errs.ErrImportFileTypeNotSupported + } + + if geoLocationOrder == "" { + geoLocationOrder = string(converter.TRANSACTION_GEO_LOCATION_ORDER_LONGITUDE_LATITUDE) + } else if geoLocationOrder != string(converter.TRANSACTION_GEO_LOCATION_ORDER_LONGITUDE_LATITUDE) && + geoLocationOrder != string(converter.TRANSACTION_GEO_LOCATION_ORDER_LATITUDE_LONGITUDE) { + return nil, errs.ErrImportFileTypeNotSupported + } + + if _, exists := 
columnIndexMapping[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIME]; !exists { + return nil, errs.ErrMissingRequiredFieldInHeaderRow + } + + if _, exists := columnIndexMapping[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE]; !exists { + return nil, errs.ErrMissingRequiredFieldInHeaderRow + } + + if _, exists := columnIndexMapping[datatable.TRANSACTION_DATA_TABLE_AMOUNT]; !exists { + return nil, errs.ErrMissingRequiredFieldInHeaderRow + } + + return &customTransactionDataExcelFileImporter{ + fileType: fileType, + columnIndexMapping: columnIndexMapping, + transactionTypeNameMapping: transactionTypeNameMapping, + hasHeaderLine: hasHeaderLine, + timeFormat: timeFormat, + timezoneFormat: timezoneFormat, + amountDecimalSeparator: amountDecimalSeparator, + amountDigitGroupingSymbol: amountDigitGroupingSymbol, + geoLocationSeparator: geoLocationSeparator, + geoLocationOrder: converter.TransactionGeoLocationOrder(geoLocationOrder), + transactionTagSeparator: transactionTagSeparator, + }, nil +} diff --git a/pkg/converters/custom/custom_transaction_data_excel_file_importer_test.go b/pkg/converters/custom/custom_transaction_data_excel_file_importer_test.go new file mode 100644 index 00000000..cdc5b470 --- /dev/null +++ b/pkg/converters/custom/custom_transaction_data_excel_file_importer_test.go @@ -0,0 +1,254 @@ +package custom + +import ( + "os" + "testing" + + "github.com/mayswind/ezbookkeeping/pkg/core" + "github.com/mayswind/ezbookkeeping/pkg/errs" + "github.com/stretchr/testify/assert" +) + +func TestIsCustomExcelFileType(t *testing.T) { + assert.True(t, IsCustomExcelFileType("custom_xlsx")) + assert.True(t, IsCustomExcelFileType("custom_xls")) + + assert.False(t, IsCustomExcelFileType("xlsx")) + assert.False(t, IsCustomExcelFileType("xls")) + assert.False(t, IsCustomExcelFileType("excel")) +} + +func TestCustomTransactionDataParser_ParseOOXMLExcelDataLines_EmptyData(t *testing.T) { + importer, err := CreateNewCustomTransactionDataExcelFileParser("custom_xlsx") + 
assert.Nil(t, err) + + context := core.NewNullContext() + + testdata, err := os.ReadFile("../../../testdata/empty_excel_file.xlsx") + assert.Nil(t, err) + + allLines, err := importer.ParseDataLines(context, testdata) + assert.Nil(t, err) + + assert.Equal(t, 0, len(allLines)) +} + +func TestCustomTransactionDataParser_ParseOOXMLExcelDataLines_SingleSheet(t *testing.T) { + importer, err := CreateNewCustomTransactionDataExcelFileParser("custom_xlsx") + assert.Nil(t, err) + + context := core.NewNullContext() + + testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xlsx") + assert.Nil(t, err) + + allLines, err := importer.ParseDataLines(context, testdata) + assert.Nil(t, err) + + assert.Equal(t, 3, len(allLines)) + + assert.Equal(t, 3, len(allLines[0])) + assert.Equal(t, "A1", allLines[0][0]) + assert.Equal(t, "B1", allLines[0][1]) + assert.Equal(t, "C1", allLines[0][2]) + + assert.Equal(t, 3, len(allLines[1])) + assert.Equal(t, "A2", allLines[1][0]) + assert.Equal(t, "B2", allLines[1][1]) + assert.Equal(t, "C2", allLines[1][2]) + + assert.Equal(t, 3, len(allLines[2])) + assert.Equal(t, "A3", allLines[2][0]) + assert.Equal(t, "B3", allLines[2][1]) + assert.Equal(t, "C3", allLines[2][2]) +} + +func TestCustomTransactionDataParser_ParseOOXMLExcelDataLines_MultipleSheet(t *testing.T) { + importer, err := CreateNewCustomTransactionDataExcelFileParser("custom_xlsx") + assert.Nil(t, err) + + context := core.NewNullContext() + + testdata, err := os.ReadFile("../../../testdata/multiple_sheets_excel_file.xlsx") + assert.Nil(t, err) + + allLines, err := importer.ParseDataLines(context, testdata) + assert.Nil(t, err) + + assert.Equal(t, 9, len(allLines)) + + assert.Equal(t, 3, len(allLines[0])) + assert.Equal(t, "A1", allLines[0][0]) + assert.Equal(t, "B1", allLines[0][1]) + assert.Equal(t, "C1", allLines[0][2]) + + assert.Equal(t, 3, len(allLines[1])) + assert.Equal(t, "1-A2", allLines[1][0]) + assert.Equal(t, "1-B2", allLines[1][1]) + assert.Equal(t, "1-C2", 
allLines[1][2]) + + assert.Equal(t, 3, len(allLines[2])) + assert.Equal(t, "1-A3", allLines[2][0]) + assert.Equal(t, "1-B3", allLines[2][1]) + assert.Equal(t, "1-C3", allLines[2][2]) + + assert.Equal(t, 3, len(allLines[3])) + assert.Equal(t, "A1", allLines[3][0]) + assert.Equal(t, "B1", allLines[3][1]) + assert.Equal(t, "C1", allLines[3][2]) + + assert.Equal(t, 2, len(allLines[4])) + assert.Equal(t, "3-A2", allLines[4][0]) + assert.Equal(t, "3-B2", allLines[4][1]) + + assert.Equal(t, 3, len(allLines[5])) + assert.Equal(t, "A1", allLines[5][0]) + assert.Equal(t, "B1", allLines[5][1]) + assert.Equal(t, "C1", allLines[5][2]) + + assert.Equal(t, 3, len(allLines[6])) + assert.Equal(t, "A1", allLines[6][0]) + assert.Equal(t, "B1", allLines[6][1]) + assert.Equal(t, "C1", allLines[6][2]) + + assert.Equal(t, 3, len(allLines[7])) + assert.Equal(t, "5-A2", allLines[7][0]) + assert.Equal(t, "5-B2", allLines[7][1]) + assert.Equal(t, "5-C2", allLines[7][2]) + + assert.Equal(t, 3, len(allLines[8])) + assert.Equal(t, "5-A3", allLines[8][0]) + assert.Equal(t, "5-B3", allLines[8][1]) + assert.Equal(t, "5-C3", allLines[8][2]) +} + +func TestCustomTransactionDataParser_ParseOOXMLExcelDataLines_MultipleSheetWithDifferentColumnCount(t *testing.T) { + importer, err := CreateNewCustomTransactionDataExcelFileParser("custom_xlsx") + assert.Nil(t, err) + + context := core.NewNullContext() + + testdata, err := os.ReadFile("../../../testdata/multiple_sheets_with_different_header_row_excel_file.xlsx") + assert.Nil(t, err) + + _, err = importer.ParseDataLines(context, testdata) + assert.EqualError(t, err, errs.ErrFieldsInMultiTableAreDifferent.Message) +} + +func TestCustomTransactionDataParser_ParseMSCFBExcelDataLines_EmptyData(t *testing.T) { + importer, err := CreateNewCustomTransactionDataExcelFileParser("custom_xls") + assert.Nil(t, err) + + context := core.NewNullContext() + + testdata, err := os.ReadFile("../../../testdata/empty_excel_file.xls") + assert.Nil(t, err) + + allLines, err := 
importer.ParseDataLines(context, testdata) + assert.Nil(t, err) + + assert.Equal(t, 0, len(allLines)) +} + +func TestCustomTransactionDataParser_ParseMSCFBExcelDataLines_SingleSheet(t *testing.T) { + importer, err := CreateNewCustomTransactionDataExcelFileParser("custom_xls") + assert.Nil(t, err) + + context := core.NewNullContext() + + testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xls") + assert.Nil(t, err) + + allLines, err := importer.ParseDataLines(context, testdata) + assert.Nil(t, err) + + assert.Equal(t, 3, len(allLines)) + + assert.Equal(t, 3, len(allLines[0])) + assert.Equal(t, "A1", allLines[0][0]) + assert.Equal(t, "B1", allLines[0][1]) + assert.Equal(t, "C1", allLines[0][2]) + + assert.Equal(t, 3, len(allLines[1])) + assert.Equal(t, "A2", allLines[1][0]) + assert.Equal(t, "B2", allLines[1][1]) + assert.Equal(t, "C2", allLines[1][2]) + + assert.Equal(t, 3, len(allLines[2])) + assert.Equal(t, "A3", allLines[2][0]) + assert.Equal(t, "B3", allLines[2][1]) + assert.Equal(t, "C3", allLines[2][2]) +} + +func TestCustomTransactionDataParser_ParseMSCFBExcelDataLines_MultipleSheet(t *testing.T) { + importer, err := CreateNewCustomTransactionDataExcelFileParser("custom_xls") + assert.Nil(t, err) + + context := core.NewNullContext() + + testdata, err := os.ReadFile("../../../testdata/multiple_sheets_excel_file.xls") + assert.Nil(t, err) + + allLines, err := importer.ParseDataLines(context, testdata) + assert.Nil(t, err) + + assert.Equal(t, 9, len(allLines)) + + assert.Equal(t, 3, len(allLines[0])) + assert.Equal(t, "A1", allLines[0][0]) + assert.Equal(t, "B1", allLines[0][1]) + assert.Equal(t, "C1", allLines[0][2]) + + assert.Equal(t, 3, len(allLines[1])) + assert.Equal(t, "1-A2", allLines[1][0]) + assert.Equal(t, "1-B2", allLines[1][1]) + assert.Equal(t, "1-C2", allLines[1][2]) + + assert.Equal(t, 3, len(allLines[2])) + assert.Equal(t, "1-A3", allLines[2][0]) + assert.Equal(t, "1-B3", allLines[2][1]) + assert.Equal(t, "1-C3", allLines[2][2]) + 
+ assert.Equal(t, 3, len(allLines[3])) + assert.Equal(t, "A1", allLines[3][0]) + assert.Equal(t, "B1", allLines[3][1]) + assert.Equal(t, "C1", allLines[3][2]) + + assert.Equal(t, 3, len(allLines[4])) + assert.Equal(t, "3-A2", allLines[4][0]) + assert.Equal(t, "3-B2", allLines[4][1]) + assert.Equal(t, "", allLines[4][2]) + + assert.Equal(t, 3, len(allLines[5])) + assert.Equal(t, "A1", allLines[5][0]) + assert.Equal(t, "B1", allLines[5][1]) + assert.Equal(t, "C1", allLines[5][2]) + + assert.Equal(t, 3, len(allLines[6])) + assert.Equal(t, "A1", allLines[6][0]) + assert.Equal(t, "B1", allLines[6][1]) + assert.Equal(t, "C1", allLines[6][2]) + + assert.Equal(t, 3, len(allLines[7])) + assert.Equal(t, "5-A2", allLines[7][0]) + assert.Equal(t, "5-B2", allLines[7][1]) + assert.Equal(t, "5-C2", allLines[7][2]) + + assert.Equal(t, 3, len(allLines[8])) + assert.Equal(t, "5-A3", allLines[8][0]) + assert.Equal(t, "5-B3", allLines[8][1]) + assert.Equal(t, "5-C3", allLines[8][2]) +} + +func TestCustomTransactionDataParser_ParseMSCFBExcelDataLines_MultipleSheetWithDifferentColumnCount(t *testing.T) { + importer, err := CreateNewCustomTransactionDataExcelFileParser("custom_xls") + assert.Nil(t, err) + + context := core.NewNullContext() + + testdata, err := os.ReadFile("../../../testdata/multiple_sheets_with_different_header_row_excel_file.xls") + assert.Nil(t, err) + + _, err = importer.ParseDataLines(context, testdata) + assert.EqualError(t, err, errs.ErrFieldsInMultiTableAreDifferent.Message) +} diff --git a/pkg/converters/dsv/custom_transaction_plain_text_data_table.go b/pkg/converters/custom/custom_transaction_plain_text_data_table.go similarity index 99% rename from pkg/converters/dsv/custom_transaction_plain_text_data_table.go rename to pkg/converters/custom/custom_transaction_plain_text_data_table.go index b7f69945..62d2982d 100644 --- a/pkg/converters/dsv/custom_transaction_plain_text_data_table.go +++ b/pkg/converters/custom/custom_transaction_plain_text_data_table.go @@ 
-1,4 +1,4 @@ -package dsv +package custom import ( "strings" diff --git a/pkg/converters/transaction_data_converters.go b/pkg/converters/transaction_data_converters.go index e4addbc4..4d421995 100644 --- a/pkg/converters/transaction_data_converters.go +++ b/pkg/converters/transaction_data_converters.go @@ -5,9 +5,9 @@ import ( "github.com/mayswind/ezbookkeeping/pkg/converters/beancount" "github.com/mayswind/ezbookkeeping/pkg/converters/camt" "github.com/mayswind/ezbookkeeping/pkg/converters/converter" + "github.com/mayswind/ezbookkeeping/pkg/converters/custom" "github.com/mayswind/ezbookkeeping/pkg/converters/datatable" "github.com/mayswind/ezbookkeeping/pkg/converters/default" - "github.com/mayswind/ezbookkeeping/pkg/converters/dsv" "github.com/mayswind/ezbookkeeping/pkg/converters/feidee" "github.com/mayswind/ezbookkeeping/pkg/converters/fireflyIII" "github.com/mayswind/ezbookkeeping/pkg/converters/gnucash" @@ -85,17 +85,29 @@ func GetTransactionDataImporter(fileType string) (converter.TransactionDataImpor } } -// IsCustomDelimiterSeparatedValuesFileType returns whether the file type is the delimiter-separated values file type -func IsCustomDelimiterSeparatedValuesFileType(fileType string) bool { - return dsv.IsDelimiterSeparatedValuesFileType(fileType) +// IsCustomFileFormatFileType returns whether the file type is the custom file format +func IsCustomFileFormatFileType(fileType string) bool { + return custom.IsDelimiterSeparatedValuesFileType(fileType) || custom.IsCustomExcelFileType(fileType) } -// CreateNewDelimiterSeparatedValuesDataParser returns a new delimiter-separated values data parser according to the file type and encoding -func CreateNewDelimiterSeparatedValuesDataParser(fileType string, fileEncoding string) (dsv.CustomTransactionDataDsvFileParser, error) { - return dsv.CreateNewCustomTransactionDataDsvFileParser(fileType, fileEncoding) +// CreateNewCustomFileFormatTransactionDataParser returns a new custom transaction data parser according to the 
file type and encoding +func CreateNewCustomFileFormatTransactionDataParser(fileType string, fileEncoding string) (custom.CustomTransactionDataParser, error) { + if custom.IsDelimiterSeparatedValuesFileType(fileType) { + return custom.CreateNewCustomTransactionDataDsvFileParser(fileType, fileEncoding) + } else if custom.IsCustomExcelFileType(fileType) { + return custom.CreateNewCustomTransactionDataExcelFileParser(fileType) + } else { + return nil, errs.ErrImportFileTypeNotSupported + } } -// CreateNewDelimiterSeparatedValuesDataImporter returns a new delimiter-separated values data importer according to the file type and encoding -func CreateNewDelimiterSeparatedValuesDataImporter(fileType string, fileEncoding string, columnIndexMapping map[datatable.TransactionDataTableColumn]int, transactionTypeNameMapping map[string]models.TransactionType, hasHeaderLine bool, timeFormat string, timezoneFormat string, amountDecimalSeparator string, amountDigitGroupingSymbol string, geoLocationSeparator string, geoLocationOrder string, transactionTagSeparator string) (converter.TransactionDataImporter, error) { - return dsv.CreateNewCustomTransactionDataDsvFileImporter(fileType, fileEncoding, columnIndexMapping, transactionTypeNameMapping, hasHeaderLine, timeFormat, timezoneFormat, amountDecimalSeparator, amountDigitGroupingSymbol, geoLocationSeparator, geoLocationOrder, transactionTagSeparator) +// CreateNewCustomTransactionDataImporter returns a new custom transaction data importer according to the file type and encoding +func CreateNewCustomTransactionDataImporter(fileType string, fileEncoding string, columnIndexMapping map[datatable.TransactionDataTableColumn]int, transactionTypeNameMapping map[string]models.TransactionType, hasHeaderLine bool, timeFormat string, timezoneFormat string, amountDecimalSeparator string, amountDigitGroupingSymbol string, geoLocationSeparator string, geoLocationOrder string, transactionTagSeparator string) (converter.TransactionDataImporter, error) 
{ + if custom.IsDelimiterSeparatedValuesFileType(fileType) { + return custom.CreateNewCustomTransactionDataDsvFileImporter(fileType, fileEncoding, columnIndexMapping, transactionTypeNameMapping, hasHeaderLine, timeFormat, timezoneFormat, amountDecimalSeparator, amountDigitGroupingSymbol, geoLocationSeparator, geoLocationOrder, transactionTagSeparator) + } else if custom.IsCustomExcelFileType(fileType) { + return custom.CreateNewCustomTransactionDataExcelFileImporter(fileType, columnIndexMapping, transactionTypeNameMapping, hasHeaderLine, timeFormat, timezoneFormat, amountDecimalSeparator, amountDigitGroupingSymbol, geoLocationSeparator, geoLocationOrder, transactionTagSeparator) + } else { + return nil, errs.ErrImportFileTypeNotSupported + } } diff --git a/src/consts/api.ts b/src/consts/api.ts index 95b8be2e..506d315b 100644 --- a/src/consts/api.ts +++ b/src/consts/api.ts @@ -78,7 +78,7 @@ export const SPECIFIED_API_NOT_FOUND_ERRORS: Record = '/api/v1/users/2fa/recovery/regenerate.json': { message: 'Two-factor authentication is disabled' }, - '/api/v1/transactions/parse_dsv_file.json': { + '/api/v1/transactions/parse_custom_file.json': { message: 'Transaction importing is disabled' }, '/api/v1/transactions/parse_import.json': { diff --git a/src/consts/file.ts b/src/consts/file.ts index 60735a3d..1095cc14 100644 --- a/src/consts/file.ts +++ b/src/consts/file.ts @@ -180,7 +180,28 @@ export const SUPPORTED_IMPORT_FILE_CATEGORY_AND_TYPES: ImportFileCategoryAndType supportMultiLanguages: true, anchor: 'how-to-import-delimiter-separated-values-dsv-file-or-data' } - } + }, + { + type: 'excel', + name: 'Excel Workbook File', + extensions: '.xlsx,.xls', + subTypes: [ + { + type: 'custom_xlsx', + name: 'Excel Workbook File (.xlsx)', + extensions: '.xlsx', + }, + { + type: 'custom_xls', + name: 'Excel 97-2003 Workbook File (.xls)', + extensions: '.xls', + } + ], + document: { + supportMultiLanguages: true, + anchor: 'how-to-import-delimiter-separated-values-dsv-file-or-data' 
+ } + }, ] }, { diff --git a/src/core/datetime.ts b/src/core/datetime.ts index 6d16087f..8046b036 100644 --- a/src/core/datetime.ts +++ b/src/core/datetime.ts @@ -570,6 +570,9 @@ export class KnownDateTimeFormat { public static readonly YYYYMMDD = new KnownDateTimeFormat('YYYYMMDD', DateFormatOrder.YMD, /^\d{4}(0[1-9]|1[0-2])(0[1-9]|[1-2][0-9]|3[0-1])$/); + public static readonly MMDDYYDash = new KnownDateTimeFormat('MM-DD-YY', DateFormatOrder.MDY, /^(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])-\d{2}$/); + public static readonly MMDDYYSlash = new KnownDateTimeFormat('MM/DD/YY', DateFormatOrder.MDY, /^(0[1-9]|1[0-2])\/(0[1-9]|[1-2][0-9]|3[0-1])\/\d{2}$/); + public readonly format: string; public readonly type: DateFormatOrder; private readonly regex: RegExp; diff --git a/src/lib/services.ts b/src/lib/services.ts index 783eaece..f746fabf 100644 --- a/src/lib/services.ts +++ b/src/lib/services.ts @@ -617,8 +617,8 @@ export default { deleteTransaction: (req: TransactionDeleteRequest): ApiResponsePromise => { return axios.post>('v1/transactions/delete.json', req); }, - parseImportDsvFile: ({ fileType, fileEncoding, importFile }: { fileType: string, fileEncoding?: string, importFile: File }): ApiResponsePromise => { - return axios.postForm>('v1/transactions/parse_dsv_file.json', { + parseImportCustomFile: ({ fileType, fileEncoding, importFile }: { fileType: string, fileEncoding?: string, importFile: File }): ApiResponsePromise => { + return axios.postForm>('v1/transactions/parse_custom_file.json', { fileType: fileType, fileEncoding: fileEncoding, file: importFile diff --git a/src/locales/de.json b/src/locales/de.json index 9e08fb8d..42852db4 100644 --- a/src/locales/de.json +++ b/src/locales/de.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "Other Finance App File Format", "ezbookkeeping Data Export File": "ezBookkeeping-Datenexportdatei", "Excel Workbook File": "Excel Workbook File", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 
97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "Open Financial Exchange (OFX)-Datei", "Quicken Financial Exchange (QFX) File": "Quicken Financial Exchange (QFX)-Datei", "Quicken Interchange Format (QIF) File": "Quicken Interchange Format (QIF)-Datei", diff --git a/src/locales/en.json b/src/locales/en.json index 965439fc..49c82408 100644 --- a/src/locales/en.json +++ b/src/locales/en.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "Other Finance App File Format", "ezbookkeeping Data Export File": "ezbookkeeping Data Export File", "Excel Workbook File": "Excel Workbook File", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "Open Financial Exchange (OFX) File", "Quicken Financial Exchange (QFX) File": "Quicken Financial Exchange (QFX) File", "Quicken Interchange Format (QIF) File": "Quicken Interchange Format (QIF) File", diff --git a/src/locales/es.json b/src/locales/es.json index f7cce913..04bd56cd 100644 --- a/src/locales/es.json +++ b/src/locales/es.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "Otro Formato de Archivo de Aplicación Financiera", "ezbookkeeping Data Export File": "Datos exportados de ezBookkeeping", "Excel Workbook File": "Archivo Excel", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "Archivo OFX (Open Financial Exchange)", "Quicken Financial Exchange (QFX) File": "Archivo QFX (Quicken Financial Exchange)", "Quicken Interchange Format (QIF) File": "Archivo QIF (Quicken Interchange Format)", diff --git a/src/locales/fr.json b/src/locales/fr.json index 228149f2..fe4e1626 100644 --- a/src/locales/fr.json +++ b/src/locales/fr.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "Format de fichier 
d'autre app financière", "ezbookkeeping Data Export File": "Fichier d'exportation de données ezbookkeeping", "Excel Workbook File": "Fichier de classeur Excel", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "Fichier Open Financial Exchange (OFX)", "Quicken Financial Exchange (QFX) File": "Fichier Quicken Financial Exchange (QFX)", "Quicken Interchange Format (QIF) File": "Fichier Quicken Interchange Format (QIF)", diff --git a/src/locales/it.json b/src/locales/it.json index 1d577d59..761059fc 100644 --- a/src/locales/it.json +++ b/src/locales/it.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "Other Finance App File Format", "ezbookkeeping Data Export File": "File esportazione dati ezBookkeeping", "Excel Workbook File": "Excel Workbook File", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "File Open Financial Exchange (OFX)", "Quicken Financial Exchange (QFX) File": "File Quicken Financial Exchange (QFX)", "Quicken Interchange Format (QIF) File": "File Quicken Interchange Format (QIF)", diff --git a/src/locales/ja.json b/src/locales/ja.json index 18d77033..08e28fb7 100644 --- a/src/locales/ja.json +++ b/src/locales/ja.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "Other Finance App File Format", "ezbookkeeping Data Export File": "ezbookkeepingデータエクスポートファイル", "Excel Workbook File": "Excel Workbook File", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "Open Financial Exchange (OFX) ファイル", "Quicken Financial Exchange (QFX) File": "Quicken Financial Exchange (QFX) ファイル", "Quicken Interchange Format (QIF) File": "Quicken Interchange Format (QIF) ファイル", 
diff --git a/src/locales/kn.json b/src/locales/kn.json index d2b146bb..c6d27100 100644 --- a/src/locales/kn.json +++ b/src/locales/kn.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "ಇತರೆ ಹಣಕಾಸು ಅಪ್ ಫೈಲ್ ರೂಪ", "ezbookkeeping Data Export File": "ezBookkeeping ಡೇಟಾ ರಫ್ತು ಫೈಲ್", "Excel Workbook File": "Excel ವರ್ಕ್‌ಬುಕ್ ಫೈಲ್", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "Open Financial Exchange (OFX) ಫೈಲ್", "Quicken Financial Exchange (QFX) File": "Quicken Financial Exchange (QFX) ಫೈಲ್", "Quicken Interchange Format (QIF) File": "Quicken Interchange Format (QIF) ಫೈಲ್", diff --git a/src/locales/ko.json b/src/locales/ko.json index e07e9d53..66d5df8a 100644 --- a/src/locales/ko.json +++ b/src/locales/ko.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "기타 금융 앱 파일 형식", "ezbookkeeping Data Export File": "ezbookkeeping 데이터 내보내기 파일", "Excel Workbook File": "Excel 통합 문서 파일", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "Open Financial Exchange (OFX) 파일", "Quicken Financial Exchange (QFX) File": "Quicken Financial Exchange (QFX) 파일", "Quicken Interchange Format (QIF) File": "Quicken Interchange Format (QIF) 파일", diff --git a/src/locales/nl.json b/src/locales/nl.json index 93579f62..939aea82 100644 --- a/src/locales/nl.json +++ b/src/locales/nl.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "Other Finance App File Format", "ezbookkeeping Data Export File": "ezBookkeeping-gegevensexportbestand", "Excel Workbook File": "Excel-werkmap", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "Open Financial Exchange (OFX)-bestand", "Quicken Financial Exchange 
(QFX) File": "Quicken Financial Exchange (QFX)-bestand", "Quicken Interchange Format (QIF) File": "Quicken Interchange Format (QIF)-bestand", diff --git a/src/locales/pt_BR.json b/src/locales/pt_BR.json index 11b36682..e45fa8a5 100644 --- a/src/locales/pt_BR.json +++ b/src/locales/pt_BR.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "Other Finance App File Format", "ezbookkeeping Data Export File": "Arquivo de Exportação de Dados ezbookkeeping", "Excel Workbook File": "Excel Workbook File", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "Arquivo Open Financial Exchange (OFX)", "Quicken Financial Exchange (QFX) File": "Arquivo Quicken Financial Exchange (QFX)", "Quicken Interchange Format (QIF) File": "Arquivo Quicken Interchange Format (QIF)", diff --git a/src/locales/ru.json b/src/locales/ru.json index 25ddd460..d20fafdf 100644 --- a/src/locales/ru.json +++ b/src/locales/ru.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "Формат файла другого финансового приложения", "ezbookkeeping Data Export File": "Файл экспорта данных ezbookkeeping", "Excel Workbook File": "Файл рабочей книги Excel", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "Файл Open Financial Exchange (OFX)", "Quicken Financial Exchange (QFX) File": "Файл Quicken Financial Exchange (QFX)", "Quicken Interchange Format (QIF) File": "Файл Quicken Interchange Format (QIF)", diff --git a/src/locales/sl.json b/src/locales/sl.json index d723326b..dde67756 100644 --- a/src/locales/sl.json +++ b/src/locales/sl.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "Format datoteke druge finančne aplikacije", "ezbookkeeping Data Export File": "ezbookkeeping datoteka za izvoz podatkov", "Excel Workbook File": "Excelova 
delovni zvezek", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "Open Financial Exchange (OFX) datoteka", "Quicken Financial Exchange (QFX) File": "Quicken Financial Exchange (QFX) datoteka", "Quicken Interchange Format (QIF) File": "Quicken Interchange Format (QIF) datoteka", diff --git a/src/locales/ta.json b/src/locales/ta.json index 46f1aa13..49498d50 100644 --- a/src/locales/ta.json +++ b/src/locales/ta.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "இதர நிதி ஆப் கோப்பு வடிவம்", "ezbookkeeping Data Export File": "ezBookkeeping தரவு ஏற்றுமதி கோப்பு", "Excel Workbook File": "Excel வேலை‌புத்தகம் கோப்பு", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "Open Financial Exchange (OFX) கோப்பு", "Quicken Financial Exchange (QFX) File": "Quicken Financial Exchange (QFX) கோப்பு", "Quicken Interchange Format (QIF) File": "Quicken Interchange Format (QIF) கோப்பு", diff --git a/src/locales/th.json b/src/locales/th.json index cd134e3d..09229028 100644 --- a/src/locales/th.json +++ b/src/locales/th.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "รูปแบบไฟแอปการเงินอื่น", "ezbookkeeping Data Export File": "ไฟล์ส่งออกข้อมูล ezBookkeeping", "Excel Workbook File": "ไฟล์ Excel", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "ไฟล์ Open Financial Exchange (OFX)", "Quicken Financial Exchange (QFX) File": "ไฟล์ Quicken Financial Exchange (QFX)", "Quicken Interchange Format (QIF) File": "ไฟล์ Quicken Interchange Format (QIF)", diff --git a/src/locales/tr.json b/src/locales/tr.json index 61631726..597ee46e 100644 --- a/src/locales/tr.json +++ b/src/locales/tr.json @@ 
-1974,6 +1974,8 @@ "Other Finance App File Format": "Diğer Finans Uygulaması Dosya Formatı", "ezbookkeeping Data Export File": "ezBookkeeping Veri Dışa Aktarım Dosyası", "Excel Workbook File": "Excel Çalışma Kitabı Dosyası", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "Open Financial Exchange (OFX) Dosyası", "Quicken Financial Exchange (QFX) File": "Quicken Financial Exchange (QFX) Dosyası", "Quicken Interchange Format (QIF) File": "Quicken Interchange Format (QIF) Dosyası", diff --git a/src/locales/uk.json b/src/locales/uk.json index b9238000..7981904d 100644 --- a/src/locales/uk.json +++ b/src/locales/uk.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "Other Finance App File Format", "ezbookkeeping Data Export File": "Файл експорту даних ezbookkeeping", "Excel Workbook File": "Excel Workbook File", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "Файл Open Financial Exchange (OFX)", "Quicken Financial Exchange (QFX) File": "Файл Quicken Financial Exchange (QFX)", "Quicken Interchange Format (QIF) File": "Файл Quicken Interchange Format (QIF)", diff --git a/src/locales/vi.json b/src/locales/vi.json index 63a1043e..e94b1907 100644 --- a/src/locales/vi.json +++ b/src/locales/vi.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "Other Finance App File Format", "ezbookkeeping Data Export File": "Tệp xuất dữ liệu ezbookkeeping", "Excel Workbook File": "Excel Workbook File", + "Excel Workbook File (.xlsx)": "Excel Workbook File (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 Workbook File (.xls)", "Open Financial Exchange (OFX) File": "Tệp Open Financial Exchange (OFX)", "Quicken Financial Exchange (QFX) File": "Tệp Quicken Financial Exchange (QFX)", "Quicken 
Interchange Format (QIF) File": "Tệp Quicken Interchange Format (QIF)", diff --git a/src/locales/zh_Hans.json b/src/locales/zh_Hans.json index 4802ccb0..862083ca 100644 --- a/src/locales/zh_Hans.json +++ b/src/locales/zh_Hans.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "其他金融应用文件格式", "ezbookkeeping Data Export File": "ezbookkeeping 数据导出文件", "Excel Workbook File": "Excel 工作簿文件", + "Excel Workbook File (.xlsx)": "Excel 工作簿文件 (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 工作簿文件 (.xls)", "Open Financial Exchange (OFX) File": "开放式金融交换 (OFX) 文件", "Quicken Financial Exchange (QFX) File": "Quicken Financial Exchange (QFX) 文件", "Quicken Interchange Format (QIF) File": "Quicken Interchange Format (QIF) 文件", diff --git a/src/locales/zh_Hant.json b/src/locales/zh_Hant.json index 869df0c6..d4b3f7b1 100644 --- a/src/locales/zh_Hant.json +++ b/src/locales/zh_Hant.json @@ -1974,6 +1974,8 @@ "Other Finance App File Format": "其他金融應用程式檔案格式", "ezbookkeeping Data Export File": "ezbookkeeping 資料匯出檔案", "Excel Workbook File": "Excel 工作簿檔案", + "Excel Workbook File (.xlsx)": "Excel 工作簿檔案 (.xlsx)", + "Excel 97-2003 Workbook File (.xls)": "Excel 97-2003 工作簿檔案 (.xls)", "Open Financial Exchange (OFX) File": "開放式金融交換 (OFX) 檔案", "Quicken Financial Exchange (QFX) File": "Quicken Financial Exchange (QFX) 檔案", "Quicken Interchange Format (QIF) File": "Quicken Interchange Format (QIF) 檔案", diff --git a/src/stores/transaction.ts b/src/stores/transaction.ts index c839d8d8..5cf2b338 100644 --- a/src/stores/transaction.ts +++ b/src/stores/transaction.ts @@ -1260,9 +1260,9 @@ export const useTransactionsStore = defineStore('transactions', () => { services.cancelRequest(cancelableUuid); } - function parseImportDsvFile({ fileType, fileEncoding, importFile }: { fileType: string, fileEncoding?: string, importFile: File }): Promise { + function parseImportCustomFile({ fileType, fileEncoding, importFile }: { fileType: string, fileEncoding?: string, importFile: File }): Promise { 
return new Promise((resolve, reject) => { - services.parseImportDsvFile({ fileType, fileEncoding, importFile }).then(response => { + services.parseImportCustomFile({ fileType, fileEncoding, importFile }).then(response => { const data = response.data; if (!data || !data.success || !data.result) { @@ -1476,7 +1476,7 @@ export const useTransactionsStore = defineStore('transactions', () => { deleteTransaction, recognizeReceiptImage, cancelRecognizeReceiptImage, - parseImportDsvFile, + parseImportCustomFile, parseImportTransaction, importTransactions, getImportTransactionsProcess, diff --git a/src/views/desktop/transactions/import/ImportDialog.vue b/src/views/desktop/transactions/import/ImportDialog.vue index 34daf32e..d0b10f10 100644 --- a/src/views/desktop/transactions/import/ImportDialog.vue +++ b/src/views/desktop/transactions/import/ImportDialog.vue @@ -142,7 +142,7 @@ /> - + @@ -210,8 +210,8 @@ - {{ tt('How to import this file?') }} - {{ tt('How to export this file?') }} + {{ tt('How to import this file?') }} + {{ tt('How to export this file?') }} [{{ exportFileGuideDocumentLanguageName }}] @@ -334,7 +334,7 @@ type ImportTransactionExecuteCustomScriptTabType = InstanceType; type ImportTransactionDialogStep = 'uploadFile' | 'defineColumn' | 'executeCustomScript' | 'checkData' | 'finalResult'; -enum ImportDSVProcessMethod { +enum ImportCustomFileFormatProcessMethod { ColumnMapping, CustomScript }; @@ -400,7 +400,7 @@ const fileSubType = ref('ezbookkeeping_csv'); const fileEncoding = ref('auto'); const detectingFileEncoding = ref(false); const autoDetectedFileEncoding = ref(undefined); -const processDSVMethod = ref(ImportDSVProcessMethod.ColumnMapping); +const processCustomFileFormatMethod = ref(ImportCustomFileFormatProcessMethod.ColumnMapping); const importFile = ref(null); const importData = ref(''); const importAdditionalOptions = ref({}); @@ -451,6 +451,7 @@ const allSupportedEncodings = computed(() => fileType.value === 'dsv' || fileType.value === 'dsv_data' || 
fileType.value === 'excel'); const isImportDataFromTextbox = computed(() => allSupportedImportFileTypesMap.value[fileType.value]?.dataFromTextbox ?? false); const supportedAdditionalOptions = computed(() => allSupportedImportFileTypesMap.value[fileType.value]?.supportedAdditionalOptions); @@ -463,8 +464,8 @@ const allSteps = computed(() => { } ]; - if (fileType.value === 'dsv' || fileType.value === 'dsv_data') { - if (processDSVMethod.value === ImportDSVProcessMethod.CustomScript) { + if (isCustomFileFormat.value) { + if (processCustomFileFormatMethod.value === ImportCustomFileFormatProcessMethod.CustomScript) { steps.push({ name: 'executeCustomScript', title: tt('Execute Custom Script'), @@ -608,7 +609,7 @@ function open(): Promise { fileEncoding.value = 'auto'; detectingFileEncoding.value = false; autoDetectedFileEncoding.value = undefined; - processDSVMethod.value = ImportDSVProcessMethod.ColumnMapping; + processCustomFileFormatMethod.value = ImportCustomFileFormatProcessMethod.ColumnMapping; currentStep.value = 'uploadFile'; importProcess.value = 0; importFile.value = null; @@ -780,18 +781,16 @@ function parseData(): void { return; } - const isDsvFileType: boolean = fileType.value === 'dsv' || fileType.value === 'dsv_data'; - - if (isDsvFileType && currentStep.value === 'uploadFile') { + if (isCustomFileFormat.value && currentStep.value === 'uploadFile') { submitting.value = true; - transactionsStore.parseImportDsvFile({ + transactionsStore.parseImportCustomFile({ fileType: type, fileEncoding: encoding, importFile: uploadFile }).then(response => { if (response && response.length) { - if (processDSVMethod.value === ImportDSVProcessMethod.CustomScript) { + if (processCustomFileFormatMethod.value === ImportCustomFileFormatProcessMethod.CustomScript) { importTransactionExecuteCustomScriptTab.value?.reset(); parsedFileData.value = response; currentStep.value = 'executeCustomScript'; @@ -825,7 +824,7 @@ function parseData(): void { let geoLocationOrder: string | 
undefined = undefined; let tagSeparator: string | undefined = undefined; - if (isDsvFileType && processDSVMethod.value === ImportDSVProcessMethod.ColumnMapping) { + if (isCustomFileFormat.value && processCustomFileFormatMethod.value === ImportCustomFileFormatProcessMethod.ColumnMapping) { const defineColumnResult = importTransactionDefineColumnTab.value?.generateResult(); if (!defineColumnResult) { @@ -842,7 +841,7 @@ function parseData(): void { geoLocationSeparator = defineColumnResult.geoLocationSeparator; geoLocationOrder = defineColumnResult.geoLocationOrder; tagSeparator = defineColumnResult.tagSeparator; - } else if (isDsvFileType && processDSVMethod.value === ImportDSVProcessMethod.CustomScript) { + } else if (isCustomFileFormat.value && processCustomFileFormatMethod.value === ImportCustomFileFormatProcessMethod.CustomScript) { const executeCustomScriptResult = importTransactionExecuteCustomScriptTab.value?.generateResult(); if (!executeCustomScriptResult) {