rename files

This commit is contained in:
MaysWind
2024-10-16 00:46:30 +08:00
parent 8978e340c7
commit a062592043
26 changed files with 365 additions and 361 deletions
@@ -1,14 +1,14 @@
package alipay
// alipayAppTransactionDataCsvImporter defines the structure of alipay app csv importer for transaction data
type alipayAppTransactionDataCsvImporter struct {
alipayTransactionDataCsvImporter
// alipayAppTransactionDataCsvFileImporter defines the structure of alipay app csv importer for transaction data
type alipayAppTransactionDataCsvFileImporter struct {
alipayTransactionDataCsvFileImporter
}
// Initialize a alipay app transaction data csv file importer singleton instance
var (
AlipayAppTransactionDataCsvImporter = &alipayAppTransactionDataCsvImporter{
alipayTransactionDataCsvImporter{
AlipayAppTransactionDataCsvFileImporter = &alipayAppTransactionDataCsvFileImporter{
alipayTransactionDataCsvFileImporter{
fileHeaderLine: "------------------------------------------------------------------------------------",
dataHeaderStartContent: "支付宝(中国)网络技术有限公司 电子客户回单",
originalColumnNames: alipayTransactionColumnNames{
@@ -139,7 +139,7 @@ func (t *alipayTransactionDataRowIterator) Next(ctx core.Context, user *models.U
rowItems[t.dataTable.originalTypeColumnIndex] != alipayTransactionTypeNameMapping[models.TRANSACTION_TYPE_INCOME] &&
rowItems[t.dataTable.originalTypeColumnIndex] != alipayTransactionTypeNameMapping[models.TRANSACTION_TYPE_EXPENSE] &&
rowItems[t.dataTable.originalTypeColumnIndex] != alipayTransactionTypeNameMapping[models.TRANSACTION_TYPE_TRANSFER] {
log.Warnf(ctx, "[alipay_transaction_data_plain_text_data_table.Next] skip parsing transaction in row \"index:%d\", because type is \"%s\"", t.currentIndex, rowItems[t.dataTable.originalTypeColumnIndex])
log.Warnf(ctx, "[alipay_transaction_csv_data_table.Next] skip parsing transaction in row \"index:%d\", because type is \"%s\"", t.currentIndex, rowItems[t.dataTable.originalTypeColumnIndex])
isValid = false
}
@@ -150,7 +150,7 @@ func (t *alipayTransactionDataRowIterator) Next(ctx core.Context, user *models.U
rowItems[t.dataTable.originalStatusColumnIndex] != alipayTransactionDataStatusClosedName &&
rowItems[t.dataTable.originalStatusColumnIndex] != alipayTransactionDataStatusRefundSuccessName &&
rowItems[t.dataTable.originalStatusColumnIndex] != alipayTransactionDataStatusTaxRefundSuccessName {
log.Warnf(ctx, "[alipay_transaction_data_plain_text_data_table.Next] skip parsing transaction in row \"index:%d\", because status is \"%s\"", t.currentIndex, rowItems[t.dataTable.originalStatusColumnIndex])
log.Warnf(ctx, "[alipay_transaction_csv_data_table.Next] skip parsing transaction in row \"index:%d\", because status is \"%s\"", t.currentIndex, rowItems[t.dataTable.originalStatusColumnIndex])
isValid = false
}
@@ -161,7 +161,7 @@ func (t *alipayTransactionDataRowIterator) Next(ctx core.Context, user *models.U
finalItems, errMsg = t.dataTable.parseTransactionData(ctx, user, rowItems)
if finalItems == nil {
log.Warnf(ctx, "[alipay_transaction_data_plain_text_data_table.Next] skip parsing transaction in row \"index:%d\", because %s", t.currentIndex, errMsg)
log.Warnf(ctx, "[alipay_transaction_csv_data_table.Next] skip parsing transaction in row \"index:%d\", because %s", t.currentIndex, errMsg)
isValid = false
}
}
@@ -302,7 +302,7 @@ func createNewAlipayTransactionDataTable(ctx core.Context, reader io.Reader, fil
}
if len(allOriginalLines) < 2 {
log.Errorf(ctx, "[alipay_transaction_data_plain_text_data_table.createNewAlipayTransactionPlainTextDataTable] cannot parse import data, because data table row count is less 1")
log.Errorf(ctx, "[alipay_transaction_csv_data_table.createNewAlipayTransactionPlainTextDataTable] cannot parse import data, because data table row count is less 1")
return nil, errs.ErrNotFoundTransactionDataInFile
}
@@ -324,7 +324,7 @@ func createNewAlipayTransactionDataTable(ctx core.Context, reader io.Reader, fil
descriptionColumnIdx, descriptionColumnExists := originalHeaderItemMap[originalColumnNames.descriptionColumnName]
if !timeColumnExists || !amountColumnExists || !typeColumnExists || !statusColumnExists {
log.Errorf(ctx, "[alipay_transaction_data_plain_text_data_table.createNewAlipayTransactionPlainTextDataTable] cannot parse alipay csv data, because missing essential columns in header row")
log.Errorf(ctx, "[alipay_transaction_csv_data_table.createNewAlipayTransactionPlainTextDataTable] cannot parse alipay csv data, because missing essential columns in header row")
return nil, errs.ErrMissingRequiredFieldInHeaderRow
}
@@ -379,7 +379,7 @@ func parseAllLinesFromAlipayTransactionPlainText(ctx core.Context, reader io.Rea
}
if err != nil {
log.Errorf(ctx, "[alipay_transaction_data_plain_text_data_table.parseAllLinesFromAlipayTransactionPlainText] cannot parse alipay csv data, because %s", err.Error())
log.Errorf(ctx, "[alipay_transaction_csv_data_table.parseAllLinesFromAlipayTransactionPlainText] cannot parse alipay csv data, because %s", err.Error())
return nil, errs.ErrInvalidCSVFile
}
@@ -390,7 +390,7 @@ func parseAllLinesFromAlipayTransactionPlainText(ctx core.Context, reader io.Rea
hasFileHeader = true
continue
} else {
log.Warnf(ctx, "[alipay_transaction_data_plain_text_data_table.parseAllLinesFromAlipayTransactionPlainText] read unexpected line before read file header, line content is %s", strings.Join(items, ","))
log.Warnf(ctx, "[alipay_transaction_csv_data_table.parseAllLinesFromAlipayTransactionPlainText] read unexpected line before read file header, line content is %s", strings.Join(items, ","))
continue
}
}
@@ -418,7 +418,7 @@ func parseAllLinesFromAlipayTransactionPlainText(ctx core.Context, reader io.Rea
}
if len(allOriginalLines) > 0 && len(items) < len(allOriginalLines[0]) {
log.Errorf(ctx, "[alipay_transaction_data_plain_text_data_table.parseAllLinesFromAlipayTransactionPlainText] cannot parse row \"index:%d\", because may missing some columns (column count %d in data row is less than header column count %d)", len(allOriginalLines), len(items), len(allOriginalLines[0]))
log.Errorf(ctx, "[alipay_transaction_csv_data_table.parseAllLinesFromAlipayTransactionPlainText] cannot parse row \"index:%d\", because may missing some columns (column count %d in data row is less than header column count %d)", len(allOriginalLines), len(items), len(allOriginalLines[0]))
return nil, errs.ErrFewerFieldsInDataRowThanInHeaderRow
}
@@ -17,8 +17,8 @@ var alipayTransactionTypeNameMapping = map[models.TransactionType]string{
models.TRANSACTION_TYPE_TRANSFER: "不计收支",
}
// alipayTransactionDataCsvImporter defines the structure of alipay csv importer for transaction data
type alipayTransactionDataCsvImporter struct {
// alipayTransactionDataCsvFileImporter defines the structure of alipay csv importer for transaction data
type alipayTransactionDataCsvFileImporter struct {
fileHeaderLine string
dataHeaderStartContent string
dataBottomEndLineRune rune
@@ -26,7 +26,7 @@ type alipayTransactionDataCsvImporter struct {
}
// ParseImportedData returns the imported data by parsing the alipay transaction csv data
func (c *alipayTransactionDataCsvImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
func (c *alipayTransactionDataCsvFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
enc := simplifiedchinese.GB18030
reader := transform.NewReader(bytes.NewReader(data), enc.NewDecoder())
@@ -15,7 +15,7 @@ import (
)
func TestAlipayCsvFileImporterParseImportedData_MinimumValidData(t *testing.T) {
converter := AlipayWebTransactionDataCsvImporter
converter := AlipayWebTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -94,7 +94,7 @@ func TestAlipayCsvFileImporterParseImportedData_MinimumValidData(t *testing.T) {
}
func TestAlipayCsvFileImporterParseImportedData_ParseRefundTransaction(t *testing.T) {
converter := AlipayWebTransactionDataCsvImporter
converter := AlipayWebTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -142,7 +142,7 @@ func TestAlipayCsvFileImporterParseImportedData_ParseRefundTransaction(t *testin
}
func TestAlipayCsvFileImporterParseImportedData_ParseInvalidTime(t *testing.T) {
converter := AlipayWebTransactionDataCsvImporter
converter := AlipayWebTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -176,7 +176,7 @@ func TestAlipayCsvFileImporterParseImportedData_ParseInvalidTime(t *testing.T) {
}
func TestAlipayCsvFileImporterParseImportedData_ParseInvalidType(t *testing.T) {
converter := AlipayWebTransactionDataCsvImporter
converter := AlipayWebTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -198,7 +198,7 @@ func TestAlipayCsvFileImporterParseImportedData_ParseInvalidType(t *testing.T) {
}
func TestAlipayCsvFileImporterParseImportedData_ParseAccountName(t *testing.T) {
converter := AlipayWebTransactionDataCsvImporter
converter := AlipayWebTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -325,7 +325,7 @@ func TestAlipayCsvFileImporterParseImportedData_ParseAccountName(t *testing.T) {
}
func TestAlipayCsvFileImporterParseImportedData_ParseCategory(t *testing.T) {
converter := AlipayAppTransactionDataCsvImporter
converter := AlipayAppTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -365,7 +365,7 @@ func TestAlipayCsvFileImporterParseImportedData_ParseCategory(t *testing.T) {
}
func TestAlipayCsvFileImporterParseImportedData_ParseDescription(t *testing.T) {
converter := AlipayWebTransactionDataCsvImporter
converter := AlipayWebTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -405,7 +405,7 @@ func TestAlipayCsvFileImporterParseImportedData_ParseDescription(t *testing.T) {
}
func TestAlipayCsvFileImporterParseImportedData_MissingFileHeader(t *testing.T) {
converter := AlipayWebTransactionDataCsvImporter
converter := AlipayWebTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -427,7 +427,7 @@ func TestAlipayCsvFileImporterParseImportedData_MissingFileHeader(t *testing.T)
}
func TestAlipayCsvFileImporterParseImportedData_MissingRequiredColumn(t *testing.T) {
converter := AlipayWebTransactionDataCsvImporter
converter := AlipayWebTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -1,14 +1,14 @@
package alipay
// alipayWebTransactionDataCsvImporter defines the structure of alipay (web) csv importer for transaction data
type alipayWebTransactionDataCsvImporter struct {
alipayTransactionDataCsvImporter
// alipayWebTransactionDataCsvFileImporter defines the structure of alipay (web) csv importer for transaction data
type alipayWebTransactionDataCsvFileImporter struct {
alipayTransactionDataCsvFileImporter
}
// Initialize a alipay (web) transaction data csv file importer singleton instance
var (
AlipayWebTransactionDataCsvImporter = &alipayWebTransactionDataCsvImporter{
alipayTransactionDataCsvImporter{
AlipayWebTransactionDataCsvFileImporter = &alipayWebTransactionDataCsvFileImporter{
alipayTransactionDataCsvFileImporter{
fileHeaderLine: "支付宝交易记录明细查询",
dataHeaderStartContent: "交易记录明细列表",
dataBottomEndLineRune: '-',
@@ -0,0 +1,125 @@
package csv
import (
"encoding/csv"
"io"
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/errs"
"github.com/mayswind/ezbookkeeping/pkg/log"
)
// CsvFileImportedDataTable defines the structure of csv data table.
// The first entry of allLines is treated as the header row by the
// accessor methods; the remaining entries are data rows.
type CsvFileImportedDataTable struct {
	allLines [][]string // every parsed csv record, header line included
}

// CsvFileImportedDataRow defines the structure of csv data table row
type CsvFileImportedDataRow struct {
	dataTable *CsvFileImportedDataTable // the table this row belongs to
	allItems  []string                  // column values of this single record
}

// CsvFileImportedDataRowIterator defines the structure of csv data table row iterator
type CsvFileImportedDataRowIterator struct {
	dataTable    *CsvFileImportedDataTable // the table being iterated
	currentIndex int                       // index of the row last returned; starts at 0 (the header), so iteration begins at row 1
}
// DataRowCount returns the number of data rows, i.e. all parsed lines
// excluding the header line. An empty table yields zero.
func (t *CsvFileImportedDataTable) DataRowCount() int {
	lineCount := len(t.allLines)

	if lineCount == 0 {
		return 0
	}

	return lineCount - 1
}
// HeaderColumnNames returns the column names taken from the first
// (header) line, or nil when the table contains no lines at all.
func (t *CsvFileImportedDataTable) HeaderColumnNames() []string {
	if len(t.allLines) == 0 {
		return nil
	}

	return t.allLines[0]
}
// DataRowIterator returns a fresh iterator positioned on the header
// line, so the first call to Next yields the first data row.
func (t *CsvFileImportedDataTable) DataRowIterator() datatable.ImportedDataRowIterator {
	iterator := &CsvFileImportedDataRowIterator{
		dataTable:    t,
		currentIndex: 0,
	}

	return iterator
}
// ColumnCount returns the total count of column in this data row
func (r *CsvFileImportedDataRow) ColumnCount() int {
	return len(r.allItems)
}
// GetData returns the value stored in the specified column of this row.
// Any out-of-range index — negative as well as beyond the last column —
// yields an empty string instead of panicking; the original code only
// guarded the upper bound, so a negative index would panic.
func (r *CsvFileImportedDataRow) GetData(columnIndex int) string {
	if columnIndex < 0 || columnIndex >= len(r.allItems) {
		return ""
	}

	return r.allItems[columnIndex]
}
// HasNext returns whether the iterator does not reach the end
// (i.e. at least one more data row follows the current position).
func (t *CsvFileImportedDataRowIterator) HasNext() bool {
	return t.currentIndex+1 < len(t.dataTable.allLines)
}
// Next advances the iterator by one line and returns the row at the new
// position, or nil when no further rows remain.
func (t *CsvFileImportedDataRowIterator) Next() datatable.ImportedDataRow {
	nextIndex := t.currentIndex + 1

	if nextIndex >= len(t.dataTable.allLines) {
		return nil
	}

	t.currentIndex = nextIndex

	return &CsvFileImportedDataRow{
		dataTable: t.dataTable,
		allItems:  t.dataTable.allLines[nextIndex],
	}
}
// CreateNewCsvDataTable returns comma separated values data table by io readers.
// It is a convenience wrapper around createNewCsvFileDataTable with ',' as the
// field separator.
func CreateNewCsvDataTable(ctx core.Context, reader io.Reader) (*CsvFileImportedDataTable, error) {
	return createNewCsvFileDataTable(ctx, reader, ',')
}
// createNewCsvFileDataTable reads every record from reader using the given
// field separator and returns them wrapped in a CsvFileImportedDataTable.
// FieldsPerRecord is set to -1 so rows may have varying column counts; any
// parse error is logged and mapped to errs.ErrInvalidCSVFile.
func createNewCsvFileDataTable(ctx core.Context, reader io.Reader, separator rune) (*CsvFileImportedDataTable, error) {
	csvReader := csv.NewReader(reader)
	csvReader.Comma = separator
	csvReader.FieldsPerRecord = -1

	allLines := make([][]string, 0)

	for {
		items, err := csvReader.Read()

		if err == io.EOF {
			break
		}

		if err != nil {
			log.Errorf(ctx, "[csv_file_imported_data_table.createNewCsvFileDataTable] cannot parse csv data, because %s", err.Error())
			return nil, errs.ErrInvalidCSVFile
		}

		// Skip empty records. The original guard was
		// `len(items) == 0 && items[0] == ""`, which evaluates items[0]
		// exactly when len(items) == 0 — i.e. it either panics with an
		// index-out-of-range or is false, so it never skipped anything.
		if len(items) == 0 || (len(items) == 1 && items[0] == "") {
			continue
		}

		allLines = append(allLines, items)
	}

	return &CsvFileImportedDataTable{
		allLines: allLines,
	}, nil
}
@@ -1,124 +0,0 @@
package datatable
import (
"encoding/csv"
"io"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/errs"
"github.com/mayswind/ezbookkeeping/pkg/log"
)
// DefaultPlainTextImportedDataTable defines the structure of default plain text data table
type DefaultPlainTextImportedDataTable struct {
allLines [][]string
}
// DefaultPlainTextImportedDataRow defines the structure of default plain text data table row
type DefaultPlainTextImportedDataRow struct {
dataTable *DefaultPlainTextImportedDataTable
allItems []string
}
// DefaultPlainTextImportedDataRowIterator defines the structure of default plain text data table row iterator
type DefaultPlainTextImportedDataRowIterator struct {
dataTable *DefaultPlainTextImportedDataTable
currentIndex int
}
// DataRowCount returns the total count of data row
func (t *DefaultPlainTextImportedDataTable) DataRowCount() int {
if len(t.allLines) < 1 {
return 0
}
return len(t.allLines) - 1
}
// HeaderColumnNames returns the header column name list
func (t *DefaultPlainTextImportedDataTable) HeaderColumnNames() []string {
if len(t.allLines) < 1 {
return nil
}
return t.allLines[0]
}
// DataRowIterator returns the iterator of data row
func (t *DefaultPlainTextImportedDataTable) DataRowIterator() ImportedDataRowIterator {
return &DefaultPlainTextImportedDataRowIterator{
dataTable: t,
currentIndex: 0,
}
}
// ColumnCount returns the total count of column in this data row
func (r *DefaultPlainTextImportedDataRow) ColumnCount() int {
return len(r.allItems)
}
// GetData returns the data in the specified column index
func (r *DefaultPlainTextImportedDataRow) GetData(columnIndex int) string {
if columnIndex >= len(r.allItems) {
return ""
}
return r.allItems[columnIndex]
}
// HasNext returns whether the iterator does not reach the end
func (t *DefaultPlainTextImportedDataRowIterator) HasNext() bool {
return t.currentIndex+1 < len(t.dataTable.allLines)
}
// Next returns the next imported data row
func (t *DefaultPlainTextImportedDataRowIterator) Next() ImportedDataRow {
if t.currentIndex+1 >= len(t.dataTable.allLines) {
return nil
}
t.currentIndex++
rowItems := t.dataTable.allLines[t.currentIndex]
return &DefaultPlainTextImportedDataRow{
dataTable: t.dataTable,
allItems: rowItems,
}
}
// CreateNewDefaultCsvDataTable returns default csv data table by io readers
func CreateNewDefaultCsvDataTable(ctx core.Context, reader io.Reader) (*DefaultPlainTextImportedDataTable, error) {
return createNewDefaultPlainTextDataTable(ctx, reader, ',')
}
func createNewDefaultPlainTextDataTable(ctx core.Context, reader io.Reader, comma rune) (*DefaultPlainTextImportedDataTable, error) {
csvReader := csv.NewReader(reader)
csvReader.Comma = comma
csvReader.FieldsPerRecord = -1
allLines := make([][]string, 0)
for {
items, err := csvReader.Read()
if err == io.EOF {
break
}
if err != nil {
log.Errorf(ctx, "[default_plain_text_imported_data_table.createNewDefaultPlainTextDataTable] cannot parse plain text data, because %s", err.Error())
return nil, errs.ErrInvalidCSVFile
}
if len(items) == 0 && items[0] == "" {
continue
}
allLines = append(allLines, items)
}
return &DefaultPlainTextImportedDataTable{
allLines: allLines,
}, nil
}
@@ -0,0 +1,15 @@
package _default
// defaultTransactionDataCSVFileConverter defines the structure of ezbookkeeping default csv file converter
type defaultTransactionDataCSVFileConverter struct {
defaultTransactionDataPlainTextConverter
}
// Initialize an ezbookkeeping default transaction data csv file converter singleton instance
var (
DefaultTransactionDataCSVFileConverter = &defaultTransactionDataCSVFileConverter{
defaultTransactionDataPlainTextConverter{
columnSeparator: ",",
},
}
)
@@ -6,8 +6,8 @@ import (
"github.com/mayswind/ezbookkeeping/pkg/models"
)
// ezBookKeepingTransactionDataPlainTextConverter defines the structure of ezbookkeeping plain text converter for transaction data
type ezBookKeepingTransactionDataPlainTextConverter struct {
// defaultTransactionDataPlainTextConverter defines the structure of ezbookkeeping default plain text converter for transaction data
type defaultTransactionDataPlainTextConverter struct {
columnSeparator string
}
@@ -57,8 +57,8 @@ var ezbookkeepingDataColumns = []datatable.TransactionDataTableColumn{
}
// ToExportedContent returns the exported transaction plain text data
func (c *ezBookKeepingTransactionDataPlainTextConverter) ToExportedContent(ctx core.Context, uid int64, transactions []*models.Transaction, accountMap map[int64]*models.Account, categoryMap map[int64]*models.TransactionCategory, tagMap map[int64]*models.TransactionTag, allTagIndexes map[int64][]int64) ([]byte, error) {
dataTableBuilder := createNewezbookkeepingTransactionPlainTextDataTableBuilder(
func (c *defaultTransactionDataPlainTextConverter) ToExportedContent(ctx core.Context, uid int64, transactions []*models.Transaction, accountMap map[int64]*models.Account, categoryMap map[int64]*models.TransactionCategory, tagMap map[int64]*models.TransactionTag, allTagIndexes map[int64][]int64) ([]byte, error) {
dataTableBuilder := createNewDefaultTransactionPlainTextDataTableBuilder(
len(transactions),
ezbookkeepingDataColumns,
ezbookkeepingDataColumnNameMapping,
@@ -82,8 +82,8 @@ func (c *ezBookKeepingTransactionDataPlainTextConverter) ToExportedContent(ctx c
}
// ParseImportedData returns the imported data by parsing the transaction plain text data
func (c *ezBookKeepingTransactionDataPlainTextConverter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
dataTable, err := createNewezbookkeepingPlainTextDataTable(
func (c *defaultTransactionDataPlainTextConverter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
dataTable, err := createNewDefaultPlainTextDataTable(
string(data),
c.columnSeparator,
ezbookkeepingLineSeparator,
@@ -12,7 +12,7 @@ import (
)
func TestEzBookKeepingPlainFileConverterToExportedContent(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
transactions := make([]*models.Transaction, 3)
@@ -126,7 +126,7 @@ func TestEzBookKeepingPlainFileConverterToExportedContent(t *testing.T) {
}
func TestEzBookKeepingPlainFileConverterParseImportedData_MinimumValidData(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -197,7 +197,7 @@ func TestEzBookKeepingPlainFileConverterParseImportedData_MinimumValidData(t *te
}
func TestEzBookKeepingPlainFileConverterParseImportedData_ParseInvalidTime(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -215,7 +215,7 @@ func TestEzBookKeepingPlainFileConverterParseImportedData_ParseInvalidTime(t *te
}
func TestEzBookKeepingPlainFileConverterParseImportedData_ParseInvalidType(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -229,7 +229,7 @@ func TestEzBookKeepingPlainFileConverterParseImportedData_ParseInvalidType(t *te
}
func TestEzBookKeepingPlainFileConverterParseImportedData_ParseValidTimezone(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -257,7 +257,7 @@ func TestEzBookKeepingPlainFileConverterParseImportedData_ParseValidTimezone(t *
}
func TestEzBookKeepingPlainFileConverterParseImportedData_ParseInvalidTimezone(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -271,7 +271,7 @@ func TestEzBookKeepingPlainFileConverterParseImportedData_ParseInvalidTimezone(t
}
func TestEzBookKeepingPlainFileConverterParseImportedData_ParseValidAccountCurrency(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -298,7 +298,7 @@ func TestEzBookKeepingPlainFileConverterParseImportedData_ParseValidAccountCurre
}
func TestEzBookKeepingPlainFileConverterParseImportedData_ParseInvalidAccountCurrency(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -318,7 +318,7 @@ func TestEzBookKeepingPlainFileConverterParseImportedData_ParseInvalidAccountCur
}
func TestEzBookKeepingPlainFileConverterParseImportedData_ParseNotSupportedCurrency(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -336,7 +336,7 @@ func TestEzBookKeepingPlainFileConverterParseImportedData_ParseNotSupportedCurre
}
func TestEzBookKeepingPlainFileConverterParseImportedData_ParseInvalidAmount(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -354,7 +354,7 @@ func TestEzBookKeepingPlainFileConverterParseImportedData_ParseInvalidAmount(t *
}
func TestEzBookKeepingPlainFileConverterParseImportedData_ParseNoAmount2(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -378,7 +378,7 @@ func TestEzBookKeepingPlainFileConverterParseImportedData_ParseNoAmount2(t *test
}
func TestEzBookKeepingPlainFileConverterParseImportedData_ParseValidGeographicLocation(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -396,7 +396,7 @@ func TestEzBookKeepingPlainFileConverterParseImportedData_ParseValidGeographicLo
}
func TestEzBookKeepingPlainFileConverterParseImportedData_ParseInvalidGeographicLocation(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -421,7 +421,7 @@ func TestEzBookKeepingPlainFileConverterParseImportedData_ParseInvalidGeographic
}
func TestEzBookKeepingPlainFileConverterParseImportedData_ParseTag(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -450,7 +450,7 @@ func TestEzBookKeepingPlainFileConverterParseImportedData_ParseTag(t *testing.T)
}
func TestEzBookKeepingPlainFileConverterParseImportedData_ParseDescription(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -467,7 +467,7 @@ func TestEzBookKeepingPlainFileConverterParseImportedData_ParseDescription(t *te
}
func TestEzBookKeepingPlainFileConverterParseImportedData_MissingFileHeader(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -480,7 +480,7 @@ func TestEzBookKeepingPlainFileConverterParseImportedData_MissingFileHeader(t *t
}
func TestEzBookKeepingPlainFileConverterParseImportedData_MissingRequiredColumn(t *testing.T) {
converter := EzBookKeepingTransactionDataCSVFileConverter
converter := DefaultTransactionDataCSVFileConverter
context := core.NewNullContext()
user := &models.User{
@@ -0,0 +1,15 @@
package _default
// defaultTransactionDataTSVFileConverter defines the structure of ezbookkeeping default tsv file converter
type defaultTransactionDataTSVFileConverter struct {
defaultTransactionDataPlainTextConverter
}
// Initialize an ezbookkeeping default transaction data tsv file converter singleton instance
var (
DefaultTransactionDataTSVFileConverter = &defaultTransactionDataTSVFileConverter{
defaultTransactionDataPlainTextConverter{
columnSeparator: "\t",
},
}
)
@@ -8,27 +8,27 @@ import (
"github.com/mayswind/ezbookkeeping/pkg/errs"
)
// ezBookKeepingPlainTextDataTable defines the structure of ezbookkeeping plain text data table
type ezBookKeepingPlainTextDataTable struct {
// defaultPlainTextDataTable defines the structure of ezbookkeeping default plain text data table
type defaultPlainTextDataTable struct {
columnSeparator string
lineSeparator string
allLines []string
headerLineColumnNames []string
}
// ezBookKeepingPlainTextDataRow defines the structure of ezbookkeeping plain text data row
type ezBookKeepingPlainTextDataRow struct {
// defaultPlainTextDataRow defines the structure of ezbookkeeping default plain text data row
type defaultPlainTextDataRow struct {
allItems []string
}
// ezBookKeepingPlainTextDataRowIterator defines the structure of ezbookkeeping plain text data row iterator
type ezBookKeepingPlainTextDataRowIterator struct {
dataTable *ezBookKeepingPlainTextDataTable
// defaultPlainTextDataRowIterator defines the structure of ezbookkeeping default plain text data row iterator
type defaultPlainTextDataRowIterator struct {
dataTable *defaultPlainTextDataTable
currentIndex int
}
// ezBookKeepingTransactionPlainTextDataTableBuilder defines the structure of ezbookkeeping transaction plain text data table builder
type ezBookKeepingTransactionPlainTextDataTableBuilder struct {
// defaultTransactionPlainTextDataTableBuilder defines the structure of ezbookkeeping default transaction plain text data table builder
type defaultTransactionPlainTextDataTableBuilder struct {
columnSeparator string
lineSeparator string
columns []datatable.TransactionDataTableColumn
@@ -38,7 +38,7 @@ type ezBookKeepingTransactionPlainTextDataTableBuilder struct {
}
// DataRowCount returns the total count of data row
func (t *ezBookKeepingPlainTextDataTable) DataRowCount() int {
func (t *defaultPlainTextDataTable) DataRowCount() int {
if len(t.allLines) < 1 {
return 0
}
@@ -47,25 +47,25 @@ func (t *ezBookKeepingPlainTextDataTable) DataRowCount() int {
}
// HeaderColumnNames returns the header column name list
func (t *ezBookKeepingPlainTextDataTable) HeaderColumnNames() []string {
func (t *defaultPlainTextDataTable) HeaderColumnNames() []string {
return t.headerLineColumnNames
}
// DataRowIterator returns the iterator of data row
func (t *ezBookKeepingPlainTextDataTable) DataRowIterator() datatable.ImportedDataRowIterator {
return &ezBookKeepingPlainTextDataRowIterator{
func (t *defaultPlainTextDataTable) DataRowIterator() datatable.ImportedDataRowIterator {
return &defaultPlainTextDataRowIterator{
dataTable: t,
currentIndex: 0,
}
}
// ColumnCount returns the total count of column in this data row
func (r *ezBookKeepingPlainTextDataRow) ColumnCount() int {
func (r *defaultPlainTextDataRow) ColumnCount() int {
return len(r.allItems)
}
// GetData returns the data in the specified column index
func (r *ezBookKeepingPlainTextDataRow) GetData(columnIndex int) string {
func (r *defaultPlainTextDataRow) GetData(columnIndex int) string {
if columnIndex >= len(r.allItems) {
return ""
}
@@ -74,12 +74,12 @@ func (r *ezBookKeepingPlainTextDataRow) GetData(columnIndex int) string {
}
// HasNext returns whether the iterator does not reach the end
func (t *ezBookKeepingPlainTextDataRowIterator) HasNext() bool {
func (t *defaultPlainTextDataRowIterator) HasNext() bool {
return t.currentIndex+1 < len(t.dataTable.allLines)
}
// Next returns the next imported data row
func (t *ezBookKeepingPlainTextDataRowIterator) Next() datatable.ImportedDataRow {
func (t *defaultPlainTextDataRowIterator) Next() datatable.ImportedDataRow {
if t.currentIndex+1 >= len(t.dataTable.allLines) {
return nil
}
@@ -89,13 +89,13 @@ func (t *ezBookKeepingPlainTextDataRowIterator) Next() datatable.ImportedDataRow
rowContent := t.dataTable.allLines[t.currentIndex]
rowItems := strings.Split(rowContent, t.dataTable.columnSeparator)
return &ezBookKeepingPlainTextDataRow{
return &defaultPlainTextDataRow{
allItems: rowItems,
}
}
// AppendTransaction appends the specified transaction to data builder
func (b *ezBookKeepingTransactionPlainTextDataTableBuilder) AppendTransaction(data map[datatable.TransactionDataTableColumn]string) {
func (b *defaultTransactionPlainTextDataTableBuilder) AppendTransaction(data map[datatable.TransactionDataTableColumn]string) {
dataRowParams := make([]any, len(b.columns))
for i := 0; i < len(b.columns); i++ {
@@ -106,7 +106,7 @@ func (b *ezBookKeepingTransactionPlainTextDataTableBuilder) AppendTransaction(da
}
// ReplaceDelimiters returns the text after removing the delimiters
func (b *ezBookKeepingTransactionPlainTextDataTableBuilder) ReplaceDelimiters(text string) string {
func (b *defaultTransactionPlainTextDataTableBuilder) ReplaceDelimiters(text string) string {
text = strings.Replace(text, "\r\n", " ", -1)
text = strings.Replace(text, "\r", " ", -1)
text = strings.Replace(text, "\n", " ", -1)
@@ -117,11 +117,11 @@ func (b *ezBookKeepingTransactionPlainTextDataTableBuilder) ReplaceDelimiters(te
}
// String returns the textual representation of this data
func (b *ezBookKeepingTransactionPlainTextDataTableBuilder) String() string {
func (b *defaultTransactionPlainTextDataTableBuilder) String() string {
return b.builder.String()
}
func (b *ezBookKeepingTransactionPlainTextDataTableBuilder) generateHeaderLine() string {
func (b *defaultTransactionPlainTextDataTableBuilder) generateHeaderLine() string {
var ret strings.Builder
for i := 0; i < len(b.columns); i++ {
@@ -140,7 +140,7 @@ func (b *ezBookKeepingTransactionPlainTextDataTableBuilder) generateHeaderLine()
return ret.String()
}
func (b *ezBookKeepingTransactionPlainTextDataTableBuilder) generateDataLineFormat() string {
func (b *defaultTransactionPlainTextDataTableBuilder) generateDataLineFormat() string {
var ret strings.Builder
for i := 0; i < len(b.columns); i++ {
@@ -156,7 +156,7 @@ func (b *ezBookKeepingTransactionPlainTextDataTableBuilder) generateDataLineForm
return ret.String()
}
func createNewezbookkeepingPlainTextDataTable(content string, columnSeparator string, lineSeparator string) (*ezBookKeepingPlainTextDataTable, error) {
func createNewDefaultPlainTextDataTable(content string, columnSeparator string, lineSeparator string) (*defaultPlainTextDataTable, error) {
allLines := strings.Split(content, lineSeparator)
if len(allLines) < 2 {
@@ -167,7 +167,7 @@ func createNewezbookkeepingPlainTextDataTable(content string, columnSeparator st
headerLine = strings.ReplaceAll(headerLine, "\r", "")
headerLineItems := strings.Split(headerLine, columnSeparator)
return &ezBookKeepingPlainTextDataTable{
return &defaultPlainTextDataTable{
columnSeparator: columnSeparator,
lineSeparator: lineSeparator,
allLines: allLines,
@@ -175,11 +175,11 @@ func createNewezbookkeepingPlainTextDataTable(content string, columnSeparator st
}, nil
}
func createNewezbookkeepingTransactionPlainTextDataTableBuilder(transactionCount int, columns []datatable.TransactionDataTableColumn, dataColumnNameMapping map[datatable.TransactionDataTableColumn]string, columnSeparator string, lineSeparator string) *ezBookKeepingTransactionPlainTextDataTableBuilder {
func createNewDefaultTransactionPlainTextDataTableBuilder(transactionCount int, columns []datatable.TransactionDataTableColumn, dataColumnNameMapping map[datatable.TransactionDataTableColumn]string, columnSeparator string, lineSeparator string) *defaultTransactionPlainTextDataTableBuilder {
var builder strings.Builder
builder.Grow(transactionCount * 100)
dataTableBuilder := &ezBookKeepingTransactionPlainTextDataTableBuilder{
dataTableBuilder := &defaultTransactionPlainTextDataTableBuilder{
columnSeparator: columnSeparator,
lineSeparator: lineSeparator,
columns: columns,
@@ -1,15 +0,0 @@
package _default
// ezBookKeepingTransactionDataCSVFileConverter defines the structure of CSV file converter
type ezBookKeepingTransactionDataCSVFileConverter struct {
ezBookKeepingTransactionDataPlainTextConverter
}
// Initialize an ezbookkeeping transaction data csv file converter singleton instance
var (
EzBookKeepingTransactionDataCSVFileConverter = &ezBookKeepingTransactionDataCSVFileConverter{
ezBookKeepingTransactionDataPlainTextConverter{
columnSeparator: ",",
},
}
)
@@ -1,15 +0,0 @@
package _default
// ezBookKeepingTransactionDataTSVFileConverter defines the structure of TSV file converter
type ezBookKeepingTransactionDataTSVFileConverter struct {
ezBookKeepingTransactionDataPlainTextConverter
}
// Initialize an ezbookkeeping transaction data tsv file converter singleton instance
var (
EzBookKeepingTransactionDataTSVFileConverter = &ezBookKeepingTransactionDataTSVFileConverter{
ezBookKeepingTransactionDataPlainTextConverter{
columnSeparator: "\t",
},
}
)
@@ -1,34 +1,35 @@
package datatable
package excel
import (
"bytes"
"github.com/shakinm/xlsReader/xls"
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
"github.com/mayswind/ezbookkeeping/pkg/errs"
)
// DefaultExcelFileImportedDataTable defines the structure of default excel file data table
type DefaultExcelFileImportedDataTable struct {
// ExcelFileImportedDataTable defines the structure of excel file data table
type ExcelFileImportedDataTable struct {
workbook *xls.Workbook
headerLineColumnNames []string
}
// DefaultExcelFileDataRow defines the structure of default excel file data table row
type DefaultExcelFileDataRow struct {
// ExcelFileDataRow defines the structure of excel file data table row
type ExcelFileDataRow struct {
sheet *xls.Sheet
rowIndex int
}
// DefaultExcelFileDataRowIterator defines the structure of default excel file data table row iterator
type DefaultExcelFileDataRowIterator struct {
dataTable *DefaultExcelFileImportedDataTable
// ExcelFileDataRowIterator defines the structure of excel file data table row iterator
type ExcelFileDataRowIterator struct {
dataTable *ExcelFileImportedDataTable
currentTableIndex int
currentRowIndexInTable int
}
// DataRowCount returns the total count of data row
func (t *DefaultExcelFileImportedDataTable) DataRowCount() int {
func (t *ExcelFileImportedDataTable) DataRowCount() int {
allSheets := t.workbook.GetSheets()
totalDataRowCount := 0
@@ -46,13 +47,13 @@ func (t *DefaultExcelFileImportedDataTable) DataRowCount() int {
}
// HeaderColumnNames returns the header column name list
func (t *DefaultExcelFileImportedDataTable) HeaderColumnNames() []string {
func (t *ExcelFileImportedDataTable) HeaderColumnNames() []string {
return t.headerLineColumnNames
}
// DataRowIterator returns the iterator of data row
func (t *DefaultExcelFileImportedDataTable) DataRowIterator() ImportedDataRowIterator {
return &DefaultExcelFileDataRowIterator{
func (t *ExcelFileImportedDataTable) DataRowIterator() datatable.ImportedDataRowIterator {
return &ExcelFileDataRowIterator{
dataTable: t,
currentTableIndex: 0,
currentRowIndexInTable: 0,
@@ -60,7 +61,7 @@ func (t *DefaultExcelFileImportedDataTable) DataRowIterator() ImportedDataRowIte
}
// ColumnCount returns the total count of column in this data row
func (r *DefaultExcelFileDataRow) ColumnCount() int {
func (r *ExcelFileDataRow) ColumnCount() int {
row, err := r.sheet.GetRow(r.rowIndex)
if err != nil {
@@ -71,7 +72,7 @@ func (r *DefaultExcelFileDataRow) ColumnCount() int {
}
// GetData returns the data in the specified column index
func (r *DefaultExcelFileDataRow) GetData(columnIndex int) string {
func (r *ExcelFileDataRow) GetData(columnIndex int) string {
row, err := r.sheet.GetRow(r.rowIndex)
if err != nil {
@@ -88,7 +89,7 @@ func (r *DefaultExcelFileDataRow) GetData(columnIndex int) string {
}
// HasNext returns whether the iterator does not reach the end
func (t *DefaultExcelFileDataRowIterator) HasNext() bool {
func (t *ExcelFileDataRowIterator) HasNext() bool {
allSheets := t.dataTable.workbook.GetSheets()
if t.currentTableIndex >= len(allSheets) {
@@ -115,7 +116,7 @@ func (t *DefaultExcelFileDataRowIterator) HasNext() bool {
}
// Next returns the next imported data row
func (t *DefaultExcelFileDataRowIterator) Next() ImportedDataRow {
func (t *ExcelFileDataRowIterator) Next() datatable.ImportedDataRow {
allSheets := t.dataTable.workbook.GetSheets()
currentRowIndexInTable := t.currentRowIndexInTable
@@ -143,14 +144,14 @@ func (t *DefaultExcelFileDataRowIterator) Next() ImportedDataRow {
return nil
}
return &DefaultExcelFileDataRow{
return &ExcelFileDataRow{
sheet: &currentSheet,
rowIndex: t.currentRowIndexInTable,
}
}
// CreateNewDefaultExcelFileImportedDataTable returns default excel xls data table by file binary data
func CreateNewDefaultExcelFileImportedDataTable(data []byte) (*DefaultExcelFileImportedDataTable, error) {
// CreateNewExcelFileImportedDataTable returns excel xls data table by file binary data
func CreateNewExcelFileImportedDataTable(data []byte) (*ExcelFileImportedDataTable, error) {
reader := bytes.NewReader(data)
workbook, err := xls.OpenReader(reader)
@@ -197,7 +198,7 @@ func CreateNewDefaultExcelFileImportedDataTable(data []byte) (*DefaultExcelFileI
}
}
return &DefaultExcelFileImportedDataTable{
return &ExcelFileImportedDataTable{
workbook: &workbook,
headerLineColumnNames: headerRowItems,
}, nil
@@ -21,16 +21,16 @@ const feideeMymoneyCsvFileTransactionTypeExpenseText = "支出"
const feideeMymoneyCsvFileTransactionTypeTransferInText = "转入"
const feideeMymoneyCsvFileTransactionTypeTransferOutText = "转出"
// feideeMymoneyTransactionDataCsvImporter defines the structure of feidee mymoney csv importer for transaction data
type feideeMymoneyTransactionDataCsvImporter struct{}
// feideeMymoneyAppTransactionDataCsvFileImporter defines the structure of feidee mymoney app csv importer for transaction data
type feideeMymoneyAppTransactionDataCsvFileImporter struct{}
// Initialize a feidee mymoney transaction data csv file importer singleton instance
// Initialize a feidee mymoney app transaction data csv file importer singleton instance
var (
FeideeMymoneyTransactionDataCsvImporter = &feideeMymoneyTransactionDataCsvImporter{}
FeideeMymoneyAppTransactionDataCsvFileImporter = &feideeMymoneyAppTransactionDataCsvFileImporter{}
)
// ParseImportedData returns the imported data by parsing the feidee mymoney transaction csv data
func (c *feideeMymoneyTransactionDataCsvImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
// ParseImportedData returns the imported data by parsing the feidee mymoney app transaction csv data
func (c *feideeMymoneyAppTransactionDataCsvFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
content := string(data)
if strings.Index(content, feideeMymoneyTransactionDataCsvFileHeader) != 0 && strings.Index(content, feideeMymoneyTransactionDataCsvFileHeaderWithUtf8Bom) != 0 {
@@ -44,7 +44,7 @@ func (c *feideeMymoneyTransactionDataCsvImporter) ParseImportedData(ctx core.Con
}
if len(allLines) < 2 {
log.Errorf(ctx, "[feidee_mymoney_transaction_data_csv_file_importer.ParseImportedData] cannot parse import data for user \"uid:%d\", because data table row count is less 1", user.Uid)
log.Errorf(ctx, "[feidee_mymoney_app_transaction_data_csv_file_importer.ParseImportedData] cannot parse import data for user \"uid:%d\", because data table row count is less 1", user.Uid)
return nil, nil, nil, nil, nil, nil, errs.ErrNotFoundTransactionDataInFile
}
@@ -67,7 +67,7 @@ func (c *feideeMymoneyTransactionDataCsvImporter) ParseImportedData(ctx core.Con
if !timeColumnExists || !typeColumnExists || !subCategoryColumnExists ||
!accountColumnExists || !amountColumnExists || !relatedIdColumnExists {
log.Errorf(ctx, "[feidee_mymoney_transaction_data_csv_file_importer.ParseImportedData] cannot parse import data for user \"uid:%d\", because missing essential columns in header row", user.Uid)
log.Errorf(ctx, "[feidee_mymoney_app_transaction_data_csv_file_importer.ParseImportedData] cannot parse import data for user \"uid:%d\", because missing essential columns in header row", user.Uid)
return nil, nil, nil, nil, nil, nil, errs.ErrMissingRequiredFieldInHeaderRow
}
@@ -107,7 +107,7 @@ func (c *feideeMymoneyTransactionDataCsvImporter) ParseImportedData(ctx core.Con
items := allLines[i]
if len(items) < len(headerLineItems) {
log.Errorf(ctx, "[feidee_mymoney_transaction_data_csv_file_importer.ParseImportedData] cannot parse row \"index:%d\" for user \"uid:%d\", because may missing some columns (column count %d in data row is less than header column count %d)", i, user.Uid, len(items), len(headerLineItems))
log.Errorf(ctx, "[feidee_mymoney_app_transaction_data_csv_file_importer.ParseImportedData] cannot parse row \"index:%d\" for user \"uid:%d\", because may missing some columns (column count %d in data row is less than header column count %d)", i, user.Uid, len(items), len(headerLineItems))
return nil, nil, nil, nil, nil, nil, errs.ErrFewerFieldsInDataRowThanInHeaderRow
}
@@ -149,7 +149,7 @@ func (c *feideeMymoneyTransactionDataCsvImporter) ParseImportedData(ctx core.Con
dataTable.Add(data)
} else if transactionType == feideeMymoneyCsvFileTransactionTypeTransferInText || transactionType == feideeMymoneyCsvFileTransactionTypeTransferOutText {
if relatedId == "" {
log.Errorf(ctx, "[feidee_mymoney_transaction_data_csv_file_importer.ParseImportedData] transfer transaction has blank related id in row \"index:%d\" for user \"uid:%d\"", i, user.Uid)
log.Errorf(ctx, "[feidee_mymoney_app_transaction_data_csv_file_importer.ParseImportedData] transfer transaction has blank related id in row \"index:%d\" for user \"uid:%d\"", i, user.Uid)
return nil, nil, nil, nil, nil, nil, errs.ErrRelatedIdCannotBeBlank
}
@@ -175,17 +175,17 @@ func (c *feideeMymoneyTransactionDataCsvImporter) ParseImportedData(ctx core.Con
dataTable.Add(data)
delete(transferTransactionsMap, relatedId)
} else {
log.Errorf(ctx, "[feidee_mymoney_transaction_data_csv_file_importer.ParseImportedData] transfer transaction type \"%s\" is not expected in row \"index:%d\" for user \"uid:%d\"", transactionType, i, user.Uid)
log.Errorf(ctx, "[feidee_mymoney_app_transaction_data_csv_file_importer.ParseImportedData] transfer transaction type \"%s\" is not expected in row \"index:%d\" for user \"uid:%d\"", transactionType, i, user.Uid)
return nil, nil, nil, nil, nil, nil, errs.ErrTransactionTypeInvalid
}
} else {
log.Errorf(ctx, "[feidee_mymoney_transaction_data_csv_file_importer.ParseImportedData] cannot parse transaction type \"%s\" in row \"index:%d\" for user \"uid:%d\"", transactionType, i, user.Uid)
log.Errorf(ctx, "[feidee_mymoney_app_transaction_data_csv_file_importer.ParseImportedData] cannot parse transaction type \"%s\" in row \"index:%d\" for user \"uid:%d\"", transactionType, i, user.Uid)
return nil, nil, nil, nil, nil, nil, errs.ErrTransactionTypeInvalid
}
}
if len(transferTransactionsMap) > 0 {
log.Errorf(ctx, "[feidee_mymoney_transaction_data_csv_file_importer.ParseImportedData] there are %d transactions (related id is %s) which don't have related records", len(transferTransactionsMap), c.getRelatedIds(transferTransactionsMap))
log.Errorf(ctx, "[feidee_mymoney_app_transaction_data_csv_file_importer.ParseImportedData] there are %d transactions (related id is %s) which don't have related records", len(transferTransactionsMap), c.getRelatedIds(transferTransactionsMap))
return nil, nil, nil, nil, nil, nil, errs.ErrFoundRecordNotHasRelatedRecord
}
@@ -194,7 +194,7 @@ func (c *feideeMymoneyTransactionDataCsvImporter) ParseImportedData(ctx core.Con
return dataTableImporter.ParseImportedData(ctx, user, dataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
}
func (c *feideeMymoneyTransactionDataCsvImporter) parseAllLinesFromCsvData(ctx core.Context, content string) ([][]string, error) {
func (c *feideeMymoneyAppTransactionDataCsvFileImporter) parseAllLinesFromCsvData(ctx core.Context, content string) ([][]string, error) {
csvReader := csv.NewReader(strings.NewReader(content))
csvReader.FieldsPerRecord = -1
@@ -209,7 +209,7 @@ func (c *feideeMymoneyTransactionDataCsvImporter) parseAllLinesFromCsvData(ctx c
}
if err != nil {
log.Errorf(ctx, "[feidee_mymoney_transaction_data_csv_file_importer.parseAllLinesFromCsvData] cannot parse feidee mymoney csv data, because %s", err.Error())
log.Errorf(ctx, "[feidee_mymoney_app_transaction_data_csv_file_importer.parseAllLinesFromCsvData] cannot parse feidee mymoney csv data, because %s", err.Error())
return nil, errs.ErrInvalidCSVFile
}
@@ -220,7 +220,7 @@ func (c *feideeMymoneyTransactionDataCsvImporter) parseAllLinesFromCsvData(ctx c
hasFileHeader = true
continue
} else {
log.Warnf(ctx, "[feidee_mymoney_transaction_data_csv_file_importer.parseAllLinesFromCsvData] read unexpected line before read file header, line content is %s", strings.Join(items, ","))
log.Warnf(ctx, "[feidee_mymoney_app_transaction_data_csv_file_importer.parseAllLinesFromCsvData] read unexpected line before read file header, line content is %s", strings.Join(items, ","))
}
}
@@ -230,7 +230,7 @@ func (c *feideeMymoneyTransactionDataCsvImporter) parseAllLinesFromCsvData(ctx c
return allLines, nil
}
func (c *feideeMymoneyTransactionDataCsvImporter) parseTransactionData(
func (c *feideeMymoneyAppTransactionDataCsvFileImporter) parseTransactionData(
items []string,
timeColumnIdx int,
timeColumnExists bool,
@@ -293,7 +293,7 @@ func (c *feideeMymoneyTransactionDataCsvImporter) parseTransactionData(
return data, relatedId
}
func (c *feideeMymoneyTransactionDataCsvImporter) getRelatedIds(transferTransactionsMap map[string]map[datatable.TransactionDataTableColumn]string) string {
func (c *feideeMymoneyAppTransactionDataCsvFileImporter) getRelatedIds(transferTransactionsMap map[string]map[datatable.TransactionDataTableColumn]string) string {
builder := strings.Builder{}
for relatedId := range transferTransactionsMap {
@@ -13,7 +13,7 @@ import (
)
func TestFeideeMymoneyCsvFileImporterParseImportedData_MinimumValidData(t *testing.T) {
converter := FeideeMymoneyTransactionDataCsvImporter
converter := FeideeMymoneyAppTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -110,7 +110,7 @@ func TestFeideeMymoneyCsvFileImporterParseImportedData_MinimumValidData(t *testi
}
func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseInvalidTime(t *testing.T) {
converter := FeideeMymoneyTransactionDataCsvImporter
converter := FeideeMymoneyAppTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -130,7 +130,7 @@ func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseInvalidTime(t *testi
}
func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseInvalidType(t *testing.T) {
converter := FeideeMymoneyTransactionDataCsvImporter
converter := FeideeMymoneyAppTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -145,7 +145,7 @@ func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseInvalidType(t *testi
}
func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseValidAccountCurrency(t *testing.T) {
converter := FeideeMymoneyTransactionDataCsvImporter
converter := FeideeMymoneyAppTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -174,7 +174,7 @@ func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseValidAccountCurrency
}
func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseInvalidAccountCurrency(t *testing.T) {
converter := FeideeMymoneyTransactionDataCsvImporter
converter := FeideeMymoneyAppTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -198,7 +198,7 @@ func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseInvalidAccountCurren
}
func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseNotSupportedCurrency(t *testing.T) {
converter := FeideeMymoneyTransactionDataCsvImporter
converter := FeideeMymoneyAppTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -225,7 +225,7 @@ func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseNotSupportedCurrency
}
func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseInvalidAmount(t *testing.T) {
converter := FeideeMymoneyTransactionDataCsvImporter
converter := FeideeMymoneyAppTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -252,7 +252,7 @@ func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseInvalidAmount(t *tes
}
func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseDescription(t *testing.T) {
converter := FeideeMymoneyTransactionDataCsvImporter
converter := FeideeMymoneyAppTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -271,7 +271,7 @@ func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseDescription(t *testi
}
func TestFeideeMymoneyCsvFileImporterParseImportedData_InvalidRelatedId(t *testing.T) {
converter := FeideeMymoneyTransactionDataCsvImporter
converter := FeideeMymoneyAppTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -297,7 +297,7 @@ func TestFeideeMymoneyCsvFileImporterParseImportedData_InvalidRelatedId(t *testi
}
func TestFeideeMymoneyCsvFileImporterParseImportedData_MissingFileHeader(t *testing.T) {
converter := FeideeMymoneyTransactionDataCsvImporter
converter := FeideeMymoneyAppTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -313,7 +313,7 @@ func TestFeideeMymoneyCsvFileImporterParseImportedData_MissingFileHeader(t *test
}
func TestFeideeMymoneyCsvFileImporterParseImportedData_MissingRequiredColumn(t *testing.T) {
converter := FeideeMymoneyTransactionDataCsvImporter
converter := FeideeMymoneyAppTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -1,32 +0,0 @@
package feidee
import (
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/models"
)
// feideeMymoneyTransactionDataXlsImporter defines the structure of feidee mymoney xls importer for transaction data
type feideeMymoneyTransactionDataXlsImporter struct {
datatable.DataTableTransactionDataImporter
}
// Initialize a feidee mymoney transaction data xls file importer singleton instance
var (
FeideeMymoneyTransactionDataXlsImporter = &feideeMymoneyTransactionDataXlsImporter{}
)
// ParseImportedData returns the imported data by parsing the feidee mymoney transaction xls data
func (c *feideeMymoneyTransactionDataXlsImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
dataTable, err := datatable.CreateNewDefaultExcelFileImportedDataTable(data)
if err != nil {
return nil, nil, nil, nil, nil, nil, err
}
transactionRowParser := createFeideeMymoneyTransactionDataRowParser()
transactionDataTable := datatable.CreateImportedTransactionDataTableWithRowParser(dataTable, feideeMymoneyDataColumnNameMapping, transactionRowParser)
dataTableImporter := datatable.CreateNewSimpleImporter(feideeMymoneyTransactionTypeNameMapping)
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
}
@@ -0,0 +1,33 @@
package feidee
import (
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
"github.com/mayswind/ezbookkeeping/pkg/converters/excel"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/models"
)
// feideeMymoneyWebTransactionDataXlsFileImporter defines the structure of feidee mymoney (web) xls importer for transaction data
type feideeMymoneyWebTransactionDataXlsFileImporter struct {
datatable.DataTableTransactionDataImporter
}
// Initialize a feidee mymoney (web) transaction data xls file importer singleton instance
var (
FeideeMymoneyWebTransactionDataXlsFileImporter = &feideeMymoneyWebTransactionDataXlsFileImporter{}
)
// ParseImportedData returns the imported data by parsing the feidee mymoney (web) transaction xls data
func (c *feideeMymoneyWebTransactionDataXlsFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
dataTable, err := excel.CreateNewExcelFileImportedDataTable(data)
if err != nil {
return nil, nil, nil, nil, nil, nil, err
}
transactionRowParser := createFeideeMymoneyTransactionDataRowParser()
transactionDataTable := datatable.CreateImportedTransactionDataTableWithRowParser(dataTable, feideeMymoneyDataColumnNameMapping, transactionRowParser)
dataTableImporter := datatable.CreateNewSimpleImporter(feideeMymoneyTransactionTypeNameMapping)
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
}
@@ -13,7 +13,7 @@ import (
)
func TestFeideeMymoneyTransactionDataXlsImporterParseImportedData_MinimumValidData(t *testing.T) {
converter := FeideeMymoneyTransactionDataXlsImporter
converter := FeideeMymoneyWebTransactionDataXlsFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -3,6 +3,7 @@ package fireflyIII
import (
"bytes"
"github.com/mayswind/ezbookkeeping/pkg/converters/csv"
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/models"
@@ -29,18 +30,18 @@ var fireflyIIITransactionTypeNameMapping = map[models.TransactionType]string{
models.TRANSACTION_TYPE_TRANSFER: "Transfer",
}
// fireflyIIITransactionDataCsvImporter defines the structure of firefly III csv importer for transaction data
type fireflyIIITransactionDataCsvImporter struct{}
// fireflyIIITransactionDataCsvFileImporter defines the structure of firefly III csv importer for transaction data
type fireflyIIITransactionDataCsvFileImporter struct{}
// Initialize a firefly III transaction data csv file importer singleton instance
var (
FireflyIIITransactionDataCsvImporter = &fireflyIIITransactionDataCsvImporter{}
FireflyIIITransactionDataCsvFileImporter = &fireflyIIITransactionDataCsvFileImporter{}
)
// ParseImportedData returns the imported data by parsing the firefly III transaction csv data
func (c *fireflyIIITransactionDataCsvImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
func (c *fireflyIIITransactionDataCsvFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
reader := bytes.NewReader(data)
dataTable, err := datatable.CreateNewDefaultCsvDataTable(ctx, reader)
dataTable, err := csv.CreateNewCsvDataTable(ctx, reader)
if err != nil {
return nil, nil, nil, nil, nil, nil, err
@@ -12,7 +12,7 @@ import (
)
func TestFireFlyIIICsvFileConverterParseImportedData_MinimumValidData(t *testing.T) {
converter := FireflyIIITransactionDataCsvImporter
converter := FireflyIIITransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -83,7 +83,7 @@ func TestFireFlyIIICsvFileConverterParseImportedData_MinimumValidData(t *testing
}
func TestFireFlyIIICsvFileConverterParseImportedData_ParseInvalidTime(t *testing.T) {
converter := FireflyIIITransactionDataCsvImporter
converter := FireflyIIITransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -101,7 +101,7 @@ func TestFireFlyIIICsvFileConverterParseImportedData_ParseInvalidTime(t *testing
}
func TestFireFlyIIICsvFileConverterParseImportedData_ParseInvalidType(t *testing.T) {
converter := FireflyIIITransactionDataCsvImporter
converter := FireflyIIITransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -115,7 +115,7 @@ func TestFireFlyIIICsvFileConverterParseImportedData_ParseInvalidType(t *testing
}
func TestFireFlyIIICsvFileConverterParseImportedData_ParseValidTimezone(t *testing.T) {
converter := FireflyIIITransactionDataCsvImporter
converter := FireflyIIITransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -143,7 +143,7 @@ func TestFireFlyIIICsvFileConverterParseImportedData_ParseValidTimezone(t *testi
}
func TestFireFlyIIICsvFileConverterParseImportedData_ParseValidAccountCurrency(t *testing.T) {
converter := FireflyIIITransactionDataCsvImporter
converter := FireflyIIITransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -170,7 +170,7 @@ func TestFireFlyIIICsvFileConverterParseImportedData_ParseValidAccountCurrency(t
}
func TestFireFlyIIICsvFileConverterParseImportedData_ParseInvalidAccountCurrency(t *testing.T) {
converter := FireflyIIITransactionDataCsvImporter
converter := FireflyIIITransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -190,7 +190,7 @@ func TestFireFlyIIICsvFileConverterParseImportedData_ParseInvalidAccountCurrency
}
func TestFireFlyIIICsvFileConverterParseImportedData_ParseNotSupportedCurrency(t *testing.T) {
converter := FireflyIIITransactionDataCsvImporter
converter := FireflyIIITransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -208,7 +208,7 @@ func TestFireFlyIIICsvFileConverterParseImportedData_ParseNotSupportedCurrency(t
}
func TestFireFlyIIICsvFileConverterParseImportedData_ParseInvalidAmount(t *testing.T) {
converter := FireflyIIITransactionDataCsvImporter
converter := FireflyIIITransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -226,7 +226,7 @@ func TestFireFlyIIICsvFileConverterParseImportedData_ParseInvalidAmount(t *testi
}
func TestFireFlyIIICsvFileConverterParseImportedData_ParseDescription(t *testing.T) {
converter := FireflyIIITransactionDataCsvImporter
converter := FireflyIIITransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -243,7 +243,7 @@ func TestFireFlyIIICsvFileConverterParseImportedData_ParseDescription(t *testing
}
func TestFireFlyIIICsvFileConverterParseImportedData_MissingFileHeader(t *testing.T) {
converter := FireflyIIITransactionDataCsvImporter
converter := FireflyIIITransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -256,7 +256,7 @@ func TestFireFlyIIICsvFileConverterParseImportedData_MissingFileHeader(t *testin
}
func TestFireFlyIIICsvFileConverterParseImportedData_MissingRequiredColumn(t *testing.T) {
converter := FireflyIIITransactionDataCsvImporter
converter := FireflyIIITransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
+10 -10
View File
@@ -13,9 +13,9 @@ import (
// GetTransactionDataExporter returns the transaction data exporter according to the file type
func GetTransactionDataExporter(fileType string) base.TransactionDataExporter {
if fileType == "csv" {
return _default.EzBookKeepingTransactionDataCSVFileConverter
return _default.DefaultTransactionDataCSVFileConverter
} else if fileType == "tsv" {
return _default.EzBookKeepingTransactionDataTSVFileConverter
return _default.DefaultTransactionDataTSVFileConverter
} else {
return nil
}
@@ -24,21 +24,21 @@ func GetTransactionDataExporter(fileType string) base.TransactionDataExporter {
// GetTransactionDataImporter returns the transaction data importer according to the file type
func GetTransactionDataImporter(fileType string) (base.TransactionDataImporter, error) {
if fileType == "ezbookkeeping_csv" {
return _default.EzBookKeepingTransactionDataCSVFileConverter, nil
return _default.DefaultTransactionDataCSVFileConverter, nil
} else if fileType == "ezbookkeeping_tsv" {
return _default.EzBookKeepingTransactionDataTSVFileConverter, nil
return _default.DefaultTransactionDataTSVFileConverter, nil
} else if fileType == "firefly_iii_csv" {
return fireflyIII.FireflyIIITransactionDataCsvImporter, nil
return fireflyIII.FireflyIIITransactionDataCsvFileImporter, nil
} else if fileType == "feidee_mymoney_csv" {
return feidee.FeideeMymoneyTransactionDataCsvImporter, nil
return feidee.FeideeMymoneyAppTransactionDataCsvFileImporter, nil
} else if fileType == "feidee_mymoney_xls" {
return feidee.FeideeMymoneyTransactionDataXlsImporter, nil
return feidee.FeideeMymoneyWebTransactionDataXlsFileImporter, nil
} else if fileType == "alipay_app_csv" {
return alipay.AlipayAppTransactionDataCsvImporter, nil
return alipay.AlipayAppTransactionDataCsvFileImporter, nil
} else if fileType == "alipay_web_csv" {
return alipay.AlipayWebTransactionDataCsvImporter, nil
return alipay.AlipayWebTransactionDataCsvFileImporter, nil
} else if fileType == "wechat_pay_app_csv" {
return wechat.WeChatPayTransactionDataCsvImporter, nil
return wechat.WeChatPayTransactionDataCsvFileImporter, nil
} else {
return nil, errs.ErrImportFileTypeNotSupported
}
@@ -122,7 +122,7 @@ func (t *wechatPayTransactionDataRowIterator) Next(ctx core.Context, user *model
rowItems[t.dataTable.originalTypeColumnIndex] != wechatPayTransactionTypeNameMapping[models.TRANSACTION_TYPE_INCOME] &&
rowItems[t.dataTable.originalTypeColumnIndex] != wechatPayTransactionTypeNameMapping[models.TRANSACTION_TYPE_EXPENSE] &&
rowItems[t.dataTable.originalTypeColumnIndex] != wechatPayTransactionTypeNameMapping[models.TRANSACTION_TYPE_TRANSFER] {
log.Warnf(ctx, "[wechat_pay_transaction_data_plain_text_data_table.Next] skip parsing transaction in row \"index:%d\", because type is \"%s\"", t.currentIndex, rowItems[t.dataTable.originalTypeColumnIndex])
log.Warnf(ctx, "[wechat_pay_transaction_csv_data_table.Next] skip parsing transaction in row \"index:%d\", because type is \"%s\"", t.currentIndex, rowItems[t.dataTable.originalTypeColumnIndex])
isValid = false
}
@@ -133,7 +133,7 @@ func (t *wechatPayTransactionDataRowIterator) Next(ctx core.Context, user *model
finalItems, errMsg = t.dataTable.parseTransactionData(ctx, user, rowItems)
if finalItems == nil {
log.Warnf(ctx, "[wechat_pay_transaction_data_plain_text_data_table.Next] skip parsing transaction in row \"index:%d\", because %s", t.currentIndex, errMsg)
log.Warnf(ctx, "[wechat_pay_transaction_csv_data_table.Next] skip parsing transaction in row \"index:%d\", because %s", t.currentIndex, errMsg)
isValid = false
}
}
@@ -245,7 +245,7 @@ func createNewWeChatPayTransactionDataTable(ctx core.Context, reader io.Reader)
}
if len(allOriginalLines) < 2 {
log.Errorf(ctx, "[wechat_pay_transaction_data_plain_text_data_table.createNewwechatPayTransactionPlainTextDataTable] cannot parse import data, because data table row count is less 1")
log.Errorf(ctx, "[wechat_pay_transaction_csv_data_table.createNewwechatPayTransactionPlainTextDataTable] cannot parse import data, because data table row count is less 1")
return nil, errs.ErrNotFoundTransactionDataInFile
}
@@ -267,7 +267,7 @@ func createNewWeChatPayTransactionDataTable(ctx core.Context, reader io.Reader)
descriptionColumnIdx, descriptionColumnExists := originalHeaderItemMap["备注"]
if !timeColumnExists || !categoryColumnExists || !typeColumnExists || !amountColumnExists || !statusColumnExists {
log.Errorf(ctx, "[wechat_pay_transaction_data_plain_text_data_table.createNewwechatPayTransactionPlainTextDataTable] cannot parse wechat pay csv data, because missing essential columns in header row")
log.Errorf(ctx, "[wechat_pay_transaction_csv_data_table.createNewwechatPayTransactionPlainTextDataTable] cannot parse wechat pay csv data, because missing essential columns in header row")
return nil, errs.ErrMissingRequiredFieldInHeaderRow
}
@@ -318,7 +318,7 @@ func parseAllLinesFromWechatPayTransactionPlainText(ctx core.Context, reader io.
}
if err != nil {
log.Errorf(ctx, "[wechat_pay_transaction_data_plain_text_data_table.parseAllLinesFromWechatPayTransactionPlainText] cannot parse wechat pay csv data, because %s", err.Error())
log.Errorf(ctx, "[wechat_pay_transaction_csv_data_table.parseAllLinesFromWechatPayTransactionPlainText] cannot parse wechat pay csv data, because %s", err.Error())
return nil, errs.ErrInvalidCSVFile
}
@@ -329,7 +329,7 @@ func parseAllLinesFromWechatPayTransactionPlainText(ctx core.Context, reader io.
hasFileHeader = true
continue
} else {
log.Warnf(ctx, "[wechat_pay_transaction_data_plain_text_data_table.parseAllLinesFromWechatPayTransactionPlainText] read unexpected line before read file header, line content is %s", strings.Join(items, ","))
log.Warnf(ctx, "[wechat_pay_transaction_csv_data_table.parseAllLinesFromWechatPayTransactionPlainText] read unexpected line before read file header, line content is %s", strings.Join(items, ","))
continue
}
}
@@ -355,7 +355,7 @@ func parseAllLinesFromWechatPayTransactionPlainText(ctx core.Context, reader io.
}
if len(allOriginalLines) > 0 && len(items) < len(allOriginalLines[0]) {
log.Errorf(ctx, "[wechat_pay_transaction_data_plain_text_data_table.parseAllLinesFromWechatPayTransactionPlainText] cannot parse row \"index:%d\", because may missing some columns (column count %d in data row is less than header column count %d)", len(allOriginalLines), len(items), len(allOriginalLines[0]))
log.Errorf(ctx, "[wechat_pay_transaction_csv_data_table.parseAllLinesFromWechatPayTransactionPlainText] cannot parse row \"index:%d\", because may missing some columns (column count %d in data row is less than header column count %d)", len(allOriginalLines), len(items), len(allOriginalLines[0]))
return nil, errs.ErrFewerFieldsInDataRowThanInHeaderRow
}
@@ -14,19 +14,19 @@ var wechatPayTransactionTypeNameMapping = map[models.TransactionType]string{
models.TRANSACTION_TYPE_TRANSFER: "/",
}
// wechatPayTransactionDataCsvImporter defines the structure of wechatPay csv importer for transaction data
type wechatPayTransactionDataCsvImporter struct {
// wechatPayTransactionDataCsvFileImporter defines the structure of wechatPay csv importer for transaction data
type wechatPayTransactionDataCsvFileImporter struct {
fileHeaderLineBeginning string
dataHeaderStartContentBeginning string
}
// Initialize a webchat pay transaction data csv file importer singleton instance
var (
WeChatPayTransactionDataCsvImporter = &wechatPayTransactionDataCsvImporter{}
WeChatPayTransactionDataCsvFileImporter = &wechatPayTransactionDataCsvFileImporter{}
)
// ParseImportedData returns the imported data by parsing the wechat pay transaction csv data
func (c *wechatPayTransactionDataCsvImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
func (c *wechatPayTransactionDataCsvFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
reader := bytes.NewReader(data)
transactionDataTable, err := createNewWeChatPayTransactionDataTable(ctx, reader)
@@ -13,7 +13,7 @@ import (
)
func TestWeChatPayCsvFileImporterParseImportedData_MinimumValidData(t *testing.T) {
converter := WeChatPayTransactionDataCsvImporter
converter := WeChatPayTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -93,7 +93,7 @@ func TestWeChatPayCsvFileImporterParseImportedData_MinimumValidData(t *testing.T
}
func TestWeChatPayCsvFileImporterParseImportedData_ParseRefundTransaction(t *testing.T) {
converter := WeChatPayTransactionDataCsvImporter
converter := WeChatPayTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -120,7 +120,7 @@ func TestWeChatPayCsvFileImporterParseImportedData_ParseRefundTransaction(t *tes
}
func TestWeChatPayCsvFileImporterParseImportedData_ParseInvalidTime(t *testing.T) {
converter := WeChatPayTransactionDataCsvImporter
converter := WeChatPayTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -150,7 +150,7 @@ func TestWeChatPayCsvFileImporterParseImportedData_ParseInvalidTime(t *testing.T
}
func TestWeChatPayCsvFileImporterParseImportedData_ParseInvalidType(t *testing.T) {
converter := WeChatPayTransactionDataCsvImporter
converter := WeChatPayTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -170,7 +170,7 @@ func TestWeChatPayCsvFileImporterParseImportedData_ParseInvalidType(t *testing.T
}
func TestWeChatPayCsvFileImporterParseImportedData_ParseAccountName(t *testing.T) {
converter := WeChatPayTransactionDataCsvImporter
converter := WeChatPayTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -244,7 +244,7 @@ func TestWeChatPayCsvFileImporterParseImportedData_ParseAccountName(t *testing.T
}
func TestWeChatPayCsvFileImporterParseImportedData_ParseDescription(t *testing.T) {
converter := WeChatPayTransactionDataCsvImporter
converter := WeChatPayTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -278,7 +278,7 @@ func TestWeChatPayCsvFileImporterParseImportedData_ParseDescription(t *testing.T
}
func TestWeChatPayCsvFileImporterParseImportedData_MissingFileHeader(t *testing.T) {
converter := WeChatPayTransactionDataCsvImporter
converter := WeChatPayTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{
@@ -296,7 +296,7 @@ func TestWeChatPayCsvFileImporterParseImportedData_MissingFileHeader(t *testing.
}
func TestWeChatPayCsvFileImporterParseImportedData_MissingRequiredColumn(t *testing.T) {
converter := WeChatPayTransactionDataCsvImporter
converter := WeChatPayTransactionDataCsvFileImporter
context := core.NewNullContext()
user := &models.User{