Refactor: rename CSV "imported" data table types to "basic" data table types

This commit is contained in:
MaysWind
2025-06-18 23:27:37 +08:00
parent b6e96586a5
commit 4bab8db7c0
32 changed files with 627 additions and 605 deletions
@@ -0,0 +1,138 @@
package csv
import (
"encoding/csv"
"fmt"
"io"
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/errs"
"github.com/mayswind/ezbookkeeping/pkg/log"
)
// CsvFileBasicDataTable defines the structure of csv data table
type CsvFileBasicDataTable struct {
	allLines [][]string // all csv lines; the first line (if any) is the header row
}
// CsvFileBasicDataTableRow defines the structure of csv data table row
type CsvFileBasicDataTableRow struct {
	dataTable *CsvFileBasicDataTable // the table this row belongs to
	allItems  []string               // the column values of this row
}
// CsvFileBasicDataTableRowIterator defines the structure of csv data table row iterator
type CsvFileBasicDataTableRowIterator struct {
	dataTable    *CsvFileBasicDataTable // the table being iterated
	currentIndex int                    // index of the line the iterator currently points at (0 is the header line)
}
// DataRowCount returns the total count of data row
func (t *CsvFileBasicDataTable) DataRowCount() int {
	totalLineCount := len(t.allLines)

	if totalLineCount < 1 {
		return 0
	}

	// the first line is the header row, so it is not counted as a data row
	return totalLineCount - 1
}
// HeaderColumnNames returns the header column name list
func (t *CsvFileBasicDataTable) HeaderColumnNames() []string {
	// the header is the first line; there is none when the table is empty
	if len(t.allLines) >= 1 {
		return t.allLines[0]
	}

	return nil
}
// DataRowIterator returns the iterator of data row
func (t *CsvFileBasicDataTable) DataRowIterator() datatable.BasicDataTableRowIterator {
	iterator := &CsvFileBasicDataTableRowIterator{
		dataTable:    t,
		currentIndex: 0, // starts at the header line; Next advances past it
	}

	return iterator
}
// ColumnCount returns the total count of column in this data row
func (r *CsvFileBasicDataTableRow) ColumnCount() int {
	columnCount := len(r.allItems)
	return columnCount
}
// GetData returns the data in the specified column index, or an empty string
// when columnIndex is out of range (negative or beyond the last column)
func (r *CsvFileBasicDataTableRow) GetData(columnIndex int) string {
	// guard both bounds: the original checked only the upper bound,
	// so a negative index would panic on the slice access below
	if columnIndex < 0 || columnIndex >= len(r.allItems) {
		return ""
	}

	return r.allItems[columnIndex]
}
// HasNext returns whether the iterator does not reach the end
func (t *CsvFileBasicDataTableRowIterator) HasNext() bool {
	nextIndex := t.currentIndex + 1
	return nextIndex < len(t.dataTable.allLines)
}
// CurrentRowId returns current index
func (t *CsvFileBasicDataTableRowIterator) CurrentRowId() string {
	rowId := fmt.Sprintf("line#%d", t.currentIndex)
	return rowId
}
// Next returns the next basic data row, or nil when the end has been reached
func (t *CsvFileBasicDataTableRowIterator) Next() datatable.BasicDataTableRow {
	nextIndex := t.currentIndex + 1

	if nextIndex >= len(t.dataTable.allLines) {
		return nil
	}

	t.currentIndex = nextIndex

	return &CsvFileBasicDataTableRow{
		dataTable: t.dataTable,
		allItems:  t.dataTable.allLines[nextIndex],
	}
}
// CreateNewCsvBasicDataTable returns comma separated values data table by io readers
func CreateNewCsvBasicDataTable(ctx core.Context, reader io.Reader) (datatable.BasicDataTable, error) {
	const commaSeparator = ','
	return createNewCsvFileBasicDataTable(ctx, reader, commaSeparator)
}
// CreateNewCustomCsvBasicDataTable returns character separated values data table
// built from already-parsed lines (each inner slice is one row's column values)
func CreateNewCustomCsvBasicDataTable(allLines [][]string) datatable.BasicDataTable {
	return &CsvFileBasicDataTable{
		allLines: allLines,
	}
}
// createNewCsvFileBasicDataTable reads character separated values from the
// specified reader and returns the parsed data table, or ErrInvalidCSVFile
// when the content cannot be parsed
func createNewCsvFileBasicDataTable(ctx core.Context, reader io.Reader, separator rune) (*CsvFileBasicDataTable, error) {
	csvReader := csv.NewReader(reader)
	csvReader.Comma = separator
	csvReader.FieldsPerRecord = -1 // allow rows to have varying column counts

	allLines := make([][]string, 0)

	for {
		items, err := csvReader.Read()

		if err == io.EOF {
			break
		}

		if err != nil {
			// fixed stale log tag: it previously referenced the old function name "createNewCsvFileDataTable"
			log.Errorf(ctx, "[csv_file_basic_data_table.createNewCsvFileBasicDataTable] cannot parse csv data, because %s", err.Error())
			return nil, errs.ErrInvalidCSVFile
		}

		// a single empty field means the source line was blank; skip it
		if len(items) == 1 && items[0] == "" {
			continue
		}

		allLines = append(allLines, items)
	}

	return &CsvFileBasicDataTable{
		allLines: allLines,
	}, nil
}
@@ -9,8 +9,8 @@ import (
"github.com/mayswind/ezbookkeeping/pkg/core"
)
func TestCsvFileImportedDataTableDataRowCount(t *testing.T) {
datatable := CreateNewCustomCsvImportedDataTable([][]string{
func TestCsvFileBasicDataTableDataRowCount(t *testing.T) {
datatable := CreateNewCustomCsvBasicDataTable([][]string{
{"A1", "B1", "C1"},
{"A2", "B2", "C2"},
{"A3", "B3", "C3"},
@@ -19,22 +19,22 @@ func TestCsvFileImportedDataTableDataRowCount(t *testing.T) {
assert.Equal(t, 2, datatable.DataRowCount())
}
func TestCsvFileImportedDataTableDataRowCount_OnlyHeaderLine(t *testing.T) {
datatable := CreateNewCustomCsvImportedDataTable([][]string{
func TestCsvFileBasicDataTableDataRowCount_OnlyHeaderLine(t *testing.T) {
datatable := CreateNewCustomCsvBasicDataTable([][]string{
{"A1", "B1", "C1"},
})
assert.Equal(t, 0, datatable.DataRowCount())
}
func TestCsvFileImportedDataTableDataRowCount_EmptyContent(t *testing.T) {
datatable := CreateNewCustomCsvImportedDataTable([][]string{})
func TestCsvFileBasicDataTableDataRowCount_EmptyContent(t *testing.T) {
datatable := CreateNewCustomCsvBasicDataTable([][]string{})
assert.Equal(t, 0, datatable.DataRowCount())
}
func TestCsvFileImportedDataTableHeaderColumnNames(t *testing.T) {
datatable := CreateNewCustomCsvImportedDataTable([][]string{
func TestCsvFileBasicDataTableHeaderColumnNames(t *testing.T) {
datatable := CreateNewCustomCsvBasicDataTable([][]string{
{"A1", "B1", "C1"},
{"A2", "B2", "C2"},
{"A3", "B3", "C3"},
@@ -43,14 +43,14 @@ func TestCsvFileImportedDataTableHeaderColumnNames(t *testing.T) {
assert.EqualValues(t, []string{"A1", "B1", "C1"}, datatable.HeaderColumnNames())
}
func TestCsvFileImportedDataTableHeaderColumnNames_EmptyContent(t *testing.T) {
datatable := CreateNewCustomCsvImportedDataTable([][]string{})
func TestCsvFileBasicDataTableHeaderColumnNames_EmptyContent(t *testing.T) {
datatable := CreateNewCustomCsvBasicDataTable([][]string{})
assert.Nil(t, datatable.HeaderColumnNames())
}
func TestCsvFileImportedDataRowIterator(t *testing.T) {
datatable := CreateNewCustomCsvImportedDataTable([][]string{
func TestCsvFileBasicDataTableRowIterator(t *testing.T) {
datatable := CreateNewCustomCsvBasicDataTable([][]string{
{"A1", "B1", "C1"},
{"A2", "B2", "C2"},
{"A3", "B3", "C3"},
@@ -76,8 +76,8 @@ func TestCsvFileImportedDataRowIterator(t *testing.T) {
assert.False(t, iterator.HasNext())
}
func TestCsvFileImportedDataRowColumnCount(t *testing.T) {
datatable := CreateNewCustomCsvImportedDataTable([][]string{
func TestCsvFileBasicDataTableRowColumnCount(t *testing.T) {
datatable := CreateNewCustomCsvBasicDataTable([][]string{
{"A1", "B1", "C1"},
{"A2", "B2", "C2"},
{"A3", "B3", "C3"},
@@ -92,8 +92,8 @@ func TestCsvFileImportedDataRowColumnCount(t *testing.T) {
assert.EqualValues(t, 3, row2.ColumnCount())
}
func TestCsvFileImportedDataRowGetData(t *testing.T) {
datatable := CreateNewCustomCsvImportedDataTable([][]string{
func TestCsvFileBasicDataTableRowGetData(t *testing.T) {
datatable := CreateNewCustomCsvBasicDataTable([][]string{
{"A1", "B1", "C1"},
{"A2", "B2", "C2"},
{"A3", "B3", "C3"},
@@ -112,8 +112,8 @@ func TestCsvFileImportedDataRowGetData(t *testing.T) {
assert.Equal(t, "C3", row2.GetData(2))
}
func TestCsvFileImportedDataRowGetData_GetNotExistedColumnData(t *testing.T) {
datatable := CreateNewCustomCsvImportedDataTable([][]string{
func TestCsvFileBasicDataTableRowGetData_GetNotExistedColumnData(t *testing.T) {
datatable := CreateNewCustomCsvBasicDataTable([][]string{
{"A1", "B1", "C1"},
{"A2", "B2", "C2"},
{"A3", "B3", "C3"},
@@ -125,12 +125,12 @@ func TestCsvFileImportedDataRowGetData_GetNotExistedColumnData(t *testing.T) {
assert.Equal(t, "", row1.GetData(3))
}
func TestCreateNewCsvImportedDataTable(t *testing.T) {
func TestCreateNewCsvBasicDataTable(t *testing.T) {
context := core.NewNullContext()
reader := bytes.NewReader([]byte("A1,B1,C1\n" +
"A2,B2,C2\n" +
"A3,B3,C3\n"))
datatable, err := CreateNewCsvImportedDataTable(context, reader)
datatable, err := CreateNewCsvBasicDataTable(context, reader)
assert.Nil(t, err)
assert.Equal(t, 2, datatable.DataRowCount())
@@ -153,14 +153,14 @@ func TestCreateNewCsvImportedDataTable(t *testing.T) {
assert.False(t, iterator.HasNext())
}
func TestCreateNewCsvImportedDataTable_SkipBlankLine(t *testing.T) {
func TestCreateNewCsvBasicDataTable_SkipBlankLine(t *testing.T) {
context := core.NewNullContext()
reader := bytes.NewReader([]byte("\n" +
"A1,B1,C1\n" +
"A2,B2,C2\n" +
"\n" +
"A3,B3,C3\n"))
datatable, err := CreateNewCsvImportedDataTable(context, reader)
datatable, err := CreateNewCsvBasicDataTable(context, reader)
assert.Nil(t, err)
assert.Equal(t, 2, datatable.DataRowCount())
@@ -1,138 +0,0 @@
package csv
import (
"encoding/csv"
"fmt"
"io"
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
"github.com/mayswind/ezbookkeeping/pkg/core"
"github.com/mayswind/ezbookkeeping/pkg/errs"
"github.com/mayswind/ezbookkeeping/pkg/log"
)
// CsvFileImportedDataTable defines the structure of csv data table
type CsvFileImportedDataTable struct {
	allLines [][]string // all csv lines; the first line (if any) is the header row
}
// CsvFileImportedDataRow defines the structure of csv data table row
type CsvFileImportedDataRow struct {
	dataTable *CsvFileImportedDataTable // the table this row belongs to
	allItems  []string                  // the column values of this row
}
// CsvFileImportedDataRowIterator defines the structure of csv data table row iterator
type CsvFileImportedDataRowIterator struct {
	dataTable    *CsvFileImportedDataTable // the table being iterated
	currentIndex int                       // index of the line the iterator currently points at (0 is the header line)
}
// DataRowCount returns the total count of data row
func (t *CsvFileImportedDataTable) DataRowCount() int {
	if len(t.allLines) < 1 {
		return 0
	}

	// the first line is the header row, so it does not count as a data row
	return len(t.allLines) - 1
}
// HeaderColumnNames returns the header column name list,
// or nil when the table has no lines at all
func (t *CsvFileImportedDataTable) HeaderColumnNames() []string {
	if len(t.allLines) < 1 {
		return nil
	}

	// the header is the first parsed line
	return t.allLines[0]
}
// DataRowIterator returns the iterator of data row
func (t *CsvFileImportedDataTable) DataRowIterator() datatable.ImportedDataRowIterator {
	return &CsvFileImportedDataRowIterator{
		dataTable:    t,
		currentIndex: 0, // starts at the header line; Next advances past it
	}
}
// ColumnCount returns the total count of column in this data row
func (r *CsvFileImportedDataRow) ColumnCount() int {
	return len(r.allItems)
}
// GetData returns the data in the specified column index,
// or an empty string when columnIndex exceeds the last column.
// NOTE(review): only the upper bound is checked — a negative
// columnIndex would panic on the slice access below.
func (r *CsvFileImportedDataRow) GetData(columnIndex int) string {
	if columnIndex >= len(r.allItems) {
		return ""
	}

	return r.allItems[columnIndex]
}
// HasNext returns whether the iterator does not reach the end
func (t *CsvFileImportedDataRowIterator) HasNext() bool {
	// there is a next data row when at least one line exists past the current index
	return t.currentIndex+1 < len(t.dataTable.allLines)
}
// CurrentRowId returns current index, formatted as "line#<index>"
func (t *CsvFileImportedDataRowIterator) CurrentRowId() string {
	return fmt.Sprintf("line#%d", t.currentIndex)
}
// Next returns the next imported data row,
// or nil when the iterator has already reached the last line
func (t *CsvFileImportedDataRowIterator) Next() datatable.ImportedDataRow {
	if t.currentIndex+1 >= len(t.dataTable.allLines) {
		return nil
	}

	// advance first, then read the row the iterator now points at
	t.currentIndex++
	rowItems := t.dataTable.allLines[t.currentIndex]

	return &CsvFileImportedDataRow{
		dataTable: t.dataTable,
		allItems:  rowItems,
	}
}
// CreateNewCsvImportedDataTable returns comma separated values data table by io readers
func CreateNewCsvImportedDataTable(ctx core.Context, reader io.Reader) (*CsvFileImportedDataTable, error) {
	// ',' is the standard csv field separator
	return createNewCsvFileDataTable(ctx, reader, ',')
}
// CreateNewCustomCsvImportedDataTable returns character separated values data table
// built from already-parsed lines (each inner slice is one row's column values)
func CreateNewCustomCsvImportedDataTable(allLines [][]string) *CsvFileImportedDataTable {
	return &CsvFileImportedDataTable{
		allLines: allLines,
	}
}
// createNewCsvFileDataTable reads character separated values from the
// specified reader and returns the parsed data table, or ErrInvalidCSVFile
// when the content cannot be parsed
func createNewCsvFileDataTable(ctx core.Context, reader io.Reader, separator rune) (*CsvFileImportedDataTable, error) {
	csvReader := csv.NewReader(reader)
	csvReader.Comma = separator
	csvReader.FieldsPerRecord = -1 // allow rows to have varying column counts

	allLines := make([][]string, 0)

	for {
		items, err := csvReader.Read()

		if err == io.EOF {
			break
		}

		if err != nil {
			log.Errorf(ctx, "[csv_file_imported_data_table.createNewCsvFileDataTable] cannot parse csv data, because %s", err.Error())
			return nil, errs.ErrInvalidCSVFile
		}

		// a single empty field means the source line was blank; skip it
		if len(items) == 1 && items[0] == "" {
			continue
		}

		allLines = append(allLines, items)
	}

	return &CsvFileImportedDataTable{
		allLines: allLines,
	}, nil
}