Compare commits
441 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 6222b6edae | |||
| baa6850fcb | |||
| cab13cee3c | |||
| b4bff49104 | |||
| cde26b76b1 | |||
| a20ef34280 | |||
| 5606d40451 | |||
| b3a666f876 | |||
| 626d3895aa | |||
| e338c7190d | |||
| adfd12ef52 | |||
| 817291c9a7 | |||
| c4d20c539f | |||
| d089eee133 | |||
| 387df07659 | |||
| 5767acb29b | |||
| 607c1ddc48 | |||
| a6d45f5009 | |||
| 6e9f427182 | |||
| 62ad1749e8 | |||
| 9c695ee46d | |||
| 1d2002e92f | |||
| c7b809415c | |||
| 55fa9ca686 | |||
| 09a6ea46b2 | |||
| 56ba4d88f4 | |||
| fbc8c5e8c7 | |||
| 8a65282820 | |||
| 0d8c5f3dbe | |||
| dbbbe6805d | |||
| 4656002106 | |||
| d3758ec02f | |||
| 81812bb31d | |||
| ab6f9839ef | |||
| d036f66d4c | |||
| dc24186ccb | |||
| f6fbcd8608 | |||
| 381d063295 | |||
| 65a0e48988 | |||
| b1a928b990 | |||
| b7973772b3 | |||
| 20b65fd885 | |||
| 850fbffdde | |||
| 0af5b194fc | |||
| c421038808 | |||
| 68c078038a | |||
| be5b1a52ea | |||
| 86c5b882c2 | |||
| b7d2653fb5 | |||
| 34a752b8d8 | |||
| de217a1bbf | |||
| 3087296263 | |||
| 78ba43480b | |||
| e7e2cc8081 | |||
| 0f6b61ce6c | |||
| 9182e8f2ef | |||
| c7870a79e5 | |||
| 312172fcd8 | |||
| 4a83ba84d3 | |||
| 76a9a20d89 | |||
| 567902a407 | |||
| fca8211c4f | |||
| d37b023e11 | |||
| f175644843 | |||
| 13c4ad10c5 | |||
| 550cd848b0 | |||
| 25e0c43c0b | |||
| fd9d23995d | |||
| 3a467d758e | |||
| 60b6adfa1e | |||
| a44ac333ab | |||
| deea0deb3f | |||
| 97178227ef | |||
| fd1242490f | |||
| 1ac633bdd7 | |||
| 44d4349f12 | |||
| df31be61e8 | |||
| 68b08c1e8a | |||
| f97cca6dcc | |||
| 2c9bb12da9 | |||
| b059055a93 | |||
| 1092cc2fdc | |||
| d5e75b2a37 | |||
| 0a5f8862ad | |||
| 433a225b9d | |||
| 6dfff84ab7 | |||
| 91b6047f2e | |||
| 94ef7f450b | |||
| e5cf92f84e | |||
| 399b5c03a2 | |||
| f9b7be2f74 | |||
| 66f7cc6f88 | |||
| af03597e86 | |||
| ab4fc8faf5 | |||
| fc2c5a8e6c | |||
| 1d23558dff | |||
| ce65d0257a | |||
| 78c5b1704a | |||
| 00f8b6d950 | |||
| e829bdccb5 | |||
| 6a7627e8c6 | |||
| 6787d0591e | |||
| d78dada5ec | |||
| 74844b9a99 | |||
| a29ff0d553 | |||
| 6632dd64b3 | |||
| a8c912c4c2 | |||
| 2a02816127 | |||
| 66b950b4aa | |||
| 639bd9c5cd | |||
| 2bf8c0b501 | |||
| 847d5121aa | |||
| ab6e89594e | |||
| 0662427cec | |||
| d3b283f623 | |||
| 385d97ba15 | |||
| 09574f1c75 | |||
| 34247be52c | |||
| 56b1e1f565 | |||
| 7c6a3081ee | |||
| beeeb1c059 | |||
| 47f70098df | |||
| 4e6b708834 | |||
| a359b07ef3 | |||
| bb0524c559 | |||
| d70ea1987a | |||
| 001de8a1e0 | |||
| f045e8ffcd | |||
| f8be1222d6 | |||
| 8848fe8b33 | |||
| 32524dac56 | |||
| a50ecf4d9c | |||
| 98fda4e5d5 | |||
| f0c111f02a | |||
| bcc36e1533 | |||
| 872639fefa | |||
| e83b959930 | |||
| 3f8de39683 | |||
| 9430f57a0b | |||
| 703ceb44e4 | |||
| 3954db9b99 | |||
| 7abb5972eb | |||
| 377a4899b7 | |||
| d769e833e7 | |||
| 9786f96fe5 | |||
| bd2a672c12 | |||
| 96cb45dd45 | |||
| cf370c083b | |||
| 43e1780dc8 | |||
| 6aac810450 | |||
| f14c283a83 | |||
| e78b2cafb1 | |||
| a9eaf011cd | |||
| 7fca519fd9 | |||
| a9a37b0c97 | |||
| 8f55bd0df1 | |||
| 85e88949c4 | |||
| 80bcebbf66 | |||
| 7cae873830 | |||
| 7a1b27927f | |||
| 306da60752 | |||
| 4274b90b1e | |||
| 0b5721671d | |||
| 30575d15d0 | |||
| 0ca2f8b4a7 | |||
| 2e01e5530c | |||
| 13cc6a2cf0 | |||
| 35ba5dcc9f | |||
| ab58109e5e | |||
| 18a6d25ed6 | |||
| a0e3a269a0 | |||
| 1658d0758c | |||
| 6f0c59bba4 | |||
| 1e98a0df55 | |||
| fa77d3e837 | |||
| a21bc7aad7 | |||
| e665fac956 | |||
| e60c633e56 | |||
| a444526743 | |||
| b65a246fcb | |||
| c5e8a50033 | |||
| 6021c24da9 | |||
| 0e4cd10376 | |||
| f2c043a299 | |||
| 596787b998 | |||
| bb3a0c4444 | |||
| 624b9cb20b | |||
| 5a6c25d616 | |||
| eb178e7bed | |||
| 373d71c124 | |||
| 6618cfeceb | |||
| 58c1382570 | |||
| 721eb122bd | |||
| 95205d2f1d | |||
| b6efa91879 | |||
| 29d7ee09c8 | |||
| 68cb5bc523 | |||
| bd96b2398a | |||
| 1579882475 | |||
| b077b99806 | |||
| 9797e7e58f | |||
| 00f62fd608 | |||
| 833e767e6c | |||
| 3e7b3297aa | |||
| 8e170a69e8 | |||
| 00d6f5d473 | |||
| 3c363788d8 | |||
| cc920cff9a | |||
| 5d78d56f0c | |||
| b9b47c4428 | |||
| a5382e9fdd | |||
| 376f5b2650 | |||
| 61c5f75006 | |||
| 9a6148fe6e | |||
| ff6a558e96 | |||
| 3ad45bebb7 | |||
| 6b152bd778 | |||
| aacde2dfde | |||
| 6971eccb22 | |||
| a5e7c483ef | |||
| 319f97bf9e | |||
| da31a67c52 | |||
| af355e5b85 | |||
| ca9fe264b4 | |||
| c07e937702 | |||
| 371b88c6cd | |||
| 782bc11950 | |||
| 50c3fee7dc | |||
| 51c4e06e59 | |||
| 2a84f44f2c | |||
| 4878c7258d | |||
| e2a4e0cb3f | |||
| fd3457af84 | |||
| 8c0a9062a2 | |||
| 10d301aa3c | |||
| d7193847c5 | |||
| 8ea079679c | |||
| 95c7b498ff | |||
| 1156499b05 | |||
| e92aaffc94 | |||
| 8dc38912c9 | |||
| d228bf12bb | |||
| 7dff8a2ed5 | |||
| 814fe02949 | |||
| da702d6316 | |||
| fd909023f9 | |||
| c94c455b8b | |||
| d39b0ee077 | |||
| 99a2f40a4e | |||
| acaad355ed | |||
| ad4b351a32 | |||
| 2902fae1df | |||
| a0b9ca7fae | |||
| 05d8f8b9ab | |||
| d074a9d54a | |||
| d0274013cf | |||
| 1e0169a9b7 | |||
| c619d2ecad | |||
| a27a2556aa | |||
| 8207373a05 | |||
| 986fab9cbf | |||
| 2025551f3c | |||
| 3d934ab018 | |||
| 6a59ed0984 | |||
| 8fce3f2bcc | |||
| eca0574e41 | |||
| fa044f5972 | |||
| 6d758f338b | |||
| 28322bad5e | |||
| a9805b8fff | |||
| eb16b7fbb8 | |||
| 70428b6c96 | |||
| 85557c2879 | |||
| 5bf7f77520 | |||
| 3cdc7c947f | |||
| 84fc6b2ffb | |||
| e7612f6f0c | |||
| 58097331da | |||
| e053528abf | |||
| 568abb6e03 | |||
| 852c7899b5 | |||
| f4998da4cd | |||
| 9d9e6ef9bd | |||
| fb367174b6 | |||
| 87566010be | |||
| 2f7cdfd786 | |||
| 559e8259be | |||
| 929d3febb0 | |||
| 92df98c3fb | |||
| 019c993313 | |||
| 3c370b7ac7 | |||
| 1afd811aa8 | |||
| 0a999d56c7 | |||
| 4f6988d775 | |||
| 9f2bbe527e | |||
| 965be837a3 | |||
| f5f8b9a145 | |||
| f3b6c1266d | |||
| c675057ab1 | |||
| 20e95e35aa | |||
| f22666e756 | |||
| 3567ac170a | |||
| 260bd952d4 | |||
| e294f04b04 | |||
| 27fa5625be | |||
| 62623e6a23 | |||
| 9df436d31e | |||
| c03f74154b | |||
| 749bdfd164 | |||
| 6878d5260d | |||
| 4f21762533 | |||
| adebc96637 | |||
| 3a50e6d2de | |||
| b09b66adc3 | |||
| 6ef42a9303 | |||
| 922c338387 | |||
| dc4310c301 | |||
| 0b7fd647e6 | |||
| 081b270f04 | |||
| 29c09cb10a | |||
| cd2e6c1aae | |||
| d3e6756c22 | |||
| 7d31812055 | |||
| 8ce871e9bb | |||
| 30125f0faa | |||
| f4ea9a85f0 | |||
| 593e123610 | |||
| 4e8eddd868 | |||
| 041dbcb5c7 | |||
| 215a0163b6 | |||
| a38867ce01 | |||
| 9026c526f8 | |||
| c1f94a4499 | |||
| 2be329974e | |||
| abb26ac410 | |||
| c5f03165bc | |||
| 41452ac20b | |||
| a285707b53 | |||
| cc9a5eea36 | |||
| 89d5cc31af | |||
| f0e11e952c | |||
| 0f8431ff5c | |||
| 659b819011 | |||
| 6a62cfdef7 | |||
| f2ebd751d4 | |||
| 1414f54a12 | |||
| c3265c5bf6 | |||
| 13e322bb57 | |||
| 5cacfc8daf | |||
| 9bbe4d2dcf | |||
| b517409229 | |||
| 75a96e871a | |||
| 395f7dfd63 | |||
| b166f6ff56 | |||
| eb2b6d1002 | |||
| 6cb045453a | |||
| ffae9e81a7 | |||
| dc59d3954a | |||
| 7b26eb50bf | |||
| c0ab1ad793 | |||
| c73dcb51e4 | |||
| 8c7fc0fef9 | |||
| 76a2e24d06 | |||
| 3e6a054913 | |||
| 89b233e51b | |||
| 61f26e060e | |||
| 5649bb243d | |||
| ea90e97f92 | |||
| 3c624188d1 | |||
| 04f373e931 | |||
| b2e36a24fd | |||
| 8da3d2aa35 | |||
| 25c8b9baf8 | |||
| 1555052e1d | |||
| b1fbf91d6e | |||
| 5dfac0c085 | |||
| cb142a65f3 | |||
| b0a9b2366e | |||
| ed897d4105 | |||
| 2b71723ba1 | |||
| 60ba3b7977 | |||
| e0198da52c | |||
| 6365805715 | |||
| 5e7e3696bf | |||
| 166fae425d | |||
| f56bef40d8 | |||
| ad1eec7d47 | |||
| 5b241d2547 | |||
| 92a626fb21 | |||
| 5171f23c09 | |||
| 6dc0ebcac6 | |||
| 49f1f3c86b | |||
| 53ab441486 | |||
| 0e422b5a8f | |||
| 4f51480af9 | |||
| fb8fbbcf70 | |||
| 5256eff88d | |||
| 7c40157cba | |||
| 454e97c9f1 | |||
| 41d34af4c7 | |||
| a13f5bfb10 | |||
| da06fe4a7b | |||
| 061ea6aab4 | |||
| a19cc81391 | |||
| 871164b969 | |||
| 16fa77eb09 | |||
| a46399cbaf | |||
| a9e50d29d3 | |||
| e7d7f217a9 | |||
| 000c2b9ab0 | |||
| 37fdb161ea | |||
| 2d923bbdc9 | |||
| 07c55de024 | |||
| 30c463627a | |||
| 5eec635146 | |||
| 229d9c76c3 | |||
| 0119eadc14 | |||
| c1c656ab7e | |||
| e4a50bcd60 | |||
| d18e6df1c0 | |||
| af9aa726f4 | |||
| 27f8c90dae | |||
| b9a3c384d9 | |||
| eed7085756 | |||
| abb0c2ad16 | |||
| 9f7b40381c | |||
| ad9a390b58 | |||
| 5525635df1 | |||
| 863e0205ff | |||
| 2560a70e5e | |||
| b638a73e4d | |||
| 0a9cea4df3 | |||
| 15a98c3eac | |||
| 493c16087d | |||
| dd155a0f63 | |||
| 9ce1c8d397 | |||
| 3040435c06 | |||
| 685f93f05e | |||
| d465d9da1a | |||
| 50c2766014 | |||
| 4e1cbf13c6 |
@@ -0,0 +1,17 @@
|
|||||||
|
name: Deploy Docker Image
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Execute custom script
|
||||||
|
run: |
|
||||||
|
cat >> deploy.sh <<EOF
|
||||||
|
#!/bin/sh
|
||||||
|
${{ vars.CUSTOM_DEPLOY_SCRIPTS }}
|
||||||
|
EOF
|
||||||
|
chmod +x deploy.sh
|
||||||
|
./deploy.sh
|
||||||
@@ -10,11 +10,11 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Docker meta
|
- name: Docker meta
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@v4
|
uses: docker/metadata-action@v5
|
||||||
with:
|
with:
|
||||||
images: |
|
images: |
|
||||||
${{ secrets.DOCKER_REPO }}/mayswind/ezbookkeeping
|
${{ secrets.DOCKER_REPO }}/mayswind/ezbookkeeping
|
||||||
@@ -24,10 +24,12 @@ jobs:
|
|||||||
type=raw,value=latest
|
type=raw,value=latest
|
||||||
|
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v2
|
uses: docker/setup-qemu-action@v3
|
||||||
|
with:
|
||||||
|
image: tonistiigi/binfmt:qemu-v8.1.5
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v2
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
- name: Set up the environment
|
- name: Set up the environment
|
||||||
run: |
|
run: |
|
||||||
@@ -44,7 +46,7 @@ jobs:
|
|||||||
chmod +x docker/custom-frontend-pre-setup.sh
|
chmod +x docker/custom-frontend-pre-setup.sh
|
||||||
|
|
||||||
- name: Build and push
|
- name: Build and push
|
||||||
uses: docker/build-push-action@v4
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
file: Dockerfile
|
file: Dockerfile
|
||||||
context: .
|
context: .
|
||||||
@@ -52,5 +54,6 @@ jobs:
|
|||||||
push: true
|
push: true
|
||||||
build-args: |
|
build-args: |
|
||||||
RELEASE_BUILD=1
|
RELEASE_BUILD=1
|
||||||
|
SKIP_TESTS=${{ vars.SKIP_TESTS }}
|
||||||
tags: ${{ steps.meta.outputs.tags }}
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
|
|||||||
@@ -10,11 +10,11 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Docker meta
|
- name: Docker meta
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@v4
|
uses: docker/metadata-action@v5
|
||||||
with:
|
with:
|
||||||
images: |
|
images: |
|
||||||
${{ secrets.DOCKER_REPO }}/mayswind/ezbookkeeping
|
${{ secrets.DOCKER_REPO }}/mayswind/ezbookkeeping
|
||||||
@@ -24,10 +24,12 @@ jobs:
|
|||||||
type=sha,format=short,prefix=SNAPSHOT-
|
type=sha,format=short,prefix=SNAPSHOT-
|
||||||
|
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v2
|
uses: docker/setup-qemu-action@v3
|
||||||
|
with:
|
||||||
|
image: tonistiigi/binfmt:qemu-v8.1.5
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v2
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
- name: Set up the environment
|
- name: Set up the environment
|
||||||
run: |
|
run: |
|
||||||
@@ -44,7 +46,7 @@ jobs:
|
|||||||
chmod +x docker/custom-frontend-pre-setup.sh
|
chmod +x docker/custom-frontend-pre-setup.sh
|
||||||
|
|
||||||
- name: Build and push
|
- name: Build and push
|
||||||
uses: docker/build-push-action@v4
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
file: Dockerfile
|
file: Dockerfile
|
||||||
context: .
|
context: .
|
||||||
|
|||||||
@@ -10,11 +10,11 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Docker meta
|
- name: Docker meta
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@v4
|
uses: docker/metadata-action@v5
|
||||||
with:
|
with:
|
||||||
images: |
|
images: |
|
||||||
${{ secrets.DOCKER_USERNAME }}/ezbookkeeping
|
${{ secrets.DOCKER_USERNAME }}/ezbookkeeping
|
||||||
@@ -24,19 +24,21 @@ jobs:
|
|||||||
type=raw,value=latest
|
type=raw,value=latest
|
||||||
|
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v2
|
uses: docker/setup-qemu-action@v3
|
||||||
|
with:
|
||||||
|
image: tonistiigi/binfmt:qemu-v8.1.5
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v2
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
- name: Login to DockerHub
|
- name: Login to DockerHub
|
||||||
uses: docker/login-action@v2
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKER_USERNAME }}
|
username: ${{ secrets.DOCKER_USERNAME }}
|
||||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||||
|
|
||||||
- name: Build and push
|
- name: Build and push
|
||||||
uses: docker/build-push-action@v4
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
file: Dockerfile
|
file: Dockerfile
|
||||||
context: .
|
context: .
|
||||||
@@ -48,5 +50,6 @@ jobs:
|
|||||||
push: true
|
push: true
|
||||||
build-args: |
|
build-args: |
|
||||||
RELEASE_BUILD=1
|
RELEASE_BUILD=1
|
||||||
|
SKIP_TESTS=${{ vars.SKIP_TESTS }}
|
||||||
tags: ${{ steps.meta.outputs.tags }}
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
|
|||||||
@@ -10,11 +10,11 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Docker meta
|
- name: Docker meta
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@v4
|
uses: docker/metadata-action@v5
|
||||||
with:
|
with:
|
||||||
images: |
|
images: |
|
||||||
${{ secrets.DOCKER_USERNAME }}/ezbookkeeping
|
${{ secrets.DOCKER_USERNAME }}/ezbookkeeping
|
||||||
@@ -23,19 +23,21 @@ jobs:
|
|||||||
type=raw,value=latest-snapshot
|
type=raw,value=latest-snapshot
|
||||||
|
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v2
|
uses: docker/setup-qemu-action@v3
|
||||||
|
with:
|
||||||
|
image: tonistiigi/binfmt:qemu-v8.1.5
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v2
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
- name: Login to DockerHub
|
- name: Login to DockerHub
|
||||||
uses: docker/login-action@v2
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKER_USERNAME }}
|
username: ${{ secrets.DOCKER_USERNAME }}
|
||||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||||
|
|
||||||
- name: Build and push
|
- name: Build and push
|
||||||
uses: docker/build-push-action@v4
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
file: Dockerfile
|
file: Dockerfile
|
||||||
context: .
|
context: .
|
||||||
@@ -46,6 +48,6 @@ jobs:
|
|||||||
linux/arm/v6
|
linux/arm/v6
|
||||||
push: true
|
push: true
|
||||||
build-args: |
|
build-args: |
|
||||||
SKIP_TESTS=${{ vars.SKIP_TESTS }}
|
SKIP_TESTS=${{ vars.SKIP_TESTS }}
|
||||||
tags: ${{ steps.meta.outputs.tags }}
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
|
|||||||
@@ -11,20 +11,20 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
-
|
-
|
||||||
name: Checkout
|
name: Checkout
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v4
|
||||||
-
|
-
|
||||||
name: Login to DockerHub
|
name: Login to DockerHub
|
||||||
uses: docker/login-action@v1
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKER_USERNAME }}
|
username: ${{ secrets.DOCKER_USERNAME }}
|
||||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||||
-
|
-
|
||||||
name: Build
|
name: Build
|
||||||
uses: docker/build-push-action@v2
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
file: Dockerfile
|
file: Dockerfile
|
||||||
context: .
|
context: .
|
||||||
platforms: linux/amd64
|
platforms: linux/amd64
|
||||||
push: false
|
push: false
|
||||||
build-args: |
|
build-args: |
|
||||||
SKIP_TESTS=${{ vars.SKIP_TESTS }}
|
SKIP_TESTS=${{ vars.SKIP_TESTS }}
|
||||||
|
|||||||
+3
-3
@@ -1,5 +1,5 @@
|
|||||||
# Build backend binary file
|
# Build backend binary file
|
||||||
FROM golang:1.23.4-alpine3.21 AS be-builder
|
FROM golang:1.24.2-alpine3.21 AS be-builder
|
||||||
ARG RELEASE_BUILD
|
ARG RELEASE_BUILD
|
||||||
ARG SKIP_TESTS
|
ARG SKIP_TESTS
|
||||||
ENV RELEASE_BUILD=$RELEASE_BUILD
|
ENV RELEASE_BUILD=$RELEASE_BUILD
|
||||||
@@ -11,7 +11,7 @@ RUN apk add git gcc g++ libc-dev
|
|||||||
RUN ./build.sh backend
|
RUN ./build.sh backend
|
||||||
|
|
||||||
# Build frontend files
|
# Build frontend files
|
||||||
FROM --platform=$BUILDPLATFORM node:20.18.1-alpine3.21 AS fe-builder
|
FROM --platform=$BUILDPLATFORM node:22.15.0-alpine3.21 AS fe-builder
|
||||||
ARG RELEASE_BUILD
|
ARG RELEASE_BUILD
|
||||||
ENV RELEASE_BUILD=$RELEASE_BUILD
|
ENV RELEASE_BUILD=$RELEASE_BUILD
|
||||||
WORKDIR /go/src/github.com/mayswind/ezbookkeeping
|
WORKDIR /go/src/github.com/mayswind/ezbookkeeping
|
||||||
@@ -21,7 +21,7 @@ RUN apk add git
|
|||||||
RUN ./build.sh frontend
|
RUN ./build.sh frontend
|
||||||
|
|
||||||
# Package docker image
|
# Package docker image
|
||||||
FROM alpine:3.21.0
|
FROM alpine:3.21.3
|
||||||
LABEL maintainer="MaysWind <i@mayswind.net>"
|
LABEL maintainer="MaysWind <i@mayswind.net>"
|
||||||
RUN addgroup -S -g 1000 ezbookkeeping && adduser -S -G ezbookkeeping -u 1000 ezbookkeeping
|
RUN addgroup -S -g 1000 ezbookkeeping && adduser -S -G ezbookkeeping -u 1000 ezbookkeeping
|
||||||
RUN apk --no-cache add tzdata
|
RUN apk --no-cache add tzdata
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
MIT License
|
MIT License
|
||||||
|
|
||||||
Copyright (c) 2020-2024 MaysWind (i@mayswind.net)
|
Copyright (c) 2020-2025 MaysWind (i@mayswind.net)
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
|||||||
@@ -6,32 +6,39 @@
|
|||||||
[](https://github.com/mayswind/ezbookkeeping/releases)
|
[](https://github.com/mayswind/ezbookkeeping/releases)
|
||||||
|
|
||||||
## Introduction
|
## Introduction
|
||||||
ezBookkeeping is a lightweight personal bookkeeping app hosted by yourself. It can be deployed on almost all platforms, including Windows, macOS and Linux on x86, amd64 and ARM architectures. You can even deploy it on an raspberry device. It also supports many different databases, including sqlite and mysql. With docker, you can just deploy it via one command without complicated configuration.
|
ezBookkeeping is a lightweight self-hosted personal bookkeeping app with user-friendly interface for both desktop and mobile devices. It supports PWA, you can [add the app homepage to the home screen](https://raw.githubusercontent.com/wiki/mayswind/ezbookkeeping/img/mobile/add_to_home_screen.gif) of your mobile device and use it just like a native app. It's easily to be deployed and configured, you can just deploy it by a single command via Docker. It supports almost all platforms, including Windows, macOS, and Linux, and is compatible with x86, amd64 and ARM hardware architectures. It only requires very few system resources, and you can even run it on a Raspberry Pi device.
|
||||||
|
|
||||||
Online Demo: [https://ezbookkeeping-demo.mayswind.net](https://ezbookkeeping-demo.mayswind.net)
|
Online Demo: [https://ezbookkeeping-demo.mayswind.net](https://ezbookkeeping-demo.mayswind.net)
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
1. Open source & Self-hosted
|
1. Open Source & Self-Hosted
|
||||||
2. Lightweight & Fast
|
2. Lightweight & Fast
|
||||||
3. Easy to install
|
3. Easy Installation
|
||||||
* Docker support
|
* Support Docker
|
||||||
* Multiple database support (SQLite, MySQL, PostgreSQL, etc.)
|
* Support multiple databases (SQLite, MySQL, PostgreSQL, etc.)
|
||||||
* Multiple operation system & hardware support (Windows, macOS, Linux & x86, amd64, ARM)
|
* Support multiple operation system & hardware architectures (Windows, macOS, Linux & x86, amd64, ARM)
|
||||||
4. User-friendly interface
|
4. User-Friendly Interface
|
||||||
* Both desktop and mobile UI
|
* Native UI for both desktop and mobile devices
|
||||||
* Close to native app experience (for mobile device)
|
* Support PWA, providing near-native experience for mobile devices
|
||||||
* Two-level account & two-level category support
|
|
||||||
* Plentiful preset categories
|
|
||||||
* Geographic location and map support
|
|
||||||
* Searching & filtering history records
|
|
||||||
* Data statistics
|
|
||||||
* Dark theme
|
* Dark theme
|
||||||
5. Multiple currency support & automatically updating exchange rates
|
5. Powerful Bookkeeping Features
|
||||||
6. Multiple timezone support
|
* Support two-level account
|
||||||
7. Multi-language support
|
* Support two-level transaction categories and predefined categories
|
||||||
8. Two-factor authentication
|
* Support transaction pictures
|
||||||
9. Application lock (PIN code / WebAuthn)
|
* Support geographic location tracking and map
|
||||||
10. Data import & export
|
* Support recurring transactions
|
||||||
|
* Search and filter transaction records
|
||||||
|
* Data visualization and statistical analysis
|
||||||
|
6. Localization Support
|
||||||
|
* Multi-language support
|
||||||
|
* Multi-currency support with automatic exchange rate updates from various financial institutions
|
||||||
|
* Multi-timezone support
|
||||||
|
* Customizable date, time, number and currency display formats
|
||||||
|
7. Security & Reliability
|
||||||
|
* Two-factor authentication (2FA)
|
||||||
|
* Login rate limiting
|
||||||
|
* Application lock (PIN code / WebAuthn)
|
||||||
|
8. Data Export & Import (CSV, OFX, QFX, QIF, IIF, GnuCash, FireFly III, Beancount, etc.)
|
||||||
|
|
||||||
## Screenshots
|
## Screenshots
|
||||||
### Desktop Version
|
### Desktop Version
|
||||||
@@ -88,7 +95,7 @@ You can also build docker image, make sure you have [docker](https://www.docker.
|
|||||||
|
|
||||||
## Documents
|
## Documents
|
||||||
1. [English](http://ezbookkeeping.mayswind.net)
|
1. [English](http://ezbookkeeping.mayswind.net)
|
||||||
1. [简体中文 (Simplified Chinese)](http://ezbookkeeping.mayswind.net/zh_Hans)
|
1. [中文 (简体)](http://ezbookkeeping.mayswind.net/zh_Hans)
|
||||||
|
|
||||||
## License
|
## License
|
||||||
[MIT](https://github.com/mayswind/ezbookkeeping/blob/master/LICENSE)
|
[MIT](https://github.com/mayswind/ezbookkeeping/blob/master/LICENSE)
|
||||||
|
|||||||
+5
-3
@@ -1,14 +1,16 @@
|
|||||||
package cmd
|
package cmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/urfave/cli/v2"
|
"context"
|
||||||
|
|
||||||
|
"github.com/urfave/cli/v3"
|
||||||
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
)
|
)
|
||||||
|
|
||||||
func bindAction(fn core.CliHandlerFunc) cli.ActionFunc {
|
func bindAction(fn core.CliHandlerFunc) cli.ActionFunc {
|
||||||
return func(cliCtx *cli.Context) error {
|
return func(ctx context.Context, cmd *cli.Command) error {
|
||||||
c := core.WrapCilContext(cliCtx)
|
c := core.WrapCilContext(ctx, cmd)
|
||||||
return fn(c)
|
return fn(c)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
+2
-2
@@ -3,7 +3,7 @@ package cmd
|
|||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
"github.com/urfave/cli/v2"
|
"github.com/urfave/cli/v3"
|
||||||
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/cron"
|
"github.com/mayswind/ezbookkeeping/pkg/cron"
|
||||||
@@ -14,7 +14,7 @@ import (
|
|||||||
var CronJobs = &cli.Command{
|
var CronJobs = &cli.Command{
|
||||||
Name: "cron",
|
Name: "cron",
|
||||||
Usage: "ezBookkeeping cron job utilities",
|
Usage: "ezBookkeeping cron job utilities",
|
||||||
Subcommands: []*cli.Command{
|
Commands: []*cli.Command{
|
||||||
{
|
{
|
||||||
Name: "list",
|
Name: "list",
|
||||||
Usage: "List all enabled cron jobs",
|
Usage: "List all enabled cron jobs",
|
||||||
|
|||||||
+10
-2
@@ -1,7 +1,7 @@
|
|||||||
package cmd
|
package cmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/urfave/cli/v2"
|
"github.com/urfave/cli/v3"
|
||||||
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/datastore"
|
"github.com/mayswind/ezbookkeeping/pkg/datastore"
|
||||||
@@ -13,7 +13,7 @@ import (
|
|||||||
var Database = &cli.Command{
|
var Database = &cli.Command{
|
||||||
Name: "database",
|
Name: "database",
|
||||||
Usage: "ezBookkeeping database maintenance",
|
Usage: "ezBookkeeping database maintenance",
|
||||||
Subcommands: []*cli.Command{
|
Commands: []*cli.Command{
|
||||||
{
|
{
|
||||||
Name: "update",
|
Name: "update",
|
||||||
Usage: "Update database structure",
|
Usage: "Update database structure",
|
||||||
@@ -133,5 +133,13 @@ func updateAllDatabaseTablesStructure(c *core.CliContext) error {
|
|||||||
|
|
||||||
log.BootInfof(c, "[database.updateAllDatabaseTablesStructure] transaction picture table maintained successfully")
|
log.BootInfof(c, "[database.updateAllDatabaseTablesStructure] transaction picture table maintained successfully")
|
||||||
|
|
||||||
|
err = datastore.Container.UserDataStore.SyncStructs(new(models.UserCustomExchangeRate))
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
log.BootInfof(c, "[database.updateAllDatabaseTablesStructure] user custom exchange rate table maintained successfully")
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|||||||
+2
-2
@@ -3,7 +3,7 @@ package cmd
|
|||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
"github.com/urfave/cli/v2"
|
"github.com/urfave/cli/v3"
|
||||||
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
@@ -13,7 +13,7 @@ import (
|
|||||||
var SecurityUtils = &cli.Command{
|
var SecurityUtils = &cli.Command{
|
||||||
Name: "security",
|
Name: "security",
|
||||||
Usage: "ezBookkeeping security utilities",
|
Usage: "ezBookkeeping security utilities",
|
||||||
Subcommands: []*cli.Command{
|
Commands: []*cli.Command{
|
||||||
{
|
{
|
||||||
Name: "gen-secret-key",
|
Name: "gen-secret-key",
|
||||||
Usage: "Generate a random secret key",
|
Usage: "Generate a random secret key",
|
||||||
|
|||||||
+8
-3
@@ -4,7 +4,7 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"os"
|
"os"
|
||||||
|
|
||||||
"github.com/urfave/cli/v2"
|
"github.com/urfave/cli/v3"
|
||||||
|
|
||||||
clis "github.com/mayswind/ezbookkeeping/pkg/cli"
|
clis "github.com/mayswind/ezbookkeeping/pkg/cli"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
@@ -18,7 +18,7 @@ import (
|
|||||||
var UserData = &cli.Command{
|
var UserData = &cli.Command{
|
||||||
Name: "userdata",
|
Name: "userdata",
|
||||||
Usage: "ezBookkeeping user data maintenance",
|
Usage: "ezBookkeeping user data maintenance",
|
||||||
Subcommands: []*cli.Command{
|
Commands: []*cli.Command{
|
||||||
{
|
{
|
||||||
Name: "user-add",
|
Name: "user-add",
|
||||||
Usage: "Add new user",
|
Usage: "Add new user",
|
||||||
@@ -792,7 +792,11 @@ func exportUserTransaction(c *core.CliContext) error {
|
|||||||
filePath := c.String("file")
|
filePath := c.String("file")
|
||||||
fileType := c.String("type")
|
fileType := c.String("type")
|
||||||
|
|
||||||
if fileType != "" && fileType != "csv" && fileType != "tsv" {
|
if fileType == "" {
|
||||||
|
fileType = "csv"
|
||||||
|
}
|
||||||
|
|
||||||
|
if fileType != "csv" && fileType != "tsv" {
|
||||||
log.CliErrorf(c, "[user_data.exportUserTransaction] export file type is not supported")
|
log.CliErrorf(c, "[user_data.exportUserTransaction] export file type is not supported")
|
||||||
return errs.ErrNotSupported
|
return errs.ErrNotSupported
|
||||||
}
|
}
|
||||||
@@ -899,6 +903,7 @@ func printUserInfo(user *models.User) {
|
|||||||
fmt.Printf("[DigitGroupingSymbol] %s (%d)\n", user.DigitGroupingSymbol, user.DigitGroupingSymbol)
|
fmt.Printf("[DigitGroupingSymbol] %s (%d)\n", user.DigitGroupingSymbol, user.DigitGroupingSymbol)
|
||||||
fmt.Printf("[DigitGrouping] %s (%d)\n", user.DigitGrouping, user.DigitGrouping)
|
fmt.Printf("[DigitGrouping] %s (%d)\n", user.DigitGrouping, user.DigitGrouping)
|
||||||
fmt.Printf("[CurrencyDisplayType] %s (%d)\n", user.CurrencyDisplayType, user.CurrencyDisplayType)
|
fmt.Printf("[CurrencyDisplayType] %s (%d)\n", user.CurrencyDisplayType, user.CurrencyDisplayType)
|
||||||
|
fmt.Printf("[CoordinateDisplayType] %s (%d)\n", user.CoordinateDisplayType, user.CoordinateDisplayType)
|
||||||
fmt.Printf("[ExpenseAmountColor] %s (%d)\n", user.ExpenseAmountColor, user.ExpenseAmountColor)
|
fmt.Printf("[ExpenseAmountColor] %s (%d)\n", user.ExpenseAmountColor, user.ExpenseAmountColor)
|
||||||
fmt.Printf("[IncomeAmountColor] %s (%d)\n", user.IncomeAmountColor, user.IncomeAmountColor)
|
fmt.Printf("[IncomeAmountColor] %s (%d)\n", user.IncomeAmountColor, user.IncomeAmountColor)
|
||||||
fmt.Printf("[FeatureRestriction] %s (%d)\n", user.FeatureRestriction, user.FeatureRestriction)
|
fmt.Printf("[FeatureRestriction] %s (%d)\n", user.FeatureRestriction, user.FeatureRestriction)
|
||||||
|
|||||||
+2
-2
@@ -5,7 +5,7 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"net"
|
"net"
|
||||||
|
|
||||||
"github.com/urfave/cli/v2"
|
"github.com/urfave/cli/v3"
|
||||||
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
@@ -18,7 +18,7 @@ import (
|
|||||||
var Utilities = &cli.Command{
|
var Utilities = &cli.Command{
|
||||||
Name: "utility",
|
Name: "utility",
|
||||||
Usage: "ezBookkeeping utilities",
|
Usage: "ezBookkeeping utilities",
|
||||||
Subcommands: []*cli.Command{
|
Commands: []*cli.Command{
|
||||||
{
|
{
|
||||||
Name: "parse-default-request-id",
|
Name: "parse-default-request-id",
|
||||||
Usage: "Parse a request id which is generated by default request generator and show the details",
|
Usage: "Parse a request id which is generated by default request generator and show the details",
|
||||||
|
|||||||
+25
-2
@@ -11,7 +11,7 @@ import (
|
|||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/gin-gonic/gin/binding"
|
"github.com/gin-gonic/gin/binding"
|
||||||
"github.com/go-playground/validator/v10"
|
"github.com/go-playground/validator/v10"
|
||||||
"github.com/urfave/cli/v2"
|
"github.com/urfave/cli/v3"
|
||||||
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/api"
|
"github.com/mayswind/ezbookkeeping/pkg/api"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
@@ -29,7 +29,7 @@ import (
|
|||||||
var WebServer = &cli.Command{
|
var WebServer = &cli.Command{
|
||||||
Name: "server",
|
Name: "server",
|
||||||
Usage: "ezBookkeeping web server operation",
|
Usage: "ezBookkeeping web server operation",
|
||||||
Subcommands: []*cli.Command{
|
Commands: []*cli.Command{
|
||||||
{
|
{
|
||||||
Name: "run",
|
Name: "run",
|
||||||
Usage: "Run ezBookkeeping web server",
|
Usage: "Run ezBookkeeping web server",
|
||||||
@@ -116,8 +116,13 @@ func startWebServer(c *core.CliContext) error {
|
|||||||
router.StaticFile("favicon.png", filepath.Join(config.StaticRootPath, "favicon.png"))
|
router.StaticFile("favicon.png", filepath.Join(config.StaticRootPath, "favicon.png"))
|
||||||
router.StaticFile("touchicon.png", filepath.Join(config.StaticRootPath, "touchicon.png"))
|
router.StaticFile("touchicon.png", filepath.Join(config.StaticRootPath, "touchicon.png"))
|
||||||
router.StaticFile("manifest.json", filepath.Join(config.StaticRootPath, "manifest.json"))
|
router.StaticFile("manifest.json", filepath.Join(config.StaticRootPath, "manifest.json"))
|
||||||
|
router.StaticFile("sw.js", filepath.Join(config.StaticRootPath, "sw.js"))
|
||||||
router.GET("/server_settings.js", bindCachedJs(api.ServerSettings.ServerSettingsJavascriptHandler, serverSettingsCacheStore))
|
router.GET("/server_settings.js", bindCachedJs(api.ServerSettings.ServerSettingsJavascriptHandler, serverSettingsCacheStore))
|
||||||
|
|
||||||
|
for i := 0; i < len(workboxFileNames); i++ {
|
||||||
|
router.StaticFile("/"+workboxFileNames[i], filepath.Join(config.StaticRootPath, workboxFileNames[i]))
|
||||||
|
}
|
||||||
|
|
||||||
router.StaticFile("/mobile", filepath.Join(config.StaticRootPath, "mobile.html"))
|
router.StaticFile("/mobile", filepath.Join(config.StaticRootPath, "mobile.html"))
|
||||||
router.Static("/mobile/js", filepath.Join(config.StaticRootPath, "js"))
|
router.Static("/mobile/js", filepath.Join(config.StaticRootPath, "js"))
|
||||||
router.Static("/mobile/css", filepath.Join(config.StaticRootPath, "css"))
|
router.Static("/mobile/css", filepath.Join(config.StaticRootPath, "css"))
|
||||||
@@ -296,6 +301,7 @@ func startWebServer(c *core.CliContext) error {
|
|||||||
apiV1Route.POST("/accounts/hide.json", bindApi(api.Accounts.AccountHideHandler))
|
apiV1Route.POST("/accounts/hide.json", bindApi(api.Accounts.AccountHideHandler))
|
||||||
apiV1Route.POST("/accounts/move.json", bindApi(api.Accounts.AccountMoveHandler))
|
apiV1Route.POST("/accounts/move.json", bindApi(api.Accounts.AccountMoveHandler))
|
||||||
apiV1Route.POST("/accounts/delete.json", bindApi(api.Accounts.AccountDeleteHandler))
|
apiV1Route.POST("/accounts/delete.json", bindApi(api.Accounts.AccountDeleteHandler))
|
||||||
|
apiV1Route.POST("/accounts/sub_account/delete.json", bindApi(api.Accounts.SubAccountDeleteHandler))
|
||||||
|
|
||||||
// Transactions
|
// Transactions
|
||||||
apiV1Route.GET("/transactions/count.json", bindApi(api.Transactions.TransactionCountHandler))
|
apiV1Route.GET("/transactions/count.json", bindApi(api.Transactions.TransactionCountHandler))
|
||||||
@@ -310,8 +316,10 @@ func startWebServer(c *core.CliContext) error {
|
|||||||
apiV1Route.POST("/transactions/delete.json", bindApi(api.Transactions.TransactionDeleteHandler))
|
apiV1Route.POST("/transactions/delete.json", bindApi(api.Transactions.TransactionDeleteHandler))
|
||||||
|
|
||||||
if config.EnableDataImport {
|
if config.EnableDataImport {
|
||||||
|
apiV1Route.POST("/transactions/parse_dsv_file.json", bindApi(api.Transactions.TransactionParseImportDsvFileDataHandler))
|
||||||
apiV1Route.POST("/transactions/parse_import.json", bindApi(api.Transactions.TransactionParseImportFileHandler))
|
apiV1Route.POST("/transactions/parse_import.json", bindApi(api.Transactions.TransactionParseImportFileHandler))
|
||||||
apiV1Route.POST("/transactions/import.json", bindApi(api.Transactions.TransactionImportHandler))
|
apiV1Route.POST("/transactions/import.json", bindApi(api.Transactions.TransactionImportHandler))
|
||||||
|
apiV1Route.GET("/transactions/import/process.json", bindApi(api.Transactions.TransactionImportProcessHandler))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Transaction Pictures
|
// Transaction Pictures
|
||||||
@@ -334,6 +342,7 @@ func startWebServer(c *core.CliContext) error {
|
|||||||
apiV1Route.GET("/transaction/tags/list.json", bindApi(api.TransactionTags.TagListHandler))
|
apiV1Route.GET("/transaction/tags/list.json", bindApi(api.TransactionTags.TagListHandler))
|
||||||
apiV1Route.GET("/transaction/tags/get.json", bindApi(api.TransactionTags.TagGetHandler))
|
apiV1Route.GET("/transaction/tags/get.json", bindApi(api.TransactionTags.TagGetHandler))
|
||||||
apiV1Route.POST("/transaction/tags/add.json", bindApi(api.TransactionTags.TagCreateHandler))
|
apiV1Route.POST("/transaction/tags/add.json", bindApi(api.TransactionTags.TagCreateHandler))
|
||||||
|
apiV1Route.POST("/transaction/tags/add_batch.json", bindApi(api.TransactionTags.TagCreateBatchHandler))
|
||||||
apiV1Route.POST("/transaction/tags/modify.json", bindApi(api.TransactionTags.TagModifyHandler))
|
apiV1Route.POST("/transaction/tags/modify.json", bindApi(api.TransactionTags.TagModifyHandler))
|
||||||
apiV1Route.POST("/transaction/tags/hide.json", bindApi(api.TransactionTags.TagHideHandler))
|
apiV1Route.POST("/transaction/tags/hide.json", bindApi(api.TransactionTags.TagHideHandler))
|
||||||
apiV1Route.POST("/transaction/tags/move.json", bindApi(api.TransactionTags.TagMoveHandler))
|
apiV1Route.POST("/transaction/tags/move.json", bindApi(api.TransactionTags.TagMoveHandler))
|
||||||
@@ -350,6 +359,8 @@ func startWebServer(c *core.CliContext) error {
|
|||||||
|
|
||||||
// Exchange Rates
|
// Exchange Rates
|
||||||
apiV1Route.GET("/exchange_rates/latest.json", bindApi(api.ExchangeRates.LatestExchangeRateHandler))
|
apiV1Route.GET("/exchange_rates/latest.json", bindApi(api.ExchangeRates.LatestExchangeRateHandler))
|
||||||
|
apiV1Route.POST("/exchange_rates/user_custom/update.json", bindApi(api.ExchangeRates.UserCustomExchangeRateUpdateHandler))
|
||||||
|
apiV1Route.POST("/exchange_rates/user_custom/delete.json", bindApi(api.ExchangeRates.UserCustomExchangeRateDeleteHandler))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -412,6 +423,18 @@ func bindApiWithTokenUpdate(fn core.ApiHandlerFunc, config *settings.Config) gin
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func bindEventStreamApi(fn core.EventStreamApiHandlerFunc) gin.HandlerFunc {
|
||||||
|
return func(ginCtx *gin.Context) {
|
||||||
|
c := core.WrapWebContext(ginCtx)
|
||||||
|
utils.SetEventStreamHeader(c)
|
||||||
|
err := fn(c)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
utils.WriteEventStreamJsonErrorResult(c, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func bindCachedJs(fn core.DataHandlerFunc, store persistence.CacheStore) gin.HandlerFunc {
|
func bindCachedJs(fn core.DataHandlerFunc, store persistence.CacheStore) gin.HandlerFunc {
|
||||||
return cache.CachePage(store, time.Minute, func(ginCtx *gin.Context) {
|
return cache.CachePage(store, time.Minute, func(ginCtx *gin.Context) {
|
||||||
c := core.WrapWebContext(ginCtx)
|
c := core.WrapWebContext(ginCtx)
|
||||||
|
|||||||
+11
-3
@@ -180,6 +180,12 @@ email_verify_token_expired_time = 3600
|
|||||||
# Password reset token expired seconds (60 - 4294967295), default is 3600 (60 minutes)
|
# Password reset token expired seconds (60 - 4294967295), default is 3600 (60 minutes)
|
||||||
password_reset_token_expired_time = 3600
|
password_reset_token_expired_time = 3600
|
||||||
|
|
||||||
|
# Maximum count of password / token check failures (0 - 4294967295) per IP per minute (use the above duplicate checker), default is 5, set to 0 to disable
|
||||||
|
max_failures_per_ip_per_minute = 5
|
||||||
|
|
||||||
|
# Maximum count of password / token check failures (0 - 4294967295) per user per minute (use the above duplicate checker), default is 5, set to 0 to disable
|
||||||
|
max_failures_per_user_per_minute = 5
|
||||||
|
|
||||||
# Add X-Request-Id header to response to track user request or error, default is true
|
# Add X-Request-Id header to response to track user request or error, default is true
|
||||||
request_id_header = true
|
request_id_header = true
|
||||||
|
|
||||||
@@ -248,7 +254,7 @@ enable_tips_in_login_page = false
|
|||||||
|
|
||||||
# The custom tips displayed in login page, it supports multi-language configuration
|
# The custom tips displayed in login page, it supports multi-language configuration
|
||||||
# Add an underscore and a language tag after the setting key to configure the notification content in that language, the same below
|
# Add an underscore and a language tag after the setting key to configure the notification content in that language, the same below
|
||||||
# For example, login_page_tips_content_zh_hans means the notification content in Simplified Chinese
|
# For example, login_page_tips_content_zh_hans means the notification content in Chinese (Simplified)
|
||||||
login_page_tips_content =
|
login_page_tips_content =
|
||||||
|
|
||||||
[notification]
|
[notification]
|
||||||
@@ -257,7 +263,7 @@ enable_notification_after_register = false
|
|||||||
|
|
||||||
# The notification content displayed each time users register, it supports multi-language configuration
|
# The notification content displayed each time users register, it supports multi-language configuration
|
||||||
# Add an underscore and a language tag after the setting key to configure the notification content in that language, the same below
|
# Add an underscore and a language tag after the setting key to configure the notification content in that language, the same below
|
||||||
# For example, after_login_notification_content_zh_hans means the notification content in Simplified Chinese
|
# For example, after_login_notification_content_zh_hans means the notification content in Chinese (Simplified)
|
||||||
after_register_notification_content =
|
after_register_notification_content =
|
||||||
|
|
||||||
# Set to true to display custom notification in home page every time users login
|
# Set to true to display custom notification in home page every time users login
|
||||||
@@ -315,7 +321,7 @@ amap_application_key =
|
|||||||
# "external_proxy": use an external proxy to request amap api (amap application secret should be set by external proxy)
|
# "external_proxy": use an external proxy to request amap api (amap application secret should be set by external proxy)
|
||||||
# "plain_text": append amap application secret to frontend request directly (insecurity for public network)
|
# "plain_text": append amap application secret to frontend request directly (insecurity for public network)
|
||||||
# Please visit https://developer.amap.com/api/jsapi-v2/guide/abc/load for more information
|
# Please visit https://developer.amap.com/api/jsapi-v2/guide/abc/load for more information
|
||||||
amap_security_verification_method = plain_text
|
amap_security_verification_method = internal_proxy
|
||||||
|
|
||||||
# For "amap" map provider only, Amap JavaScript API application secret, this setting must be provided when "amap_security_verification_method" is set to "internal_proxy" or "plain_text", please visit https://lbs.amap.com/api/javascript-api/guide/abc/prepare for more information
|
# For "amap" map provider only, Amap JavaScript API application secret, this setting must be provided when "amap_security_verification_method" is set to "internal_proxy" or "plain_text", please visit https://lbs.amap.com/api/javascript-api/guide/abc/prepare for more information
|
||||||
amap_application_secret =
|
amap_application_secret =
|
||||||
@@ -354,8 +360,10 @@ custom_map_tile_server_default_zoom_level = 14
|
|||||||
# "national_bank_of_romania": https://www.bnr.ro/Exchange-rates-1224.aspx
|
# "national_bank_of_romania": https://www.bnr.ro/Exchange-rates-1224.aspx
|
||||||
# "bank_of_russia": https://www.cbr.ru/eng/currency_base/daily/
|
# "bank_of_russia": https://www.cbr.ru/eng/currency_base/daily/
|
||||||
# "swiss_national_bank": https://www.snb.ch/en/the-snb/mandates-goals/statistics/statistics-pub/current_interest_exchange_rates
|
# "swiss_national_bank": https://www.snb.ch/en/the-snb/mandates-goals/statistics/statistics-pub/current_interest_exchange_rates
|
||||||
|
# "national_bank_of_ukraine": https://bank.gov.ua/ua/markets/exchangerates
|
||||||
# "central_bank_of_uzbekistan": https://cbu.uz/en/arkhiv-kursov-valyut/
|
# "central_bank_of_uzbekistan": https://cbu.uz/en/arkhiv-kursov-valyut/
|
||||||
# "international_monetary_fund": https://www.imf.org/external/np/fin/data/param_rms_mth.aspx
|
# "international_monetary_fund": https://www.imf.org/external/np/fin/data/param_rms_mth.aspx
|
||||||
|
# "user_custom": users set their own exchange rates data in the UI
|
||||||
data_source = euro_central_bank
|
data_source = euro_central_bank
|
||||||
|
|
||||||
# Requesting exchange rates data timeout (0 - 4294967295 milliseconds)
|
# Requesting exchange rates data timeout (0 - 4294967295 milliseconds)
|
||||||
|
|||||||
+27
-32
@@ -1,36 +1,31 @@
|
|||||||
import globals from 'globals';
|
import pluginVue from 'eslint-plugin-vue';
|
||||||
|
import vueTsEslintConfig from '@vue/eslint-config-typescript';
|
||||||
|
|
||||||
import path from 'node:path';
|
export default [
|
||||||
import { fileURLToPath } from 'node:url';
|
...pluginVue.configs['flat/essential'],
|
||||||
|
...vueTsEslintConfig(),
|
||||||
import js from '@eslint/js';
|
{
|
||||||
import { FlatCompat } from '@eslint/eslintrc';
|
languageOptions: {
|
||||||
import { includeIgnoreFile } from '@eslint/compat';
|
parserOptions: {
|
||||||
|
projectService: true,
|
||||||
const __filename = fileURLToPath(import.meta.url);
|
tsconfigRootDir: import.meta.dirname,
|
||||||
const __dirname = path.dirname(__filename);
|
}
|
||||||
const gitignorePath = path.resolve(__dirname, '.gitignore');
|
|
||||||
|
|
||||||
const compat = new FlatCompat({
|
|
||||||
baseDirectory: __dirname,
|
|
||||||
recommendedConfig: js.configs.recommended,
|
|
||||||
allConfig: js.configs.all
|
|
||||||
});
|
|
||||||
|
|
||||||
export default [...compat.extends('eslint:recommended', 'plugin:vue/vue3-essential'),
|
|
||||||
includeIgnoreFile(gitignorePath), {
|
|
||||||
languageOptions: {
|
|
||||||
globals: {
|
|
||||||
...globals.node,
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
files: [
|
{
|
||||||
"**/*.{vue,js,jsx,cjs,mjs}"
|
ignores: [
|
||||||
],
|
'dist/**',
|
||||||
rules: {
|
'**/*.{js,jsx,cjs,mjs}'
|
||||||
'vue/no-use-v-if-with-v-for': 'off',
|
]
|
||||||
'vue/valid-v-slot': ['error', {
|
|
||||||
allowModifiers: true,
|
|
||||||
}],
|
|
||||||
},
|
},
|
||||||
}];
|
{
|
||||||
|
files: [
|
||||||
|
'**/*.{vue,ts,tsx,mts,js,jsx,cjs,mjs}'
|
||||||
|
],
|
||||||
|
rules: {
|
||||||
|
'vue/valid-v-slot': ['error', {
|
||||||
|
allowModifiers: true
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|||||||
+4
-3
@@ -1,12 +1,13 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"log"
|
"log"
|
||||||
"os"
|
"os"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/urfave/cli/v2"
|
"github.com/urfave/cli/v3"
|
||||||
|
|
||||||
"github.com/mayswind/ezbookkeeping/cmd"
|
"github.com/mayswind/ezbookkeeping/cmd"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/settings"
|
"github.com/mayswind/ezbookkeeping/pkg/settings"
|
||||||
@@ -28,7 +29,7 @@ func main() {
|
|||||||
settings.Version = Version
|
settings.Version = Version
|
||||||
settings.CommitHash = CommitHash
|
settings.CommitHash = CommitHash
|
||||||
|
|
||||||
app := &cli.App{
|
cmd := &cli.Command{
|
||||||
Name: "ezBookkeeping",
|
Name: "ezBookkeeping",
|
||||||
Usage: "A lightweight personal bookkeeping app hosted by yourself.",
|
Usage: "A lightweight personal bookkeeping app hosted by yourself.",
|
||||||
Version: GetFullVersion(),
|
Version: GetFullVersion(),
|
||||||
@@ -52,7 +53,7 @@ func main() {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
err := app.Run(os.Args)
|
err := cmd.Run(context.Background(), os.Args)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("Failed to run ezBookkeeping with %s: %v", os.Args, err)
|
log.Fatalf("Failed to run ezBookkeeping with %s: %v", os.Args, err)
|
||||||
|
|||||||
@@ -1,29 +1,30 @@
|
|||||||
module github.com/mayswind/ezbookkeeping
|
module github.com/mayswind/ezbookkeeping
|
||||||
|
|
||||||
go 1.22
|
go 1.24
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/boombuler/barcode v1.0.2
|
github.com/boombuler/barcode v1.0.2
|
||||||
github.com/extrame/xls v0.0.2-0.20200426124601-4a6cf263071b
|
github.com/extrame/xls v0.0.2-0.20200426124601-4a6cf263071b
|
||||||
github.com/gin-contrib/cache v1.3.0
|
github.com/gin-contrib/cache v1.3.2
|
||||||
github.com/gin-contrib/gzip v1.0.1
|
github.com/gin-contrib/gzip v1.2.3
|
||||||
github.com/gin-gonic/gin v1.10.0
|
github.com/gin-gonic/gin v1.10.0
|
||||||
github.com/go-co-op/gocron/v2 v2.12.3
|
github.com/go-co-op/gocron/v2 v2.16.1
|
||||||
github.com/go-playground/validator/v10 v10.22.1
|
github.com/go-playground/validator/v10 v10.26.0
|
||||||
github.com/go-sql-driver/mysql v1.8.1
|
github.com/go-sql-driver/mysql v1.9.2
|
||||||
github.com/golang-jwt/jwt/v5 v5.2.1
|
github.com/golang-jwt/jwt/v5 v5.2.2
|
||||||
github.com/lib/pq v1.10.9
|
github.com/lib/pq v1.10.9
|
||||||
github.com/mattn/go-sqlite3 v1.14.24
|
github.com/mattn/go-sqlite3 v1.14.28
|
||||||
github.com/minio/minio-go/v7 v7.0.80
|
github.com/minio/minio-go/v7 v7.0.91
|
||||||
github.com/patrickmn/go-cache v2.1.0+incompatible
|
github.com/patrickmn/go-cache v2.1.0+incompatible
|
||||||
github.com/pquerna/otp v1.4.0
|
github.com/pquerna/otp v1.4.0
|
||||||
github.com/sirupsen/logrus v1.9.3
|
github.com/sirupsen/logrus v1.9.3
|
||||||
github.com/stretchr/testify v1.9.0
|
github.com/stretchr/testify v1.10.0
|
||||||
github.com/urfave/cli/v2 v2.27.4
|
github.com/urfave/cli/v3 v3.2.0
|
||||||
github.com/wk8/go-ordered-map/v2 v2.1.8
|
github.com/wk8/go-ordered-map/v2 v2.1.8
|
||||||
golang.org/x/crypto v0.28.0
|
github.com/xuri/excelize/v2 v2.9.0
|
||||||
golang.org/x/net v0.30.0
|
golang.org/x/crypto v0.37.0
|
||||||
golang.org/x/text v0.19.0
|
golang.org/x/net v0.39.0
|
||||||
|
golang.org/x/text v0.24.0
|
||||||
gopkg.in/ini.v1 v1.67.0
|
gopkg.in/ini.v1 v1.67.0
|
||||||
gopkg.in/mail.v2 v2.3.1
|
gopkg.in/mail.v2 v2.3.1
|
||||||
xorm.io/builder v0.3.13
|
xorm.io/builder v0.3.13
|
||||||
@@ -35,39 +36,43 @@ require (
|
|||||||
github.com/bahlo/generic-list-go v0.2.0 // indirect
|
github.com/bahlo/generic-list-go v0.2.0 // indirect
|
||||||
github.com/bradfitz/gomemcache v0.0.0-20230905024940-24af94b03874 // indirect
|
github.com/bradfitz/gomemcache v0.0.0-20230905024940-24af94b03874 // indirect
|
||||||
github.com/buger/jsonparser v1.1.1 // indirect
|
github.com/buger/jsonparser v1.1.1 // indirect
|
||||||
github.com/bytedance/sonic v1.11.6 // indirect
|
github.com/bytedance/sonic v1.13.2 // indirect
|
||||||
github.com/bytedance/sonic/loader v0.1.1 // indirect
|
github.com/bytedance/sonic/loader v0.2.4 // indirect
|
||||||
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
|
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
|
||||||
github.com/chenzhuoyu/iasm v0.9.1 // indirect
|
github.com/chenzhuoyu/iasm v0.9.1 // indirect
|
||||||
github.com/cloudwego/base64x v0.1.4 // indirect
|
github.com/cloudwego/base64x v0.1.5 // indirect
|
||||||
github.com/cloudwego/iasm v0.2.0 // indirect
|
github.com/cloudwego/iasm v0.2.0 // indirect
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.4 // indirect
|
github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect
|
||||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||||
github.com/extrame/goyymmdd v0.0.0-20210114090516-7cc815f00d1a // indirect
|
github.com/extrame/goyymmdd v0.0.0-20210114090516-7cc815f00d1a // indirect
|
||||||
github.com/extrame/ole2 v0.0.0-20160812065207-d69429661ad7 // indirect
|
github.com/extrame/ole2 v0.0.0-20160812065207-d69429661ad7 // indirect
|
||||||
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
|
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
|
||||||
github.com/gin-contrib/sse v0.1.0 // indirect
|
github.com/gin-contrib/sse v1.0.0 // indirect
|
||||||
github.com/go-ini/ini v1.67.0 // indirect
|
github.com/go-ini/ini v1.67.0 // indirect
|
||||||
github.com/go-playground/locales v0.14.1 // indirect
|
github.com/go-playground/locales v0.14.1 // indirect
|
||||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||||
github.com/goccy/go-json v0.10.3 // indirect
|
github.com/goccy/go-json v0.10.5 // indirect
|
||||||
github.com/golang/snappy v0.0.4 // indirect
|
github.com/golang/snappy v0.0.4 // indirect
|
||||||
github.com/gomodule/redigo v1.8.9 // indirect
|
github.com/gomodule/redigo v1.9.2 // indirect
|
||||||
github.com/google/uuid v1.6.0 // indirect
|
github.com/google/uuid v1.6.0 // indirect
|
||||||
github.com/jonboulle/clockwork v0.4.0 // indirect
|
github.com/jonboulle/clockwork v0.5.0 // indirect
|
||||||
github.com/json-iterator/go v1.1.12 // indirect
|
github.com/json-iterator/go v1.1.12 // indirect
|
||||||
github.com/klauspost/compress v1.17.11 // indirect
|
github.com/klauspost/compress v1.18.0 // indirect
|
||||||
github.com/klauspost/cpuid/v2 v2.2.8 // indirect
|
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
|
||||||
github.com/leodido/go-urn v1.4.0 // indirect
|
github.com/leodido/go-urn v1.4.0 // indirect
|
||||||
github.com/mailru/easyjson v0.7.7 // indirect
|
github.com/mailru/easyjson v0.7.7 // indirect
|
||||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||||
github.com/memcachier/mc/v3 v3.0.3 // indirect
|
github.com/memcachier/mc/v3 v3.0.3 // indirect
|
||||||
|
github.com/minio/crc64nvme v1.0.1 // indirect
|
||||||
github.com/minio/md5-simd v1.1.2 // indirect
|
github.com/minio/md5-simd v1.1.2 // indirect
|
||||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||||
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
|
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect
|
||||||
|
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
|
||||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||||
|
github.com/richardlehane/mscfb v1.0.4 // indirect
|
||||||
|
github.com/richardlehane/msoleps v1.0.4 // indirect
|
||||||
github.com/robfig/cron/v3 v3.0.1 // indirect
|
github.com/robfig/cron/v3 v3.0.1 // indirect
|
||||||
github.com/robfig/go-cache v0.0.0-20130306151617-9fc39e0dbf62 // indirect
|
github.com/robfig/go-cache v0.0.0-20130306151617-9fc39e0dbf62 // indirect
|
||||||
github.com/rs/xid v1.6.0 // indirect
|
github.com/rs/xid v1.6.0 // indirect
|
||||||
@@ -77,10 +82,12 @@ require (
|
|||||||
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
||||||
github.com/ugorji/go/codec v1.2.12 // indirect
|
github.com/ugorji/go/codec v1.2.12 // indirect
|
||||||
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
|
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
|
||||||
golang.org/x/arch v0.8.0 // indirect
|
github.com/xuri/efp v0.0.0-20240408161823-9ad904a10d6d // indirect
|
||||||
|
github.com/xuri/nfp v0.0.0-20240318013403-ab9948c2c4a7 // indirect
|
||||||
|
golang.org/x/arch v0.15.0 // indirect
|
||||||
golang.org/x/exp v0.0.0-20240613232115-7f521ea00fb8 // indirect
|
golang.org/x/exp v0.0.0-20240613232115-7f521ea00fb8 // indirect
|
||||||
golang.org/x/sys v0.26.0 // indirect
|
golang.org/x/sys v0.32.0 // indirect
|
||||||
google.golang.org/protobuf v1.34.1 // indirect
|
google.golang.org/protobuf v1.36.6 // indirect
|
||||||
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
|
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
|
||||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -12,21 +12,20 @@ github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMU
|
|||||||
github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
|
github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
|
||||||
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
|
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
|
||||||
github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM=
|
github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM=
|
||||||
github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0=
|
github.com/bytedance/sonic v1.13.2 h1:8/H1FempDZqC4VqjptGo14QQlJx8VdZJegxs6wwfqpQ=
|
||||||
github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4=
|
github.com/bytedance/sonic v1.13.2/go.mod h1:o68xyaF9u2gvVBuGHPlUVCy+ZfmNNO5ETf1+KgkJhz4=
|
||||||
github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM=
|
|
||||||
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
|
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
|
||||||
|
github.com/bytedance/sonic/loader v0.2.4 h1:ZWCw4stuXUsn1/+zQDqeE7JKP+QO47tz7QCNan80NzY=
|
||||||
|
github.com/bytedance/sonic/loader v0.2.4/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
|
||||||
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
|
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
|
||||||
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
|
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
|
||||||
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA=
|
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA=
|
||||||
github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
|
github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
|
||||||
github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
|
github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
|
||||||
github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
|
github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4=
|
||||||
github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
|
github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
|
||||||
github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
|
|
||||||
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
|
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.4 h1:wfIWP927BUkWJb2NmU/kNDYIBTh/ziUX91+lVfRxZq4=
|
github.com/cpuguy83/go-md2man/v2 v2.0.5/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
|
||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
@@ -39,53 +38,53 @@ github.com/extrame/ole2 v0.0.0-20160812065207-d69429661ad7/go.mod h1:GPpMrAfHdb8
|
|||||||
github.com/extrame/xls v0.0.2-0.20200426124601-4a6cf263071b h1:jqW/h4gcXYEB6kVf6iuxjU9ONWA0ugUB94TP9UNmgdg=
|
github.com/extrame/xls v0.0.2-0.20200426124601-4a6cf263071b h1:jqW/h4gcXYEB6kVf6iuxjU9ONWA0ugUB94TP9UNmgdg=
|
||||||
github.com/extrame/xls v0.0.2-0.20200426124601-4a6cf263071b/go.mod h1:iACcgahst7BboCpIMSpnFs4SKyU9ZjsvZBfNbUxZOJI=
|
github.com/extrame/xls v0.0.2-0.20200426124601-4a6cf263071b/go.mod h1:iACcgahst7BboCpIMSpnFs4SKyU9ZjsvZBfNbUxZOJI=
|
||||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||||
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
|
github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM=
|
||||||
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
|
github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8=
|
||||||
github.com/gin-contrib/cache v1.3.0 h1:wEEw38uvb4rTraQJVpd9ex4ZotXNlc0fSaSUsuPXS/w=
|
github.com/gin-contrib/cache v1.3.2 h1:MsMTuG4KMhD2SVq5ygSYRci3BYdb/Egvk8lLNIB53gM=
|
||||||
github.com/gin-contrib/cache v1.3.0/go.mod h1:EA63LrWGI5vwSI95TS5fgBrtxZ1tM2NKx+NrEeyEDcU=
|
github.com/gin-contrib/cache v1.3.2/go.mod h1:lnZv6QsBGSiqyB3rbNO2uVMWDBcMiZtHqH3Jlk57vaE=
|
||||||
github.com/gin-contrib/gzip v1.0.1 h1:HQ8ENHODeLY7a4g1Au/46Z92bdGFl74OhxcZble9WJE=
|
github.com/gin-contrib/gzip v1.2.3 h1:dAhT722RuEG330ce2agAs75z7yB+NKvX/ZM1r8w0u2U=
|
||||||
github.com/gin-contrib/gzip v1.0.1/go.mod h1:njt428fdUNRvjuJf16tZMYZ2Yl+WQB53X5wmhDwXvC4=
|
github.com/gin-contrib/gzip v1.2.3/go.mod h1:ad72i4Bzmaypk8M762gNXa2wkxxjbz0icRNnuLJ9a/c=
|
||||||
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
|
github.com/gin-contrib/sse v1.0.0 h1:y3bT1mUWUxDpW4JLQg/HnTqV4rozuW4tC9eFKTxYI9E=
|
||||||
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
|
github.com/gin-contrib/sse v1.0.0/go.mod h1:zNuFdwarAygJBht0NTKiSi3jRf6RbqeILZ9Sp6Slhe0=
|
||||||
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
|
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
|
||||||
github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
|
github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
|
||||||
github.com/go-co-op/gocron/v2 v2.12.3 h1:3JkKjkFoAPp/i0YE+sonlF5gi+xnBChwYh75nX16MaE=
|
github.com/go-co-op/gocron/v2 v2.16.1 h1:ux/5zxVRveCaCuTtNI3DiOk581KC1KpJbpJFYUEVYwo=
|
||||||
github.com/go-co-op/gocron/v2 v2.12.3/go.mod h1:xY7bJxGazKam1cz04EebrlP4S9q4iWdiAylMGP3jY9w=
|
github.com/go-co-op/gocron/v2 v2.16.1/go.mod h1:opexeOFy5BplhsKdA7bzY9zeYih8I8/WNJ4arTIFPVc=
|
||||||
github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A=
|
github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A=
|
||||||
github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
|
github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
|
||||||
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
|
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
|
||||||
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
|
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
|
||||||
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
|
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
|
||||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||||
github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA=
|
github.com/go-playground/validator/v10 v10.26.0 h1:SP05Nqhjcvz81uJaRfEV0YBSSSGMc/iMaVtFbr3Sw2k=
|
||||||
github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
|
github.com/go-playground/validator/v10 v10.26.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
|
||||||
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
|
github.com/go-sql-driver/mysql v1.9.2 h1:4cNKDYQ1I84SXslGddlsrMhc8k4LeDVj6Ad6WRjiHuU=
|
||||||
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
|
github.com/go-sql-driver/mysql v1.9.2/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
|
||||||
github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
|
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||||
github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||||
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
|
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
|
||||||
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||||
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||||
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
|
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
|
||||||
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||||
github.com/gomodule/redigo v1.8.9 h1:Sl3u+2BI/kk+VEatbj0scLdrFhjPmbxOc1myhDP41ws=
|
github.com/gomodule/redigo v1.9.2 h1:HrutZBLhSIU8abiSfW8pj8mPhOyMYjZT/wcA4/L9L9s=
|
||||||
github.com/gomodule/redigo v1.8.9/go.mod h1:7ArFNvsTjH8GMMzB4uy1snslv2BwmginuMs06a1uzZE=
|
github.com/gomodule/redigo v1.9.2/go.mod h1:KsU3hiK/Ay8U42qpaJk+kuNa3C+spxapWpM+ywhcgtw=
|
||||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
|
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
|
||||||
github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4=
|
github.com/jonboulle/clockwork v0.5.0 h1:Hyh9A8u51kptdkR+cqRpT1EebBwTn1oK9YfGYbdFz6I=
|
||||||
github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc=
|
github.com/jonboulle/clockwork v0.5.0/go.mod h1:3mZlmanh0g2NDKO5TWZVJAfofYk64M7XN3SzBPjZF60=
|
||||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||||
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
|
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
||||||
github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
|
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
|
||||||
github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||||
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||||
github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM=
|
github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE=
|
||||||
github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
|
github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
|
||||||
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
|
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
|
||||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||||
@@ -98,37 +97,45 @@ github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0
|
|||||||
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
||||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
|
github.com/mattn/go-sqlite3 v1.14.28 h1:ThEiQrnbtumT+QMknw63Befp/ce/nUPgBPMlRFEum7A=
|
||||||
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
github.com/mattn/go-sqlite3 v1.14.28/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||||
github.com/memcachier/mc/v3 v3.0.3 h1:qii+lDiPKi36O4Xg+HVKwHu6Oq+Gt17b+uEiA0Drwv4=
|
github.com/memcachier/mc/v3 v3.0.3 h1:qii+lDiPKi36O4Xg+HVKwHu6Oq+Gt17b+uEiA0Drwv4=
|
||||||
github.com/memcachier/mc/v3 v3.0.3/go.mod h1:GzjocBahcXPxt2cmqzknrgqCOmMxiSzhVKPOe90Tpug=
|
github.com/memcachier/mc/v3 v3.0.3/go.mod h1:GzjocBahcXPxt2cmqzknrgqCOmMxiSzhVKPOe90Tpug=
|
||||||
|
github.com/minio/crc64nvme v1.0.1 h1:DHQPrYPdqK7jQG/Ls5CTBZWeex/2FMS3G5XGkycuFrY=
|
||||||
|
github.com/minio/crc64nvme v1.0.1/go.mod h1:eVfm2fAzLlxMdUGc0EEBGSMmPwmXD5XiNRpnu9J3bvg=
|
||||||
github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
|
github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
|
||||||
github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
|
github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
|
||||||
github.com/minio/minio-go/v7 v7.0.80 h1:2mdUHXEykRdY/BigLt3Iuu1otL0JTogT0Nmltg0wujk=
|
github.com/minio/minio-go/v7 v7.0.91 h1:tWLZnEfo3OZl5PoXQwcwTAPNNrjyWwOh6cbZitW5JQc=
|
||||||
github.com/minio/minio-go/v7 v7.0.80/go.mod h1:84gmIilaX4zcvAWWzJ5Z1WI5axN+hAbM5w25xf8xvC0=
|
github.com/minio/minio-go/v7 v7.0.91/go.mod h1:uvMUcGrpgeSAAI6+sD3818508nUyMULw94j2Nxku/Go=
|
||||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||||
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||||
|
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw=
|
||||||
|
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8=
|
||||||
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||||
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||||
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
||||||
github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
|
github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
|
||||||
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
|
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
|
||||||
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
|
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
|
||||||
github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
|
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
github.com/pquerna/otp v1.4.0 h1:wZvl1TIVxKRThZIBiwOOHOGP/1+nZyWBil9Y2XNEDzg=
|
github.com/pquerna/otp v1.4.0 h1:wZvl1TIVxKRThZIBiwOOHOGP/1+nZyWBil9Y2XNEDzg=
|
||||||
github.com/pquerna/otp v1.4.0/go.mod h1:dkJfzwRKNiegxyNb54X/3fLwhCynbMspSyWKnvi1AEg=
|
github.com/pquerna/otp v1.4.0/go.mod h1:dkJfzwRKNiegxyNb54X/3fLwhCynbMspSyWKnvi1AEg=
|
||||||
|
github.com/richardlehane/mscfb v1.0.4 h1:WULscsljNPConisD5hR0+OyZjwK46Pfyr6mPu5ZawpM=
|
||||||
|
github.com/richardlehane/mscfb v1.0.4/go.mod h1:YzVpcZg9czvAuhk9T+a3avCpcFPMUWm7gK3DypaEsUk=
|
||||||
|
github.com/richardlehane/msoleps v1.0.1/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg=
|
||||||
|
github.com/richardlehane/msoleps v1.0.4 h1:WuESlvhX3gH2IHcd8UqyCuFY5yiq/GR/yqaSM/9/g00=
|
||||||
|
github.com/richardlehane/msoleps v1.0.4/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg=
|
||||||
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
|
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
|
||||||
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
|
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
|
||||||
github.com/robfig/go-cache v0.0.0-20130306151617-9fc39e0dbf62 h1:pyecQtsPmlkCsMkYhT5iZ+sUXuwee+OvfuJjinEA3ko=
|
github.com/robfig/go-cache v0.0.0-20130306151617-9fc39e0dbf62 h1:pyecQtsPmlkCsMkYhT5iZ+sUXuwee+OvfuJjinEA3ko=
|
||||||
github.com/robfig/go-cache v0.0.0-20130306151617-9fc39e0dbf62/go.mod h1:65XQgovT59RWatovFwnwocoUxiI/eENTnOY5GK3STuY=
|
github.com/robfig/go-cache v0.0.0-20130306151617-9fc39e0dbf62/go.mod h1:65XQgovT59RWatovFwnwocoUxiI/eENTnOY5GK3STuY=
|
||||||
github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU=
|
github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU=
|
||||||
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
|
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
|
||||||
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
|
||||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||||
@@ -142,8 +149,8 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
|
|||||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||||
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
|
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||||
github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE=
|
github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE=
|
||||||
github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ=
|
github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ=
|
||||||
github.com/tealeg/xlsx v1.0.5 h1:+f8oFmvY8Gw1iUXzPk+kz+4GpbDZPK1FhPiQRd+ypgE=
|
github.com/tealeg/xlsx v1.0.5 h1:+f8oFmvY8Gw1iUXzPk+kz+4GpbDZPK1FhPiQRd+ypgE=
|
||||||
@@ -152,34 +159,37 @@ github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS
|
|||||||
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
||||||
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
|
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
|
||||||
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
||||||
github.com/urfave/cli/v2 v2.27.4 h1:o1owoI+02Eb+K107p27wEX9Bb8eqIoZCfLXloLUSWJ8=
|
github.com/urfave/cli/v3 v3.2.0 h1:m8WIXY0U9LCuUl5r+0fqLWDhNYWt6qvlW+GcF4EoXf8=
|
||||||
github.com/urfave/cli/v2 v2.27.4/go.mod h1:m4QzxcD2qpra4z7WhzEGn74WZLViBnMpb1ToCAKdGRQ=
|
github.com/urfave/cli/v3 v3.2.0/go.mod h1:FJSKtM/9AiiTOJL4fJ6TbMUkxBXn7GO9guZqoZtpYpo=
|
||||||
github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc=
|
github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc=
|
||||||
github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw=
|
github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw=
|
||||||
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4=
|
|
||||||
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=
|
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=
|
||||||
|
github.com/xuri/efp v0.0.0-20240408161823-9ad904a10d6d h1:llb0neMWDQe87IzJLS4Ci7psK/lVsjIS2otl+1WyRyY=
|
||||||
|
github.com/xuri/efp v0.0.0-20240408161823-9ad904a10d6d/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI=
|
||||||
|
github.com/xuri/excelize/v2 v2.9.0 h1:1tgOaEq92IOEumR1/JfYS/eR0KHOCsRv/rYXXh6YJQE=
|
||||||
|
github.com/xuri/excelize/v2 v2.9.0/go.mod h1:uqey4QBZ9gdMeWApPLdhm9x+9o2lq4iVmjiLfBS5hdE=
|
||||||
|
github.com/xuri/nfp v0.0.0-20240318013403-ab9948c2c4a7 h1:hPVCafDV85blFTabnqKgNhDCkJX25eik94Si9cTER4A=
|
||||||
|
github.com/xuri/nfp v0.0.0-20240318013403-ab9948c2c4a7/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ=
|
||||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||||
golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc=
|
golang.org/x/arch v0.15.0 h1:QtOrQd0bTUnhNVNndMpLHNWrDmYzZ2KDqSrEymqInZw=
|
||||||
golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
|
golang.org/x/arch v0.15.0/go.mod h1:JmwW7aLIoRUKgaTzhkiEFxvcEiQGyOg9BMonBJUS7EE=
|
||||||
golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw=
|
golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE=
|
||||||
golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U=
|
golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc=
|
||||||
golang.org/x/exp v0.0.0-20240613232115-7f521ea00fb8 h1:yixxcjnhBmY0nkL253HFVIm0JsFHwrHdT3Yh6szTnfY=
|
|
||||||
golang.org/x/exp v0.0.0-20240613232115-7f521ea00fb8/go.mod h1:jj3sYF3dwk5D+ghuXyeI3r5MFf+NT2An6/9dOA95KSI=
|
golang.org/x/exp v0.0.0-20240613232115-7f521ea00fb8/go.mod h1:jj3sYF3dwk5D+ghuXyeI3r5MFf+NT2An6/9dOA95KSI=
|
||||||
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4=
|
golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY=
|
||||||
golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU=
|
golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E=
|
||||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
|
||||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo=
|
golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20=
|
||||||
golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM=
|
golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0=
|
||||||
golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
|
golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU=
|
||||||
google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg=
|
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
|
||||||
google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
|
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
|
||||||
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc h1:2gGKlE2+asNV9m7xrywl36YYNnBG5ZQ0r/BOOxqPpmk=
|
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc h1:2gGKlE2+asNV9m7xrywl36YYNnBG5ZQ0r/BOOxqPpmk=
|
||||||
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc/go.mod h1:m7x9LTH6d71AHyAX77c9yqWCCa3UKHcVEj9y7hAtKDk=
|
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc/go.mod h1:m7x9LTH6d71AHyAX77c9yqWCCa3UKHcVEj9y7hAtKDk=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
|||||||
Generated
+2731
-1420
File diff suppressed because it is too large
Load Diff
+29
-23
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "ezbookkeeping",
|
"name": "ezbookkeeping",
|
||||||
"version": "0.7.0",
|
"version": "0.9.0",
|
||||||
"private": true,
|
"private": true,
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
@@ -15,52 +15,58 @@
|
|||||||
"serve": "cross-env NODE_ENV=development vite",
|
"serve": "cross-env NODE_ENV=development vite",
|
||||||
"build": "cross-env NODE_ENV=production vite build",
|
"build": "cross-env NODE_ENV=production vite build",
|
||||||
"serve:dist": "vite preview",
|
"serve:dist": "vite preview",
|
||||||
"lint": "eslint . --fix"
|
"lint": "tsc --noEmit && eslint . --fix"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@mdi/js": "^7.4.47",
|
"@mdi/js": "^7.4.47",
|
||||||
"@vuepic/vue-datepicker": "^10.0.0",
|
"@vuepic/vue-datepicker": "^11.0.2",
|
||||||
"axios": "^1.7.7",
|
"axios": "^1.9.0",
|
||||||
"cbor-js": "^0.1.0",
|
"cbor-js": "^0.1.0",
|
||||||
"clipboard": "^2.0.11",
|
"clipboard": "^2.0.11",
|
||||||
"crypto-js": "^4.2.0",
|
"crypto-js": "^4.2.0",
|
||||||
"dom7": "^4.0.6",
|
"dom7": "^4.0.6",
|
||||||
"echarts": "^5.5.1",
|
"echarts": "^5.6.0",
|
||||||
"framework7": "^8.3.4",
|
"framework7": "^8.3.4",
|
||||||
"framework7-icons": "^5.0.5",
|
"framework7-icons": "^5.0.5",
|
||||||
"framework7-vue": "^8.3.4",
|
"framework7-vue": "^8.3.4",
|
||||||
"leaflet": "^1.9.4",
|
"leaflet": "^1.9.4",
|
||||||
"line-awesome": "^1.3.0",
|
"line-awesome": "^1.3.0",
|
||||||
"moment": "^2.30.1",
|
"moment": "^2.30.1",
|
||||||
"moment-timezone": "^0.5.46",
|
"moment-timezone": "^0.5.48",
|
||||||
"pinia": "^2.2.5",
|
"pinia": "^3.0.2",
|
||||||
"register-service-worker": "^1.7.2",
|
"register-service-worker": "^1.7.2",
|
||||||
"skeleton-elements": "^4.0.1",
|
"skeleton-elements": "^4.0.1",
|
||||||
"swiper": "^10.2.0",
|
"swiper": "^10.2.0",
|
||||||
"ua-parser-js": "^1.0.39",
|
"ua-parser-js": "^1.0.39",
|
||||||
"vue": "^3.5.12",
|
"vue": "^3.5.13",
|
||||||
"vue-echarts": "^7.0.3",
|
"vue-echarts": "^7.0.3",
|
||||||
"vue-i18n": "^10.0.4",
|
"vue-i18n": "^11.1.3",
|
||||||
"vue-router": "^4.4.5",
|
"vue-router": "^4.5.1",
|
||||||
"vue3-perfect-scrollbar": "^2.0.0",
|
"vue3-perfect-scrollbar": "^2.0.0",
|
||||||
"vuedraggable": "^4.1.0",
|
"vuedraggable": "^4.1.0",
|
||||||
"vuetify": "^3.7.3"
|
"vuetify": "^3.8.2"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@eslint/compat": "^1.2.2",
|
"@tsconfig/node22": "^22.0.1",
|
||||||
"@eslint/eslintrc": "^3.1.0",
|
"@types/cbor-js": "^0.1.1",
|
||||||
"@eslint/js": "^9.14.0",
|
"@types/crypto-js": "^4.2.2",
|
||||||
"@vitejs/plugin-vue": "^5.1.4",
|
"@types/git-rev-sync": "^2.0.2",
|
||||||
|
"@types/node": "^22.15.2",
|
||||||
|
"@types/ua-parser-js": "^0.7.39",
|
||||||
|
"@vitejs/plugin-vue": "^5.2.3",
|
||||||
|
"@vue/eslint-config-typescript": "^14.5.0",
|
||||||
|
"@vue/tsconfig": "^0.7.0",
|
||||||
"cross-env": "^7.0.3",
|
"cross-env": "^7.0.3",
|
||||||
"eslint": "^9.14.0",
|
"eslint": "^9.25.1",
|
||||||
"eslint-plugin-vue": "^9.30.0",
|
"eslint-plugin-vue": "^10.0.0",
|
||||||
"git-rev-sync": "^3.0.2",
|
"git-rev-sync": "^3.0.2",
|
||||||
"globals": "^15.11.0",
|
"postcss-preset-env": "^10.1.6",
|
||||||
"postcss-preset-env": "^10.0.9",
|
"sass": "^1.87.0",
|
||||||
"sass": "^1.80.6",
|
"typescript": "^5.8.3",
|
||||||
"vite": "^5.4.10",
|
"vite": "^6.3.3",
|
||||||
"vite-plugin-pwa": "^0.20.5",
|
"vite-plugin-pwa": "^1.0.0",
|
||||||
"vite-plugin-vuetify": "^2.0.4"
|
"vite-plugin-vuetify": "^2.1.1",
|
||||||
|
"vue-tsc": "^2.2.10"
|
||||||
},
|
},
|
||||||
"browserslist": [
|
"browserslist": [
|
||||||
"> 1%",
|
"> 1%",
|
||||||
|
|||||||
+255
-20
@@ -28,6 +28,9 @@ var (
|
|||||||
container: settings.Container,
|
container: settings.Container,
|
||||||
},
|
},
|
||||||
ApiUsingDuplicateChecker: ApiUsingDuplicateChecker{
|
ApiUsingDuplicateChecker: ApiUsingDuplicateChecker{
|
||||||
|
ApiUsingConfig: ApiUsingConfig{
|
||||||
|
container: settings.Container,
|
||||||
|
},
|
||||||
container: duplicatechecker.Container,
|
container: duplicatechecker.Container,
|
||||||
},
|
},
|
||||||
accounts: services.Accounts,
|
accounts: services.Accounts,
|
||||||
@@ -284,7 +287,7 @@ func (a *AccountsApi) AccountCreateHandler(c *core.WebContext) (any, *errs.Error
|
|||||||
|
|
||||||
log.Infof(c, "[accounts.AccountCreateHandler] user \"uid:%d\" has created a new account \"id:%d\" successfully", uid, mainAccount.AccountId)
|
log.Infof(c, "[accounts.AccountCreateHandler] user \"uid:%d\" has created a new account \"id:%d\" successfully", uid, mainAccount.AccountId)
|
||||||
|
|
||||||
a.SetSubmissionRemark(duplicatechecker.DUPLICATE_CHECKER_TYPE_NEW_ACCOUNT, uid, accountCreateReq.ClientSessionId, utils.Int64ToString(mainAccount.AccountId))
|
a.SetSubmissionRemarkIfEnable(duplicatechecker.DUPLICATE_CHECKER_TYPE_NEW_ACCOUNT, uid, accountCreateReq.ClientSessionId, utils.Int64ToString(mainAccount.AccountId))
|
||||||
accountInfoResp := mainAccount.ToAccountInfoResponse()
|
accountInfoResp := mainAccount.ToAccountInfoResponse()
|
||||||
|
|
||||||
if len(childrenAccounts) > 0 {
|
if len(childrenAccounts) > 0 {
|
||||||
@@ -308,11 +311,27 @@ func (a *AccountsApi) AccountModifyHandler(c *core.WebContext) (any, *errs.Error
|
|||||||
return nil, errs.NewIncompleteOrIncorrectSubmissionError(err)
|
return nil, errs.NewIncompleteOrIncorrectSubmissionError(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if accountModifyReq.Id <= 0 {
|
||||||
|
return nil, errs.ErrAccountIdInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
utcOffset, err := c.GetClientTimezoneOffset()
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf(c, "[accounts.AccountModifyHandler] cannot get client timezone offset, because %s", err.Error())
|
||||||
|
return nil, errs.ErrClientTimezoneOffsetInvalid
|
||||||
|
}
|
||||||
|
|
||||||
if accountModifyReq.Category < models.ACCOUNT_CATEGORY_CASH || accountModifyReq.Category > models.ACCOUNT_CATEGORY_CERTIFICATE_OF_DEPOSIT {
|
if accountModifyReq.Category < models.ACCOUNT_CATEGORY_CASH || accountModifyReq.Category > models.ACCOUNT_CATEGORY_CERTIFICATE_OF_DEPOSIT {
|
||||||
log.Warnf(c, "[accounts.AccountModifyHandler] account category invalid, category is %d", accountModifyReq.Category)
|
log.Warnf(c, "[accounts.AccountModifyHandler] account category invalid, category is %d", accountModifyReq.Category)
|
||||||
return nil, errs.ErrAccountCategoryInvalid
|
return nil, errs.ErrAccountCategoryInvalid
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if accountModifyReq.Category != models.ACCOUNT_CATEGORY_CREDIT_CARD && accountModifyReq.CreditCardStatementDate != 0 {
|
||||||
|
log.Warnf(c, "[accounts.AccountModifyHandler] cannot set statement date with category \"%d\"", accountModifyReq.Category)
|
||||||
|
return nil, errs.ErrCannotSetStatementDateForNonCreditCard
|
||||||
|
}
|
||||||
|
|
||||||
uid := c.GetCurrentUid()
|
uid := c.GetCurrentUid()
|
||||||
accountAndSubAccounts, err := a.accounts.GetAccountAndSubAccountsByAccountId(c, uid, accountModifyReq.Id)
|
accountAndSubAccounts, err := a.accounts.GetAccountAndSubAccountsByAccountId(c, uid, accountModifyReq.Id)
|
||||||
|
|
||||||
@@ -328,20 +347,81 @@ func (a *AccountsApi) AccountModifyHandler(c *core.WebContext) (any, *errs.Error
|
|||||||
return nil, errs.ErrAccountNotFound
|
return nil, errs.ErrAccountNotFound
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(accountModifyReq.SubAccounts)+1 != len(accountAndSubAccounts) {
|
if accountModifyReq.Currency != nil && mainAccount.Currency != *accountModifyReq.Currency {
|
||||||
return nil, errs.ErrCannotAddOrDeleteSubAccountsWhenModify
|
return nil, errs.ErrNotSupportedChangeCurrency
|
||||||
}
|
}
|
||||||
|
|
||||||
if accountModifyReq.Category != models.ACCOUNT_CATEGORY_CREDIT_CARD && accountModifyReq.CreditCardStatementDate != 0 {
|
if accountModifyReq.Balance != nil {
|
||||||
log.Warnf(c, "[accounts.AccountModifyHandler] cannot set statement date with category \"%d\"", accountModifyReq.Category)
|
return nil, errs.ErrNotSupportedChangeBalance
|
||||||
return nil, errs.ErrCannotSetStatementDateForNonCreditCard
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if mainAccount.Type == models.ACCOUNT_TYPE_MULTI_SUB_ACCOUNTS {
|
if accountModifyReq.BalanceTime != nil {
|
||||||
|
return nil, errs.ErrNotSupportedChangeBalanceTime
|
||||||
|
}
|
||||||
|
|
||||||
|
if mainAccount.Type == models.ACCOUNT_TYPE_SINGLE_ACCOUNT {
|
||||||
|
if len(accountModifyReq.SubAccounts) > 0 {
|
||||||
|
log.Warnf(c, "[accounts.AccountModifyHandler] account cannot have any sub-accounts")
|
||||||
|
return nil, errs.ErrAccountCannotHaveSubAccounts
|
||||||
|
}
|
||||||
|
} else if mainAccount.Type == models.ACCOUNT_TYPE_MULTI_SUB_ACCOUNTS {
|
||||||
|
if len(accountModifyReq.SubAccounts) < 1 {
|
||||||
|
log.Warnf(c, "[accounts.AccountModifyHandler] account does not have any sub-accounts")
|
||||||
|
return nil, errs.ErrAccountHaveNoSubAccount
|
||||||
|
}
|
||||||
|
|
||||||
for i := 0; i < len(accountModifyReq.SubAccounts); i++ {
|
for i := 0; i < len(accountModifyReq.SubAccounts); i++ {
|
||||||
subAccount := accountModifyReq.SubAccounts[i]
|
subAccountReq := accountModifyReq.SubAccounts[i]
|
||||||
|
|
||||||
if subAccount.CreditCardStatementDate != 0 {
|
if subAccountReq.Category != accountModifyReq.Category {
|
||||||
|
log.Warnf(c, "[accounts.AccountModifyHandler] category of sub-account#%d not equals to parent", i)
|
||||||
|
return nil, errs.ErrSubAccountCategoryNotEqualsToParent
|
||||||
|
}
|
||||||
|
|
||||||
|
if subAccountReq.Id == 0 { // create new sub-account
|
||||||
|
if subAccountReq.Currency == nil {
|
||||||
|
log.Warnf(c, "[accounts.AccountModifyHandler] sub-account#%d not set currency", i)
|
||||||
|
return nil, errs.ErrAccountCurrencyInvalid
|
||||||
|
} else if subAccountReq.Currency != nil && *subAccountReq.Currency == validators.ParentAccountCurrencyPlaceholder {
|
||||||
|
log.Warnf(c, "[accounts.AccountModifyHandler] sub-account#%d cannot set currency placeholder", i)
|
||||||
|
return nil, errs.ErrAccountCurrencyInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
if subAccountReq.Balance == nil {
|
||||||
|
defaultBalance := int64(0)
|
||||||
|
subAccountReq.Balance = &defaultBalance
|
||||||
|
}
|
||||||
|
|
||||||
|
if *subAccountReq.Balance == 0 {
|
||||||
|
defaultBalanceTime := int64(0)
|
||||||
|
subAccountReq.BalanceTime = &defaultBalanceTime
|
||||||
|
}
|
||||||
|
|
||||||
|
if *subAccountReq.Balance != 0 && (subAccountReq.BalanceTime == nil || *subAccountReq.BalanceTime <= 0) {
|
||||||
|
log.Warnf(c, "[accounts.AccountModifyHandler] sub-account#%d balance time is not set", i)
|
||||||
|
return nil, errs.ErrAccountBalanceTimeNotSet
|
||||||
|
}
|
||||||
|
} else { // modify existed sub-account
|
||||||
|
subAccount, exists := accountMap[subAccountReq.Id]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
return nil, errs.ErrAccountNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
if subAccountReq.Currency != nil && subAccount.Currency != *subAccountReq.Currency {
|
||||||
|
return nil, errs.ErrNotSupportedChangeCurrency
|
||||||
|
}
|
||||||
|
|
||||||
|
if subAccountReq.Balance != nil {
|
||||||
|
return nil, errs.ErrNotSupportedChangeBalance
|
||||||
|
}
|
||||||
|
|
||||||
|
if subAccountReq.BalanceTime != nil {
|
||||||
|
return nil, errs.ErrNotSupportedChangeBalanceTime
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if subAccountReq.CreditCardStatementDate != 0 {
|
||||||
log.Warnf(c, "[accounts.AccountModifyHandler] sub-account#%d cannot set statement date", i)
|
log.Warnf(c, "[accounts.AccountModifyHandler] sub-account#%d cannot set statement date", i)
|
||||||
return nil, errs.ErrCannotSetStatementDateForSubAccount
|
return nil, errs.ErrCannotSetStatementDateForSubAccount
|
||||||
}
|
}
|
||||||
@@ -350,6 +430,9 @@ func (a *AccountsApi) AccountModifyHandler(c *core.WebContext) (any, *errs.Error
|
|||||||
|
|
||||||
anythingUpdate := false
|
anythingUpdate := false
|
||||||
var toUpdateAccounts []*models.Account
|
var toUpdateAccounts []*models.Account
|
||||||
|
var toAddAccounts []*models.Account
|
||||||
|
var toAddAccountBalanceTimes []int64
|
||||||
|
var toDeleteAccountIds []int64
|
||||||
|
|
||||||
toUpdateAccount := a.getToUpdateAccount(uid, &accountModifyReq, mainAccount, false)
|
toUpdateAccount := a.getToUpdateAccount(uid, &accountModifyReq, mainAccount, false)
|
||||||
|
|
||||||
@@ -358,18 +441,43 @@ func (a *AccountsApi) AccountModifyHandler(c *core.WebContext) (any, *errs.Error
|
|||||||
toUpdateAccounts = append(toUpdateAccounts, toUpdateAccount)
|
toUpdateAccounts = append(toUpdateAccounts, toUpdateAccount)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
toDeleteAccountIds = a.getToDeleteSubAccountIds(&accountModifyReq, mainAccount, accountAndSubAccounts)
|
||||||
|
|
||||||
|
if len(toDeleteAccountIds) > 0 {
|
||||||
|
anythingUpdate = true
|
||||||
|
}
|
||||||
|
|
||||||
|
maxOrderId := int32(0)
|
||||||
|
|
||||||
|
for i := 0; i < len(accountAndSubAccounts); i++ {
|
||||||
|
account := accountAndSubAccounts[i]
|
||||||
|
|
||||||
|
if account.AccountId != mainAccount.AccountId && account.DisplayOrder > maxOrderId {
|
||||||
|
maxOrderId = account.DisplayOrder
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
for i := 0; i < len(accountModifyReq.SubAccounts); i++ {
|
for i := 0; i < len(accountModifyReq.SubAccounts); i++ {
|
||||||
subAccountReq := accountModifyReq.SubAccounts[i]
|
subAccountReq := accountModifyReq.SubAccounts[i]
|
||||||
|
|
||||||
if _, exists := accountMap[subAccountReq.Id]; !exists {
|
if _, exists := accountMap[subAccountReq.Id]; !exists {
|
||||||
return nil, errs.ErrAccountNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
toUpdateSubAccount := a.getToUpdateAccount(uid, subAccountReq, accountMap[subAccountReq.Id], true)
|
|
||||||
|
|
||||||
if toUpdateSubAccount != nil {
|
|
||||||
anythingUpdate = true
|
anythingUpdate = true
|
||||||
toUpdateAccounts = append(toUpdateAccounts, toUpdateSubAccount)
|
maxOrderId = maxOrderId + 1
|
||||||
|
newSubAccount := a.createNewSubAccountModelForModify(uid, mainAccount.Type, subAccountReq, maxOrderId)
|
||||||
|
toAddAccounts = append(toAddAccounts, newSubAccount)
|
||||||
|
|
||||||
|
if subAccountReq.BalanceTime != nil {
|
||||||
|
toAddAccountBalanceTimes = append(toAddAccountBalanceTimes, *subAccountReq.BalanceTime)
|
||||||
|
} else {
|
||||||
|
toAddAccountBalanceTimes = append(toAddAccountBalanceTimes, 0)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
toUpdateSubAccount := a.getToUpdateAccount(uid, subAccountReq, accountMap[subAccountReq.Id], true)
|
||||||
|
|
||||||
|
if toUpdateSubAccount != nil {
|
||||||
|
anythingUpdate = true
|
||||||
|
toUpdateAccounts = append(toUpdateAccounts, toUpdateSubAccount)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -377,7 +485,43 @@ func (a *AccountsApi) AccountModifyHandler(c *core.WebContext) (any, *errs.Error
|
|||||||
return nil, errs.ErrNothingWillBeUpdated
|
return nil, errs.ErrNothingWillBeUpdated
|
||||||
}
|
}
|
||||||
|
|
||||||
err = a.accounts.ModifyAccounts(c, uid, toUpdateAccounts)
|
if len(toAddAccounts) > 0 && a.CurrentConfig().EnableDuplicateSubmissionsCheck && accountModifyReq.ClientSessionId != "" {
|
||||||
|
found, remark := a.GetSubmissionRemark(duplicatechecker.DUPLICATE_CHECKER_TYPE_NEW_SUBACCOUNT, uid, accountModifyReq.ClientSessionId)
|
||||||
|
|
||||||
|
if found {
|
||||||
|
log.Infof(c, "[accounts.AccountModifyHandler] another account \"id:%s\" modification has been created for user \"uid:%d\"", remark, uid)
|
||||||
|
accountId, err := utils.StringToInt64(remark)
|
||||||
|
|
||||||
|
if err == nil {
|
||||||
|
accountAndSubAccounts, err := a.accounts.GetAccountAndSubAccountsByAccountId(c, uid, accountId)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(c, "[accounts.AccountModifyHandler] failed to get existed account \"id:%d\" for user \"uid:%d\", because %s", accountId, uid, err.Error())
|
||||||
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
|
}
|
||||||
|
|
||||||
|
accountMap := a.accounts.GetAccountMapByList(accountAndSubAccounts)
|
||||||
|
mainAccount, exists := accountMap[accountId]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
return nil, errs.ErrOperationFailed
|
||||||
|
}
|
||||||
|
|
||||||
|
accountInfoResp := mainAccount.ToAccountInfoResponse()
|
||||||
|
|
||||||
|
for i := 0; i < len(accountAndSubAccounts); i++ {
|
||||||
|
if accountAndSubAccounts[i].ParentAccountId == mainAccount.AccountId {
|
||||||
|
subAccountResp := accountAndSubAccounts[i].ToAccountInfoResponse()
|
||||||
|
accountInfoResp.SubAccounts = append(accountInfoResp.SubAccounts, subAccountResp)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return accountInfoResp, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
err = a.accounts.ModifyAccounts(c, mainAccount, toUpdateAccounts, toAddAccounts, toAddAccountBalanceTimes, toDeleteAccountIds, utcOffset)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Errorf(c, "[accounts.AccountModifyHandler] failed to update account \"id:%d\" for user \"uid:%d\", because %s", accountModifyReq.Id, uid, err.Error())
|
log.Errorf(c, "[accounts.AccountModifyHandler] failed to update account \"id:%d\" for user \"uid:%d\", because %s", accountModifyReq.Id, uid, err.Error())
|
||||||
@@ -386,6 +530,10 @@ func (a *AccountsApi) AccountModifyHandler(c *core.WebContext) (any, *errs.Error
|
|||||||
|
|
||||||
log.Infof(c, "[accounts.AccountModifyHandler] user \"uid:%d\" has updated account \"id:%d\" successfully", uid, accountModifyReq.Id)
|
log.Infof(c, "[accounts.AccountModifyHandler] user \"uid:%d\" has updated account \"id:%d\" successfully", uid, accountModifyReq.Id)
|
||||||
|
|
||||||
|
if len(toAddAccounts) > 0 {
|
||||||
|
a.SetSubmissionRemarkIfEnable(duplicatechecker.DUPLICATE_CHECKER_TYPE_NEW_SUBACCOUNT, uid, accountModifyReq.ClientSessionId, utils.Int64ToString(mainAccount.AccountId))
|
||||||
|
}
|
||||||
|
|
||||||
accountRespMap := make(map[int64]*models.AccountInfoResponse)
|
accountRespMap := make(map[int64]*models.AccountInfoResponse)
|
||||||
|
|
||||||
for i := 0; i < len(toUpdateAccounts); i++ {
|
for i := 0; i < len(toUpdateAccounts); i++ {
|
||||||
@@ -402,11 +550,23 @@ func (a *AccountsApi) AccountModifyHandler(c *core.WebContext) (any, *errs.Error
|
|||||||
accountRespMap[accountResp.Id] = accountResp
|
accountRespMap[accountResp.Id] = accountResp
|
||||||
}
|
}
|
||||||
|
|
||||||
|
for i := 0; i < len(toAddAccounts); i++ {
|
||||||
|
account := toAddAccounts[i]
|
||||||
|
accountResp := account.ToAccountInfoResponse()
|
||||||
|
accountRespMap[accountResp.Id] = accountResp
|
||||||
|
}
|
||||||
|
|
||||||
|
deletedAccountIds := make(map[int64]bool)
|
||||||
|
|
||||||
|
for i := 0; i < len(toDeleteAccountIds); i++ {
|
||||||
|
deletedAccountIds[toDeleteAccountIds[i]] = true
|
||||||
|
}
|
||||||
|
|
||||||
for i := 0; i < len(accountAndSubAccounts); i++ {
|
for i := 0; i < len(accountAndSubAccounts); i++ {
|
||||||
oldAccount := accountAndSubAccounts[i]
|
oldAccount := accountAndSubAccounts[i]
|
||||||
_, exists := accountRespMap[oldAccount.AccountId]
|
_, exists := accountRespMap[oldAccount.AccountId]
|
||||||
|
|
||||||
if !exists {
|
if !exists && !deletedAccountIds[oldAccount.AccountId] {
|
||||||
oldAccountResp := oldAccount.ToAccountInfoResponse()
|
oldAccountResp := oldAccount.ToAccountInfoResponse()
|
||||||
accountRespMap[oldAccountResp.Id] = oldAccountResp
|
accountRespMap[oldAccountResp.Id] = oldAccountResp
|
||||||
}
|
}
|
||||||
@@ -415,8 +575,19 @@ func (a *AccountsApi) AccountModifyHandler(c *core.WebContext) (any, *errs.Error
|
|||||||
accountResp := accountRespMap[accountModifyReq.Id]
|
accountResp := accountRespMap[accountModifyReq.Id]
|
||||||
|
|
||||||
for i := 0; i < len(accountAndSubAccounts); i++ {
|
for i := 0; i < len(accountAndSubAccounts); i++ {
|
||||||
if accountAndSubAccounts[i].ParentAccountId == accountResp.Id {
|
account := accountAndSubAccounts[i]
|
||||||
subAccountResp := accountRespMap[accountAndSubAccounts[i].AccountId]
|
|
||||||
|
if account.ParentAccountId == accountResp.Id && !deletedAccountIds[account.AccountId] {
|
||||||
|
subAccountResp := accountRespMap[account.AccountId]
|
||||||
|
accountResp.SubAccounts = append(accountResp.SubAccounts, subAccountResp)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := 0; i < len(toAddAccounts); i++ {
|
||||||
|
account := toAddAccounts[i]
|
||||||
|
|
||||||
|
if account.ParentAccountId == accountResp.Id {
|
||||||
|
subAccountResp := accountRespMap[account.AccountId]
|
||||||
accountResp.SubAccounts = append(accountResp.SubAccounts, subAccountResp)
|
accountResp.SubAccounts = append(accountResp.SubAccounts, subAccountResp)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -505,6 +676,28 @@ func (a *AccountsApi) AccountDeleteHandler(c *core.WebContext) (any, *errs.Error
|
|||||||
return true, nil
|
return true, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SubAccountDeleteHandler deletes an existed sub-account by request parameters for current user
|
||||||
|
func (a *AccountsApi) SubAccountDeleteHandler(c *core.WebContext) (any, *errs.Error) {
|
||||||
|
var accountDeleteReq models.AccountDeleteRequest
|
||||||
|
err := c.ShouldBindJSON(&accountDeleteReq)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf(c, "[accounts.SubAccountDeleteHandler] parse request failed, because %s", err.Error())
|
||||||
|
return nil, errs.NewIncompleteOrIncorrectSubmissionError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
uid := c.GetCurrentUid()
|
||||||
|
err = a.accounts.DeleteSubAccount(c, uid, accountDeleteReq.Id)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(c, "[accounts.SubAccountDeleteHandler] failed to delete sub-account \"id:%d\" for user \"uid:%d\", because %s", accountDeleteReq.Id, uid, err.Error())
|
||||||
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Infof(c, "[accounts.SubAccountDeleteHandler] user \"uid:%d\" has deleted sub-account \"id:%d\"", uid, accountDeleteReq.Id)
|
||||||
|
return true, nil
|
||||||
|
}
|
||||||
|
|
||||||
func (a *AccountsApi) createNewAccountModel(uid int64, accountCreateReq *models.AccountCreateRequest, isSubAccount bool, order int32) *models.Account {
|
func (a *AccountsApi) createNewAccountModel(uid int64, accountCreateReq *models.AccountCreateRequest, isSubAccount bool, order int32) *models.Account {
|
||||||
accountExtend := &models.AccountExtend{}
|
accountExtend := &models.AccountExtend{}
|
||||||
|
|
||||||
@@ -527,6 +720,24 @@ func (a *AccountsApi) createNewAccountModel(uid int64, accountCreateReq *models.
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (a *AccountsApi) createNewSubAccountModelForModify(uid int64, accountType models.AccountType, accountModifyReq *models.AccountModifyRequest, order int32) *models.Account {
|
||||||
|
accountExtend := &models.AccountExtend{}
|
||||||
|
|
||||||
|
return &models.Account{
|
||||||
|
Uid: uid,
|
||||||
|
Name: accountModifyReq.Name,
|
||||||
|
DisplayOrder: order,
|
||||||
|
Category: accountModifyReq.Category,
|
||||||
|
Type: accountType,
|
||||||
|
Icon: accountModifyReq.Icon,
|
||||||
|
Color: accountModifyReq.Color,
|
||||||
|
Currency: *accountModifyReq.Currency,
|
||||||
|
Balance: *accountModifyReq.Balance,
|
||||||
|
Comment: accountModifyReq.Comment,
|
||||||
|
Extend: accountExtend,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func (a *AccountsApi) createSubAccountModels(uid int64, accountCreateReq *models.AccountCreateRequest) ([]*models.Account, []int64) {
|
func (a *AccountsApi) createSubAccountModels(uid int64, accountCreateReq *models.AccountCreateRequest) ([]*models.Account, []int64) {
|
||||||
if len(accountCreateReq.SubAccounts) <= 0 {
|
if len(accountCreateReq.SubAccounts) <= 0 {
|
||||||
return nil, nil
|
return nil, nil
|
||||||
@@ -584,3 +795,27 @@ func (a *AccountsApi) getToUpdateAccount(uid int64, accountModifyReq *models.Acc
|
|||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (a *AccountsApi) getToDeleteSubAccountIds(accountModifyReq *models.AccountModifyRequest, mainAccount *models.Account, accountAndSubAccounts []*models.Account) []int64 {
|
||||||
|
newSubAccountIds := make(map[int64]bool, len(accountModifyReq.SubAccounts))
|
||||||
|
|
||||||
|
for i := 0; i < len(accountModifyReq.SubAccounts); i++ {
|
||||||
|
newSubAccountIds[accountModifyReq.SubAccounts[i].Id] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
toDeleteAccountIds := make([]int64, 0)
|
||||||
|
|
||||||
|
for i := 0; i < len(accountAndSubAccounts); i++ {
|
||||||
|
subAccount := accountAndSubAccounts[i]
|
||||||
|
|
||||||
|
if subAccount.AccountId == mainAccount.AccountId {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, exists := newSubAccountIds[subAccount.AccountId]; !exists {
|
||||||
|
toDeleteAccountIds = append(toDeleteAccountIds, subAccount.AccountId)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return toDeleteAccountIds
|
||||||
|
}
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import (
|
|||||||
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/avatars"
|
"github.com/mayswind/ezbookkeeping/pkg/avatars"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/duplicatechecker"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/log"
|
"github.com/mayswind/ezbookkeeping/pkg/log"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/models"
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
@@ -15,6 +16,7 @@ import (
|
|||||||
// AuthorizationsApi represents authorization api
|
// AuthorizationsApi represents authorization api
|
||||||
type AuthorizationsApi struct {
|
type AuthorizationsApi struct {
|
||||||
ApiUsingConfig
|
ApiUsingConfig
|
||||||
|
ApiUsingDuplicateChecker
|
||||||
ApiWithUserInfo
|
ApiWithUserInfo
|
||||||
users *services.UserService
|
users *services.UserService
|
||||||
tokens *services.TokenService
|
tokens *services.TokenService
|
||||||
@@ -27,6 +29,12 @@ var (
|
|||||||
ApiUsingConfig: ApiUsingConfig{
|
ApiUsingConfig: ApiUsingConfig{
|
||||||
container: settings.Container,
|
container: settings.Container,
|
||||||
},
|
},
|
||||||
|
ApiUsingDuplicateChecker: ApiUsingDuplicateChecker{
|
||||||
|
ApiUsingConfig: ApiUsingConfig{
|
||||||
|
container: settings.Container,
|
||||||
|
},
|
||||||
|
container: duplicatechecker.Container,
|
||||||
|
},
|
||||||
ApiWithUserInfo: ApiWithUserInfo{
|
ApiWithUserInfo: ApiWithUserInfo{
|
||||||
ApiUsingConfig: ApiUsingConfig{
|
ApiUsingConfig: ApiUsingConfig{
|
||||||
container: settings.Container,
|
container: settings.Container,
|
||||||
@@ -51,7 +59,23 @@ func (a *AuthorizationsApi) AuthorizeHandler(c *core.WebContext) (any, *errs.Err
|
|||||||
return nil, errs.ErrLoginNameOrPasswordInvalid
|
return nil, errs.ErrLoginNameOrPasswordInvalid
|
||||||
}
|
}
|
||||||
|
|
||||||
user, err := a.users.GetUserByUsernameOrEmailAndPassword(c, credential.LoginName, credential.Password)
|
err = a.CheckFailureCount(c, 0)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf(c, "[authorizations.AuthorizeHandler] cannot login for user \"%s\", because %s", credential.LoginName, err.Error())
|
||||||
|
return nil, errs.Or(err, errs.ErrFailureCountLimitReached)
|
||||||
|
}
|
||||||
|
|
||||||
|
user, uid, err := a.users.GetUserByUsernameOrEmailAndPassword(c, credential.LoginName, credential.Password)
|
||||||
|
|
||||||
|
if errs.IsCustomError(err) {
|
||||||
|
failureCheckErr := a.CheckAndIncreaseFailureCount(c, uid)
|
||||||
|
|
||||||
|
if failureCheckErr != nil {
|
||||||
|
log.Warnf(c, "[authorizations.AuthorizeHandler] cannot login for user \"%s\", because %s", credential.LoginName, failureCheckErr.Error())
|
||||||
|
return nil, errs.Or(failureCheckErr, errs.ErrFailureCountLimitReached)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Warnf(c, "[authorizations.AuthorizeHandler] login failed for user \"%s\", because %s", credential.LoginName, err.Error())
|
log.Warnf(c, "[authorizations.AuthorizeHandler] login failed for user \"%s\", because %s", credential.LoginName, err.Error())
|
||||||
@@ -133,6 +157,13 @@ func (a *AuthorizationsApi) TwoFactorAuthorizeHandler(c *core.WebContext) (any,
|
|||||||
}
|
}
|
||||||
|
|
||||||
uid := c.GetCurrentUid()
|
uid := c.GetCurrentUid()
|
||||||
|
err = a.CheckFailureCount(c, uid)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf(c, "[authorizations.TwoFactorAuthorizeHandler] cannot auth for user \"uid:%d\", because %s", uid, err.Error())
|
||||||
|
return nil, errs.Or(err, errs.ErrFailureCountLimitReached)
|
||||||
|
}
|
||||||
|
|
||||||
twoFactorSetting, err := a.twoFactorAuthorizations.GetUserTwoFactorSettingByUid(c, uid)
|
twoFactorSetting, err := a.twoFactorAuthorizations.GetUserTwoFactorSettingByUid(c, uid)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -142,6 +173,14 @@ func (a *AuthorizationsApi) TwoFactorAuthorizeHandler(c *core.WebContext) (any,
|
|||||||
|
|
||||||
if !totp.Validate(credential.Passcode, twoFactorSetting.Secret) {
|
if !totp.Validate(credential.Passcode, twoFactorSetting.Secret) {
|
||||||
log.Warnf(c, "[authorizations.TwoFactorAuthorizeHandler] passcode is invalid for user \"uid:%d\"", uid)
|
log.Warnf(c, "[authorizations.TwoFactorAuthorizeHandler] passcode is invalid for user \"uid:%d\"", uid)
|
||||||
|
|
||||||
|
err = a.CheckAndIncreaseFailureCount(c, uid)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf(c, "[authorizations.TwoFactorAuthorizeHandler] cannot auth for user \"uid:%d\", because %s", uid, err.Error())
|
||||||
|
return nil, errs.Or(err, errs.ErrFailureCountLimitReached)
|
||||||
|
}
|
||||||
|
|
||||||
return nil, errs.ErrPasscodeInvalid
|
return nil, errs.ErrPasscodeInvalid
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -196,6 +235,13 @@ func (a *AuthorizationsApi) TwoFactorAuthorizeByRecoveryCodeHandler(c *core.WebC
|
|||||||
}
|
}
|
||||||
|
|
||||||
uid := c.GetCurrentUid()
|
uid := c.GetCurrentUid()
|
||||||
|
err = a.CheckFailureCount(c, uid)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf(c, "[authorizations.TwoFactorAuthorizeByRecoveryCodeHandler] cannot auth for user \"uid:%d\", because %s", uid, err.Error())
|
||||||
|
return nil, errs.Or(err, errs.ErrFailureCountLimitReached)
|
||||||
|
}
|
||||||
|
|
||||||
enableTwoFactor, err := a.twoFactorAuthorizations.ExistsTwoFactorSetting(c, uid)
|
enableTwoFactor, err := a.twoFactorAuthorizations.ExistsTwoFactorSetting(c, uid)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -226,6 +272,15 @@ func (a *AuthorizationsApi) TwoFactorAuthorizeByRecoveryCodeHandler(c *core.WebC
|
|||||||
|
|
||||||
err = a.twoFactorAuthorizations.GetAndUseUserTwoFactorRecoveryCode(c, uid, credential.RecoveryCode, user.Salt)
|
err = a.twoFactorAuthorizations.GetAndUseUserTwoFactorRecoveryCode(c, uid, credential.RecoveryCode, user.Salt)
|
||||||
|
|
||||||
|
if errs.IsCustomError(err) {
|
||||||
|
failureCheckErr := a.CheckAndIncreaseFailureCount(c, uid)
|
||||||
|
|
||||||
|
if failureCheckErr != nil {
|
||||||
|
log.Warnf(c, "[authorizations.TwoFactorAuthorizeByRecoveryCodeHandler] cannot auth for user \"uid:%d\", because %s", uid, failureCheckErr.Error())
|
||||||
|
return nil, errs.Or(failureCheckErr, errs.ErrFailureCountLimitReached)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Warnf(c, "[authorizations.TwoFactorAuthorizeByRecoveryCodeHandler] failed to get two-factor recovery code for user \"uid:%d\", because %s", uid, err.Error())
|
log.Warnf(c, "[authorizations.TwoFactorAuthorizeByRecoveryCodeHandler] failed to get two-factor recovery code for user \"uid:%d\", because %s", uid, err.Error())
|
||||||
return nil, errs.Or(err, errs.ErrTwoFactorRecoveryCodeNotExist)
|
return nil, errs.Or(err, errs.ErrTwoFactorRecoveryCodeNotExist)
|
||||||
|
|||||||
+78
-3
@@ -5,9 +5,13 @@ import (
|
|||||||
"sort"
|
"sort"
|
||||||
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/avatars"
|
"github.com/mayswind/ezbookkeeping/pkg/avatars"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/duplicatechecker"
|
"github.com/mayswind/ezbookkeeping/pkg/duplicatechecker"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/log"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/models"
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/settings"
|
"github.com/mayswind/ezbookkeeping/pkg/settings"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
const internalTransactionPictureUrlFormat = "%spictures/%d.%s"
|
const internalTransactionPictureUrlFormat = "%spictures/%d.%s"
|
||||||
@@ -100,6 +104,7 @@ func (a *ApiUsingConfig) GetAfterOpenNotificationContent(userLanguage string, cl
|
|||||||
|
|
||||||
// ApiUsingDuplicateChecker represents an api that need to use duplicate checker
|
// ApiUsingDuplicateChecker represents an api that need to use duplicate checker
|
||||||
type ApiUsingDuplicateChecker struct {
|
type ApiUsingDuplicateChecker struct {
|
||||||
|
ApiUsingConfig
|
||||||
container *duplicatechecker.DuplicateCheckerContainer
|
container *duplicatechecker.DuplicateCheckerContainer
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -108,9 +113,79 @@ func (a *ApiUsingDuplicateChecker) GetSubmissionRemark(checkerType duplicatechec
|
|||||||
return a.container.GetSubmissionRemark(checkerType, uid, identification)
|
return a.container.GetSubmissionRemark(checkerType, uid, identification)
|
||||||
}
|
}
|
||||||
|
|
||||||
// SetSubmissionRemark saves the identification and remark to in-memory cache by the current duplicate checker
|
// SetSubmissionRemarkIfEnable saves the identification and remark by the current duplicate checker if the duplicate submission check is enabled
|
||||||
func (a *ApiUsingDuplicateChecker) SetSubmissionRemark(checkerType duplicatechecker.DuplicateCheckerType, uid int64, identification string, remark string) {
|
func (a *ApiUsingDuplicateChecker) SetSubmissionRemarkIfEnable(checkerType duplicatechecker.DuplicateCheckerType, uid int64, identification string, remark string) {
|
||||||
a.container.SetSubmissionRemark(checkerType, uid, identification, remark)
|
if a.CurrentConfig().EnableDuplicateSubmissionsCheck {
|
||||||
|
a.container.SetSubmissionRemark(checkerType, uid, identification, remark)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemoveSubmissionRemarkIfEnable removes the identification and remark by the current duplicate checker if the duplicate submission check is enabled
|
||||||
|
func (a *ApiUsingDuplicateChecker) RemoveSubmissionRemarkIfEnable(checkerType duplicatechecker.DuplicateCheckerType, uid int64, identification string) {
|
||||||
|
if a.CurrentConfig().EnableDuplicateSubmissionsCheck {
|
||||||
|
a.container.RemoveSubmissionRemark(checkerType, uid, identification)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CheckFailureCount returns whether the failure count of the specified IP and user has reached the limit and increases the failure count
|
||||||
|
func (a *ApiUsingDuplicateChecker) CheckFailureCount(c *core.WebContext, uid int64) error {
|
||||||
|
if a.CurrentConfig().MaxFailuresPerIpPerMinute > 0 {
|
||||||
|
clientIp := c.ClientIP()
|
||||||
|
ipFailureCount := a.container.GetFailureCount(clientIp)
|
||||||
|
|
||||||
|
if ipFailureCount >= a.CurrentConfig().MaxFailuresPerIpPerMinute {
|
||||||
|
log.Warnf(c, "[base.CheckFailureCount] operation failure via IP \"%s\", current failure count: %d reached the limit", clientIp, ipFailureCount)
|
||||||
|
return errs.ErrFailureCountLimitReached
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if a.CurrentConfig().MaxFailuresPerUserPerMinute > 0 && uid > 0 {
|
||||||
|
uidFailureCount := a.container.GetFailureCount(utils.Int64ToString(uid))
|
||||||
|
|
||||||
|
if uidFailureCount >= a.CurrentConfig().MaxFailuresPerUserPerMinute {
|
||||||
|
log.Warnf(c, "[base.CheckFailureCount] operation failure via uid \"%d\", current failure count: %d reached the limit", uid, uidFailureCount)
|
||||||
|
return errs.ErrFailureCountLimitReached
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// CheckAndIncreaseFailureCount returns whether the failure count of the specified IP and user has reached the limit and increases the failure count
|
||||||
|
func (a *ApiUsingDuplicateChecker) CheckAndIncreaseFailureCount(c *core.WebContext, uid int64) error {
|
||||||
|
clientIp := c.ClientIP()
|
||||||
|
ipFailureCount := uint32(0)
|
||||||
|
uidFailureCount := uint32(0)
|
||||||
|
|
||||||
|
if a.CurrentConfig().MaxFailuresPerIpPerMinute > 0 {
|
||||||
|
ipFailureCount = a.container.GetFailureCount(clientIp)
|
||||||
|
}
|
||||||
|
|
||||||
|
if a.CurrentConfig().MaxFailuresPerUserPerMinute > 0 && uid > 0 {
|
||||||
|
uidFailureCount = a.container.GetFailureCount(utils.Int64ToString(uid))
|
||||||
|
}
|
||||||
|
|
||||||
|
if a.CurrentConfig().MaxFailuresPerIpPerMinute > 0 && ipFailureCount < a.CurrentConfig().MaxFailuresPerIpPerMinute {
|
||||||
|
log.Warnf(c, "[base.CheckAndIncreaseFailureCount] operation failure via IP \"%s\", previous failure count: %d", clientIp, ipFailureCount)
|
||||||
|
a.container.IncreaseFailureCount(clientIp)
|
||||||
|
}
|
||||||
|
|
||||||
|
if a.CurrentConfig().MaxFailuresPerUserPerMinute > 0 && uid > 0 && uidFailureCount < a.CurrentConfig().MaxFailuresPerUserPerMinute {
|
||||||
|
log.Warnf(c, "[base.CheckAndIncreaseFailureCount] operation failure via uid \"%d\", previous failure count: %d", uid, uidFailureCount)
|
||||||
|
a.container.IncreaseFailureCount(utils.Int64ToString(uid))
|
||||||
|
}
|
||||||
|
|
||||||
|
if a.CurrentConfig().MaxFailuresPerIpPerMinute > 0 && ipFailureCount >= a.CurrentConfig().MaxFailuresPerIpPerMinute {
|
||||||
|
log.Warnf(c, "[base.CheckAndIncreaseFailureCount] operation failure via IP \"%s\", current failure count: %d reached the limit", clientIp, ipFailureCount)
|
||||||
|
return errs.ErrFailureCountLimitReached
|
||||||
|
}
|
||||||
|
|
||||||
|
if a.CurrentConfig().MaxFailuresPerUserPerMinute > 0 && uid > 0 && uidFailureCount >= a.CurrentConfig().MaxFailuresPerUserPerMinute {
|
||||||
|
log.Warnf(c, "[base.CheckAndIncreaseFailureCount] operation failure via uid \"%d\", current failure count: %d reached the limit", uid, uidFailureCount)
|
||||||
|
return errs.ErrFailureCountLimitReached
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// ApiUsingAvatarProvider represents an api that need to use avatar provider
|
// ApiUsingAvatarProvider represents an api that need to use avatar provider
|
||||||
|
|||||||
+25
-16
@@ -20,14 +20,15 @@ const pageCountForDataExport = 1000
|
|||||||
// DataManagementsApi represents data management api
|
// DataManagementsApi represents data management api
|
||||||
type DataManagementsApi struct {
|
type DataManagementsApi struct {
|
||||||
ApiUsingConfig
|
ApiUsingConfig
|
||||||
tokens *services.TokenService
|
tokens *services.TokenService
|
||||||
users *services.UserService
|
users *services.UserService
|
||||||
accounts *services.AccountService
|
accounts *services.AccountService
|
||||||
transactions *services.TransactionService
|
transactions *services.TransactionService
|
||||||
categories *services.TransactionCategoryService
|
categories *services.TransactionCategoryService
|
||||||
tags *services.TransactionTagService
|
tags *services.TransactionTagService
|
||||||
pictures *services.TransactionPictureService
|
pictures *services.TransactionPictureService
|
||||||
templates *services.TransactionTemplateService
|
templates *services.TransactionTemplateService
|
||||||
|
userCustomExchangeRates *services.UserCustomExchangeRatesService
|
||||||
}
|
}
|
||||||
|
|
||||||
// Initialize a data management api singleton instance
|
// Initialize a data management api singleton instance
|
||||||
@@ -36,14 +37,15 @@ var (
|
|||||||
ApiUsingConfig: ApiUsingConfig{
|
ApiUsingConfig: ApiUsingConfig{
|
||||||
container: settings.Container,
|
container: settings.Container,
|
||||||
},
|
},
|
||||||
tokens: services.Tokens,
|
tokens: services.Tokens,
|
||||||
users: services.Users,
|
users: services.Users,
|
||||||
accounts: services.Accounts,
|
accounts: services.Accounts,
|
||||||
transactions: services.Transactions,
|
transactions: services.Transactions,
|
||||||
categories: services.TransactionCategories,
|
categories: services.TransactionCategories,
|
||||||
tags: services.TransactionTags,
|
tags: services.TransactionTags,
|
||||||
pictures: services.TransactionPictures,
|
pictures: services.TransactionPictures,
|
||||||
templates: services.TransactionTemplates,
|
templates: services.TransactionTemplates,
|
||||||
|
userCustomExchangeRates: services.UserCustomExchangeRates,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -179,6 +181,13 @@ func (a *DataManagementsApi) ClearDataHandler(c *core.WebContext) (any, *errs.Er
|
|||||||
return nil, errs.Or(err, errs.ErrOperationFailed)
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
err = a.userCustomExchangeRates.DeleteAllCustomExchangeRates(c, uid)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(c, "[data_managements.ClearDataHandler] failed to delete all user custom exchange rates, because %s", err.Error())
|
||||||
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
|
}
|
||||||
|
|
||||||
log.Infof(c, "[data_managements.ClearDataHandler] user \"uid:%d\" has cleared all data", uid)
|
log.Infof(c, "[data_managements.ClearDataHandler] user \"uid:%d\" has cleared all data", uid)
|
||||||
return true, nil
|
return true, nil
|
||||||
}
|
}
|
||||||
|
|||||||
+74
-93
@@ -1,25 +1,20 @@
|
|||||||
package api
|
package api
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"crypto/tls"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"net/http"
|
|
||||||
"sort"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/exchangerates"
|
"github.com/mayswind/ezbookkeeping/pkg/exchangerates"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/log"
|
"github.com/mayswind/ezbookkeeping/pkg/log"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/models"
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/services"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/settings"
|
"github.com/mayswind/ezbookkeeping/pkg/settings"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// ExchangeRatesApi represents exchange rate api
|
// ExchangeRatesApi represents exchange rate api
|
||||||
type ExchangeRatesApi struct {
|
type ExchangeRatesApi struct {
|
||||||
ApiUsingConfig
|
ApiUsingConfig
|
||||||
|
users *services.UserService
|
||||||
|
userCustomExchangeRates *services.UserCustomExchangeRatesService
|
||||||
}
|
}
|
||||||
|
|
||||||
// Initialize a exchange rate api singleton instance
|
// Initialize a exchange rate api singleton instance
|
||||||
@@ -28,6 +23,8 @@ var (
|
|||||||
ApiUsingConfig: ApiUsingConfig{
|
ApiUsingConfig: ApiUsingConfig{
|
||||||
container: settings.Container,
|
container: settings.Container,
|
||||||
},
|
},
|
||||||
|
users: services.Users,
|
||||||
|
userCustomExchangeRates: services.UserCustomExchangeRates,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -39,93 +36,77 @@ func (a *ExchangeRatesApi) LatestExchangeRateHandler(c *core.WebContext) (any, *
|
|||||||
return nil, errs.ErrInvalidExchangeRatesDataSource
|
return nil, errs.ErrInvalidExchangeRatesDataSource
|
||||||
}
|
}
|
||||||
|
|
||||||
uid := c.GetCurrentUid()
|
exchangeRateResponse, err := dataSource.GetLatestExchangeRates(c, c.GetCurrentUid(), a.container.Current)
|
||||||
|
|
||||||
transport := http.DefaultTransport.(*http.Transport).Clone()
|
|
||||||
utils.SetProxyUrl(transport, a.CurrentConfig().ExchangeRatesProxy)
|
|
||||||
|
|
||||||
if a.CurrentConfig().ExchangeRatesSkipTLSVerify {
|
|
||||||
transport.TLSClientConfig = &tls.Config{
|
|
||||||
InsecureSkipVerify: true,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
client := &http.Client{
|
|
||||||
Transport: transport,
|
|
||||||
Timeout: time.Duration(a.CurrentConfig().ExchangeRatesRequestTimeout) * time.Millisecond,
|
|
||||||
}
|
|
||||||
|
|
||||||
requests, err := dataSource.BuildRequests()
|
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Errorf(c, "[exchange_rates.LatestExchangeRateHandler] failed to build requests for user \"uid:%d\", because %s", uid, err.Error())
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
return nil, errs.ErrFailedToRequestRemoteApi
|
|
||||||
}
|
}
|
||||||
|
|
||||||
exchangeRateResps := make([]*models.LatestExchangeRateResponse, 0, len(requests))
|
return exchangeRateResponse, nil
|
||||||
|
}
|
||||||
for i := 0; i < len(requests); i++ {
|
|
||||||
req := requests[i]
|
// UserCustomExchangeRateUpdateHandler updates user custom exchange rates data by request parameters for current user
|
||||||
req.Header.Set("User-Agent", fmt.Sprintf("ezBookkeeping/%s ", settings.Version))
|
func (a *ExchangeRatesApi) UserCustomExchangeRateUpdateHandler(c *core.WebContext) (any, *errs.Error) {
|
||||||
|
var customExchangeRateUpdateReq models.UserCustomExchangeRateUpdateRequest
|
||||||
resp, err := client.Do(req)
|
err := c.ShouldBindJSON(&customExchangeRateUpdateReq)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Errorf(c, "[exchange_rates.LatestExchangeRateHandler] failed to request latest exchange rate data for user \"uid:%d\", because %s", uid, err.Error())
|
log.Warnf(c, "[exchange_rates.UserCustomExchangeRateUpdateHandler] parse request failed, because %s", err.Error())
|
||||||
return nil, errs.ErrFailedToRequestRemoteApi
|
return nil, errs.NewIncompleteOrIncorrectSubmissionError(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if resp.StatusCode != 200 {
|
uid := c.GetCurrentUid()
|
||||||
log.Errorf(c, "[exchange_rates.LatestExchangeRateHandler] failed to get latest exchange rate data response for user \"uid:%d\", because response code is not 200", uid)
|
user, err := a.users.GetUserById(c, uid)
|
||||||
return nil, errs.ErrFailedToRequestRemoteApi
|
|
||||||
}
|
if err != nil {
|
||||||
|
log.Errorf(c, "[exchange_rates.UserCustomExchangeRateUpdateHandler] failed to get user \"uid:%d\", because %s", uid, err.Error())
|
||||||
defer resp.Body.Close()
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
body, err := io.ReadAll(resp.Body)
|
}
|
||||||
|
|
||||||
log.Debugf(c, "[exchange_rates.LatestExchangeRateHandler] response#%d is %s", i, body)
|
if customExchangeRateUpdateReq.Currency == user.DefaultCurrency {
|
||||||
|
return nil, errs.ErrCannotUpdateExchangeRateForDefaultCurrency
|
||||||
exchangeRateResp, err := dataSource.Parse(c, body)
|
}
|
||||||
|
|
||||||
if err != nil {
|
newCustomExchangeRate, defaultCurrencyExchangeRate, err := a.userCustomExchangeRates.UpdateCustomExchangeRate(c, uid, customExchangeRateUpdateReq.Currency, customExchangeRateUpdateReq.Rate, user.DefaultCurrency)
|
||||||
log.Errorf(c, "[exchange_rates.LatestExchangeRateHandler] failed to parse response for user \"uid:%d\", because %s", uid, err.Error())
|
|
||||||
return nil, errs.Or(err, errs.ErrFailedToRequestRemoteApi)
|
if err != nil {
|
||||||
}
|
log.Errorf(c, "[exchange_rates.UserCustomExchangeRateUpdateHandler] failed to update user custom exchange rate \"currency:%s\" for user \"uid:%d\", because %s", customExchangeRateUpdateReq.Currency, uid, err.Error())
|
||||||
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
exchangeRateResps = append(exchangeRateResps, exchangeRateResp)
|
}
|
||||||
}
|
|
||||||
|
log.Infof(c, "[exchange_rates.UserCustomExchangeRateUpdateHandler] user \"uid:%d\" has updated user custom exchange rate \"currency:%s\" successfully", uid, customExchangeRateUpdateReq.Currency)
|
||||||
lastExchangeRateResponse := exchangeRateResps[len(exchangeRateResps)-1]
|
return newCustomExchangeRate.ToUserCustomExchangeRateUpdateResponse(defaultCurrencyExchangeRate.Rate), nil
|
||||||
allExchangeRatesMap := make(map[string]string)
|
}
|
||||||
|
|
||||||
for i := 0; i < len(exchangeRateResps); i++ {
|
// UserCustomExchangeRateDeleteHandler deletes an existed user custom exchange rates data by request parameters for current user
|
||||||
exchangeRateResp := exchangeRateResps[i]
|
func (a *ExchangeRatesApi) UserCustomExchangeRateDeleteHandler(c *core.WebContext) (any, *errs.Error) {
|
||||||
|
var customExchangeRateDeleteReq models.UserCustomExchangeRateDeleteRequest
|
||||||
for j := 0; j < len(exchangeRateResp.ExchangeRates); j++ {
|
err := c.ShouldBindJSON(&customExchangeRateDeleteReq)
|
||||||
exchangeRate := exchangeRateResp.ExchangeRates[j]
|
|
||||||
allExchangeRatesMap[exchangeRate.Currency] = exchangeRate.Rate
|
if err != nil {
|
||||||
}
|
log.Warnf(c, "[exchange_rates.UserCustomExchangeRateDeleteHandler] parse request failed, because %s", err.Error())
|
||||||
}
|
return nil, errs.NewIncompleteOrIncorrectSubmissionError(err)
|
||||||
|
}
|
||||||
allExchangeRatesMap[lastExchangeRateResponse.BaseCurrency] = "1"
|
|
||||||
allExchangeRates := make(models.LatestExchangeRateSlice, 0, len(allExchangeRatesMap))
|
uid := c.GetCurrentUid()
|
||||||
|
user, err := a.users.GetUserById(c, uid)
|
||||||
for currency, rate := range allExchangeRatesMap {
|
|
||||||
allExchangeRates = append(allExchangeRates, &models.LatestExchangeRate{
|
if err != nil {
|
||||||
Currency: currency,
|
log.Errorf(c, "[exchange_rates.UserCustomExchangeRateDeleteHandler] failed to get user \"uid:%d\", because %s", uid, err.Error())
|
||||||
Rate: rate,
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
})
|
}
|
||||||
}
|
|
||||||
|
if customExchangeRateDeleteReq.Currency == user.DefaultCurrency {
|
||||||
sort.Sort(allExchangeRates)
|
return nil, errs.ErrCannotDeleteExchangeRateForDefaultCurrency
|
||||||
|
}
|
||||||
finalExchangeRateResponse := &models.LatestExchangeRateResponse{
|
|
||||||
DataSource: lastExchangeRateResponse.DataSource,
|
err = a.userCustomExchangeRates.DeleteCustomExchangeRate(c, uid, customExchangeRateDeleteReq.Currency)
|
||||||
ReferenceUrl: lastExchangeRateResponse.ReferenceUrl,
|
|
||||||
UpdateTime: lastExchangeRateResponse.UpdateTime,
|
if err != nil {
|
||||||
BaseCurrency: lastExchangeRateResponse.BaseCurrency,
|
log.Errorf(c, "[exchange_rates.UserCustomExchangeRateDeleteHandler] failed to delete user custom exchange rate \"currency:%s\" for user \"uid:%d\", because %s", customExchangeRateDeleteReq.Currency, uid, err.Error())
|
||||||
ExchangeRates: allExchangeRates,
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
}
|
}
|
||||||
|
|
||||||
return finalExchangeRateResponse, nil
|
log.Infof(c, "[exchange_rates.UserCustomExchangeRateDeleteHandler] user \"uid:%d\" has deleted user custom exchange rate \"currency:%s\"", uid, customExchangeRateDeleteReq.Currency)
|
||||||
|
return true, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -29,6 +29,9 @@ var (
|
|||||||
container: settings.Container,
|
container: settings.Container,
|
||||||
},
|
},
|
||||||
ApiUsingDuplicateChecker: ApiUsingDuplicateChecker{
|
ApiUsingDuplicateChecker: ApiUsingDuplicateChecker{
|
||||||
|
ApiUsingConfig: ApiUsingConfig{
|
||||||
|
container: settings.Container,
|
||||||
|
},
|
||||||
container: duplicatechecker.Container,
|
container: duplicatechecker.Container,
|
||||||
},
|
},
|
||||||
categories: services.TransactionCategories,
|
categories: services.TransactionCategories,
|
||||||
@@ -161,7 +164,7 @@ func (a *TransactionCategoriesApi) CategoryCreateHandler(c *core.WebContext) (an
|
|||||||
|
|
||||||
log.Infof(c, "[transaction_categories.CategoryCreateHandler] user \"uid:%d\" has created a new category \"id:%d\" successfully", uid, category.CategoryId)
|
log.Infof(c, "[transaction_categories.CategoryCreateHandler] user \"uid:%d\" has created a new category \"id:%d\" successfully", uid, category.CategoryId)
|
||||||
|
|
||||||
a.SetSubmissionRemark(duplicatechecker.DUPLICATE_CHECKER_TYPE_NEW_CATEGORY, uid, categoryCreateReq.ClientSessionId, utils.Int64ToString(category.CategoryId))
|
a.SetSubmissionRemarkIfEnable(duplicatechecker.DUPLICATE_CHECKER_TYPE_NEW_CATEGORY, uid, categoryCreateReq.ClientSessionId, utils.Int64ToString(category.CategoryId))
|
||||||
categoryResp := category.ToTransactionCategoryInfoResponse()
|
categoryResp := category.ToTransactionCategoryInfoResponse()
|
||||||
|
|
||||||
return categoryResp, nil
|
return categoryResp, nil
|
||||||
@@ -226,11 +229,11 @@ func (a *TransactionCategoriesApi) CategoryModifyHandler(c *core.WebContext) (an
|
|||||||
return nil, errs.ErrNothingWillBeUpdated
|
return nil, errs.ErrNothingWillBeUpdated
|
||||||
}
|
}
|
||||||
|
|
||||||
if category.ParentCategoryId == 0 && newCategory.ParentCategoryId != 0 {
|
if category.ParentCategoryId == models.LevelOneTransactionCategoryParentId && newCategory.ParentCategoryId != models.LevelOneTransactionCategoryParentId {
|
||||||
return nil, errs.Or(err, errs.ErrNotAllowChangePrimaryTransactionCategoryToSecondary)
|
return nil, errs.Or(err, errs.ErrNotAllowChangePrimaryTransactionCategoryToSecondary)
|
||||||
}
|
}
|
||||||
|
|
||||||
if category.ParentCategoryId != 0 && newCategory.ParentCategoryId == 0 {
|
if category.ParentCategoryId != models.LevelOneTransactionCategoryParentId && newCategory.ParentCategoryId == models.LevelOneTransactionCategoryParentId {
|
||||||
return nil, errs.Or(err, errs.ErrNotAllowChangeSecondaryTransactionCategoryToPrimary)
|
return nil, errs.Or(err, errs.ErrNotAllowChangeSecondaryTransactionCategoryToPrimary)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -253,7 +256,7 @@ func (a *TransactionCategoriesApi) CategoryModifyHandler(c *core.WebContext) (an
|
|||||||
return nil, errs.Or(err, errs.ErrNotAllowChangePrimaryTransactionType)
|
return nil, errs.Or(err, errs.ErrNotAllowChangePrimaryTransactionType)
|
||||||
}
|
}
|
||||||
|
|
||||||
if toPrimaryCategory.ParentCategoryId != 0 {
|
if toPrimaryCategory.ParentCategoryId != models.LevelOneTransactionCategoryParentId {
|
||||||
return nil, errs.Or(err, errs.ErrNotAllowUseSecondaryTransactionAsPrimaryCategory)
|
return nil, errs.Or(err, errs.ErrNotAllowUseSecondaryTransactionAsPrimaryCategory)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -430,7 +433,7 @@ func (a *TransactionCategoriesApi) getTransactionCategoryListByTypeResponse(cate
|
|||||||
for i := 0; i < len(categoryResps); i++ {
|
for i := 0; i < len(categoryResps); i++ {
|
||||||
categoryResp := categoryResps[i]
|
categoryResp := categoryResps[i]
|
||||||
|
|
||||||
if categoryResp.ParentId <= models.LevelOneTransactionParentId {
|
if categoryResp.ParentId <= models.LevelOneTransactionCategoryParentId {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -446,7 +449,7 @@ func (a *TransactionCategoriesApi) getTransactionCategoryListByTypeResponse(cate
|
|||||||
finalCategoryResps := make(models.TransactionCategoryInfoResponseSlice, 0)
|
finalCategoryResps := make(models.TransactionCategoryInfoResponseSlice, 0)
|
||||||
|
|
||||||
for i := 0; i < len(categoryResps); i++ {
|
for i := 0; i < len(categoryResps); i++ {
|
||||||
if parentId <= 0 && categoryResps[i].ParentId == models.LevelOneTransactionParentId {
|
if parentId <= 0 && categoryResps[i].ParentId == models.LevelOneTransactionCategoryParentId {
|
||||||
sort.Sort(categoryResps[i].SubCategories)
|
sort.Sort(categoryResps[i].SubCategories)
|
||||||
finalCategoryResps = append(finalCategoryResps, categoryResps[i])
|
finalCategoryResps = append(finalCategoryResps, categoryResps[i])
|
||||||
} else if parentId > 0 && categoryResps[i].ParentId == parentId {
|
} else if parentId > 0 && categoryResps[i].ParentId == parentId {
|
||||||
|
|||||||
@@ -26,6 +26,9 @@ var (
|
|||||||
container: settings.Container,
|
container: settings.Container,
|
||||||
},
|
},
|
||||||
ApiUsingDuplicateChecker: ApiUsingDuplicateChecker{
|
ApiUsingDuplicateChecker: ApiUsingDuplicateChecker{
|
||||||
|
ApiUsingConfig: ApiUsingConfig{
|
||||||
|
container: settings.Container,
|
||||||
|
},
|
||||||
container: duplicatechecker.Container,
|
container: duplicatechecker.Container,
|
||||||
},
|
},
|
||||||
users: services.Users,
|
users: services.Users,
|
||||||
@@ -112,7 +115,7 @@ func (a *TransactionPicturesApi) TransactionPictureUploadHandler(c *core.WebCont
|
|||||||
return nil, errs.Or(err, errs.ErrOperationFailed)
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
}
|
}
|
||||||
|
|
||||||
a.SetSubmissionRemark(duplicatechecker.DUPLICATE_CHECKER_TYPE_NEW_PICTURE, uid, clientSessionId, utils.Int64ToString(pictureInfo.PictureId))
|
a.SetSubmissionRemarkIfEnable(duplicatechecker.DUPLICATE_CHECKER_TYPE_NEW_PICTURE, uid, clientSessionId, utils.Int64ToString(pictureInfo.PictureId))
|
||||||
pictureInfoResp := a.GetTransactionPictureInfoResponse(pictureInfo)
|
pictureInfoResp := a.GetTransactionPictureInfoResponse(pictureInfo)
|
||||||
|
|
||||||
return pictureInfoResp, nil
|
return pictureInfoResp, nil
|
||||||
|
|||||||
@@ -101,6 +101,47 @@ func (a *TransactionTagsApi) TagCreateHandler(c *core.WebContext) (any, *errs.Er
|
|||||||
return tagResp, nil
|
return tagResp, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TagCreateBatchHandler saves some new transaction tags by request parameters for current user
|
||||||
|
func (a *TransactionTagsApi) TagCreateBatchHandler(c *core.WebContext) (any, *errs.Error) {
|
||||||
|
var tagCreateBatchReq models.TransactionTagCreateBatchRequest
|
||||||
|
err := c.ShouldBindJSON(&tagCreateBatchReq)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf(c, "[transaction_tags.TagCreateBatchHandler] parse request failed, because %s", err.Error())
|
||||||
|
return nil, errs.NewIncompleteOrIncorrectSubmissionError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
uid := c.GetCurrentUid()
|
||||||
|
|
||||||
|
maxOrderId, err := a.tags.GetMaxDisplayOrder(c, uid)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(c, "[transaction_tags.TagCreateBatchHandler] failed to get max display order for user \"uid:%d\", because %s", uid, err.Error())
|
||||||
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
|
}
|
||||||
|
|
||||||
|
tags := a.createNewTagModels(uid, &tagCreateBatchReq, maxOrderId+1)
|
||||||
|
|
||||||
|
err = a.tags.CreateTags(c, uid, tags, tagCreateBatchReq.SkipExists)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(c, "[transaction_tags.TagCreateBatchHandler] failed to create tags for user \"uid:%d\", because %s", uid, err.Error())
|
||||||
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Infof(c, "[transaction_tags.TagCreateBatchHandler] user \"uid:%d\" has created tags successfully", uid)
|
||||||
|
|
||||||
|
tagResps := make(models.TransactionTagInfoResponseSlice, len(tags))
|
||||||
|
|
||||||
|
for i := 0; i < len(tags); i++ {
|
||||||
|
tagResps[i] = tags[i].ToTransactionTagInfoResponse()
|
||||||
|
}
|
||||||
|
|
||||||
|
sort.Sort(tagResps)
|
||||||
|
|
||||||
|
return tagResps, nil
|
||||||
|
}
|
||||||
|
|
||||||
// TagModifyHandler saves an existed transaction tag by request parameters for current user
|
// TagModifyHandler saves an existed transaction tag by request parameters for current user
|
||||||
func (a *TransactionTagsApi) TagModifyHandler(c *core.WebContext) (any, *errs.Error) {
|
func (a *TransactionTagsApi) TagModifyHandler(c *core.WebContext) (any, *errs.Error) {
|
||||||
var tagModifyReq models.TransactionTagModifyRequest
|
var tagModifyReq models.TransactionTagModifyRequest
|
||||||
@@ -230,3 +271,15 @@ func (a *TransactionTagsApi) createNewTagModel(uid int64, tagCreateReq *models.T
|
|||||||
DisplayOrder: order,
|
DisplayOrder: order,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (a *TransactionTagsApi) createNewTagModels(uid int64, tagCreateBatchReq *models.TransactionTagCreateBatchRequest, order int32) []*models.TransactionTag {
|
||||||
|
tags := make([]*models.TransactionTag, len(tagCreateBatchReq.Tags))
|
||||||
|
|
||||||
|
for i := 0; i < len(tagCreateBatchReq.Tags); i++ {
|
||||||
|
tagCreateReq := tagCreateBatchReq.Tags[i]
|
||||||
|
tag := a.createNewTagModel(uid, tagCreateReq, order+int32(i))
|
||||||
|
tags[i] = tag
|
||||||
|
}
|
||||||
|
|
||||||
|
return tags
|
||||||
|
}
|
||||||
|
|||||||
@@ -31,6 +31,9 @@ var (
|
|||||||
container: settings.Container,
|
container: settings.Container,
|
||||||
},
|
},
|
||||||
ApiUsingDuplicateChecker: ApiUsingDuplicateChecker{
|
ApiUsingDuplicateChecker: ApiUsingDuplicateChecker{
|
||||||
|
ApiUsingConfig: ApiUsingConfig{
|
||||||
|
container: settings.Container,
|
||||||
|
},
|
||||||
container: duplicatechecker.Container,
|
container: duplicatechecker.Container,
|
||||||
},
|
},
|
||||||
templates: services.TransactionTemplates,
|
templates: services.TransactionTemplates,
|
||||||
@@ -156,7 +159,12 @@ func (a *TransactionTemplatesApi) TemplateCreateHandler(c *core.WebContext) (any
|
|||||||
}
|
}
|
||||||
|
|
||||||
serverUtcOffset := utils.GetServerTimezoneOffsetMinutes()
|
serverUtcOffset := utils.GetServerTimezoneOffsetMinutes()
|
||||||
template := a.createNewTemplateModel(uid, &templateCreateReq, maxOrderId+1)
|
template, err := a.createNewTemplateModel(uid, &templateCreateReq, maxOrderId+1)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(c, "[transaction_templates.TemplateCreateHandler] failed to create new template for user \"uid:%d\", because %s", uid, err.Error())
|
||||||
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
|
}
|
||||||
|
|
||||||
if a.CurrentConfig().EnableDuplicateSubmissionsCheck && templateCreateReq.ClientSessionId != "" {
|
if a.CurrentConfig().EnableDuplicateSubmissionsCheck && templateCreateReq.ClientSessionId != "" {
|
||||||
found, remark := a.GetSubmissionRemark(duplicatechecker.DUPLICATE_CHECKER_TYPE_NEW_TEMPLATE, uid, templateCreateReq.ClientSessionId)
|
found, remark := a.GetSubmissionRemark(duplicatechecker.DUPLICATE_CHECKER_TYPE_NEW_TEMPLATE, uid, templateCreateReq.ClientSessionId)
|
||||||
@@ -189,7 +197,7 @@ func (a *TransactionTemplatesApi) TemplateCreateHandler(c *core.WebContext) (any
|
|||||||
|
|
||||||
log.Infof(c, "[transaction_templates.TemplateCreateHandler] user \"uid:%d\" has created a new template \"id:%d\" successfully", uid, template.TemplateId)
|
log.Infof(c, "[transaction_templates.TemplateCreateHandler] user \"uid:%d\" has created a new template \"id:%d\" successfully", uid, template.TemplateId)
|
||||||
|
|
||||||
a.SetSubmissionRemark(duplicatechecker.DUPLICATE_CHECKER_TYPE_NEW_TEMPLATE, uid, templateCreateReq.ClientSessionId, utils.Int64ToString(template.TemplateId))
|
a.SetSubmissionRemarkIfEnable(duplicatechecker.DUPLICATE_CHECKER_TYPE_NEW_TEMPLATE, uid, templateCreateReq.ClientSessionId, utils.Int64ToString(template.TemplateId))
|
||||||
templateResp := template.ToTransactionTemplateInfoResponse(serverUtcOffset)
|
templateResp := template.ToTransactionTemplateInfoResponse(serverUtcOffset)
|
||||||
|
|
||||||
return templateResp, nil
|
return templateResp, nil
|
||||||
@@ -260,6 +268,34 @@ func (a *TransactionTemplatesApi) TemplateModifyHandler(c *core.WebContext) (any
|
|||||||
newTemplate.ScheduledFrequency = a.getOrderedFrequencyValues(*templateModifyReq.ScheduledFrequency)
|
newTemplate.ScheduledFrequency = a.getOrderedFrequencyValues(*templateModifyReq.ScheduledFrequency)
|
||||||
newTemplate.ScheduledAt = a.getUTCScheduledAt(*templateModifyReq.ScheduledTimezoneUtcOffset)
|
newTemplate.ScheduledAt = a.getUTCScheduledAt(*templateModifyReq.ScheduledTimezoneUtcOffset)
|
||||||
newTemplate.ScheduledTimezoneUtcOffset = *templateModifyReq.ScheduledTimezoneUtcOffset
|
newTemplate.ScheduledTimezoneUtcOffset = *templateModifyReq.ScheduledTimezoneUtcOffset
|
||||||
|
|
||||||
|
if templateModifyReq.ScheduledStartDate != nil {
|
||||||
|
startTime, err := utils.ParseFromLongDateFirstTime(*templateModifyReq.ScheduledStartDate, *templateModifyReq.ScheduledTimezoneUtcOffset)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(c, "[transaction_templates.TemplateModifyHandler] failed to parse scheduled start date for user \"uid:%d\", because %s", uid, err.Error())
|
||||||
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
|
}
|
||||||
|
|
||||||
|
startUnixTime := startTime.Unix()
|
||||||
|
newTemplate.ScheduledStartTime = &startUnixTime
|
||||||
|
}
|
||||||
|
|
||||||
|
if templateModifyReq.ScheduledEndDate != nil {
|
||||||
|
endTime, err := utils.ParseFromLongDateLastTime(*templateModifyReq.ScheduledEndDate, *templateModifyReq.ScheduledTimezoneUtcOffset)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(c, "[transaction_templates.TemplateModifyHandler] failed to parse scheduled end date for user \"uid:%d\", because %s", uid, err.Error())
|
||||||
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
|
}
|
||||||
|
|
||||||
|
endUnixTime := endTime.Unix()
|
||||||
|
newTemplate.ScheduledEndTime = &endUnixTime
|
||||||
|
}
|
||||||
|
|
||||||
|
if newTemplate.ScheduledStartTime != nil && newTemplate.ScheduledEndTime != nil && *newTemplate.ScheduledStartTime > *newTemplate.ScheduledEndTime {
|
||||||
|
return nil, errs.ErrScheduledTransactionTemplateStartDataLaterThanEndDate
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if newTemplate.Name == template.Name &&
|
if newTemplate.Name == template.Name &&
|
||||||
@@ -277,6 +313,8 @@ func (a *TransactionTemplatesApi) TemplateModifyHandler(c *core.WebContext) (any
|
|||||||
} else if template.TemplateType == models.TRANSACTION_TEMPLATE_TYPE_SCHEDULE {
|
} else if template.TemplateType == models.TRANSACTION_TEMPLATE_TYPE_SCHEDULE {
|
||||||
if newTemplate.ScheduledFrequencyType == template.ScheduledFrequencyType &&
|
if newTemplate.ScheduledFrequencyType == template.ScheduledFrequencyType &&
|
||||||
newTemplate.ScheduledFrequency == template.ScheduledFrequency &&
|
newTemplate.ScheduledFrequency == template.ScheduledFrequency &&
|
||||||
|
newTemplate.ScheduledStartTime == template.ScheduledStartTime &&
|
||||||
|
newTemplate.ScheduledEndTime == template.ScheduledEndTime &&
|
||||||
newTemplate.ScheduledAt == template.ScheduledAt &&
|
newTemplate.ScheduledAt == template.ScheduledAt &&
|
||||||
newTemplate.ScheduledTimezoneUtcOffset == template.ScheduledTimezoneUtcOffset {
|
newTemplate.ScheduledTimezoneUtcOffset == template.ScheduledTimezoneUtcOffset {
|
||||||
return nil, errs.ErrNothingWillBeUpdated
|
return nil, errs.ErrNothingWillBeUpdated
|
||||||
@@ -419,7 +457,7 @@ func (a *TransactionTemplatesApi) TemplateDeleteHandler(c *core.WebContext) (any
|
|||||||
return true, nil
|
return true, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *TransactionTemplatesApi) createNewTemplateModel(uid int64, templateCreateReq *models.TransactionTemplateCreateRequest, order int32) *models.TransactionTemplate {
|
func (a *TransactionTemplatesApi) createNewTemplateModel(uid int64, templateCreateReq *models.TransactionTemplateCreateRequest, order int32) (*models.TransactionTemplate, error) {
|
||||||
template := &models.TransactionTemplate{
|
template := &models.TransactionTemplate{
|
||||||
Uid: uid,
|
Uid: uid,
|
||||||
TemplateType: templateCreateReq.TemplateType,
|
TemplateType: templateCreateReq.TemplateType,
|
||||||
@@ -441,9 +479,35 @@ func (a *TransactionTemplatesApi) createNewTemplateModel(uid int64, templateCrea
|
|||||||
template.ScheduledFrequency = a.getOrderedFrequencyValues(*templateCreateReq.ScheduledFrequency)
|
template.ScheduledFrequency = a.getOrderedFrequencyValues(*templateCreateReq.ScheduledFrequency)
|
||||||
template.ScheduledAt = a.getUTCScheduledAt(*templateCreateReq.ScheduledTimezoneUtcOffset)
|
template.ScheduledAt = a.getUTCScheduledAt(*templateCreateReq.ScheduledTimezoneUtcOffset)
|
||||||
template.ScheduledTimezoneUtcOffset = *templateCreateReq.ScheduledTimezoneUtcOffset
|
template.ScheduledTimezoneUtcOffset = *templateCreateReq.ScheduledTimezoneUtcOffset
|
||||||
|
|
||||||
|
if templateCreateReq.ScheduledStartDate != nil {
|
||||||
|
startTime, err := utils.ParseFromLongDateFirstTime(*templateCreateReq.ScheduledStartDate, *templateCreateReq.ScheduledTimezoneUtcOffset)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
startUnixTime := startTime.Unix()
|
||||||
|
template.ScheduledStartTime = &startUnixTime
|
||||||
|
}
|
||||||
|
|
||||||
|
if templateCreateReq.ScheduledEndDate != nil {
|
||||||
|
endTime, err := utils.ParseFromLongDateLastTime(*templateCreateReq.ScheduledEndDate, *templateCreateReq.ScheduledTimezoneUtcOffset)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
endUnixTime := endTime.Unix()
|
||||||
|
template.ScheduledEndTime = &endUnixTime
|
||||||
|
}
|
||||||
|
|
||||||
|
if template.ScheduledStartTime != nil && template.ScheduledEndTime != nil && *template.ScheduledStartTime > *template.ScheduledEndTime {
|
||||||
|
return nil, errs.ErrScheduledTransactionTemplateStartDataLaterThanEndDate
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return template
|
return template, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *TransactionTemplatesApi) getUTCScheduledAt(scheduledTimezoneUtcOffset int16) int16 {
|
func (a *TransactionTemplatesApi) getUTCScheduledAt(scheduledTimezoneUtcOffset int16) int16 {
|
||||||
|
|||||||
+255
-16
@@ -1,6 +1,8 @@
|
|||||||
package api
|
package api
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"sort"
|
"sort"
|
||||||
"strings"
|
"strings"
|
||||||
@@ -8,6 +10,8 @@ import (
|
|||||||
orderedmap "github.com/wk8/go-ordered-map/v2"
|
orderedmap "github.com/wk8/go-ordered-map/v2"
|
||||||
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters"
|
"github.com/mayswind/ezbookkeeping/pkg/converters"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/converter"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/duplicatechecker"
|
"github.com/mayswind/ezbookkeeping/pkg/duplicatechecker"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
@@ -40,6 +44,9 @@ var (
|
|||||||
container: settings.Container,
|
container: settings.Container,
|
||||||
},
|
},
|
||||||
ApiUsingDuplicateChecker: ApiUsingDuplicateChecker{
|
ApiUsingDuplicateChecker: ApiUsingDuplicateChecker{
|
||||||
|
ApiUsingConfig: ApiUsingConfig{
|
||||||
|
container: settings.Container,
|
||||||
|
},
|
||||||
container: duplicatechecker.Container,
|
container: duplicatechecker.Container,
|
||||||
},
|
},
|
||||||
transactions: services.Transactions,
|
transactions: services.Transactions,
|
||||||
@@ -382,10 +389,10 @@ func (a *TransactionsApi) TransactionStatisticsTrendsHandler(c *core.WebContext)
|
|||||||
return nil, errs.Or(err, errs.ErrOperationFailed)
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
}
|
}
|
||||||
|
|
||||||
statisticTrendsResp := make(models.TransactionStatisticTrendsItemSlice, 0, len(allMonthlyTotalAmounts))
|
statisticTrendsResp := make(models.TransactionStatisticTrendsResponseItemSlice, 0, len(allMonthlyTotalAmounts))
|
||||||
|
|
||||||
for yearMonth, monthlyTotalAmounts := range allMonthlyTotalAmounts {
|
for yearMonth, monthlyTotalAmounts := range allMonthlyTotalAmounts {
|
||||||
monthlyStatisticResp := &models.TransactionStatisticTrendsItem{
|
monthlyStatisticResp := &models.TransactionStatisticTrendsResponseItem{
|
||||||
Year: yearMonth / 100,
|
Year: yearMonth / 100,
|
||||||
Month: yearMonth % 100,
|
Month: yearMonth % 100,
|
||||||
Items: make([]*models.TransactionStatisticResponseItem, len(monthlyTotalAmounts)),
|
Items: make([]*models.TransactionStatisticResponseItem, len(monthlyTotalAmounts)),
|
||||||
@@ -781,7 +788,7 @@ func (a *TransactionsApi) TransactionCreateHandler(c *core.WebContext) (any, *er
|
|||||||
|
|
||||||
log.Infof(c, "[transactions.TransactionCreateHandler] user \"uid:%d\" has created a new transaction \"id:%d\" successfully", uid, transaction.TransactionId)
|
log.Infof(c, "[transactions.TransactionCreateHandler] user \"uid:%d\" has created a new transaction \"id:%d\" successfully", uid, transaction.TransactionId)
|
||||||
|
|
||||||
a.SetSubmissionRemark(duplicatechecker.DUPLICATE_CHECKER_TYPE_NEW_TRANSACTION, uid, transactionCreateReq.ClientSessionId, utils.Int64ToString(transaction.TransactionId))
|
a.SetSubmissionRemarkIfEnable(duplicatechecker.DUPLICATE_CHECKER_TYPE_NEW_TRANSACTION, uid, transactionCreateReq.ClientSessionId, utils.Int64ToString(transaction.TransactionId))
|
||||||
transactionResp := transaction.ToTransactionInfoResponse(tagIds, transactionEditable)
|
transactionResp := transaction.ToTransactionInfoResponse(tagIds, transactionEditable)
|
||||||
transactionResp.Pictures = a.GetTransactionPictureInfoResponseList(pictureInfos)
|
transactionResp.Pictures = a.GetTransactionPictureInfoResponseList(pictureInfos)
|
||||||
|
|
||||||
@@ -1030,6 +1037,83 @@ func (a *TransactionsApi) TransactionDeleteHandler(c *core.WebContext) (any, *er
|
|||||||
return true, nil
|
return true, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TransactionParseImportDsvFileDataHandler returns the parsed file data by request parameters for current user
|
||||||
|
func (a *TransactionsApi) TransactionParseImportDsvFileDataHandler(c *core.WebContext) (any, *errs.Error) {
|
||||||
|
uid := c.GetCurrentUid()
|
||||||
|
form, err := c.MultipartForm()
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(c, "[transactions.TransactionParseImportDsvFileDataHandler] failed to get multi-part form data for user \"uid:%d\", because %s", uid, err.Error())
|
||||||
|
return nil, errs.ErrParameterInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
fileTypes := form.Value["fileType"]
|
||||||
|
|
||||||
|
if len(fileTypes) < 1 || fileTypes[0] == "" {
|
||||||
|
return nil, errs.ErrImportFileTypeIsEmpty
|
||||||
|
}
|
||||||
|
|
||||||
|
fileType := fileTypes[0]
|
||||||
|
|
||||||
|
if !converters.IsCustomDelimiterSeparatedValuesFileType(fileType) {
|
||||||
|
return nil, errs.Or(err, errs.ErrImportFileTypeNotSupported)
|
||||||
|
}
|
||||||
|
|
||||||
|
fileEncodings := form.Value["fileEncoding"]
|
||||||
|
|
||||||
|
if len(fileEncodings) < 1 || fileEncodings[0] == "" {
|
||||||
|
return nil, errs.ErrImportFileEncodingIsEmpty
|
||||||
|
}
|
||||||
|
|
||||||
|
fileEncoding := fileEncodings[0]
|
||||||
|
dataParser, err := converters.CreateNewDelimiterSeparatedValuesDataParser(fileType, fileEncoding)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, errs.Or(err, errs.ErrImportFileTypeNotSupported)
|
||||||
|
}
|
||||||
|
|
||||||
|
importFiles := form.File["file"]
|
||||||
|
|
||||||
|
if len(importFiles) < 1 {
|
||||||
|
log.Warnf(c, "[transactions.TransactionParseImportDsvFileDataHandler] there is no import file in request for user \"uid:%d\"", uid)
|
||||||
|
return nil, errs.ErrNoFilesUpload
|
||||||
|
}
|
||||||
|
|
||||||
|
if importFiles[0].Size < 1 {
|
||||||
|
log.Warnf(c, "[transactions.TransactionParseImportDsvFileDataHandler] the size of import file in request is zero for user \"uid:%d\"", uid)
|
||||||
|
return nil, errs.ErrUploadedFileEmpty
|
||||||
|
}
|
||||||
|
|
||||||
|
if importFiles[0].Size > int64(a.CurrentConfig().MaxImportFileSize) {
|
||||||
|
log.Warnf(c, "[transactions.TransactionParseImportDsvFileDataHandler] the upload file size \"%d\" exceeds the maximum size \"%d\" of import file for user \"uid:%d\"", importFiles[0].Size, a.CurrentConfig().MaxImportFileSize, uid)
|
||||||
|
return nil, errs.ErrExceedMaxUploadFileSize
|
||||||
|
}
|
||||||
|
|
||||||
|
importFile, err := importFiles[0].Open()
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(c, "[transactions.TransactionParseImportDsvFileDataHandler] failed to get import file from request for user \"uid:%d\", because %s", uid, err.Error())
|
||||||
|
return nil, errs.ErrOperationFailed
|
||||||
|
}
|
||||||
|
|
||||||
|
defer importFile.Close()
|
||||||
|
fileData, err := io.ReadAll(importFile)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(c, "[transactions.TransactionParseImportDsvFileDataHandler] failed to read import file data for user \"uid:%d\", because %s", uid, err.Error())
|
||||||
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
|
}
|
||||||
|
|
||||||
|
allLines, err := dataParser.ParseDsvFileLines(c, fileData)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(c, "[transactions.TransactionParseImportDsvFileDataHandler] failed to parse import file data for user \"uid:%d\", because %s", uid, err.Error())
|
||||||
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
|
}
|
||||||
|
|
||||||
|
return allLines, nil
|
||||||
|
}
|
||||||
|
|
||||||
// TransactionParseImportFileHandler returns the parsed transaction data by request parameters for current user
|
// TransactionParseImportFileHandler returns the parsed transaction data by request parameters for current user
|
||||||
func (a *TransactionsApi) TransactionParseImportFileHandler(c *core.WebContext) (any, *errs.Error) {
|
func (a *TransactionsApi) TransactionParseImportFileHandler(c *core.WebContext) (any, *errs.Error) {
|
||||||
uid := c.GetCurrentUid()
|
uid := c.GetCurrentUid()
|
||||||
@@ -1054,7 +1138,98 @@ func (a *TransactionsApi) TransactionParseImportFileHandler(c *core.WebContext)
|
|||||||
}
|
}
|
||||||
|
|
||||||
fileType := fileTypes[0]
|
fileType := fileTypes[0]
|
||||||
dataImporter, err := converters.GetTransactionDataImporter(fileType)
|
|
||||||
|
var dataImporter converter.TransactionDataImporter
|
||||||
|
|
||||||
|
if converters.IsCustomDelimiterSeparatedValuesFileType(fileType) {
|
||||||
|
fileEncodings := form.Value["fileEncoding"]
|
||||||
|
|
||||||
|
if len(fileEncodings) < 1 || fileEncodings[0] == "" {
|
||||||
|
return nil, errs.ErrImportFileEncodingIsEmpty
|
||||||
|
}
|
||||||
|
|
||||||
|
fileEncoding := fileEncodings[0]
|
||||||
|
|
||||||
|
columnMappings := form.Value["columnMapping"]
|
||||||
|
|
||||||
|
if len(columnMappings) < 1 || columnMappings[0] == "" {
|
||||||
|
return nil, errs.ErrImportFileColumnMappingInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
var columnIndexMapping = map[datatable.TransactionDataTableColumn]int{}
|
||||||
|
err = json.Unmarshal([]byte(columnMappings[0]), &columnIndexMapping)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(c, "[transactions.TransactionParseImportFileHandler] failed to parse column mapping for user \"uid:%d\", because %s", uid, err.Error())
|
||||||
|
return nil, errs.ErrImportFileColumnMappingInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
transactionTypeMappings := form.Value["transactionTypeMapping"]
|
||||||
|
|
||||||
|
if len(transactionTypeMappings) < 1 || transactionTypeMappings[0] == "" {
|
||||||
|
return nil, errs.ErrImportFileTransactionTypeMappingInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
var transactionTypeNameMapping = map[string]models.TransactionType{}
|
||||||
|
err = json.Unmarshal([]byte(transactionTypeMappings[0]), &transactionTypeNameMapping)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(c, "[transactions.TransactionParseImportFileHandler] failed to parse transaction type mapping for user \"uid:%d\", because %s", uid, err.Error())
|
||||||
|
return nil, errs.ErrImportFileTransactionTypeMappingInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
hasHeaderLines := form.Value["hasHeaderLine"]
|
||||||
|
hasHeaderLine := false
|
||||||
|
|
||||||
|
if len(hasHeaderLines) > 0 {
|
||||||
|
hasHeaderLine = hasHeaderLines[0] == "true"
|
||||||
|
}
|
||||||
|
|
||||||
|
timeFormats := form.Value["timeFormat"]
|
||||||
|
|
||||||
|
if len(timeFormats) < 1 || timeFormats[0] == "" {
|
||||||
|
return nil, errs.ErrImportFileTransactionTimeFormatInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
timezoneFormats := form.Value["timezoneFormat"]
|
||||||
|
timezoneFormat := ""
|
||||||
|
|
||||||
|
if len(timezoneFormats) > 0 {
|
||||||
|
timezoneFormat = timezoneFormats[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
amountDecimalSeparators := form.Value["amountDecimalSeparator"]
|
||||||
|
amountDecimalSeparator := ""
|
||||||
|
|
||||||
|
if len(amountDecimalSeparators) > 0 {
|
||||||
|
amountDecimalSeparator = amountDecimalSeparators[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
amountDigitGroupingSymbols := form.Value["amountDigitGroupingSymbol"]
|
||||||
|
amountDigitGroupingSymbol := ""
|
||||||
|
|
||||||
|
if len(amountDigitGroupingSymbols) > 0 {
|
||||||
|
amountDigitGroupingSymbol = amountDigitGroupingSymbols[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
geoLocationSeparators := form.Value["geoSeparator"]
|
||||||
|
geoLocationSeparator := ""
|
||||||
|
|
||||||
|
if len(geoLocationSeparators) > 0 {
|
||||||
|
geoLocationSeparator = geoLocationSeparators[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
transactionTagSeparators := form.Value["tagSeparator"]
|
||||||
|
transactionTagSeparator := ""
|
||||||
|
|
||||||
|
if len(transactionTagSeparators) > 0 {
|
||||||
|
transactionTagSeparator = transactionTagSeparators[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
dataImporter, err = converters.CreateNewDelimiterSeparatedValuesDataImporter(fileType, fileEncoding, columnIndexMapping, transactionTypeNameMapping, hasHeaderLine, timeFormats[0], timezoneFormat, amountDecimalSeparator, amountDigitGroupingSymbol, geoLocationSeparator, transactionTagSeparator)
|
||||||
|
} else {
|
||||||
|
dataImporter, err = converters.GetTransactionDataImporter(fileType)
|
||||||
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, errs.Or(err, errs.ErrImportFileTypeNotSupported)
|
return nil, errs.Or(err, errs.ErrImportFileTypeNotSupported)
|
||||||
@@ -1084,6 +1259,7 @@ func (a *TransactionsApi) TransactionParseImportFileHandler(c *core.WebContext)
|
|||||||
return nil, errs.ErrOperationFailed
|
return nil, errs.ErrOperationFailed
|
||||||
}
|
}
|
||||||
|
|
||||||
|
defer importFile.Close()
|
||||||
fileData, err := io.ReadAll(importFile)
|
fileData, err := io.ReadAll(importFile)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -1108,25 +1284,25 @@ func (a *TransactionsApi) TransactionParseImportFileHandler(c *core.WebContext)
|
|||||||
accounts, err := a.accounts.GetAllAccountsByUid(c, user.Uid)
|
accounts, err := a.accounts.GetAllAccountsByUid(c, user.Uid)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.BootErrorf(c, "[transactions.TransactionParseImportFileHandler] failed to get accounts for user \"uid:%d\", because %s", user.Uid, err.Error())
|
log.Errorf(c, "[transactions.TransactionParseImportFileHandler] failed to get accounts for user \"uid:%d\", because %s", user.Uid, err.Error())
|
||||||
return nil, errs.Or(err, errs.ErrOperationFailed)
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
}
|
}
|
||||||
|
|
||||||
accountMap := a.accounts.GetAccountNameMapByList(accounts)
|
accountMap := a.accounts.GetVisibleAccountNameMapByList(accounts)
|
||||||
|
|
||||||
categories, err := a.transactionCategories.GetAllCategoriesByUid(c, user.Uid, 0, -1)
|
categories, err := a.transactionCategories.GetAllCategoriesByUid(c, user.Uid, 0, -1)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.BootErrorf(c, "[transactions.TransactionParseImportFileHandler] failed to get categories for user \"uid:%d\", because %s", user.Uid, err.Error())
|
log.Errorf(c, "[transactions.TransactionParseImportFileHandler] failed to get categories for user \"uid:%d\", because %s", user.Uid, err.Error())
|
||||||
return nil, errs.Or(err, errs.ErrOperationFailed)
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
}
|
}
|
||||||
|
|
||||||
expenseCategoryMap, incomeCategoryMap, transferCategoryMap := a.transactionCategories.GetCategoryNameMapByList(categories)
|
expenseCategoryMap, incomeCategoryMap, transferCategoryMap := a.transactionCategories.GetVisibleSubCategoryNameMapByList(categories)
|
||||||
|
|
||||||
tags, err := a.transactionTags.GetAllTagsByUid(c, user.Uid)
|
tags, err := a.transactionTags.GetAllTagsByUid(c, user.Uid)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.BootErrorf(c, "[transactions.TransactionParseImportFileHandler] failed to get tags for user \"uid:%d\", because %s", user.Uid, err.Error())
|
log.Errorf(c, "[transactions.TransactionParseImportFileHandler] failed to get tags for user \"uid:%d\", because %s", user.Uid, err.Error())
|
||||||
return nil, errs.Or(err, errs.ErrOperationFailed)
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1135,7 +1311,7 @@ func (a *TransactionsApi) TransactionParseImportFileHandler(c *core.WebContext)
|
|||||||
parsedTransactions, _, _, _, _, _, err := dataImporter.ParseImportedData(c, user, fileData, utcOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
parsedTransactions, _, _, _, _, _, err := dataImporter.ParseImportedData(c, user, fileData, utcOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.BootErrorf(c, "[transactions.TransactionParseImportFileHandler] failed to parse imported data for user \"uid:%d\", because %s", user.Uid, err.Error())
|
log.Errorf(c, "[transactions.TransactionParseImportFileHandler] failed to parse imported data for user \"uid:%d\", because %s", user.Uid, err.Error())
|
||||||
return nil, errs.Or(err, errs.ErrOperationFailed)
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1169,11 +1345,21 @@ func (a *TransactionsApi) TransactionImportHandler(c *core.WebContext) (any, *er
|
|||||||
found, remark := a.GetSubmissionRemark(duplicatechecker.DUPLICATE_CHECKER_TYPE_IMPORT_TRANSACTIONS, uid, transactionImportReq.ClientSessionId)
|
found, remark := a.GetSubmissionRemark(duplicatechecker.DUPLICATE_CHECKER_TYPE_IMPORT_TRANSACTIONS, uid, transactionImportReq.ClientSessionId)
|
||||||
|
|
||||||
if found {
|
if found {
|
||||||
log.Infof(c, "[transactions.TransactionImportHandler] another \"%s\" transactions has been imported for user \"uid:%d\"", remark, uid)
|
items := strings.Split(remark, ":")
|
||||||
count, err := utils.StringToInt(remark)
|
|
||||||
|
|
||||||
if err == nil {
|
if len(items) >= 2 {
|
||||||
return count, nil
|
if items[0] == "finished" {
|
||||||
|
log.Infof(c, "[transactions.TransactionImportHandler] another \"%s\" transactions has been imported for user \"uid:%d\"", items[1], uid)
|
||||||
|
count, err := utils.StringToInt(items[1])
|
||||||
|
|
||||||
|
if err == nil {
|
||||||
|
return count, nil
|
||||||
|
}
|
||||||
|
} else if items[0] == "processing" {
|
||||||
|
return nil, errs.ErrRepeatedRequest
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
log.Warnf(c, "[transactions.TransactionImportHandler] another transaction import task may be executing, but remark \"%s\" is invalid", remark)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1247,21 +1433,74 @@ func (a *TransactionsApi) TransactionImportHandler(c *core.WebContext) (any, *er
|
|||||||
newTransactions[i] = transaction
|
newTransactions[i] = transaction
|
||||||
}
|
}
|
||||||
|
|
||||||
err = a.transactions.BatchCreateTransactions(c, user.Uid, newTransactions, newTransactionTagIdsMap)
|
err = a.transactions.BatchCreateTransactions(c, user.Uid, newTransactions, newTransactionTagIdsMap, func(currentProcess float64) {
|
||||||
|
a.SetSubmissionRemarkIfEnable(duplicatechecker.DUPLICATE_CHECKER_TYPE_IMPORT_TRANSACTIONS, uid, transactionImportReq.ClientSessionId, fmt.Sprintf("processing:%.2f", currentProcess))
|
||||||
|
})
|
||||||
count := len(newTransactions)
|
count := len(newTransactions)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
a.RemoveSubmissionRemarkIfEnable(duplicatechecker.DUPLICATE_CHECKER_TYPE_IMPORT_TRANSACTIONS, uid, transactionImportReq.ClientSessionId)
|
||||||
log.Errorf(c, "[transactions.TransactionImportHandler] failed to import %d transactions for user \"uid:%d\", because %s", count, uid, err.Error())
|
log.Errorf(c, "[transactions.TransactionImportHandler] failed to import %d transactions for user \"uid:%d\", because %s", count, uid, err.Error())
|
||||||
return nil, errs.Or(err, errs.ErrOperationFailed)
|
return nil, errs.Or(err, errs.ErrOperationFailed)
|
||||||
}
|
}
|
||||||
|
|
||||||
log.Infof(c, "[transactions.TransactionImportHandler] user \"uid:%d\" has imported %d transactions successfully", uid, count)
|
log.Infof(c, "[transactions.TransactionImportHandler] user \"uid:%d\" has imported %d transactions successfully", uid, count)
|
||||||
|
|
||||||
a.SetSubmissionRemark(duplicatechecker.DUPLICATE_CHECKER_TYPE_IMPORT_TRANSACTIONS, uid, transactionImportReq.ClientSessionId, utils.IntToString(count))
|
a.SetSubmissionRemarkIfEnable(duplicatechecker.DUPLICATE_CHECKER_TYPE_IMPORT_TRANSACTIONS, uid, transactionImportReq.ClientSessionId, fmt.Sprintf("finished:%d", count))
|
||||||
|
|
||||||
return count, nil
|
return count, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TransactionImportProcessHandler returns the process of specified transaction import task by request parameters for current user
|
||||||
|
func (a *TransactionsApi) TransactionImportProcessHandler(c *core.WebContext) (any, *errs.Error) {
|
||||||
|
var transactionImportProcessReq models.TransactionImportProcessRequest
|
||||||
|
err := c.ShouldBindQuery(&transactionImportProcessReq)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf(c, "[transactions.TransactionImportProcessHandler] parse request failed, because %s", err.Error())
|
||||||
|
return nil, errs.NewIncompleteOrIncorrectSubmissionError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
uid := c.GetCurrentUid()
|
||||||
|
|
||||||
|
if !a.CurrentConfig().EnableDuplicateSubmissionsCheck {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
found, remark := a.GetSubmissionRemark(duplicatechecker.DUPLICATE_CHECKER_TYPE_IMPORT_TRANSACTIONS, uid, transactionImportProcessReq.ClientSessionId)
|
||||||
|
|
||||||
|
if !found {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
items := strings.Split(remark, ":")
|
||||||
|
|
||||||
|
if len(items) < 2 {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if items[0] == "finished" {
|
||||||
|
return 100, nil
|
||||||
|
} else if items[0] != "processing" {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
process, err := utils.StringToFloat64(items[1])
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf(c, "[transactions.TransactionImportProcessHandler] parse process failed, because %s", err.Error())
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if process < 0 {
|
||||||
|
return nil, nil
|
||||||
|
} else if process >= 100 {
|
||||||
|
process = 100
|
||||||
|
}
|
||||||
|
|
||||||
|
return process, nil
|
||||||
|
}
|
||||||
|
|
||||||
func (a *TransactionsApi) filterTransactions(c *core.WebContext, uid int64, transactions []*models.Transaction, accountMap map[int64]*models.Account) []*models.Transaction {
|
func (a *TransactionsApi) filterTransactions(c *core.WebContext, uid int64, transactions []*models.Transaction, accountMap map[int64]*models.Account) []*models.Transaction {
|
||||||
finalTransactions := make([]*models.Transaction, 0, len(transactions))
|
finalTransactions := make([]*models.Transaction, 0, len(transactions))
|
||||||
|
|
||||||
|
|||||||
@@ -421,6 +421,15 @@ func (a *UsersApi) UserUpdateProfileHandler(c *core.WebContext) (any, *errs.Erro
|
|||||||
userNew.CurrencyDisplayType = core.CURRENCY_DISPLAY_TYPE_INVALID
|
userNew.CurrencyDisplayType = core.CURRENCY_DISPLAY_TYPE_INVALID
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if userUpdateReq.CoordinateDisplayType != nil && *userUpdateReq.CoordinateDisplayType != user.CoordinateDisplayType {
|
||||||
|
user.CoordinateDisplayType = *userUpdateReq.CoordinateDisplayType
|
||||||
|
userNew.CoordinateDisplayType = *userUpdateReq.CoordinateDisplayType
|
||||||
|
modifyProfileBasicInfo = true
|
||||||
|
anythingUpdate = true
|
||||||
|
} else {
|
||||||
|
userNew.CoordinateDisplayType = core.COORDINATE_DISPLAY_TYPE_INVALID
|
||||||
|
}
|
||||||
|
|
||||||
if userUpdateReq.ExpenseAmountColor != nil && *userUpdateReq.ExpenseAmountColor != user.ExpenseAmountColor {
|
if userUpdateReq.ExpenseAmountColor != nil && *userUpdateReq.ExpenseAmountColor != user.ExpenseAmountColor {
|
||||||
user.ExpenseAmountColor = *userUpdateReq.ExpenseAmountColor
|
user.ExpenseAmountColor = *userUpdateReq.ExpenseAmountColor
|
||||||
userNew.ExpenseAmountColor = *userUpdateReq.ExpenseAmountColor
|
userNew.ExpenseAmountColor = *userUpdateReq.ExpenseAmountColor
|
||||||
|
|||||||
@@ -810,7 +810,7 @@ func (l *UserDataCli) ImportTransaction(c *core.CliContext, username string, fil
|
|||||||
return errs.ErrOperationFailed
|
return errs.ErrOperationFailed
|
||||||
}
|
}
|
||||||
|
|
||||||
err = l.transactions.BatchCreateTransactions(c, user.Uid, newTransactions, newTransactionTagIdsMap)
|
err = l.transactions.BatchCreateTransactions(c, user.Uid, newTransactions, newTransactionTagIdsMap, nil)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.CliErrorf(c, "[user_data.ImportTransaction] failed to create transaction, because %s", err.Error())
|
log.CliErrorf(c, "[user_data.ImportTransaction] failed to create transaction, because %s", err.Error())
|
||||||
@@ -876,7 +876,7 @@ func (l *UserDataCli) getUserEssentialData(c *core.CliContext, uid int64, userna
|
|||||||
return accountMap, categoryMap, tagMap, tagIndexes, tagIndexesMap, nil
|
return accountMap, categoryMap, tagMap, tagIndexes, tagIndexesMap, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *UserDataCli) getUserEssentialDataForImport(c *core.CliContext, uid int64, username string) (accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag, err error) {
|
func (l *UserDataCli) getUserEssentialDataForImport(c *core.CliContext, uid int64, username string) (accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag, err error) {
|
||||||
if uid <= 0 {
|
if uid <= 0 {
|
||||||
log.CliErrorf(c, "[user_data.getUserEssentialDataForImport] user uid \"%d\" is invalid", uid)
|
log.CliErrorf(c, "[user_data.getUserEssentialDataForImport] user uid \"%d\" is invalid", uid)
|
||||||
return nil, nil, nil, nil, nil, errs.ErrUserIdInvalid
|
return nil, nil, nil, nil, nil, errs.ErrUserIdInvalid
|
||||||
@@ -889,7 +889,7 @@ func (l *UserDataCli) getUserEssentialDataForImport(c *core.CliContext, uid int6
|
|||||||
return nil, nil, nil, nil, nil, err
|
return nil, nil, nil, nil, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
accountMap = l.accounts.GetAccountNameMapByList(accounts)
|
accountMap = l.accounts.GetVisibleAccountNameMapByList(accounts)
|
||||||
|
|
||||||
categories, err := l.categories.GetAllCategoriesByUid(c, uid, 0, -1)
|
categories, err := l.categories.GetAllCategoriesByUid(c, uid, 0, -1)
|
||||||
|
|
||||||
@@ -898,7 +898,7 @@ func (l *UserDataCli) getUserEssentialDataForImport(c *core.CliContext, uid int6
|
|||||||
return nil, nil, nil, nil, nil, err
|
return nil, nil, nil, nil, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
expenseCategoryMap, incomeCategoryMap, transferCategoryMap = l.categories.GetCategoryNameMapByList(categories)
|
expenseCategoryMap, incomeCategoryMap, transferCategoryMap = l.categories.GetVisibleSubCategoryNameMapByList(categories)
|
||||||
|
|
||||||
tags, err := l.tags.GetAllTagsByUid(c, uid)
|
tags, err := l.tags.GetAllTagsByUid(c, uid)
|
||||||
|
|
||||||
@@ -959,7 +959,7 @@ func (l *UserDataCli) checkTransactionCategory(c *core.CliContext, transaction *
|
|||||||
return errs.ErrTransactionCategoryNotFound
|
return errs.ErrTransactionCategoryNotFound
|
||||||
}
|
}
|
||||||
|
|
||||||
if category.ParentCategoryId == models.LevelOneTransactionParentId {
|
if category.ParentCategoryId == models.LevelOneTransactionCategoryParentId {
|
||||||
log.CliErrorf(c, "[user_data.checkTransactionCategory] the transaction category \"id:%d\" of transaction \"id:%d\" is not a sub category", transaction.CategoryId, transaction.TransactionId)
|
log.CliErrorf(c, "[user_data.checkTransactionCategory] the transaction category \"id:%d\" of transaction \"id:%d\" is not a sub category", transaction.CategoryId, transaction.TransactionId)
|
||||||
return errs.ErrOperationFailed
|
return errs.ErrOperationFailed
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ import (
|
|||||||
"golang.org/x/text/encoding/simplifiedchinese"
|
"golang.org/x/text/encoding/simplifiedchinese"
|
||||||
"golang.org/x/text/transform"
|
"golang.org/x/text/transform"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/converter"
|
||||||
csvdatatable "github.com/mayswind/ezbookkeeping/pkg/converters/csv"
|
csvdatatable "github.com/mayswind/ezbookkeeping/pkg/converters/csv"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
@@ -56,7 +57,7 @@ type alipayTransactionDataCsvFileImporter struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// ParseImportedData returns the imported data by parsing the alipay transaction csv data
|
// ParseImportedData returns the imported data by parsing the alipay transaction csv data
|
||||||
func (c *alipayTransactionDataCsvFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
func (c *alipayTransactionDataCsvFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
||||||
enc := simplifiedchinese.GB18030
|
enc := simplifiedchinese.GB18030
|
||||||
reader := transform.NewReader(bytes.NewReader(data), enc.NewDecoder())
|
reader := transform.NewReader(bytes.NewReader(data), enc.NewDecoder())
|
||||||
|
|
||||||
@@ -78,7 +79,7 @@ func (c *alipayTransactionDataCsvFileImporter) ParseImportedData(ctx core.Contex
|
|||||||
|
|
||||||
transactionRowParser := createAlipayTransactionDataRowParser(c.originalColumnNames)
|
transactionRowParser := createAlipayTransactionDataRowParser(c.originalColumnNames)
|
||||||
transactionDataTable := datatable.CreateNewCommonTransactionDataTable(commonDataTable, alipayTransactionSupportedColumns, transactionRowParser)
|
transactionDataTable := datatable.CreateNewCommonTransactionDataTable(commonDataTable, alipayTransactionSupportedColumns, transactionRowParser)
|
||||||
dataTableImporter := datatable.CreateNewSimpleImporter(alipayTransactionTypeNameMapping)
|
dataTableImporter := converter.CreateNewSimpleImporterWithTypeNameMapping(alipayTransactionTypeNameMapping)
|
||||||
|
|
||||||
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,197 @@
|
|||||||
|
package beancount
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
var operatorPriority = map[rune]int{
|
||||||
|
'+': 1,
|
||||||
|
'-': 1,
|
||||||
|
'*': 2,
|
||||||
|
'/': 2,
|
||||||
|
}
|
||||||
|
|
||||||
|
func toPostfixExprTokens(ctx core.Context, expr string) ([]string, error) {
|
||||||
|
finalTokens := make([]string, 0)
|
||||||
|
operatorStack := make([]rune, 0)
|
||||||
|
currentNumberBuilder := strings.Builder{}
|
||||||
|
isLastTokenOperator := true
|
||||||
|
|
||||||
|
expr = strings.ReplaceAll(expr, " ", "")
|
||||||
|
|
||||||
|
for i := 0; i < len(expr); i++ {
|
||||||
|
ch := rune(expr[i])
|
||||||
|
|
||||||
|
// number
|
||||||
|
if '0' <= ch && ch <= '9' || ch == '.' {
|
||||||
|
currentNumberBuilder.WriteRune(ch)
|
||||||
|
continue
|
||||||
|
} else if ch == '-' && i+1 < len(expr) && '0' <= expr[i+1] && expr[i+1] <= '9' && currentNumberBuilder.Len() == 0 && isLastTokenOperator {
|
||||||
|
currentNumberBuilder.WriteRune(ch)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// operator or parenthesis
|
||||||
|
if currentNumberBuilder.Len() > 0 {
|
||||||
|
finalTokens = append(finalTokens, currentNumberBuilder.String())
|
||||||
|
currentNumberBuilder.Reset()
|
||||||
|
isLastTokenOperator = false
|
||||||
|
}
|
||||||
|
|
||||||
|
switch ch {
|
||||||
|
case '+', '-', '*', '/':
|
||||||
|
if ch == '-' && isLastTokenOperator {
|
||||||
|
currentNumberBuilder.WriteRune(ch)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
for len(operatorStack) > 0 {
|
||||||
|
topOperator := operatorStack[len(operatorStack)-1]
|
||||||
|
|
||||||
|
if topOperator == '(' {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
if operatorPriority[topOperator] >= operatorPriority[ch] {
|
||||||
|
finalTokens = append(finalTokens, string(topOperator))
|
||||||
|
operatorStack = operatorStack[:len(operatorStack)-1]
|
||||||
|
} else {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
operatorStack = append(operatorStack, ch)
|
||||||
|
isLastTokenOperator = true
|
||||||
|
case '(':
|
||||||
|
operatorStack = append(operatorStack, ch)
|
||||||
|
isLastTokenOperator = true
|
||||||
|
case ')':
|
||||||
|
hasLeftParenthesis := false
|
||||||
|
|
||||||
|
for len(operatorStack) > 0 {
|
||||||
|
topOperator := operatorStack[len(operatorStack)-1]
|
||||||
|
operatorStack = operatorStack[:len(operatorStack)-1]
|
||||||
|
|
||||||
|
if topOperator == '(' {
|
||||||
|
hasLeftParenthesis = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
finalTokens = append(finalTokens, string(topOperator))
|
||||||
|
}
|
||||||
|
|
||||||
|
if !hasLeftParenthesis {
|
||||||
|
log.Warnf(ctx, "[beancount_amount_expression_evaluator.toPostfixExprTokens] cannot parse expression \"%s\", because missing left parenthesis", expr)
|
||||||
|
return nil, errs.ErrInvalidAmountExpression
|
||||||
|
}
|
||||||
|
|
||||||
|
isLastTokenOperator = false
|
||||||
|
default:
|
||||||
|
log.Warnf(ctx, "[beancount_amount_expression_evaluator.toPostfixExprTokens] cannot parse expression \"%s\", because containing unknown token \"%c\"", expr, ch)
|
||||||
|
return nil, errs.ErrInvalidAmountExpression
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if currentNumberBuilder.Len() > 0 {
|
||||||
|
finalTokens = append(finalTokens, currentNumberBuilder.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
for len(operatorStack) > 0 {
|
||||||
|
topOperator := operatorStack[len(operatorStack)-1]
|
||||||
|
operatorStack = operatorStack[:len(operatorStack)-1]
|
||||||
|
|
||||||
|
if topOperator == '(' {
|
||||||
|
log.Warnf(ctx, "[beancount_amount_expression_evaluator.toPostfixExprTokens] cannot parse expression \"%s\", because missing right parenthesis", expr)
|
||||||
|
return nil, errs.ErrInvalidAmountExpression
|
||||||
|
}
|
||||||
|
|
||||||
|
finalTokens = append(finalTokens, string(topOperator))
|
||||||
|
}
|
||||||
|
|
||||||
|
return finalTokens, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func evaluatePostfixExpr(ctx core.Context, tokens []string) (float64, error) {
|
||||||
|
stack := make([]float64, 0)
|
||||||
|
|
||||||
|
for i := 0; i < len(tokens); i++ {
|
||||||
|
token := tokens[i]
|
||||||
|
|
||||||
|
switch token {
|
||||||
|
case "+", "-", "*", "/": // operators
|
||||||
|
if len(stack) < 2 {
|
||||||
|
log.Warnf(ctx, "[beancount_amount_expression_evaluator.evaluatePostfixExpr] cannot evaluate expression \"%s\", because not enough operands", strings.Join(tokens, " "))
|
||||||
|
return 0, errs.ErrInvalidAmountExpression
|
||||||
|
}
|
||||||
|
|
||||||
|
// pop the top two operands
|
||||||
|
b := stack[len(stack)-1]
|
||||||
|
stack = stack[:len(stack)-1]
|
||||||
|
|
||||||
|
a := stack[len(stack)-1]
|
||||||
|
stack = stack[:len(stack)-1]
|
||||||
|
|
||||||
|
// evaluate the operation
|
||||||
|
var result float64
|
||||||
|
switch token {
|
||||||
|
case "+":
|
||||||
|
result = a + b
|
||||||
|
case "-":
|
||||||
|
result = a - b
|
||||||
|
case "*":
|
||||||
|
result = a * b
|
||||||
|
case "/":
|
||||||
|
if b == 0 {
|
||||||
|
log.Warnf(ctx, "[beancount_amount_expression_evaluator.evaluatePostfixExpr] cannot evaluate expression \"%s\", because division by zero", strings.Join(tokens, " "))
|
||||||
|
return 0, errs.ErrInvalidAmountExpression
|
||||||
|
}
|
||||||
|
result = a / b
|
||||||
|
}
|
||||||
|
|
||||||
|
// push the result back to the stack
|
||||||
|
stack = append(stack, result)
|
||||||
|
default: // operands
|
||||||
|
num, err := strconv.ParseFloat(token, 64)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf(ctx, "[beancount_amount_expression_evaluator.evaluatePostfixExpr] cannot evaluate expression \"%s\", because containing invalid number", strings.Join(tokens, " "))
|
||||||
|
return 0, errs.ErrInvalidAmountExpression
|
||||||
|
}
|
||||||
|
|
||||||
|
stack = append(stack, num)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(stack) != 1 {
|
||||||
|
log.Warnf(ctx, "[beancount_amount_expression_evaluator.evaluatePostfixExpr] cannot evaluate expression \"%s\", because missing operator", strings.Join(tokens, " "))
|
||||||
|
return 0, errs.ErrInvalidAmountExpression
|
||||||
|
}
|
||||||
|
|
||||||
|
return stack[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func evaluateBeancountAmountExpression(ctx core.Context, expr string) (string, error) {
|
||||||
|
if expr == "" {
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
postfixExprTokens, err := toPostfixExprTokens(ctx, expr)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
result, err := evaluatePostfixExpr(ctx, postfixExprTokens)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
return fmt.Sprintf("%.2f", result), nil
|
||||||
|
}
|
||||||
@@ -0,0 +1,216 @@
|
|||||||
|
package beancount
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestToPostfixExprTokens_ValidExpression(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
result, err := toPostfixExprTokens(context, "1+2")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, []string{"1", "2", "+"}, result)
|
||||||
|
|
||||||
|
result, err = toPostfixExprTokens(context, "3-4")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, []string{"3", "4", "-"}, result)
|
||||||
|
|
||||||
|
result, err = toPostfixExprTokens(context, "5*6")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, []string{"5", "6", "*"}, result)
|
||||||
|
|
||||||
|
result, err = toPostfixExprTokens(context, "8/2")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, []string{"8", "2", "/"}, result)
|
||||||
|
|
||||||
|
result, err = toPostfixExprTokens(context, "1+2*3-(4/2)")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, []string{"1", "2", "3", "*", "+", "4", "2", "/", "-"}, result)
|
||||||
|
|
||||||
|
result, err = toPostfixExprTokens(context, "1 + 2 * 3")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, []string{"1", "2", "3", "*", "+"}, result)
|
||||||
|
|
||||||
|
result, err = toPostfixExprTokens(context, "-1+2")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, []string{"-1", "2", "+"}, result)
|
||||||
|
|
||||||
|
result, err = toPostfixExprTokens(context, "1.5+2.3")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, []string{"1.5", "2.3", "+"}, result)
|
||||||
|
|
||||||
|
result, err = toPostfixExprTokens(context, "(1+2)-3")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, []string{"1", "2", "+", "3", "-"}, result)
|
||||||
|
|
||||||
|
result, err = toPostfixExprTokens(context, "2*-3-3/-2")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, []string{"2", "-3", "*", "3", "-2", "/", "-"}, result)
|
||||||
|
|
||||||
|
result, err = toPostfixExprTokens(context, "-1.2-3.4*(-5.6/7.8*(9.0-1.2))")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, []string{"-1.2", "3.4", "-5.6", "7.8", "/", "9.0", "1.2", "-", "*", "*", "-"}, result)
|
||||||
|
|
||||||
|
result, err = toPostfixExprTokens(context, "((((((1+2)*(3+4))))))")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, []string{"1", "2", "+", "3", "4", "+", "*"}, result)
|
||||||
|
|
||||||
|
result, err = toPostfixExprTokens(context, "(((())))")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, []string{}, result)
|
||||||
|
|
||||||
|
result, err = toPostfixExprTokens(context, "+-*/")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, []string{"-", "*", "/", "+"}, result)
|
||||||
|
|
||||||
|
result, err = toPostfixExprTokens(context, "")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, []string{}, result)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestToPostfixExprTokens_InvalidExpression(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
_, err := toPostfixExprTokens(context, "1=2")
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = toPostfixExprTokens(context, "(1")
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = toPostfixExprTokens(context, "2)")
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = toPostfixExprTokens(context, "((((1+2)))")
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = toPostfixExprTokens(context, ")(")
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEvaluatePostfixExpr_ValidExpression(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
result, err := evaluatePostfixExpr(context, []string{"1", "2", "+"})
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, float64(3), result)
|
||||||
|
|
||||||
|
result, err = evaluatePostfixExpr(context, []string{"5", "3", "-"})
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, float64(2), result)
|
||||||
|
|
||||||
|
result, err = evaluatePostfixExpr(context, []string{"4", "3", "*"})
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, float64(12), result)
|
||||||
|
|
||||||
|
result, err = evaluatePostfixExpr(context, []string{"6", "2", "/"})
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, float64(3), result)
|
||||||
|
|
||||||
|
result, err = evaluatePostfixExpr(context, []string{"1", "2", "3", "*", "+", "4", "2", "/", "-"})
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, float64(5), result)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEvaluatePostfixExpr_InvalidExpression(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
_, err := evaluatePostfixExpr(context, []string{"1", "0", "/"})
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = evaluatePostfixExpr(context, []string{"1", "+"})
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = evaluatePostfixExpr(context, []string{"1", "="})
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = evaluatePostfixExpr(context, []string{"1", "("})
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = evaluatePostfixExpr(context, []string{"1", ")"})
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = evaluatePostfixExpr(context, []string{"1", "2", "+", "3"})
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = evaluatePostfixExpr(context, []string{"abc"})
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEvaluateBeancountAmountExpression_ValidExpression(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
result, err := evaluateBeancountAmountExpression(context, "")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, "", result)
|
||||||
|
|
||||||
|
result, err = evaluateBeancountAmountExpression(context, "1+2")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, "3.00", result)
|
||||||
|
|
||||||
|
result, err = evaluateBeancountAmountExpression(context, "(1+2)*3")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, "9.00", result)
|
||||||
|
|
||||||
|
result, err = evaluateBeancountAmountExpression(context, "-1+2")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, "1.00", result)
|
||||||
|
|
||||||
|
result, err = evaluateBeancountAmountExpression(context, "1.5+2.5")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, "4.00", result)
|
||||||
|
|
||||||
|
result, err = evaluateBeancountAmountExpression(context, "1+2*3-(4/2)")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, "5.00", result)
|
||||||
|
|
||||||
|
result, err = evaluateBeancountAmountExpression(context, "2*-3-3/-2")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, "-4.50", result)
|
||||||
|
|
||||||
|
result, err = evaluateBeancountAmountExpression(context, "-1.2-3.4*(-5.6/7.8*(9.0-1.2))")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, "17.84", result)
|
||||||
|
|
||||||
|
result, err = evaluateBeancountAmountExpression(context, "(((2+3)))*(((((-5+7)))))")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, "10.00", result)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEvaluateBeancountAmountExpression_InvalidExpression(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
_, err := evaluateBeancountAmountExpression(context, "1++2")
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = evaluateBeancountAmountExpression(context, "1^2")
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = evaluateBeancountAmountExpression(context, "+-*/")
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = evaluateBeancountAmountExpression(context, "a+b")
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = evaluateBeancountAmountExpression(context, "1/0")
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = evaluateBeancountAmountExpression(context, "1+(2*3")
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = evaluateBeancountAmountExpression(context, "1+2*3)")
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = evaluateBeancountAmountExpression(context, "1+((((2*3)))")
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = evaluateBeancountAmountExpression(context, "1+2(3)")
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
|
||||||
|
_, err = evaluateBeancountAmountExpression(context, "1)*(2")
|
||||||
|
assert.Equal(t, errs.ErrInvalidAmountExpression, err)
|
||||||
|
}
|
||||||
@@ -0,0 +1,93 @@
|
|||||||
|
package beancount
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
// beancountEquityAccountNameOpeningBalance is the conventional leaf name of the
// equity account used for opening balances (e.g. "Equity:Opening-Balances").
const beancountEquityAccountNameOpeningBalance = "Opening-Balances"

// beancountDirective represents the Beancount directive
type beancountDirective string

// Beancount directives
const (
	beancountDirectiveOpen                  beancountDirective = "open"
	beancountDirectiveClose                 beancountDirective = "close"
	beancountDirectiveTransaction           beancountDirective = "txn"
	beancountDirectiveCompletedTransaction  beancountDirective = "*"
	beancountDirectiveInCompleteTransaction beancountDirective = "!"
	beancountDirectivePaddingTransaction    beancountDirective = "P"
	beancountDirectiveCommodity             beancountDirective = "commodity"
	beancountDirectivePrice                 beancountDirective = "price"
	beancountDirectiveNote                  beancountDirective = "note"
	beancountDirectiveDocument              beancountDirective = "document"
	beancountDirectiveEvent                 beancountDirective = "event"
	beancountDirectiveBalance               beancountDirective = "balance"
	beancountDirectivePad                   beancountDirective = "pad"
	beancountDirectiveQuery                 beancountDirective = "query"
	beancountDirectiveCustom                beancountDirective = "custom"
)

// beancountAccountType represents the Beancount account type
type beancountAccountType byte

// Beancount account types
const (
	beancountUnknownAccountType     beancountAccountType = 0
	beancountAssetsAccountType      beancountAccountType = 1
	beancountLiabilitiesAccountType beancountAccountType = 2
	beancountEquityAccountType      beancountAccountType = 3
	beancountIncomeAccountType      beancountAccountType = 4
	beancountExpensesAccountType    beancountAccountType = 5
)

// beancountData defines the structure of beancount data
type beancountData struct {
	accounts     map[string]*beancountAccount // accounts keyed by account name
	transactions []*beancountTransactionEntry // transactions in file order
}

// beancountAccount defines the structure of beancount account
type beancountAccount struct {
	name        string               // full account name (e.g. "Assets:Cash")
	accountType beancountAccountType // root account type resolved from the name
	openDate    string               // date of the "open" directive, if any
	closeDate   string               // date of the "close" directive, if any
}

// beancountTransactionEntry defines the structure of beancount transaction entry
type beancountTransactionEntry struct {
	date      string             // transaction date as written in the file
	directive beancountDirective // transaction flag/directive (txn, *, !, P)
	payee     string
	narration string
	postings  []*beancountPosting
	tags      []string
	links     []string
	metadata  map[string]string
}

// beancountPosting defines the structure of beancount transaction posting
type beancountPosting struct {
	account            string
	amount             string
	originalAmount     string // amount as written before evaluation — TODO confirm against reader
	commodity          string
	totalCost          string // total cost ("@@"-style) — presumably set by the posting reader; verify
	totalCostCommodity string
	price              string // per-unit price ("@"-style) — presumably set by the posting reader; verify
	priceCommodity     string
	metadata           map[string]string
}
|
||||||
|
|
||||||
|
func (a *beancountAccount) isOpeningBalanceEquityAccount() bool {
|
||||||
|
if a.accountType != beancountEquityAccountType {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
nameItems := strings.Split(a.name, string(beancountMetadataKeySuffix))
|
||||||
|
|
||||||
|
if len(nameItems) != 2 {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return nameItems[1] == beancountEquityAccountNameOpeningBalance
|
||||||
|
}
|
||||||
@@ -0,0 +1,655 @@
|
|||||||
|
package beancount
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/csv"
|
||||||
|
"io"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/text/encoding/unicode"
|
||||||
|
"golang.org/x/text/transform"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/log"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Default Beancount root account type names, used unless the file overrides
// them via "option" directives.
const (
	beancountDefaultAssetsAccountTypeName      = "Assets"
	beancountDefaultLiabilitiesAccountTypeName = "Liabilities"
	beancountDefaultEquityAccountTypeName      = "Equity"
	beancountDefaultIncomeAccountTypeName      = "Income"
	beancountDefaultExpenseAccountTypeName     = "Expenses"
)

// Beancount option names that rename the root account types.
const (
	beancountOptionAssetsAccountTypeName      = "name_assets"
	beancountOptionLiabilitiesAccountTypeName = "name_liabilities"
	beancountOptionEquityAccountTypeName      = "name_equity"
	beancountOptionIncomeAccountTypeName      = "name_income"
	beancountOptionExpenseAccountTypeName     = "name_expenses"
)

// Syntax markers used when scanning Beancount lines.
const (
	beancountCommentPrefix             = ';'
	beancountAccountNameItemsSeparator = ":"
	beancountMetadataKeySuffix         = ':'
	beancountPricePrefix               = '@'
	beancountLinkPrefix                = '^'
	beancountTagPrefix                 = '#'
)
|
||||||
|
|
||||||
|
// beancountDataReader defines the structure of Beancount data reader
type beancountDataReader struct {
	accountTypeNameMap         map[string]beancountAccountType // root account type name -> account type (may be customized via options)
	accountTypeNameReversedMap map[beancountAccountType]string // account type -> root account type name
	allData                    [][]string                      // file content split into lines of whitespace-separated items
}
|
||||||
|
|
||||||
|
// read returns the imported Beancount data.
// It scans allData line by line, dispatching on the first item of each line:
// top-level keywords (include/plugin/option/pushtag/poptag), indented lines
// (postings and metadata of the current transaction), dated directive lines,
// and anything else (which merely flushes the in-progress transaction).
// Reference: https://beancount.github.io/docs/beancount_language_syntax.html
func (r *beancountDataReader) read(ctx core.Context) (*beancountData, error) {
	if len(r.allData) < 1 {
		return nil, errs.ErrNotFoundTransactionDataInFile
	}

	data := &beancountData{
		accounts:     make(map[string]*beancountAccount),
		transactions: make([]*beancountTransactionEntry, 0),
	}

	var err error
	// currentTransactionEntry / currentTransactionPosting hold the transaction
	// and posting being built across lines; they are flushed into data by
	// updateCurrentState whenever a non-transaction line is encountered.
	var currentTransactionEntry *beancountTransactionEntry
	var currentTransactionPosting *beancountPosting
	// currentTags holds tags accumulated by pushtag/poptag, applied to all
	// transactions read while active.
	var currentTags []string

	for i := 0; i < len(r.allData); i++ {
		items := r.allData[i]

		if len(items) == 0 || (len(items) == 1 && len(items[0]) == 0) || (len(r.getNotEmptyItemByIndex(items, 0)) > 0 && r.getNotEmptyItemByIndex(items, 0)[0] == beancountCommentPrefix) { // skip empty or comment lines
			continue
		}

		if r.getNotEmptyItemsCount(items) < 2 {
			log.Warnf(ctx, "[beancount_data_reader.read] cannot parse line#%d \"%s\", because not enough items in line", i, strings.Join(items, " "))
			continue
		}

		firstItem := items[0]

		if firstItem == "include" { // not support include directive
			return nil, errs.ErrBeancountFileNotSupportInclude
		} else if firstItem == "plugin" { // skip plugin directive lines
			currentTransactionEntry, currentTransactionPosting = r.updateCurrentState(data, currentTransactionEntry, currentTransactionPosting)
			continue
		} else if firstItem == "option" {
			currentTransactionEntry, currentTransactionPosting = r.updateCurrentState(data, currentTransactionEntry, currentTransactionPosting)
			r.readAndSetOption(ctx, i, items)
			continue
		} else if firstItem == "pushtag" {
			currentTransactionEntry, currentTransactionPosting = r.updateCurrentState(data, currentTransactionEntry, currentTransactionPosting)
			currentTags = r.readAndSetTags(ctx, i, items, currentTags, true)
			continue
		} else if firstItem == "poptag" {
			currentTransactionEntry, currentTransactionPosting = r.updateCurrentState(data, currentTransactionEntry, currentTransactionPosting)
			currentTags = r.readAndSetTags(ctx, i, items, currentTags, false)
			continue
		}

		if len(firstItem) == 0 { // original line has space prefix, maybe transaction posting or metadata line
			actualFirstItem := r.getNotEmptyItemByIndex(items, 0)

			if len(actualFirstItem) == 0 { // skip empty lines
				continue
			}

			// account names start with an uppercase letter; '!' flags an
			// incomplete posting
			if ('A' <= actualFirstItem[0] && actualFirstItem[0] <= 'Z') || actualFirstItem[0] == '!' { // transaction posting
				// a new posting line ends the previous posting, if any
				if currentTransactionEntry != nil && currentTransactionPosting != nil {
					currentTransactionEntry.postings = append(currentTransactionEntry.postings, currentTransactionPosting)
					currentTransactionPosting = nil
				}

				currentTransactionPosting, err = r.readTransactionPostingLine(ctx, i, items, data, actualFirstItem[0] == '!')

				if err != nil {
					return nil, err
				}
			} else if 'a' <= actualFirstItem[0] && actualFirstItem[0] <= 'z' { // metadata
				metadata := r.readTransactionMetadataLine(ctx, i, items)

				if metadata == nil {
					continue
				}

				metadataKey := metadata[0]
				metadataValue := metadata[1]

				// metadata attaches to the current posting if one is open,
				// otherwise to the transaction itself; first value wins for
				// duplicate keys
				if currentTransactionPosting != nil {
					if _, exists := currentTransactionPosting.metadata[metadataKey]; !exists {
						currentTransactionPosting.metadata[metadataKey] = metadataValue
					}
				} else if currentTransactionEntry != nil {
					if _, exists := currentTransactionEntry.metadata[metadataKey]; !exists {
						currentTransactionEntry.metadata[metadataKey] = metadataValue
					}
				}
			} else {
				log.Warnf(ctx, "[beancount_data_reader.read] cannot parse line#%d \"%s\", because line prefix is invalid", i, strings.Join(items, " "))
				currentTransactionEntry, currentTransactionPosting = r.updateCurrentState(data, currentTransactionEntry, currentTransactionPosting)
				continue
			}
		} else if _, err := utils.ParseFromLongDateFirstTime(firstItem, 0); err == nil { // original line has date as first item
			// a dated directive always terminates the in-progress transaction
			currentTransactionEntry, currentTransactionPosting = r.updateCurrentState(data, currentTransactionEntry, currentTransactionPosting)

			directive := r.getNotEmptyItemByIndex(items, 1)

			if directive == string(beancountDirectiveOpen) ||
				directive == string(beancountDirectiveClose) {
				_, err := r.readAccountLine(ctx, i, items, firstItem, beancountDirective(directive), data)

				if err != nil {
					return nil, err
				}
			} else if directive == string(beancountDirectiveTransaction) ||
				directive == string(beancountDirectiveCompletedTransaction) ||
				directive == string(beancountDirectiveInCompleteTransaction) ||
				directive == string(beancountDirectivePaddingTransaction) {
				currentTransactionEntry = r.readTransactionLine(ctx, i, items, firstItem, beancountDirective(directive), currentTags)
			} else if directive == string(beancountDirectiveCommodity) ||
				directive == string(beancountDirectivePrice) ||
				directive == string(beancountDirectiveNote) ||
				directive == string(beancountDirectiveDocument) ||
				directive == string(beancountDirectiveEvent) ||
				directive == string(beancountDirectiveBalance) ||
				directive == string(beancountDirectivePad) ||
				directive == string(beancountDirectiveQuery) ||
				directive == string(beancountDirectiveCustom) { // skip commodity / price / note / document / event / balance / pad / query / custom lines
				continue
			} else {
				log.Warnf(ctx, "[beancount_data_reader.read] cannot parse line#%d \"%s\", because directive is unknown", i, strings.Join(items, " "))
				continue
			}
		} else { // first item not start with date or space
			currentTransactionEntry, currentTransactionPosting = r.updateCurrentState(data, currentTransactionEntry, currentTransactionPosting)
			continue
		}
	}

	// flush the final transaction, if the file ended while one was open
	if currentTransactionEntry != nil {
		if currentTransactionPosting != nil {
			currentTransactionEntry.postings = append(currentTransactionEntry.postings, currentTransactionPosting)
			currentTransactionPosting = nil
		}

		data.transactions = append(data.transactions, currentTransactionEntry)
		currentTransactionEntry = nil
	}

	return data, nil
}
|
||||||
|
|
||||||
|
func (r *beancountDataReader) updateCurrentState(data *beancountData, currentTransactionEntry *beancountTransactionEntry, currentTransactionPosting *beancountPosting) (*beancountTransactionEntry, *beancountPosting) {
|
||||||
|
if currentTransactionEntry != nil {
|
||||||
|
if currentTransactionPosting != nil {
|
||||||
|
currentTransactionEntry.postings = append(currentTransactionEntry.postings, currentTransactionPosting)
|
||||||
|
currentTransactionPosting = nil
|
||||||
|
}
|
||||||
|
|
||||||
|
data.transactions = append(data.transactions, currentTransactionEntry)
|
||||||
|
currentTransactionEntry = nil
|
||||||
|
currentTransactionPosting = nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return currentTransactionEntry, currentTransactionPosting
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *beancountDataReader) readAndSetOption(ctx core.Context, lineIndex int, items []string) {
|
||||||
|
if r.getNotEmptyItemsCount(items) != 3 {
|
||||||
|
log.Warnf(ctx, "[beancount_data_reader.readAndSetOption] cannot parse account type name option line#%d \"%s\", because items count in line not correct", lineIndex, strings.Join(items, " "))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
optionName := r.getNotEmptyItemByIndex(items, 1)
|
||||||
|
optionValue := r.getNotEmptyItemByIndex(items, 2)
|
||||||
|
|
||||||
|
switch optionName {
|
||||||
|
case beancountOptionAssetsAccountTypeName:
|
||||||
|
r.setAccountTypeNameMap(beancountAssetsAccountType, optionValue)
|
||||||
|
break
|
||||||
|
case beancountOptionLiabilitiesAccountTypeName:
|
||||||
|
r.setAccountTypeNameMap(beancountLiabilitiesAccountType, optionValue)
|
||||||
|
break
|
||||||
|
case beancountOptionEquityAccountTypeName:
|
||||||
|
r.setAccountTypeNameMap(beancountEquityAccountType, optionValue)
|
||||||
|
break
|
||||||
|
case beancountOptionIncomeAccountTypeName:
|
||||||
|
r.setAccountTypeNameMap(beancountIncomeAccountType, optionValue)
|
||||||
|
break
|
||||||
|
case beancountOptionExpenseAccountTypeName:
|
||||||
|
r.setAccountTypeNameMap(beancountExpensesAccountType, optionValue)
|
||||||
|
break
|
||||||
|
default:
|
||||||
|
log.Warnf(ctx, "[beancount_data_reader.readAndSetOption] skip option line#%d \"%s\"", lineIndex, strings.Join(items, " "))
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *beancountDataReader) readAndSetTags(ctx core.Context, lineIndex int, items []string, currentTags []string, pushTag bool) []string {
|
||||||
|
if r.getNotEmptyItemsCount(items) != 2 {
|
||||||
|
log.Warnf(ctx, "[beancount_data_reader.readAndSetTags] cannot parse push/pop tag line#%d \"%s\", because items count in line not correct", lineIndex, strings.Join(items, " "))
|
||||||
|
return currentTags
|
||||||
|
}
|
||||||
|
|
||||||
|
tag := r.getNotEmptyItemByIndex(items, 1)
|
||||||
|
|
||||||
|
if len(tag) < 2 || tag[0] != beancountTagPrefix {
|
||||||
|
log.Warnf(ctx, "[beancount_data_reader.readAndSetTags] cannot parse push/pop tag line#%d \"%s\", because tag is invalid", lineIndex, strings.Join(items, " "))
|
||||||
|
return currentTags
|
||||||
|
}
|
||||||
|
|
||||||
|
tag = tag[1:]
|
||||||
|
|
||||||
|
if pushTag {
|
||||||
|
for i := 0; i < len(currentTags); i++ {
|
||||||
|
if currentTags[i] == tag {
|
||||||
|
return currentTags
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return append(currentTags, tag)
|
||||||
|
} else { // pop tag
|
||||||
|
for i := 0; i < len(currentTags); i++ {
|
||||||
|
if currentTags[i] == tag {
|
||||||
|
return append(currentTags[:i], currentTags[i+1:]...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return currentTags
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *beancountDataReader) setAccountTypeNameMap(accountType beancountAccountType, accountTypeName string) {
|
||||||
|
delete(r.accountTypeNameMap, r.accountTypeNameReversedMap[accountType])
|
||||||
|
r.accountTypeNameMap[accountTypeName] = accountType
|
||||||
|
r.accountTypeNameReversedMap[accountType] = accountTypeName
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *beancountDataReader) readAccountLine(ctx core.Context, lineIndex int, items []string, date string, directive beancountDirective, data *beancountData) (*beancountAccount, error) {
|
||||||
|
if r.getNotEmptyItemsCount(items) < 3 {
|
||||||
|
log.Warnf(ctx, "[beancount_data_reader.parseAccount] cannot parse account line#%d \"%s\", because items count in line not correct", lineIndex, strings.Join(items, " "))
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var err error
|
||||||
|
accountName := r.getNotEmptyItemByIndex(items, 2)
|
||||||
|
account, exists := data.accounts[accountName]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
account, err = r.createAccount(ctx, data, accountName)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if directive == beancountDirectiveOpen {
|
||||||
|
account.openDate = date
|
||||||
|
return account, nil
|
||||||
|
} else if directive == beancountDirectiveClose {
|
||||||
|
account.closeDate = date
|
||||||
|
return account, nil
|
||||||
|
} else {
|
||||||
|
log.Warnf(ctx, "[beancount_data_reader.parseAccount] cannot parse account line#%d \"%s\", because directive is invalid", lineIndex, strings.Join(items, " "))
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *beancountDataReader) createAccount(ctx core.Context, data *beancountData, accountName string) (*beancountAccount, error) {
|
||||||
|
account := &beancountAccount{
|
||||||
|
name: accountName,
|
||||||
|
accountType: beancountUnknownAccountType,
|
||||||
|
}
|
||||||
|
|
||||||
|
accountNameItems := strings.Split(accountName, beancountAccountNameItemsSeparator)
|
||||||
|
|
||||||
|
if len(accountNameItems) > 1 {
|
||||||
|
accountType, exists := r.accountTypeNameMap[accountNameItems[0]]
|
||||||
|
|
||||||
|
if exists {
|
||||||
|
account.accountType = accountType
|
||||||
|
} else {
|
||||||
|
log.Warnf(ctx, "[beancount_data_reader.createAccount] cannot parse account \"%s\", because account type \"%s\" is invalid", accountName, accountNameItems[0])
|
||||||
|
return nil, errs.ErrInvalidBeancountFile
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
data.accounts[accountName] = account
|
||||||
|
return account, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// readTransactionLine parses a transaction header line of the form
// "YYYY-MM-DD [txn|Flag] [[Payee] Narration] [#tag] [^link]" and returns the
// resulting transaction entry. The inherited tags (from pushtag) are copied
// first; inline #tags are appended after them, deduplicated against the
// inherited set. Items after a ";" comment marker are ignored.
// ctx and lineIndex are currently unused by this method.
func (r *beancountDataReader) readTransactionLine(ctx core.Context, lineIndex int, items []string, date string, directive beancountDirective, tags []string) *beancountTransactionEntry {
	transactionEntry := &beancountTransactionEntry{
		date:      date,
		directive: directive,
		tags:      make([]string, 0),
		links:     make([]string, 0),
		metadata:  make(map[string]string),
	}

	// copy the currently pushed tags into this transaction
	transactionEntry.tags = append(transactionEntry.tags, tags...)

	// set of tags already attached, used to deduplicate inline #tags
	allTags := make(map[string]bool, len(transactionEntry.tags))

	for _, tag := range transactionEntry.tags {
		allTags[tag] = true
	}

	// YYYY-MM-DD [txn|Flag] [[Payee] Narration] [#tag] [ˆlink]
	// items[0] is the date and items[1] the directive/flag, so the optional
	// payee/narration part starts at index 2 and (initially) runs to the end
	payeeNarrationFirstIndex := 2
	payeeNarrationLastIndex := len(items) - 1

	// parse remain items
	for i := payeeNarrationFirstIndex; i < len(items); i++ {
		item := items[i]

		if len(item) == 0 {
			continue
		}

		if item[0] == beancountCommentPrefix { // ; comment
			// everything from the comment onwards is ignored; the
			// payee/narration part ends before it
			if i-1 < payeeNarrationLastIndex {
				payeeNarrationLastIndex = i - 1
			}

			break
		}

		if item[0] == beancountTagPrefix { // [#tag]
			tagName := item[1:]

			if _, exists := allTags[tagName]; !exists {
				transactionEntry.tags = append(transactionEntry.tags, tagName)
				allTags[tagName] = true
			}

			// tags follow the payee/narration part, so it ends before the
			// first tag encountered
			if i-1 < payeeNarrationLastIndex {
				payeeNarrationLastIndex = i - 1
			}
		} else if item[0] == beancountLinkPrefix { // [ˆlink]
			transactionEntry.links = append(transactionEntry.links, item[1:])

			if i-1 < payeeNarrationLastIndex {
				payeeNarrationLastIndex = i - 1
			}
		}
	}

	// two (or more) remaining items mean payee then narration;
	// exactly one remaining item is narration only
	if payeeNarrationLastIndex-payeeNarrationFirstIndex >= 1 {
		transactionEntry.payee = items[payeeNarrationFirstIndex]
		transactionEntry.narration = items[payeeNarrationFirstIndex+1]
	} else if payeeNarrationLastIndex-payeeNarrationFirstIndex >= 0 {
		transactionEntry.narration = items[payeeNarrationFirstIndex]
	}

	return transactionEntry
}
|
||||||
|
|
||||||
|
// readTransactionPostingLine parses one posting line of a transaction in the
// form "[Flag] Account Amount [{Cost}] [@ Price]" (with optional
// "@@ TotalCost" or "@ Price" suffixes) and returns the resulting posting.
// The posting's account is created in data.accounts on first sight. Returns
// (nil, nil) when the line does not contain enough items, and a specific
// error for a missing account name, an invalid amount, or an invalid
// commodity.
func (r *beancountDataReader) readTransactionPostingLine(ctx core.Context, lineIndex int, items []string, data *beancountData, hasFlag bool) (*beancountPosting, error) {
	// [Flag] Account Amount [{Cost}] [@ Price]
	accountNameExpectedIndex := 0

	if hasFlag {
		// the optional flag occupies the first item, shifting the account name
		accountNameExpectedIndex = 1
	}

	if r.getNotEmptyItemsCount(items) <= accountNameExpectedIndex {
		log.Warnf(ctx, "[beancount_data_reader.readTransactionPostingLine] cannot parse transaction posting line#%d \"%s\", because items count in line not correct", lineIndex, strings.Join(items, " "))
		return nil, nil
	}

	accountName, accountNameActualIndex := r.getNotEmptyItemAndIndexByIndex(items, accountNameExpectedIndex)

	if accountName == "" || accountNameActualIndex < 0 {
		log.Warnf(ctx, "[beancount_data_reader.readTransactionPostingLine] cannot parse transaction posting line#%d \"%s\", because missing account name", lineIndex, strings.Join(items, " "))
		return nil, errs.ErrMissingAccountData
	}

	transactionPositing := &beancountPosting{
		account:  accountName,
		metadata: make(map[string]string),
	}

	// the amount may span several items when it is an arithmetic expression
	amountActualLastIndex := -1
	transactionPositing.originalAmount, amountActualLastIndex = r.getOriginalAmountAndLastIndexFromIndex(items, accountNameActualIndex+1)

	if transactionPositing.originalAmount == "" || amountActualLastIndex < 0 {
		log.Warnf(ctx, "[beancount_data_reader.readTransactionPostingLine] cannot parse transaction posting line#%d \"%s\", because missing amount", lineIndex, strings.Join(items, " "))
		return nil, errs.ErrAmountInvalid
	}

	// reduce an arithmetic amount expression to its final value
	finalAmount, err := evaluateBeancountAmountExpression(ctx, transactionPositing.originalAmount)

	if err != nil {
		log.Warnf(ctx, "[beancount_data_reader.readTransactionPostingLine] cannot evaluate amount expression in line#%d \"%s\", because %s", lineIndex, strings.Join(items, " "), err.Error())
		return nil, errs.ErrAmountInvalid
	} else {
		transactionPositing.amount = finalAmount
	}

	// the commodity is the first non-empty item after the amount
	commodityActualIndex := -1
	transactionPositing.commodity, commodityActualIndex = r.getNotEmptyItemAndIndexFromIndex(items, amountActualLastIndex+1)

	if transactionPositing.commodity == "" || commodityActualIndex < 0 {
		log.Warnf(ctx, "[beancount_data_reader.readTransactionPostingLine] cannot parse transaction posting line#%d \"%s\", because missing commodity", lineIndex, strings.Join(items, " "))
		return nil, errs.ErrInvalidBeancountFile
	}

	if strings.ToUpper(transactionPositing.commodity) != transactionPositing.commodity { // The syntax for a currency is a word all in capital letters
		log.Warnf(ctx, "[beancount_data_reader.readTransactionPostingLine] cannot parse transaction posting line#%d \"%s\", because commodity name is not capital letters", lineIndex, strings.Join(items, " "))
		return nil, errs.ErrInvalidBeancountFile
	}

	// parse remain items
	if commodityActualIndex > 0 {
		for i := commodityActualIndex + 1; i < len(items); i++ {
			item := items[i]

			if len(item) == 0 {
				continue
			}

			if item[0] == beancountCommentPrefix { // ; comment
				break
			}

			if len(item) == 2 && item[0] == beancountPricePrefix && item[1] == beancountPricePrefix { // [@@ TotalCost]
				totalCost, totalCostActualIndex := r.getNotEmptyItemAndIndexFromIndex(items, i+1)

				if totalCostActualIndex > 0 {
					transactionPositing.totalCost = totalCost
					// skip past the consumed total cost item
					i = totalCostActualIndex

					totalCostCommodity, totalCostCommodityActualIndex := r.getNotEmptyItemAndIndexFromIndex(items, totalCostActualIndex+1)

					if totalCostCommodityActualIndex > 0 {
						transactionPositing.totalCostCommodity = totalCostCommodity
						i = totalCostCommodityActualIndex
					}
				}
			} else if len(item) == 1 && item[0] == beancountPricePrefix { // [@ Price]
				price, priceActualIndex := r.getNotEmptyItemAndIndexFromIndex(items, i+1)

				if priceActualIndex > 0 {
					transactionPositing.price = price
					// skip past the consumed price item
					i = priceActualIndex

					priceCommodity, priceCommodityActualIndex := r.getNotEmptyItemAndIndexFromIndex(items, priceActualIndex+1)

					if priceCommodityActualIndex > 0 {
						transactionPositing.priceCommodity = priceCommodity
						i = priceCommodityActualIndex
					}
				}
			}
		}
	}

	// ensure the posting's account exists in the parsed data
	if transactionPositing.account != "" {
		_, exists := data.accounts[transactionPositing.account]

		if !exists {
			_, err := r.createAccount(ctx, data, transactionPositing.account)

			if err != nil {
				return nil, err
			}
		}
	}

	return transactionPositing, nil
}
|
||||||
|
|
||||||
|
func (r *beancountDataReader) readTransactionMetadataLine(ctx core.Context, lineIndex int, items []string) []string {
|
||||||
|
key := r.getNotEmptyItemByIndex(items, 0)
|
||||||
|
value := r.getNotEmptyItemByIndex(items, 1)
|
||||||
|
|
||||||
|
if key == "" || value == "" {
|
||||||
|
log.Warnf(ctx, "[beancount_data_reader.readTransactionMetadataLine] cannot parse metadata line#%d \"%s\", because key or value is empty", lineIndex, strings.Join(items, " "))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(key) == 0 || key[len(key)-1] != beancountMetadataKeySuffix {
|
||||||
|
log.Warnf(ctx, "[beancount_data_reader.readTransactionMetadataLine] cannot parse metadata line#%d \"%s\", because key is invalid correct", lineIndex, strings.Join(items, " "))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
key = key[:len(key)-1]
|
||||||
|
|
||||||
|
return []string{key, value}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *beancountDataReader) getNotEmptyItemByIndex(items []string, index int) string {
|
||||||
|
item, _ := r.getNotEmptyItemAndIndexByIndex(items, index)
|
||||||
|
return item
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *beancountDataReader) getNotEmptyItemAndIndexByIndex(items []string, index int) (string, int) {
|
||||||
|
count := -1
|
||||||
|
|
||||||
|
for i := 0; i < len(items); i++ {
|
||||||
|
item := items[i]
|
||||||
|
|
||||||
|
if len(item) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
count++
|
||||||
|
|
||||||
|
if count == index {
|
||||||
|
return items[i], i
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return "", -1
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *beancountDataReader) getNotEmptyItemAndIndexFromIndex(items []string, startIndex int) (string, int) {
|
||||||
|
for i := startIndex; i < len(items); i++ {
|
||||||
|
item := items[i]
|
||||||
|
|
||||||
|
if len(item) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
return item, i
|
||||||
|
}
|
||||||
|
|
||||||
|
return "", -1
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *beancountDataReader) getNotEmptyItemsCount(items []string) int {
|
||||||
|
count := 0
|
||||||
|
|
||||||
|
for i := 0; i < len(items); i++ {
|
||||||
|
if len(items[i]) > 0 {
|
||||||
|
count++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return count
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *beancountDataReader) getOriginalAmountAndLastIndexFromIndex(items []string, startIndex int) (string, int) {
|
||||||
|
amountBuilder := strings.Builder{}
|
||||||
|
lastIndex := -1
|
||||||
|
|
||||||
|
for i := startIndex; i < len(items); i++ {
|
||||||
|
item := items[i]
|
||||||
|
|
||||||
|
if len(item) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
valid := true
|
||||||
|
|
||||||
|
// The Amount in “Postings” can also be an arithmetic expression using ( ) * / - +
|
||||||
|
for j := 0; j < len(item); j++ {
|
||||||
|
if !(item[j] >= '0' && item[j] <= '9') && item[j] != '.' && item[j] != '(' && item[j] != ')' &&
|
||||||
|
item[j] != '*' && item[j] != '/' && item[j] != '-' && item[j] != '+' {
|
||||||
|
valid = false
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !valid {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
if amountBuilder.Len() > 0 {
|
||||||
|
amountBuilder.WriteRune(' ')
|
||||||
|
}
|
||||||
|
|
||||||
|
amountBuilder.WriteString(item)
|
||||||
|
lastIndex = i
|
||||||
|
}
|
||||||
|
|
||||||
|
return amountBuilder.String(), lastIndex
|
||||||
|
}
|
||||||
|
|
||||||
|
func createNewBeancountDataReader(ctx core.Context, data []byte) (*beancountDataReader, error) {
|
||||||
|
fallback := unicode.UTF8.NewDecoder()
|
||||||
|
reader := transform.NewReader(bytes.NewReader(data), unicode.BOMOverride(fallback))
|
||||||
|
csvReader := csv.NewReader(reader)
|
||||||
|
csvReader.Comma = ' '
|
||||||
|
csvReader.FieldsPerRecord = -1
|
||||||
|
|
||||||
|
allData := make([][]string, 0)
|
||||||
|
|
||||||
|
for {
|
||||||
|
items, err := csvReader.Read()
|
||||||
|
|
||||||
|
if err == io.EOF {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[beancount_data_reader.createNewBeancountDataReader] cannot parse data, because %s", err.Error())
|
||||||
|
return nil, errs.ErrInvalidBeancountFile
|
||||||
|
}
|
||||||
|
|
||||||
|
allData = append(allData, items)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &beancountDataReader{
|
||||||
|
accountTypeNameMap: map[string]beancountAccountType{
|
||||||
|
beancountDefaultAssetsAccountTypeName: beancountAssetsAccountType,
|
||||||
|
beancountDefaultLiabilitiesAccountTypeName: beancountLiabilitiesAccountType,
|
||||||
|
beancountDefaultEquityAccountTypeName: beancountEquityAccountType,
|
||||||
|
beancountDefaultIncomeAccountTypeName: beancountIncomeAccountType,
|
||||||
|
beancountDefaultExpenseAccountTypeName: beancountExpensesAccountType,
|
||||||
|
},
|
||||||
|
accountTypeNameReversedMap: map[beancountAccountType]string{
|
||||||
|
beancountAssetsAccountType: beancountDefaultAssetsAccountTypeName,
|
||||||
|
beancountLiabilitiesAccountType: beancountDefaultLiabilitiesAccountTypeName,
|
||||||
|
beancountEquityAccountType: beancountDefaultEquityAccountTypeName,
|
||||||
|
beancountIncomeAccountType: beancountDefaultIncomeAccountTypeName,
|
||||||
|
beancountExpensesAccountType: beancountDefaultExpenseAccountTypeName,
|
||||||
|
},
|
||||||
|
allData: allData,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
@@ -0,0 +1,520 @@
|
|||||||
|
package beancount
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestBeancountDataReaderRead(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"; Test Beancount Data\n"+
|
||||||
|
"option \"name_assets\" \"AssetsAccount\"\n"+
|
||||||
|
"option \"name_liabilities\" \"LiabilitiesAccount\"\n"+
|
||||||
|
"option \"name_equity\" \"EquityAccount\"\n"+
|
||||||
|
"option \"name_income\" \"IncomeAccount\"\n"+
|
||||||
|
"option \"name_expenses\" \"ExpensesAccount\"\n"+
|
||||||
|
"\n"+
|
||||||
|
"2024-01-01 open AssetsAccount:TestAccount\n"+
|
||||||
|
"2024-01-02 open LiabilitiesAccount:TestAccount2\n"+
|
||||||
|
"2024-01-03 open EquityAccount:Opening-Balances\n"+
|
||||||
|
"\n"+
|
||||||
|
"; The following transactions with tag1 and tag2\n"+
|
||||||
|
"pushtag #tag1\n"+
|
||||||
|
"pushtag #tag2\n"+
|
||||||
|
"\n"+
|
||||||
|
"2024-01-05 * \"Payee Name\" \"Foo Bar\" #tag3 #tag4 ^test-link\n"+
|
||||||
|
" IncomeAccount:TestCategory -123.45 CNY\n"+
|
||||||
|
" AssetsAccount:TestAccount 123.45 CNY\n"+
|
||||||
|
"; The following transactions with tag2\n"+
|
||||||
|
"poptag #tag1\n"+
|
||||||
|
"2024-01-06 * \"test\n#test2\" #tag5 #tag6 ^test-link2\n"+
|
||||||
|
" LiabilitiesAccount:TestAccount2 -0.12 USD\n"+
|
||||||
|
" ExpensesAccount:TestCategory2 0.12 USD\n"+
|
||||||
|
"2024-01-07 close AssetsAccount:TestAccount\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
actualData, err := reader.read(context)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 5, len(actualData.accounts))
|
||||||
|
assert.Equal(t, "AssetsAccount:TestAccount", actualData.accounts["AssetsAccount:TestAccount"].name)
|
||||||
|
assert.Equal(t, beancountAssetsAccountType, actualData.accounts["AssetsAccount:TestAccount"].accountType)
|
||||||
|
assert.Equal(t, "2024-01-01", actualData.accounts["AssetsAccount:TestAccount"].openDate)
|
||||||
|
assert.Equal(t, "2024-01-07", actualData.accounts["AssetsAccount:TestAccount"].closeDate)
|
||||||
|
|
||||||
|
assert.Equal(t, "LiabilitiesAccount:TestAccount2", actualData.accounts["LiabilitiesAccount:TestAccount2"].name)
|
||||||
|
assert.Equal(t, beancountLiabilitiesAccountType, actualData.accounts["LiabilitiesAccount:TestAccount2"].accountType)
|
||||||
|
assert.Equal(t, "2024-01-02", actualData.accounts["LiabilitiesAccount:TestAccount2"].openDate)
|
||||||
|
|
||||||
|
assert.Equal(t, 2, len(actualData.transactions))
|
||||||
|
|
||||||
|
assert.Equal(t, "2024-01-05", actualData.transactions[0].date)
|
||||||
|
assert.Equal(t, "Payee Name", actualData.transactions[0].payee)
|
||||||
|
assert.Equal(t, "Foo Bar", actualData.transactions[0].narration)
|
||||||
|
assert.Equal(t, 2, len(actualData.transactions[0].postings))
|
||||||
|
assert.Equal(t, "IncomeAccount:TestCategory", actualData.transactions[0].postings[0].account)
|
||||||
|
assert.Equal(t, "-123.45", actualData.transactions[0].postings[0].amount)
|
||||||
|
assert.Equal(t, "CNY", actualData.transactions[0].postings[0].commodity)
|
||||||
|
assert.Equal(t, "AssetsAccount:TestAccount", actualData.transactions[0].postings[1].account)
|
||||||
|
assert.Equal(t, "123.45", actualData.transactions[0].postings[1].amount)
|
||||||
|
assert.Equal(t, "CNY", actualData.transactions[0].postings[1].commodity)
|
||||||
|
|
||||||
|
assert.Equal(t, 4, len(actualData.transactions[0].tags))
|
||||||
|
assert.Equal(t, actualData.transactions[0].tags[0], "tag1")
|
||||||
|
assert.Equal(t, actualData.transactions[0].tags[1], "tag2")
|
||||||
|
assert.Equal(t, actualData.transactions[0].tags[2], "tag3")
|
||||||
|
assert.Equal(t, actualData.transactions[0].tags[3], "tag4")
|
||||||
|
|
||||||
|
assert.Equal(t, 1, len(actualData.transactions[0].links))
|
||||||
|
assert.Equal(t, actualData.transactions[0].links[0], "test-link")
|
||||||
|
|
||||||
|
assert.Equal(t, "2024-01-06", actualData.transactions[1].date)
|
||||||
|
assert.Equal(t, "", actualData.transactions[1].payee)
|
||||||
|
assert.Equal(t, "test\n#test2", actualData.transactions[1].narration)
|
||||||
|
assert.Equal(t, 2, len(actualData.transactions[1].postings))
|
||||||
|
assert.Equal(t, "LiabilitiesAccount:TestAccount2", actualData.transactions[1].postings[0].account)
|
||||||
|
assert.Equal(t, "-0.12", actualData.transactions[1].postings[0].amount)
|
||||||
|
assert.Equal(t, "USD", actualData.transactions[1].postings[0].commodity)
|
||||||
|
assert.Equal(t, "ExpensesAccount:TestCategory2", actualData.transactions[1].postings[1].account)
|
||||||
|
assert.Equal(t, "0.12", actualData.transactions[1].postings[1].amount)
|
||||||
|
assert.Equal(t, "USD", actualData.transactions[1].postings[1].commodity)
|
||||||
|
|
||||||
|
assert.Equal(t, 3, len(actualData.transactions[1].tags))
|
||||||
|
assert.Equal(t, actualData.transactions[1].tags[0], "tag2")
|
||||||
|
assert.Equal(t, actualData.transactions[1].tags[1], "tag5")
|
||||||
|
assert.Equal(t, actualData.transactions[1].tags[2], "tag6")
|
||||||
|
|
||||||
|
assert.Equal(t, 1, len(actualData.transactions[1].links))
|
||||||
|
assert.Equal(t, actualData.transactions[1].links[0], "test-link2")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderRead_EmptyContent(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
_, err = reader.read(context)
|
||||||
|
assert.EqualError(t, err, errs.ErrNotFoundTransactionDataInFile.Message)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderRead_UnsupportedInclude(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte("include \"other.beancount\""))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
_, err = reader.read(context)
|
||||||
|
assert.EqualError(t, err, errs.ErrBeancountFileNotSupportInclude.Message)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderRead_SkipUnsupportedDirective(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"plugin \"beancount.plugins.plugin_name\"\n"+
|
||||||
|
"unknown directive\n"+
|
||||||
|
"2024-01-01 commodity USD\n"+
|
||||||
|
"2024-01-01 price USD 1.08 CAD\n"+
|
||||||
|
"2024-01-01 note Assets:Test \"some text\"\n"+
|
||||||
|
"2024-01-01 document Assets:Test \"scheme://path\"\n"+
|
||||||
|
"2024-01-01 event \"location\" \"address\"\n"+
|
||||||
|
"2024-01-01 balance Assets:Test 100.00 USD\n"+
|
||||||
|
"2024-01-01 pad Assets:Test Equity:Opening-Balances\n"+
|
||||||
|
"2024-01-01 query \"Name\" \"\nSELECT FIELDS FROM TABLE\"\n"+
|
||||||
|
"2024-01-01 custom \"Type\" \"Value\"\n"+
|
||||||
|
"2024-01-01 unknown directive\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
_, err = reader.read(context)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderReadAndSetOption_AccountTypeName(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"option \"name_assets\" \"A\"\n"+
|
||||||
|
"option \"name_liabilities\" \"L\"\n"+
|
||||||
|
"option \"name_equity\" \"E\"\n"+
|
||||||
|
"\n"+
|
||||||
|
"2024-01-01 open A:TestAccount\n"+
|
||||||
|
"2024-01-02 open L:TestAccount2\n"+
|
||||||
|
"2024-01-03 open E:Opening-Balances\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
actualData, err := reader.read(context)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 3, len(actualData.accounts))
|
||||||
|
|
||||||
|
assert.Equal(t, "A:TestAccount", actualData.accounts["A:TestAccount"].name)
|
||||||
|
assert.Equal(t, beancountAssetsAccountType, actualData.accounts["A:TestAccount"].accountType)
|
||||||
|
|
||||||
|
assert.Equal(t, "L:TestAccount2", actualData.accounts["L:TestAccount2"].name)
|
||||||
|
assert.Equal(t, beancountLiabilitiesAccountType, actualData.accounts["L:TestAccount2"].accountType)
|
||||||
|
|
||||||
|
assert.Equal(t, "E:Opening-Balances", actualData.accounts["E:Opening-Balances"].name)
|
||||||
|
assert.Equal(t, beancountEquityAccountType, actualData.accounts["E:Opening-Balances"].accountType)
|
||||||
|
assert.True(t, actualData.accounts["E:Opening-Balances"].isOpeningBalanceEquityAccount())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderReadAndSetOption_InvalidLineOrUnsupportedOption(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"option \"test\" \"Test\" \"Test2\"\n"+
|
||||||
|
"option \"test\" \"Test\"\n"+
|
||||||
|
"option \"test\"\n"+
|
||||||
|
"option \n"+
|
||||||
|
"option\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
_, err = reader.read(context)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderReadAndSetTags(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"pushtag #tag1\n"+
|
||||||
|
"pushtag #tag2\n"+
|
||||||
|
"pushtag #tag2\n"+
|
||||||
|
"pushtag #tag1\n"+
|
||||||
|
"\n"+
|
||||||
|
"2024-01-01 * #tag3 #tag4\n"+
|
||||||
|
"poptag #tag1\n"+
|
||||||
|
"poptag #tag2\n"+
|
||||||
|
"pushtag\n"+
|
||||||
|
"pushtag \n"+
|
||||||
|
"pushtag tag\n"+
|
||||||
|
"2024-01-02 * #tag5 #tag6\n"+
|
||||||
|
"poptag #tag1\n"+
|
||||||
|
"poptag #tag2\n"+
|
||||||
|
"poptag\n"+
|
||||||
|
"poptag \n"+
|
||||||
|
"2024-01-03 * #tag5 #tag6\n"+
|
||||||
|
"pushtag #tag3\n"+
|
||||||
|
"pushtag #tag6\n"+
|
||||||
|
"2024-01-04 * #tag5 #tag6\n"+
|
||||||
|
"2024-01-05 * #tag5 #tag6 #tag6 #tag5\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
actualData, err := reader.read(context)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 5, len(actualData.transactions))
|
||||||
|
|
||||||
|
assert.Equal(t, 4, len(actualData.transactions[0].tags))
|
||||||
|
assert.Equal(t, actualData.transactions[0].tags[0], "tag1")
|
||||||
|
assert.Equal(t, actualData.transactions[0].tags[1], "tag2")
|
||||||
|
assert.Equal(t, actualData.transactions[0].tags[2], "tag3")
|
||||||
|
assert.Equal(t, actualData.transactions[0].tags[3], "tag4")
|
||||||
|
|
||||||
|
assert.Equal(t, 2, len(actualData.transactions[1].tags))
|
||||||
|
assert.Equal(t, actualData.transactions[1].tags[0], "tag5")
|
||||||
|
assert.Equal(t, actualData.transactions[1].tags[1], "tag6")
|
||||||
|
|
||||||
|
assert.Equal(t, 2, len(actualData.transactions[2].tags))
|
||||||
|
assert.Equal(t, actualData.transactions[2].tags[0], "tag5")
|
||||||
|
assert.Equal(t, actualData.transactions[2].tags[1], "tag6")
|
||||||
|
|
||||||
|
assert.Equal(t, 3, len(actualData.transactions[3].tags))
|
||||||
|
assert.Equal(t, actualData.transactions[3].tags[0], "tag3")
|
||||||
|
assert.Equal(t, actualData.transactions[3].tags[1], "tag6")
|
||||||
|
assert.Equal(t, actualData.transactions[3].tags[2], "tag5")
|
||||||
|
|
||||||
|
assert.Equal(t, 3, len(actualData.transactions[4].tags))
|
||||||
|
assert.Equal(t, actualData.transactions[4].tags[0], "tag3")
|
||||||
|
assert.Equal(t, actualData.transactions[4].tags[1], "tag6")
|
||||||
|
assert.Equal(t, actualData.transactions[4].tags[2], "tag5")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderReadAccountLine_InvalidLine(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"2024-01-01 open\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
actualData, err := reader.read(context)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, 0, len(actualData.accounts))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderReadAccountLine_InvalidAccountType(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"2024-01-01 open Test:TestAccount\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
_, err = reader.read(context)
|
||||||
|
assert.EqualError(t, err, errs.ErrInvalidBeancountFile.Message)
|
||||||
|
|
||||||
|
reader, err = createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"option \"name_assets\" \"A\"\n"+
|
||||||
|
"\n"+
|
||||||
|
"2024-01-01 open Assets:TestAccount\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
_, err = reader.read(context)
|
||||||
|
assert.EqualError(t, err, errs.ErrInvalidBeancountFile.Message)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderReadTransactionLine(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"2024-01-01 *\n"+
|
||||||
|
"2024-01-02 * \"test\ttest2\ntest3\" ; some comment\n"+
|
||||||
|
"2024-01-03 ! \"test\" \"test2\"\n"+
|
||||||
|
"2024-01-04 P \"test\" #tag #tag2 ; some comment\n"+
|
||||||
|
"2024-01-05 txn \"test\" ^scheme://path/to/test/link ; some comment\n"+
|
||||||
|
"2024-01-06 txn ; \"test\" \"test2\" #tag ^link\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
actualData, err := reader.read(context)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 6, len(actualData.transactions))
|
||||||
|
|
||||||
|
assert.Equal(t, "2024-01-01", actualData.transactions[0].date)
|
||||||
|
assert.Equal(t, beancountDirectiveCompletedTransaction, actualData.transactions[0].directive)
|
||||||
|
assert.Equal(t, "", actualData.transactions[0].payee)
|
||||||
|
assert.Equal(t, "", actualData.transactions[0].narration)
|
||||||
|
|
||||||
|
assert.Equal(t, "2024-01-02", actualData.transactions[1].date)
|
||||||
|
assert.Equal(t, beancountDirectiveCompletedTransaction, actualData.transactions[1].directive)
|
||||||
|
assert.Equal(t, "", actualData.transactions[1].payee)
|
||||||
|
assert.Equal(t, "test\ttest2\ntest3", actualData.transactions[1].narration)
|
||||||
|
|
||||||
|
assert.Equal(t, "2024-01-03", actualData.transactions[2].date)
|
||||||
|
assert.Equal(t, beancountDirectiveInCompleteTransaction, actualData.transactions[2].directive)
|
||||||
|
assert.Equal(t, "test", actualData.transactions[2].payee)
|
||||||
|
assert.Equal(t, "test2", actualData.transactions[2].narration)
|
||||||
|
|
||||||
|
assert.Equal(t, "2024-01-04", actualData.transactions[3].date)
|
||||||
|
assert.Equal(t, beancountDirectivePaddingTransaction, actualData.transactions[3].directive)
|
||||||
|
assert.Equal(t, "", actualData.transactions[3].payee)
|
||||||
|
assert.Equal(t, "test", actualData.transactions[3].narration)
|
||||||
|
|
||||||
|
assert.Equal(t, 2, len(actualData.transactions[3].tags))
|
||||||
|
assert.Equal(t, actualData.transactions[3].tags[0], "tag")
|
||||||
|
assert.Equal(t, actualData.transactions[3].tags[1], "tag2")
|
||||||
|
|
||||||
|
assert.Equal(t, "2024-01-05", actualData.transactions[4].date)
|
||||||
|
assert.Equal(t, beancountDirectiveTransaction, actualData.transactions[4].directive)
|
||||||
|
assert.Equal(t, "", actualData.transactions[4].payee)
|
||||||
|
assert.Equal(t, "test", actualData.transactions[4].narration)
|
||||||
|
|
||||||
|
assert.Equal(t, 1, len(actualData.transactions[4].links))
|
||||||
|
assert.Equal(t, actualData.transactions[4].links[0], "scheme://path/to/test/link")
|
||||||
|
|
||||||
|
assert.Equal(t, "2024-01-06", actualData.transactions[5].date)
|
||||||
|
assert.Equal(t, beancountDirectiveTransaction, actualData.transactions[5].directive)
|
||||||
|
assert.Equal(t, "", actualData.transactions[5].payee)
|
||||||
|
assert.Equal(t, "", actualData.transactions[5].narration)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderReadTransactionPostingLine(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"2024-01-01 *\n"+
|
||||||
|
" Income:TestCategory -123.45 CNY ; some comment\n"+
|
||||||
|
" Assets:TestAccount 123.45 CNY\n"+
|
||||||
|
"2024-01-02 *\n"+
|
||||||
|
" Liabilities:TestAccount2 -0.23 USD ; some comment\n"+
|
||||||
|
" Expenses:TestCategory2 0.12 USD @@ 0.84 CNY\n"+
|
||||||
|
" Expenses:TestCategory3 0.11 USD @ 7.12 CNY\n"+
|
||||||
|
" ! Expenses:TestCategory4 0.00 USD {0.00 CNY}\n"+
|
||||||
|
" Expenses:TestCategory5 \n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
actualData, err := reader.read(context)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 2, len(actualData.transactions))
|
||||||
|
|
||||||
|
assert.Equal(t, "2024-01-01", actualData.transactions[0].date)
|
||||||
|
assert.Equal(t, 2, len(actualData.transactions[0].postings))
|
||||||
|
assert.Equal(t, "Income:TestCategory", actualData.transactions[0].postings[0].account)
|
||||||
|
assert.Equal(t, "-123.45", actualData.transactions[0].postings[0].amount)
|
||||||
|
assert.Equal(t, "CNY", actualData.transactions[0].postings[0].commodity)
|
||||||
|
|
||||||
|
assert.Equal(t, "Assets:TestAccount", actualData.transactions[0].postings[1].account)
|
||||||
|
assert.Equal(t, "123.45", actualData.transactions[0].postings[1].amount)
|
||||||
|
assert.Equal(t, "CNY", actualData.transactions[0].postings[1].commodity)
|
||||||
|
|
||||||
|
assert.Equal(t, "2024-01-02", actualData.transactions[1].date)
|
||||||
|
assert.Equal(t, 4, len(actualData.transactions[1].postings))
|
||||||
|
|
||||||
|
assert.Equal(t, "Liabilities:TestAccount2", actualData.transactions[1].postings[0].account)
|
||||||
|
assert.Equal(t, "-0.23", actualData.transactions[1].postings[0].amount)
|
||||||
|
assert.Equal(t, "USD", actualData.transactions[1].postings[0].commodity)
|
||||||
|
assert.Equal(t, "Expenses:TestCategory2", actualData.transactions[1].postings[1].account)
|
||||||
|
|
||||||
|
assert.Equal(t, "0.12", actualData.transactions[1].postings[1].amount)
|
||||||
|
assert.Equal(t, "USD", actualData.transactions[1].postings[1].commodity)
|
||||||
|
assert.Equal(t, "0.84", actualData.transactions[1].postings[1].totalCost)
|
||||||
|
assert.Equal(t, "CNY", actualData.transactions[1].postings[1].totalCostCommodity)
|
||||||
|
assert.Equal(t, "Expenses:TestCategory3", actualData.transactions[1].postings[2].account)
|
||||||
|
|
||||||
|
assert.Equal(t, "0.11", actualData.transactions[1].postings[2].amount)
|
||||||
|
assert.Equal(t, "USD", actualData.transactions[1].postings[2].commodity)
|
||||||
|
assert.Equal(t, "7.12", actualData.transactions[1].postings[2].price)
|
||||||
|
assert.Equal(t, "CNY", actualData.transactions[1].postings[2].priceCommodity)
|
||||||
|
|
||||||
|
assert.Equal(t, "0.00", actualData.transactions[1].postings[3].amount)
|
||||||
|
assert.Equal(t, "USD", actualData.transactions[1].postings[3].commodity)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderReadTransactionPostingLine_AmountExpression(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"2024-01-01 *\n"+
|
||||||
|
" Income:TestCategory (1.2-3.4) * 5.6 / 7.8 CNY\n"+
|
||||||
|
" Assets:TestAccount 1.2 * 3.4/-5.6 - 7.8 CNY\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
actualData, err := reader.read(context)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 1, len(actualData.transactions))
|
||||||
|
|
||||||
|
assert.Equal(t, "2024-01-01", actualData.transactions[0].date)
|
||||||
|
assert.Equal(t, 2, len(actualData.transactions[0].postings))
|
||||||
|
assert.Equal(t, "Income:TestCategory", actualData.transactions[0].postings[0].account)
|
||||||
|
assert.Equal(t, "(1.2-3.4) * 5.6 / 7.8", actualData.transactions[0].postings[0].originalAmount)
|
||||||
|
assert.Equal(t, "-1.58", actualData.transactions[0].postings[0].amount)
|
||||||
|
assert.Equal(t, "CNY", actualData.transactions[0].postings[0].commodity)
|
||||||
|
|
||||||
|
assert.Equal(t, "Assets:TestAccount", actualData.transactions[0].postings[1].account)
|
||||||
|
assert.Equal(t, "1.2 * 3.4/-5.6 - 7.8", actualData.transactions[0].postings[1].originalAmount)
|
||||||
|
assert.Equal(t, "-8.53", actualData.transactions[0].postings[1].amount)
|
||||||
|
assert.Equal(t, "CNY", actualData.transactions[0].postings[1].commodity)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderReadTransactionPostingLine_InvalidAmountExpression(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"2024-01-01 *\n"+
|
||||||
|
" Income:TestCategory (1.2-3.4)*5.6/0 CNY\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
_, err = reader.read(context)
|
||||||
|
assert.EqualError(t, err, errs.ErrAmountInvalid.Message)
|
||||||
|
|
||||||
|
reader, err = createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"2024-01-01 *\n"+
|
||||||
|
" Assets:TestAccount abc CNY\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
_, err = reader.read(context)
|
||||||
|
assert.EqualError(t, err, errs.ErrAmountInvalid.Message)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderReadTransactionPostingLine_InvalidAccountType(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"2024-01-01 *\n"+
|
||||||
|
" Income:TestCategory -123.45 CNY\n"+
|
||||||
|
" Test:TestAccount 123.45 CNY\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
_, err = reader.read(context)
|
||||||
|
assert.EqualError(t, err, errs.ErrInvalidBeancountFile.Message)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderReadTransactionPostingLine_InvalidCommodity(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"2024-01-01 *\n"+
|
||||||
|
" Income:TestCategory -123.45 cny\n"+
|
||||||
|
" Assets:TestAccount 123.45 cny\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
_, err = reader.read(context)
|
||||||
|
assert.EqualError(t, err, errs.ErrInvalidBeancountFile.Message)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderReadTransactionPostingLine_MissingAmount(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"2024-01-01 *\n"+
|
||||||
|
" Assets:TestAccount\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
actualData, err := reader.read(context)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, 1, len(actualData.transactions))
|
||||||
|
assert.Equal(t, 0, len(actualData.transactions[0].postings))
|
||||||
|
|
||||||
|
reader, err = createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"2024-01-01 *\n"+
|
||||||
|
" Assets:TestAccount \n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
actualData, err = reader.read(context)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, 1, len(actualData.transactions))
|
||||||
|
assert.Equal(t, 0, len(actualData.transactions[0].postings))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderReadTransactionPostingLine_MissingCommodity(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"2024-01-01 *\n"+
|
||||||
|
" Assets:TestAccount 123.45\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
_, err = reader.read(context)
|
||||||
|
assert.EqualError(t, err, errs.ErrInvalidBeancountFile.Message)
|
||||||
|
|
||||||
|
reader, err = createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"2024-01-01 *\n"+
|
||||||
|
" Assets:TestAccount 123.45 \n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
_, err = reader.read(context)
|
||||||
|
assert.EqualError(t, err, errs.ErrInvalidBeancountFile.Message)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountDataReaderReadTransactionMetadataLine(t *testing.T) {
|
||||||
|
context := core.NewNullContext()
|
||||||
|
reader, err := createNewBeancountDataReader(context, []byte(""+
|
||||||
|
"2024-01-01 *\n"+
|
||||||
|
" key: value\n"+
|
||||||
|
" key2: \"value 2\"\n"+
|
||||||
|
" key3: \n"+
|
||||||
|
" key4: \"\"\n"+
|
||||||
|
" key5 : \"\"\n"+
|
||||||
|
" key2: \"new value\"\n"+
|
||||||
|
" Income:TestCategory -123.45 CNY\n"+
|
||||||
|
" Assets:TestAccount 123.45 CNY\n"+
|
||||||
|
"2024-01-02 *\n"+
|
||||||
|
" Liabilities:TestAccount2 -0.23 USD\n"+
|
||||||
|
" key6: value6\n"+
|
||||||
|
" key7: \"value 7\"\n"+
|
||||||
|
" key8: \n"+
|
||||||
|
" key9: \"\"\n"+
|
||||||
|
" key0 : \"\"\n"+
|
||||||
|
" key6: \"new value\"\n"+
|
||||||
|
" Expenses:TestCategory2 0.12 USD\n"))
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
actualData, err := reader.read(context)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 2, len(actualData.transactions))
|
||||||
|
|
||||||
|
assert.Equal(t, "2024-01-01", actualData.transactions[0].date)
|
||||||
|
assert.Equal(t, 2, len(actualData.transactions[0].postings))
|
||||||
|
assert.Equal(t, 2, len(actualData.transactions[0].metadata))
|
||||||
|
assert.Equal(t, "value", actualData.transactions[0].metadata["key"])
|
||||||
|
assert.Equal(t, "value 2", actualData.transactions[0].metadata["key2"])
|
||||||
|
|
||||||
|
assert.Equal(t, "2024-01-02", actualData.transactions[1].date)
|
||||||
|
assert.Equal(t, 2, len(actualData.transactions[1].postings))
|
||||||
|
assert.Equal(t, 2, len(actualData.transactions[1].postings[0].metadata))
|
||||||
|
assert.Equal(t, "value6", actualData.transactions[1].postings[0].metadata["key6"])
|
||||||
|
assert.Equal(t, "value 7", actualData.transactions[1].postings[0].metadata["key7"])
|
||||||
|
assert.Equal(t, 0, len(actualData.transactions[1].postings[1].metadata))
|
||||||
|
}
|
||||||
@@ -0,0 +1,41 @@
|
|||||||
|
package beancount
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestBeancountAccount_IsOpeningBalanceEquityAccount_True(t *testing.T) {
|
||||||
|
account := beancountAccount{
|
||||||
|
accountType: beancountEquityAccountType,
|
||||||
|
name: "Equity:Opening-Balances",
|
||||||
|
}
|
||||||
|
assert.True(t, account.isOpeningBalanceEquityAccount())
|
||||||
|
|
||||||
|
account = beancountAccount{
|
||||||
|
accountType: beancountEquityAccountType,
|
||||||
|
name: "E:Opening-Balances",
|
||||||
|
}
|
||||||
|
assert.True(t, account.isOpeningBalanceEquityAccount())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountAccount_IsOpeningBalanceEquityAccount_False(t *testing.T) {
|
||||||
|
account := beancountAccount{
|
||||||
|
accountType: beancountAssetsAccountType,
|
||||||
|
name: "Equity:Opening-Balances",
|
||||||
|
}
|
||||||
|
assert.False(t, account.isOpeningBalanceEquityAccount())
|
||||||
|
|
||||||
|
account = beancountAccount{
|
||||||
|
accountType: beancountEquityAccountType,
|
||||||
|
name: "Opening-Balances",
|
||||||
|
}
|
||||||
|
assert.False(t, account.isOpeningBalanceEquityAccount())
|
||||||
|
|
||||||
|
account = beancountAccount{
|
||||||
|
accountType: beancountEquityAccountType,
|
||||||
|
name: "Equity:Other",
|
||||||
|
}
|
||||||
|
assert.False(t, account.isOpeningBalanceEquityAccount())
|
||||||
|
}
|
||||||
@@ -0,0 +1,49 @@
|
|||||||
|
package beancount
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/converter"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
var beancountTransactionTypeNameMapping = map[models.TransactionType]string{
|
||||||
|
models.TRANSACTION_TYPE_MODIFY_BALANCE: utils.IntToString(int(models.TRANSACTION_TYPE_MODIFY_BALANCE)),
|
||||||
|
models.TRANSACTION_TYPE_INCOME: utils.IntToString(int(models.TRANSACTION_TYPE_INCOME)),
|
||||||
|
models.TRANSACTION_TYPE_EXPENSE: utils.IntToString(int(models.TRANSACTION_TYPE_EXPENSE)),
|
||||||
|
models.TRANSACTION_TYPE_TRANSFER: utils.IntToString(int(models.TRANSACTION_TYPE_TRANSFER)),
|
||||||
|
}
|
||||||
|
|
||||||
|
// beancountTransactionDataImporter defines the structure of Beancount importer for transaction data
|
||||||
|
type beancountTransactionDataImporter struct {
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize a beancount transaction data importer singleton instance
|
||||||
|
var (
|
||||||
|
BeancountTransactionDataImporter = &beancountTransactionDataImporter{}
|
||||||
|
)
|
||||||
|
|
||||||
|
// ParseImportedData returns the imported data by parsing the Beancount transaction data
|
||||||
|
func (c *beancountTransactionDataImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
||||||
|
beancountDataReader, err := createNewBeancountDataReader(ctx, data)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, nil, nil, nil, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
beancountData, err := beancountDataReader.read(ctx)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, nil, nil, nil, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
transactionDataTable, err := createNewBeancountTransactionDataTable(beancountData)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, nil, nil, nil, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
dataTableImporter := converter.CreateNewImporterWithTypeNameMapping(beancountTransactionTypeNameMapping, "", BEANCOUNT_TRANSACTION_TAG_SEPARATOR)
|
||||||
|
|
||||||
|
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
||||||
|
}
|
||||||
@@ -0,0 +1,358 @@
|
|||||||
|
package beancount
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestBeancountTransactionDataFileParseImportedData_MinimumValidData(t *testing.T) {
|
||||||
|
converter := BeancountTransactionDataImporter
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
user := &models.User{
|
||||||
|
Uid: 1234567890,
|
||||||
|
DefaultCurrency: "CNY",
|
||||||
|
}
|
||||||
|
|
||||||
|
allNewTransactions, allNewAccounts, allNewSubExpenseCategories, allNewSubIncomeCategories, allNewSubTransferCategories, allNewTags, err := converter.ParseImportedData(context, user, []byte(
|
||||||
|
"2024-09-01 *\n"+
|
||||||
|
" Equity:Opening-Balances -123.45 CNY\n"+
|
||||||
|
" Assets:TestAccount 123.45 CNY\n"+
|
||||||
|
"2024-09-02 *\n"+
|
||||||
|
" Income:TestCategory -0.12 CNY\n"+
|
||||||
|
" Assets:TestAccount 0.12 CNY\n"+
|
||||||
|
"2024-09-03 *\n"+
|
||||||
|
" Assets:TestAccount -1.00 CNY\n"+
|
||||||
|
" Expenses:TestCategory2 1.00 CNY\n"+
|
||||||
|
"2024-09-04 *\n"+
|
||||||
|
" Assets:TestAccount -0.05 CNY\n"+
|
||||||
|
" Assets:TestAccount2 0.05 CNY\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 4, len(allNewTransactions))
|
||||||
|
assert.Equal(t, 2, len(allNewAccounts))
|
||||||
|
assert.Equal(t, 1, len(allNewSubExpenseCategories))
|
||||||
|
assert.Equal(t, 1, len(allNewSubIncomeCategories))
|
||||||
|
assert.Equal(t, 1, len(allNewSubTransferCategories))
|
||||||
|
assert.Equal(t, 0, len(allNewTags))
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[0].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_MODIFY_BALANCE, allNewTransactions[0].Type)
|
||||||
|
assert.Equal(t, int64(1725148800), utils.GetUnixTimeFromTransactionTime(allNewTransactions[0].TransactionTime))
|
||||||
|
assert.Equal(t, int64(12345), allNewTransactions[0].Amount)
|
||||||
|
assert.Equal(t, "Assets:TestAccount", allNewTransactions[0].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "", allNewTransactions[0].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[1].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_INCOME, allNewTransactions[1].Type)
|
||||||
|
assert.Equal(t, int64(1725235200), utils.GetUnixTimeFromTransactionTime(allNewTransactions[1].TransactionTime))
|
||||||
|
assert.Equal(t, int64(12), allNewTransactions[1].Amount)
|
||||||
|
assert.Equal(t, "Assets:TestAccount", allNewTransactions[1].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Income:TestCategory", allNewTransactions[1].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[2].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_EXPENSE, allNewTransactions[2].Type)
|
||||||
|
assert.Equal(t, int64(1725321600), utils.GetUnixTimeFromTransactionTime(allNewTransactions[2].TransactionTime))
|
||||||
|
assert.Equal(t, int64(100), allNewTransactions[2].Amount)
|
||||||
|
assert.Equal(t, "Assets:TestAccount", allNewTransactions[2].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Expenses:TestCategory2", allNewTransactions[2].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[3].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_TRANSFER_OUT, allNewTransactions[3].Type)
|
||||||
|
assert.Equal(t, int64(1725408000), utils.GetUnixTimeFromTransactionTime(allNewTransactions[3].TransactionTime))
|
||||||
|
assert.Equal(t, int64(5), allNewTransactions[3].Amount)
|
||||||
|
assert.Equal(t, "Assets:TestAccount", allNewTransactions[3].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Assets:TestAccount2", allNewTransactions[3].OriginalDestinationAccountName)
|
||||||
|
assert.Equal(t, "", allNewTransactions[3].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewAccounts[0].Uid)
|
||||||
|
assert.Equal(t, "Assets:TestAccount", allNewAccounts[0].Name)
|
||||||
|
assert.Equal(t, "CNY", allNewAccounts[0].Currency)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewAccounts[1].Uid)
|
||||||
|
assert.Equal(t, "Assets:TestAccount2", allNewAccounts[1].Name)
|
||||||
|
assert.Equal(t, "CNY", allNewAccounts[1].Currency)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewSubExpenseCategories[0].Uid)
|
||||||
|
assert.Equal(t, "Expenses:TestCategory2", allNewSubExpenseCategories[0].Name)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewSubIncomeCategories[0].Uid)
|
||||||
|
assert.Equal(t, "Income:TestCategory", allNewSubIncomeCategories[0].Name)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewSubTransferCategories[0].Uid)
|
||||||
|
assert.Equal(t, "", allNewSubTransferCategories[0].Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountTransactionDataFileParseImportedData_MinimumValidData2(t *testing.T) {
|
||||||
|
converter := BeancountTransactionDataImporter
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
user := &models.User{
|
||||||
|
Uid: 1234567890,
|
||||||
|
DefaultCurrency: "CNY",
|
||||||
|
}
|
||||||
|
|
||||||
|
allNewTransactions, allNewAccounts, allNewSubExpenseCategories, allNewSubIncomeCategories, allNewSubTransferCategories, allNewTags, err := converter.ParseImportedData(context, user, []byte(
|
||||||
|
"2024-09-01 *\n"+
|
||||||
|
" Assets:TestAccount 123.45 CNY\n"+
|
||||||
|
" Equity:Opening-Balances -123.45 CNY\n"+
|
||||||
|
"2024-09-02 *\n"+
|
||||||
|
" Assets:TestAccount 0.12 CNY\n"+
|
||||||
|
" Income:TestCategory -0.12 CNY\n"+
|
||||||
|
"2024-09-03 *\n"+
|
||||||
|
" Expenses:TestCategory2 1.00 CNY\n"+
|
||||||
|
" Assets:TestAccount -1.00 CNY\n"+
|
||||||
|
"2024-09-04 *\n"+
|
||||||
|
" Assets:TestAccount2 0.05 CNY\n"+
|
||||||
|
" Assets:TestAccount -0.05 CNY\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 4, len(allNewTransactions))
|
||||||
|
assert.Equal(t, 2, len(allNewAccounts))
|
||||||
|
assert.Equal(t, 1, len(allNewSubExpenseCategories))
|
||||||
|
assert.Equal(t, 1, len(allNewSubIncomeCategories))
|
||||||
|
assert.Equal(t, 1, len(allNewSubTransferCategories))
|
||||||
|
assert.Equal(t, 0, len(allNewTags))
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[0].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_MODIFY_BALANCE, allNewTransactions[0].Type)
|
||||||
|
assert.Equal(t, int64(1725148800), utils.GetUnixTimeFromTransactionTime(allNewTransactions[0].TransactionTime))
|
||||||
|
assert.Equal(t, int64(12345), allNewTransactions[0].Amount)
|
||||||
|
assert.Equal(t, "Assets:TestAccount", allNewTransactions[0].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "", allNewTransactions[0].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[1].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_INCOME, allNewTransactions[1].Type)
|
||||||
|
assert.Equal(t, int64(1725235200), utils.GetUnixTimeFromTransactionTime(allNewTransactions[1].TransactionTime))
|
||||||
|
assert.Equal(t, int64(12), allNewTransactions[1].Amount)
|
||||||
|
assert.Equal(t, "Assets:TestAccount", allNewTransactions[1].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Income:TestCategory", allNewTransactions[1].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[2].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_EXPENSE, allNewTransactions[2].Type)
|
||||||
|
assert.Equal(t, int64(1725321600), utils.GetUnixTimeFromTransactionTime(allNewTransactions[2].TransactionTime))
|
||||||
|
assert.Equal(t, int64(100), allNewTransactions[2].Amount)
|
||||||
|
assert.Equal(t, "Assets:TestAccount", allNewTransactions[2].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Expenses:TestCategory2", allNewTransactions[2].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[3].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_TRANSFER_OUT, allNewTransactions[3].Type)
|
||||||
|
assert.Equal(t, int64(1725408000), utils.GetUnixTimeFromTransactionTime(allNewTransactions[3].TransactionTime))
|
||||||
|
assert.Equal(t, int64(5), allNewTransactions[3].Amount)
|
||||||
|
assert.Equal(t, "Assets:TestAccount", allNewTransactions[3].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Assets:TestAccount2", allNewTransactions[3].OriginalDestinationAccountName)
|
||||||
|
assert.Equal(t, "", allNewTransactions[3].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewAccounts[0].Uid)
|
||||||
|
assert.Equal(t, "Assets:TestAccount", allNewAccounts[0].Name)
|
||||||
|
assert.Equal(t, "CNY", allNewAccounts[0].Currency)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewAccounts[1].Uid)
|
||||||
|
assert.Equal(t, "Assets:TestAccount2", allNewAccounts[1].Name)
|
||||||
|
assert.Equal(t, "CNY", allNewAccounts[1].Currency)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewSubExpenseCategories[0].Uid)
|
||||||
|
assert.Equal(t, "Expenses:TestCategory2", allNewSubExpenseCategories[0].Name)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewSubIncomeCategories[0].Uid)
|
||||||
|
assert.Equal(t, "Income:TestCategory", allNewSubIncomeCategories[0].Name)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewSubTransferCategories[0].Uid)
|
||||||
|
assert.Equal(t, "", allNewSubTransferCategories[0].Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountTransactionDataFileParseImportedData_ParseInvalidTime(t *testing.T) {
|
||||||
|
converter := BeancountTransactionDataImporter
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
user := &models.User{
|
||||||
|
Uid: 1234567890,
|
||||||
|
DefaultCurrency: "CNY",
|
||||||
|
}
|
||||||
|
|
||||||
|
_, _, _, _, _, _, err := converter.ParseImportedData(context, user, []byte(
|
||||||
|
"2024/09/01 *\n"+
|
||||||
|
" Equity:Opening-Balances -123.45 CNY\n"+
|
||||||
|
" Assets:TestAccount 123.45 CNY\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
assert.EqualError(t, err, errs.ErrNotFoundTransactionDataInFile.Message)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountTransactionDataFileParseImportedData_ParseValidCurrency(t *testing.T) {
|
||||||
|
converter := BeancountTransactionDataImporter
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
user := &models.User{
|
||||||
|
Uid: 1234567890,
|
||||||
|
DefaultCurrency: "CNY",
|
||||||
|
}
|
||||||
|
|
||||||
|
allNewTransactions, allNewAccounts, _, _, _, _, err := converter.ParseImportedData(context, user, []byte(
|
||||||
|
"2024-09-01 * \"Payee Name\" \"Hello\nWorld\"\n"+
|
||||||
|
" Assets:TestAccount -0.12 USD\n"+
|
||||||
|
" Assets:TestAccount2 0.84 CNY\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 1, len(allNewTransactions))
|
||||||
|
assert.Equal(t, 2, len(allNewAccounts))
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[0].Uid)
|
||||||
|
assert.Equal(t, int64(12), allNewTransactions[0].Amount)
|
||||||
|
assert.Equal(t, int64(84), allNewTransactions[0].RelatedAccountAmount)
|
||||||
|
assert.Equal(t, "Assets:TestAccount", allNewTransactions[0].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "USD", allNewTransactions[0].OriginalSourceAccountCurrency)
|
||||||
|
assert.Equal(t, "Assets:TestAccount2", allNewTransactions[0].OriginalDestinationAccountName)
|
||||||
|
assert.Equal(t, "CNY", allNewTransactions[0].OriginalDestinationAccountCurrency)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewAccounts[0].Uid)
|
||||||
|
assert.Equal(t, "Assets:TestAccount", allNewAccounts[0].Name)
|
||||||
|
assert.Equal(t, "USD", allNewAccounts[0].Currency)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewAccounts[1].Uid)
|
||||||
|
assert.Equal(t, "Assets:TestAccount2", allNewAccounts[1].Name)
|
||||||
|
assert.Equal(t, "CNY", allNewAccounts[1].Currency)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountTransactionDataFileParseImportedData_ParseInvalidAmount(t *testing.T) {
|
||||||
|
converter := BeancountTransactionDataImporter
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
user := &models.User{
|
||||||
|
Uid: 1234567890,
|
||||||
|
DefaultCurrency: "CNY",
|
||||||
|
}
|
||||||
|
|
||||||
|
_, _, _, _, _, _, err := converter.ParseImportedData(context, user, []byte(
|
||||||
|
"2024-09-01 *\n"+
|
||||||
|
" Equity:Opening-Balances -abc CNY\n"+
|
||||||
|
" Assets:TestAccount abc CNY\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
assert.EqualError(t, err, errs.ErrAmountInvalid.Message)
|
||||||
|
|
||||||
|
_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte(
|
||||||
|
"2024-09-01 *\n"+
|
||||||
|
" Equity:Opening-Balances -1/0 CNY\n"+
|
||||||
|
" Assets:TestAccount 1/0 CNY\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
assert.EqualError(t, err, errs.ErrAmountInvalid.Message)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBeancountTransactionDataFileParseImportedData_ParseDescription(t *testing.T) {
|
||||||
|
converter := BeancountTransactionDataImporter
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
user := &models.User{
|
||||||
|
Uid: 1234567890,
|
||||||
|
DefaultCurrency: "CNY",
|
||||||
|
}
|
||||||
|
|
||||||
|
allNewTransactions, _, _, _, _, _, err := converter.ParseImportedData(context, user, []byte(
|
||||||
|
"2024-09-01 * \"foo bar\t#test\n\"\n"+
|
||||||
|
" Equity:Opening-Balances -123.45 CNY\n"+
|
||||||
|
" Assets:TestAccount 123.45 CNY\n"+
|
||||||
|
"2024-09-02 * \"Payee Name\" \"Hello\nWorld\"\n"+
|
||||||
|
" Income:TestCategory -0.12 CNY\n"+
|
||||||
|
" Assets:TestAccount 0.12 CNY\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 2, len(allNewTransactions))
|
||||||
|
|
||||||
|
assert.Equal(t, "foo bar\t#test\n", allNewTransactions[0].Comment)
|
||||||
|
assert.Equal(t, "Hello\nWorld", allNewTransactions[1].Comment)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestBeancountTransactionDataFileParseImportedData_InvalidTransaction verifies that
// two-posting transactions whose account-type combinations cannot be mapped to a
// supported transaction type are rejected with the appropriate error.
func TestBeancountTransactionDataFileParseImportedData_InvalidTransaction(t *testing.T) {
	converter := BeancountTransactionDataImporter
	context := core.NewNullContext()

	user := &models.User{
		Uid:             1234567890,
		DefaultCurrency: "CNY",
	}

	// Two asset postings with both amounts positive: a transfer needs one negative
	// (source) side, so this is an invalid beancount file.
	_, _, _, _, _, _, err := converter.ParseImportedData(context, user, []byte(
		"2024-09-02 * \"Payee Name\" \"Hello\nWorld\"\n"+
			" Assets:TestAccount 0.11 CNY\n"+
			" Assets:TestAccount2 0.11 CNY\n"), 0, nil, nil, nil, nil, nil)
	assert.EqualError(t, err, errs.ErrInvalidBeancountFile.Message)

	// Expense-to-expense has no supported transaction type mapping.
	_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte(
		"2024-09-02 * \"Payee Name\" \"Hello\nWorld\"\n"+
			" Expenses:TestCategory -0.11 CNY\n"+
			" Expenses:TestCategory2 0.11 CNY\n"), 0, nil, nil, nil, nil, nil)
	assert.EqualError(t, err, errs.ErrThereAreNotSupportedTransactionType.Message)

	// Income-to-income has no supported transaction type mapping.
	_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte(
		"2024-09-02 * \"Payee Name\" \"Hello\nWorld\"\n"+
			" Income:TestCategory -0.11 CNY\n"+
			" Income:TestCategory2 0.11 CNY\n"), 0, nil, nil, nil, nil, nil)
	assert.EqualError(t, err, errs.ErrThereAreNotSupportedTransactionType.Message)

	// Equity-to-equity has no supported transaction type mapping.
	_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte(
		"2024-09-02 * \"Payee Name\" \"Hello\nWorld\"\n"+
			" Equity:TestCategory -0.11 CNY\n"+
			" Equity:TestCategory2 0.11 CNY\n"), 0, nil, nil, nil, nil, nil)
	assert.EqualError(t, err, errs.ErrThereAreNotSupportedTransactionType.Message)
}
|
||||||
|
|
||||||
|
// TestBeancountTransactionDataFileParseImportedData_NotSupportedToParseSplitTransaction
// verifies that a transaction with more than two postings (a split transaction) is
// rejected with ErrNotSupportedSplitTransactions.
func TestBeancountTransactionDataFileParseImportedData_NotSupportedToParseSplitTransaction(t *testing.T) {
	converter := BeancountTransactionDataImporter
	context := core.NewNullContext()

	user := &models.User{
		Uid:             1234567890,
		DefaultCurrency: "CNY",
	}

	// Three postings: one source split across two destinations.
	_, _, _, _, _, _, err := converter.ParseImportedData(context, user, []byte(
		"2024-09-02 * \"Payee Name\" \"Hello\nWorld\"\n"+
			" Assets:TestAccount -0.23 CNY\n"+
			" Assets:TestAccount2 0.11 CNY\n"+
			" Assets:TestAccount3 0.12 CNY\n"), 0, nil, nil, nil, nil, nil)
	assert.EqualError(t, err, errs.ErrNotSupportedSplitTransactions.Message)
}
|
||||||
|
|
||||||
|
// TestBeancountTransactionDataFileParseImportedData_MissingTransactionRequiredData
// verifies that transactions missing any required element (date, account name,
// amount, or commodity) are rejected with the appropriate error.
func TestBeancountTransactionDataFileParseImportedData_MissingTransactionRequiredData(t *testing.T) {
	converter := BeancountTransactionDataImporter
	context := core.NewNullContext()

	user := &models.User{
		Uid:             1234567890,
		DefaultCurrency: "CNY",
	}

	// Missing Transaction Time — with no leading date the line is not recognized as
	// a transaction at all, so no transaction data is found in the file.
	_, _, _, _, _, _, err := converter.ParseImportedData(context, user, []byte(
		"* \"narration\"\n"+
			" Equity:Opening-Balances -123.45 CNY\n"+
			" Assets:TestAccount 123.45 CNY\n"), 0, nil, nil, nil, nil, nil)
	assert.EqualError(t, err, errs.ErrNotFoundTransactionDataInFile.Message)

	// Missing Account Name
	_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte(
		"2024-09-01 * \"narration\"\n"+
			" Equity:Opening-Balances -123.45 CNY\n"+
			" 123.45 CNY\n"), 0, nil, nil, nil, nil, nil)
	assert.EqualError(t, err, errs.ErrInvalidBeancountFile.Message)

	// Missing Amount
	_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte(
		"2024-09-01 * \"narration\"\n"+
			" Equity:Opening-Balances\n"+
			" Assets:TestAccount\n"), 0, nil, nil, nil, nil, nil)
	assert.EqualError(t, err, errs.ErrInvalidBeancountFile.Message)

	// Missing Commodity
	_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte(
		"2024-09-01 * \"narration\"\n"+
			" Equity:Opening-Balances -123.45\n"+
			" Assets:TestAccount 123.45\n"), 0, nil, nil, nil, nil, nil)
	assert.EqualError(t, err, errs.ErrInvalidBeancountFile.Message)
}
|
||||||
@@ -0,0 +1,248 @@
|
|||||||
|
package beancount
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/log"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
// beancountTransactionSupportedColumns lists the transaction data table columns
// that a beancount import can provide values for. Columns not listed here cause
// HasColumn to report false and GetData to return an empty string.
var beancountTransactionSupportedColumns = map[datatable.TransactionDataTableColumn]bool{
	datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIME:         true,
	datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE:         true,
	datatable.TRANSACTION_DATA_TABLE_SUB_CATEGORY:             true,
	datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME:             true,
	datatable.TRANSACTION_DATA_TABLE_ACCOUNT_CURRENCY:         true,
	datatable.TRANSACTION_DATA_TABLE_AMOUNT:                   true,
	datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME:     true,
	datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_CURRENCY: true,
	datatable.TRANSACTION_DATA_TABLE_RELATED_AMOUNT:           true,
	datatable.TRANSACTION_DATA_TABLE_DESCRIPTION:              true,
}
|
||||||
|
|
||||||
|
// BEANCOUNT_TRANSACTION_TAG_SEPARATOR is the separator placed between beancount
// transaction tags when they are joined into a single tags column value.
// NOTE(review): Go convention favors a MixedCaps const; name kept as-is because
// it is exported and may be referenced outside this chunk — confirm before renaming.
var BEANCOUNT_TRANSACTION_TAG_SEPARATOR = "#"
|
||||||
|
|
||||||
|
// beancountTransactionDataTable defines the structure of Beancount transaction data table
type beancountTransactionDataTable struct {
	allData    []*beancountTransactionEntry // parsed transactions, one entry per row
	accountMap map[string]*beancountAccount // account name -> parsed account definition
}
|
||||||
|
|
||||||
|
// beancountTransactionDataRow defines the structure of Beancount transaction data row
type beancountTransactionDataRow struct {
	dataTable  *beancountTransactionDataTable                 // owning data table
	data       *beancountTransactionEntry                     // the raw parsed entry this row represents
	finalItems map[datatable.TransactionDataTableColumn]string // column values produced by parseTransaction
}
|
||||||
|
|
||||||
|
// beancountTransactionDataRowIterator defines the structure of Beancount transaction data row iterator
type beancountTransactionDataRowIterator struct {
	dataTable    *beancountTransactionDataTable // table being iterated
	currentIndex int                            // index of the row last returned; starts at -1
}
|
||||||
|
|
||||||
|
// HasColumn returns whether the transaction data table has specified column
|
||||||
|
func (t *beancountTransactionDataTable) HasColumn(column datatable.TransactionDataTableColumn) bool {
|
||||||
|
_, exists := beancountTransactionSupportedColumns[column]
|
||||||
|
return exists
|
||||||
|
}
|
||||||
|
|
||||||
|
// TransactionRowCount returns the total count of transaction data row
|
||||||
|
func (t *beancountTransactionDataTable) TransactionRowCount() int {
|
||||||
|
return len(t.allData)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TransactionRowIterator returns the iterator of transaction data row
|
||||||
|
func (t *beancountTransactionDataTable) TransactionRowIterator() datatable.TransactionDataRowIterator {
|
||||||
|
return &beancountTransactionDataRowIterator{
|
||||||
|
dataTable: t,
|
||||||
|
currentIndex: -1,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsValid returns whether this row is valid data for importing.
// Beancount rows are always valid here: entries that cannot be imported are
// rejected earlier, during parseTransaction, before a row is ever constructed.
func (r *beancountTransactionDataRow) IsValid() bool {
	return true
}
|
||||||
|
|
||||||
|
// GetData returns the data in the specified column type
|
||||||
|
func (r *beancountTransactionDataRow) GetData(column datatable.TransactionDataTableColumn) string {
|
||||||
|
_, exists := beancountTransactionSupportedColumns[column]
|
||||||
|
|
||||||
|
if exists {
|
||||||
|
return r.finalItems[column]
|
||||||
|
}
|
||||||
|
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// HasNext returns whether the iterator does not reach the end
|
||||||
|
func (t *beancountTransactionDataRowIterator) HasNext() bool {
|
||||||
|
return t.currentIndex+1 < len(t.dataTable.allData)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Next returns the next imported data row
|
||||||
|
func (t *beancountTransactionDataRowIterator) Next(ctx core.Context, user *models.User) (daraRow datatable.TransactionDataRow, err error) {
|
||||||
|
if t.currentIndex+1 >= len(t.dataTable.allData) {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
t.currentIndex++
|
||||||
|
|
||||||
|
data := t.dataTable.allData[t.currentIndex]
|
||||||
|
rowItems, err := t.parseTransaction(ctx, user, data)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &beancountTransactionDataRow{
|
||||||
|
dataTable: t.dataTable,
|
||||||
|
data: data,
|
||||||
|
finalItems: rowItems,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *beancountTransactionDataRowIterator) parseTransaction(ctx core.Context, user *models.User, beancountEntry *beancountTransactionEntry) (map[datatable.TransactionDataTableColumn]string, error) {
|
||||||
|
data := make(map[datatable.TransactionDataTableColumn]string, len(beancountTransactionSupportedColumns))
|
||||||
|
|
||||||
|
if beancountEntry.date == "" {
|
||||||
|
return nil, errs.ErrMissingTransactionTime
|
||||||
|
}
|
||||||
|
|
||||||
|
// Beancount supports the international ISO 8601 standard format for dates, with dashes or the same ordering with slashes
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIME] = strings.ReplaceAll(beancountEntry.date, "/", "-") + " 00:00:00"
|
||||||
|
|
||||||
|
if len(beancountEntry.postings) == 2 {
|
||||||
|
splitData1 := beancountEntry.postings[0]
|
||||||
|
splitData2 := beancountEntry.postings[1]
|
||||||
|
|
||||||
|
account1 := t.dataTable.accountMap[splitData1.account]
|
||||||
|
account2 := t.dataTable.accountMap[splitData2.account]
|
||||||
|
|
||||||
|
if account1 == nil || account2 == nil {
|
||||||
|
return nil, errs.ErrMissingAccountData
|
||||||
|
}
|
||||||
|
|
||||||
|
amount1, err := utils.ParseAmount(splitData1.amount)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[beancount_transaction_data_table.parseTransaction] cannot parse amount \"%s\", because %s", splitData1.amount, err.Error())
|
||||||
|
return nil, errs.ErrAmountInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
amount2, err := utils.ParseAmount(splitData2.amount)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[beancount_transaction_data_table.parseTransaction] cannot parse amount \"%s\", because %s", splitData2.amount, err.Error())
|
||||||
|
return nil, errs.ErrAmountInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
if ((account1.accountType == beancountEquityAccountType || account1.accountType == beancountIncomeAccountType) && (account2.accountType == beancountAssetsAccountType || account2.accountType == beancountLiabilitiesAccountType)) ||
|
||||||
|
((account2.accountType == beancountEquityAccountType || account2.accountType == beancountIncomeAccountType) && (account1.accountType == beancountAssetsAccountType || account1.accountType == beancountLiabilitiesAccountType)) { // income
|
||||||
|
fromAccount := account1
|
||||||
|
toAccount := account2
|
||||||
|
toCurrency := splitData2.commodity
|
||||||
|
toAmount := amount2
|
||||||
|
|
||||||
|
if (account2.accountType == beancountEquityAccountType || account2.accountType == beancountIncomeAccountType) && (account1.accountType == beancountAssetsAccountType || account1.accountType == beancountLiabilitiesAccountType) {
|
||||||
|
fromAccount = account2
|
||||||
|
toAccount = account1
|
||||||
|
toCurrency = splitData1.commodity
|
||||||
|
toAmount = amount1
|
||||||
|
}
|
||||||
|
|
||||||
|
if fromAccount.isOpeningBalanceEquityAccount() {
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = utils.IntToString(int(models.TRANSACTION_TYPE_MODIFY_BALANCE))
|
||||||
|
} else {
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = utils.IntToString(int(models.TRANSACTION_TYPE_INCOME))
|
||||||
|
}
|
||||||
|
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_SUB_CATEGORY] = fromAccount.name
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = toAccount.name
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_CURRENCY] = toCurrency
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(toAmount)
|
||||||
|
} else if account1.accountType == beancountExpensesAccountType && (account2.accountType == beancountAssetsAccountType || account2.accountType == beancountLiabilitiesAccountType) ||
|
||||||
|
(account2.accountType == beancountExpensesAccountType && (account1.accountType == beancountAssetsAccountType || account1.accountType == beancountLiabilitiesAccountType)) { // expense
|
||||||
|
fromAccount := account1
|
||||||
|
fromCurrency := splitData1.commodity
|
||||||
|
fromAmount := amount1
|
||||||
|
toAccount := account2
|
||||||
|
|
||||||
|
if account1.accountType == beancountExpensesAccountType && (account2.accountType == beancountAssetsAccountType || account2.accountType == beancountLiabilitiesAccountType) {
|
||||||
|
fromAccount = account2
|
||||||
|
fromCurrency = splitData2.commodity
|
||||||
|
fromAmount = amount2
|
||||||
|
toAccount = account1
|
||||||
|
}
|
||||||
|
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = utils.IntToString(int(models.TRANSACTION_TYPE_EXPENSE))
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_SUB_CATEGORY] = toAccount.name
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = fromAccount.name
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_CURRENCY] = fromCurrency
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(-fromAmount)
|
||||||
|
} else if (account1.accountType == beancountAssetsAccountType || account1.accountType == beancountLiabilitiesAccountType) &&
|
||||||
|
(account2.accountType == beancountAssetsAccountType || account2.accountType == beancountLiabilitiesAccountType) {
|
||||||
|
var fromAccount, toAccount *beancountAccount
|
||||||
|
var fromAmount, toAmount int64
|
||||||
|
var fromCurrency, toCurrency string
|
||||||
|
|
||||||
|
if amount1 < 0 {
|
||||||
|
fromAccount = account1
|
||||||
|
fromCurrency = splitData1.commodity
|
||||||
|
fromAmount = -amount1
|
||||||
|
toAccount = account2
|
||||||
|
toCurrency = splitData2.commodity
|
||||||
|
toAmount = amount2
|
||||||
|
} else if amount2 < 0 {
|
||||||
|
fromAccount = account2
|
||||||
|
fromCurrency = splitData2.commodity
|
||||||
|
fromAmount = -amount2
|
||||||
|
toAccount = account1
|
||||||
|
toCurrency = splitData1.commodity
|
||||||
|
toAmount = amount1
|
||||||
|
} else {
|
||||||
|
log.Errorf(ctx, "[beancount_transaction_data_table.parseTransaction] cannot parse transfer transaction, because unexcepted account amounts \"%d\" and \"%d\"", amount1, amount2)
|
||||||
|
return nil, errs.ErrInvalidBeancountFile
|
||||||
|
}
|
||||||
|
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = utils.IntToString(int(models.TRANSACTION_TYPE_TRANSFER))
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_SUB_CATEGORY] = ""
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = fromAccount.name
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_CURRENCY] = fromCurrency
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(fromAmount)
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME] = toAccount.name
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_CURRENCY] = toCurrency
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_RELATED_AMOUNT] = utils.FormatAmount(toAmount)
|
||||||
|
} else {
|
||||||
|
log.Errorf(ctx, "[beancount_transaction_data_table.parseTransaction] cannot parse transaction, because unexcepted account types \"%d\" and \"%d\"", account1.accountType, account2.accountType)
|
||||||
|
return nil, errs.ErrThereAreNotSupportedTransactionType
|
||||||
|
}
|
||||||
|
} else if len(beancountEntry.postings) <= 1 {
|
||||||
|
log.Errorf(ctx, "[beancount_transaction_data_table.parseTransaction] cannot parse transaction, because postings count is %d", len(beancountEntry.postings))
|
||||||
|
return nil, errs.ErrInvalidBeancountFile
|
||||||
|
} else {
|
||||||
|
log.Errorf(ctx, "[beancount_transaction_data_table.parseTransaction] cannot parse split transaction, because postings count is %d", len(beancountEntry.postings))
|
||||||
|
return nil, errs.ErrNotSupportedSplitTransactions
|
||||||
|
}
|
||||||
|
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_TAGS] = strings.Join(beancountEntry.tags, BEANCOUNT_TRANSACTION_TAG_SEPARATOR)
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_DESCRIPTION] = beancountEntry.narration
|
||||||
|
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func createNewBeancountTransactionDataTable(beancountData *beancountData) (*beancountTransactionDataTable, error) {
|
||||||
|
if beancountData == nil {
|
||||||
|
return nil, errs.ErrNotFoundTransactionDataInFile
|
||||||
|
}
|
||||||
|
|
||||||
|
return &beancountTransactionDataTable{
|
||||||
|
allData: beancountData.transactions,
|
||||||
|
accountMap: beancountData.accounts,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
@@ -0,0 +1,166 @@
|
|||||||
|
package converter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
// DataTableTransactionDataExporter defines the structure of plain text data table exporter for transaction data
type DataTableTransactionDataExporter struct {
	transactionTypeMapping  map[models.TransactionType]string // maps a transaction type to its exported display name
	geoLocationSeparator    string                            // separator between longitude and latitude in the exported location
	transactionTagSeparator string                            // separator between tag names in the exported tags column
}
|
||||||
|
|
||||||
|
// BuildExportedContent writes the exported transaction data to the data table builder.
// Transfer-in rows are skipped because each transfer is exported once from its
// transfer-out side, which carries the related account columns.
func (c *DataTableTransactionDataExporter) BuildExportedContent(ctx core.Context, dataTableBuilder datatable.TransactionDataTableBuilder, uid int64, transactions []*models.Transaction, accountMap map[int64]*models.Account, categoryMap map[int64]*models.TransactionCategory, tagMap map[int64]*models.TransactionTag, allTagIndexes map[int64][]int64) error {
	for i := 0; i < len(transactions); i++ {
		transaction := transactions[i]

		// A transfer produces two db rows; export only the transfer-out side.
		if transaction.Type == models.TRANSACTION_DB_TYPE_TRANSFER_IN {
			continue
		}

		dataRowMap := make(map[datatable.TransactionDataTableColumn]string, 15)
		// Render the transaction time in the transaction's own timezone (offset stored in minutes).
		transactionTimeZone := time.FixedZone("Transaction Timezone", int(transaction.TimezoneUtcOffset)*60)

		dataRowMap[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIME] = utils.FormatUnixTimeToLongDateTime(utils.GetUnixTimeFromTransactionTime(transaction.TransactionTime), transactionTimeZone)
		dataRowMap[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIMEZONE] = utils.FormatTimezoneOffset(transactionTimeZone)
		dataRowMap[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = dataTableBuilder.ReplaceDelimiters(c.getDisplayTransactionTypeName(transaction.Type))
		dataRowMap[datatable.TRANSACTION_DATA_TABLE_CATEGORY] = c.getExportedTransactionCategoryName(dataTableBuilder, transaction.CategoryId, categoryMap)
		dataRowMap[datatable.TRANSACTION_DATA_TABLE_SUB_CATEGORY] = c.getExportedTransactionSubCategoryName(dataTableBuilder, transaction.CategoryId, categoryMap)
		dataRowMap[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = c.getExportedAccountName(dataTableBuilder, transaction.AccountId, accountMap)
		dataRowMap[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_CURRENCY] = c.getAccountCurrency(dataTableBuilder, transaction.AccountId, accountMap)
		dataRowMap[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(transaction.Amount)

		// Related-account columns only exist for the transfer-out side of a transfer.
		if transaction.Type == models.TRANSACTION_DB_TYPE_TRANSFER_OUT {
			dataRowMap[datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME] = c.getExportedAccountName(dataTableBuilder, transaction.RelatedAccountId, accountMap)
			dataRowMap[datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_CURRENCY] = c.getAccountCurrency(dataTableBuilder, transaction.RelatedAccountId, accountMap)
			dataRowMap[datatable.TRANSACTION_DATA_TABLE_RELATED_AMOUNT] = utils.FormatAmount(transaction.RelatedAccountAmount)
		}

		dataRowMap[datatable.TRANSACTION_DATA_TABLE_GEOGRAPHIC_LOCATION] = c.getExportedGeographicLocation(transaction)
		dataRowMap[datatable.TRANSACTION_DATA_TABLE_TAGS] = c.getExportedTags(dataTableBuilder, transaction.TransactionId, allTagIndexes, tagMap)
		dataRowMap[datatable.TRANSACTION_DATA_TABLE_DESCRIPTION] = dataTableBuilder.ReplaceDelimiters(transaction.Comment)

		dataTableBuilder.AppendTransaction(dataRowMap)
	}

	return nil
}
|
||||||
|
|
||||||
|
func (c *DataTableTransactionDataExporter) getDisplayTransactionTypeName(transactionDbType models.TransactionDbType) string {
|
||||||
|
transactionType, err := transactionDbType.ToTransactionType()
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
transactionTypeName, exists := c.transactionTypeMapping[transactionType]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
return transactionTypeName
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *DataTableTransactionDataExporter) getExportedTransactionCategoryName(dataTableBuilder datatable.TransactionDataTableBuilder, categoryId int64, categoryMap map[int64]*models.TransactionCategory) string {
|
||||||
|
category, exists := categoryMap[categoryId]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if category.ParentCategoryId == models.LevelOneTransactionCategoryParentId {
|
||||||
|
return dataTableBuilder.ReplaceDelimiters(category.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
parentCategory, exists := categoryMap[category.ParentCategoryId]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
return dataTableBuilder.ReplaceDelimiters(parentCategory.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *DataTableTransactionDataExporter) getExportedTransactionSubCategoryName(dataTableBuilder datatable.TransactionDataTableBuilder, categoryId int64, categoryMap map[int64]*models.TransactionCategory) string {
|
||||||
|
category, exists := categoryMap[categoryId]
|
||||||
|
|
||||||
|
if exists {
|
||||||
|
return dataTableBuilder.ReplaceDelimiters(category.Name)
|
||||||
|
} else {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *DataTableTransactionDataExporter) getExportedAccountName(dataTableBuilder datatable.TransactionDataTableBuilder, accountId int64, accountMap map[int64]*models.Account) string {
|
||||||
|
account, exists := accountMap[accountId]
|
||||||
|
|
||||||
|
if exists {
|
||||||
|
return dataTableBuilder.ReplaceDelimiters(account.Name)
|
||||||
|
} else {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *DataTableTransactionDataExporter) getAccountCurrency(dataTableBuilder datatable.TransactionDataTableBuilder, accountId int64, accountMap map[int64]*models.Account) string {
|
||||||
|
account, exists := accountMap[accountId]
|
||||||
|
|
||||||
|
if exists {
|
||||||
|
return dataTableBuilder.ReplaceDelimiters(account.Currency)
|
||||||
|
} else {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *DataTableTransactionDataExporter) getExportedGeographicLocation(transaction *models.Transaction) string {
|
||||||
|
if transaction.GeoLongitude != 0 || transaction.GeoLatitude != 0 {
|
||||||
|
return fmt.Sprintf("%f%s%f", transaction.GeoLongitude, c.geoLocationSeparator, transaction.GeoLatitude)
|
||||||
|
}
|
||||||
|
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *DataTableTransactionDataExporter) getExportedTags(dataTableBuilder datatable.TransactionDataTableBuilder, transactionId int64, allTagIndexes map[int64][]int64, tagMap map[int64]*models.TransactionTag) string {
|
||||||
|
tagIndexes, exists := allTagIndexes[transactionId]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
var ret strings.Builder
|
||||||
|
|
||||||
|
for i := 0; i < len(tagIndexes); i++ {
|
||||||
|
tagIndex := tagIndexes[i]
|
||||||
|
tag, exists := tagMap[tagIndex]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if ret.Len() > 0 {
|
||||||
|
ret.WriteString(c.transactionTagSeparator)
|
||||||
|
}
|
||||||
|
|
||||||
|
ret.WriteString(strings.Replace(tag.Name, c.transactionTagSeparator, " ", -1))
|
||||||
|
}
|
||||||
|
|
||||||
|
return dataTableBuilder.ReplaceDelimiters(ret.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateNewExporter returns a new data table transaction data exporter according to the specified arguments.
//
// transactionTypeMapping maps each transaction type to its exported display name;
// geoLocationSeparator separates longitude and latitude in the location column;
// transactionTagSeparator separates tag names in the tags column.
func CreateNewExporter(transactionTypeMapping map[models.TransactionType]string, geoLocationSeparator string, transactionTagSeparator string) *DataTableTransactionDataExporter {
	return &DataTableTransactionDataExporter{
		transactionTypeMapping:  transactionTypeMapping,
		geoLocationSeparator:    geoLocationSeparator,
		transactionTagSeparator: transactionTagSeparator,
	}
}
|
||||||
@@ -0,0 +1,503 @@
|
|||||||
|
package converter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/log"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/validators"
|
||||||
|
)
|
||||||
|
|
||||||
|
// DataTableTransactionDataImporter defines the structure of plain text data table importer for transaction data
type DataTableTransactionDataImporter struct {
	transactionTypeMapping  map[string]models.TransactionType // maps an imported type name to the transaction type
	geoLocationSeparator    string                            // separator between longitude and latitude in the imported location column
	transactionTagSeparator string                            // separator between tag names in the imported tags column
}
|
||||||
|
|
||||||
|
// ParseImportedData returns the imported transaction data
|
||||||
|
func (c *DataTableTransactionDataImporter) ParseImportedData(ctx core.Context, user *models.User, dataTable datatable.TransactionDataTable, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
||||||
|
if dataTable.TransactionRowCount() < 1 {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] cannot parse import data for user \"uid:%d\", because data table row count is less 1", user.Uid)
|
||||||
|
return nil, nil, nil, nil, nil, nil, errs.ErrNotFoundTransactionDataInFile
|
||||||
|
}
|
||||||
|
|
||||||
|
nameDbTypeMap, err := c.buildTransactionTypeNameDbTypeMap()
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, nil, nil, nil, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if !dataTable.HasColumn(datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIME) ||
|
||||||
|
!dataTable.HasColumn(datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE) ||
|
||||||
|
!dataTable.HasColumn(datatable.TRANSACTION_DATA_TABLE_SUB_CATEGORY) ||
|
||||||
|
!dataTable.HasColumn(datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME) ||
|
||||||
|
!dataTable.HasColumn(datatable.TRANSACTION_DATA_TABLE_AMOUNT) ||
|
||||||
|
!dataTable.HasColumn(datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME) {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] cannot parse import data for user \"uid:%d\", because missing essential columns in header row", user.Uid)
|
||||||
|
return nil, nil, nil, nil, nil, nil, errs.ErrMissingRequiredFieldInHeaderRow
|
||||||
|
}
|
||||||
|
|
||||||
|
if accountMap == nil {
|
||||||
|
accountMap = make(map[string]*models.Account)
|
||||||
|
}
|
||||||
|
|
||||||
|
if expenseCategoryMap == nil {
|
||||||
|
expenseCategoryMap = make(map[string]map[string]*models.TransactionCategory)
|
||||||
|
}
|
||||||
|
|
||||||
|
if incomeCategoryMap == nil {
|
||||||
|
incomeCategoryMap = make(map[string]map[string]*models.TransactionCategory)
|
||||||
|
}
|
||||||
|
|
||||||
|
if transferCategoryMap == nil {
|
||||||
|
transferCategoryMap = make(map[string]map[string]*models.TransactionCategory)
|
||||||
|
}
|
||||||
|
|
||||||
|
if tagMap == nil {
|
||||||
|
tagMap = make(map[string]*models.TransactionTag)
|
||||||
|
}
|
||||||
|
|
||||||
|
allNewTransactions := make(models.ImportedTransactionSlice, 0, dataTable.TransactionRowCount())
|
||||||
|
allNewAccounts := make([]*models.Account, 0)
|
||||||
|
allNewSubExpenseCategories := make([]*models.TransactionCategory, 0)
|
||||||
|
allNewSubIncomeCategories := make([]*models.TransactionCategory, 0)
|
||||||
|
allNewSubTransferCategories := make([]*models.TransactionCategory, 0)
|
||||||
|
allNewTags := make([]*models.TransactionTag, 0)
|
||||||
|
|
||||||
|
dataRowIterator := dataTable.TransactionRowIterator()
|
||||||
|
dataRowIndex := 0
|
||||||
|
|
||||||
|
for dataRowIterator.HasNext() {
|
||||||
|
dataRowIndex++
|
||||||
|
dataRow, err := dataRowIterator.Next(ctx, user)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] cannot parse data row \"index:%d\" for user \"uid:%d\", because %s", dataRowIndex, user.Uid, err.Error())
|
||||||
|
return nil, nil, nil, nil, nil, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if !dataRow.IsValid() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
timezoneOffset := defaultTimezoneOffset
|
||||||
|
|
||||||
|
if dataTable.HasColumn(datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIMEZONE) {
|
||||||
|
transactionTimezone, err := utils.ParseFromTimezoneOffset(dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIMEZONE))
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] cannot parse time zone \"%s\" in data row \"index:%d\" for user \"uid:%d\", because %s", dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIMEZONE), dataRowIndex, user.Uid, err.Error())
|
||||||
|
return nil, nil, nil, nil, nil, nil, errs.ErrTransactionTimeZoneInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
timezoneOffset = utils.GetTimezoneOffsetMinutes(transactionTimezone)
|
||||||
|
}
|
||||||
|
|
||||||
|
transactionTime, err := utils.ParseFromLongDateTime(dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIME), timezoneOffset)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] cannot parse time \"%s\" in data row \"index:%d\" for user \"uid:%d\", because %s", dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIME), dataRowIndex, user.Uid, err.Error())
|
||||||
|
return nil, nil, nil, nil, nil, nil, errs.ErrTransactionTimeInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
transactionDbType, err := c.getTransactionDbType(nameDbTypeMap, dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE))
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] cannot parse transaction type \"%s\" in data row \"index:%d\" for user \"uid:%d\", because %s", dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE), dataRowIndex, user.Uid, err.Error())
|
||||||
|
return nil, nil, nil, nil, nil, nil, errs.Or(err, errs.ErrTransactionTypeInvalid)
|
||||||
|
}
|
||||||
|
|
||||||
|
categoryId := int64(0)
|
||||||
|
categoryName := ""
|
||||||
|
subCategoryName := ""
|
||||||
|
|
||||||
|
if transactionDbType != models.TRANSACTION_DB_TYPE_MODIFY_BALANCE {
|
||||||
|
transactionCategoryType, err := c.getTransactionCategoryType(transactionDbType)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] cannot parse transaction category type in data row \"index:%d\" for user \"uid:%d\", because %s", dataRowIndex, user.Uid, err.Error())
|
||||||
|
return nil, nil, nil, nil, nil, nil, errs.Or(err, errs.ErrTransactionTypeInvalid)
|
||||||
|
}
|
||||||
|
|
||||||
|
categoryName = dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_CATEGORY)
|
||||||
|
subCategoryName = dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_SUB_CATEGORY)
|
||||||
|
|
||||||
|
if transactionDbType == models.TRANSACTION_DB_TYPE_EXPENSE {
|
||||||
|
subCategory, exists := c.getTransactionCategory(expenseCategoryMap, categoryName, subCategoryName)
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
subCategory = c.createNewTransactionCategoryModel(user.Uid, subCategoryName, transactionCategoryType)
|
||||||
|
allNewSubExpenseCategories = append(allNewSubExpenseCategories, subCategory)
|
||||||
|
|
||||||
|
if _, exists = expenseCategoryMap[subCategoryName]; !exists {
|
||||||
|
expenseCategoryMap[subCategoryName] = make(map[string]*models.TransactionCategory)
|
||||||
|
}
|
||||||
|
|
||||||
|
expenseCategoryMap[subCategoryName][categoryName] = subCategory
|
||||||
|
}
|
||||||
|
|
||||||
|
categoryId = subCategory.CategoryId
|
||||||
|
} else if transactionDbType == models.TRANSACTION_DB_TYPE_INCOME {
|
||||||
|
subCategory, exists := c.getTransactionCategory(incomeCategoryMap, categoryName, subCategoryName)
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
subCategory = c.createNewTransactionCategoryModel(user.Uid, subCategoryName, transactionCategoryType)
|
||||||
|
allNewSubIncomeCategories = append(allNewSubIncomeCategories, subCategory)
|
||||||
|
|
||||||
|
if _, exists = incomeCategoryMap[subCategoryName]; !exists {
|
||||||
|
incomeCategoryMap[subCategoryName] = make(map[string]*models.TransactionCategory)
|
||||||
|
}
|
||||||
|
|
||||||
|
incomeCategoryMap[subCategoryName][categoryName] = subCategory
|
||||||
|
}
|
||||||
|
|
||||||
|
categoryId = subCategory.CategoryId
|
||||||
|
} else if transactionDbType == models.TRANSACTION_DB_TYPE_TRANSFER_OUT {
|
||||||
|
subCategory, exists := c.getTransactionCategory(transferCategoryMap, categoryName, subCategoryName)
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
subCategory = c.createNewTransactionCategoryModel(user.Uid, subCategoryName, transactionCategoryType)
|
||||||
|
allNewSubTransferCategories = append(allNewSubTransferCategories, subCategory)
|
||||||
|
|
||||||
|
if _, exists = transferCategoryMap[subCategoryName]; !exists {
|
||||||
|
transferCategoryMap[subCategoryName] = make(map[string]*models.TransactionCategory)
|
||||||
|
}
|
||||||
|
|
||||||
|
transferCategoryMap[subCategoryName][categoryName] = subCategory
|
||||||
|
}
|
||||||
|
|
||||||
|
categoryId = subCategory.CategoryId
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
accountName := dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME)
|
||||||
|
accountCurrency := user.DefaultCurrency
|
||||||
|
|
||||||
|
if dataTable.HasColumn(datatable.TRANSACTION_DATA_TABLE_ACCOUNT_CURRENCY) {
|
||||||
|
accountCurrency = dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_ACCOUNT_CURRENCY)
|
||||||
|
|
||||||
|
if _, ok := validators.AllCurrencyNames[accountCurrency]; !ok {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] account currency \"%s\" is not supported in data row \"index:%d\" for user \"uid:%d\"", accountCurrency, dataRowIndex, user.Uid)
|
||||||
|
return nil, nil, nil, nil, nil, nil, errs.ErrAccountCurrencyInvalid
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
account, exists := accountMap[accountName]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
account = c.createNewAccountModel(user.Uid, accountName, accountCurrency)
|
||||||
|
allNewAccounts = append(allNewAccounts, account)
|
||||||
|
accountMap[accountName] = account
|
||||||
|
}
|
||||||
|
|
||||||
|
if dataTable.HasColumn(datatable.TRANSACTION_DATA_TABLE_ACCOUNT_CURRENCY) {
|
||||||
|
if account.Name != "" && account.Currency != accountCurrency {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] currency \"%s\" in data row \"index:%d\" not equals currency \"%s\" of the account for user \"uid:%d\"", accountCurrency, dataRowIndex, account.Currency, user.Uid)
|
||||||
|
return nil, nil, nil, nil, nil, nil, errs.ErrAccountCurrencyInvalid
|
||||||
|
}
|
||||||
|
} else if exists {
|
||||||
|
accountCurrency = account.Currency
|
||||||
|
}
|
||||||
|
|
||||||
|
amount, err := utils.ParseAmount(dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_AMOUNT))
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] cannot parse acmount \"%s\" in data row \"index:%d\" for user \"uid:%d\", because %s", dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_AMOUNT), dataRowIndex, user.Uid, err.Error())
|
||||||
|
return nil, nil, nil, nil, nil, nil, errs.ErrAmountInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
relatedAccountId := int64(0)
|
||||||
|
relatedAccountAmount := int64(0)
|
||||||
|
account2Name := ""
|
||||||
|
account2Currency := ""
|
||||||
|
|
||||||
|
if transactionDbType == models.TRANSACTION_DB_TYPE_TRANSFER_OUT {
|
||||||
|
account2Name = dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME)
|
||||||
|
account2Currency = user.DefaultCurrency
|
||||||
|
|
||||||
|
if dataTable.HasColumn(datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_CURRENCY) {
|
||||||
|
account2Currency = dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_CURRENCY)
|
||||||
|
|
||||||
|
if _, ok := validators.AllCurrencyNames[account2Currency]; !ok {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] account2 currency \"%s\" is not supported in data row \"index:%d\" for user \"uid:%d\"", account2Currency, dataRowIndex, user.Uid)
|
||||||
|
return nil, nil, nil, nil, nil, nil, errs.ErrAccountCurrencyInvalid
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
account2, exists := accountMap[account2Name]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
account2 = c.createNewAccountModel(user.Uid, account2Name, account2Currency)
|
||||||
|
allNewAccounts = append(allNewAccounts, account2)
|
||||||
|
accountMap[account2Name] = account2
|
||||||
|
}
|
||||||
|
|
||||||
|
if dataTable.HasColumn(datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_CURRENCY) {
|
||||||
|
if account2.Name != "" && account2.Currency != account2Currency {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] currency \"%s\" in data row \"index:%d\" not equals currency \"%s\" of the account2 for user \"uid:%d\"", account2Currency, dataRowIndex, account2.Currency, user.Uid)
|
||||||
|
return nil, nil, nil, nil, nil, nil, errs.ErrAccountCurrencyInvalid
|
||||||
|
}
|
||||||
|
} else if exists {
|
||||||
|
account2Currency = account2.Currency
|
||||||
|
}
|
||||||
|
|
||||||
|
relatedAccountId = account2.AccountId
|
||||||
|
|
||||||
|
if dataTable.HasColumn(datatable.TRANSACTION_DATA_TABLE_RELATED_AMOUNT) {
|
||||||
|
relatedAccountAmount, err = utils.ParseAmount(dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_RELATED_AMOUNT))
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] cannot parse acmount2 \"%s\" in data row \"index:%d\" for user \"uid:%d\", because %s", dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_RELATED_AMOUNT), dataRowIndex, user.Uid, err.Error())
|
||||||
|
return nil, nil, nil, nil, nil, nil, errs.ErrAmountInvalid
|
||||||
|
}
|
||||||
|
} else if transactionDbType == models.TRANSACTION_DB_TYPE_TRANSFER_OUT {
|
||||||
|
relatedAccountAmount = amount
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
geoLongitude := float64(0)
|
||||||
|
geoLatitude := float64(0)
|
||||||
|
|
||||||
|
if dataTable.HasColumn(datatable.TRANSACTION_DATA_TABLE_GEOGRAPHIC_LOCATION) && c.geoLocationSeparator != "" {
|
||||||
|
geoLocationItems := strings.Split(dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_GEOGRAPHIC_LOCATION), c.geoLocationSeparator)
|
||||||
|
|
||||||
|
if len(geoLocationItems) == 2 {
|
||||||
|
geoLongitude, err = utils.StringToFloat64(geoLocationItems[0])
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] cannot parse geographic location \"%s\" in data row \"index:%d\" for user \"uid:%d\", because %s", dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_GEOGRAPHIC_LOCATION), dataRowIndex, user.Uid, err.Error())
|
||||||
|
return nil, nil, nil, nil, nil, nil, errs.ErrGeographicLocationInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
geoLatitude, err = utils.StringToFloat64(geoLocationItems[1])
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] cannot parse geographic location \"%s\" in data row \"index:%d\" for user \"uid:%d\", because %s", dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_GEOGRAPHIC_LOCATION), dataRowIndex, user.Uid, err.Error())
|
||||||
|
return nil, nil, nil, nil, nil, nil, errs.ErrGeographicLocationInvalid
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var tagIds []string
|
||||||
|
var tagNames []string
|
||||||
|
|
||||||
|
if dataTable.HasColumn(datatable.TRANSACTION_DATA_TABLE_TAGS) {
|
||||||
|
var tagNameItems []string
|
||||||
|
|
||||||
|
if c.transactionTagSeparator != "" {
|
||||||
|
tagNameItems = strings.Split(dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_TAGS), c.transactionTagSeparator)
|
||||||
|
} else {
|
||||||
|
tagNameItems = append(tagNameItems, dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_TAGS))
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := 0; i < len(tagNameItems); i++ {
|
||||||
|
tagName := tagNameItems[i]
|
||||||
|
|
||||||
|
if tagName == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
tag, exists := tagMap[tagName]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
tag = c.createNewTransactionTagModel(user.Uid, tagName)
|
||||||
|
allNewTags = append(allNewTags, tag)
|
||||||
|
tagMap[tagName] = tag
|
||||||
|
}
|
||||||
|
|
||||||
|
if tag != nil {
|
||||||
|
tagIds = append(tagIds, utils.Int64ToString(tag.TagId))
|
||||||
|
}
|
||||||
|
|
||||||
|
tagNames = append(tagNames, tagName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
description := ""
|
||||||
|
|
||||||
|
if dataTable.HasColumn(datatable.TRANSACTION_DATA_TABLE_DESCRIPTION) {
|
||||||
|
description = dataRow.GetData(datatable.TRANSACTION_DATA_TABLE_DESCRIPTION)
|
||||||
|
}
|
||||||
|
|
||||||
|
transaction := &models.ImportTransaction{
|
||||||
|
Transaction: &models.Transaction{
|
||||||
|
Uid: user.Uid,
|
||||||
|
Type: transactionDbType,
|
||||||
|
CategoryId: categoryId,
|
||||||
|
TransactionTime: utils.GetMinTransactionTimeFromUnixTime(transactionTime.Unix()),
|
||||||
|
TimezoneUtcOffset: timezoneOffset,
|
||||||
|
AccountId: account.AccountId,
|
||||||
|
Amount: amount,
|
||||||
|
HideAmount: false,
|
||||||
|
RelatedAccountId: relatedAccountId,
|
||||||
|
RelatedAccountAmount: relatedAccountAmount,
|
||||||
|
Comment: description,
|
||||||
|
GeoLongitude: geoLongitude,
|
||||||
|
GeoLatitude: geoLatitude,
|
||||||
|
CreatedIp: "127.0.0.1",
|
||||||
|
},
|
||||||
|
TagIds: tagIds,
|
||||||
|
OriginalCategoryName: subCategoryName,
|
||||||
|
OriginalSourceAccountName: accountName,
|
||||||
|
OriginalSourceAccountCurrency: accountCurrency,
|
||||||
|
OriginalDestinationAccountName: account2Name,
|
||||||
|
OriginalDestinationAccountCurrency: account2Currency,
|
||||||
|
OriginalTagNames: tagNames,
|
||||||
|
}
|
||||||
|
|
||||||
|
allNewTransactions = append(allNewTransactions, transaction)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(allNewTransactions) < 1 {
|
||||||
|
log.Errorf(ctx, "[data_table_transaction_data_exporter.ParseImportedData] no transaction data parsed for \"uid:%d\"", user.Uid)
|
||||||
|
return nil, nil, nil, nil, nil, nil, errs.ErrNotFoundTransactionDataInFile
|
||||||
|
}
|
||||||
|
|
||||||
|
sort.Sort(allNewTransactions)
|
||||||
|
|
||||||
|
return allNewTransactions, allNewAccounts, allNewSubExpenseCategories, allNewSubIncomeCategories, allNewSubTransferCategories, allNewTags, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *DataTableTransactionDataImporter) buildTransactionTypeNameDbTypeMap() (map[string]models.TransactionDbType, error) {
|
||||||
|
if c.transactionTypeMapping == nil {
|
||||||
|
return nil, errs.ErrTransactionTypeInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
nameDbTypeMap := make(map[string]models.TransactionDbType, len(c.transactionTypeMapping))
|
||||||
|
|
||||||
|
for name, transactionType := range c.transactionTypeMapping {
|
||||||
|
if transactionType == models.TRANSACTION_TYPE_MODIFY_BALANCE {
|
||||||
|
nameDbTypeMap[name] = models.TRANSACTION_DB_TYPE_MODIFY_BALANCE
|
||||||
|
} else if transactionType == models.TRANSACTION_TYPE_INCOME {
|
||||||
|
nameDbTypeMap[name] = models.TRANSACTION_DB_TYPE_INCOME
|
||||||
|
} else if transactionType == models.TRANSACTION_TYPE_EXPENSE {
|
||||||
|
nameDbTypeMap[name] = models.TRANSACTION_DB_TYPE_EXPENSE
|
||||||
|
} else if transactionType == models.TRANSACTION_TYPE_TRANSFER {
|
||||||
|
nameDbTypeMap[name] = models.TRANSACTION_DB_TYPE_TRANSFER_OUT
|
||||||
|
} else {
|
||||||
|
return nil, errs.ErrTransactionTypeInvalid
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nameDbTypeMap, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *DataTableTransactionDataImporter) getTransactionDbType(nameDbTypeMap map[string]models.TransactionDbType, transactionTypeName string) (models.TransactionDbType, error) {
|
||||||
|
transactionType, exists := nameDbTypeMap[transactionTypeName]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
return 0, errs.ErrTransactionTypeInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
return transactionType, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *DataTableTransactionDataImporter) getTransactionCategoryType(transactionType models.TransactionDbType) (models.TransactionCategoryType, error) {
|
||||||
|
if transactionType == models.TRANSACTION_DB_TYPE_INCOME {
|
||||||
|
return models.CATEGORY_TYPE_INCOME, nil
|
||||||
|
} else if transactionType == models.TRANSACTION_DB_TYPE_EXPENSE {
|
||||||
|
return models.CATEGORY_TYPE_EXPENSE, nil
|
||||||
|
} else if transactionType == models.TRANSACTION_DB_TYPE_TRANSFER_OUT {
|
||||||
|
return models.CATEGORY_TYPE_TRANSFER, nil
|
||||||
|
} else {
|
||||||
|
return 0, errs.ErrTransactionTypeInvalid
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *DataTableTransactionDataImporter) getTransactionCategory(categories map[string]map[string]*models.TransactionCategory, categoryName string, subCategoryName string) (*models.TransactionCategory, bool) {
|
||||||
|
if len(categories) < 1 {
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
|
||||||
|
subCategories, exists := categories[subCategoryName]
|
||||||
|
|
||||||
|
if !exists || len(subCategories) < 1 {
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
|
||||||
|
if categoryName == "" {
|
||||||
|
for _, subCategory := range subCategories {
|
||||||
|
if subCategory != nil {
|
||||||
|
return subCategory, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
subCategory, exists := subCategories[categoryName]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
for _, subCategory := range subCategories {
|
||||||
|
if subCategory != nil {
|
||||||
|
return subCategory, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return subCategory, exists
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *DataTableTransactionDataImporter) createNewAccountModel(uid int64, accountName string, currency string) *models.Account {
|
||||||
|
return &models.Account{
|
||||||
|
Uid: uid,
|
||||||
|
Name: accountName,
|
||||||
|
Currency: currency,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *DataTableTransactionDataImporter) createNewTransactionCategoryModel(uid int64, categoryName string, transactionCategoryType models.TransactionCategoryType) *models.TransactionCategory {
|
||||||
|
return &models.TransactionCategory{
|
||||||
|
Uid: uid,
|
||||||
|
Name: categoryName,
|
||||||
|
Type: transactionCategoryType,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *DataTableTransactionDataImporter) createNewTransactionTagModel(uid int64, tagName string) *models.TransactionTag {
|
||||||
|
return &models.TransactionTag{
|
||||||
|
Uid: uid,
|
||||||
|
Name: tagName,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateNewImporterWithTypeNameMapping returns a new data table transaction data importer according to the specified arguments
|
||||||
|
func CreateNewImporterWithTypeNameMapping(transactionTypeMapping map[models.TransactionType]string, geoLocationSeparator string, transactionTagSeparator string) *DataTableTransactionDataImporter {
|
||||||
|
return &DataTableTransactionDataImporter{
|
||||||
|
transactionTypeMapping: buildTransactionNameTypeMap(transactionTypeMapping),
|
||||||
|
geoLocationSeparator: geoLocationSeparator,
|
||||||
|
transactionTagSeparator: transactionTagSeparator,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateNewSimpleImporter returns a new data table transaction data importer according to the specified arguments
|
||||||
|
func CreateNewSimpleImporter(transactionTypeMapping map[string]models.TransactionType) *DataTableTransactionDataImporter {
|
||||||
|
return &DataTableTransactionDataImporter{
|
||||||
|
transactionTypeMapping: transactionTypeMapping,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateNewSimpleImporterWithTypeNameMapping returns a new data table transaction data importer according to the specified arguments
|
||||||
|
func CreateNewSimpleImporterWithTypeNameMapping(transactionTypeMapping map[models.TransactionType]string) *DataTableTransactionDataImporter {
|
||||||
|
return &DataTableTransactionDataImporter{
|
||||||
|
transactionTypeMapping: buildTransactionNameTypeMap(transactionTypeMapping),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildTransactionNameTypeMap(transactionTypeMapping map[models.TransactionType]string) map[string]models.TransactionType {
|
||||||
|
if transactionTypeMapping == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
typeNameMap := make(map[string]models.TransactionType, len(transactionTypeMapping))
|
||||||
|
|
||||||
|
for transactionType, name := range transactionTypeMapping {
|
||||||
|
typeNameMap[name] = transactionType
|
||||||
|
}
|
||||||
|
|
||||||
|
return typeNameMap
|
||||||
|
}
|
||||||
+2
-2
@@ -1,4 +1,4 @@
|
|||||||
package base
|
package converter
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
@@ -14,7 +14,7 @@ type TransactionDataExporter interface {
|
|||||||
// TransactionDataImporter defines the structure of transaction data importer
|
// TransactionDataImporter defines the structure of transaction data importer
|
||||||
type TransactionDataImporter interface {
|
type TransactionDataImporter interface {
|
||||||
// ParseImportedData returns the imported data
|
// ParseImportedData returns the imported data
|
||||||
ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error)
|
ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
// TransactionDataConverter defines the structure of transaction data converter
|
// TransactionDataConverter defines the structure of transaction data converter
|
||||||
@@ -1,572 +0,0 @@
|
|||||||
package datatable
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/log"
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/models"
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/validators"
|
|
||||||
)
|
|
||||||
|
|
||||||
// DataTableTransactionDataExporter defines the structure of plain text data table exporter for transaction data
|
|
||||||
type DataTableTransactionDataExporter struct {
|
|
||||||
transactionTypeMapping map[models.TransactionType]string
|
|
||||||
geoLocationSeparator string
|
|
||||||
transactionTagSeparator string
|
|
||||||
}
|
|
||||||
|
|
||||||
// DataTableTransactionDataImporter defines the structure of plain text data table importer for transaction data
|
|
||||||
type DataTableTransactionDataImporter struct {
|
|
||||||
transactionTypeMapping map[models.TransactionType]string
|
|
||||||
geoLocationSeparator string
|
|
||||||
transactionTagSeparator string
|
|
||||||
}
|
|
||||||
|
|
||||||
// CreateNewExporter returns a new data table transaction data exporter according to the specified arguments
|
|
||||||
func CreateNewExporter(transactionTypeMapping map[models.TransactionType]string, geoLocationSeparator string, transactionTagSeparator string) *DataTableTransactionDataExporter {
|
|
||||||
return &DataTableTransactionDataExporter{
|
|
||||||
transactionTypeMapping: transactionTypeMapping,
|
|
||||||
geoLocationSeparator: geoLocationSeparator,
|
|
||||||
transactionTagSeparator: transactionTagSeparator,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// CreateNewImporter returns a new data table transaction data importer according to the specified arguments
|
|
||||||
func CreateNewImporter(transactionTypeMapping map[models.TransactionType]string, geoLocationSeparator string, transactionTagSeparator string) *DataTableTransactionDataImporter {
|
|
||||||
return &DataTableTransactionDataImporter{
|
|
||||||
transactionTypeMapping: transactionTypeMapping,
|
|
||||||
geoLocationSeparator: geoLocationSeparator,
|
|
||||||
transactionTagSeparator: transactionTagSeparator,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// CreateNewSimpleImporter returns a new data table transaction data importer according to the specified arguments
|
|
||||||
func CreateNewSimpleImporter(transactionTypeMapping map[models.TransactionType]string) *DataTableTransactionDataImporter {
|
|
||||||
return &DataTableTransactionDataImporter{
|
|
||||||
transactionTypeMapping: transactionTypeMapping,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// BuildExportedContent writes the exported transaction data to the data table builder
|
|
||||||
func (c *DataTableTransactionDataExporter) BuildExportedContent(ctx core.Context, dataTableBuilder TransactionDataTableBuilder, uid int64, transactions []*models.Transaction, accountMap map[int64]*models.Account, categoryMap map[int64]*models.TransactionCategory, tagMap map[int64]*models.TransactionTag, allTagIndexes map[int64][]int64) error {
|
|
||||||
for i := 0; i < len(transactions); i++ {
|
|
||||||
transaction := transactions[i]
|
|
||||||
|
|
||||||
if transaction.Type == models.TRANSACTION_DB_TYPE_TRANSFER_IN {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
dataRowMap := make(map[TransactionDataTableColumn]string, 15)
|
|
||||||
transactionTimeZone := time.FixedZone("Transaction Timezone", int(transaction.TimezoneUtcOffset)*60)
|
|
||||||
|
|
||||||
dataRowMap[TRANSACTION_DATA_TABLE_TRANSACTION_TIME] = utils.FormatUnixTimeToLongDateTime(utils.GetUnixTimeFromTransactionTime(transaction.TransactionTime), transactionTimeZone)
|
|
||||||
dataRowMap[TRANSACTION_DATA_TABLE_TRANSACTION_TIMEZONE] = utils.FormatTimezoneOffset(transactionTimeZone)
|
|
||||||
dataRowMap[TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = dataTableBuilder.ReplaceDelimiters(c.getDisplayTransactionTypeName(transaction.Type))
|
|
||||||
dataRowMap[TRANSACTION_DATA_TABLE_CATEGORY] = c.getExportedTransactionCategoryName(dataTableBuilder, transaction.CategoryId, categoryMap)
|
|
||||||
dataRowMap[TRANSACTION_DATA_TABLE_SUB_CATEGORY] = c.getExportedTransactionSubCategoryName(dataTableBuilder, transaction.CategoryId, categoryMap)
|
|
||||||
dataRowMap[TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = c.getExportedAccountName(dataTableBuilder, transaction.AccountId, accountMap)
|
|
||||||
dataRowMap[TRANSACTION_DATA_TABLE_ACCOUNT_CURRENCY] = c.getAccountCurrency(dataTableBuilder, transaction.AccountId, accountMap)
|
|
||||||
dataRowMap[TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(transaction.Amount)
|
|
||||||
|
|
||||||
if transaction.Type == models.TRANSACTION_DB_TYPE_TRANSFER_OUT {
|
|
||||||
dataRowMap[TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME] = c.getExportedAccountName(dataTableBuilder, transaction.RelatedAccountId, accountMap)
|
|
||||||
dataRowMap[TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_CURRENCY] = c.getAccountCurrency(dataTableBuilder, transaction.RelatedAccountId, accountMap)
|
|
||||||
dataRowMap[TRANSACTION_DATA_TABLE_RELATED_AMOUNT] = utils.FormatAmount(transaction.RelatedAccountAmount)
|
|
||||||
}
|
|
||||||
|
|
||||||
dataRowMap[TRANSACTION_DATA_TABLE_GEOGRAPHIC_LOCATION] = c.getExportedGeographicLocation(transaction)
|
|
||||||
dataRowMap[TRANSACTION_DATA_TABLE_TAGS] = c.getExportedTags(dataTableBuilder, transaction.TransactionId, allTagIndexes, tagMap)
|
|
||||||
dataRowMap[TRANSACTION_DATA_TABLE_DESCRIPTION] = dataTableBuilder.ReplaceDelimiters(transaction.Comment)
|
|
||||||
|
|
||||||
dataTableBuilder.AppendTransaction(dataRowMap)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *DataTableTransactionDataExporter) getDisplayTransactionTypeName(transactionDbType models.TransactionDbType) string {
|
|
||||||
transactionType, err := transactionDbType.ToTransactionType()
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
transactionTypeName, exists := c.transactionTypeMapping[transactionType]
|
|
||||||
|
|
||||||
if !exists {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
return transactionTypeName
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *DataTableTransactionDataExporter) getExportedTransactionCategoryName(dataTableBuilder TransactionDataTableBuilder, categoryId int64, categoryMap map[int64]*models.TransactionCategory) string {
|
|
||||||
category, exists := categoryMap[categoryId]
|
|
||||||
|
|
||||||
if !exists {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
if category.ParentCategoryId == 0 {
|
|
||||||
return dataTableBuilder.ReplaceDelimiters(category.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
parentCategory, exists := categoryMap[category.ParentCategoryId]
|
|
||||||
|
|
||||||
if !exists {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
return dataTableBuilder.ReplaceDelimiters(parentCategory.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *DataTableTransactionDataExporter) getExportedTransactionSubCategoryName(dataTableBuilder TransactionDataTableBuilder, categoryId int64, categoryMap map[int64]*models.TransactionCategory) string {
|
|
||||||
category, exists := categoryMap[categoryId]
|
|
||||||
|
|
||||||
if exists {
|
|
||||||
return dataTableBuilder.ReplaceDelimiters(category.Name)
|
|
||||||
} else {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *DataTableTransactionDataExporter) getExportedAccountName(dataTableBuilder TransactionDataTableBuilder, accountId int64, accountMap map[int64]*models.Account) string {
|
|
||||||
account, exists := accountMap[accountId]
|
|
||||||
|
|
||||||
if exists {
|
|
||||||
return dataTableBuilder.ReplaceDelimiters(account.Name)
|
|
||||||
} else {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *DataTableTransactionDataExporter) getAccountCurrency(dataTableBuilder TransactionDataTableBuilder, accountId int64, accountMap map[int64]*models.Account) string {
|
|
||||||
account, exists := accountMap[accountId]
|
|
||||||
|
|
||||||
if exists {
|
|
||||||
return dataTableBuilder.ReplaceDelimiters(account.Currency)
|
|
||||||
} else {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *DataTableTransactionDataExporter) getExportedGeographicLocation(transaction *models.Transaction) string {
|
|
||||||
if transaction.GeoLongitude != 0 || transaction.GeoLatitude != 0 {
|
|
||||||
return fmt.Sprintf("%f%s%f", transaction.GeoLongitude, c.geoLocationSeparator, transaction.GeoLatitude)
|
|
||||||
}
|
|
||||||
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *DataTableTransactionDataExporter) getExportedTags(dataTableBuilder TransactionDataTableBuilder, transactionId int64, allTagIndexes map[int64][]int64, tagMap map[int64]*models.TransactionTag) string {
|
|
||||||
tagIndexes, exists := allTagIndexes[transactionId]
|
|
||||||
|
|
||||||
if !exists {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
var ret strings.Builder
|
|
||||||
|
|
||||||
for i := 0; i < len(tagIndexes); i++ {
|
|
||||||
tagIndex := tagIndexes[i]
|
|
||||||
tag, exists := tagMap[tagIndex]
|
|
||||||
|
|
||||||
if !exists {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if ret.Len() > 0 {
|
|
||||||
ret.WriteString(c.transactionTagSeparator)
|
|
||||||
}
|
|
||||||
|
|
||||||
ret.WriteString(strings.Replace(tag.Name, c.transactionTagSeparator, " ", -1))
|
|
||||||
}
|
|
||||||
|
|
||||||
return dataTableBuilder.ReplaceDelimiters(ret.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParseImportedData returns the imported transaction data
|
|
||||||
func (c *DataTableTransactionDataImporter) ParseImportedData(ctx core.Context, user *models.User, dataTable TransactionDataTable, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
|
||||||
if dataTable.TransactionRowCount() < 1 {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] cannot parse import data for user \"uid:%d\", because data table row count is less 1", user.Uid)
|
|
||||||
return nil, nil, nil, nil, nil, nil, errs.ErrNotFoundTransactionDataInFile
|
|
||||||
}
|
|
||||||
|
|
||||||
nameDbTypeMap, err := c.buildTransactionTypeNameDbTypeMap()
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, nil, nil, nil, nil, nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if !dataTable.HasColumn(TRANSACTION_DATA_TABLE_TRANSACTION_TIME) ||
|
|
||||||
!dataTable.HasColumn(TRANSACTION_DATA_TABLE_TRANSACTION_TYPE) ||
|
|
||||||
!dataTable.HasColumn(TRANSACTION_DATA_TABLE_SUB_CATEGORY) ||
|
|
||||||
!dataTable.HasColumn(TRANSACTION_DATA_TABLE_ACCOUNT_NAME) ||
|
|
||||||
!dataTable.HasColumn(TRANSACTION_DATA_TABLE_AMOUNT) ||
|
|
||||||
!dataTable.HasColumn(TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME) {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] cannot parse import data for user \"uid:%d\", because missing essential columns in header row", user.Uid)
|
|
||||||
return nil, nil, nil, nil, nil, nil, errs.ErrMissingRequiredFieldInHeaderRow
|
|
||||||
}
|
|
||||||
|
|
||||||
if accountMap == nil {
|
|
||||||
accountMap = make(map[string]*models.Account)
|
|
||||||
}
|
|
||||||
|
|
||||||
if expenseCategoryMap == nil {
|
|
||||||
expenseCategoryMap = make(map[string]*models.TransactionCategory)
|
|
||||||
}
|
|
||||||
|
|
||||||
if incomeCategoryMap == nil {
|
|
||||||
incomeCategoryMap = make(map[string]*models.TransactionCategory)
|
|
||||||
}
|
|
||||||
|
|
||||||
if transferCategoryMap == nil {
|
|
||||||
transferCategoryMap = make(map[string]*models.TransactionCategory)
|
|
||||||
}
|
|
||||||
|
|
||||||
if tagMap == nil {
|
|
||||||
tagMap = make(map[string]*models.TransactionTag)
|
|
||||||
}
|
|
||||||
|
|
||||||
allNewTransactions := make(models.ImportedTransactionSlice, 0, dataTable.TransactionRowCount())
|
|
||||||
allNewAccounts := make([]*models.Account, 0)
|
|
||||||
allNewSubExpenseCategories := make([]*models.TransactionCategory, 0)
|
|
||||||
allNewSubIncomeCategories := make([]*models.TransactionCategory, 0)
|
|
||||||
allNewSubTransferCategories := make([]*models.TransactionCategory, 0)
|
|
||||||
allNewTags := make([]*models.TransactionTag, 0)
|
|
||||||
|
|
||||||
dataRowIterator := dataTable.TransactionRowIterator()
|
|
||||||
dataRowIndex := 0
|
|
||||||
|
|
||||||
for dataRowIterator.HasNext() {
|
|
||||||
dataRowIndex++
|
|
||||||
dataRow, err := dataRowIterator.Next(ctx, user)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] cannot parse data row \"index:%d\" for user \"uid:%d\", because %s", dataRowIndex, user.Uid, err.Error())
|
|
||||||
return nil, nil, nil, nil, nil, nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if !dataRow.IsValid() {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
timezoneOffset := defaultTimezoneOffset
|
|
||||||
|
|
||||||
if dataTable.HasColumn(TRANSACTION_DATA_TABLE_TRANSACTION_TIMEZONE) {
|
|
||||||
transactionTimezone, err := utils.ParseFromTimezoneOffset(dataRow.GetData(TRANSACTION_DATA_TABLE_TRANSACTION_TIMEZONE))
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] cannot parse time zone \"%s\" in data row \"index:%d\" for user \"uid:%d\", because %s", dataRow.GetData(TRANSACTION_DATA_TABLE_TRANSACTION_TIMEZONE), dataRowIndex, user.Uid, err.Error())
|
|
||||||
return nil, nil, nil, nil, nil, nil, errs.ErrTransactionTimeZoneInvalid
|
|
||||||
}
|
|
||||||
|
|
||||||
timezoneOffset = utils.GetTimezoneOffsetMinutes(transactionTimezone)
|
|
||||||
}
|
|
||||||
|
|
||||||
transactionTime, err := utils.ParseFromLongDateTime(dataRow.GetData(TRANSACTION_DATA_TABLE_TRANSACTION_TIME), timezoneOffset)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] cannot parse time \"%s\" in data row \"index:%d\" for user \"uid:%d\", because %s", dataRow.GetData(TRANSACTION_DATA_TABLE_TRANSACTION_TIME), dataRowIndex, user.Uid, err.Error())
|
|
||||||
return nil, nil, nil, nil, nil, nil, errs.ErrTransactionTimeInvalid
|
|
||||||
}
|
|
||||||
|
|
||||||
transactionDbType, err := c.getTransactionDbType(nameDbTypeMap, dataRow.GetData(TRANSACTION_DATA_TABLE_TRANSACTION_TYPE))
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] cannot parse transaction type \"%s\" in data row \"index:%d\" for user \"uid:%d\", because %s", dataRow.GetData(TRANSACTION_DATA_TABLE_TRANSACTION_TYPE), dataRowIndex, user.Uid, err.Error())
|
|
||||||
return nil, nil, nil, nil, nil, nil, errs.Or(err, errs.ErrTransactionTypeInvalid)
|
|
||||||
}
|
|
||||||
|
|
||||||
categoryId := int64(0)
|
|
||||||
subCategoryName := ""
|
|
||||||
|
|
||||||
if transactionDbType != models.TRANSACTION_DB_TYPE_MODIFY_BALANCE {
|
|
||||||
transactionCategoryType, err := c.getTransactionCategoryType(transactionDbType)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] cannot parse transaction category type in data row \"index:%d\" for user \"uid:%d\", because %s", dataRowIndex, user.Uid, err.Error())
|
|
||||||
return nil, nil, nil, nil, nil, nil, errs.Or(err, errs.ErrTransactionTypeInvalid)
|
|
||||||
}
|
|
||||||
|
|
||||||
subCategoryName = dataRow.GetData(TRANSACTION_DATA_TABLE_SUB_CATEGORY)
|
|
||||||
|
|
||||||
if transactionDbType == models.TRANSACTION_DB_TYPE_EXPENSE {
|
|
||||||
subCategory, exists := expenseCategoryMap[subCategoryName]
|
|
||||||
|
|
||||||
if !exists {
|
|
||||||
subCategory = c.createNewTransactionCategoryModel(user.Uid, subCategoryName, transactionCategoryType)
|
|
||||||
allNewSubExpenseCategories = append(allNewSubExpenseCategories, subCategory)
|
|
||||||
expenseCategoryMap[subCategoryName] = subCategory
|
|
||||||
}
|
|
||||||
|
|
||||||
categoryId = subCategory.CategoryId
|
|
||||||
} else if transactionDbType == models.TRANSACTION_DB_TYPE_INCOME {
|
|
||||||
subCategory, exists := incomeCategoryMap[subCategoryName]
|
|
||||||
|
|
||||||
if !exists {
|
|
||||||
subCategory = c.createNewTransactionCategoryModel(user.Uid, subCategoryName, transactionCategoryType)
|
|
||||||
allNewSubIncomeCategories = append(allNewSubIncomeCategories, subCategory)
|
|
||||||
incomeCategoryMap[subCategoryName] = subCategory
|
|
||||||
}
|
|
||||||
|
|
||||||
categoryId = subCategory.CategoryId
|
|
||||||
} else if transactionDbType == models.TRANSACTION_DB_TYPE_TRANSFER_OUT {
|
|
||||||
subCategory, exists := transferCategoryMap[subCategoryName]
|
|
||||||
|
|
||||||
if !exists {
|
|
||||||
subCategory = c.createNewTransactionCategoryModel(user.Uid, subCategoryName, transactionCategoryType)
|
|
||||||
allNewSubTransferCategories = append(allNewSubTransferCategories, subCategory)
|
|
||||||
transferCategoryMap[subCategoryName] = subCategory
|
|
||||||
}
|
|
||||||
|
|
||||||
categoryId = subCategory.CategoryId
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
accountName := dataRow.GetData(TRANSACTION_DATA_TABLE_ACCOUNT_NAME)
|
|
||||||
accountCurrency := user.DefaultCurrency
|
|
||||||
|
|
||||||
if dataTable.HasColumn(TRANSACTION_DATA_TABLE_ACCOUNT_CURRENCY) {
|
|
||||||
accountCurrency = dataRow.GetData(TRANSACTION_DATA_TABLE_ACCOUNT_CURRENCY)
|
|
||||||
|
|
||||||
if _, ok := validators.AllCurrencyNames[accountCurrency]; !ok {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] account currency \"%s\" is not supported in data row \"index:%d\" for user \"uid:%d\"", accountCurrency, dataRowIndex, user.Uid)
|
|
||||||
return nil, nil, nil, nil, nil, nil, errs.ErrAccountCurrencyInvalid
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
account, exists := accountMap[accountName]
|
|
||||||
|
|
||||||
if !exists {
|
|
||||||
account = c.createNewAccountModel(user.Uid, accountName, accountCurrency)
|
|
||||||
allNewAccounts = append(allNewAccounts, account)
|
|
||||||
accountMap[accountName] = account
|
|
||||||
}
|
|
||||||
|
|
||||||
if dataTable.HasColumn(TRANSACTION_DATA_TABLE_ACCOUNT_CURRENCY) {
|
|
||||||
if account.Name != "" && account.Currency != accountCurrency {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] currency \"%s\" in data row \"index:%d\" not equals currency \"%s\" of the account for user \"uid:%d\"", accountCurrency, dataRowIndex, account.Currency, user.Uid)
|
|
||||||
return nil, nil, nil, nil, nil, nil, errs.ErrAccountCurrencyInvalid
|
|
||||||
}
|
|
||||||
} else if exists {
|
|
||||||
accountCurrency = account.Currency
|
|
||||||
}
|
|
||||||
|
|
||||||
amount, err := utils.ParseAmount(dataRow.GetData(TRANSACTION_DATA_TABLE_AMOUNT))
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] cannot parse acmount \"%s\" in data row \"index:%d\" for user \"uid:%d\", because %s", dataRow.GetData(TRANSACTION_DATA_TABLE_AMOUNT), dataRowIndex, user.Uid, err.Error())
|
|
||||||
return nil, nil, nil, nil, nil, nil, errs.ErrAmountInvalid
|
|
||||||
}
|
|
||||||
|
|
||||||
relatedAccountId := int64(0)
|
|
||||||
relatedAccountAmount := int64(0)
|
|
||||||
account2Name := ""
|
|
||||||
account2Currency := ""
|
|
||||||
|
|
||||||
if transactionDbType == models.TRANSACTION_DB_TYPE_TRANSFER_OUT {
|
|
||||||
account2Name = dataRow.GetData(TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME)
|
|
||||||
account2Currency = user.DefaultCurrency
|
|
||||||
|
|
||||||
if dataTable.HasColumn(TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_CURRENCY) {
|
|
||||||
account2Currency = dataRow.GetData(TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_CURRENCY)
|
|
||||||
|
|
||||||
if _, ok := validators.AllCurrencyNames[account2Currency]; !ok {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] account2 currency \"%s\" is not supported in data row \"index:%d\" for user \"uid:%d\"", account2Currency, dataRowIndex, user.Uid)
|
|
||||||
return nil, nil, nil, nil, nil, nil, errs.ErrAccountCurrencyInvalid
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
account2, exists := accountMap[account2Name]
|
|
||||||
|
|
||||||
if !exists {
|
|
||||||
account2 = c.createNewAccountModel(user.Uid, account2Name, account2Currency)
|
|
||||||
allNewAccounts = append(allNewAccounts, account2)
|
|
||||||
accountMap[account2Name] = account2
|
|
||||||
}
|
|
||||||
|
|
||||||
if dataTable.HasColumn(TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_CURRENCY) {
|
|
||||||
if account2.Name != "" && account2.Currency != account2Currency {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] currency \"%s\" in data row \"index:%d\" not equals currency \"%s\" of the account2 for user \"uid:%d\"", account2Currency, dataRowIndex, account2.Currency, user.Uid)
|
|
||||||
return nil, nil, nil, nil, nil, nil, errs.ErrAccountCurrencyInvalid
|
|
||||||
}
|
|
||||||
} else if exists {
|
|
||||||
account2Currency = account2.Currency
|
|
||||||
}
|
|
||||||
|
|
||||||
relatedAccountId = account2.AccountId
|
|
||||||
|
|
||||||
if dataTable.HasColumn(TRANSACTION_DATA_TABLE_RELATED_AMOUNT) {
|
|
||||||
relatedAccountAmount, err = utils.ParseAmount(dataRow.GetData(TRANSACTION_DATA_TABLE_RELATED_AMOUNT))
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] cannot parse acmount2 \"%s\" in data row \"index:%d\" for user \"uid:%d\", because %s", dataRow.GetData(TRANSACTION_DATA_TABLE_RELATED_AMOUNT), dataRowIndex, user.Uid, err.Error())
|
|
||||||
return nil, nil, nil, nil, nil, nil, errs.ErrAmountInvalid
|
|
||||||
}
|
|
||||||
} else if transactionDbType == models.TRANSACTION_DB_TYPE_TRANSFER_OUT {
|
|
||||||
relatedAccountAmount = amount
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
geoLongitude := float64(0)
|
|
||||||
geoLatitude := float64(0)
|
|
||||||
|
|
||||||
if dataTable.HasColumn(TRANSACTION_DATA_TABLE_GEOGRAPHIC_LOCATION) {
|
|
||||||
geoLocationItems := strings.Split(dataRow.GetData(TRANSACTION_DATA_TABLE_GEOGRAPHIC_LOCATION), c.geoLocationSeparator)
|
|
||||||
|
|
||||||
if len(geoLocationItems) == 2 {
|
|
||||||
geoLongitude, err = utils.StringToFloat64(geoLocationItems[0])
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] cannot parse geographic location \"%s\" in data row \"index:%d\" for user \"uid:%d\", because %s", dataRow.GetData(TRANSACTION_DATA_TABLE_GEOGRAPHIC_LOCATION), dataRowIndex, user.Uid, err.Error())
|
|
||||||
return nil, nil, nil, nil, nil, nil, errs.ErrGeographicLocationInvalid
|
|
||||||
}
|
|
||||||
|
|
||||||
geoLatitude, err = utils.StringToFloat64(geoLocationItems[1])
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] cannot parse geographic location \"%s\" in data row \"index:%d\" for user \"uid:%d\", because %s", dataRow.GetData(TRANSACTION_DATA_TABLE_GEOGRAPHIC_LOCATION), dataRowIndex, user.Uid, err.Error())
|
|
||||||
return nil, nil, nil, nil, nil, nil, errs.ErrGeographicLocationInvalid
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var tagIds []string
|
|
||||||
var tagNames []string
|
|
||||||
|
|
||||||
if dataTable.HasColumn(TRANSACTION_DATA_TABLE_TAGS) {
|
|
||||||
tagNameItems := strings.Split(dataRow.GetData(TRANSACTION_DATA_TABLE_TAGS), c.transactionTagSeparator)
|
|
||||||
|
|
||||||
for i := 0; i < len(tagNameItems); i++ {
|
|
||||||
tagName := tagNameItems[i]
|
|
||||||
|
|
||||||
if tagName == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
tag, exists := tagMap[tagName]
|
|
||||||
|
|
||||||
if !exists {
|
|
||||||
tag = c.createNewTransactionTagModel(user.Uid, tagName)
|
|
||||||
allNewTags = append(allNewTags, tag)
|
|
||||||
tagMap[tagName] = tag
|
|
||||||
}
|
|
||||||
|
|
||||||
if tag != nil {
|
|
||||||
tagIds = append(tagIds, utils.Int64ToString(tag.TagId))
|
|
||||||
}
|
|
||||||
|
|
||||||
tagNames = append(tagNames, tagName)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
description := ""
|
|
||||||
|
|
||||||
if dataTable.HasColumn(TRANSACTION_DATA_TABLE_DESCRIPTION) {
|
|
||||||
description = dataRow.GetData(TRANSACTION_DATA_TABLE_DESCRIPTION)
|
|
||||||
}
|
|
||||||
|
|
||||||
transaction := &models.ImportTransaction{
|
|
||||||
Transaction: &models.Transaction{
|
|
||||||
Uid: user.Uid,
|
|
||||||
Type: transactionDbType,
|
|
||||||
CategoryId: categoryId,
|
|
||||||
TransactionTime: utils.GetMinTransactionTimeFromUnixTime(transactionTime.Unix()),
|
|
||||||
TimezoneUtcOffset: timezoneOffset,
|
|
||||||
AccountId: account.AccountId,
|
|
||||||
Amount: amount,
|
|
||||||
HideAmount: false,
|
|
||||||
RelatedAccountId: relatedAccountId,
|
|
||||||
RelatedAccountAmount: relatedAccountAmount,
|
|
||||||
Comment: description,
|
|
||||||
GeoLongitude: geoLongitude,
|
|
||||||
GeoLatitude: geoLatitude,
|
|
||||||
CreatedIp: "127.0.0.1",
|
|
||||||
},
|
|
||||||
TagIds: tagIds,
|
|
||||||
OriginalCategoryName: subCategoryName,
|
|
||||||
OriginalSourceAccountName: accountName,
|
|
||||||
OriginalSourceAccountCurrency: accountCurrency,
|
|
||||||
OriginalDestinationAccountName: account2Name,
|
|
||||||
OriginalDestinationAccountCurrency: account2Currency,
|
|
||||||
OriginalTagNames: tagNames,
|
|
||||||
}
|
|
||||||
|
|
||||||
allNewTransactions = append(allNewTransactions, transaction)
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(allNewTransactions) < 1 {
|
|
||||||
log.Errorf(ctx, "[data_table_transaction_data_converter.parseImportedData] no transaction data parsed for \"uid:%d\"", user.Uid)
|
|
||||||
return nil, nil, nil, nil, nil, nil, errs.ErrNotFoundTransactionDataInFile
|
|
||||||
}
|
|
||||||
|
|
||||||
sort.Sort(allNewTransactions)
|
|
||||||
|
|
||||||
return allNewTransactions, allNewAccounts, allNewSubExpenseCategories, allNewSubIncomeCategories, allNewSubTransferCategories, allNewTags, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *DataTableTransactionDataImporter) buildTransactionTypeNameDbTypeMap() (map[string]models.TransactionDbType, error) {
|
|
||||||
if c.transactionTypeMapping == nil {
|
|
||||||
return nil, errs.ErrTransactionTypeInvalid
|
|
||||||
}
|
|
||||||
|
|
||||||
nameDbTypeMap := make(map[string]models.TransactionDbType, len(c.transactionTypeMapping))
|
|
||||||
nameDbTypeMap[c.transactionTypeMapping[models.TRANSACTION_TYPE_MODIFY_BALANCE]] = models.TRANSACTION_DB_TYPE_MODIFY_BALANCE
|
|
||||||
nameDbTypeMap[c.transactionTypeMapping[models.TRANSACTION_TYPE_INCOME]] = models.TRANSACTION_DB_TYPE_INCOME
|
|
||||||
nameDbTypeMap[c.transactionTypeMapping[models.TRANSACTION_TYPE_EXPENSE]] = models.TRANSACTION_DB_TYPE_EXPENSE
|
|
||||||
nameDbTypeMap[c.transactionTypeMapping[models.TRANSACTION_TYPE_TRANSFER]] = models.TRANSACTION_DB_TYPE_TRANSFER_OUT
|
|
||||||
|
|
||||||
return nameDbTypeMap, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *DataTableTransactionDataImporter) getTransactionDbType(nameDbTypeMap map[string]models.TransactionDbType, transactionTypeName string) (models.TransactionDbType, error) {
|
|
||||||
transactionType, exists := nameDbTypeMap[transactionTypeName]
|
|
||||||
|
|
||||||
if !exists {
|
|
||||||
return 0, errs.ErrTransactionTypeInvalid
|
|
||||||
}
|
|
||||||
|
|
||||||
return transactionType, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *DataTableTransactionDataImporter) getTransactionCategoryType(transactionType models.TransactionDbType) (models.TransactionCategoryType, error) {
|
|
||||||
if transactionType == models.TRANSACTION_DB_TYPE_INCOME {
|
|
||||||
return models.CATEGORY_TYPE_INCOME, nil
|
|
||||||
} else if transactionType == models.TRANSACTION_DB_TYPE_EXPENSE {
|
|
||||||
return models.CATEGORY_TYPE_EXPENSE, nil
|
|
||||||
} else if transactionType == models.TRANSACTION_DB_TYPE_TRANSFER_OUT {
|
|
||||||
return models.CATEGORY_TYPE_TRANSFER, nil
|
|
||||||
} else {
|
|
||||||
return 0, errs.ErrTransactionTypeInvalid
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// createNewAccountModel creates a new in-memory account model (not yet
// persisted) for an account name found in the imported data but absent from
// the caller's account map.
func (c *DataTableTransactionDataImporter) createNewAccountModel(uid int64, accountName string, currency string) *models.Account {
	return &models.Account{
		Uid:      uid,
		Name:     accountName,
		Currency: currency,
	}
}
|
|
||||||
|
|
||||||
// createNewTransactionCategoryModel creates a new in-memory transaction
// category model (not yet persisted) for a category name found in the
// imported data but absent from the caller's category map.
func (c *DataTableTransactionDataImporter) createNewTransactionCategoryModel(uid int64, categoryName string, transactionCategoryType models.TransactionCategoryType) *models.TransactionCategory {
	return &models.TransactionCategory{
		Uid:  uid,
		Name: categoryName,
		Type: transactionCategoryType,
	}
}
|
|
||||||
|
|
||||||
// createNewTransactionTagModel creates a new in-memory transaction tag model
// (not yet persisted) for a tag name found in the imported data but absent
// from the caller's tag map.
func (c *DataTableTransactionDataImporter) createNewTransactionTagModel(uid int64, tagName string) *models.TransactionTag {
	return &models.TransactionTag{
		Uid:  uid,
		Name: tagName,
	}
}
|
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
package _default
|
package _default
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/converter"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/models"
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
@@ -66,7 +67,7 @@ func (c *defaultTransactionDataPlainTextConverter) ToExportedContent(ctx core.Co
|
|||||||
ezbookkeepingLineSeparator,
|
ezbookkeepingLineSeparator,
|
||||||
)
|
)
|
||||||
|
|
||||||
dataTableExporter := datatable.CreateNewExporter(
|
dataTableExporter := converter.CreateNewExporter(
|
||||||
ezbookkeepingTransactionTypeNameMapping,
|
ezbookkeepingTransactionTypeNameMapping,
|
||||||
ezbookkeepingGeoLocationSeparator,
|
ezbookkeepingGeoLocationSeparator,
|
||||||
ezbookkeepingTagSeparator,
|
ezbookkeepingTagSeparator,
|
||||||
@@ -82,7 +83,7 @@ func (c *defaultTransactionDataPlainTextConverter) ToExportedContent(ctx core.Co
|
|||||||
}
|
}
|
||||||
|
|
||||||
// ParseImportedData returns the imported data by parsing the transaction plain text data
|
// ParseImportedData returns the imported data by parsing the transaction plain text data
|
||||||
func (c *defaultTransactionDataPlainTextConverter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
func (c *defaultTransactionDataPlainTextConverter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
||||||
dataTable, err := createNewDefaultPlainTextDataTable(
|
dataTable, err := createNewDefaultPlainTextDataTable(
|
||||||
string(data),
|
string(data),
|
||||||
c.columnSeparator,
|
c.columnSeparator,
|
||||||
@@ -95,7 +96,7 @@ func (c *defaultTransactionDataPlainTextConverter) ParseImportedData(ctx core.Co
|
|||||||
|
|
||||||
transactionDataTable := datatable.CreateNewImportedTransactionDataTable(dataTable, ezbookkeepingDataColumnNameMapping)
|
transactionDataTable := datatable.CreateNewImportedTransactionDataTable(dataTable, ezbookkeepingDataColumnNameMapping)
|
||||||
|
|
||||||
dataTableImporter := datatable.CreateNewImporter(
|
dataTableImporter := converter.CreateNewImporterWithTypeNameMapping(
|
||||||
ezbookkeepingTransactionTypeNameMapping,
|
ezbookkeepingTransactionTypeNameMapping,
|
||||||
ezbookkeepingGeoLocationSeparator,
|
ezbookkeepingGeoLocationSeparator,
|
||||||
ezbookkeepingTagSeparator,
|
ezbookkeepingTagSeparator,
|
||||||
|
|||||||
@@ -0,0 +1,231 @@
|
|||||||
|
package dsv
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/csv"
|
||||||
|
"io"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/text/encoding"
|
||||||
|
"golang.org/x/text/encoding/charmap"
|
||||||
|
"golang.org/x/text/encoding/japanese"
|
||||||
|
"golang.org/x/text/encoding/korean"
|
||||||
|
"golang.org/x/text/encoding/simplifiedchinese"
|
||||||
|
"golang.org/x/text/encoding/traditionalchinese"
|
||||||
|
"golang.org/x/text/encoding/unicode"
|
||||||
|
"golang.org/x/text/transform"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/converter"
|
||||||
|
csvconverter "github.com/mayswind/ezbookkeeping/pkg/converters/csv"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/log"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
// supportedFileTypeSeparators maps each supported custom import file type to
// the field separator rune used when parsing files of that type.
var supportedFileTypeSeparators = map[string]rune{
	"custom_csv": ',',
	"custom_tsv": '\t',
}
|
||||||
|
|
||||||
|
// supportedFileEncodings maps each user-selectable character set identifier
// to the text encoding used to decode imported dsv file content.
var supportedFileEncodings = map[string]encoding.Encoding{
	"utf-8":        unicode.UTF8,                                           // UTF-8
	"utf-8-bom":    unicode.UTF8BOM,                                        // UTF-8 with BOM
	"utf-16le":     unicode.UTF16(unicode.LittleEndian, unicode.IgnoreBOM), // UTF-16 Little Endian
	"utf-16be":     unicode.UTF16(unicode.BigEndian, unicode.IgnoreBOM),    // UTF-16 Big Endian
	"cp437":        charmap.CodePage437,                                    // OEM United States (CP-437)
	"cp863":        charmap.CodePage863,                                    // OEM Canadian French (CP-863)
	"cp037":        charmap.CodePage037,                                    // IBM EBCDIC US/Canada (CP-037)
	"cp1047":       charmap.CodePage1047,                                   // IBM EBCDIC Open Systems (CP-1047)
	"cp1140":       charmap.CodePage1140,                                   // IBM EBCDIC US/Canada with Euro (CP-1140)
	"iso-8859-1":   charmap.ISO8859_1,                                      // Western European (ISO-8859-1)
	"cp850":        charmap.CodePage850,                                    // Western European (CP-850)
	"cp858":        charmap.CodePage858,                                    // Western European with Euro (CP-858)
	"windows-1252": charmap.Windows1252,                                    // Western European (Windows-1252)
	"iso-8859-15":  charmap.ISO8859_15,                                     // Western European (ISO-8859-15)
	"iso-8859-4":   charmap.ISO8859_4,                                      // North European (ISO-8859-4)
	"iso-8859-10":  charmap.ISO8859_10,                                     // North European (ISO-8859-10)
	"cp865":        charmap.CodePage865,                                    // North European (CP-865)
	"iso-8859-2":   charmap.ISO8859_2,                                      // Central European (ISO-8859-2)
	"cp852":        charmap.CodePage852,                                    // Central European (CP-852)
	"windows-1250": charmap.Windows1250,                                    // Central European (Windows-1250)
	"iso-8859-14":  charmap.ISO8859_14,                                     // Celtic (ISO-8859-14)
	"iso-8859-3":   charmap.ISO8859_3,                                      // South European (ISO-8859-3)
	"cp860":        charmap.CodePage860,                                    // Portuguese (CP-860)
	"iso-8859-7":   charmap.ISO8859_7,                                      // Greek (ISO-8859-7)
	"windows-1253": charmap.Windows1253,                                    // Greek (Windows-1253)
	"iso-8859-9":   charmap.ISO8859_9,                                      // Turkish (ISO-8859-9)
	"windows-1254": charmap.Windows1254,                                    // Turkish (Windows-1254)
	"iso-8859-13":  charmap.ISO8859_13,                                     // Baltic (ISO-8859-13)
	"windows-1257": charmap.Windows1257,                                    // Baltic (Windows-1257)
	"iso-8859-16":  charmap.ISO8859_16,                                     // South-Eastern European (ISO-8859-16)
	"iso-8859-5":   charmap.ISO8859_5,                                      // Cyrillic (ISO-8859-5)
	"cp855":        charmap.CodePage855,                                    // Cyrillic (CP-855)
	"cp866":        charmap.CodePage866,                                    // Cyrillic (CP-866)
	"windows-1251": charmap.Windows1251,                                    // Cyrillic (Windows-1251)
	"koi8r":        charmap.KOI8R,                                          // Cyrillic (KOI8-R)
	"koi8u":        charmap.KOI8U,                                          // Cyrillic (KOI8-U)
	"iso-8859-6":   charmap.ISO8859_6,                                      // Arabic (ISO-8859-6)
	"windows-1256": charmap.Windows1256,                                    // Arabic (Windows-1256)
	"iso-8859-8":   charmap.ISO8859_8,                                      // Hebrew (ISO-8859-8)
	"cp862":        charmap.CodePage862,                                    // Hebrew (CP-862)
	"windows-1255": charmap.Windows1255,                                    // Hebrew (Windows-1255)
	"windows-874":  charmap.Windows874,                                     // Thai (Windows-874)
	"windows-1258": charmap.Windows1258,                                    // Vietnamese (Windows-1258)
	"gb18030":      simplifiedchinese.GB18030,                              // Chinese (Simplified, GB18030)
	"gbk":          simplifiedchinese.GBK,                                  // Chinese (Simplified, GBK)
	"big5":         traditionalchinese.Big5,                                // Chinese (Traditional, Big5)
	"euc-kr":       korean.EUCKR,                                           // Korean (EUC-KR)
	"euc-jp":       japanese.EUCJP,                                         // Japanese (EUC-JP)
	"iso-2022-jp":  japanese.ISO2022JP,                                     // Japanese (ISO-2022-JP)
	"shift_jis":    japanese.ShiftJIS,                                      // Japanese (Shift JIS)
}
|
||||||
|
|
||||||
|
// customTransactionTypeNameMapping maps each supported transaction type to its
// numeric value rendered as a string; the generic data table importer resolves
// transaction types parsed from custom dsv files through these names.
var customTransactionTypeNameMapping = map[models.TransactionType]string{
	models.TRANSACTION_TYPE_MODIFY_BALANCE: utils.IntToString(int(models.TRANSACTION_TYPE_MODIFY_BALANCE)),
	models.TRANSACTION_TYPE_INCOME:         utils.IntToString(int(models.TRANSACTION_TYPE_INCOME)),
	models.TRANSACTION_TYPE_EXPENSE:        utils.IntToString(int(models.TRANSACTION_TYPE_EXPENSE)),
	models.TRANSACTION_TYPE_TRANSFER:       utils.IntToString(int(models.TRANSACTION_TYPE_TRANSFER)),
}
|
||||||
|
|
||||||
|
// CustomTransactionDataDsvFileParser defines the interface of a parser that splits
// raw delimiter-separated-values file data into lines of string fields.
type CustomTransactionDataDsvFileParser interface {
	// ParseDsvFileLines returns all non-empty lines parsed from the given file data
	ParseDsvFileLines(ctx core.Context, data []byte) ([][]string, error)
}
|
||||||
|
|
||||||
|
// customTransactionDataDsvFileImporter defines the structure of custom dsv importer for transaction data
type customTransactionDataDsvFileImporter struct {
	fileEncoding               encoding.Encoding                            // character encoding used to decode the raw file bytes
	separator                  rune                                         // field delimiter of the dsv file
	columnIndexMapping         map[datatable.TransactionDataTableColumn]int // zero-based column index in the file for each data table column
	transactionTypeNameMapping map[string]models.TransactionType            // type names appearing in the file mapped to transaction types
	hasHeaderLine              bool                                         // whether the first line of the file is a header line
	timeFormat                 string                                       // transaction time format (moment.js style, converted to a Go layout before parsing)
	timezoneFormat             string                                       // timezone column format: "Z" or "" for -HH:mm, "ZZ" for -HHmm
	amountDecimalSeparator     string                                       // decimal separator used in amount columns
	amountDigitGroupingSymbol  string                                       // digit grouping symbol used in amount columns
	geoLocationSeparator       string                                       // separator between values in the geo location column — presumably lon/lat; verify against converter
	transactionTagSeparator    string                                       // separator between tags in the tag column
}
|
||||||
|
|
||||||
|
// ParseDsvFileLines returns the parsed file lines for specified the dsv file data
|
||||||
|
func (c *customTransactionDataDsvFileImporter) ParseDsvFileLines(ctx core.Context, data []byte) ([][]string, error) {
|
||||||
|
reader := transform.NewReader(bytes.NewReader(data), c.fileEncoding.NewDecoder())
|
||||||
|
csvReader := csv.NewReader(reader)
|
||||||
|
csvReader.Comma = c.separator
|
||||||
|
csvReader.FieldsPerRecord = -1
|
||||||
|
|
||||||
|
allLines := make([][]string, 0)
|
||||||
|
|
||||||
|
for {
|
||||||
|
items, err := csvReader.Read()
|
||||||
|
|
||||||
|
if err == io.EOF {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[custom_transaction_data_dsv_file_importer.ParseDsvFileLines] cannot parse dsv data, because %s", err.Error())
|
||||||
|
return nil, errs.ErrInvalidCSVFile
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(items) == 1 && items[0] == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
for index := range items {
|
||||||
|
items[index] = strings.Trim(items[index], " ")
|
||||||
|
}
|
||||||
|
|
||||||
|
allLines = append(allLines, items)
|
||||||
|
}
|
||||||
|
|
||||||
|
return allLines, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseImportedData returns the imported data by parsing the custom transaction dsv data
|
||||||
|
func (c *customTransactionDataDsvFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
||||||
|
allLines, err := c.ParseDsvFileLines(ctx, data)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, nil, nil, nil, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if !c.hasHeaderLine {
|
||||||
|
allLines = append([][]string{{}}, allLines...)
|
||||||
|
}
|
||||||
|
|
||||||
|
dataTable := csvconverter.CreateNewCustomCsvImportedDataTable(allLines)
|
||||||
|
transactionDataTable := CreateNewCustomPlainTextDataTable(dataTable, c.columnIndexMapping, c.transactionTypeNameMapping, c.timeFormat, c.timezoneFormat, c.amountDecimalSeparator, c.amountDigitGroupingSymbol)
|
||||||
|
dataTableImporter := converter.CreateNewImporterWithTypeNameMapping(customTransactionTypeNameMapping, c.geoLocationSeparator, c.transactionTagSeparator)
|
||||||
|
|
||||||
|
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsDelimiterSeparatedValuesFileType returns whether the file type is the delimiter-separated values file type
|
||||||
|
func IsDelimiterSeparatedValuesFileType(fileType string) bool {
|
||||||
|
_, exists := supportedFileTypeSeparators[fileType]
|
||||||
|
return exists
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateNewCustomTransactionDataDsvFileParser returns a new custom dsv parser for transaction data
|
||||||
|
func CreateNewCustomTransactionDataDsvFileParser(fileType string, fileEncoding string) (CustomTransactionDataDsvFileParser, error) {
|
||||||
|
separator, exists := supportedFileTypeSeparators[fileType]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
return nil, errs.ErrImportFileTypeNotSupported
|
||||||
|
}
|
||||||
|
|
||||||
|
enc, exists := supportedFileEncodings[fileEncoding]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
return nil, errs.ErrImportFileEncodingNotSupported
|
||||||
|
}
|
||||||
|
|
||||||
|
return &customTransactionDataDsvFileImporter{
|
||||||
|
fileEncoding: enc,
|
||||||
|
separator: separator,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateNewCustomTransactionDataDsvFileImporter returns a new custom dsv importer for transaction data
|
||||||
|
func CreateNewCustomTransactionDataDsvFileImporter(fileType string, fileEncoding string, columnIndexMapping map[datatable.TransactionDataTableColumn]int, transactionTypeNameMapping map[string]models.TransactionType, hasHeaderLine bool, timeFormat string, timezoneFormat string, amountDecimalSeparator string, amountDigitGroupingSymbol string, geoLocationSeparator string, transactionTagSeparator string) (converter.TransactionDataImporter, error) {
|
||||||
|
separator, exists := supportedFileTypeSeparators[fileType]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
return nil, errs.ErrImportFileTypeNotSupported
|
||||||
|
}
|
||||||
|
|
||||||
|
enc, exists := supportedFileEncodings[fileEncoding]
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
return nil, errs.ErrImportFileEncodingNotSupported
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, exists = columnIndexMapping[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIME]; !exists {
|
||||||
|
return nil, errs.ErrMissingRequiredFieldInHeaderRow
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, exists = columnIndexMapping[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE]; !exists {
|
||||||
|
return nil, errs.ErrMissingRequiredFieldInHeaderRow
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, exists = columnIndexMapping[datatable.TRANSACTION_DATA_TABLE_AMOUNT]; !exists {
|
||||||
|
return nil, errs.ErrMissingRequiredFieldInHeaderRow
|
||||||
|
}
|
||||||
|
|
||||||
|
return &customTransactionDataDsvFileImporter{
|
||||||
|
fileEncoding: enc,
|
||||||
|
separator: separator,
|
||||||
|
columnIndexMapping: columnIndexMapping,
|
||||||
|
transactionTypeNameMapping: transactionTypeNameMapping,
|
||||||
|
hasHeaderLine: hasHeaderLine,
|
||||||
|
timeFormat: timeFormat,
|
||||||
|
timezoneFormat: timezoneFormat,
|
||||||
|
amountDecimalSeparator: amountDecimalSeparator,
|
||||||
|
amountDigitGroupingSymbol: amountDigitGroupingSymbol,
|
||||||
|
geoLocationSeparator: geoLocationSeparator,
|
||||||
|
transactionTagSeparator: transactionTagSeparator,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,300 @@
|
|||||||
|
package dsv
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/log"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
// customPlainTextDataTable defines the structure of custom plain text transaction data table
type customPlainTextDataTable struct {
	innerDataTable             datatable.ImportedDataTable                  // underlying imported data table providing the raw rows
	columnIndexMapping         map[datatable.TransactionDataTableColumn]int // zero-based column index in the source rows for each data table column
	transactionTypeNameMapping map[string]models.TransactionType            // type names appearing in the file mapped to transaction types
	timeFormat                 string                                       // Go time layout converted from the user-supplied moment.js format
	timezoneFormat             string                                       // timezone column format: "Z" or "" for -HH:mm, "ZZ" for -HHmm
	timeFormatIncludeTimezone  bool                                         // true when the original time format contains a timezone token ('z' or 'Z')
	amountDecimalSeparator     string                                       // decimal separator used in amount columns
	amountDigitGroupingSymbol  string                                       // digit grouping symbol used in amount columns
}
|
||||||
|
|
||||||
|
// customPlainTextDataRow defines the structure of custom plain text transaction data row
type customPlainTextDataRow struct {
	transactionDataTable *customPlainTextDataTable                    // owning data table, kept for access to its settings
	rowData              map[datatable.TransactionDataTableColumn]string // normalized column values of this row
	isValid              bool                                         // false when the row was skipped (e.g. unmapped transaction type)
}
|
||||||
|
|
||||||
|
// customPlainTextDataRowIterator defines the structure of custom plain text transaction data row iterator
type customPlainTextDataRowIterator struct {
	transactionDataTable *customPlainTextDataTable        // data table being iterated
	innerIterator        datatable.ImportedDataRowIterator // iterator over the underlying imported rows
}
|
||||||
|
|
||||||
|
// HasColumn returns whether the data table has specified column
|
||||||
|
func (t *customPlainTextDataTable) HasColumn(column datatable.TransactionDataTableColumn) bool {
|
||||||
|
// custom dsv file allows no sub category, account name and related account name column mapping, but data table converter needs these columns
|
||||||
|
if column == datatable.TRANSACTION_DATA_TABLE_SUB_CATEGORY ||
|
||||||
|
column == datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME ||
|
||||||
|
column == datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// timezone column will be added when original time format contains timezone
|
||||||
|
if t.timeFormatIncludeTimezone && column == datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIMEZONE {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
_, exists := t.columnIndexMapping[column]
|
||||||
|
return exists
|
||||||
|
}
|
||||||
|
|
||||||
|
// TransactionRowCount returns the total count of transaction data row,
// delegating to the underlying imported data table.
func (t *customPlainTextDataTable) TransactionRowCount() int {
	return t.innerDataTable.DataRowCount()
}
|
||||||
|
|
||||||
|
// TransactionRowIterator returns the iterator of transaction data row
|
||||||
|
func (t *customPlainTextDataTable) TransactionRowIterator() datatable.TransactionDataRowIterator {
|
||||||
|
return &customPlainTextDataRowIterator{
|
||||||
|
transactionDataTable: t,
|
||||||
|
innerIterator: t.innerDataTable.DataRowIterator(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsValid returns whether this row is valid data for importing
// (rows with an unmapped transaction type are parsed but marked invalid).
func (r *customPlainTextDataRow) IsValid() bool {
	return r.isValid
}
|
||||||
|
|
||||||
|
// GetData returns the data in the specified column type;
// an empty string is returned when the column is absent from this row.
func (r *customPlainTextDataRow) GetData(column datatable.TransactionDataTableColumn) string {
	return r.rowData[column]
}
|
||||||
|
|
||||||
|
// HasNext returns whether the iterator does not reach the end,
// delegating to the underlying imported row iterator.
func (t *customPlainTextDataRowIterator) HasNext() bool {
	return t.innerIterator.HasNext()
}
|
||||||
|
|
||||||
|
// Next returns the next transaction data row
|
||||||
|
func (t *customPlainTextDataRowIterator) Next(ctx core.Context, user *models.User) (daraRow datatable.TransactionDataRow, err error) {
|
||||||
|
importedRow := t.innerIterator.Next()
|
||||||
|
|
||||||
|
if importedRow == nil {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
rowData, isValid, err := t.parseTransaction(ctx, user, importedRow)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[custom_transaction_plain_text_data_table.Next] cannot parsing transaction in row \"%s\", because %s", t.innerIterator.CurrentRowId(), err.Error())
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &customPlainTextDataRow{
|
||||||
|
transactionDataTable: t.transactionDataTable,
|
||||||
|
rowData: rowData,
|
||||||
|
isValid: isValid,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseTransaction extracts and normalizes one imported row into the canonical
// column-value map expected by the data table converter. The bool result is
// false when the row should be skipped (unmapped transaction type); a non-nil
// error aborts the whole import.
func (t *customPlainTextDataRowIterator) parseTransaction(ctx core.Context, user *models.User, row datatable.ImportedDataRow) (map[datatable.TransactionDataTableColumn]string, bool, error) {
	rowData := make(map[datatable.TransactionDataTableColumn]string, len(t.transactionDataTable.columnIndexMapping))

	// copy each mapped column from the source row; out-of-range indices are silently ignored
	for column, columnIndex := range t.transactionDataTable.columnIndexMapping {
		if columnIndex < 0 || columnIndex >= row.ColumnCount() {
			continue
		}

		value := row.GetData(columnIndex)
		rowData[column] = value
	}

	// parse transaction type: file-specific name -> models type -> canonical numeric-string name
	if rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] != "" {
		transactionType, exists := t.transactionDataTable.transactionTypeNameMapping[rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE]]

		if !exists {
			// an unmapped type name is not fatal, the row is just skipped
			log.Warnf(ctx, "[custom_transaction_plain_text_data_table.parseTransaction] skip parsing this transaction, because transaction type \"%s\" mapping not defined", rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE])
			return nil, false, nil
		}

		mappedTransactionType, exists := customTransactionTypeNameMapping[transactionType]

		if !exists {
			log.Errorf(ctx, "[custom_transaction_plain_text_data_table.parseTransaction] cannot parsing transaction type \"%s\", because type \"%d\" is invalid", rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE], transactionType)
			return nil, false, errs.ErrTransactionTypeInvalid
		}

		rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = mappedTransactionType
	}

	// parse date time using the converted Go layout and re-render it in the canonical long format
	if rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIME] != "" {
		dateTime, err := time.Parse(t.transactionDataTable.timeFormat, rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIME])

		if err != nil {
			return nil, false, errs.ErrTransactionTimeInvalid
		}

		rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIME] = utils.FormatUnixTimeToLongDateTime(dateTime.Unix(), dateTime.Location())

		// when the time format carries its own timezone, derive the timezone column from the parsed time
		if t.transactionDataTable.timeFormatIncludeTimezone {
			rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIMEZONE] = utils.FormatTimezoneOffset(dateTime.Location())
		}
	}

	// normalize the timezone column to the -HH:mm form
	if rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIMEZONE] != "" {
		if t.transactionDataTable.timezoneFormat == "Z" || t.transactionDataTable.timezoneFormat == "" { // -HH:mm
			// Do Nothing
		} else if t.transactionDataTable.timezoneFormat == "ZZ" { // -HHmm
			timezone := rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIMEZONE]

			if len(timezone) != 5 {
				return nil, false, errs.ErrTransactionTimeZoneInvalid
			}

			// insert the colon: "-HHmm" -> "-HH:mm"
			timezone = timezone[:3] + ":" + timezone[3:]
			rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIMEZONE] = timezone
		} else {
			return nil, false, errs.ErrImportFileTransactionTimezoneFormatInvalid
		}
	}

	// use primary category if sub category is empty
	if rowData[datatable.TRANSACTION_DATA_TABLE_SUB_CATEGORY] == "" && rowData[datatable.TRANSACTION_DATA_TABLE_CATEGORY] != "" {
		rowData[datatable.TRANSACTION_DATA_TABLE_SUB_CATEGORY] = rowData[datatable.TRANSACTION_DATA_TABLE_CATEGORY]
	}

	// parse and normalize the amount (strips digit grouping, converts the decimal separator, trims trailing zeros)
	if rowData[datatable.TRANSACTION_DATA_TABLE_AMOUNT] != "" {
		amount, err := t.parseAmount(ctx, rowData[datatable.TRANSACTION_DATA_TABLE_AMOUNT])

		if err != nil {
			log.Errorf(ctx, "[custom_transaction_plain_text_data_table.parseTransaction] cannot parsing transaction amount \"%s\", because %s", rowData[datatable.TRANSACTION_DATA_TABLE_AMOUNT], err.Error())
			return nil, false, err
		}

		rowData[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = amount
	}

	// normalize the related amount the same way
	if rowData[datatable.TRANSACTION_DATA_TABLE_RELATED_AMOUNT] != "" {
		amount, err := t.parseAmount(ctx, rowData[datatable.TRANSACTION_DATA_TABLE_RELATED_AMOUNT])

		if err != nil {
			log.Errorf(ctx, "[custom_transaction_plain_text_data_table.parseTransaction] cannot parsing transaction related amount \"%s\", because %s", rowData[datatable.TRANSACTION_DATA_TABLE_RELATED_AMOUNT], err.Error())
			return nil, false, err
		}

		rowData[datatable.TRANSACTION_DATA_TABLE_RELATED_AMOUNT] = amount
	}

	// ensure the columns HasColumn always advertises are present in the row data
	if _, exists := rowData[datatable.TRANSACTION_DATA_TABLE_SUB_CATEGORY]; !exists {
		rowData[datatable.TRANSACTION_DATA_TABLE_SUB_CATEGORY] = ""
	}

	if _, exists := rowData[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME]; !exists {
		rowData[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = ""
	}

	if _, exists := rowData[datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME]; !exists {
		rowData[datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME] = ""
	}

	return rowData, true, nil
}
|
||||||
|
|
||||||
|
func (t *customPlainTextDataRowIterator) parseAmount(ctx core.Context, amountValue string) (string, error) {
|
||||||
|
if t.transactionDataTable.amountDigitGroupingSymbol != "" {
|
||||||
|
amountValue = strings.ReplaceAll(amountValue, t.transactionDataTable.amountDigitGroupingSymbol, "")
|
||||||
|
}
|
||||||
|
|
||||||
|
if t.transactionDataTable.amountDecimalSeparator != "" && t.transactionDataTable.amountDecimalSeparator != "." {
|
||||||
|
if strings.Contains(amountValue, ".") {
|
||||||
|
return "", errs.ErrAmountInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
amountValue = strings.ReplaceAll(amountValue, t.transactionDataTable.amountDecimalSeparator, ".")
|
||||||
|
}
|
||||||
|
|
||||||
|
amountValue = utils.TrimTrailingZerosInDecimal(amountValue)
|
||||||
|
amount, err := utils.ParseAmount(amountValue)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return "", errs.ErrAmountInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
return utils.FormatAmount(amount), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateNewCustomPlainTextDataTable returns transaction data table from imported data table
|
||||||
|
func CreateNewCustomPlainTextDataTable(dataTable datatable.ImportedDataTable, columnIndexMapping map[datatable.TransactionDataTableColumn]int, transactionTypeNameMapping map[string]models.TransactionType, timeFormat string, timezoneFormat string, amountDecimalSeparator string, amountDigitGroupingSymbol string) *customPlainTextDataTable {
|
||||||
|
timeFormatIncludeTimezone := strings.Contains(timeFormat, "z") || strings.Contains(timeFormat, "Z")
|
||||||
|
|
||||||
|
return &customPlainTextDataTable{
|
||||||
|
innerDataTable: dataTable,
|
||||||
|
columnIndexMapping: columnIndexMapping,
|
||||||
|
transactionTypeNameMapping: transactionTypeNameMapping,
|
||||||
|
timeFormat: getDateTimeFormat(timeFormat),
|
||||||
|
timezoneFormat: timezoneFormat,
|
||||||
|
timeFormatIncludeTimezone: timeFormatIncludeTimezone,
|
||||||
|
amountDecimalSeparator: amountDecimalSeparator,
|
||||||
|
amountDigitGroupingSymbol: amountDigitGroupingSymbol,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// getDateTimeFormat converts a moment.js-style date/time format string into a
// Go time layout.
//
// The previous implementation ran a sequence of strings.ReplaceAll calls, which
// corrupted earlier substitutions: e.g. "MMMM" -> "January" was later mangled
// to "Jpmnupmry" by the "a" -> "pm" replacement, and "dddd" -> "Monday" became
// "Mondpmy". This version scans the input once and substitutes the longest
// matching token at each position, so replacement output is never re-scanned.
// Characters that match no token are copied through verbatim.
func getDateTimeFormat(format string) string {
	// moment.js tokens mapped to Go reference-time layout tokens;
	// for each leading character, longer tokens appear before shorter ones
	replacements := []struct {
		moment string
		layout string
	}{
		{"YYYY", "2006"}, {"YY", "06"},
		{"MMMM", "January"}, {"MMM", "Jan"}, {"MM", "01"}, {"M", "1"},
		{"DD", "02"}, {"D", "2"},
		{"dddd", "Monday"}, {"ddd", "Mon"},
		{"HH", "15"}, {"H", "15"}, // Go has no unpadded 24-hour token; "15" is the closest equivalent
		{"hh", "03"}, {"h", "3"},
		{"mm", "04"}, {"m", "4"},
		{"ss", "05"}, {"s", "5"},
		{"A", "PM"}, {"a", "pm"},
		{"zz", "MST"}, {"z", "MST"},
		{"ZZ", "Z0700"}, {"Z", "Z07:00"}, // -HHmm and -HH:mm timezone offsets
	}

	var builder strings.Builder
	builder.Grow(len(format))

	for i := 0; i < len(format); {
		// fractional seconds: "." followed by a run of up to nine 'S' characters maps to ".9…"
		if format[i] == '.' && i+1 < len(format) && format[i+1] == 'S' {
			j := i + 1

			for j < len(format) && format[j] == 'S' && j-i <= 9 {
				j++
			}

			builder.WriteByte('.')
			builder.WriteString(strings.Repeat("9", j-i-1))
			i = j
			continue
		}

		matched := false

		for _, replacement := range replacements {
			if strings.HasPrefix(format[i:], replacement.moment) {
				builder.WriteString(replacement.layout)
				i += len(replacement.moment)
				matched = true
				break
			}
		}

		if !matched {
			builder.WriteByte(format[i])
			i++
		}
	}

	return builder.String()
}
|
||||||
+20
-20
@@ -10,27 +10,27 @@ import (
|
|||||||
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
)
|
)
|
||||||
|
|
||||||
// ExcelFileImportedDataTable defines the structure of excel file data table
|
// ExcelMSCFBFileImportedDataTable defines the structure of excel (microsoft compound file binary) file data table
|
||||||
type ExcelFileImportedDataTable struct {
|
type ExcelMSCFBFileImportedDataTable struct {
|
||||||
workbook *xls.WorkBook
|
workbook *xls.WorkBook
|
||||||
headerLineColumnNames []string
|
headerLineColumnNames []string
|
||||||
}
|
}
|
||||||
|
|
||||||
// ExcelFileDataRow defines the structure of excel file data table row
|
// ExcelMSCFBFileDataRow defines the structure of excel (microsoft compound file binary) file data table row
|
||||||
type ExcelFileDataRow struct {
|
type ExcelMSCFBFileDataRow struct {
|
||||||
sheet *xls.WorkSheet
|
sheet *xls.WorkSheet
|
||||||
rowIndex int
|
rowIndex int
|
||||||
}
|
}
|
||||||
|
|
||||||
// ExcelFileDataRowIterator defines the structure of excel file data table row iterator
|
// ExcelMSCFBFileDataRowIterator defines the structure of excel (microsoft compound file binary) file data table row iterator
|
||||||
type ExcelFileDataRowIterator struct {
|
type ExcelMSCFBFileDataRowIterator struct {
|
||||||
dataTable *ExcelFileImportedDataTable
|
dataTable *ExcelMSCFBFileImportedDataTable
|
||||||
currentSheetIndex int
|
currentSheetIndex int
|
||||||
currentRowIndexInSheet uint16
|
currentRowIndexInSheet uint16
|
||||||
}
|
}
|
||||||
|
|
||||||
// DataRowCount returns the total count of data row
|
// DataRowCount returns the total count of data row
|
||||||
func (t *ExcelFileImportedDataTable) DataRowCount() int {
|
func (t *ExcelMSCFBFileImportedDataTable) DataRowCount() int {
|
||||||
totalDataRowCount := 0
|
totalDataRowCount := 0
|
||||||
|
|
||||||
for i := 0; i < t.workbook.NumSheets(); i++ {
|
for i := 0; i < t.workbook.NumSheets(); i++ {
|
||||||
@@ -47,13 +47,13 @@ func (t *ExcelFileImportedDataTable) DataRowCount() int {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// HeaderColumnNames returns the header column name list
|
// HeaderColumnNames returns the header column name list
|
||||||
func (t *ExcelFileImportedDataTable) HeaderColumnNames() []string {
|
func (t *ExcelMSCFBFileImportedDataTable) HeaderColumnNames() []string {
|
||||||
return t.headerLineColumnNames
|
return t.headerLineColumnNames
|
||||||
}
|
}
|
||||||
|
|
||||||
// DataRowIterator returns the iterator of data row
|
// DataRowIterator returns the iterator of data row
|
||||||
func (t *ExcelFileImportedDataTable) DataRowIterator() datatable.ImportedDataRowIterator {
|
func (t *ExcelMSCFBFileImportedDataTable) DataRowIterator() datatable.ImportedDataRowIterator {
|
||||||
return &ExcelFileDataRowIterator{
|
return &ExcelMSCFBFileDataRowIterator{
|
||||||
dataTable: t,
|
dataTable: t,
|
||||||
currentSheetIndex: 0,
|
currentSheetIndex: 0,
|
||||||
currentRowIndexInSheet: 0,
|
currentRowIndexInSheet: 0,
|
||||||
@@ -61,19 +61,19 @@ func (t *ExcelFileImportedDataTable) DataRowIterator() datatable.ImportedDataRow
|
|||||||
}
|
}
|
||||||
|
|
||||||
// ColumnCount returns the total count of column in this data row
|
// ColumnCount returns the total count of column in this data row
|
||||||
func (r *ExcelFileDataRow) ColumnCount() int {
|
func (r *ExcelMSCFBFileDataRow) ColumnCount() int {
|
||||||
row := r.sheet.Row(r.rowIndex)
|
row := r.sheet.Row(r.rowIndex)
|
||||||
return row.LastCol() + 1
|
return row.LastCol() + 1
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetData returns the data in the specified column index
|
// GetData returns the data in the specified column index
|
||||||
func (r *ExcelFileDataRow) GetData(columnIndex int) string {
|
func (r *ExcelMSCFBFileDataRow) GetData(columnIndex int) string {
|
||||||
row := r.sheet.Row(r.rowIndex)
|
row := r.sheet.Row(r.rowIndex)
|
||||||
return row.Col(columnIndex)
|
return row.Col(columnIndex)
|
||||||
}
|
}
|
||||||
|
|
||||||
// HasNext returns whether the iterator does not reach the end
|
// HasNext returns whether the iterator does not reach the end
|
||||||
func (t *ExcelFileDataRowIterator) HasNext() bool {
|
func (t *ExcelMSCFBFileDataRowIterator) HasNext() bool {
|
||||||
workbook := t.dataTable.workbook
|
workbook := t.dataTable.workbook
|
||||||
|
|
||||||
if t.currentSheetIndex >= workbook.NumSheets() {
|
if t.currentSheetIndex >= workbook.NumSheets() {
|
||||||
@@ -100,12 +100,12 @@ func (t *ExcelFileDataRowIterator) HasNext() bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// CurrentRowId returns current index
|
// CurrentRowId returns current index
|
||||||
func (t *ExcelFileDataRowIterator) CurrentRowId() string {
|
func (t *ExcelMSCFBFileDataRowIterator) CurrentRowId() string {
|
||||||
return fmt.Sprintf("table#%d-row#%d", t.currentSheetIndex, t.currentRowIndexInSheet)
|
return fmt.Sprintf("table#%d-row#%d", t.currentSheetIndex, t.currentRowIndexInSheet)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Next returns the next imported data row
|
// Next returns the next imported data row
|
||||||
func (t *ExcelFileDataRowIterator) Next() datatable.ImportedDataRow {
|
func (t *ExcelMSCFBFileDataRowIterator) Next() datatable.ImportedDataRow {
|
||||||
workbook := t.dataTable.workbook
|
workbook := t.dataTable.workbook
|
||||||
currentRowIndexInTable := t.currentRowIndexInSheet
|
currentRowIndexInTable := t.currentRowIndexInSheet
|
||||||
|
|
||||||
@@ -133,14 +133,14 @@ func (t *ExcelFileDataRowIterator) Next() datatable.ImportedDataRow {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return &ExcelFileDataRow{
|
return &ExcelMSCFBFileDataRow{
|
||||||
sheet: currentSheet,
|
sheet: currentSheet,
|
||||||
rowIndex: int(t.currentRowIndexInSheet),
|
rowIndex: int(t.currentRowIndexInSheet),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// CreateNewExcelFileImportedDataTable returns excel xls data table by file binary data
|
// CreateNewExcelMSCFBFileImportedDataTable returns excel (microsoft compound file binary) data table by file binary data
|
||||||
func CreateNewExcelFileImportedDataTable(data []byte) (*ExcelFileImportedDataTable, error) {
|
func CreateNewExcelMSCFBFileImportedDataTable(data []byte) (*ExcelMSCFBFileImportedDataTable, error) {
|
||||||
reader := bytes.NewReader(data)
|
reader := bytes.NewReader(data)
|
||||||
workbook, err := xls.OpenReader(reader, "")
|
workbook, err := xls.OpenReader(reader, "")
|
||||||
|
|
||||||
@@ -184,7 +184,7 @@ func CreateNewExcelFileImportedDataTable(data []byte) (*ExcelFileImportedDataTab
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return &ExcelFileImportedDataTable{
|
return &ExcelMSCFBFileImportedDataTable{
|
||||||
workbook: workbook,
|
workbook: workbook,
|
||||||
headerLineColumnNames: headerRowItems,
|
headerLineColumnNames: headerRowItems,
|
||||||
}, nil
|
}, nil
|
||||||
+30
-30
@@ -9,63 +9,63 @@ import (
|
|||||||
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestExcelFileImportedDataTableDataRowCount(t *testing.T) {
|
func TestExcelMSCFBFileImportedDataTableDataRowCount(t *testing.T) {
|
||||||
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xls")
|
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xls")
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
datatable, err := CreateNewExcelFileImportedDataTable(testdata)
|
datatable, err := CreateNewExcelMSCFBFileImportedDataTable(testdata)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
assert.Equal(t, 2, datatable.DataRowCount())
|
assert.Equal(t, 2, datatable.DataRowCount())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExcelFileImportedDataTableDataRowCount_MultipleSheets(t *testing.T) {
|
func TestExcelMSCFBFileImportedDataTableDataRowCount_MultipleSheets(t *testing.T) {
|
||||||
testdata, err := os.ReadFile("../../../testdata/multiple_sheets_excel_file.xls")
|
testdata, err := os.ReadFile("../../../testdata/multiple_sheets_excel_file.xls")
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
datatable, err := CreateNewExcelFileImportedDataTable(testdata)
|
datatable, err := CreateNewExcelMSCFBFileImportedDataTable(testdata)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
assert.Equal(t, 5, datatable.DataRowCount())
|
assert.Equal(t, 5, datatable.DataRowCount())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExcelFileImportedDataTableDataRowCount_OnlyHeaderLine(t *testing.T) {
|
func TestExcelMSCFBFileImportedDataTableDataRowCount_OnlyHeaderLine(t *testing.T) {
|
||||||
testdata, err := os.ReadFile("../../../testdata/only_one_row_excel_file.xls")
|
testdata, err := os.ReadFile("../../../testdata/only_one_row_excel_file.xls")
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
datatable, err := CreateNewExcelFileImportedDataTable(testdata)
|
datatable, err := CreateNewExcelMSCFBFileImportedDataTable(testdata)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
assert.Equal(t, 0, datatable.DataRowCount())
|
assert.Equal(t, 0, datatable.DataRowCount())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExcelFileImportedDataTableDataRowCount_EmptyContent(t *testing.T) {
|
func TestExcelMSCFBFileImportedDataTableDataRowCount_EmptyContent(t *testing.T) {
|
||||||
testdata, err := os.ReadFile("../../../testdata/empty_excel_file.xls")
|
testdata, err := os.ReadFile("../../../testdata/empty_excel_file.xls")
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
datatable, err := CreateNewExcelFileImportedDataTable(testdata)
|
datatable, err := CreateNewExcelMSCFBFileImportedDataTable(testdata)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
assert.Equal(t, 0, datatable.DataRowCount())
|
assert.Equal(t, 0, datatable.DataRowCount())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExcelFileImportedDataTableHeaderColumnNames(t *testing.T) {
|
func TestExcelMSCFBFileImportedDataTableHeaderColumnNames(t *testing.T) {
|
||||||
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xls")
|
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xls")
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
datatable, err := CreateNewExcelFileImportedDataTable(testdata)
|
datatable, err := CreateNewExcelMSCFBFileImportedDataTable(testdata)
|
||||||
assert.EqualValues(t, []string{"A1", "B1", "C1"}, datatable.HeaderColumnNames())
|
assert.EqualValues(t, []string{"A1", "B1", "C1"}, datatable.HeaderColumnNames())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExcelFileImportedDataTableHeaderColumnNames_EmptyContent(t *testing.T) {
|
func TestExcelMSCFBFileImportedDataTableHeaderColumnNames_EmptyContent(t *testing.T) {
|
||||||
testdata, err := os.ReadFile("../../../testdata/empty_excel_file.xls")
|
testdata, err := os.ReadFile("../../../testdata/empty_excel_file.xls")
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
datatable, err := CreateNewExcelFileImportedDataTable(testdata)
|
datatable, err := CreateNewExcelMSCFBFileImportedDataTable(testdata)
|
||||||
assert.Nil(t, datatable.HeaderColumnNames())
|
assert.Nil(t, datatable.HeaderColumnNames())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExcelFileDataRowIterator(t *testing.T) {
|
func TestExcelMSCFBFileDataRowIterator(t *testing.T) {
|
||||||
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xls")
|
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xls")
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
datatable, err := CreateNewExcelFileImportedDataTable(testdata)
|
datatable, err := CreateNewExcelMSCFBFileImportedDataTable(testdata)
|
||||||
iterator := datatable.DataRowIterator()
|
iterator := datatable.DataRowIterator()
|
||||||
assert.True(t, iterator.HasNext())
|
assert.True(t, iterator.HasNext())
|
||||||
|
|
||||||
@@ -86,11 +86,11 @@ func TestExcelFileDataRowIterator(t *testing.T) {
|
|||||||
assert.False(t, iterator.HasNext())
|
assert.False(t, iterator.HasNext())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExcelFileDataRowIterator_MultipleSheets(t *testing.T) {
|
func TestExcelMSCFBFileDataRowIterator_MultipleSheets(t *testing.T) {
|
||||||
testdata, err := os.ReadFile("../../../testdata/multiple_sheets_excel_file.xls")
|
testdata, err := os.ReadFile("../../../testdata/multiple_sheets_excel_file.xls")
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
datatable, err := CreateNewExcelFileImportedDataTable(testdata)
|
datatable, err := CreateNewExcelMSCFBFileImportedDataTable(testdata)
|
||||||
iterator := datatable.DataRowIterator()
|
iterator := datatable.DataRowIterator()
|
||||||
assert.True(t, iterator.HasNext())
|
assert.True(t, iterator.HasNext())
|
||||||
|
|
||||||
@@ -123,11 +123,11 @@ func TestExcelFileDataRowIterator_MultipleSheets(t *testing.T) {
|
|||||||
assert.False(t, iterator.HasNext())
|
assert.False(t, iterator.HasNext())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExcelFileDataRowIterator_OnlyHeaderLine(t *testing.T) {
|
func TestExcelMSCFBFileDataRowIterator_OnlyHeaderLine(t *testing.T) {
|
||||||
testdata, err := os.ReadFile("../../../testdata/only_one_row_excel_file.xls")
|
testdata, err := os.ReadFile("../../../testdata/only_one_row_excel_file.xls")
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
datatable, err := CreateNewExcelFileImportedDataTable(testdata)
|
datatable, err := CreateNewExcelMSCFBFileImportedDataTable(testdata)
|
||||||
iterator := datatable.DataRowIterator()
|
iterator := datatable.DataRowIterator()
|
||||||
assert.False(t, iterator.HasNext())
|
assert.False(t, iterator.HasNext())
|
||||||
|
|
||||||
@@ -140,11 +140,11 @@ func TestExcelFileDataRowIterator_OnlyHeaderLine(t *testing.T) {
|
|||||||
assert.False(t, iterator.HasNext())
|
assert.False(t, iterator.HasNext())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExcelFileDataRowIterator_EmptyContent(t *testing.T) {
|
func TestExcelMSCFBFileDataRowIterator_EmptyContent(t *testing.T) {
|
||||||
testdata, err := os.ReadFile("../../../testdata/empty_excel_file.xls")
|
testdata, err := os.ReadFile("../../../testdata/empty_excel_file.xls")
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
datatable, err := CreateNewExcelFileImportedDataTable(testdata)
|
datatable, err := CreateNewExcelMSCFBFileImportedDataTable(testdata)
|
||||||
iterator := datatable.DataRowIterator()
|
iterator := datatable.DataRowIterator()
|
||||||
assert.False(t, iterator.HasNext())
|
assert.False(t, iterator.HasNext())
|
||||||
|
|
||||||
@@ -157,11 +157,11 @@ func TestExcelFileDataRowIterator_EmptyContent(t *testing.T) {
|
|||||||
assert.False(t, iterator.HasNext())
|
assert.False(t, iterator.HasNext())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExcelFileDataRowColumnCount(t *testing.T) {
|
func TestExcelMSCFBFileDataRowColumnCount(t *testing.T) {
|
||||||
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xls")
|
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xls")
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
datatable, err := CreateNewExcelFileImportedDataTable(testdata)
|
datatable, err := CreateNewExcelMSCFBFileImportedDataTable(testdata)
|
||||||
iterator := datatable.DataRowIterator()
|
iterator := datatable.DataRowIterator()
|
||||||
|
|
||||||
row1 := iterator.Next()
|
row1 := iterator.Next()
|
||||||
@@ -171,11 +171,11 @@ func TestExcelFileDataRowColumnCount(t *testing.T) {
|
|||||||
assert.EqualValues(t, 4, row2.ColumnCount())
|
assert.EqualValues(t, 4, row2.ColumnCount())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExcelFileDataRowGetData(t *testing.T) {
|
func TestExcelMSCFBFileDataRowGetData(t *testing.T) {
|
||||||
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xls")
|
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xls")
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
datatable, err := CreateNewExcelFileImportedDataTable(testdata)
|
datatable, err := CreateNewExcelMSCFBFileImportedDataTable(testdata)
|
||||||
iterator := datatable.DataRowIterator()
|
iterator := datatable.DataRowIterator()
|
||||||
|
|
||||||
row1 := iterator.Next()
|
row1 := iterator.Next()
|
||||||
@@ -189,22 +189,22 @@ func TestExcelFileDataRowGetData(t *testing.T) {
|
|||||||
assert.Equal(t, "C3", row2.GetData(2))
|
assert.Equal(t, "C3", row2.GetData(2))
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExcelFileDataRowGetData_GetNotExistedColumnData(t *testing.T) {
|
func TestExcelMSCFBFileDataRowGetData_GetNotExistedColumnData(t *testing.T) {
|
||||||
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xls")
|
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xls")
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
datatable, err := CreateNewExcelFileImportedDataTable(testdata)
|
datatable, err := CreateNewExcelMSCFBFileImportedDataTable(testdata)
|
||||||
iterator := datatable.DataRowIterator()
|
iterator := datatable.DataRowIterator()
|
||||||
|
|
||||||
row1 := iterator.Next()
|
row1 := iterator.Next()
|
||||||
assert.Equal(t, "", row1.GetData(3))
|
assert.Equal(t, "", row1.GetData(3))
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExcelFileDataRowGetData_MultipleSheets(t *testing.T) {
|
func TestExcelMSCFBFileDataRowGetData_MultipleSheets(t *testing.T) {
|
||||||
testdata, err := os.ReadFile("../../../testdata/multiple_sheets_excel_file.xls")
|
testdata, err := os.ReadFile("../../../testdata/multiple_sheets_excel_file.xls")
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
datatable, err := CreateNewExcelFileImportedDataTable(testdata)
|
datatable, err := CreateNewExcelMSCFBFileImportedDataTable(testdata)
|
||||||
iterator := datatable.DataRowIterator()
|
iterator := datatable.DataRowIterator()
|
||||||
|
|
||||||
sheet1Row1 := iterator.Next()
|
sheet1Row1 := iterator.Next()
|
||||||
@@ -237,10 +237,10 @@ func TestExcelFileDataRowGetData_MultipleSheets(t *testing.T) {
|
|||||||
assert.Equal(t, "5-C3", sheet5Row2.GetData(2))
|
assert.Equal(t, "5-C3", sheet5Row2.GetData(2))
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCreateNewExcelFileImportedDataTable_MultipleSheetsWithDifferentHeaders(t *testing.T) {
|
func TestCreateNewExcelMSCFBFileImportedDataTable_MultipleSheetsWithDifferentHeaders(t *testing.T) {
|
||||||
testdata, err := os.ReadFile("../../../testdata/multiple_sheets_with_different_header_row_excel_file.xls")
|
testdata, err := os.ReadFile("../../../testdata/multiple_sheets_with_different_header_row_excel_file.xls")
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
_, err = CreateNewExcelFileImportedDataTable(testdata)
|
_, err = CreateNewExcelMSCFBFileImportedDataTable(testdata)
|
||||||
assert.EqualError(t, err, errs.ErrFieldsInMultiTableAreDifferent.Message)
|
assert.EqualError(t, err, errs.ErrFieldsInMultiTableAreDifferent.Message)
|
||||||
}
|
}
|
||||||
@@ -0,0 +1,211 @@
|
|||||||
|
package excel
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/xuri/excelize/v2"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
|
)
|
||||||
|
|
||||||
|
// excelOOXMLSheet defines the structure of excel (Office Open XML) file sheet
|
||||||
|
type excelOOXMLSheet struct {
|
||||||
|
sheetName string
|
||||||
|
allData [][]string
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExcelOOXMLFileImportedDataTable defines the structure of excel (Office Open XML) file data table
|
||||||
|
type ExcelOOXMLFileImportedDataTable struct {
|
||||||
|
sheets []*excelOOXMLSheet
|
||||||
|
headerLineColumnNames []string
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExcelOOXMLFileDataRow defines the structure of excel (Office Open XML) file data table row
|
||||||
|
type ExcelOOXMLFileDataRow struct {
|
||||||
|
sheet *excelOOXMLSheet
|
||||||
|
rowData []string
|
||||||
|
rowIndex int
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExcelOOXMLFileDataRowIterator defines the structure of excel (Office Open XML) file data table row iterator
|
||||||
|
type ExcelOOXMLFileDataRowIterator struct {
|
||||||
|
dataTable *ExcelOOXMLFileImportedDataTable
|
||||||
|
currentSheetIndex int
|
||||||
|
currentRowIndexInSheet int
|
||||||
|
}
|
||||||
|
|
||||||
|
// DataRowCount returns the total count of data row
|
||||||
|
func (t *ExcelOOXMLFileImportedDataTable) DataRowCount() int {
|
||||||
|
totalDataRowCount := 0
|
||||||
|
|
||||||
|
for i := 0; i < len(t.sheets); i++ {
|
||||||
|
sheet := t.sheets[i]
|
||||||
|
|
||||||
|
if len(sheet.allData) < 1 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
totalDataRowCount += len(sheet.allData) - 1
|
||||||
|
}
|
||||||
|
|
||||||
|
return totalDataRowCount
|
||||||
|
}
|
||||||
|
|
||||||
|
// HeaderColumnNames returns the header column name list
|
||||||
|
func (t *ExcelOOXMLFileImportedDataTable) HeaderColumnNames() []string {
|
||||||
|
return t.headerLineColumnNames
|
||||||
|
}
|
||||||
|
|
||||||
|
// DataRowIterator returns the iterator of data row
|
||||||
|
func (t *ExcelOOXMLFileImportedDataTable) DataRowIterator() datatable.ImportedDataRowIterator {
|
||||||
|
return &ExcelOOXMLFileDataRowIterator{
|
||||||
|
dataTable: t,
|
||||||
|
currentSheetIndex: 0,
|
||||||
|
currentRowIndexInSheet: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ColumnCount returns the total count of column in this data row
|
||||||
|
func (r *ExcelOOXMLFileDataRow) ColumnCount() int {
|
||||||
|
return len(r.rowData)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetData returns the data in the specified column index
|
||||||
|
func (r *ExcelOOXMLFileDataRow) GetData(columnIndex int) string {
|
||||||
|
if columnIndex < 0 || columnIndex >= len(r.rowData) {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
return r.rowData[columnIndex]
|
||||||
|
}
|
||||||
|
|
||||||
|
// HasNext returns whether the iterator does not reach the end
|
||||||
|
func (t *ExcelOOXMLFileDataRowIterator) HasNext() bool {
|
||||||
|
sheets := t.dataTable.sheets
|
||||||
|
|
||||||
|
if t.currentSheetIndex >= len(sheets) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
currentSheet := sheets[t.currentSheetIndex]
|
||||||
|
|
||||||
|
if t.currentRowIndexInSheet+1 < len(currentSheet.allData) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := t.currentSheetIndex + 1; i < len(sheets); i++ {
|
||||||
|
sheet := sheets[i]
|
||||||
|
|
||||||
|
if len(sheet.allData) <= 1 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// CurrentRowId returns current index
|
||||||
|
func (t *ExcelOOXMLFileDataRowIterator) CurrentRowId() string {
|
||||||
|
return fmt.Sprintf("table#%d-row#%d", t.currentSheetIndex, t.currentRowIndexInSheet)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Next returns the next imported data row
|
||||||
|
func (t *ExcelOOXMLFileDataRowIterator) Next() datatable.ImportedDataRow {
|
||||||
|
sheets := t.dataTable.sheets
|
||||||
|
currentRowIndexInTable := t.currentRowIndexInSheet
|
||||||
|
|
||||||
|
for i := t.currentSheetIndex; i < len(sheets); i++ {
|
||||||
|
sheet := sheets[i]
|
||||||
|
|
||||||
|
if currentRowIndexInTable+1 < len(sheet.allData) {
|
||||||
|
t.currentRowIndexInSheet++
|
||||||
|
currentRowIndexInTable = t.currentRowIndexInSheet
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
t.currentSheetIndex++
|
||||||
|
t.currentRowIndexInSheet = 0
|
||||||
|
currentRowIndexInTable = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
if t.currentSheetIndex >= len(sheets) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
currentSheet := sheets[t.currentSheetIndex]
|
||||||
|
|
||||||
|
if t.currentRowIndexInSheet >= len(currentSheet.allData) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return &ExcelOOXMLFileDataRow{
|
||||||
|
sheet: currentSheet,
|
||||||
|
rowData: currentSheet.allData[t.currentRowIndexInSheet],
|
||||||
|
rowIndex: t.currentRowIndexInSheet,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateNewExcelOOXMLFileImportedDataTable returns excel (Office Open XML) data table by file binary data
|
||||||
|
func CreateNewExcelOOXMLFileImportedDataTable(data []byte) (*ExcelOOXMLFileImportedDataTable, error) {
|
||||||
|
reader := bytes.NewReader(data)
|
||||||
|
file, err := excelize.OpenReader(reader)
|
||||||
|
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
sheetNames := file.GetSheetList()
|
||||||
|
var headerRowItems []string
|
||||||
|
var sheets []*excelOOXMLSheet
|
||||||
|
|
||||||
|
for i := 0; i < len(sheetNames); i++ {
|
||||||
|
sheetName := sheetNames[i]
|
||||||
|
allData, err := file.GetRows(sheetName)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if allData == nil || len(allData) < 1 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
row := allData[0]
|
||||||
|
|
||||||
|
if i == 0 {
|
||||||
|
for j := 0; j < len(row); j++ {
|
||||||
|
headerItem := row[j]
|
||||||
|
|
||||||
|
if headerItem == "" {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
headerRowItems = append(headerRowItems, headerItem)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
for j := 0; j < min(len(row), len(headerRowItems)); j++ {
|
||||||
|
headerItem := row[j]
|
||||||
|
|
||||||
|
if headerItem != headerRowItems[j] {
|
||||||
|
return nil, errs.ErrFieldsInMultiTableAreDifferent
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sheets = append(sheets, &excelOOXMLSheet{
|
||||||
|
sheetName: sheetName,
|
||||||
|
allData: allData,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return &ExcelOOXMLFileImportedDataTable{
|
||||||
|
sheets: sheets,
|
||||||
|
headerLineColumnNames: headerRowItems,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
@@ -0,0 +1,246 @@
|
|||||||
|
package excel
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestExcelOOXMLFileImportedDataTableDataRowCount(t *testing.T) {
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
datatable, err := CreateNewExcelOOXMLFileImportedDataTable(testdata)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, 2, datatable.DataRowCount())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExcelOOXMLFileImportedDataTableDataRowCount_MultipleSheets(t *testing.T) {
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/multiple_sheets_excel_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
datatable, err := CreateNewExcelOOXMLFileImportedDataTable(testdata)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, 5, datatable.DataRowCount())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExcelOOXMLFileImportedDataTableDataRowCount_OnlyHeaderLine(t *testing.T) {
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/only_one_row_excel_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
datatable, err := CreateNewExcelOOXMLFileImportedDataTable(testdata)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, 0, datatable.DataRowCount())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExcelOOXMLFileImportedDataTableDataRowCount_EmptyContent(t *testing.T) {
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/empty_excel_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
datatable, err := CreateNewExcelOOXMLFileImportedDataTable(testdata)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, 0, datatable.DataRowCount())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExcelOOXMLFileImportedDataTableHeaderColumnNames(t *testing.T) {
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
datatable, err := CreateNewExcelOOXMLFileImportedDataTable(testdata)
|
||||||
|
assert.EqualValues(t, []string{"A1", "B1", "C1"}, datatable.HeaderColumnNames())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExcelOOXMLFileImportedDataTableHeaderColumnNames_EmptyContent(t *testing.T) {
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/empty_excel_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
datatable, err := CreateNewExcelOOXMLFileImportedDataTable(testdata)
|
||||||
|
assert.Nil(t, datatable.HeaderColumnNames())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExcelOOXMLFileDataRowIterator(t *testing.T) {
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
datatable, err := CreateNewExcelOOXMLFileImportedDataTable(testdata)
|
||||||
|
iterator := datatable.DataRowIterator()
|
||||||
|
assert.True(t, iterator.HasNext())
|
||||||
|
|
||||||
|
// data row 1
|
||||||
|
assert.NotNil(t, iterator.Next())
|
||||||
|
assert.True(t, iterator.HasNext())
|
||||||
|
|
||||||
|
// data row 2
|
||||||
|
assert.NotNil(t, iterator.Next())
|
||||||
|
assert.False(t, iterator.HasNext())
|
||||||
|
|
||||||
|
// not existed data row 3
|
||||||
|
assert.Nil(t, iterator.Next())
|
||||||
|
assert.False(t, iterator.HasNext())
|
||||||
|
|
||||||
|
// not existed data row 4
|
||||||
|
assert.Nil(t, iterator.Next())
|
||||||
|
assert.False(t, iterator.HasNext())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExcelOOXMLFileDataRowIterator_MultipleSheets(t *testing.T) {
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/multiple_sheets_excel_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
datatable, err := CreateNewExcelOOXMLFileImportedDataTable(testdata)
|
||||||
|
iterator := datatable.DataRowIterator()
|
||||||
|
assert.True(t, iterator.HasNext())
|
||||||
|
|
||||||
|
// sheet 1 data row 1
|
||||||
|
assert.NotNil(t, iterator.Next())
|
||||||
|
assert.True(t, iterator.HasNext())
|
||||||
|
|
||||||
|
// sheet 1 data row 2
|
||||||
|
assert.NotNil(t, iterator.Next())
|
||||||
|
assert.True(t, iterator.HasNext())
|
||||||
|
|
||||||
|
// sheet 3 data row 1
|
||||||
|
assert.NotNil(t, iterator.Next())
|
||||||
|
assert.True(t, iterator.HasNext())
|
||||||
|
|
||||||
|
// sheet 5 data row 1
|
||||||
|
assert.NotNil(t, iterator.Next())
|
||||||
|
assert.True(t, iterator.HasNext())
|
||||||
|
|
||||||
|
// sheet 5 data row 2
|
||||||
|
assert.NotNil(t, iterator.Next())
|
||||||
|
assert.False(t, iterator.HasNext())
|
||||||
|
|
||||||
|
// not existed data row
|
||||||
|
assert.Nil(t, iterator.Next())
|
||||||
|
assert.False(t, iterator.HasNext())
|
||||||
|
|
||||||
|
// not existed data row
|
||||||
|
assert.Nil(t, iterator.Next())
|
||||||
|
assert.False(t, iterator.HasNext())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExcelOOXMLFileDataRowIterator_OnlyHeaderLine(t *testing.T) {
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/only_one_row_excel_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
datatable, err := CreateNewExcelOOXMLFileImportedDataTable(testdata)
|
||||||
|
iterator := datatable.DataRowIterator()
|
||||||
|
assert.False(t, iterator.HasNext())
|
||||||
|
|
||||||
|
// not existed data row 1
|
||||||
|
assert.Nil(t, iterator.Next())
|
||||||
|
assert.False(t, iterator.HasNext())
|
||||||
|
|
||||||
|
// not existed data row 2
|
||||||
|
assert.Nil(t, iterator.Next())
|
||||||
|
assert.False(t, iterator.HasNext())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExcelOOXMLFileDataRowIterator_EmptyContent(t *testing.T) {
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/empty_excel_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
datatable, err := CreateNewExcelOOXMLFileImportedDataTable(testdata)
|
||||||
|
iterator := datatable.DataRowIterator()
|
||||||
|
assert.False(t, iterator.HasNext())
|
||||||
|
|
||||||
|
// not existed data row 1
|
||||||
|
assert.Nil(t, iterator.Next())
|
||||||
|
assert.False(t, iterator.HasNext())
|
||||||
|
|
||||||
|
// not existed data row 2
|
||||||
|
assert.Nil(t, iterator.Next())
|
||||||
|
assert.False(t, iterator.HasNext())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExcelOOXMLFileDataRowColumnCount(t *testing.T) {
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
datatable, err := CreateNewExcelOOXMLFileImportedDataTable(testdata)
|
||||||
|
iterator := datatable.DataRowIterator()
|
||||||
|
|
||||||
|
row1 := iterator.Next()
|
||||||
|
assert.EqualValues(t, 3, row1.ColumnCount())
|
||||||
|
|
||||||
|
row2 := iterator.Next()
|
||||||
|
assert.EqualValues(t, 3, row2.ColumnCount())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExcelOOXMLFileDataRowGetData(t *testing.T) {
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
datatable, err := CreateNewExcelOOXMLFileImportedDataTable(testdata)
|
||||||
|
iterator := datatable.DataRowIterator()
|
||||||
|
|
||||||
|
row1 := iterator.Next()
|
||||||
|
assert.Equal(t, "A2", row1.GetData(0))
|
||||||
|
assert.Equal(t, "B2", row1.GetData(1))
|
||||||
|
assert.Equal(t, "C2", row1.GetData(2))
|
||||||
|
|
||||||
|
row2 := iterator.Next()
|
||||||
|
assert.Equal(t, "A3", row2.GetData(0))
|
||||||
|
assert.Equal(t, "B3", row2.GetData(1))
|
||||||
|
assert.Equal(t, "C3", row2.GetData(2))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExcelOOXMLFileDataRowGetData_GetNotExistedColumnData(t *testing.T) {
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/simple_excel_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
datatable, err := CreateNewExcelOOXMLFileImportedDataTable(testdata)
|
||||||
|
iterator := datatable.DataRowIterator()
|
||||||
|
|
||||||
|
row1 := iterator.Next()
|
||||||
|
assert.Equal(t, "", row1.GetData(3))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExcelOOXMLFileDataRowGetData_MultipleSheets(t *testing.T) {
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/multiple_sheets_excel_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
datatable, err := CreateNewExcelOOXMLFileImportedDataTable(testdata)
|
||||||
|
iterator := datatable.DataRowIterator()
|
||||||
|
|
||||||
|
sheet1Row1 := iterator.Next()
|
||||||
|
assert.Equal(t, "1-A2", sheet1Row1.GetData(0))
|
||||||
|
assert.Equal(t, "1-B2", sheet1Row1.GetData(1))
|
||||||
|
assert.Equal(t, "1-C2", sheet1Row1.GetData(2))
|
||||||
|
|
||||||
|
sheet1Row2 := iterator.Next()
|
||||||
|
assert.Equal(t, "1-A3", sheet1Row2.GetData(0))
|
||||||
|
assert.Equal(t, "1-B3", sheet1Row2.GetData(1))
|
||||||
|
assert.Equal(t, "1-C3", sheet1Row2.GetData(2))
|
||||||
|
|
||||||
|
// skip empty sheet2
|
||||||
|
|
||||||
|
sheet3Row1 := iterator.Next()
|
||||||
|
assert.Equal(t, "3-A2", sheet3Row1.GetData(0))
|
||||||
|
assert.Equal(t, "3-B2", sheet3Row1.GetData(1))
|
||||||
|
assert.Equal(t, "", sheet3Row1.GetData(2))
|
||||||
|
|
||||||
|
// skip no data row sheet4
|
||||||
|
|
||||||
|
sheet5Row1 := iterator.Next()
|
||||||
|
assert.Equal(t, "5-A2", sheet5Row1.GetData(0))
|
||||||
|
assert.Equal(t, "5-B2", sheet5Row1.GetData(1))
|
||||||
|
assert.Equal(t, "5-C2", sheet5Row1.GetData(2))
|
||||||
|
|
||||||
|
sheet5Row2 := iterator.Next()
|
||||||
|
assert.Equal(t, "5-A3", sheet5Row2.GetData(0))
|
||||||
|
assert.Equal(t, "5-B3", sheet5Row2.GetData(1))
|
||||||
|
assert.Equal(t, "5-C3", sheet5Row2.GetData(2))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCreateNewExcelOOXMLFileImportedDataTable_MultipleSheetsWithDifferentHeaders(t *testing.T) {
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/multiple_sheets_with_different_header_row_excel_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
_, err = CreateNewExcelOOXMLFileImportedDataTable(testdata)
|
||||||
|
assert.EqualError(t, err, errs.ErrFieldsInMultiTableAreDifferent.Message)
|
||||||
|
}
|
||||||
@@ -8,6 +8,7 @@ import (
|
|||||||
"io"
|
"io"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/converter"
|
||||||
csvdatatable "github.com/mayswind/ezbookkeeping/pkg/converters/csv"
|
csvdatatable "github.com/mayswind/ezbookkeeping/pkg/converters/csv"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
@@ -29,6 +30,7 @@ const feideeMymoneyAppTransactionDescriptionColumnName = "备注"
|
|||||||
const feideeMymoneyAppTransactionRelatedIdColumnName = "关联Id"
|
const feideeMymoneyAppTransactionRelatedIdColumnName = "关联Id"
|
||||||
|
|
||||||
const feideeMymoneyAppTransactionTypeModifyBalanceText = "余额变更"
|
const feideeMymoneyAppTransactionTypeModifyBalanceText = "余额变更"
|
||||||
|
const feideeMymoneyAppTransactionTypeModifyOutstandingBalanceText = "负债变更"
|
||||||
const feideeMymoneyAppTransactionTypeIncomeText = "收入"
|
const feideeMymoneyAppTransactionTypeIncomeText = "收入"
|
||||||
const feideeMymoneyAppTransactionTypeExpenseText = "支出"
|
const feideeMymoneyAppTransactionTypeExpenseText = "支出"
|
||||||
const feideeMymoneyAppTransactionTypeTransferInText = "转入"
|
const feideeMymoneyAppTransactionTypeTransferInText = "转入"
|
||||||
@@ -54,7 +56,7 @@ var (
|
|||||||
)
|
)
|
||||||
|
|
||||||
// ParseImportedData returns the imported data by parsing the feidee mymoney app transaction csv data
|
// ParseImportedData returns the imported data by parsing the feidee mymoney app transaction csv data
|
||||||
func (c *feideeMymoneyAppTransactionDataCsvFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
func (c *feideeMymoneyAppTransactionDataCsvFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
||||||
fallback := unicode.UTF8.NewDecoder()
|
fallback := unicode.UTF8.NewDecoder()
|
||||||
reader := transform.NewReader(bytes.NewReader(data), unicode.BOMOverride(fallback))
|
reader := transform.NewReader(bytes.NewReader(data), unicode.BOMOverride(fallback))
|
||||||
|
|
||||||
@@ -82,7 +84,7 @@ func (c *feideeMymoneyAppTransactionDataCsvFileImporter) ParseImportedData(ctx c
|
|||||||
return nil, nil, nil, nil, nil, nil, err
|
return nil, nil, nil, nil, nil, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
dataTableImporter := datatable.CreateNewSimpleImporter(feideeMymoneyTransactionTypeNameMapping)
|
dataTableImporter := converter.CreateNewSimpleImporterWithTypeNameMapping(feideeMymoneyTransactionTypeNameMapping)
|
||||||
|
|
||||||
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
||||||
}
|
}
|
||||||
@@ -189,9 +191,12 @@ func (c *feideeMymoneyAppTransactionDataCsvFileImporter) createNewFeideeMymoneyA
|
|||||||
|
|
||||||
transactionType := data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE]
|
transactionType := data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE]
|
||||||
|
|
||||||
if transactionType == feideeMymoneyAppTransactionTypeModifyBalanceText || transactionType == feideeMymoneyAppTransactionTypeIncomeText || transactionType == feideeMymoneyAppTransactionTypeExpenseText {
|
if transactionType == feideeMymoneyAppTransactionTypeModifyBalanceText || transactionType == feideeMymoneyAppTransactionTypeModifyOutstandingBalanceText ||
|
||||||
|
transactionType == feideeMymoneyAppTransactionTypeIncomeText || transactionType == feideeMymoneyAppTransactionTypeExpenseText {
|
||||||
if transactionType == feideeMymoneyAppTransactionTypeModifyBalanceText {
|
if transactionType == feideeMymoneyAppTransactionTypeModifyBalanceText {
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = feideeMymoneyTransactionTypeNameMapping[models.TRANSACTION_TYPE_MODIFY_BALANCE]
|
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = feideeMymoneyTransactionTypeNameMapping[models.TRANSACTION_TYPE_MODIFY_BALANCE]
|
||||||
|
} else if transactionType == feideeMymoneyAppTransactionTypeModifyOutstandingBalanceText {
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = feideeMymoneyTransactionTypeModifyOutstandingBalanceName
|
||||||
} else if transactionType == feideeMymoneyAppTransactionTypeIncomeText {
|
} else if transactionType == feideeMymoneyAppTransactionTypeIncomeText {
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = feideeMymoneyTransactionTypeNameMapping[models.TRANSACTION_TYPE_INCOME]
|
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = feideeMymoneyTransactionTypeNameMapping[models.TRANSACTION_TYPE_INCOME]
|
||||||
} else if transactionType == feideeMymoneyAppTransactionTypeExpenseText {
|
} else if transactionType == feideeMymoneyAppTransactionTypeExpenseText {
|
||||||
|
|||||||
@@ -109,6 +109,56 @@ func TestFeideeMymoneyCsvFileImporterParseImportedData_MinimumValidData(t *testi
|
|||||||
assert.Equal(t, "Test Category3", allNewSubTransferCategories[0].Name)
|
assert.Equal(t, "Test Category3", allNewSubTransferCategories[0].Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseOutstandingBalanceModification(t *testing.T) {
|
||||||
|
converter := FeideeMymoneyAppTransactionDataCsvFileImporter
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
user := &models.User{
|
||||||
|
Uid: 1234567890,
|
||||||
|
DefaultCurrency: "CNY",
|
||||||
|
}
|
||||||
|
|
||||||
|
allNewTransactions, allNewAccounts, allNewSubExpenseCategories, allNewSubIncomeCategories, _, _, err := converter.ParseImportedData(context, user, []byte("随手记导出文件(headers:v5;xxxxx)\n"+
|
||||||
|
"\"交易类型\",\"日期\",\"子类别\",\"账户\",\"金额\",\"备注\",\"关联Id\"\n"+
|
||||||
|
"\"负债变更\",\"2024-09-01 00:00:00\",\"\",\"Test Account\",\"123.45\",\"\",\"\"\n"+
|
||||||
|
"\"负债变更\",\"2024-09-01 01:00:00\",\"\",\"Test Account2\",\"-0.12\",\"\",\"\"\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 2, len(allNewTransactions))
|
||||||
|
assert.Equal(t, 2, len(allNewAccounts))
|
||||||
|
assert.Equal(t, 1, len(allNewSubExpenseCategories))
|
||||||
|
assert.Equal(t, 1, len(allNewSubIncomeCategories))
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[0].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_EXPENSE, allNewTransactions[0].Type)
|
||||||
|
assert.Equal(t, "2024-09-01 00:00:00", utils.FormatUnixTimeToLongDateTime(utils.GetUnixTimeFromTransactionTime(allNewTransactions[0].TransactionTime), time.UTC))
|
||||||
|
assert.Equal(t, int64(12345), allNewTransactions[0].Amount)
|
||||||
|
assert.Equal(t, "Test Account", allNewTransactions[0].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "", allNewTransactions[0].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[1].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_INCOME, allNewTransactions[1].Type)
|
||||||
|
assert.Equal(t, "2024-09-01 01:00:00", utils.FormatUnixTimeToLongDateTime(utils.GetUnixTimeFromTransactionTime(allNewTransactions[1].TransactionTime), time.UTC))
|
||||||
|
assert.Equal(t, int64(12), allNewTransactions[1].Amount)
|
||||||
|
assert.Equal(t, "Test Account2", allNewTransactions[1].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "", allNewTransactions[1].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewAccounts[0].Uid)
|
||||||
|
assert.Equal(t, "Test Account", allNewAccounts[0].Name)
|
||||||
|
assert.Equal(t, "CNY", allNewAccounts[0].Currency)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewAccounts[1].Uid)
|
||||||
|
assert.Equal(t, "Test Account2", allNewAccounts[1].Name)
|
||||||
|
assert.Equal(t, "CNY", allNewAccounts[1].Currency)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewSubExpenseCategories[0].Uid)
|
||||||
|
assert.Equal(t, "", allNewSubExpenseCategories[0].Name)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewSubIncomeCategories[0].Uid)
|
||||||
|
assert.Equal(t, "", allNewSubIncomeCategories[0].Name)
|
||||||
|
}
|
||||||
|
|
||||||
func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseInvalidTime(t *testing.T) {
|
func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseInvalidTime(t *testing.T) {
|
||||||
converter := FeideeMymoneyAppTransactionDataCsvFileImporter
|
converter := FeideeMymoneyAppTransactionDataCsvFileImporter
|
||||||
context := core.NewNullContext()
|
context := core.NewNullContext()
|
||||||
@@ -238,6 +288,11 @@ func TestFeideeMymoneyCsvFileImporterParseImportedData_ParseInvalidAmount(t *tes
|
|||||||
"\"余额变更\",\"2024-09-01 01:23:45\",\"\",\"Test Account\",\"123 45\",\"\",\"\""), 0, nil, nil, nil, nil, nil)
|
"\"余额变更\",\"2024-09-01 01:23:45\",\"\",\"Test Account\",\"123 45\",\"\",\"\""), 0, nil, nil, nil, nil, nil)
|
||||||
assert.EqualError(t, err, errs.ErrAmountInvalid.Message)
|
assert.EqualError(t, err, errs.ErrAmountInvalid.Message)
|
||||||
|
|
||||||
|
_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte("随手记导出文件(headers:v5;xxxxx)\n"+
|
||||||
|
"\"交易类型\",\"日期\",\"子类别\",\"账户\",\"金额\",\"备注\",\"关联Id\"\n"+
|
||||||
|
"\"负债变更\",\"2024-09-01 01:23:45\",\"\",\"Test Account\",\"123 45\",\"\",\"\""), 0, nil, nil, nil, nil, nil)
|
||||||
|
assert.EqualError(t, err, errs.ErrAmountInvalid.Message)
|
||||||
|
|
||||||
_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte("随手记导出文件(headers:v5;xxxxx)\n"+
|
_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte("随手记导出文件(headers:v5;xxxxx)\n"+
|
||||||
"\"交易类型\",\"日期\",\"子类别\",\"账户\",\"金额\",\"备注\",\"关联Id\"\n"+
|
"\"交易类型\",\"日期\",\"子类别\",\"账户\",\"金额\",\"备注\",\"关联Id\"\n"+
|
||||||
"\"转出\",\"2024-09-01 12:34:56\",\"Test Category\",\"Test Account\",\"123 45\",\"\",\"00000000-0000-0000-0000-000000000001\"\n"+
|
"\"转出\",\"2024-09-01 12:34:56\",\"Test Category\",\"Test Account\",\"123 45\",\"\",\"00000000-0000-0000-0000-000000000001\"\n"+
|
||||||
|
|||||||
@@ -0,0 +1,87 @@
|
|||||||
|
package feidee
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
var FEIDEE_MYMONEY_ELECLOUD_TRANSACTION_TYPE_MODIFY_BALANCE_NAME = "余额变更"
|
||||||
|
var FEIDEE_MYMONEY_ELECLOUD_TRANSACTION_TYPE_OUTSTANDING_MODIFY_BALANCE_NAME = "负债变更"
|
||||||
|
var FEIDEE_MYMONEY_ELECLOUD_TRANSACTION_TYPE_INCOME_NAME = "收入"
|
||||||
|
var FEIDEE_MYMONEY_ELECLOUD_TRANSACTION_TYPE_EXPENSE_NAME = "支出"
|
||||||
|
|
||||||
|
var feideeMymoneyElecloudTransactionTypeNameMapping = map[string]models.TransactionType{
|
||||||
|
FEIDEE_MYMONEY_ELECLOUD_TRANSACTION_TYPE_MODIFY_BALANCE_NAME: models.TRANSACTION_TYPE_MODIFY_BALANCE,
|
||||||
|
FEIDEE_MYMONEY_ELECLOUD_TRANSACTION_TYPE_OUTSTANDING_MODIFY_BALANCE_NAME: models.TRANSACTION_TYPE_MODIFY_BALANCE,
|
||||||
|
FEIDEE_MYMONEY_ELECLOUD_TRANSACTION_TYPE_INCOME_NAME: models.TRANSACTION_TYPE_INCOME,
|
||||||
|
FEIDEE_MYMONEY_ELECLOUD_TRANSACTION_TYPE_EXPENSE_NAME: models.TRANSACTION_TYPE_EXPENSE,
|
||||||
|
"转账": models.TRANSACTION_TYPE_TRANSFER,
|
||||||
|
"借入": models.TRANSACTION_TYPE_TRANSFER,
|
||||||
|
"借出": models.TRANSACTION_TYPE_TRANSFER,
|
||||||
|
"收债": models.TRANSACTION_TYPE_TRANSFER,
|
||||||
|
"还债": models.TRANSACTION_TYPE_TRANSFER,
|
||||||
|
"代付": models.TRANSACTION_TYPE_TRANSFER,
|
||||||
|
"报销": models.TRANSACTION_TYPE_TRANSFER,
|
||||||
|
"退款": models.TRANSACTION_TYPE_EXPENSE,
|
||||||
|
}
|
||||||
|
|
||||||
|
// feideeMymoneyElecloudTransactionDataRowParser defines the structure of feidee mymoney (elecloud) transaction data row parser
|
||||||
|
type feideeMymoneyElecloudTransactionDataRowParser struct {
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetAddedColumns returns the added columns after converting the data row
|
||||||
|
func (p *feideeMymoneyElecloudTransactionDataRowParser) GetAddedColumns() []datatable.TransactionDataTableColumn {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse returns the converted transaction data row
|
||||||
|
func (p *feideeMymoneyElecloudTransactionDataRowParser) Parse(data map[datatable.TransactionDataTableColumn]string) (rowData map[datatable.TransactionDataTableColumn]string, rowDataValid bool, err error) {
|
||||||
|
rowData = make(map[datatable.TransactionDataTableColumn]string, len(data))
|
||||||
|
|
||||||
|
for column, value := range data {
|
||||||
|
rowData[column] = value
|
||||||
|
}
|
||||||
|
|
||||||
|
rowData[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = strings.ReplaceAll(rowData[datatable.TRANSACTION_DATA_TABLE_AMOUNT], ",", "") // remove thousand separator
|
||||||
|
|
||||||
|
if rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] == FEIDEE_MYMONEY_ELECLOUD_TRANSACTION_TYPE_MODIFY_BALANCE_NAME {
|
||||||
|
amount, err := utils.ParseAmount(rowData[datatable.TRANSACTION_DATA_TABLE_AMOUNT])
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, false, errs.ErrAmountInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
// balance modification transaction in feidee mymoney (elecloud) is not the opening balance transaction, it can be added many times
|
||||||
|
if amount >= 0 {
|
||||||
|
rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = FEIDEE_MYMONEY_ELECLOUD_TRANSACTION_TYPE_INCOME_NAME
|
||||||
|
} else {
|
||||||
|
rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = FEIDEE_MYMONEY_ELECLOUD_TRANSACTION_TYPE_EXPENSE_NAME
|
||||||
|
rowData[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(-amount)
|
||||||
|
}
|
||||||
|
} else if rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] == FEIDEE_MYMONEY_ELECLOUD_TRANSACTION_TYPE_OUTSTANDING_MODIFY_BALANCE_NAME {
|
||||||
|
amount, err := utils.ParseAmount(rowData[datatable.TRANSACTION_DATA_TABLE_AMOUNT])
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, false, errs.ErrAmountInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
// outstanding balance modification transaction in feidee mymoney app is not the opening balance transaction, it can be added many times
|
||||||
|
if amount >= 0 {
|
||||||
|
rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = FEIDEE_MYMONEY_ELECLOUD_TRANSACTION_TYPE_EXPENSE_NAME
|
||||||
|
} else {
|
||||||
|
rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = FEIDEE_MYMONEY_ELECLOUD_TRANSACTION_TYPE_INCOME_NAME
|
||||||
|
rowData[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(-amount)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return rowData, true, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// createFeideeMymoneyElecloudTransactionDataRowParser returns feidee mymoney (elecloud) transaction data row parser
|
||||||
|
func createFeideeMymoneyElecloudTransactionDataRowParser() datatable.TransactionDataRowParser {
|
||||||
|
return &feideeMymoneyElecloudTransactionDataRowParser{}
|
||||||
|
}
|
||||||
@@ -0,0 +1,46 @@
|
|||||||
|
package feidee
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/converter"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/excel"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
var feideeMymoneyElecloudDataColumnNameMapping = map[datatable.TransactionDataTableColumn]string{
|
||||||
|
datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TIME: "日期",
|
||||||
|
datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE: "交易类型",
|
||||||
|
datatable.TRANSACTION_DATA_TABLE_CATEGORY: "分类",
|
||||||
|
datatable.TRANSACTION_DATA_TABLE_SUB_CATEGORY: "子分类",
|
||||||
|
datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME: "账户1",
|
||||||
|
datatable.TRANSACTION_DATA_TABLE_ACCOUNT_CURRENCY: "账户币种",
|
||||||
|
datatable.TRANSACTION_DATA_TABLE_AMOUNT: "金额",
|
||||||
|
datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME: "账户2",
|
||||||
|
datatable.TRANSACTION_DATA_TABLE_DESCRIPTION: "备注",
|
||||||
|
}
|
||||||
|
|
||||||
|
// feideeMymoneyElecloudTransactionDataXlsxFileImporter defines the structure of feidee mymoney (elecloud) xlsx importer for transaction data
|
||||||
|
type feideeMymoneyElecloudTransactionDataXlsxFileImporter struct {
|
||||||
|
converter.DataTableTransactionDataImporter
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize a feidee mymoney (elecloud) transaction data xlsx file importer singleton instance
|
||||||
|
var (
|
||||||
|
FeideeMymoneyElecloudTransactionDataXlsxFileImporter = &feideeMymoneyElecloudTransactionDataXlsxFileImporter{}
|
||||||
|
)
|
||||||
|
|
||||||
|
// ParseImportedData returns the imported data by parsing the feidee mymoney (elecloud) transaction xlsx data
|
||||||
|
func (c *feideeMymoneyElecloudTransactionDataXlsxFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
||||||
|
dataTable, err := excel.CreateNewExcelOOXMLFileImportedDataTable(data)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, nil, nil, nil, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
transactionRowParser := createFeideeMymoneyElecloudTransactionDataRowParser()
|
||||||
|
transactionDataTable := datatable.CreateNewImportedTransactionDataTableWithRowParser(dataTable, feideeMymoneyElecloudDataColumnNameMapping, transactionRowParser)
|
||||||
|
dataTableImporter := converter.CreateNewSimpleImporter(feideeMymoneyElecloudTransactionTypeNameMapping)
|
||||||
|
|
||||||
|
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
||||||
|
}
|
||||||
+117
@@ -0,0 +1,117 @@
|
|||||||
|
package feidee
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestFeideeMymoneyElecloudTransactionDataXlsxImporterParseImportedData_MinimumValidData(t *testing.T) {
|
||||||
|
converter := FeideeMymoneyElecloudTransactionDataXlsxFileImporter
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
user := &models.User{
|
||||||
|
Uid: 1234567890,
|
||||||
|
DefaultCurrency: "USD",
|
||||||
|
}
|
||||||
|
|
||||||
|
testdata, err := os.ReadFile("../../../testdata/feidee_mymoney_elecloud_test_file.xlsx")
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
allNewTransactions, allNewAccounts, allNewSubExpenseCategories, allNewSubIncomeCategories, allNewSubTransferCategories, allNewTags, err := converter.ParseImportedData(context, user, testdata, 0, nil, nil, nil, nil, nil)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 7, len(allNewTransactions))
|
||||||
|
assert.Equal(t, 2, len(allNewAccounts))
|
||||||
|
assert.Equal(t, 3, len(allNewSubExpenseCategories))
|
||||||
|
assert.Equal(t, 3, len(allNewSubIncomeCategories))
|
||||||
|
assert.Equal(t, 1, len(allNewSubTransferCategories))
|
||||||
|
assert.Equal(t, 0, len(allNewTags))
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[0].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_INCOME, allNewTransactions[0].Type)
|
||||||
|
assert.Equal(t, "2024-09-01 00:00:00", utils.FormatUnixTimeToLongDateTime(utils.GetUnixTimeFromTransactionTime(allNewTransactions[0].TransactionTime), time.UTC))
|
||||||
|
assert.Equal(t, int64(12345), allNewTransactions[0].Amount)
|
||||||
|
assert.Equal(t, "Test Account", allNewTransactions[0].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "", allNewTransactions[0].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[1].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_EXPENSE, allNewTransactions[1].Type)
|
||||||
|
assert.Equal(t, "2024-09-01 01:00:00", utils.FormatUnixTimeToLongDateTime(utils.GetUnixTimeFromTransactionTime(allNewTransactions[1].TransactionTime), time.UTC))
|
||||||
|
assert.Equal(t, int64(12), allNewTransactions[1].Amount)
|
||||||
|
assert.Equal(t, "Test Account2", allNewTransactions[1].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "", allNewTransactions[1].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[2].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_INCOME, allNewTransactions[2].Type)
|
||||||
|
assert.Equal(t, "2024-09-01 01:23:45", utils.FormatUnixTimeToLongDateTime(utils.GetUnixTimeFromTransactionTime(allNewTransactions[2].TransactionTime), time.UTC))
|
||||||
|
assert.Equal(t, int64(12), allNewTransactions[2].Amount)
|
||||||
|
assert.Equal(t, "Test Account", allNewTransactions[2].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Test Category", allNewTransactions[2].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[3].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_EXPENSE, allNewTransactions[3].Type)
|
||||||
|
assert.Equal(t, "2024-09-01 12:34:56", utils.FormatUnixTimeToLongDateTime(utils.GetUnixTimeFromTransactionTime(allNewTransactions[3].TransactionTime), time.UTC))
|
||||||
|
assert.Equal(t, int64(100), allNewTransactions[3].Amount)
|
||||||
|
assert.Equal(t, "Test Account2", allNewTransactions[3].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Test Category2", allNewTransactions[3].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[4].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_TRANSFER_OUT, allNewTransactions[4].Type)
|
||||||
|
assert.Equal(t, "2024-09-01 23:59:59", utils.FormatUnixTimeToLongDateTime(utils.GetUnixTimeFromTransactionTime(allNewTransactions[4].TransactionTime), time.UTC))
|
||||||
|
assert.Equal(t, int64(5), allNewTransactions[4].Amount)
|
||||||
|
assert.Equal(t, "Test Comment5", allNewTransactions[4].Comment)
|
||||||
|
assert.Equal(t, "Test Account", allNewTransactions[4].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Test Account2", allNewTransactions[4].OriginalDestinationAccountName)
|
||||||
|
assert.Equal(t, "", allNewTransactions[4].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[5].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_INCOME, allNewTransactions[5].Type)
|
||||||
|
assert.Equal(t, "2024-09-10 00:00:00", utils.FormatUnixTimeToLongDateTime(utils.GetUnixTimeFromTransactionTime(allNewTransactions[5].TransactionTime), time.UTC))
|
||||||
|
assert.Equal(t, int64(-654300), allNewTransactions[5].Amount)
|
||||||
|
assert.Equal(t, "Test Account2", allNewTransactions[5].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Test Category5", allNewTransactions[5].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[6].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_EXPENSE, allNewTransactions[6].Type)
|
||||||
|
assert.Equal(t, "2024-09-11 05:06:00", utils.FormatUnixTimeToLongDateTime(utils.GetUnixTimeFromTransactionTime(allNewTransactions[6].TransactionTime), time.UTC))
|
||||||
|
assert.Equal(t, int64(-112340), allNewTransactions[6].Amount)
|
||||||
|
assert.Equal(t, "Foo#\\r\\nBar", allNewTransactions[6].Comment)
|
||||||
|
assert.Equal(t, "Test Account", allNewTransactions[6].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Test Category4", allNewTransactions[6].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewAccounts[0].Uid)
|
||||||
|
assert.Equal(t, "Test Account2", allNewAccounts[0].Name)
|
||||||
|
assert.Equal(t, "CNY", allNewAccounts[0].Currency)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewAccounts[1].Uid)
|
||||||
|
assert.Equal(t, "Test Account", allNewAccounts[1].Name)
|
||||||
|
assert.Equal(t, "CNY", allNewAccounts[1].Currency)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewSubExpenseCategories[0].Uid)
|
||||||
|
assert.Equal(t, "", allNewSubExpenseCategories[0].Name)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewSubExpenseCategories[1].Uid)
|
||||||
|
assert.Equal(t, "Test Category4", allNewSubExpenseCategories[1].Name)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewSubExpenseCategories[2].Uid)
|
||||||
|
assert.Equal(t, "Test Category2", allNewSubExpenseCategories[2].Name)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewSubIncomeCategories[0].Uid)
|
||||||
|
assert.Equal(t, "", allNewSubIncomeCategories[0].Name)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewSubIncomeCategories[1].Uid)
|
||||||
|
assert.Equal(t, "Test Category5", allNewSubIncomeCategories[1].Name)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewSubIncomeCategories[2].Uid)
|
||||||
|
assert.Equal(t, "Test Category", allNewSubIncomeCategories[2].Name)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewSubTransferCategories[0].Uid)
|
||||||
|
assert.Equal(t, "", allNewSubTransferCategories[0].Name)
|
||||||
|
}
|
||||||
@@ -14,6 +14,8 @@ var feideeMymoneyTransactionTypeNameMapping = map[models.TransactionType]string{
|
|||||||
models.TRANSACTION_TYPE_TRANSFER: "转账",
|
models.TRANSACTION_TYPE_TRANSFER: "转账",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var feideeMymoneyTransactionTypeModifyOutstandingBalanceName = "负债变更"
|
||||||
|
|
||||||
// feideeMymoneyTransactionDataRowParser defines the structure of feidee mymoney transaction data row parser
|
// feideeMymoneyTransactionDataRowParser defines the structure of feidee mymoney transaction data row parser
|
||||||
type feideeMymoneyTransactionDataRowParser struct {
|
type feideeMymoneyTransactionDataRowParser struct {
|
||||||
}
|
}
|
||||||
@@ -49,6 +51,20 @@ func (p *feideeMymoneyTransactionDataRowParser) Parse(data map[datatable.Transac
|
|||||||
rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = feideeMymoneyTransactionTypeNameMapping[models.TRANSACTION_TYPE_EXPENSE]
|
rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = feideeMymoneyTransactionTypeNameMapping[models.TRANSACTION_TYPE_EXPENSE]
|
||||||
rowData[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(-amount)
|
rowData[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(-amount)
|
||||||
}
|
}
|
||||||
|
} else if rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] == feideeMymoneyTransactionTypeModifyOutstandingBalanceName {
|
||||||
|
amount, err := utils.ParseAmount(rowData[datatable.TRANSACTION_DATA_TABLE_AMOUNT])
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, false, errs.ErrAmountInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
// outstanding balance modification transaction in feidee mymoney app is not the opening balance transaction, it can be added many times
|
||||||
|
if amount >= 0 {
|
||||||
|
rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = feideeMymoneyTransactionTypeNameMapping[models.TRANSACTION_TYPE_EXPENSE]
|
||||||
|
} else {
|
||||||
|
rowData[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = feideeMymoneyTransactionTypeNameMapping[models.TRANSACTION_TYPE_INCOME]
|
||||||
|
rowData[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(-amount)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return rowData, true, nil
|
return rowData, true, nil
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
package feidee
|
package feidee
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/converter"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/excel"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/excel"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
@@ -20,7 +21,7 @@ var feideeMymoneyWebDataColumnNameMapping = map[datatable.TransactionDataTableCo
|
|||||||
|
|
||||||
// feideeMymoneyWebTransactionDataXlsFileImporter defines the structure of feidee mymoney (web) xls importer for transaction data
|
// feideeMymoneyWebTransactionDataXlsFileImporter defines the structure of feidee mymoney (web) xls importer for transaction data
|
||||||
type feideeMymoneyWebTransactionDataXlsFileImporter struct {
|
type feideeMymoneyWebTransactionDataXlsFileImporter struct {
|
||||||
datatable.DataTableTransactionDataImporter
|
converter.DataTableTransactionDataImporter
|
||||||
}
|
}
|
||||||
|
|
||||||
// Initialize a feidee mymoney (web) transaction data xls file importer singleton instance
|
// Initialize a feidee mymoney (web) transaction data xls file importer singleton instance
|
||||||
@@ -29,8 +30,8 @@ var (
|
|||||||
)
|
)
|
||||||
|
|
||||||
// ParseImportedData returns the imported data by parsing the feidee mymoney (web) transaction xls data
|
// ParseImportedData returns the imported data by parsing the feidee mymoney (web) transaction xls data
|
||||||
func (c *feideeMymoneyWebTransactionDataXlsFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
func (c *feideeMymoneyWebTransactionDataXlsFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
||||||
dataTable, err := excel.CreateNewExcelFileImportedDataTable(data)
|
dataTable, err := excel.CreateNewExcelMSCFBFileImportedDataTable(data)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, nil, nil, nil, nil, err
|
return nil, nil, nil, nil, nil, nil, err
|
||||||
@@ -38,7 +39,7 @@ func (c *feideeMymoneyWebTransactionDataXlsFileImporter) ParseImportedData(ctx c
|
|||||||
|
|
||||||
transactionRowParser := createFeideeMymoneyTransactionDataRowParser()
|
transactionRowParser := createFeideeMymoneyTransactionDataRowParser()
|
||||||
transactionDataTable := datatable.CreateNewImportedTransactionDataTableWithRowParser(dataTable, feideeMymoneyWebDataColumnNameMapping, transactionRowParser)
|
transactionDataTable := datatable.CreateNewImportedTransactionDataTableWithRowParser(dataTable, feideeMymoneyWebDataColumnNameMapping, transactionRowParser)
|
||||||
dataTableImporter := datatable.CreateNewSimpleImporter(feideeMymoneyTransactionTypeNameMapping)
|
dataTableImporter := converter.CreateNewSimpleImporterWithTypeNameMapping(feideeMymoneyTransactionTypeNameMapping)
|
||||||
|
|
||||||
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ package fireflyIII
|
|||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/converter"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/csv"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/csv"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
@@ -39,7 +40,7 @@ var (
|
|||||||
)
|
)
|
||||||
|
|
||||||
// ParseImportedData returns the imported data by parsing the firefly III transaction csv data
|
// ParseImportedData returns the imported data by parsing the firefly III transaction csv data
|
||||||
func (c *fireflyIIITransactionDataCsvFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
func (c *fireflyIIITransactionDataCsvFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
||||||
reader := bytes.NewReader(data)
|
reader := bytes.NewReader(data)
|
||||||
dataTable, err := csv.CreateNewCsvImportedDataTable(ctx, reader)
|
dataTable, err := csv.CreateNewCsvImportedDataTable(ctx, reader)
|
||||||
|
|
||||||
@@ -49,7 +50,7 @@ func (c *fireflyIIITransactionDataCsvFileImporter) ParseImportedData(ctx core.Co
|
|||||||
|
|
||||||
transactionRowParser := createFireflyIIITransactionDataRowParser()
|
transactionRowParser := createFireflyIIITransactionDataRowParser()
|
||||||
transactionDataTable := datatable.CreateNewImportedTransactionDataTableWithRowParser(dataTable, fireflyIIITransactionDataColumnNameMapping, transactionRowParser)
|
transactionDataTable := datatable.CreateNewImportedTransactionDataTableWithRowParser(dataTable, fireflyIIITransactionDataColumnNameMapping, transactionRowParser)
|
||||||
dataTableImporter := datatable.CreateNewImporter(fireflyIIITransactionTypeNameMapping, "", ",")
|
dataTableImporter := converter.CreateNewImporterWithTypeNameMapping(fireflyIIITransactionTypeNameMapping, "", ",")
|
||||||
|
|
||||||
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
package gnucash
|
package gnucash
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/converter"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/models"
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
@@ -24,7 +24,7 @@ var (
|
|||||||
)
|
)
|
||||||
|
|
||||||
// ParseImportedData returns the imported data by parsing the gnucash transaction data
|
// ParseImportedData returns the imported data by parsing the gnucash transaction data
|
||||||
func (c *gnucashTransactionDataImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
func (c *gnucashTransactionDataImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
||||||
gnucashDataReader, err := createNewGnuCashDatabaseReader(data)
|
gnucashDataReader, err := createNewGnuCashDatabaseReader(data)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -43,7 +43,7 @@ func (c *gnucashTransactionDataImporter) ParseImportedData(ctx core.Context, use
|
|||||||
return nil, nil, nil, nil, nil, nil, err
|
return nil, nil, nil, nil, nil, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
dataTableImporter := datatable.CreateNewSimpleImporter(gnucashTransactionTypeNameMapping)
|
dataTableImporter := converter.CreateNewSimpleImporterWithTypeNameMapping(gnucashTransactionTypeNameMapping)
|
||||||
|
|
||||||
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
||||||
}
|
}
|
||||||
|
|||||||
+1
@@ -98,6 +98,7 @@ func (t *gnucashTransactionDataRowIterator) Next(ctx core.Context, user *models.
|
|||||||
rowItems, isValid, err := t.parseTransaction(ctx, user, data)
|
rowItems, isValid, err := t.parseTransaction(ctx, user, data)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[gnucash_transaction_table.Next] cannot parsing transaction in row#%d, because %s", t.currentIndex, err.Error())
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -129,11 +129,26 @@ func (r *iifDataReader) read(ctx core.Context) ([]*iifAccountDataset, []*iifTran
|
|||||||
}
|
}
|
||||||
} else if lastLineSign == iifTransactionLineSignColumnName || lastLineSign == iifTransactionSplitLineSignColumnName {
|
} else if lastLineSign == iifTransactionLineSignColumnName || lastLineSign == iifTransactionSplitLineSignColumnName {
|
||||||
if items[0] == iifTransactionSplitLineSignColumnName {
|
if items[0] == iifTransactionSplitLineSignColumnName {
|
||||||
|
if currentTransactionData == nil {
|
||||||
|
log.Errorf(ctx, "[iif_data_reader.read] expected current transaction data is not nil, but read \"%s\"", items[0])
|
||||||
|
return nil, nil, errs.ErrInvalidIIFFile
|
||||||
|
}
|
||||||
|
|
||||||
currentTransactionData.splitData = append(currentTransactionData.splitData, &iifTransactionSplitData{
|
currentTransactionData.splitData = append(currentTransactionData.splitData, &iifTransactionSplitData{
|
||||||
dataItems: items,
|
dataItems: items,
|
||||||
})
|
})
|
||||||
lastLineSign = items[0]
|
lastLineSign = items[0]
|
||||||
} else if items[0] == iifTransactionEndLineSignColumnName {
|
} else if items[0] == iifTransactionEndLineSignColumnName {
|
||||||
|
if currentTransactionData == nil {
|
||||||
|
log.Errorf(ctx, "[iif_data_reader.read] expected current transaction data is not nil, but read \"%s\"", items[0])
|
||||||
|
return nil, nil, errs.ErrInvalidIIFFile
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(currentTransactionData.splitData) < 1 {
|
||||||
|
log.Errorf(ctx, "[iif_data_reader.read] expected reading transaction split line, but read \"%s\"", items[0])
|
||||||
|
return nil, nil, errs.ErrInvalidIIFFile
|
||||||
|
}
|
||||||
|
|
||||||
currentTransactionDataset.transactions = append(currentTransactionDataset.transactions, currentTransactionData)
|
currentTransactionDataset.transactions = append(currentTransactionDataset.transactions, currentTransactionData)
|
||||||
lastLineSign = ""
|
lastLineSign = ""
|
||||||
} else {
|
} else {
|
||||||
@@ -214,7 +229,7 @@ func (r *iifDataReader) readTransactionSampleLines(ctx core.Context, items []str
|
|||||||
}
|
}
|
||||||
|
|
||||||
if len(transactionEndSampleItems) < 1 || transactionEndSampleItems[0] != iifTransactionEndSampleLineSignColumnName {
|
if len(transactionEndSampleItems) < 1 || transactionEndSampleItems[0] != iifTransactionEndSampleLineSignColumnName {
|
||||||
log.Errorf(ctx, "[iif_data_reader.readTransactionSampleLines] expected reading transaction end sample line, but read \"%s\"", strings.Join(splitSampleItems, "\t"))
|
log.Errorf(ctx, "[iif_data_reader.readTransactionSampleLines] expected reading transaction end sample line, but read \"%s\"", strings.Join(transactionEndSampleItems, "\t"))
|
||||||
return nil, errs.ErrInvalidIIFFile
|
return nil, errs.ErrInvalidIIFFile
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
package iif
|
package iif
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/converter"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/models"
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
@@ -23,7 +23,7 @@ var (
|
|||||||
)
|
)
|
||||||
|
|
||||||
// ParseImportedData returns the imported data by parsing the intuit interchange format (iif) data
|
// ParseImportedData returns the imported data by parsing the intuit interchange format (iif) data
|
||||||
func (c *iifTransactionDataFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
func (c *iifTransactionDataFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
||||||
iifDataReader := createNewIifDataReader(data)
|
iifDataReader := createNewIifDataReader(data)
|
||||||
accountDatasets, transactionDatasets, err := iifDataReader.read(ctx)
|
accountDatasets, transactionDatasets, err := iifDataReader.read(ctx)
|
||||||
|
|
||||||
@@ -37,7 +37,7 @@ func (c *iifTransactionDataFileImporter) ParseImportedData(ctx core.Context, use
|
|||||||
return nil, nil, nil, nil, nil, nil, err
|
return nil, nil, nil, nil, nil, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
dataTableImporter := datatable.CreateNewSimpleImporter(iifTransactionTypeNameMapping)
|
dataTableImporter := converter.CreateNewSimpleImporterWithTypeNameMapping(iifTransactionTypeNameMapping)
|
||||||
|
|
||||||
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -333,7 +333,7 @@ func TestIIFTransactionDataFileParseImportedData_ParseYearMonthDayFormatTime(t *
|
|||||||
assert.Equal(t, int64(1725408000), utils.GetUnixTimeFromTransactionTime(allNewTransactions[3].TransactionTime))
|
assert.Equal(t, int64(1725408000), utils.GetUnixTimeFromTransactionTime(allNewTransactions[3].TransactionTime))
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestIIFTransactionDataFileParseImportedData_ParseShortMonthDayFormatTime(t *testing.T) {
|
func TestIIFTransactionDataFileParseImportedData_ParseShortMonthDayYearFormatTime(t *testing.T) {
|
||||||
converter := IifTransactionDataFileImporter
|
converter := IifTransactionDataFileImporter
|
||||||
context := core.NewNullContext()
|
context := core.NewNullContext()
|
||||||
|
|
||||||
@@ -364,6 +364,37 @@ func TestIIFTransactionDataFileParseImportedData_ParseShortMonthDayFormatTime(t
|
|||||||
assert.Equal(t, int64(1725321600), utils.GetUnixTimeFromTransactionTime(allNewTransactions[2].TransactionTime))
|
assert.Equal(t, int64(1725321600), utils.GetUnixTimeFromTransactionTime(allNewTransactions[2].TransactionTime))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestIIFTransactionDataFileParseImportedData_ParseShortMonthDayTwoDigitsYearFormatTime(t *testing.T) {
|
||||||
|
converter := IifTransactionDataFileImporter
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
user := &models.User{
|
||||||
|
Uid: 1234567890,
|
||||||
|
DefaultCurrency: "CNY",
|
||||||
|
}
|
||||||
|
|
||||||
|
allNewTransactions, _, _, _, _, _, err := converter.ParseImportedData(context, user, []byte(
|
||||||
|
"!TRNS\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
|
"!SPL\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
|
"!ENDTRNS\t\t\t\n"+
|
||||||
|
"TRNS\t9/01/24\tTest Account\t123.45\n"+
|
||||||
|
"SPL\t9/01/24\tTest Account2\t-123.45\n"+
|
||||||
|
"ENDTRNS\t\t\t\n"+
|
||||||
|
"TRNS\t09/2/24\tTest Account\t123.45\n"+
|
||||||
|
"SPL\t09/2/24\tTest Account2\t-123.45\n"+
|
||||||
|
"ENDTRNS\t\t\t\n"+
|
||||||
|
"TRNS\t9/3/24\tTest Account\t123.45\n"+
|
||||||
|
"SPL\t9/3/24\tTest Account2\t-123.45\n"+
|
||||||
|
"ENDTRNS\t\t\t\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 3, len(allNewTransactions))
|
||||||
|
assert.Equal(t, int64(1725148800), utils.GetUnixTimeFromTransactionTime(allNewTransactions[0].TransactionTime))
|
||||||
|
assert.Equal(t, int64(1725235200), utils.GetUnixTimeFromTransactionTime(allNewTransactions[1].TransactionTime))
|
||||||
|
assert.Equal(t, int64(1725321600), utils.GetUnixTimeFromTransactionTime(allNewTransactions[2].TransactionTime))
|
||||||
|
}
|
||||||
|
|
||||||
func TestIIFTransactionDataFileParseImportedData_ParseInvalidTime(t *testing.T) {
|
func TestIIFTransactionDataFileParseImportedData_ParseInvalidTime(t *testing.T) {
|
||||||
converter := IifTransactionDataFileImporter
|
converter := IifTransactionDataFileImporter
|
||||||
context := core.NewNullContext()
|
context := core.NewNullContext()
|
||||||
@@ -377,8 +408,8 @@ func TestIIFTransactionDataFileParseImportedData_ParseInvalidTime(t *testing.T)
|
|||||||
"!TRNS\tDATE\tACCNT\tAMOUNT\n"+
|
"!TRNS\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
"!SPL\tDATE\tACCNT\tAMOUNT\n"+
|
"!SPL\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
"!ENDTRNS\t\t\t\n"+
|
"!ENDTRNS\t\t\t\n"+
|
||||||
"TRNS\t9/1/24\tTest Account\t123.45\n"+
|
"TRNS\t09-01-2024\tTest Account\t123.45\n"+
|
||||||
"SPL\t9/1/24\tTest Account2\t-123.45\n"+
|
"SPL\t09-01-2024\tTest Account2\t-123.45\n"+
|
||||||
"ENDTRNS\t\t\t\n"), 0, nil, nil, nil, nil, nil)
|
"ENDTRNS\t\t\t\n"), 0, nil, nil, nil, nil, nil)
|
||||||
assert.EqualError(t, err, errs.ErrTransactionTimeInvalid.Message)
|
assert.EqualError(t, err, errs.ErrTransactionTimeInvalid.Message)
|
||||||
|
|
||||||
@@ -486,7 +517,7 @@ func TestIIFTransactionDataFileParseImportedData_ParseDescription(t *testing.T)
|
|||||||
assert.Equal(t, "Test", allNewTransactions[0].Comment)
|
assert.Equal(t, "Test", allNewTransactions[0].Comment)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestIIFTransactionDataFileParseImportedData_NotSupportedToParseSplitTransaction(t *testing.T) {
|
func TestIIFTransactionDataFileParseImportedData_ParseSplitTransaction(t *testing.T) {
|
||||||
converter := IifTransactionDataFileImporter
|
converter := IifTransactionDataFileImporter
|
||||||
context := core.NewNullContext()
|
context := core.NewNullContext()
|
||||||
|
|
||||||
@@ -495,11 +526,218 @@ func TestIIFTransactionDataFileParseImportedData_NotSupportedToParseSplitTransac
|
|||||||
DefaultCurrency: "CNY",
|
DefaultCurrency: "CNY",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
allNewTransactions, allNewAccounts, _, _, _, _, err := converter.ParseImportedData(context, user, []byte(
|
||||||
|
"!ACCNT\tNAME\tACCNTTYPE\n"+
|
||||||
|
"ACCNT\tTest Category\tINC\n"+
|
||||||
|
"ACCNT\tTest Category2\tEXP\n"+
|
||||||
|
"!TRNS\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
|
"!SPL\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
|
"!ENDTRNS\t\t\t\n"+
|
||||||
|
"TRNS\t09/01/2024\tTest Account\t123.45\n"+
|
||||||
|
"SPL\t09/01/2024\tTest Category\t-23.45\n"+
|
||||||
|
"SPL\t09/01/2024\tTest Account2\t-100.00\n"+
|
||||||
|
"ENDTRNS\t\t\t\n"+
|
||||||
|
"TRNS\t09/02/2024\tTest Account\t-100.00\n"+
|
||||||
|
"SPL\t09/02/2024\tTest Category2\t30.00\n"+
|
||||||
|
"SPL\t09/02/2024\tTest Account3\t20.00\n"+
|
||||||
|
"SPL\t09/02/2024\tTest Account4\t50.00\n"+
|
||||||
|
"ENDTRNS\t\t\t\n"+
|
||||||
|
"TRNS\t09/03/2024\tTest Account\t100.00\n"+
|
||||||
|
"SPL\t09/03/2024\tTest Account2\t-100.00\n"+
|
||||||
|
"ENDTRNS\t\t\t\n"+
|
||||||
|
"TRNS\t09/04/2024\tTest Category\t-100.00\n"+
|
||||||
|
"SPL\t09/04/2024\tTest Account\t40.00\n"+
|
||||||
|
"SPL\t09/04/2024\tTest Account2\t60.00\n"+
|
||||||
|
"ENDTRNS\t\t\t\n"+
|
||||||
|
"TRNS\t09/05/2024\tTest Category2\t100.00\n"+
|
||||||
|
"SPL\t09/05/2024\tTest Account3\t-40.00\n"+
|
||||||
|
"SPL\t09/05/2024\tTest Account4\t-60.00\n"+
|
||||||
|
"ENDTRNS\t\t\t\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 10, len(allNewTransactions))
|
||||||
|
assert.Equal(t, 4, len(allNewAccounts))
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[0].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_INCOME, allNewTransactions[0].Type)
|
||||||
|
assert.Equal(t, int64(1725148800), utils.GetUnixTimeFromTransactionTime(allNewTransactions[0].TransactionTime))
|
||||||
|
assert.Equal(t, int64(2345), allNewTransactions[0].Amount)
|
||||||
|
assert.Equal(t, "Test Account", allNewTransactions[0].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Test Category", allNewTransactions[0].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[1].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_TRANSFER_OUT, allNewTransactions[1].Type)
|
||||||
|
assert.Equal(t, int64(1725148800), utils.GetUnixTimeFromTransactionTime(allNewTransactions[1].TransactionTime))
|
||||||
|
assert.Equal(t, int64(10000), allNewTransactions[1].Amount)
|
||||||
|
assert.Equal(t, "Test Account2", allNewTransactions[1].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Test Account", allNewTransactions[1].OriginalDestinationAccountName)
|
||||||
|
assert.Equal(t, "", allNewTransactions[1].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[2].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_EXPENSE, allNewTransactions[2].Type)
|
||||||
|
assert.Equal(t, int64(1725235200), utils.GetUnixTimeFromTransactionTime(allNewTransactions[2].TransactionTime))
|
||||||
|
assert.Equal(t, int64(3000), allNewTransactions[2].Amount)
|
||||||
|
assert.Equal(t, "Test Account", allNewTransactions[2].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Test Category2", allNewTransactions[2].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[3].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_TRANSFER_OUT, allNewTransactions[3].Type)
|
||||||
|
assert.Equal(t, int64(1725235200), utils.GetUnixTimeFromTransactionTime(allNewTransactions[3].TransactionTime))
|
||||||
|
assert.Equal(t, int64(2000), allNewTransactions[3].Amount)
|
||||||
|
assert.Equal(t, "Test Account", allNewTransactions[3].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Test Account3", allNewTransactions[3].OriginalDestinationAccountName)
|
||||||
|
assert.Equal(t, "", allNewTransactions[3].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[4].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_TRANSFER_OUT, allNewTransactions[4].Type)
|
||||||
|
assert.Equal(t, int64(1725235200), utils.GetUnixTimeFromTransactionTime(allNewTransactions[4].TransactionTime))
|
||||||
|
assert.Equal(t, int64(5000), allNewTransactions[4].Amount)
|
||||||
|
assert.Equal(t, "Test Account", allNewTransactions[4].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Test Account4", allNewTransactions[4].OriginalDestinationAccountName)
|
||||||
|
assert.Equal(t, "", allNewTransactions[4].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[5].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_TRANSFER_OUT, allNewTransactions[5].Type)
|
||||||
|
assert.Equal(t, int64(1725321600), utils.GetUnixTimeFromTransactionTime(allNewTransactions[5].TransactionTime))
|
||||||
|
assert.Equal(t, int64(10000), allNewTransactions[5].Amount)
|
||||||
|
assert.Equal(t, "Test Account2", allNewTransactions[5].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Test Account", allNewTransactions[5].OriginalDestinationAccountName)
|
||||||
|
assert.Equal(t, "", allNewTransactions[5].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[6].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_INCOME, allNewTransactions[6].Type)
|
||||||
|
assert.Equal(t, int64(1725408000), utils.GetUnixTimeFromTransactionTime(allNewTransactions[6].TransactionTime))
|
||||||
|
assert.Equal(t, int64(4000), allNewTransactions[6].Amount)
|
||||||
|
assert.Equal(t, "Test Account", allNewTransactions[6].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Test Category", allNewTransactions[6].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[7].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_INCOME, allNewTransactions[7].Type)
|
||||||
|
assert.Equal(t, int64(1725408000), utils.GetUnixTimeFromTransactionTime(allNewTransactions[7].TransactionTime))
|
||||||
|
assert.Equal(t, int64(6000), allNewTransactions[7].Amount)
|
||||||
|
assert.Equal(t, "Test Account2", allNewTransactions[7].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Test Category", allNewTransactions[7].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[8].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_EXPENSE, allNewTransactions[8].Type)
|
||||||
|
assert.Equal(t, int64(1725494400), utils.GetUnixTimeFromTransactionTime(allNewTransactions[8].TransactionTime))
|
||||||
|
assert.Equal(t, int64(4000), allNewTransactions[8].Amount)
|
||||||
|
assert.Equal(t, "Test Account3", allNewTransactions[8].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Test Category2", allNewTransactions[8].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewTransactions[9].Uid)
|
||||||
|
assert.Equal(t, models.TRANSACTION_DB_TYPE_EXPENSE, allNewTransactions[9].Type)
|
||||||
|
assert.Equal(t, int64(1725494400), utils.GetUnixTimeFromTransactionTime(allNewTransactions[9].TransactionTime))
|
||||||
|
assert.Equal(t, int64(6000), allNewTransactions[9].Amount)
|
||||||
|
assert.Equal(t, "Test Account4", allNewTransactions[9].OriginalSourceAccountName)
|
||||||
|
assert.Equal(t, "Test Category2", allNewTransactions[9].OriginalCategoryName)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewAccounts[0].Uid)
|
||||||
|
assert.Equal(t, "Test Account", allNewAccounts[0].Name)
|
||||||
|
assert.Equal(t, "CNY", allNewAccounts[0].Currency)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewAccounts[1].Uid)
|
||||||
|
assert.Equal(t, "Test Account2", allNewAccounts[1].Name)
|
||||||
|
assert.Equal(t, "CNY", allNewAccounts[1].Currency)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewAccounts[2].Uid)
|
||||||
|
assert.Equal(t, "Test Account3", allNewAccounts[2].Name)
|
||||||
|
assert.Equal(t, "CNY", allNewAccounts[2].Currency)
|
||||||
|
|
||||||
|
assert.Equal(t, int64(1234567890), allNewAccounts[3].Uid)
|
||||||
|
assert.Equal(t, "Test Account4", allNewAccounts[3].Name)
|
||||||
|
assert.Equal(t, "CNY", allNewAccounts[3].Currency)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIIFTransactionDataFileParseImportedData_ParseSplitTransactionDescription(t *testing.T) {
|
||||||
|
converter := IifTransactionDataFileImporter
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
user := &models.User{
|
||||||
|
Uid: 1234567890,
|
||||||
|
DefaultCurrency: "CNY",
|
||||||
|
}
|
||||||
|
|
||||||
|
allNewTransactions, _, _, _, _, _, err := converter.ParseImportedData(context, user, []byte(
|
||||||
|
"!TRNS\tDATE\tACCNT\tNAME\tAMOUNT\tMEMO\n"+
|
||||||
|
"!SPL\tDATE\tACCNT\tNAME\tAMOUNT\tMEMO\n"+
|
||||||
|
"!ENDTRNS\t\t\t\t\t\n"+
|
||||||
|
"TRNS\t09/01/2024\tTest Account\t\"Test\"\t123.45\t\"foo bar\t#test\"\n"+
|
||||||
|
"SPL\t09/01/2024\tTest Account2\t\t-100.00\t\"foo\ttest#bar\"\n"+
|
||||||
|
"SPL\t09/01/2024\tTest Account3\t\t-23.45\t\n"+
|
||||||
|
"ENDTRNS\t\t\t\t\t\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, 2, len(allNewTransactions))
|
||||||
|
assert.Equal(t, "foo\ttest#bar", allNewTransactions[0].Comment)
|
||||||
|
assert.Equal(t, "foo bar\t#test", allNewTransactions[1].Comment)
|
||||||
|
|
||||||
|
allNewTransactions, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte(
|
||||||
|
"!TRNS\tDATE\tACCNT\tNAME\tAMOUNT\tMEMO\n"+
|
||||||
|
"!SPL\tDATE\tACCNT\tNAME\tAMOUNT\tMEMO\n"+
|
||||||
|
"!ENDTRNS\t\t\t\t\t\n"+
|
||||||
|
"TRNS\t09/01/2024\tTest Account\tTest\t123.45\t\n"+
|
||||||
|
"SPL\t09/01/2024\tTest Account2\t\t-100.00\t\"test\"\n"+
|
||||||
|
"SPL\t09/01/2024\tTest Account3\tfoo\t-12.34\t\n"+
|
||||||
|
"SPL\t09/01/2024\tTest Account4\t\t-11.11\t\n"+
|
||||||
|
"ENDTRNS\t\t\t\t\t\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, 3, len(allNewTransactions))
|
||||||
|
assert.Equal(t, "test", allNewTransactions[0].Comment)
|
||||||
|
assert.Equal(t, "foo", allNewTransactions[1].Comment)
|
||||||
|
assert.Equal(t, "Test", allNewTransactions[2].Comment)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIIFTransactionDataFileParseImportedData_NotSupportedSplitTransaction(t *testing.T) {
|
||||||
|
converter := IifTransactionDataFileImporter
|
||||||
|
context := core.NewNullContext()
|
||||||
|
|
||||||
|
user := &models.User{
|
||||||
|
Uid: 1234567890,
|
||||||
|
DefaultCurrency: "CNY",
|
||||||
|
}
|
||||||
|
|
||||||
|
// Opening balance transaction
|
||||||
_, _, _, _, _, _, err := converter.ParseImportedData(context, user, []byte(
|
_, _, _, _, _, _, err := converter.ParseImportedData(context, user, []byte(
|
||||||
|
"!TRNS\tTRNSTYPE\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
|
"!SPL\tTRNSTYPE\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
|
"!ENDTRNS\t\t\t\t\n"+
|
||||||
|
"TRNS\tBEGINBALCHECK\t09/01/2024\tTest Account\t123.45\n"+
|
||||||
|
"SPL\tBEGINBALCHECK\t09/01/2024\tTest Account2\t-100.00\n"+
|
||||||
|
"SPL\tBEGINBALCHECK\t09/01/2024\tTest Account3\t-23.45\n"+
|
||||||
|
"ENDTRNS\t\t\t\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
assert.EqualError(t, err, errs.ErrNotSupportedSplitTransactions.Message)
|
||||||
|
|
||||||
|
// Transaction with invalid amount
|
||||||
|
_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte(
|
||||||
|
"!TRNS\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
|
"!SPL\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
|
"!ENDTRNS\t\t\t\n"+
|
||||||
|
"TRNS\t09/01/2024\tTest Account\t123 45\n"+
|
||||||
|
"SPL\t09/01/2024\tTest Account2\t-100.00\n"+
|
||||||
|
"SPL\t09/01/2024\tTest Account3\t-23.45\n"+
|
||||||
|
"ENDTRNS\t\t\t\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
assert.EqualError(t, err, errs.ErrAmountInvalid.Message)
|
||||||
|
|
||||||
|
// Transaction split data with invalid amount
|
||||||
|
_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte(
|
||||||
"!TRNS\tDATE\tACCNT\tAMOUNT\n"+
|
"!TRNS\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
"!SPL\tDATE\tACCNT\tAMOUNT\n"+
|
"!SPL\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
"!ENDTRNS\t\t\t\n"+
|
"!ENDTRNS\t\t\t\n"+
|
||||||
"TRNS\t09/01/2024\tTest Account\t123.45\n"+
|
"TRNS\t09/01/2024\tTest Account\t123.45\n"+
|
||||||
|
"SPL\t09/01/2024\tTest Account2\t-100 00\n"+
|
||||||
|
"SPL\t09/01/2024\tTest Account3\t-23.45\n"+
|
||||||
|
"ENDTRNS\t\t\t\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
assert.EqualError(t, err, errs.ErrAmountInvalid.Message)
|
||||||
|
|
||||||
|
// Transaction amount not equal to sum of split data amount
|
||||||
|
_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte(
|
||||||
|
"!TRNS\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
|
"!SPL\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
|
"!ENDTRNS\t\t\t\n"+
|
||||||
|
"TRNS\t09/01/2024\tTest Account\t123.00\n"+
|
||||||
"SPL\t09/01/2024\tTest Account2\t-100.00\n"+
|
"SPL\t09/01/2024\tTest Account2\t-100.00\n"+
|
||||||
"SPL\t09/01/2024\tTest Account3\t-23.45\n"+
|
"SPL\t09/01/2024\tTest Account3\t-23.45\n"+
|
||||||
"ENDTRNS\t\t\t\n"), 0, nil, nil, nil, nil, nil)
|
"ENDTRNS\t\t\t\n"), 0, nil, nil, nil, nil, nil)
|
||||||
@@ -515,7 +753,7 @@ func TestIIFTransactionDataFileParseImportedData_InvalidDataLines(t *testing.T)
|
|||||||
DefaultCurrency: "CNY",
|
DefaultCurrency: "CNY",
|
||||||
}
|
}
|
||||||
|
|
||||||
// Missing Transaction Line
|
//Missing Transaction Line
|
||||||
_, _, _, _, _, _, err := converter.ParseImportedData(context, user, []byte(
|
_, _, _, _, _, _, err := converter.ParseImportedData(context, user, []byte(
|
||||||
"!TRNS\tDATE\tACCNT\tAMOUNT\n"+
|
"!TRNS\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
"!SPL\tDATE\tACCNT\tAMOUNT\n"+
|
"!SPL\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
@@ -524,6 +762,14 @@ func TestIIFTransactionDataFileParseImportedData_InvalidDataLines(t *testing.T)
|
|||||||
"ENDTRNS\t\t\t\n"), 0, nil, nil, nil, nil, nil)
|
"ENDTRNS\t\t\t\n"), 0, nil, nil, nil, nil, nil)
|
||||||
assert.EqualError(t, err, errs.ErrInvalidIIFFile.Message)
|
assert.EqualError(t, err, errs.ErrInvalidIIFFile.Message)
|
||||||
|
|
||||||
|
// Missing Transaction And Split Line
|
||||||
|
_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte(
|
||||||
|
"!TRNS\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
|
"!SPL\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
|
"!ENDTRNS\t\t\t\n"+
|
||||||
|
"ENDTRNS\t\t\t\n"), 0, nil, nil, nil, nil, nil)
|
||||||
|
assert.EqualError(t, err, errs.ErrInvalidIIFFile.Message)
|
||||||
|
|
||||||
// Missing Split Line
|
// Missing Split Line
|
||||||
_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte(
|
_, _, _, _, _, _, err = converter.ParseImportedData(context, user, []byte(
|
||||||
"!TRNS\tDATE\tACCNT\tAMOUNT\n"+
|
"!TRNS\tDATE\tACCNT\tAMOUNT\n"+
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ package iif
|
|||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"strings"
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
@@ -59,6 +60,7 @@ type iifTransactionDataRowIterator struct {
|
|||||||
dataTable *iifTransactionDataTable
|
dataTable *iifTransactionDataTable
|
||||||
currentDatasetIndex int
|
currentDatasetIndex int
|
||||||
currentIndexInDataset int
|
currentIndexInDataset int
|
||||||
|
currentSplitDataIndex int
|
||||||
}
|
}
|
||||||
|
|
||||||
// HasColumn returns whether the transaction data table has specified column
|
// HasColumn returns whether the transaction data table has specified column
|
||||||
@@ -72,8 +74,15 @@ func (t *iifTransactionDataTable) TransactionRowCount() int {
|
|||||||
totalDataRowCount := 0
|
totalDataRowCount := 0
|
||||||
|
|
||||||
for i := 0; i < len(t.transactionDatasets); i++ {
|
for i := 0; i < len(t.transactionDatasets); i++ {
|
||||||
transactions := t.transactionDatasets[i]
|
datasets := t.transactionDatasets[i]
|
||||||
totalDataRowCount += len(transactions.transactions)
|
|
||||||
|
for j := 0; j < len(datasets.transactions); j++ {
|
||||||
|
transaction := datasets.transactions[j]
|
||||||
|
|
||||||
|
if transaction.splitData != nil {
|
||||||
|
totalDataRowCount += len(transaction.splitData)
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return totalDataRowCount
|
return totalDataRowCount
|
||||||
@@ -84,7 +93,8 @@ func (t *iifTransactionDataTable) TransactionRowIterator() datatable.Transaction
|
|||||||
return &iifTransactionDataRowIterator{
|
return &iifTransactionDataRowIterator{
|
||||||
dataTable: t,
|
dataTable: t,
|
||||||
currentDatasetIndex: 0,
|
currentDatasetIndex: 0,
|
||||||
currentIndexInDataset: -1,
|
currentIndexInDataset: 0,
|
||||||
|
currentSplitDataIndex: -1,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -116,6 +126,9 @@ func (t *iifTransactionDataRowIterator) HasNext() bool {
|
|||||||
|
|
||||||
if t.currentIndexInDataset+1 < len(currentDataset.transactions) {
|
if t.currentIndexInDataset+1 < len(currentDataset.transactions) {
|
||||||
return true
|
return true
|
||||||
|
} else if t.currentIndexInDataset < len(currentDataset.transactions) &&
|
||||||
|
t.currentSplitDataIndex+1 < len(currentDataset.transactions[t.currentIndexInDataset].splitData) {
|
||||||
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
for i := t.currentDatasetIndex + 1; i < len(allDatasets); i++ {
|
for i := t.currentDatasetIndex + 1; i < len(allDatasets); i++ {
|
||||||
@@ -134,20 +147,29 @@ func (t *iifTransactionDataRowIterator) HasNext() bool {
|
|||||||
// Next returns the next imported data row
|
// Next returns the next imported data row
|
||||||
func (t *iifTransactionDataRowIterator) Next(ctx core.Context, user *models.User) (daraRow datatable.TransactionDataRow, err error) {
|
func (t *iifTransactionDataRowIterator) Next(ctx core.Context, user *models.User) (daraRow datatable.TransactionDataRow, err error) {
|
||||||
allDatasets := t.dataTable.transactionDatasets
|
allDatasets := t.dataTable.transactionDatasets
|
||||||
currentIndexInDataset := t.currentIndexInDataset
|
|
||||||
|
|
||||||
for i := t.currentDatasetIndex; i < len(allDatasets); i++ {
|
for i := t.currentDatasetIndex; i < len(allDatasets); i++ {
|
||||||
|
foundNextRow := false
|
||||||
dataset := allDatasets[i]
|
dataset := allDatasets[i]
|
||||||
|
|
||||||
if currentIndexInDataset+1 < len(dataset.transactions) {
|
for j := t.currentIndexInDataset; j < len(dataset.transactions); j++ {
|
||||||
|
if t.currentSplitDataIndex+1 < len(dataset.transactions[j].splitData) {
|
||||||
|
t.currentSplitDataIndex++
|
||||||
|
foundNextRow = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
t.currentIndexInDataset++
|
t.currentIndexInDataset++
|
||||||
currentIndexInDataset = t.currentIndexInDataset
|
t.currentSplitDataIndex = -1
|
||||||
|
}
|
||||||
|
|
||||||
|
if foundNextRow {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
t.currentDatasetIndex++
|
t.currentDatasetIndex++
|
||||||
t.currentIndexInDataset = -1
|
t.currentIndexInDataset = 0
|
||||||
currentIndexInDataset = -1
|
t.currentSplitDataIndex = -1
|
||||||
}
|
}
|
||||||
|
|
||||||
if t.currentDatasetIndex >= len(allDatasets) {
|
if t.currentDatasetIndex >= len(allDatasets) {
|
||||||
@@ -161,9 +183,28 @@ func (t *iifTransactionDataRowIterator) Next(ctx core.Context, user *models.User
|
|||||||
}
|
}
|
||||||
|
|
||||||
data := currentDataset.transactions[t.currentIndexInDataset]
|
data := currentDataset.transactions[t.currentIndexInDataset]
|
||||||
rowItems, err := t.parseTransaction(ctx, user, currentDataset, data)
|
|
||||||
|
if len(data.splitData) < 1 {
|
||||||
|
log.Errorf(ctx, "[iif_transaction_data_table.Next] cannot parsing transaction in row#%d (dataset#%d), because split data is empty", t.currentIndexInDataset, t.currentDatasetIndex)
|
||||||
|
return nil, errs.ErrInvalidIIFFile
|
||||||
|
}
|
||||||
|
|
||||||
|
if t.currentSplitDataIndex >= len(data.splitData) {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(data.splitData) > 1 {
|
||||||
|
_, err := t.isSplitTransactionSupported(ctx, currentDataset, data)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rowItems, err := t.parseTransaction(ctx, user, currentDataset, data, t.currentSplitDataIndex)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[iif_transaction_data_table.Next] cannot parsing transaction in row#%d-split#%d (dataset#%d), because %s", t.currentIndexInDataset, t.currentSplitDataIndex, t.currentDatasetIndex, err.Error())
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -173,13 +214,7 @@ func (t *iifTransactionDataRowIterator) Next(ctx core.Context, user *models.User
|
|||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *iifTransactionDataRowIterator) parseTransaction(ctx core.Context, user *models.User, dataset *iifTransactionDataset, transactionData *iifTransactionData) (map[datatable.TransactionDataTableColumn]string, error) {
|
func (t *iifTransactionDataRowIterator) parseTransaction(ctx core.Context, user *models.User, dataset *iifTransactionDataset, transactionData *iifTransactionData, splitDataIndex int) (map[datatable.TransactionDataTableColumn]string, error) {
|
||||||
if len(transactionData.splitData) < 1 {
|
|
||||||
return nil, errs.ErrInvalidIIFFile
|
|
||||||
} else if len(transactionData.splitData) > 1 {
|
|
||||||
return nil, errs.ErrNotSupportedSplitTransactions
|
|
||||||
}
|
|
||||||
|
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
data := make(map[datatable.TransactionDataTableColumn]string, len(iifTransactionSupportedColumns))
|
data := make(map[datatable.TransactionDataTableColumn]string, len(iifTransactionSupportedColumns))
|
||||||
@@ -189,18 +224,18 @@ func (t *iifTransactionDataRowIterator) parseTransaction(ctx core.Context, user
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
transactionType, _ := dataset.getTransactionDataItemValue(transactionData, iifTransactionTypeColumnName)
|
transactionType, _ := dataset.getSplitDataItemValue(transactionData.splitData[splitDataIndex], iifTransactionTypeColumnName)
|
||||||
accountName1, _ := dataset.getTransactionDataItemValue(transactionData, iifTransactionAccountNameColumnName)
|
mainAccountName, _ := dataset.getTransactionDataItemValue(transactionData, iifTransactionAccountNameColumnName)
|
||||||
accountName2, _ := dataset.getSplitDataItemValue(transactionData.splitData[0], iifTransactionAccountNameColumnName)
|
splitAccountName, _ := dataset.getSplitDataItemValue(transactionData.splitData[splitDataIndex], iifTransactionAccountNameColumnName)
|
||||||
amount1, _ := dataset.getTransactionDataItemValue(transactionData, iifTransactionAmountColumnName)
|
mainAmount, _ := dataset.getTransactionDataItemValue(transactionData, iifTransactionAmountColumnName)
|
||||||
amount2, _ := dataset.getSplitDataItemValue(transactionData.splitData[0], iifTransactionAmountColumnName)
|
splitAmount, _ := dataset.getSplitDataItemValue(transactionData.splitData[splitDataIndex], iifTransactionAmountColumnName)
|
||||||
amountNum1, err := utils.ParseAmount(strings.ReplaceAll(amount1, ",", ""))
|
mainAmountNum, err := parseAmount(mainAmount)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, errs.ErrAmountInvalid
|
return nil, errs.ErrAmountInvalid
|
||||||
}
|
}
|
||||||
|
|
||||||
amountNum2, err := utils.ParseAmount(strings.ReplaceAll(amount2, ",", ""))
|
splitAmountNum, err := parseAmount(splitAmount)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, errs.ErrAmountInvalid
|
return nil, errs.ErrAmountInvalid
|
||||||
@@ -208,24 +243,35 @@ func (t *iifTransactionDataRowIterator) parseTransaction(ctx core.Context, user
|
|||||||
|
|
||||||
if transactionType == iifTransactionTypeBeginningBalance { // balance modification
|
if transactionType == iifTransactionTypeBeginningBalance { // balance modification
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = iifTransactionTypeNameMapping[models.TRANSACTION_TYPE_MODIFY_BALANCE]
|
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = iifTransactionTypeNameMapping[models.TRANSACTION_TYPE_MODIFY_BALANCE]
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = accountName1
|
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = mainAccountName
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(amountNum1)
|
data[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(mainAmountNum)
|
||||||
} else if t.dataTable.incomeAccountNames[accountName1] || t.dataTable.incomeAccountNames[accountName2] { // income
|
} else if (t.dataTable.incomeAccountNames[mainAccountName] && !t.dataTable.incomeAccountNames[splitAccountName] && !t.dataTable.expenseAccountNames[splitAccountName]) ||
|
||||||
|
(t.dataTable.incomeAccountNames[splitAccountName] && !t.dataTable.incomeAccountNames[mainAccountName] && !t.dataTable.expenseAccountNames[mainAccountName]) { // income
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = iifTransactionTypeNameMapping[models.TRANSACTION_TYPE_INCOME]
|
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = iifTransactionTypeNameMapping[models.TRANSACTION_TYPE_INCOME]
|
||||||
categoryName := ""
|
categoryName := ""
|
||||||
accountName := ""
|
accountName := ""
|
||||||
amountNum := int64(0)
|
amountNum := int64(0)
|
||||||
|
|
||||||
if t.dataTable.incomeAccountNames[accountName1] && !t.dataTable.incomeAccountNames[accountName2] {
|
if t.dataTable.incomeAccountNames[mainAccountName] && !t.dataTable.incomeAccountNames[splitAccountName] {
|
||||||
categoryName = accountName1
|
categoryName = mainAccountName
|
||||||
accountName = accountName2
|
accountName = splitAccountName
|
||||||
amountNum = amountNum2
|
|
||||||
} else if t.dataTable.incomeAccountNames[accountName2] && !t.dataTable.incomeAccountNames[accountName1] {
|
if len(transactionData.splitData) > 1 {
|
||||||
categoryName = accountName2
|
amountNum = splitAmountNum
|
||||||
accountName = accountName1
|
} else {
|
||||||
amountNum = amountNum1
|
amountNum = -mainAmountNum
|
||||||
|
}
|
||||||
|
} else if t.dataTable.incomeAccountNames[splitAccountName] && !t.dataTable.incomeAccountNames[mainAccountName] {
|
||||||
|
categoryName = splitAccountName
|
||||||
|
accountName = mainAccountName
|
||||||
|
|
||||||
|
if len(transactionData.splitData) > 1 {
|
||||||
|
amountNum = -splitAmountNum
|
||||||
|
} else {
|
||||||
|
amountNum = mainAmountNum
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
log.Errorf(ctx, "[iif_transaction_data_table.parseTransaction] cannot parse transaction, because two accounts \"%s\" and \"%s\" are all income account", accountName1, accountName2)
|
log.Errorf(ctx, "[iif_transaction_data_table.parseTransaction] cannot parse transaction, because main account \"%s\" and split account \"%s\" are all income account", mainAccountName, splitAccountName)
|
||||||
return nil, errs.ErrInvalidIIFFile
|
return nil, errs.ErrInvalidIIFFile
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -240,22 +286,33 @@ func (t *iifTransactionDataRowIterator) parseTransaction(ctx core.Context, user
|
|||||||
|
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = accountName
|
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = accountName
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(amountNum)
|
data[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(amountNum)
|
||||||
} else if t.dataTable.expenseAccountNames[accountName1] || t.dataTable.expenseAccountNames[accountName2] { // expense
|
} else if (t.dataTable.expenseAccountNames[mainAccountName] && !t.dataTable.expenseAccountNames[splitAccountName] && !t.dataTable.incomeAccountNames[splitAccountName]) ||
|
||||||
|
(t.dataTable.expenseAccountNames[splitAccountName] && !t.dataTable.expenseAccountNames[mainAccountName] && !t.dataTable.incomeAccountNames[mainAccountName]) { // expense
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = iifTransactionTypeNameMapping[models.TRANSACTION_TYPE_EXPENSE]
|
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = iifTransactionTypeNameMapping[models.TRANSACTION_TYPE_EXPENSE]
|
||||||
categoryName := ""
|
categoryName := ""
|
||||||
accountName := ""
|
accountName := ""
|
||||||
amountNum := int64(0)
|
amountNum := int64(0)
|
||||||
|
|
||||||
if t.dataTable.expenseAccountNames[accountName1] && !t.dataTable.expenseAccountNames[accountName2] {
|
if t.dataTable.expenseAccountNames[mainAccountName] && !t.dataTable.expenseAccountNames[splitAccountName] {
|
||||||
categoryName = accountName1
|
categoryName = mainAccountName
|
||||||
accountName = accountName2
|
accountName = splitAccountName
|
||||||
amountNum = amountNum2
|
|
||||||
} else if t.dataTable.expenseAccountNames[accountName2] && !t.dataTable.expenseAccountNames[accountName1] {
|
if len(transactionData.splitData) > 1 {
|
||||||
categoryName = accountName2
|
amountNum = -splitAmountNum
|
||||||
accountName = accountName1
|
} else {
|
||||||
amountNum = amountNum1
|
amountNum = mainAmountNum
|
||||||
|
}
|
||||||
|
} else if t.dataTable.expenseAccountNames[splitAccountName] && !t.dataTable.expenseAccountNames[mainAccountName] {
|
||||||
|
categoryName = splitAccountName
|
||||||
|
accountName = mainAccountName
|
||||||
|
|
||||||
|
if len(transactionData.splitData) > 1 {
|
||||||
|
amountNum = splitAmountNum
|
||||||
|
} else {
|
||||||
|
amountNum = -mainAmountNum
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
log.Errorf(ctx, "[iif_transaction_data_table.parseTransaction] cannot parse transaction, because two accounts \"%s\" and \"%s\" are all expense account", accountName1, accountName2)
|
log.Errorf(ctx, "[iif_transaction_data_table.parseTransaction] cannot parse transaction, because main account \"%s\" and split account \"%s\" are all expense account", mainAccountName, splitAccountName)
|
||||||
return nil, errs.ErrInvalidIIFFile
|
return nil, errs.ErrInvalidIIFFile
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -269,26 +326,57 @@ func (t *iifTransactionDataRowIterator) parseTransaction(ctx core.Context, user
|
|||||||
}
|
}
|
||||||
|
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = accountName
|
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = accountName
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(-amountNum)
|
data[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(amountNum)
|
||||||
} else {
|
} else {
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = iifTransactionTypeNameMapping[models.TRANSACTION_TYPE_TRANSFER]
|
data[datatable.TRANSACTION_DATA_TABLE_TRANSACTION_TYPE] = iifTransactionTypeNameMapping[models.TRANSACTION_TYPE_TRANSFER]
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_SUB_CATEGORY] = ""
|
data[datatable.TRANSACTION_DATA_TABLE_SUB_CATEGORY] = ""
|
||||||
|
amountNum := int64(0)
|
||||||
|
relatedAmountNum := int64(0)
|
||||||
|
mainAccountTransferToSplitAccount := false
|
||||||
|
|
||||||
if amountNum1 >= 0 {
|
if len(transactionData.splitData) > 1 {
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = accountName2
|
amountNum = splitAmountNum
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(-amountNum2)
|
relatedAmountNum = splitAmountNum
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME] = accountName1
|
mainAccountTransferToSplitAccount = amountNum >= 0
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_RELATED_AMOUNT] = utils.FormatAmount(amountNum1)
|
} else {
|
||||||
} else if amountNum2 >= 0 {
|
if mainAmountNum >= 0 {
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = accountName1
|
amountNum = splitAmountNum
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(-amountNum1)
|
relatedAmountNum = mainAmountNum
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME] = accountName2
|
mainAccountTransferToSplitAccount = false
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_RELATED_AMOUNT] = utils.FormatAmount(amountNum2)
|
} else if splitAmountNum >= 0 {
|
||||||
|
amountNum = mainAmountNum
|
||||||
|
relatedAmountNum = splitAmountNum
|
||||||
|
mainAccountTransferToSplitAccount = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if mainAccountTransferToSplitAccount {
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = mainAccountName
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME] = splitAccountName
|
||||||
|
} else {
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_ACCOUNT_NAME] = splitAccountName
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_RELATED_ACCOUNT_NAME] = mainAccountName
|
||||||
|
}
|
||||||
|
|
||||||
|
if amountNum >= 0 {
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(amountNum)
|
||||||
|
} else {
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_AMOUNT] = utils.FormatAmount(-amountNum)
|
||||||
|
}
|
||||||
|
|
||||||
|
if relatedAmountNum >= 0 {
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_RELATED_AMOUNT] = utils.FormatAmount(relatedAmountNum)
|
||||||
|
} else {
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_RELATED_AMOUNT] = utils.FormatAmount(-relatedAmountNum)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if memo, _ := dataset.getTransactionDataItemValue(transactionData, iifTransactionMemoColumnName); memo != "" {
|
if splitMemo, _ := dataset.getSplitDataItemValue(transactionData.splitData[splitDataIndex], iifTransactionMemoColumnName); splitMemo != "" {
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_DESCRIPTION] = splitMemo
|
||||||
|
} else if memo, _ := dataset.getTransactionDataItemValue(transactionData, iifTransactionMemoColumnName); memo != "" {
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_DESCRIPTION] = memo
|
data[datatable.TRANSACTION_DATA_TABLE_DESCRIPTION] = memo
|
||||||
|
} else if splitName, _ := dataset.getSplitDataItemValue(transactionData.splitData[splitDataIndex], iifTransactionNameColumnName); splitName != "" {
|
||||||
|
data[datatable.TRANSACTION_DATA_TABLE_DESCRIPTION] = splitName
|
||||||
} else if name, _ := dataset.getTransactionDataItemValue(transactionData, iifTransactionNameColumnName); name != "" {
|
} else if name, _ := dataset.getTransactionDataItemValue(transactionData, iifTransactionNameColumnName); name != "" {
|
||||||
data[datatable.TRANSACTION_DATA_TABLE_DESCRIPTION] = name
|
data[datatable.TRANSACTION_DATA_TABLE_DESCRIPTION] = name
|
||||||
} else {
|
} else {
|
||||||
@@ -298,6 +386,49 @@ func (t *iifTransactionDataRowIterator) parseTransaction(ctx core.Context, user
|
|||||||
return data, nil
|
return data, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (t *iifTransactionDataRowIterator) isSplitTransactionSupported(ctx core.Context, dataset *iifTransactionDataset, transactionData *iifTransactionData) (bool, error) {
|
||||||
|
supportSplitTransactions := true
|
||||||
|
transactionType, _ := dataset.getTransactionDataItemValue(transactionData, iifTransactionTypeColumnName)
|
||||||
|
|
||||||
|
if transactionType == iifTransactionTypeBeginningBalance { // balance modification
|
||||||
|
supportSplitTransactions = false
|
||||||
|
log.Errorf(ctx, "[iif_transaction_data_table.isSplitTransactionSupported] cannot parse split balance modification transaction#%d (dataset#%d)", t.currentIndexInDataset, t.currentDatasetIndex)
|
||||||
|
} else {
|
||||||
|
transactionAmountStr, _ := dataset.getTransactionDataItemValue(transactionData, iifTransactionAmountColumnName)
|
||||||
|
transactionAmount, err := parseAmount(transactionAmountStr)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[iif_transaction_data_table.isSplitTransactionSupported] cannot parsing transaction in row#%d (dataset#%d), because transaction amount \"%s\" is invalid", t.currentIndexInDataset, t.currentDatasetIndex, transactionAmountStr)
|
||||||
|
return false, errs.ErrAmountInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
splitTotalAmount := int64(0)
|
||||||
|
|
||||||
|
for i := 0; i < len(transactionData.splitData); i++ {
|
||||||
|
splitAmountStr, _ := dataset.getSplitDataItemValue(transactionData.splitData[i], iifTransactionAmountColumnName)
|
||||||
|
splitAmount, err := parseAmount(splitAmountStr)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[iif_transaction_data_table.isSplitTransactionSupported] cannot parsing transaction in row#%d-split#%d (dataset#%d), because split amount \"%s\" is invalid", t.currentIndexInDataset, i, t.currentDatasetIndex, splitAmountStr)
|
||||||
|
return false, errs.ErrAmountInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
splitTotalAmount += splitAmount
|
||||||
|
}
|
||||||
|
|
||||||
|
if splitTotalAmount != -transactionAmount {
|
||||||
|
supportSplitTransactions = false
|
||||||
|
log.Errorf(ctx, "[iif_transaction_data_table.isSplitTransactionSupported] cannot parse split transaction#%d (dataset#%d), because the sum amount of each split data \"%d\" not equal to the transaction amount \"%d\"", t.currentIndexInDataset, t.currentDatasetIndex, splitTotalAmount, -transactionAmount)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(transactionData.splitData) > 1 && !supportSplitTransactions {
|
||||||
|
return false, errs.ErrNotSupportedSplitTransactions
|
||||||
|
}
|
||||||
|
|
||||||
|
return true, nil
|
||||||
|
}
|
||||||
|
|
||||||
func (t *iifTransactionDataRowIterator) parseTransactionTime(dataset *iifTransactionDataset, transactionData *iifTransactionData) (string, error) {
|
func (t *iifTransactionDataRowIterator) parseTransactionTime(dataset *iifTransactionDataset, transactionData *iifTransactionData) (string, error) {
|
||||||
date, _ := dataset.getTransactionDataItemValue(transactionData, iifTransactionDateColumnName)
|
date, _ := dataset.getTransactionDataItemValue(transactionData, iifTransactionDateColumnName)
|
||||||
dateParts := strings.Split(date, "/")
|
dateParts := strings.Split(date, "/")
|
||||||
@@ -316,6 +447,10 @@ func (t *iifTransactionDataRowIterator) parseTransactionTime(dataset *iifTransac
|
|||||||
day = dateParts[2]
|
day = dateParts[2]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if len(year) == 2 {
|
||||||
|
year = utils.IntToString(time.Now().Year()/100) + year
|
||||||
|
}
|
||||||
|
|
||||||
if len(month) < 2 {
|
if len(month) < 2 {
|
||||||
month = "0" + month
|
month = "0" + month
|
||||||
}
|
}
|
||||||
@@ -390,3 +525,7 @@ func getIncomeAndExpenseAccountNameMap(accountDatasets []*iifAccountDataset) (in
|
|||||||
|
|
||||||
return incomeAccountNames, expenseAccountNames
|
return incomeAccountNames, expenseAccountNames
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func parseAmount(amount string) (int64, error) {
|
||||||
|
return utils.ParseAmount(strings.ReplaceAll(amount, ",", ""))
|
||||||
|
}
|
||||||
|
|||||||
@@ -20,8 +20,9 @@ import (
|
|||||||
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
const ofxUnicodeEncoding = "unicode"
|
const ofx1USAsciiEncoding = "usascii"
|
||||||
const ofxUSAsciiEncoding = "usascii"
|
const ofx1UnicodeEncoding = "unicode"
|
||||||
|
const ofx1UTF8Encoding = "utf8" // non-standard ofx 1.x encoding, used by some banks (https://github.com/mayswind/ezbookkeeping/issues/48)
|
||||||
const ofx1SGMLDataFormat = "OFXSGML"
|
const ofx1SGMLDataFormat = "OFXSGML"
|
||||||
|
|
||||||
var ofx2HeaderPattern = regexp.MustCompile("<\\?OFX( +[A-Z]+=\"[^=]*\")* *\\?>")
|
var ofx2HeaderPattern = regexp.MustCompile("<\\?OFX( +[A-Z]+=\"[^=]*\")* *\\?>")
|
||||||
@@ -231,7 +232,7 @@ func readOFX1FileHeader(ctx core.Context, data []byte) (fileHeader *ofxFileHeade
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if fileEncoding == ofxUSAsciiEncoding {
|
if fileEncoding == ofx1USAsciiEncoding {
|
||||||
if utils.IsStringOnlyContainsDigits(fileCharset) {
|
if utils.IsStringOnlyContainsDigits(fileCharset) {
|
||||||
fileCharset = "cp" + fileCharset
|
fileCharset = "cp" + fileCharset
|
||||||
}
|
}
|
||||||
@@ -245,12 +246,18 @@ func readOFX1FileHeader(ctx core.Context, data []byte) (fileHeader *ofxFileHeade
|
|||||||
if enc == nil {
|
if enc == nil {
|
||||||
enc = charmap.Windows1252
|
enc = charmap.Windows1252
|
||||||
}
|
}
|
||||||
} else if fileEncoding == ofxUnicodeEncoding {
|
} else if fileEncoding == ofx1UnicodeEncoding {
|
||||||
enc, _ = charset.Lookup(ofxUnicodeEncoding)
|
enc, _ = charset.Lookup(ofx1UnicodeEncoding)
|
||||||
|
|
||||||
if enc == nil {
|
if enc == nil {
|
||||||
enc = unicode.UTF16(unicode.LittleEndian, unicode.IgnoreBOM)
|
enc = unicode.UTF16(unicode.LittleEndian, unicode.IgnoreBOM)
|
||||||
}
|
}
|
||||||
|
} else if fileEncoding == ofx1UTF8Encoding {
|
||||||
|
enc, _ = charset.Lookup(ofx1UTF8Encoding)
|
||||||
|
|
||||||
|
if enc == nil {
|
||||||
|
enc = unicode.UTF8
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
log.Errorf(ctx, "[ofx_data_reader.readOFX1FileHeader] cannot parse ofx 1.x file, because encoding \"%s\" is unknown", fileEncoding)
|
log.Errorf(ctx, "[ofx_data_reader.readOFX1FileHeader] cannot parse ofx 1.x file, because encoding \"%s\" is unknown", fileEncoding)
|
||||||
return nil, nil, "", nil, errs.ErrInvalidOFXFile
|
return nil, nil, "", nil, errs.ErrInvalidOFXFile
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
package ofx
|
package ofx
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/converter"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/models"
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
@@ -23,7 +23,7 @@ var (
|
|||||||
)
|
)
|
||||||
|
|
||||||
// ParseImportedData returns the imported data by parsing the open financial exchange (ofx) file transaction data
|
// ParseImportedData returns the imported data by parsing the open financial exchange (ofx) file transaction data
|
||||||
func (c *ofxTransactionDataImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
func (c *ofxTransactionDataImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
||||||
ofxDataReader, err := createNewOFXFileReader(ctx, data)
|
ofxDataReader, err := createNewOFXFileReader(ctx, data)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -42,7 +42,7 @@ func (c *ofxTransactionDataImporter) ParseImportedData(ctx core.Context, user *m
|
|||||||
return nil, nil, nil, nil, nil, nil, err
|
return nil, nil, nil, nil, nil, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
dataTableImporter := datatable.CreateNewSimpleImporter(ofxTransactionTypeNameMapping)
|
dataTableImporter := converter.CreateNewSimpleImporterWithTypeNameMapping(ofxTransactionTypeNameMapping)
|
||||||
|
|
||||||
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
||||||
}
|
}
|
||||||
|
|||||||
+3
-1
@@ -105,6 +105,7 @@ func (t *ofxTransactionDataRowIterator) Next(ctx core.Context, user *models.User
|
|||||||
rowItems, err := t.parseTransaction(ctx, user, data)
|
rowItems, err := t.parseTransaction(ctx, user, data)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[ofx_transaction_table.Next] cannot parsing transaction in row#%d, because %s", t.currentIndex, err.Error())
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -151,9 +152,10 @@ func (t *ofxTransactionDataRowIterator) parseTransaction(ctx core.Context, user
|
|||||||
return nil, errs.ErrAmountInvalid
|
return nil, errs.ErrAmountInvalid
|
||||||
}
|
}
|
||||||
|
|
||||||
amount, err := utils.ParseAmount(strings.ReplaceAll(ofxTransaction.Amount, ",", ".")) // ofx supports decimal point or comma to indicate the start of the fractional amount
|
amount, err := utils.ParseAmount(utils.TrimTrailingZerosInDecimal(strings.ReplaceAll(ofxTransaction.Amount, ",", "."))) // ofx supports decimal point or comma to indicate the start of the fractional amount
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[ofx_transaction_table.parseTransaction] cannot parsing transaction amount \"%s\", because %s", ofxTransaction.Amount, err.Error())
|
||||||
return nil, errs.ErrAmountInvalid
|
return nil, errs.ErrAmountInvalid
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
package qif
|
package qif
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/converter"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/models"
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
"github.com/mayswind/ezbookkeeping/pkg/utils"
|
||||||
@@ -35,7 +35,7 @@ var (
|
|||||||
)
|
)
|
||||||
|
|
||||||
// ParseImportedData returns the imported data by parsing the quicken interchange format (qif) transaction data
|
// ParseImportedData returns the imported data by parsing the quicken interchange format (qif) transaction data
|
||||||
func (c *qifTransactionDataImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
func (c *qifTransactionDataImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
||||||
qifDataReader := createNewQifDataReader(data)
|
qifDataReader := createNewQifDataReader(data)
|
||||||
qifData, err := qifDataReader.read(ctx)
|
qifData, err := qifDataReader.read(ctx)
|
||||||
|
|
||||||
@@ -49,7 +49,7 @@ func (c *qifTransactionDataImporter) ParseImportedData(ctx core.Context, user *m
|
|||||||
return nil, nil, nil, nil, nil, nil, err
|
return nil, nil, nil, nil, nil, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
dataTableImporter := datatable.CreateNewSimpleImporter(qifTransactionTypeNameMapping)
|
dataTableImporter := converter.CreateNewSimpleImporterWithTypeNameMapping(qifTransactionTypeNameMapping)
|
||||||
|
|
||||||
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -105,6 +105,7 @@ func (t *qifTransactionDataRowIterator) Next(ctx core.Context, user *models.User
|
|||||||
rowItems, err := t.parseTransaction(ctx, user, data)
|
rowItems, err := t.parseTransaction(ctx, user, data)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
log.Errorf(ctx, "[qif_transaction_data_table.Next] cannot parsing transaction in row#%d, because %s", t.currentIndex, err.Error())
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -2,8 +2,11 @@ package converters
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/alipay"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/alipay"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/base"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/beancount"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/converter"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/default"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/default"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/dsv"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/feidee"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/feidee"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/fireflyIII"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/fireflyIII"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/gnucash"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/gnucash"
|
||||||
@@ -12,10 +15,11 @@ import (
|
|||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/qif"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/qif"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/wechat"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/wechat"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
"github.com/mayswind/ezbookkeeping/pkg/errs"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/models"
|
||||||
)
|
)
|
||||||
|
|
||||||
// GetTransactionDataExporter returns the transaction data exporter according to the file type
|
// GetTransactionDataExporter returns the transaction data exporter according to the file type
|
||||||
func GetTransactionDataExporter(fileType string) base.TransactionDataExporter {
|
func GetTransactionDataExporter(fileType string) converter.TransactionDataExporter {
|
||||||
if fileType == "csv" {
|
if fileType == "csv" {
|
||||||
return _default.DefaultTransactionDataCSVFileConverter
|
return _default.DefaultTransactionDataCSVFileConverter
|
||||||
} else if fileType == "tsv" {
|
} else if fileType == "tsv" {
|
||||||
@@ -26,7 +30,7 @@ func GetTransactionDataExporter(fileType string) base.TransactionDataExporter {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// GetTransactionDataImporter returns the transaction data importer according to the file type
|
// GetTransactionDataImporter returns the transaction data importer according to the file type
|
||||||
func GetTransactionDataImporter(fileType string) (base.TransactionDataImporter, error) {
|
func GetTransactionDataImporter(fileType string) (converter.TransactionDataImporter, error) {
|
||||||
if fileType == "ezbookkeeping_csv" {
|
if fileType == "ezbookkeeping_csv" {
|
||||||
return _default.DefaultTransactionDataCSVFileConverter, nil
|
return _default.DefaultTransactionDataCSVFileConverter, nil
|
||||||
} else if fileType == "ezbookkeeping_tsv" {
|
} else if fileType == "ezbookkeeping_tsv" {
|
||||||
@@ -47,10 +51,14 @@ func GetTransactionDataImporter(fileType string) (base.TransactionDataImporter,
|
|||||||
return gnucash.GnuCashTransactionDataImporter, nil
|
return gnucash.GnuCashTransactionDataImporter, nil
|
||||||
} else if fileType == "firefly_iii_csv" {
|
} else if fileType == "firefly_iii_csv" {
|
||||||
return fireflyIII.FireflyIIITransactionDataCsvFileImporter, nil
|
return fireflyIII.FireflyIIITransactionDataCsvFileImporter, nil
|
||||||
|
} else if fileType == "beancount" {
|
||||||
|
return beancount.BeancountTransactionDataImporter, nil
|
||||||
} else if fileType == "feidee_mymoney_csv" {
|
} else if fileType == "feidee_mymoney_csv" {
|
||||||
return feidee.FeideeMymoneyAppTransactionDataCsvFileImporter, nil
|
return feidee.FeideeMymoneyAppTransactionDataCsvFileImporter, nil
|
||||||
} else if fileType == "feidee_mymoney_xls" {
|
} else if fileType == "feidee_mymoney_xls" {
|
||||||
return feidee.FeideeMymoneyWebTransactionDataXlsFileImporter, nil
|
return feidee.FeideeMymoneyWebTransactionDataXlsFileImporter, nil
|
||||||
|
} else if fileType == "feidee_mymoney_elecloud_xlsx" {
|
||||||
|
return feidee.FeideeMymoneyElecloudTransactionDataXlsxFileImporter, nil
|
||||||
} else if fileType == "alipay_app_csv" {
|
} else if fileType == "alipay_app_csv" {
|
||||||
return alipay.AlipayAppTransactionDataCsvFileImporter, nil
|
return alipay.AlipayAppTransactionDataCsvFileImporter, nil
|
||||||
} else if fileType == "alipay_web_csv" {
|
} else if fileType == "alipay_web_csv" {
|
||||||
@@ -61,3 +69,18 @@ func GetTransactionDataImporter(fileType string) (base.TransactionDataImporter,
|
|||||||
return nil, errs.ErrImportFileTypeNotSupported
|
return nil, errs.ErrImportFileTypeNotSupported
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// IsCustomDelimiterSeparatedValuesFileType returns whether the file type is the delimiter-separated values file type
|
||||||
|
func IsCustomDelimiterSeparatedValuesFileType(fileType string) bool {
|
||||||
|
return dsv.IsDelimiterSeparatedValuesFileType(fileType)
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateNewDelimiterSeparatedValuesDataParser returns a new delimiter-separated values data parser according to the file type and encoding
|
||||||
|
func CreateNewDelimiterSeparatedValuesDataParser(fileType string, fileEncoding string) (dsv.CustomTransactionDataDsvFileParser, error) {
|
||||||
|
return dsv.CreateNewCustomTransactionDataDsvFileParser(fileType, fileEncoding)
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateNewDelimiterSeparatedValuesDataImporter returns a new delimiter-separated values data importer according to the file type and encoding
|
||||||
|
func CreateNewDelimiterSeparatedValuesDataImporter(fileType string, fileEncoding string, columnIndexMapping map[datatable.TransactionDataTableColumn]int, transactionTypeNameMapping map[string]models.TransactionType, hasHeaderLine bool, timeFormat string, timezoneFormat string, amountDecimalSeparator string, amountDigitGroupingSymbol string, geoLocationSeparator string, transactionTagSeparator string) (converter.TransactionDataImporter, error) {
|
||||||
|
return dsv.CreateNewCustomTransactionDataDsvFileImporter(fileType, fileEncoding, columnIndexMapping, transactionTypeNameMapping, hasHeaderLine, timeFormat, timezoneFormat, amountDecimalSeparator, amountDigitGroupingSymbol, geoLocationSeparator, transactionTagSeparator)
|
||||||
|
}
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ import (
|
|||||||
"io"
|
"io"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/converters/converter"
|
||||||
csvdatatable "github.com/mayswind/ezbookkeeping/pkg/converters/csv"
|
csvdatatable "github.com/mayswind/ezbookkeeping/pkg/converters/csv"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
"github.com/mayswind/ezbookkeeping/pkg/converters/datatable"
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
@@ -44,7 +45,7 @@ var (
|
|||||||
)
|
)
|
||||||
|
|
||||||
// ParseImportedData returns the imported data by parsing the wechat pay transaction csv data
|
// ParseImportedData returns the imported data by parsing the wechat pay transaction csv data
|
||||||
func (c *wechatPayTransactionDataCsvFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]*models.TransactionCategory, incomeCategoryMap map[string]*models.TransactionCategory, transferCategoryMap map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
func (c *wechatPayTransactionDataCsvFileImporter) ParseImportedData(ctx core.Context, user *models.User, data []byte, defaultTimezoneOffset int16, accountMap map[string]*models.Account, expenseCategoryMap map[string]map[string]*models.TransactionCategory, incomeCategoryMap map[string]map[string]*models.TransactionCategory, transferCategoryMap map[string]map[string]*models.TransactionCategory, tagMap map[string]*models.TransactionTag) (models.ImportedTransactionSlice, []*models.Account, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionCategory, []*models.TransactionTag, error) {
|
||||||
fallback := unicode.UTF8.NewDecoder()
|
fallback := unicode.UTF8.NewDecoder()
|
||||||
reader := transform.NewReader(bytes.NewReader(data), unicode.BOMOverride(fallback))
|
reader := transform.NewReader(bytes.NewReader(data), unicode.BOMOverride(fallback))
|
||||||
|
|
||||||
@@ -67,7 +68,7 @@ func (c *wechatPayTransactionDataCsvFileImporter) ParseImportedData(ctx core.Con
|
|||||||
|
|
||||||
transactionRowParser := createWeChatPayTransactionDataRowParser()
|
transactionRowParser := createWeChatPayTransactionDataRowParser()
|
||||||
transactionDataTable := datatable.CreateNewCommonTransactionDataTable(commonDataTable, wechatPayTransactionSupportedColumns, transactionRowParser)
|
transactionDataTable := datatable.CreateNewCommonTransactionDataTable(commonDataTable, wechatPayTransactionSupportedColumns, transactionRowParser)
|
||||||
dataTableImporter := datatable.CreateNewSimpleImporter(wechatPayTransactionTypeNameMapping)
|
dataTableImporter := converter.CreateNewSimpleImporterWithTypeNameMapping(wechatPayTransactionTypeNameMapping)
|
||||||
|
|
||||||
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
return dataTableImporter.ParseImportedData(ctx, user, transactionDataTable, defaultTimezoneOffset, accountMap, expenseCategoryMap, incomeCategoryMap, transferCategoryMap, tagMap)
|
||||||
}
|
}
|
||||||
|
|||||||
+23
-4
@@ -1,12 +1,15 @@
|
|||||||
package core
|
package core
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/urfave/cli/v2"
|
"context"
|
||||||
|
|
||||||
|
"github.com/urfave/cli/v3"
|
||||||
)
|
)
|
||||||
|
|
||||||
// CliContext represents the command-line context
|
// CliContext represents the command-line context
|
||||||
type CliContext struct {
|
type CliContext struct {
|
||||||
*cli.Context
|
context.Context
|
||||||
|
command *cli.Command
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetContextId returns the current context id
|
// GetContextId returns the current context id
|
||||||
@@ -19,9 +22,25 @@ func (c *CliContext) GetClientLocale() string {
|
|||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Bool returns the boolean value of parameter
|
||||||
|
func (c *CliContext) Bool(name string) bool {
|
||||||
|
return c.command.Bool(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Int returns the integer value of parameter
|
||||||
|
func (c *CliContext) Int(name string) int {
|
||||||
|
return c.command.Int(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// String returns the string value of parameter
|
||||||
|
func (c *CliContext) String(name string) string {
|
||||||
|
return c.command.String(name)
|
||||||
|
}
|
||||||
|
|
||||||
// WrapCliContext returns a context wrapped by this file
|
// WrapCliContext returns a context wrapped by this file
|
||||||
func WrapCilContext(cliCtx *cli.Context) *CliContext {
|
func WrapCilContext(ctx context.Context, cmd *cli.Command) *CliContext {
|
||||||
return &CliContext{
|
return &CliContext{
|
||||||
Context: cliCtx,
|
Context: ctx,
|
||||||
|
command: cmd,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,42 @@
|
|||||||
|
package core
|
||||||
|
|
||||||
|
import "fmt"
|
||||||
|
|
||||||
|
// CoordinateDisplayType represents the display type of geographic coordinates
|
||||||
|
type CoordinateDisplayType byte
|
||||||
|
|
||||||
|
// Coordinate Display Type
|
||||||
|
const (
|
||||||
|
COORDINATE_DISPLAY_TYPE_DEFAULT CoordinateDisplayType = 0
|
||||||
|
COORDINATE_DISPLAY_TYPE_LATITUDE_LONGITUDE_DECIMAL_DEGREES CoordinateDisplayType = 1
|
||||||
|
COORDINATE_DISPLAY_TYPE_LONGITUDE_LATITUDE_DECIMAL_DEGREES CoordinateDisplayType = 2
|
||||||
|
COORDINATE_DISPLAY_TYPE_LATITUDE_LONGITUDE_DECIMAL_MINUTES CoordinateDisplayType = 3
|
||||||
|
COORDINATE_DISPLAY_TYPE_LONGITUDE_LATITUDE_DECIMAL_MINUTES CoordinateDisplayType = 4
|
||||||
|
COORDINATE_DISPLAY_TYPE_LATITUDE_LONGITUDE_DEGREES_MINUTES_SECONDS CoordinateDisplayType = 5
|
||||||
|
COORDINATE_DISPLAY_TYPE_LONGITUDE_LATITUDE_DEGREES_MINUTES_SECONDS CoordinateDisplayType = 6
|
||||||
|
COORDINATE_DISPLAY_TYPE_INVALID CoordinateDisplayType = 255
|
||||||
|
)
|
||||||
|
|
||||||
|
// String returns a textual representation of the geographic coordinates display type enum
|
||||||
|
func (d CoordinateDisplayType) String() string {
|
||||||
|
switch d {
|
||||||
|
case COORDINATE_DISPLAY_TYPE_DEFAULT:
|
||||||
|
return "Default"
|
||||||
|
case COORDINATE_DISPLAY_TYPE_LATITUDE_LONGITUDE_DECIMAL_DEGREES:
|
||||||
|
return "Latitude Longitude (Decimal Degrees)"
|
||||||
|
case COORDINATE_DISPLAY_TYPE_LONGITUDE_LATITUDE_DECIMAL_DEGREES:
|
||||||
|
return "Longitude Latitude (Decimal Degrees)"
|
||||||
|
case COORDINATE_DISPLAY_TYPE_LATITUDE_LONGITUDE_DECIMAL_MINUTES:
|
||||||
|
return "Latitude Longitude (Decimal Minutes)"
|
||||||
|
case COORDINATE_DISPLAY_TYPE_LONGITUDE_LATITUDE_DECIMAL_MINUTES:
|
||||||
|
return "Longitude Latitude (Decimal Minutes)"
|
||||||
|
case COORDINATE_DISPLAY_TYPE_LATITUDE_LONGITUDE_DEGREES_MINUTES_SECONDS:
|
||||||
|
return "Latitude Longitude (Degrees Minutes Seconds)"
|
||||||
|
case COORDINATE_DISPLAY_TYPE_LONGITUDE_LATITUDE_DEGREES_MINUTES_SECONDS:
|
||||||
|
return "Longitude Latitude (Degrees Minutes Seconds)"
|
||||||
|
case COORDINATE_DISPLAY_TYPE_INVALID:
|
||||||
|
return "Invalid"
|
||||||
|
default:
|
||||||
|
return fmt.Sprintf("Invalid(%d)", int(d))
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -15,6 +15,9 @@ type MiddlewareHandlerFunc func(*WebContext)
|
|||||||
// ApiHandlerFunc represents the api handler function
|
// ApiHandlerFunc represents the api handler function
|
||||||
type ApiHandlerFunc func(*WebContext) (any, *errs.Error)
|
type ApiHandlerFunc func(*WebContext) (any, *errs.Error)
|
||||||
|
|
||||||
|
// EventStreamApiHandlerFunc represents the event stream api handler function
|
||||||
|
type EventStreamApiHandlerFunc func(*WebContext) *errs.Error
|
||||||
|
|
||||||
// DataHandlerFunc represents the handler function that returns file data byte array and file name
|
// DataHandlerFunc represents the handler function that returns file data byte array and file name
|
||||||
type DataHandlerFunc func(*WebContext) ([]byte, string, *errs.Error)
|
type DataHandlerFunc func(*WebContext) ([]byte, string, *errs.Error)
|
||||||
|
|
||||||
|
|||||||
@@ -12,7 +12,6 @@ const (
|
|||||||
DECIMAL_SEPARATOR_DEFAULT DecimalSeparator = 0
|
DECIMAL_SEPARATOR_DEFAULT DecimalSeparator = 0
|
||||||
DECIMAL_SEPARATOR_DOT DecimalSeparator = 1
|
DECIMAL_SEPARATOR_DOT DecimalSeparator = 1
|
||||||
DECIMAL_SEPARATOR_COMMA DecimalSeparator = 2
|
DECIMAL_SEPARATOR_COMMA DecimalSeparator = 2
|
||||||
DECIMAL_SEPARATOR_SPACE DecimalSeparator = 3
|
|
||||||
DECIMAL_SEPARATOR_INVALID DecimalSeparator = 255
|
DECIMAL_SEPARATOR_INVALID DecimalSeparator = 255
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -25,8 +24,6 @@ func (f DecimalSeparator) String() string {
|
|||||||
return "Dot"
|
return "Dot"
|
||||||
case DECIMAL_SEPARATOR_COMMA:
|
case DECIMAL_SEPARATOR_COMMA:
|
||||||
return "Comma"
|
return "Comma"
|
||||||
case DECIMAL_SEPARATOR_SPACE:
|
|
||||||
return "Space"
|
|
||||||
case DECIMAL_SEPARATOR_INVALID:
|
case DECIMAL_SEPARATOR_INVALID:
|
||||||
return "Invalid"
|
return "Invalid"
|
||||||
default:
|
default:
|
||||||
|
|||||||
@@ -0,0 +1,4 @@
|
|||||||
|
package core
|
||||||
|
|
||||||
|
// TaskProcessUpdateHandler represents the task process update handler
|
||||||
|
type TaskProcessUpdateHandler func(currentProcess float64)
|
||||||
@@ -15,22 +15,22 @@ type GocronLoggerAdapter struct {
|
|||||||
|
|
||||||
// Debug logs debug log
|
// Debug logs debug log
|
||||||
func (logger GocronLoggerAdapter) Debug(msg string, args ...any) {
|
func (logger GocronLoggerAdapter) Debug(msg string, args ...any) {
|
||||||
log.Debugf(core.NewNullContext(), logger.getFinalLog(msg, args...))
|
log.Debugf(core.NewNullContext(), "%s", logger.getFinalLog(msg, args...))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Info logs info log
|
// Info logs info log
|
||||||
func (logger GocronLoggerAdapter) Info(msg string, args ...any) {
|
func (logger GocronLoggerAdapter) Info(msg string, args ...any) {
|
||||||
log.Infof(core.NewNullContext(), logger.getFinalLog(msg, args...))
|
log.Infof(core.NewNullContext(), "%s", logger.getFinalLog(msg, args...))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Warn logs warn log
|
// Warn logs warn log
|
||||||
func (logger GocronLoggerAdapter) Warn(msg string, args ...any) {
|
func (logger GocronLoggerAdapter) Warn(msg string, args ...any) {
|
||||||
log.Warnf(core.NewNullContext(), logger.getFinalLog(msg, args...))
|
log.Warnf(core.NewNullContext(), "%s", logger.getFinalLog(msg, args...))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Error logs error log
|
// Error logs error log
|
||||||
func (logger GocronLoggerAdapter) Error(msg string, args ...any) {
|
func (logger GocronLoggerAdapter) Error(msg string, args ...any) {
|
||||||
log.Errorf(core.NewNullContext(), logger.getFinalLog(msg, args...))
|
log.Errorf(core.NewNullContext(), "%s", logger.getFinalLog(msg, args...))
|
||||||
}
|
}
|
||||||
|
|
||||||
func (logger GocronLoggerAdapter) getFinalLog(msg string, args ...any) string {
|
func (logger GocronLoggerAdapter) getFinalLog(msg string, args ...any) string {
|
||||||
|
|||||||
@@ -4,11 +4,13 @@ import (
|
|||||||
"xorm.io/xorm"
|
"xorm.io/xorm"
|
||||||
|
|
||||||
"github.com/mayswind/ezbookkeeping/pkg/core"
|
"github.com/mayswind/ezbookkeeping/pkg/core"
|
||||||
|
"github.com/mayswind/ezbookkeeping/pkg/settings"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Database represents a database instance
|
// Database represents a database instance
|
||||||
type Database struct {
|
type Database struct {
|
||||||
engineGroup *xorm.EngineGroup
|
databaseType string
|
||||||
|
engineGroup *xorm.EngineGroup
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewSession starts a new session with the specified context
|
// NewSession starts a new session with the specified context
|
||||||
@@ -41,3 +43,23 @@ func (db *Database) DoTransaction(c core.Context, fn func(sess *xorm.Session) er
|
|||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SetSavePoint sets a save point in the current transaction for Postgres
|
||||||
|
func (db *Database) SetSavePoint(sess *xorm.Session, savePointName string) error {
|
||||||
|
if db.databaseType == settings.PostgresDbType {
|
||||||
|
_, err := sess.Exec("SAVEPOINT " + savePointName)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// RollbackToSavePoint rolls back to the specified save point in the current transaction for Postgres
|
||||||
|
func (db *Database) RollbackToSavePoint(sess *xorm.Session, savePointName string) error {
|
||||||
|
if db.databaseType == settings.PostgresDbType {
|
||||||
|
_, err := sess.Exec("ROLLBACK TO SAVEPOINT " + savePointName)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|||||||
@@ -104,7 +104,8 @@ func initializeDatabase(dbConfig *settings.DatabaseConfig) (*Database, error) {
|
|||||||
engineGroup.SetConnMaxLifetime(time.Duration(dbConfig.ConnectionMaxLifeTime) * time.Second)
|
engineGroup.SetConnMaxLifetime(time.Duration(dbConfig.ConnectionMaxLifeTime) * time.Second)
|
||||||
|
|
||||||
return &Database{
|
return &Database{
|
||||||
engineGroup: engineGroup,
|
databaseType: dbConfig.DatabaseType,
|
||||||
|
engineGroup: engineGroup,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -6,6 +6,9 @@ import "time"
|
|||||||
type DuplicateChecker interface {
|
type DuplicateChecker interface {
|
||||||
GetSubmissionRemark(checkerType DuplicateCheckerType, uid int64, identification string) (bool, string)
|
GetSubmissionRemark(checkerType DuplicateCheckerType, uid int64, identification string) (bool, string)
|
||||||
SetSubmissionRemark(checkerType DuplicateCheckerType, uid int64, identification string, remark string)
|
SetSubmissionRemark(checkerType DuplicateCheckerType, uid int64, identification string, remark string)
|
||||||
|
RemoveSubmissionRemark(checkerType DuplicateCheckerType, uid int64, identification string)
|
||||||
GetOrSetCronJobRunningInfo(jobName string, runningInfo string, runningInterval time.Duration) (bool, string)
|
GetOrSetCronJobRunningInfo(jobName string, runningInfo string, runningInterval time.Duration) (bool, string)
|
||||||
RemoveCronJobRunningInfo(jobName string)
|
RemoveCronJobRunningInfo(jobName string)
|
||||||
|
GetFailureCount(failureKey string) uint32
|
||||||
|
IncreaseFailureCount(failureKey string) uint32
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -34,11 +34,16 @@ func (c *DuplicateCheckerContainer) GetSubmissionRemark(checkerType DuplicateChe
|
|||||||
return c.Current.GetSubmissionRemark(checkerType, uid, identification)
|
return c.Current.GetSubmissionRemark(checkerType, uid, identification)
|
||||||
}
|
}
|
||||||
|
|
||||||
// SetSubmissionRemark saves the identification and remark to in-memory cache by the current duplicate checker
|
// SetSubmissionRemark saves the identification and remark by the current duplicate checker
|
||||||
func (c *DuplicateCheckerContainer) SetSubmissionRemark(checkerType DuplicateCheckerType, uid int64, identification string, remark string) {
|
func (c *DuplicateCheckerContainer) SetSubmissionRemark(checkerType DuplicateCheckerType, uid int64, identification string, remark string) {
|
||||||
c.Current.SetSubmissionRemark(checkerType, uid, identification, remark)
|
c.Current.SetSubmissionRemark(checkerType, uid, identification, remark)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// RemoveSubmissionRemark removes the identification and remark by the current duplicate checker
|
||||||
|
func (c *DuplicateCheckerContainer) RemoveSubmissionRemark(checkerType DuplicateCheckerType, uid int64, identification string) {
|
||||||
|
c.Current.RemoveSubmissionRemark(checkerType, uid, identification)
|
||||||
|
}
|
||||||
|
|
||||||
// GetOrSetCronJobRunningInfo returns the running info when the cron job is running or saves the running info by the current duplicate checker
|
// GetOrSetCronJobRunningInfo returns the running info when the cron job is running or saves the running info by the current duplicate checker
|
||||||
func (c *DuplicateCheckerContainer) GetOrSetCronJobRunningInfo(jobName string, runningInfo string, runningInterval time.Duration) (bool, string) {
|
func (c *DuplicateCheckerContainer) GetOrSetCronJobRunningInfo(jobName string, runningInfo string, runningInterval time.Duration) (bool, string) {
|
||||||
return c.Current.GetOrSetCronJobRunningInfo(jobName, runningInfo, runningInterval)
|
return c.Current.GetOrSetCronJobRunningInfo(jobName, runningInfo, runningInterval)
|
||||||
@@ -48,3 +53,13 @@ func (c *DuplicateCheckerContainer) GetOrSetCronJobRunningInfo(jobName string, r
|
|||||||
func (c *DuplicateCheckerContainer) RemoveCronJobRunningInfo(jobName string) {
|
func (c *DuplicateCheckerContainer) RemoveCronJobRunningInfo(jobName string) {
|
||||||
c.Current.RemoveCronJobRunningInfo(jobName)
|
c.Current.RemoveCronJobRunningInfo(jobName)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// GetFailureCount returns the failure count of the specified failure key
|
||||||
|
func (c *DuplicateCheckerContainer) GetFailureCount(failureKey string) uint32 {
|
||||||
|
return c.Current.GetFailureCount(failureKey)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IncreaseFailureCount increases the failure count of the specified failure key
|
||||||
|
func (c *DuplicateCheckerContainer) IncreaseFailureCount(failureKey string) uint32 {
|
||||||
|
return c.Current.IncreaseFailureCount(failureKey)
|
||||||
|
}
|
||||||
|
|||||||
@@ -7,9 +7,11 @@ type DuplicateCheckerType uint8
|
|||||||
const (
|
const (
|
||||||
DUPLICATE_CHECKER_TYPE_BACKGROUND_CRON_JOB DuplicateCheckerType = 0
|
DUPLICATE_CHECKER_TYPE_BACKGROUND_CRON_JOB DuplicateCheckerType = 0
|
||||||
DUPLICATE_CHECKER_TYPE_NEW_ACCOUNT DuplicateCheckerType = 1
|
DUPLICATE_CHECKER_TYPE_NEW_ACCOUNT DuplicateCheckerType = 1
|
||||||
DUPLICATE_CHECKER_TYPE_NEW_CATEGORY DuplicateCheckerType = 2
|
DUPLICATE_CHECKER_TYPE_NEW_SUBACCOUNT DuplicateCheckerType = 2
|
||||||
DUPLICATE_CHECKER_TYPE_NEW_TRANSACTION DuplicateCheckerType = 3
|
DUPLICATE_CHECKER_TYPE_NEW_CATEGORY DuplicateCheckerType = 3
|
||||||
DUPLICATE_CHECKER_TYPE_NEW_TEMPLATE DuplicateCheckerType = 4
|
DUPLICATE_CHECKER_TYPE_NEW_TRANSACTION DuplicateCheckerType = 4
|
||||||
DUPLICATE_CHECKER_TYPE_NEW_PICTURE DuplicateCheckerType = 5
|
DUPLICATE_CHECKER_TYPE_NEW_TEMPLATE DuplicateCheckerType = 5
|
||||||
DUPLICATE_CHECKER_TYPE_IMPORT_TRANSACTIONS DuplicateCheckerType = 6
|
DUPLICATE_CHECKER_TYPE_NEW_PICTURE DuplicateCheckerType = 6
|
||||||
|
DUPLICATE_CHECKER_TYPE_IMPORT_TRANSACTIONS DuplicateCheckerType = 7
|
||||||
|
DUPLICATE_CHECKER_TYPE_FAILURE_CHECK DuplicateCheckerType = 255
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -42,6 +42,11 @@ func (c *InMemoryDuplicateChecker) SetSubmissionRemark(checkerType DuplicateChec
|
|||||||
c.cache.Set(c.getCacheKey(checkerType, uid, identification), remark, cache.DefaultExpiration)
|
c.cache.Set(c.getCacheKey(checkerType, uid, identification), remark, cache.DefaultExpiration)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// RemoveSubmissionRemark removes the identification and remark in in-memory cache
|
||||||
|
func (c *InMemoryDuplicateChecker) RemoveSubmissionRemark(checkerType DuplicateCheckerType, uid int64, identification string) {
|
||||||
|
c.cache.Delete(c.getCacheKey(checkerType, uid, identification))
|
||||||
|
}
|
||||||
|
|
||||||
// GetOrSetCronJobRunningInfo returns the running info when the cron job is running or saves the running info by the current duplicate checker
|
// GetOrSetCronJobRunningInfo returns the running info when the cron job is running or saves the running info by the current duplicate checker
|
||||||
func (c *InMemoryDuplicateChecker) GetOrSetCronJobRunningInfo(jobName string, runningInfo string, runningInterval time.Duration) (bool, string) {
|
func (c *InMemoryDuplicateChecker) GetOrSetCronJobRunningInfo(jobName string, runningInfo string, runningInterval time.Duration) (bool, string) {
|
||||||
c.mutex.Lock()
|
c.mutex.Lock()
|
||||||
@@ -69,6 +74,34 @@ func (c *InMemoryDuplicateChecker) RemoveCronJobRunningInfo(jobName string) {
|
|||||||
c.cache.Delete(c.getCacheKey(DUPLICATE_CHECKER_TYPE_BACKGROUND_CRON_JOB, 0, jobName))
|
c.cache.Delete(c.getCacheKey(DUPLICATE_CHECKER_TYPE_BACKGROUND_CRON_JOB, 0, jobName))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// GetFailureCount returns the failure count of the specified failure key
|
||||||
|
func (c *InMemoryDuplicateChecker) GetFailureCount(failureKey string) uint32 {
|
||||||
|
existedFailureCount, found := c.cache.Get(c.getCacheKey(DUPLICATE_CHECKER_TYPE_FAILURE_CHECK, 0, failureKey))
|
||||||
|
|
||||||
|
if found {
|
||||||
|
return existedFailureCount.(uint32)
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// IncreaseFailureCount increases the failure count of the specified failure key
|
||||||
|
func (c *InMemoryDuplicateChecker) IncreaseFailureCount(failureKey string) uint32 {
|
||||||
|
c.mutex.Lock()
|
||||||
|
defer c.mutex.Unlock()
|
||||||
|
|
||||||
|
cacheKey := c.getCacheKey(DUPLICATE_CHECKER_TYPE_FAILURE_CHECK, 0, failureKey)
|
||||||
|
_, found := c.cache.Get(cacheKey)
|
||||||
|
|
||||||
|
if found {
|
||||||
|
failureCount, _ := c.cache.IncrementUint32(cacheKey, uint32(1))
|
||||||
|
return failureCount
|
||||||
|
} else {
|
||||||
|
c.cache.Set(cacheKey, uint32(1), 1*time.Minute)
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func (c *InMemoryDuplicateChecker) getCacheKey(checkerType DuplicateCheckerType, uid int64, identification string) string {
|
func (c *InMemoryDuplicateChecker) getCacheKey(checkerType DuplicateCheckerType, uid int64, identification string) string {
|
||||||
return fmt.Sprintf("%d|%d|%s", checkerType, uid, identification)
|
return fmt.Sprintf("%d|%d|%s", checkerType, uid, identification)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -155,3 +155,77 @@ func TestGetOrSetRunningInfoConcurrent(t *testing.T) {
|
|||||||
|
|
||||||
assert.Equal(t, uint32(999), setRunningInfoCount.Load())
|
assert.Equal(t, uint32(999), setRunningInfoCount.Load())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestGetFailureCount(t *testing.T) {
|
||||||
|
checker, _ := NewInMemoryDuplicateChecker(&settings.Config{
|
||||||
|
DuplicateSubmissionsIntervalDuration: time.Second,
|
||||||
|
InMemoryDuplicateCheckerCleanupIntervalDuration: time.Second,
|
||||||
|
})
|
||||||
|
|
||||||
|
failureKey := "127.0.0.1"
|
||||||
|
|
||||||
|
failureCount := checker.GetFailureCount(failureKey)
|
||||||
|
assert.Equal(t, uint32(0), failureCount)
|
||||||
|
|
||||||
|
failureCount = checker.IncreaseFailureCount(failureKey)
|
||||||
|
assert.Equal(t, uint32(1), failureCount)
|
||||||
|
|
||||||
|
failureCount = checker.GetFailureCount(failureKey)
|
||||||
|
assert.Equal(t, uint32(1), failureCount)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIncreaseFailureCount(t *testing.T) {
|
||||||
|
checker, _ := NewInMemoryDuplicateChecker(&settings.Config{
|
||||||
|
DuplicateSubmissionsIntervalDuration: time.Second,
|
||||||
|
InMemoryDuplicateCheckerCleanupIntervalDuration: time.Second,
|
||||||
|
})
|
||||||
|
|
||||||
|
failureKey := "127.0.0.1"
|
||||||
|
|
||||||
|
failureCount := checker.IncreaseFailureCount(failureKey)
|
||||||
|
assert.Equal(t, uint32(1), failureCount)
|
||||||
|
|
||||||
|
failureCount = checker.GetFailureCount(failureKey)
|
||||||
|
assert.Equal(t, uint32(1), failureCount)
|
||||||
|
|
||||||
|
failureCount = checker.IncreaseFailureCount(failureKey)
|
||||||
|
assert.Equal(t, uint32(2), failureCount)
|
||||||
|
|
||||||
|
failureCount = checker.GetFailureCount(failureKey)
|
||||||
|
assert.Equal(t, uint32(2), failureCount)
|
||||||
|
|
||||||
|
failureCount = checker.IncreaseFailureCount(failureKey)
|
||||||
|
assert.Equal(t, uint32(3), failureCount)
|
||||||
|
|
||||||
|
failureCount = checker.GetFailureCount(failureKey)
|
||||||
|
assert.Equal(t, uint32(3), failureCount)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIncreaseFailureCountConcurrent(t *testing.T) {
|
||||||
|
checker, _ := NewInMemoryDuplicateChecker(&settings.Config{
|
||||||
|
DuplicateSubmissionsIntervalDuration: time.Second,
|
||||||
|
InMemoryDuplicateCheckerCleanupIntervalDuration: time.Second,
|
||||||
|
})
|
||||||
|
|
||||||
|
failureKey := "127.0.0.1"
|
||||||
|
|
||||||
|
concurrentCount := 10
|
||||||
|
var waitGroup sync.WaitGroup
|
||||||
|
|
||||||
|
for routineIndex := 0; routineIndex < concurrentCount; routineIndex++ {
|
||||||
|
waitGroup.Add(1)
|
||||||
|
|
||||||
|
go func(currentRoutineIndex int) {
|
||||||
|
for cycle := 0; cycle < 10; cycle++ {
|
||||||
|
checker.IncreaseFailureCount(failureKey)
|
||||||
|
}
|
||||||
|
|
||||||
|
waitGroup.Done()
|
||||||
|
}(routineIndex)
|
||||||
|
}
|
||||||
|
|
||||||
|
waitGroup.Wait()
|
||||||
|
|
||||||
|
failureCount := checker.GetFailureCount(failureKey)
|
||||||
|
assert.Equal(t, uint32(100), failureCount)
|
||||||
|
}
|
||||||
|
|||||||
+5
-1
@@ -14,7 +14,6 @@ var (
|
|||||||
ErrParentAccountCannotSetBalance = NewNormalError(NormalSubcategoryAccount, 7, http.StatusBadRequest, "parent account cannot set balance")
|
ErrParentAccountCannotSetBalance = NewNormalError(NormalSubcategoryAccount, 7, http.StatusBadRequest, "parent account cannot set balance")
|
||||||
ErrSubAccountCategoryNotEqualsToParent = NewNormalError(NormalSubcategoryAccount, 8, http.StatusBadRequest, "sub-account category not equals to parent")
|
ErrSubAccountCategoryNotEqualsToParent = NewNormalError(NormalSubcategoryAccount, 8, http.StatusBadRequest, "sub-account category not equals to parent")
|
||||||
ErrSubAccountTypeInvalid = NewNormalError(NormalSubcategoryAccount, 9, http.StatusBadRequest, "sub-account type invalid")
|
ErrSubAccountTypeInvalid = NewNormalError(NormalSubcategoryAccount, 9, http.StatusBadRequest, "sub-account type invalid")
|
||||||
ErrCannotAddOrDeleteSubAccountsWhenModify = NewNormalError(NormalSubcategoryAccount, 10, http.StatusBadRequest, "cannot add or delete sub-accounts when modify account")
|
|
||||||
ErrSourceAccountNotFound = NewNormalError(NormalSubcategoryAccount, 11, http.StatusBadRequest, "source account not found")
|
ErrSourceAccountNotFound = NewNormalError(NormalSubcategoryAccount, 11, http.StatusBadRequest, "source account not found")
|
||||||
ErrDestinationAccountNotFound = NewNormalError(NormalSubcategoryAccount, 12, http.StatusBadRequest, "destination account not found")
|
ErrDestinationAccountNotFound = NewNormalError(NormalSubcategoryAccount, 12, http.StatusBadRequest, "destination account not found")
|
||||||
ErrAccountInUseCannotBeDeleted = NewNormalError(NormalSubcategoryAccount, 13, http.StatusBadRequest, "account is in use and cannot be deleted")
|
ErrAccountInUseCannotBeDeleted = NewNormalError(NormalSubcategoryAccount, 13, http.StatusBadRequest, "account is in use and cannot be deleted")
|
||||||
@@ -22,4 +21,9 @@ var (
|
|||||||
ErrAccountBalanceTimeNotSet = NewNormalError(NormalSubcategoryAccount, 15, http.StatusBadRequest, "account balance time is not set")
|
ErrAccountBalanceTimeNotSet = NewNormalError(NormalSubcategoryAccount, 15, http.StatusBadRequest, "account balance time is not set")
|
||||||
ErrCannotSetStatementDateForNonCreditCard = NewNormalError(NormalSubcategoryAccount, 16, http.StatusBadRequest, "cannot set statement date for non credit card account")
|
ErrCannotSetStatementDateForNonCreditCard = NewNormalError(NormalSubcategoryAccount, 16, http.StatusBadRequest, "cannot set statement date for non credit card account")
|
||||||
ErrCannotSetStatementDateForSubAccount = NewNormalError(NormalSubcategoryAccount, 17, http.StatusBadRequest, "cannot set statement date for sub account")
|
ErrCannotSetStatementDateForSubAccount = NewNormalError(NormalSubcategoryAccount, 17, http.StatusBadRequest, "cannot set statement date for sub account")
|
||||||
|
ErrSubAccountNotFound = NewNormalError(NormalSubcategoryAccount, 18, http.StatusBadRequest, "sub-account not found")
|
||||||
|
ErrSubAccountInUseCannotBeDeleted = NewNormalError(NormalSubcategoryAccount, 19, http.StatusBadRequest, "sub-account is in use and cannot be deleted")
|
||||||
|
ErrNotSupportedChangeCurrency = NewNormalError(NormalSubcategoryAccount, 20, http.StatusBadRequest, "not supported to modify account currency")
|
||||||
|
ErrNotSupportedChangeBalance = NewNormalError(NormalSubcategoryAccount, 21, http.StatusBadRequest, "not supported to modify account balance")
|
||||||
|
ErrNotSupportedChangeBalanceTime = NewNormalError(NormalSubcategoryAccount, 22, http.StatusBadRequest, "not supported to modify account balance time")
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -25,4 +25,7 @@ var (
|
|||||||
ErrInvalidIIFFile = NewNormalError(NormalSubcategoryConverter, 18, http.StatusBadRequest, "invalid iif file")
|
ErrInvalidIIFFile = NewNormalError(NormalSubcategoryConverter, 18, http.StatusBadRequest, "invalid iif file")
|
||||||
ErrInvalidOFXFile = NewNormalError(NormalSubcategoryConverter, 19, http.StatusBadRequest, "invalid ofx file")
|
ErrInvalidOFXFile = NewNormalError(NormalSubcategoryConverter, 19, http.StatusBadRequest, "invalid ofx file")
|
||||||
ErrInvalidSGMLFile = NewNormalError(NormalSubcategoryConverter, 20, http.StatusBadRequest, "invalid sgml file")
|
ErrInvalidSGMLFile = NewNormalError(NormalSubcategoryConverter, 20, http.StatusBadRequest, "invalid sgml file")
|
||||||
|
ErrInvalidBeancountFile = NewNormalError(NormalSubcategoryConverter, 21, http.StatusBadRequest, "invalid beancount file")
|
||||||
|
ErrBeancountFileNotSupportInclude = NewNormalError(NormalSubcategoryConverter, 22, http.StatusBadRequest, "not support include directive for beancount file")
|
||||||
|
ErrInvalidAmountExpression = NewNormalError(NormalSubcategoryConverter, 23, http.StatusBadRequest, "invalid amount expression")
|
||||||
)
|
)
|
||||||
|
|||||||
+14
-13
@@ -25,19 +25,20 @@ const (
|
|||||||
|
|
||||||
// Sub categories of normal error
|
// Sub categories of normal error
|
||||||
const (
|
const (
|
||||||
NormalSubcategoryGlobal = 0
|
NormalSubcategoryGlobal = 0
|
||||||
NormalSubcategoryUser = 1
|
NormalSubcategoryUser = 1
|
||||||
NormalSubcategoryToken = 2
|
NormalSubcategoryToken = 2
|
||||||
NormalSubcategoryTwofactor = 3
|
NormalSubcategoryTwofactor = 3
|
||||||
NormalSubcategoryAccount = 4
|
NormalSubcategoryAccount = 4
|
||||||
NormalSubcategoryTransaction = 5
|
NormalSubcategoryTransaction = 5
|
||||||
NormalSubcategoryCategory = 6
|
NormalSubcategoryCategory = 6
|
||||||
NormalSubcategoryTag = 7
|
NormalSubcategoryTag = 7
|
||||||
NormalSubcategoryDataManagement = 8
|
NormalSubcategoryDataManagement = 8
|
||||||
NormalSubcategoryMapProxy = 9
|
NormalSubcategoryMapProxy = 9
|
||||||
NormalSubcategoryTemplate = 10
|
NormalSubcategoryTemplate = 10
|
||||||
NormalSubcategoryPicture = 11
|
NormalSubcategoryPicture = 11
|
||||||
NormalSubcategoryConverter = 12
|
NormalSubcategoryConverter = 12
|
||||||
|
NormalSubcategoryUserCustomExchangeRate = 13
|
||||||
)
|
)
|
||||||
|
|
||||||
// Error represents the specific error returned to user
|
// Error represents the specific error returned to user
|
||||||
|
|||||||
@@ -25,6 +25,8 @@ var (
|
|||||||
ErrNoFilesUpload = NewNormalError(NormalSubcategoryGlobal, 15, http.StatusBadRequest, "no files uploaded")
|
ErrNoFilesUpload = NewNormalError(NormalSubcategoryGlobal, 15, http.StatusBadRequest, "no files uploaded")
|
||||||
ErrUploadedFileEmpty = NewNormalError(NormalSubcategoryGlobal, 16, http.StatusBadRequest, "uploaded file is empty")
|
ErrUploadedFileEmpty = NewNormalError(NormalSubcategoryGlobal, 16, http.StatusBadRequest, "uploaded file is empty")
|
||||||
ErrExceedMaxUploadFileSize = NewNormalError(NormalSubcategoryGlobal, 17, http.StatusBadRequest, "uploaded file size exceeds the maximum allowed size")
|
ErrExceedMaxUploadFileSize = NewNormalError(NormalSubcategoryGlobal, 17, http.StatusBadRequest, "uploaded file size exceeds the maximum allowed size")
|
||||||
|
ErrFailureCountLimitReached = NewNormalError(NormalSubcategoryGlobal, 18, http.StatusBadRequest, "failure count exceeded maximum limit")
|
||||||
|
ErrRepeatedRequest = NewNormalError(NormalSubcategoryGlobal, 19, http.StatusBadRequest, "repeated request")
|
||||||
)
|
)
|
||||||
|
|
||||||
// GetParameterInvalidMessage returns specific error message for invalid parameter error
|
// GetParameterInvalidMessage returns specific error message for invalid parameter error
|
||||||
|
|||||||
@@ -35,4 +35,10 @@ var (
|
|||||||
ErrCannotAddTransactionBeforeBalanceModificationTransaction = NewSystemError(NormalSubcategoryTransaction, 28, http.StatusBadRequest, "cannot add transaction before balance modification transaction")
|
ErrCannotAddTransactionBeforeBalanceModificationTransaction = NewSystemError(NormalSubcategoryTransaction, 28, http.StatusBadRequest, "cannot add transaction before balance modification transaction")
|
||||||
ErrBalanceModificationTransactionCannotModifyTime = NewSystemError(NormalSubcategoryTransaction, 29, http.StatusBadRequest, "balance modification transaction cannot modify transaction time")
|
ErrBalanceModificationTransactionCannotModifyTime = NewSystemError(NormalSubcategoryTransaction, 29, http.StatusBadRequest, "balance modification transaction cannot modify transaction time")
|
||||||
ErrTransferTransactionAmountCannotBeLessThanZero = NewNormalError(NormalSubcategoryTransaction, 30, http.StatusBadRequest, "transfer transaction amount cannot be less than zero")
|
ErrTransferTransactionAmountCannotBeLessThanZero = NewNormalError(NormalSubcategoryTransaction, 30, http.StatusBadRequest, "transfer transaction amount cannot be less than zero")
|
||||||
|
ErrImportFileEncodingIsEmpty = NewSystemError(NormalSubcategoryTransaction, 31, http.StatusBadRequest, "import file encoding is empty")
|
||||||
|
ErrImportFileEncodingNotSupported = NewSystemError(NormalSubcategoryTransaction, 32, http.StatusBadRequest, "import file encoding not supported")
|
||||||
|
ErrImportFileColumnMappingInvalid = NewSystemError(NormalSubcategoryTransaction, 33, http.StatusBadRequest, "column mapping invalid")
|
||||||
|
ErrImportFileTransactionTypeMappingInvalid = NewSystemError(NormalSubcategoryTransaction, 34, http.StatusBadRequest, "transaction type mapping invalid")
|
||||||
|
ErrImportFileTransactionTimeFormatInvalid = NewSystemError(NormalSubcategoryTransaction, 35, http.StatusBadRequest, "transaction time format invalid")
|
||||||
|
ErrImportFileTransactionTimezoneFormatInvalid = NewSystemError(NormalSubcategoryTransaction, 36, http.StatusBadRequest, "transaction time zone format invalid")
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -4,10 +4,11 @@ import "net/http"
|
|||||||
|
|
||||||
// Error codes related to transaction templates
|
// Error codes related to transaction templates
|
||||||
var (
|
var (
|
||||||
ErrTransactionTemplateIdInvalid = NewNormalError(NormalSubcategoryTemplate, 0, http.StatusBadRequest, "transaction template id is invalid")
|
ErrTransactionTemplateIdInvalid = NewNormalError(NormalSubcategoryTemplate, 0, http.StatusBadRequest, "transaction template id is invalid")
|
||||||
ErrTransactionTemplateNotFound = NewNormalError(NormalSubcategoryTemplate, 1, http.StatusBadRequest, "transaction template not found")
|
ErrTransactionTemplateNotFound = NewNormalError(NormalSubcategoryTemplate, 1, http.StatusBadRequest, "transaction template not found")
|
||||||
ErrTransactionTemplateTypeInvalid = NewNormalError(NormalSubcategoryTemplate, 2, http.StatusBadRequest, "transaction template type is invalid")
|
ErrTransactionTemplateTypeInvalid = NewNormalError(NormalSubcategoryTemplate, 2, http.StatusBadRequest, "transaction template type is invalid")
|
||||||
ErrScheduledTransactionNotEnabled = NewNormalError(NormalSubcategoryTemplate, 3, http.StatusBadRequest, "scheduled transaction is not enabled")
|
ErrScheduledTransactionNotEnabled = NewNormalError(NormalSubcategoryTemplate, 3, http.StatusBadRequest, "scheduled transaction is not enabled")
|
||||||
ErrScheduledTransactionFrequencyInvalid = NewNormalError(NormalSubcategoryTemplate, 4, http.StatusBadRequest, "scheduled transaction frequency is invalid")
|
ErrScheduledTransactionFrequencyInvalid = NewNormalError(NormalSubcategoryTemplate, 4, http.StatusBadRequest, "scheduled transaction frequency is invalid")
|
||||||
ErrTransactionTemplateHasTooManyTags = NewNormalError(NormalSubcategoryTemplate, 5, http.StatusBadRequest, "transaction template has too many tags")
|
ErrTransactionTemplateHasTooManyTags = NewNormalError(NormalSubcategoryTemplate, 5, http.StatusBadRequest, "transaction template has too many tags")
|
||||||
|
ErrScheduledTransactionTemplateStartDataLaterThanEndDate = NewNormalError(NormalSubcategoryTemplate, 6, http.StatusBadRequest, "scheduled transaction start date is later than end time")
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -0,0 +1,10 @@
|
|||||||
|
package errs
|
||||||
|
|
||||||
|
import "net/http"
|
||||||
|
|
||||||
|
// Error codes related to user custom exchange rates
|
||||||
|
var (
|
||||||
|
ErrUserCustomExchangeRateNotFound = NewNormalError(NormalSubcategoryUserCustomExchangeRate, 0, http.StatusBadRequest, "user custom exchange rate data not found")
|
||||||
|
ErrCannotUpdateExchangeRateForDefaultCurrency = NewNormalError(NormalSubcategoryUserCustomExchangeRate, 1, http.StatusBadRequest, "cannot update exchange rate data for base currency")
|
||||||
|
ErrCannotDeleteExchangeRateForDefaultCurrency = NewNormalError(NormalSubcategoryUserCustomExchangeRate, 2, http.StatusBadRequest, "cannot delete exchange rate data for base currency")
|
||||||
|
)
|
||||||
@@ -25,7 +25,7 @@ const bankOfCanadaDataUpdateDateTimezone = "America/Toronto"
|
|||||||
|
|
||||||
// BankOfCanadaDataSource defines the structure of exchange rates data source of bank of Canada
|
// BankOfCanadaDataSource defines the structure of exchange rates data source of bank of Canada
|
||||||
type BankOfCanadaDataSource struct {
|
type BankOfCanadaDataSource struct {
|
||||||
ExchangeRatesDataSource
|
HttpExchangeRatesDataSource
|
||||||
}
|
}
|
||||||
|
|
||||||
// BankOfCanadaExchangeRateData represents the whole data from bank of Canada
|
// BankOfCanadaExchangeRateData represents the whole data from bank of Canada
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ const bankOfIsraelDataUpdateDateFormat = "2006-01-02T15:04:05.9999999Z"
|
|||||||
|
|
||||||
// BankOfIsraelDataSource defines the structure of exchange rates data source of bank of Israel
|
// BankOfIsraelDataSource defines the structure of exchange rates data source of bank of Israel
|
||||||
type BankOfIsraelDataSource struct {
|
type BankOfIsraelDataSource struct {
|
||||||
ExchangeRatesDataSource
|
HttpExchangeRatesDataSource
|
||||||
}
|
}
|
||||||
|
|
||||||
// BankOfIsraelExchangeRateData represents the whole data from bank of Israel
|
// BankOfIsraelExchangeRateData represents the whole data from bank of Israel
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user