Compare commits
394 Commits
feature/li
...
feature/rs
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b6be4e5b2c | ||
|
|
1794b86041 | ||
|
|
f405124ce4 | ||
|
|
25060c1477 | ||
|
|
6ad218f354 | ||
|
|
cbc4537b06 | ||
|
|
d1c52548b0 | ||
|
|
9dc847b72f | ||
|
|
179fdfca92 | ||
|
|
5b04adb21f | ||
|
|
356d25e548 | ||
|
|
76e834b09c | ||
|
|
a9958af818 | ||
|
|
1b55eba24b | ||
|
|
7ba7ebfe52 | ||
|
|
fb359b7f87 | ||
|
|
7f087819a6 | ||
|
|
e836fa3d38 | ||
|
|
cb50d2838a | ||
|
|
aff7125914 | ||
|
|
45d82dce04 | ||
|
|
7a3b27227a | ||
|
|
7b50a19b2c | ||
|
|
741b942dea | ||
|
|
d3a5df0007 | ||
|
|
9f28d947d3 | ||
|
|
6d6a8526cc | ||
|
|
74560c3289 | ||
|
|
f94bf3f8ce | ||
|
|
d26db7096d | ||
|
|
afde0a7423 | ||
|
|
cc930a3ff9 | ||
|
|
60ecdadc74 | ||
|
|
82fc1850cf | ||
|
|
88f937f73e | ||
|
|
b3497c7306 | ||
|
|
366ab2e0cd | ||
|
|
98e4fabd2e | ||
|
|
716c3573fd | ||
|
|
c9638aefe9 | ||
|
|
8bd4721686 | ||
|
|
f4a879a452 | ||
|
|
1b8bb0c1fd | ||
|
|
5c33579544 | ||
|
|
f8081ff09e | ||
|
|
01b7eeeecf | ||
|
|
d2f4c374a9 | ||
|
|
07073eebe9 | ||
|
|
590a4ae476 | ||
|
|
b553a81d47 | ||
|
|
7d4e53e413 | ||
|
|
839b0f6f5e | ||
|
|
893c5da4ef | ||
|
|
f9b893edfa | ||
|
|
b4fadb39bf | ||
|
|
310f8bf6f7 | ||
|
|
903a26a330 | ||
|
|
41ebd39810 | ||
|
|
281de5196e | ||
|
|
a9ab864cbb | ||
|
|
f3ff910821 | ||
|
|
ba5179f1e8 | ||
|
|
05e8f6ed78 | ||
|
|
23150815a0 | ||
|
|
a50f223fe3 | ||
|
|
9329345d98 | ||
|
|
c71c32b363 | ||
|
|
5590aa7234 | ||
|
|
4a3491e0b5 | ||
|
|
e8cb87ae3d | ||
|
|
5564047001 | ||
|
|
22577c5f87 | ||
|
|
4dc2627da2 | ||
|
|
b33198d1bf | ||
|
|
f826b9eb6e | ||
|
|
2b58121552 | ||
|
|
762547d0e9 | ||
|
|
727523eb3f | ||
|
|
5bbdb66eb2 | ||
|
|
c6eff4f90d | ||
|
|
2559173c2c | ||
|
|
8adea132ef | ||
|
|
9d924f8d1c | ||
|
|
1b297fed90 | ||
|
|
2c2a93c440 | ||
|
|
bb076cce5d | ||
|
|
b16b02c3f1 | ||
|
|
0af38c6e0e | ||
|
|
e8d5412e14 | ||
|
|
170cf4753e | ||
|
|
660e76145e | ||
|
|
31a6f7b621 | ||
|
|
f6699ad93b | ||
|
|
6e508e4454 | ||
|
|
2b101844e9 | ||
|
|
fb036935e6 | ||
|
|
c3e09ddab0 | ||
|
|
36ae07074d | ||
|
|
a4518ce261 | ||
|
|
861ea7ef94 | ||
|
|
541af0b77e | ||
|
|
8c1b51b7e9 | ||
|
|
a71c4fe7ec | ||
|
|
b9d4197b5c | ||
|
|
ce7559087e | ||
|
|
110d87e512 | ||
|
|
cd817714cd | ||
|
|
23db345756 | ||
|
|
16990bd0c3 | ||
|
|
2e3b770bea | ||
|
|
28f62623bf | ||
|
|
c9f3e8cb9f | ||
|
|
6b751eb715 | ||
|
|
5d953da267 | ||
|
|
e87ae31a51 | ||
|
|
9f029b892b | ||
|
|
40a9ced0f7 | ||
|
|
87fbd7e5da | ||
|
|
bca5514a76 | ||
|
|
62ddbb20ac | ||
|
|
9d376961f4 | ||
|
|
e77aa00bcd | ||
|
|
465330820d | ||
|
|
ec9cbba67e | ||
|
|
8961ea6fc9 | ||
|
|
ca7ca9da81 | ||
|
|
2a234f14df | ||
|
|
3ff97bf628 | ||
|
|
416d6f2aef | ||
|
|
ecaafaca69 | ||
|
|
fdb14cd49b | ||
|
|
070e955b89 | ||
|
|
9390ab3c6c | ||
|
|
f67221ee01 | ||
|
|
e09294d9aa | ||
|
|
5675acb71a | ||
|
|
0305a5dcef | ||
|
|
5f03340454 | ||
|
|
0f69ba46c5 | ||
|
|
71ecb6bd4e | ||
|
|
2b712cc808 | ||
|
|
3a68b7b554 | ||
|
|
e41727a1fc | ||
|
|
857d0f3316 | ||
|
|
1f68f62689 | ||
|
|
09b43a8e95 | ||
|
|
b8d765d229 | ||
|
|
06a919ff8d | ||
|
|
ea1e7769b1 | ||
|
|
d5e6f99819 | ||
|
|
a5ab9726dd | ||
|
|
6464fc56d8 | ||
|
|
156e1b928c | ||
|
|
edf17b8100 | ||
|
|
5ab980ce1a | ||
|
|
f1d80fadc4 | ||
|
|
d331d48ca2 | ||
|
|
e740db11ed | ||
|
|
6cff7b3c30 | ||
|
|
88cdd2fcbf | ||
|
|
55896be694 | ||
|
|
c4f17e42e1 | ||
|
|
e8b11bd42a | ||
|
|
a566fb3988 | ||
|
|
de071b37eb | ||
|
|
7b4d408733 | ||
|
|
06478d89ea | ||
|
|
7cedf4c620 | ||
|
|
ef1ea5eeee | ||
|
|
96f222db94 | ||
|
|
065eb9b878 | ||
|
|
ed583d80a3 | ||
|
|
7752db3916 | ||
|
|
e18254e4d8 | ||
|
|
555fc97e8f | ||
|
|
f135adfdba | ||
|
|
72b766f32a | ||
|
|
5278861ccd | ||
|
|
ebcf88070c | ||
|
|
273811bbb8 | ||
|
|
26b53e725d | ||
|
|
5434ec85e2 | ||
|
|
91f438aeff | ||
|
|
501f67ebe7 | ||
|
|
be52fef0bb | ||
|
|
2c0cf40a15 | ||
|
|
8566070d9b | ||
|
|
8f4118a6b8 | ||
|
|
54c53f0b56 | ||
|
|
eafcee8c67 | ||
|
|
5c8e1e0f4a | ||
|
|
76043d5876 | ||
|
|
936f3b0752 | ||
|
|
9380608781 | ||
|
|
544b75bcd5 | ||
|
|
2ae63d2323 | ||
|
|
f24717a3a3 | ||
|
|
f0f5558f3e | ||
|
|
3d8c732258 | ||
|
|
f312237bd5 | ||
|
|
952dde3fef | ||
|
|
e5387f6d06 | ||
|
|
93d6697b4e | ||
|
|
bad2f3415a | ||
|
|
d2f5d97282 | ||
|
|
f8c3fef839 | ||
|
|
14b47a929f | ||
|
|
4b3c0466eb | ||
|
|
041397b137 | ||
|
|
6548c286a6 | ||
|
|
ccf78285b6 | ||
|
|
5e9366fa92 | ||
|
|
0e30cb1439 | ||
|
|
b8d86518e7 | ||
|
|
725399ac7c | ||
|
|
ca18994092 | ||
|
|
caefc438b9 | ||
|
|
0ece8c7dec | ||
|
|
e7c42f3623 | ||
|
|
18faf89b89 | ||
|
|
3a750ae6a2 | ||
|
|
48c614d8c3 | ||
|
|
c37d502c27 | ||
|
|
3fed7a081d | ||
|
|
e75497d03b | ||
|
|
6d1421f1b7 | ||
|
|
dd210be037 | ||
|
|
7c7d6ad548 | ||
|
|
2f471c0e3f | ||
|
|
cff219674f | ||
|
|
e35f9eb75b | ||
|
|
ef1eff2ecb | ||
|
|
244ff61fb3 | ||
|
|
50e2623f19 | ||
|
|
fd0b997c13 | ||
|
|
13d057e4f7 | ||
|
|
d1a6be6ca6 | ||
|
|
edece02c13 | ||
|
|
d68cf4e44d | ||
|
|
9d85a58634 | ||
|
|
c4a4cd0957 | ||
|
|
5af7615054 | ||
|
|
a68a1334fc | ||
|
|
dd3e38355c | ||
|
|
890461bcf8 | ||
|
|
750265cb79 | ||
|
|
004de824ba | ||
|
|
fc43b35628 | ||
|
|
696653f945 | ||
|
|
157dca50e9 | ||
|
|
7d2130b229 | ||
|
|
2cc81211af | ||
|
|
ea9a5b0eb0 | ||
|
|
e60cee6a73 | ||
|
|
a3ee60a464 | ||
|
|
28595cbeb3 | ||
|
|
dbdbf5210e | ||
|
|
a4876f6f14 | ||
|
|
9835ae7e50 | ||
|
|
59f763162c | ||
|
|
a9412b418f | ||
|
|
742c5b76fe | ||
|
|
7a85fc0179 | ||
|
|
73e73ffe58 | ||
|
|
3c5523894d | ||
|
|
4b9c76d3db | ||
|
|
0a694eea8a | ||
|
|
87f48dad79 | ||
|
|
68ef50ca46 | ||
|
|
8b1da33ffe | ||
|
|
9a9b18a3ef | ||
|
|
83f69d89ff | ||
|
|
0bd7d23114 | ||
|
|
9b860a6aa6 | ||
|
|
4812519a4c | ||
|
|
f3ce6ad467 | ||
|
|
78d9b48854 | ||
|
|
b37d0eba04 | ||
|
|
5289b3f54c | ||
|
|
5c08fe0611 | ||
|
|
4bc9b70882 | ||
|
|
3f98f9ff39 | ||
|
|
e78a7bfbf1 | ||
|
|
b8b1412bf8 | ||
|
|
a81d66ace3 | ||
|
|
dcfd72b7e7 | ||
|
|
6467db4a21 | ||
|
|
f2570c97f3 | ||
|
|
98ad518b5d | ||
|
|
7a5e17a345 | ||
|
|
fde257c722 | ||
|
|
97357f082d | ||
|
|
6875bd82fc | ||
|
|
b1f6cad741 | ||
|
|
fb28fec60d | ||
|
|
500130be59 | ||
|
|
101173c87c | ||
|
|
4fb1b0dbd1 | ||
|
|
4270d5e8ec | ||
|
|
fe9f1146ce | ||
|
|
6fce43a122 | ||
|
|
a8a8f9b3e5 | ||
|
|
0dd0125e9f | ||
|
|
d3d97b5924 | ||
|
|
c8462fb50b | ||
|
|
c99ca9edcc | ||
|
|
fc7e96feb9 | ||
|
|
31dd7be296 | ||
|
|
2899373e42 | ||
|
|
7640c3d0ef | ||
|
|
af7772f617 | ||
|
|
a1670caf06 | ||
|
|
61e35b9773 | ||
|
|
e4671ffdb3 | ||
|
|
3035f923cb | ||
|
|
97e83def48 | ||
|
|
ba31deaebf | ||
|
|
bd571dc93d | ||
|
|
c1da8321ac | ||
|
|
0a1972b854 | ||
|
|
96f387e90b | ||
|
|
e034fc1019 | ||
|
|
d539912762 | ||
|
|
85b72af3bb | ||
|
|
3004f4b583 | ||
|
|
78dbd7eacd | ||
|
|
add73e6f16 | ||
|
|
2c5a7d103d | ||
|
|
f8f855d5d2 | ||
|
|
f2c109116c | ||
|
|
41c63fab96 | ||
|
|
de94651fca | ||
|
|
611c468b70 | ||
|
|
fd2207e39e | ||
|
|
10564442e2 | ||
|
|
a44307ca2d | ||
|
|
cfea2ed954 | ||
|
|
62e59605ed | ||
|
|
2644b1b7ac | ||
|
|
6e4c6d0c54 | ||
|
|
2b7d950a9f | ||
|
|
5ece8d67f9 | ||
|
|
8511b67811 | ||
|
|
a0a444e476 | ||
|
|
6fec92926d | ||
|
|
2cbefb261b | ||
|
|
4bebbb158c | ||
|
|
8fe2172c0a | ||
|
|
f52ef2d57e | ||
|
|
3f1c005548 | ||
|
|
821bd2b2d8 | ||
|
|
d8405feab3 | ||
|
|
bd94437c05 | ||
|
|
b290bdb473 | ||
|
|
f8e2af4fd4 | ||
|
|
97c8dfa5b9 | ||
|
|
37cce68e2e | ||
|
|
1f87a71b98 | ||
|
|
5511ea1887 | ||
|
|
e371081a1d | ||
|
|
70816b48e8 | ||
|
|
2c9fc7b4a7 | ||
|
|
3bb4652a49 | ||
|
|
d5f8e01dd8 | ||
|
|
27e0517f9a | ||
|
|
f59aeda28e | ||
|
|
a8a69f8c36 | ||
|
|
4ae6b8328e | ||
|
|
6d4337e4ce | ||
|
|
3df928caf4 | ||
|
|
faeb33fdc0 | ||
|
|
bacdeb20eb | ||
|
|
781c426b2f | ||
|
|
d5c9fb8dec | ||
|
|
78c372b9d8 | ||
|
|
ea31069609 | ||
|
|
3f9a3e8f64 | ||
|
|
2f02715efd | ||
|
|
c94fa64a33 | ||
|
|
e2cb666b3c | ||
|
|
9f24e0145b | ||
|
|
41413471aa | ||
|
|
f8a11166f0 | ||
|
|
27517b3b47 | ||
|
|
834c0d92b4 | ||
|
|
e04215f0e1 | ||
|
|
9fa8d98ae8 | ||
|
|
31621f273b | ||
|
|
41f158dac1 | ||
|
|
e5687f3a7c | ||
|
|
1a5b684e1f | ||
|
|
bf70c487ca | ||
|
|
4e1d34ba77 | ||
|
|
3a75ad61f3 |
116
.github/workflows/build-app-check.yaml
vendored
Normal file
116
.github/workflows/build-app-check.yaml
vendored
Normal file
@@ -0,0 +1,116 @@
|
||||
# --------------------------------------------------------------------------------------------
|
||||
# This file is generated. Do not edit manually
|
||||
# --------------------------------------------------------------------------------------------
|
||||
name: Electron app check build
|
||||
'on':
|
||||
push:
|
||||
tags:
|
||||
- check-[0-9]+-[0-9]+-[0-9]+.[0-9]+
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os:
|
||||
- macos-14
|
||||
- windows-2022
|
||||
- ubuntu-22.04
|
||||
steps:
|
||||
- name: Install python 3.11 (MacOS)
|
||||
if: matrix.os == 'macos-14'
|
||||
run: |
|
||||
brew install python@3.11
|
||||
echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
|
||||
- name: Context
|
||||
env:
|
||||
GITHUB_CONTEXT: ${{ toJson(github) }}
|
||||
run: echo "$GITHUB_CONTEXT"
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
- name: Use Node.js 22.x
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 22.x
|
||||
- name: adjustPackageJson
|
||||
run: |
|
||||
|
||||
node adjustPackageJson --community
|
||||
- name: yarn set timeout
|
||||
run: |
|
||||
|
||||
yarn config set network-timeout 100000
|
||||
- name: yarn install
|
||||
run: |
|
||||
|
||||
yarn install
|
||||
- name: setCurrentVersion
|
||||
run: |
|
||||
|
||||
yarn setCurrentVersion
|
||||
- name: printSecrets
|
||||
run: |
|
||||
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
- name: fillPackagedPlugins
|
||||
run: |
|
||||
|
||||
yarn fillPackagedPlugins
|
||||
- name: Install Snapcraft
|
||||
if: matrix.os == 'ubuntu-22.04'
|
||||
uses: samuelmeuli/action-snapcraft@v1
|
||||
- name: Publish
|
||||
run: |
|
||||
|
||||
yarn run build:app
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GH_TOKEN }}
|
||||
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
|
||||
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
|
||||
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
|
||||
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
|
||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
|
||||
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
|
||||
APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
|
||||
- name: Copy artifacts
|
||||
run: |
|
||||
mkdir artifacts
|
||||
|
||||
cp app/dist/*.deb artifacts/dbgate-check.deb || true
|
||||
cp app/dist/*x86*.AppImage artifacts/dbgate-check.AppImage || true
|
||||
cp app/dist/*arm64*.AppImage artifacts/dbgate-check-arm64.AppImage || true
|
||||
cp app/dist/*armv7l*.AppImage artifacts/dbgate-check-armv7l.AppImage || true
|
||||
cp app/dist/*win*.exe artifacts/dbgate-check.exe || true
|
||||
cp app/dist/*win_x64.zip artifacts/dbgate-windows-check.zip || true
|
||||
cp app/dist/*win_arm64.zip artifacts/dbgate-windows-check-arm64.zip || true
|
||||
cp app/dist/*-mac_universal.dmg artifacts/dbgate-check.dmg || true
|
||||
cp app/dist/*-mac_x64.dmg artifacts/dbgate-check-x64.dmg || true
|
||||
cp app/dist/*-mac_arm64.dmg artifacts/dbgate-check-arm64.dmg || true
|
||||
mv app/dist/*.snap artifacts/dbgate-check.snap || true
|
||||
|
||||
mv app/dist/*.exe artifacts/ || true
|
||||
mv app/dist/*.zip artifacts/ || true
|
||||
mv app/dist/*.tar.gz artifacts/ || true
|
||||
mv app/dist/*.AppImage artifacts/ || true
|
||||
mv app/dist/*.deb artifacts/ || true
|
||||
mv app/dist/*.snap artifacts/ || true
|
||||
mv app/dist/*.dmg artifacts/ || true
|
||||
mv app/dist/*.blockmap artifacts/ || true
|
||||
|
||||
mv app/dist/*.yml artifacts/ || true
|
||||
rm artifacts/builder-debug.yml
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.os }}
|
||||
path: artifacts
|
||||
- name: Print content of notarization-error.log
|
||||
if: failure() && matrix.os == 'macos-14'
|
||||
run: |
|
||||
|
||||
find . -type f -name "notarization-error.log" -exec echo "=== Start of {} ===" \; -exec cat {} \; -exec echo "=== End of {} ===" \;
|
||||
2
.github/workflows/build-app-pro-beta.yaml
vendored
2
.github/workflows/build-app-pro-beta.yaml
vendored
@@ -39,7 +39,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 21048330597124a88fa1b8447e0bc18666eb69c5
|
||||
ref: e75fc4967ca3a4c379aee89d7911c5ec909306ce
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
2
.github/workflows/build-app-pro.yaml
vendored
2
.github/workflows/build-app-pro.yaml
vendored
@@ -39,7 +39,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 21048330597124a88fa1b8447e0bc18666eb69c5
|
||||
ref: e75fc4967ca3a4c379aee89d7911c5ec909306ce
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
2
.github/workflows/build-cloud-pro.yaml
vendored
2
.github/workflows/build-cloud-pro.yaml
vendored
@@ -39,7 +39,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 21048330597124a88fa1b8447e0bc18666eb69c5
|
||||
ref: e75fc4967ca3a4c379aee89d7911c5ec909306ce
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
2
.github/workflows/build-docker-pro.yaml
vendored
2
.github/workflows/build-docker-pro.yaml
vendored
@@ -44,7 +44,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 21048330597124a88fa1b8447e0bc18666eb69c5
|
||||
ref: e75fc4967ca3a4c379aee89d7911c5ec909306ce
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
2
.github/workflows/build-npm-pro.yaml
vendored
2
.github/workflows/build-npm-pro.yaml
vendored
@@ -32,7 +32,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 21048330597124a88fa1b8447e0bc18666eb69c5
|
||||
ref: e75fc4967ca3a4c379aee89d7911c5ec909306ce
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
13
.github/workflows/e2e-pro.yaml
vendored
13
.github/workflows/e2e-pro.yaml
vendored
@@ -26,7 +26,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 21048330597124a88fa1b8447e0bc18666eb69c5
|
||||
ref: e75fc4967ca3a4c379aee89d7911c5ec909306ce
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
@@ -69,6 +69,17 @@ jobs:
|
||||
with:
|
||||
name: screenshots
|
||||
path: screenshots
|
||||
- name: Push E2E screenshots
|
||||
if: ${{ github.ref_name == 'master' }}
|
||||
run: |
|
||||
git config --global user.email "info@dbgate.info"
|
||||
git config --global user.name "GitHub Actions"
|
||||
git clone https://${{ secrets.DIFLOW_GIT_SECRET }}@github.com/dbgate/dbgate-img.git
|
||||
cp ../dbgate-merged/e2e-tests/screenshots/*.png dbgate-img/static/img
|
||||
cd dbgate-img/static/img
|
||||
git add .
|
||||
git commit --amend --no-edit
|
||||
git push --force
|
||||
services:
|
||||
postgres-cypress:
|
||||
image: postgres
|
||||
|
||||
3
.github/workflows/process-templates.yaml
vendored
3
.github/workflows/process-templates.yaml
vendored
@@ -16,6 +16,9 @@ jobs:
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
token: ${{ secrets.WORKFLOW_CHANGE_ACCESS_TOKEN }}
|
||||
- name: git pull
|
||||
run: |
|
||||
git pull
|
||||
- name: Set up Node
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
|
||||
9
.github/workflows/run-tests.yaml
vendored
9
.github/workflows/run-tests.yaml
vendored
@@ -37,6 +37,11 @@ jobs:
|
||||
run: |
|
||||
cd packages/datalib
|
||||
yarn test:ci
|
||||
- name: Tools tests
|
||||
if: always()
|
||||
run: |
|
||||
cd packages/tools
|
||||
yarn test:ci
|
||||
- uses: tanmen/jest-reporter@v1
|
||||
if: always()
|
||||
with:
|
||||
@@ -93,3 +98,7 @@ jobs:
|
||||
image: cassandra:5.0.2
|
||||
ports:
|
||||
- '15942:9042'
|
||||
libsql:
|
||||
image: ghcr.io/tursodatabase/libsql-server:latest
|
||||
ports:
|
||||
- '8080:8080'
|
||||
|
||||
66
CHANGELOG.md
66
CHANGELOG.md
@@ -8,6 +8,70 @@ Builds:
|
||||
- linux - application for linux
|
||||
- win - application for Windows
|
||||
|
||||
## 6.4.2
|
||||
|
||||
- ADDED: Source label to docker container #1105
|
||||
- FIXED: DbGate restart needed to take effect after trigger is created/deleted on mariadb #1112
|
||||
- ADDED: View PostgreSQL query console output #1108
|
||||
- FIXED: Single quote generates MySQL error #1107
|
||||
- ADDED: Ability to limit query result count #1098
|
||||
- CHANGED: Correct processing of bigint columns #1087 #1055 #583
|
||||
- CHANGED: Improved and optimized algorithm of loading redis keys #1062, #1034
|
||||
- FIXED: Fixed loading Redis keys with :: in key name
|
||||
|
||||
## 6.4.0
|
||||
- ADDED: DuckDB support
|
||||
- ADDED: Data deployer (Premium)
|
||||
- ADDED: Compare data between JSON lines file in archive and database table
|
||||
- CHANGED: Data Duplicator => Data Replicator (suitable for update, create and delete data, much more customizable)
|
||||
- REMOVED: Data duplicator GUI (replaced with Data Deployer)
|
||||
- ADDED: Exporting to ZIP file
|
||||
- ADDED: Download SQL and SQLite files
|
||||
- ADDED: Upload SQLite files
|
||||
- ADDED: Upload archive as ZIP folder (Premium)
|
||||
- ADDED: Compress, uncompress archive folder (Premium)
|
||||
- ADDED: Export connections and settings #357
|
||||
- ADDED: Filtering by MongoDB ObjectId works now also without ObjectId(...) wrapper
|
||||
- ADDED: Split queries using blank lines #1089
|
||||
- FIXED: JSON-to-Grid only works if there is no newline #1085
|
||||
- CHANGED: When running multiple commands in script, stop execution after first error #1070
|
||||
- FIXED: Selection rectangle remains visible after closing JSONB edit cell value form #1031
|
||||
- FIXED: Displaying numeric FK column with right alignment #1021
|
||||
- ADDED: Additional arguments for MySQL and PostgreSQL backup #1092
|
||||
- CHANGED: Amazon and Azure installations are not auto-upgraded by default
|
||||
|
||||
## 6.3.3
|
||||
- CHANGED: New administration UI, redesigned administration of users, connections and roles
|
||||
- ADDED: Encrypting passwords in team-premium edition
|
||||
- ADDED: Show scale bar on map #1090
|
||||
- FIXED: Fixed native backup/restore for MySQL+PostgreSQL over SSH tunnel #1092
|
||||
- CHANGED: Column mapping dialog - fixes and improvements for copying from one existing table into another
|
||||
- ADDED: Search in columns in table editor
|
||||
- ADDED: Line Wrap for JSON viewer #768
|
||||
|
||||
### 6.3.2
|
||||
- ADDED: "Use system theme" switch, use changed system theme without restart #1084
|
||||
- ADDED: "Skip SETNAME instruction" option for Redis #1077
|
||||
- FIXED: Clickhouse views are now available even for user with limited permissions #1076
|
||||
- ADDED: Multiple-token search delimited with comma (=OR) in structure search boxes
|
||||
- CHANGED: When filtering columns in data browser, data view shows only filtered columns
|
||||
- ADDED: Advanced settings for diagrams (Premium)
|
||||
- ADDED: Diagrams - zoom with Ctrl+mouse wheel
|
||||
- FIXED: Scrollable diagram exports + scroll by mouse drag
|
||||
- FIXED: Fixed many problems in diagrams when zoom is applied
|
||||
- FIXED: Correctly end connection process after successful/unsuccessful connect
|
||||
|
||||
### 6.3.0
|
||||
- ADDED: Support for libSQL and Turso (Premium)
|
||||
- ADDED: Native backup and restore database for MySQL and PostgreSQL (Premium)
|
||||
- REMOVED: DbGate internal dump export for MySQL (replaced with call of mysqldump)
|
||||
- REMOVED: Import SQL dump with internal DbGate capabilities (replaced by calling of mysql and psql utilities)
|
||||
- FIXED: Many fixes in stream processing (import/export), especially for MongoDB
|
||||
- ADDED: Indicating progress of import/export tasks, better error reporting
|
||||
- CHANGED: #1060 - Changed shortcut for AI assistant
|
||||
- ADDED: /health endpoint with diagnostic info
|
||||
- FIXED: Linux Appimage crash => A JavaScript error occurred in the main process #1065 , #1067
|
||||
|
||||
### 6.2.1
|
||||
- ADDED: Commit/rollback and autocommit in scripts #1039
|
||||
- FIXED: Doesn't import all the records from MongoDB #1044
|
||||
@@ -18,7 +82,7 @@ Builds:
|
||||
- FIXED: Scroll in XML cell view, XML view respect themes
|
||||
- REMOVED: armv7l build for Linux (because of problems with glibc compatibility)
|
||||
- CHANGED: Upgraded to node:22 for docker builds
|
||||
- CHANGED: Upgraded SQLite engine version (better-sqlite3@11.8.1)
|
||||
- CHANGED: Upgraded SQLite engine version
|
||||
|
||||
### 6.2.0
|
||||
- ADDED: Query AI Assistant (Premium)
|
||||
|
||||
14
README.md
14
README.md
@@ -15,10 +15,12 @@ But there are also many advanced features like schema compare, visual query desi
|
||||
DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
|
||||
|
||||
* Try it online - [demo.dbgate.org](https://demo.dbgate.org) - online demo application
|
||||
* **Download** application for Windows, Linux or Mac from [dbgate.org](https://dbgate.org/download/)
|
||||
* **Download** application for Windows, Linux or Mac from [dbgate.io](https://dbgate.io/download/)
|
||||
* Looking for DbGate Community? **Download** from [dbgate.org](https://dbgate.org/download/)
|
||||
* Run web version as [NPM package](https://www.npmjs.com/package/dbgate-serve) or as [docker image](https://hub.docker.com/r/dbgate/dbgate)
|
||||
* Use nodeJs [scripting interface](https://dbgate.org/docs/scripting) ([API documentation](https://dbgate.org/docs/apidoc))
|
||||
* Use nodeJs [scripting interface](https://docs.dbgate.io/scripting) ([API documentation](https://docs.dbgate.io/apidoc))
|
||||
* [Recommend DbGate](https://testimonial.to/dbgate) | [Rate on G2](https://www.g2.com/products/dbgate/reviews)
|
||||
* [Give us feedback](https://dbgate.org/feedback) - it will help us to decide, how to improve DbGate in future
|
||||
|
||||
## Supported databases
|
||||
* MySQL
|
||||
@@ -34,8 +36,8 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
|
||||
* CosmosDB (Premium)
|
||||
* ClickHouse
|
||||
* Apache Cassandra
|
||||
|
||||
<!-- Learn more about DbGate features at the [DbGate website](https://dbgate.org/), or try our online [demo application](https://demo.dbgate.org) -->
|
||||
* libSQL/Turso (Premium)
|
||||
* DuckDB
|
||||
|
||||
|
||||
<a href="https://raw.githubusercontent.com/dbgate/dbgate/master/img/screenshot1.png">
|
||||
@@ -90,7 +92,7 @@ Any contributions are welcome. If you want to contribute without coding, conside
|
||||
* Create issue, if you find problem in app, or you have idea to new feature. If issue already exists, you could leave comment on it, to prioritise most wanted issues
|
||||
* Create some tutorial video on [youtube](https://www.youtube.com/playlist?list=PLCo7KjCVXhr0RfUSjM9wJMsp_ShL1q61A)
|
||||
* Become a backer on [GitHub sponsors](https://github.com/sponsors/dbgate) or [Open collective](https://opencollective.com/dbgate)
|
||||
* Where a small coding is acceptable for you, you could [create plugin](https://dbgate.org/docs/plugin-development). Plugins for new themes can be created actually without JS coding
|
||||
* Where a small coding is acceptable for you, you could [create plugin](https://docs.dbgate.io/plugin-development). Plugins for new themes can be created actually without JS coding
|
||||
|
||||
Thank you!
|
||||
|
||||
@@ -185,4 +187,4 @@ yarn plugin # this compiles plugin and copies it into existing DbGate installati
|
||||
After restarting DbGate, you could use your new plugin from DbGate.
|
||||
|
||||
## Logging
|
||||
DbGate uses [pinomin logger](https://github.com/dbgate/pinomin). So by default, it produces JSON log messages into console and log files. If you want to see formatted logs, please use [pino-pretty](https://github.com/pinojs/pino-pretty) log formatter.
|
||||
DbGate uses [pinomin logger](https://github.com/dbgate/pinomin). So by default, it produces JSON log messages into console and log files. If you want to see formatted logs, please use [pino-pretty](https://github.com/pinojs/pino-pretty) log formatter.
|
||||
|
||||
@@ -357,6 +357,7 @@ function createWindow() {
|
||||
title: isProApp() ? 'DbGate Premium' : 'DbGate',
|
||||
frame: useNativeMenu,
|
||||
titleBarStyle: useNativeMenu ? undefined : 'hidden',
|
||||
backgroundColor: electron.nativeTheme.shouldUseDarkColors ? '#111111' : undefined,
|
||||
...bounds,
|
||||
icon: os.platform() == 'win32' ? 'icon.ico' : path.resolve(__dirname, '../icon.png'),
|
||||
partition: isProApp() ? 'persist:dbgate-premium' : 'persist:dbgate',
|
||||
|
||||
@@ -4,6 +4,7 @@ module.exports = ({ editMenu, isMac }) => [
|
||||
submenu: [
|
||||
{ command: 'new.connection', hideDisabled: true },
|
||||
{ command: 'new.sqliteDatabase', hideDisabled: true },
|
||||
{ command: 'new.duckdbDatabase', hideDisabled: true },
|
||||
{ divider: true },
|
||||
{ command: 'new.query', hideDisabled: true },
|
||||
{ command: 'new.queryDesign', hideDisabled: true },
|
||||
@@ -87,6 +88,9 @@ module.exports = ({ editMenu, isMac }) => [
|
||||
{ command: 'folder.showData', hideDisabled: true },
|
||||
{ command: 'new.gist', hideDisabled: true },
|
||||
{ command: 'app.resetSettings', hideDisabled: true },
|
||||
{ divider: true },
|
||||
{ command: 'app.exportConnections', hideDisabled: true },
|
||||
{ command: 'app.importConnections', hideDisabled: true },
|
||||
],
|
||||
},
|
||||
...(isMac
|
||||
@@ -104,6 +108,7 @@ module.exports = ({ editMenu, isMac }) => [
|
||||
{ command: 'app.openWeb', hideDisabled: true },
|
||||
{ command: 'app.openIssue', hideDisabled: true },
|
||||
{ command: 'app.openSponsoring', hideDisabled: true },
|
||||
{ command: 'app.giveFeedback', hideDisabled: true },
|
||||
{ divider: true },
|
||||
{ command: 'settings.commands', hideDisabled: true },
|
||||
{ command: 'tabs.changelog', hideDisabled: true },
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
|
||||
// this file is generated automatically by script fillNativeModules.js, do not edit it manually
|
||||
const content = {};
|
||||
|
||||
content['better-sqlite3'] = () => require('better-sqlite3');
|
||||
content['oracledb'] = () => require('oracledb');
|
||||
|
||||
|
||||
module.exports = content;
|
||||
@@ -14,12 +14,14 @@ const volatilePackages = [
|
||||
'ioredis',
|
||||
'node-redis-dump2',
|
||||
'better-sqlite3',
|
||||
'libsql',
|
||||
'@azure/cosmos',
|
||||
'@aws-sdk/rds-signer',
|
||||
'activedirectory2',
|
||||
'axios',
|
||||
'ssh2',
|
||||
'wkx',
|
||||
'@duckdb/node-api',
|
||||
];
|
||||
|
||||
module.exports = volatilePackages;
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
FROM node:22
|
||||
|
||||
LABEL org.opencontainers.image.source="https://github.com/dbgate/dbgate"
|
||||
|
||||
RUN apt-get update && apt-get install -y \
|
||||
iputils-ping \
|
||||
iproute2 \
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
FROM node:18-alpine
|
||||
|
||||
LABEL org.opencontainers.image.source="https://github.com/dbgate/dbgate"
|
||||
|
||||
WORKDIR /home/dbgate-docker
|
||||
|
||||
RUN apk --no-cache upgrade \
|
||||
|
||||
@@ -13,16 +13,22 @@ describe('Add connection', () => {
|
||||
it('adds connection', () => {
|
||||
// cy.get('[data-testid=ConnectionList_buttonNewConnection]').click();
|
||||
cy.get('[data-testid=ConnectionDriverFields_connectionType]').select('MySQL');
|
||||
cy.themeshot('connection');
|
||||
cy.themeshot('new-connection');
|
||||
cy.get('[data-testid=ConnectionDriverFields_user]').clear().type('root');
|
||||
cy.get('[data-testid=ConnectionDriverFields_password]').clear().type('Pwd2020Db');
|
||||
cy.get('[data-testid=ConnectionDriverFields_port]').clear().type('16004');
|
||||
cy.get('[data-testid=ConnectionDriverFields_displayName]').clear().type('test-mysql-1');
|
||||
|
||||
// test connection
|
||||
cy.get('[data-testid=ConnectionTab_buttonTest]').click();
|
||||
cy.testid('ConnectionTab_buttonTest').click();
|
||||
cy.contains('Connected:');
|
||||
|
||||
cy.testid('ConnectionTab_tabSshTunnel').click();
|
||||
cy.testid('ConnectionTab_tabControlContent').themeshot('connection-sshtunnel-window', { padding: 50 });
|
||||
|
||||
cy.testid('ConnectionTab_tabSsl').click();
|
||||
cy.testid('ConnectionTab_tabControlContent').themeshot('connection-ssl-window', { padding: 50 });
|
||||
|
||||
// save and connect
|
||||
cy.get('[data-testid=ConnectionTab_buttonSave]').click();
|
||||
cy.get('[data-testid=ConnectionTab_buttonConnect]').click();
|
||||
@@ -106,4 +112,11 @@ describe('Add connection', () => {
|
||||
|
||||
cy.contains('performance_schema');
|
||||
});
|
||||
|
||||
it('export connections', () => {
|
||||
cy.testid('WidgetIconPanel_menu').click();
|
||||
cy.contains('Tools').click();
|
||||
cy.contains('Export connections').click();
|
||||
cy.themeshot('export-connections');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -15,18 +15,23 @@ beforeEach(() => {
|
||||
describe('Data browser data', () => {
|
||||
it('Export window', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').rightclick();
|
||||
cy.contains('MyChinook').click();
|
||||
cy.contains('Album').rightclick();
|
||||
cy.contains('Export').click();
|
||||
cy.contains('Export advanced').click();
|
||||
cy.wait(1000);
|
||||
// cy.testid('SourceTargetConfig_buttonCurrentArchive_target').click();
|
||||
cy.testid('FormTablesSelect_buttonAll_tables').click();
|
||||
// cy.testid('FormTablesSelect_buttonAll_tables').click();
|
||||
// cy.testid('SourceTargetConfig_tablesSelect_source').click();
|
||||
// cy.find('.listContainer').contains('Album').click();
|
||||
// cy.find('.listContainer').contains('Track').click();
|
||||
// cy.wait(4000);
|
||||
// cy.contains('All tables').click();
|
||||
cy.contains('Run').click();
|
||||
cy.contains('Finished job script');
|
||||
cy.contains('Album.csv');
|
||||
cy.testid('WidgetIconPanel_database').click();
|
||||
cy.themeshot('exportcsv');
|
||||
cy.themeshot('configure-export-csv');
|
||||
});
|
||||
|
||||
it('Data archive editor - macros', () => {
|
||||
@@ -37,7 +42,7 @@ describe('Data browser data', () => {
|
||||
cy.contains('Out Of Exile').click({ shiftKey: true });
|
||||
cy.contains('Change text case').click();
|
||||
cy.contains('AUDIOSLAVE');
|
||||
cy.themeshot('freetable');
|
||||
cy.themeshot('data-archive-macros');
|
||||
});
|
||||
|
||||
it('Load table data', () => {
|
||||
@@ -76,11 +81,25 @@ describe('Data browser data', () => {
|
||||
cy.contains('Aerosmith').should('not.exist');
|
||||
});
|
||||
|
||||
it('Data filter', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').click();
|
||||
cy.contains('Album').click();
|
||||
cy.testid('DataFilterControl_input_Title').type('Rock{enter}');
|
||||
cy.contains('Rows: 7');
|
||||
cy.testid('DataFilterControl_input_AlbumId').type('>10xxx{enter}');
|
||||
cy.contains('Rows: 7');
|
||||
cy.testid('DataFilterControl_filtermenu_Title').click();
|
||||
// hide what is not needed
|
||||
cy.testid('WidgetIconPanel_database').click();
|
||||
cy.testid('DataGrid_itemReferences').click();
|
||||
cy.themeshot('data-browser-filter');
|
||||
cy.testid('DataGridCore_button_clearFilters').click();
|
||||
cy.contains('Rows: 347');
|
||||
});
|
||||
|
||||
it('Data grid screenshots', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.window().then(win => {
|
||||
win.__changeCurrentTheme('theme-dark');
|
||||
});
|
||||
|
||||
cy.contains('MyChinook').click();
|
||||
|
||||
@@ -95,16 +114,25 @@ describe('Data browser data', () => {
|
||||
cy.contains('PgChinook').click();
|
||||
cy.contains('customer').click();
|
||||
cy.contains('Leonie').click();
|
||||
cy.themeshot('datagrid');
|
||||
cy.themeshot('common-data-browser');
|
||||
|
||||
cy.contains('invoice').click();
|
||||
cy.contains('invoice_line (invoice_id)').click();
|
||||
cy.themeshot('masterdetail');
|
||||
cy.themeshot('data-browser-master-detail');
|
||||
|
||||
cy.contains('9, Place Louis Barthou').click();
|
||||
cy.contains('Switch to form').click();
|
||||
cy.contains('Switch to table'); // test that we are in form view
|
||||
cy.themeshot('formview');
|
||||
cy.themeshot('data-browser-form-view');
|
||||
});
|
||||
|
||||
it('Column search', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').click();
|
||||
cy.contains('Customer').click();
|
||||
cy.testid('ColumnManager_searchColumns').clear().type('name,id{enter}');
|
||||
cy.contains('Company').should('not.exist');
|
||||
cy.themeshot('data-browser-column-search');
|
||||
});
|
||||
|
||||
it('SQL Gen', () => {
|
||||
@@ -112,7 +140,7 @@ describe('Data browser data', () => {
|
||||
cy.contains('PgChinook').rightclick();
|
||||
cy.contains('SQL Generator').click();
|
||||
cy.contains('Check all').click();
|
||||
cy.themeshot('sqlgen');
|
||||
cy.themeshot('sql-generator');
|
||||
});
|
||||
|
||||
it('Macros in DB', () => {
|
||||
@@ -127,7 +155,7 @@ describe('Data browser data', () => {
|
||||
cy.testid('DataGrid_itemMacros').click();
|
||||
cy.contains('Change text case').click();
|
||||
cy.contains('NIELSEN');
|
||||
cy.themeshot('macros');
|
||||
cy.themeshot('data-browser-macros');
|
||||
});
|
||||
|
||||
it('Perspectives', () => {
|
||||
@@ -143,7 +171,7 @@ describe('Data browser data', () => {
|
||||
// check track is loaded
|
||||
cy.contains('Put The Finger On You');
|
||||
|
||||
cy.themeshot('perspective1');
|
||||
cy.themeshot('perspective-designer');
|
||||
});
|
||||
|
||||
it('Query editor - code completion', () => {
|
||||
@@ -152,11 +180,12 @@ describe('Data browser data', () => {
|
||||
cy.contains('Customer').rightclick();
|
||||
cy.contains('SQL template').click();
|
||||
cy.contains('CREATE TABLE').click();
|
||||
cy.wait(1000);
|
||||
cy.get('body').realPress('PageDown');
|
||||
cy.get('body').realType('select * from Album where Album.');
|
||||
// code completion
|
||||
cy.contains('ArtistId');
|
||||
cy.themeshot('query');
|
||||
cy.themeshot('query-editor-code-completion');
|
||||
});
|
||||
|
||||
it('Query editor - join wizard', () => {
|
||||
@@ -169,7 +198,7 @@ describe('Data browser data', () => {
|
||||
cy.get('body').realPress(['Control', 'j']);
|
||||
// JOIN wizard
|
||||
cy.contains('INNER JOIN Customer ON Invoice.CustomerId = Customer.CustomerId');
|
||||
cy.themeshot('joinwizard');
|
||||
cy.themeshot('query-editor-join-wizard');
|
||||
});
|
||||
|
||||
it('Mongo JSON data view', () => {
|
||||
@@ -186,7 +215,7 @@ describe('Data browser data', () => {
|
||||
cy.testid('WidgetIconPanel_cell-data').click();
|
||||
// test JSON view
|
||||
cy.contains('Country: "Brazil"');
|
||||
cy.themeshot('mongoquery');
|
||||
cy.themeshot('mongo-query-json-view');
|
||||
});
|
||||
|
||||
it('SQL preview', () => {
|
||||
@@ -196,7 +225,7 @@ describe('Data browser data', () => {
|
||||
cy.contains('Show SQL').click();
|
||||
// index should be part of create script
|
||||
cy.contains('CREATE INDEX `IFK_CustomerSupportRepId`');
|
||||
cy.themeshot('sqlpreview');
|
||||
cy.themeshot('sql-preview-create-index');
|
||||
});
|
||||
|
||||
it('Query designer', () => {
|
||||
@@ -205,7 +234,7 @@ describe('Data browser data', () => {
|
||||
cy.testid('WidgetIconPanel_file').click();
|
||||
cy.contains('customer').click();
|
||||
// cy.contains('left join').rightclick();
|
||||
cy.themeshot('querydesigner');
|
||||
cy.themeshot('query-designer');
|
||||
});
|
||||
|
||||
it('Database diagram', () => {
|
||||
@@ -216,7 +245,7 @@ describe('Data browser data', () => {
|
||||
cy.testid('WidgetIconPanel_file').click();
|
||||
// check diagram is shown
|
||||
cy.contains('MediaTypeId');
|
||||
cy.themeshot('diagram');
|
||||
cy.themeshot('database-diagram');
|
||||
});
|
||||
|
||||
it('Charts', () => {
|
||||
@@ -225,7 +254,7 @@ describe('Data browser data', () => {
|
||||
cy.contains('line-chart').click();
|
||||
cy.testid('TabsPanel_buttonSplit').click();
|
||||
cy.testid('WidgetIconPanel_file').click();
|
||||
cy.themeshot('charts');
|
||||
cy.themeshot('view-split-charts');
|
||||
});
|
||||
|
||||
it('Keyboard configuration', () => {
|
||||
@@ -233,7 +262,7 @@ describe('Data browser data', () => {
|
||||
cy.contains('Keyboard shortcuts').click();
|
||||
cy.contains('dataForm.refresh').click();
|
||||
cy.testid('CommandModal_keyboardButton').click();
|
||||
cy.themeshot('keyboard');
|
||||
cy.themeshot('keyboard-configuration');
|
||||
});
|
||||
|
||||
it('Command palette', () => {
|
||||
@@ -244,7 +273,7 @@ describe('Data browser data', () => {
|
||||
// cy.realPress('F1');
|
||||
cy.realPress('PageDown');
|
||||
cy.realPress('PageDown');
|
||||
cy.testid('CommandPalette_main').themeshot('commandpalette', { padding: 50 });
|
||||
cy.testid('CommandPalette_main').themeshot('command-palette', { padding: 50 });
|
||||
});
|
||||
|
||||
it('Show map', () => {
|
||||
@@ -257,7 +286,7 @@ describe('Data browser data', () => {
|
||||
cy.contains('13.9').click({ shiftKey: true });
|
||||
cy.testid('WidgetIconPanel_cell-data').click();
|
||||
cy.wait(2000);
|
||||
cy.themeshot('map');
|
||||
cy.themeshot('cell-map-view');
|
||||
});
|
||||
|
||||
it('Search in connections', () => {
|
||||
@@ -269,7 +298,7 @@ describe('Data browser data', () => {
|
||||
cy.contains('Album').click();
|
||||
cy.testid('SqlObjectList_searchMenuDropDown').click();
|
||||
cy.contains('Column name').click();
|
||||
cy.themeshot('connsearch');
|
||||
cy.themeshot('search-in-connections');
|
||||
});
|
||||
|
||||
it('Plugin tab', () => {
|
||||
@@ -279,7 +308,7 @@ describe('Data browser data', () => {
|
||||
cy.contains('Total white theme');
|
||||
// wait for load logos
|
||||
cy.wait(2000);
|
||||
cy.themeshot('plugin');
|
||||
cy.themeshot('view-plugin-tab');
|
||||
});
|
||||
|
||||
it('Edit mongo data JSON', () => {
|
||||
@@ -306,7 +335,7 @@ describe('Data browser data', () => {
|
||||
cy.contains('Helena').rightclick();
|
||||
cy.contains('Delete document').click();
|
||||
cy.contains('Save').click();
|
||||
cy.themeshot('mongosave');
|
||||
cy.themeshot('save-changes-mongodb');
|
||||
});
|
||||
|
||||
it('Edit mongo data JSON', () => {
|
||||
@@ -320,7 +349,7 @@ describe('Data browser data', () => {
|
||||
cy.testid('ColumnManagerRow_checkbox__id').click();
|
||||
cy.testid('DataFilterControl_input_countries.1').type('EXISTS{enter}');
|
||||
cy.testid('WidgetIconPanel_cell-data').click();
|
||||
cy.themeshot('collection');
|
||||
cy.themeshot('mongodb-json-cell-view');
|
||||
});
|
||||
|
||||
it('Table structure editor', () => {
|
||||
@@ -329,10 +358,10 @@ describe('Data browser data', () => {
|
||||
cy.contains('Customer').rightclick();
|
||||
cy.contains('Open structure').click();
|
||||
cy.contains('varchar(40)');
|
||||
cy.themeshot('structure');
|
||||
cy.themeshot('table-structure-editor');
|
||||
cy.contains('EmployeeId').click();
|
||||
cy.contains('Ref column - Employee');
|
||||
cy.themeshot('fkeditor');
|
||||
cy.themeshot('foreign-key-editor');
|
||||
});
|
||||
|
||||
it('Compare database', () => {
|
||||
@@ -344,13 +373,13 @@ describe('Data browser data', () => {
|
||||
cy.testid('CompareModelTab_gridObjects_Customer_Customer').click();
|
||||
cy.testid('WidgetIconPanel_database').click();
|
||||
cy.testid('CompareModelTab_tabDdl').click();
|
||||
cy.themeshot('dbcompare');
|
||||
cy.themeshot('compare-database-models');
|
||||
cy.contains('Settings').click();
|
||||
cy.testid('CompareModelTab_tabOperations').click();
|
||||
cy.themeshot('comparesettings');
|
||||
cy.themeshot('compare-database-settings');
|
||||
});
|
||||
|
||||
it.skip('Query editor - AI assistant', () => {
|
||||
it('Query editor - AI assistant', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').click();
|
||||
cy.testid('TabsPanel_buttonNewQuery').click();
|
||||
@@ -359,10 +388,10 @@ describe('Data browser data', () => {
|
||||
cy.testid('ConfirmModal_okButton').click();
|
||||
cy.testid('QueryAiAssistant_promptInput').type('album names');
|
||||
cy.testid('QueryAiAssistant_queryFromQuestionButton').click();
|
||||
cy.contains('Use this').click();
|
||||
cy.contains('Use this', { timeout: 10000 }).click();
|
||||
cy.testid('QueryTab_executeButton').click();
|
||||
cy.contains('Balls to the Wall');
|
||||
cy.themeshot('aiassistant');
|
||||
cy.themeshot('ai-assistant');
|
||||
});
|
||||
|
||||
it('Modify data', () => {
|
||||
@@ -388,7 +417,7 @@ describe('Data browser data', () => {
|
||||
cy.contains('INSERT INTO `Employee`');
|
||||
cy.contains("SET `FirstName`='Jane'");
|
||||
cy.contains('DELETE FROM `Employee`');
|
||||
cy.themeshot('modifydata');
|
||||
cy.themeshot('data-browser-save-changes');
|
||||
|
||||
// cy.testid('ConfirmSqlModal_okButton').click();
|
||||
// cy.contains('Cannot delete or update a parent row')
|
||||
@@ -403,4 +432,60 @@ describe('Data browser data', () => {
|
||||
cy.contains('Novak');
|
||||
cy.contains('Rows: 8');
|
||||
});
|
||||
|
||||
it('Export menu', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').click();
|
||||
cy.contains('Album').click();
|
||||
cy.testid('DataFilterControl_input_ArtistId').type('22{enter}');
|
||||
// cy.contains('Presence').rightclick();
|
||||
// cy.contains('Coda').rightclick();
|
||||
// cy.testid('DropDownMenu-container-0').contains('Export').click();
|
||||
cy.contains('Export').click();
|
||||
// cy.wait(1000);
|
||||
cy.themeshot('data-browser-export-menu');
|
||||
});
|
||||
|
||||
it('MySQL native backup', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').rightclick();
|
||||
cy.contains('Create database backup').click();
|
||||
cy.contains('Customer');
|
||||
cy.themeshot('mysql-backup-configuration');
|
||||
});
|
||||
|
||||
it('View table YAML model', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').rightclick();
|
||||
cy.contains('Export DB model').click();
|
||||
cy.testid('ExportDbModelModal_archiveFolder').select('(Create new)');
|
||||
cy.testid('InputTextModal_value').clear().type('test-model');
|
||||
cy.testid('InputTextModal_ok').click();
|
||||
cy.testid('ModalBase_window').themeshot('export-database-model-window', { padding: 50 });
|
||||
cy.testid('ExportDbModelModal_exportButton').click();
|
||||
cy.contains('Album').click();
|
||||
cy.contains('autoIncrement');
|
||||
cy.themeshot('database-model-table-yaml');
|
||||
});
|
||||
|
||||
it('Data replicator', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').click();
|
||||
cy.testid('WidgetIconPanel_archive').click();
|
||||
cy.contains('chinook-archive').rightclick();
|
||||
cy.contains('Data deployer').click();
|
||||
cy.contains('Dry run').click();
|
||||
cy.testid('TableControl_row_2_checkbox').click();
|
||||
cy.testid('TableControl_row_2').click();
|
||||
cy.testid('DataDeploySettings_find_checkbox').click();
|
||||
cy.testid('DataDeploySettings_create_checkbox').click();
|
||||
cy.testid('WidgetIconPanel_archive').click();
|
||||
cy.themeshot('data-deployer');
|
||||
cy.testid('DataDeployTab_importIntoDb').click();
|
||||
cy.testid('ConfirmDataDeployModal_okButton').click();
|
||||
cy.contains('Replicated Customer, inserted 59 rows');
|
||||
cy.contains('Finished job script');
|
||||
cy.testid('DataDeployTab_importIntoDb').click();
|
||||
cy.themeshot('data-replicator');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -45,36 +45,36 @@ describe('Run as portal', () => {
|
||||
cy.get('[data-testid=InputTextModal_ok]').click();
|
||||
});
|
||||
|
||||
it('Import Chinook MySQL', () => {
|
||||
cy.visit('http://localhost:3000');
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.get('[data-testid=DatabaseAppObject_Chinook]').rightclick();
|
||||
cy.contains('Chinook').rightclick();
|
||||
cy.contains('Restore/import SQL dump').click();
|
||||
cy.get('#uploadFileButton').selectFile('data/chinook-mysql.sql', { force: true });
|
||||
cy.wait(500);
|
||||
cy.get('[data-testid=ImportDatabaseDumpModal_runImport]').click();
|
||||
cy.contains('Importing database');
|
||||
cy.contains('Finished job script');
|
||||
cy.get('[data-testid=RunScriptModal_close]').click();
|
||||
cy.contains('Chinook').click();
|
||||
cy.contains('Album');
|
||||
});
|
||||
// it('Import Chinook MySQL', () => {
|
||||
// cy.visit('http://localhost:3000');
|
||||
// cy.contains('MySql-connection').click();
|
||||
// cy.get('[data-testid=DatabaseAppObject_Chinook]').rightclick();
|
||||
// cy.contains('Chinook').rightclick();
|
||||
// cy.contains('Restore/import SQL dump').click();
|
||||
// cy.get('#uploadFileButton').selectFile('data/chinook-mysql.sql', { force: true });
|
||||
// cy.wait(500);
|
||||
// cy.get('[data-testid=ImportDatabaseDumpModal_runImport]').click();
|
||||
// cy.contains('Importing database');
|
||||
// cy.contains('Finished job script');
|
||||
// cy.get('[data-testid=RunScriptModal_close]').click();
|
||||
// cy.contains('Chinook').click();
|
||||
// cy.contains('Album');
|
||||
// });
|
||||
|
||||
it('Import Chinook Postgres', () => {
|
||||
cy.visit('http://localhost:3000');
|
||||
cy.contains('Postgres-connection').click();
|
||||
cy.get('[data-testid=DatabaseAppObject_Chinook]').rightclick();
|
||||
cy.contains('Restore/import SQL dump').click();
|
||||
cy.get('#uploadFileButton').selectFile('data/chinook-postgres.sql', { force: true });
|
||||
cy.wait(500);
|
||||
cy.get('[data-testid=ImportDatabaseDumpModal_runImport]').click();
|
||||
cy.contains('Importing database');
|
||||
cy.contains('Finished job script');
|
||||
cy.get('[data-testid=RunScriptModal_close]').click();
|
||||
cy.contains('Chinook').click();
|
||||
cy.contains('album');
|
||||
});
|
||||
// it('Import Chinook Postgres', () => {
|
||||
// cy.visit('http://localhost:3000');
|
||||
// cy.contains('Postgres-connection').click();
|
||||
// cy.get('[data-testid=DatabaseAppObject_Chinook]').rightclick();
|
||||
// cy.contains('Restore/import SQL dump').click();
|
||||
// cy.get('#uploadFileButton').selectFile('data/chinook-postgres.sql', { force: true });
|
||||
// cy.wait(500);
|
||||
// cy.get('[data-testid=ImportDatabaseDumpModal_runImport]').click();
|
||||
// cy.contains('Importing database');
|
||||
// cy.contains('Finished job script');
|
||||
// cy.get('[data-testid=RunScriptModal_close]').click();
|
||||
// cy.contains('Chinook').click();
|
||||
// cy.contains('album');
|
||||
// });
|
||||
|
||||
it('Open ask pwd connection', () => {
|
||||
cy.visit('http://localhost:3000');
|
||||
@@ -83,7 +83,7 @@ describe('Run as portal', () => {
|
||||
cy.testid('DatabaseLoginModal_password').clear().type('Pwd2020Db');
|
||||
cy.testid('DatabaseLoginModal_connect').click();
|
||||
cy.contains('Chinook').click();
|
||||
cy.contains('album');
|
||||
// cy.contains('album');
|
||||
});
|
||||
|
||||
// it('import chinook DB', () => {
|
||||
|
||||
@@ -11,21 +11,23 @@ describe('Team edition tests', () => {
|
||||
|
||||
cy.testid('AdminMenuWidget_itemConnections').click();
|
||||
cy.contains('New connection').click();
|
||||
cy.contains('New connection').click();
|
||||
cy.contains('New connection').click();
|
||||
cy.testid('ConnectionDriverFields_connectionType').select('PostgreSQL');
|
||||
cy.themeshot('connadmin');
|
||||
cy.themeshot('connection-administration');
|
||||
|
||||
cy.testid('AdminMenuWidget_itemRoles').click();
|
||||
cy.contains('Permissions').click();
|
||||
cy.themeshot('roleadmin');
|
||||
cy.contains('logged-user').click();
|
||||
cy.themeshot('role-administration');
|
||||
|
||||
cy.testid('AdminMenuWidget_itemUsers').click();
|
||||
cy.contains('New user').click();
|
||||
cy.themeshot('user-administration');
|
||||
|
||||
cy.testid('AdminMenuWidget_itemAuthentication').click();
|
||||
cy.contains('Add authentication').click();
|
||||
cy.contains('Use database login').click();
|
||||
cy.contains('Add authentication').click();
|
||||
cy.contains('OAuth 2.0').click();
|
||||
cy.themeshot('authadmin');
|
||||
cy.themeshot('authentication-administration');
|
||||
});
|
||||
|
||||
it('OAuth authentication', () => {
|
||||
@@ -77,6 +79,5 @@ describe('Team edition tests', () => {
|
||||
cy.testid('LoginPage_submitLogin').click();
|
||||
cy.testid('AdminMenuWidget_itemUsers').click();
|
||||
cy.contains('test@example.com');
|
||||
cy.contains('Rows: 1');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -21,8 +21,8 @@ services:
|
||||
build: containers/mysql-ssh-login
|
||||
restart: always
|
||||
ports:
|
||||
- 16005:3306
|
||||
- "16015:22"
|
||||
- 16017:3306
|
||||
- "16012:22"
|
||||
|
||||
mysql-ssh-keyfile:
|
||||
build: containers/mysql-ssh-keyfile
|
||||
|
||||
@@ -195,6 +195,11 @@ async function run() {
|
||||
path.join(baseDir, 'archive-e2etests', 'default')
|
||||
);
|
||||
|
||||
await copyFolder(
|
||||
path.resolve(path.join(__dirname, '../data/chinook-jsonl')),
|
||||
path.join(baseDir, 'archive-e2etests', 'chinook-archive')
|
||||
);
|
||||
|
||||
await copyFolder(
|
||||
path.resolve(path.join(__dirname, '../data/files/query')),
|
||||
path.join(baseDir, 'files-e2etests', 'query')
|
||||
|
||||
0
e2e-tests/screenshots/.gitkeep
Normal file
0
e2e-tests/screenshots/.gitkeep
Normal file
@@ -1 +0,0 @@
|
||||
Folder with screenshots
|
||||
@@ -52,7 +52,7 @@ async function testDatabaseDiff(conn, driver, mangle, createObject = null) {
|
||||
}
|
||||
|
||||
describe('Alter database', () => {
|
||||
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipReferences && !x.skipDropReferences).map(engine => [engine.label, engine]))(
|
||||
'Drop referenced table - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDiff(conn, driver, db => {
|
||||
|
||||
@@ -90,7 +90,7 @@ const TESTED_COLUMNS = ['col_pk', 'col_std', 'col_def', 'col_fk', 'col_ref', 'co
|
||||
// const TESTED_COLUMNS = ['col_std'];
|
||||
// const TESTED_COLUMNS = ['col_ref'];
|
||||
|
||||
function create_engines_columns_source(engines) {
|
||||
function createEnginesColumnsSource(engines) {
|
||||
return _.flatten(
|
||||
engines.map(engine =>
|
||||
TESTED_COLUMNS.filter(col => col.endsWith('_pk') || !engine.skipNonPkRename)
|
||||
@@ -116,45 +116,30 @@ describe('Alter table', () => {
|
||||
})
|
||||
);
|
||||
|
||||
const columnsSource = create_engines_columns_source(engines);
|
||||
const dropableColumnsSrouce = columnsSource.filter(
|
||||
([_label, col, engine]) => !engine.skipPkDrop || !col.endsWith('_pk')
|
||||
test.each(
|
||||
createEnginesColumnsSource(engines.filter(x => !x.skipDropColumn)).filter(
|
||||
([_label, col, engine]) => !engine.skipPkDrop || !col.endsWith('_pk')
|
||||
)
|
||||
)(
|
||||
'Drop column - %s - %s',
|
||||
testWrapper(async (conn, driver, column, engine) => {
|
||||
await testTableDiff(engine, conn, driver, tbl => (tbl.columns = tbl.columns.filter(x => x.columnName != column)));
|
||||
})
|
||||
);
|
||||
const hasDropableColumns = dropableColumnsSrouce.length > 0;
|
||||
|
||||
if (hasDropableColumns) {
|
||||
test.each(dropableColumnsSrouce)(
|
||||
'Drop column - %s - %s',
|
||||
testWrapper(async (conn, driver, column, engine) => {
|
||||
await testTableDiff(
|
||||
engine,
|
||||
conn,
|
||||
driver,
|
||||
tbl => (tbl.columns = tbl.columns.filter(x => x.columnName != column))
|
||||
);
|
||||
})
|
||||
);
|
||||
}
|
||||
test.each(createEnginesColumnsSource(engines.filter(x => !x.skipNullable && !x.skipChangeNullability)))(
|
||||
'Change nullability - %s - %s',
|
||||
testWrapper(async (conn, driver, column, engine) => {
|
||||
await testTableDiff(
|
||||
engine,
|
||||
conn,
|
||||
driver,
|
||||
tbl => (tbl.columns = tbl.columns.map(x => (x.columnName == column ? { ...x, notNull: true } : x)))
|
||||
);
|
||||
})
|
||||
);
|
||||
|
||||
const hasEnginesWithNullable = engines.filter(x => !x.skipNullable).length > 0;
|
||||
|
||||
if (hasEnginesWithNullable) {
|
||||
const source = create_engines_columns_source(engines.filter(x => !x.skipNullable));
|
||||
|
||||
test.each(source)(
|
||||
'Change nullability - %s - %s',
|
||||
testWrapper(async (conn, driver, column, engine) => {
|
||||
await testTableDiff(
|
||||
engine,
|
||||
conn,
|
||||
driver,
|
||||
tbl => (tbl.columns = tbl.columns.map(x => (x.columnName == column ? { ...x, notNull: true } : x)))
|
||||
);
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
test.each(columnsSource)(
|
||||
test.each(createEnginesColumnsSource(engines.filter(x => !x.skipRenameColumn)))(
|
||||
'Rename column - %s - %s',
|
||||
testWrapper(async (conn, driver, column, engine) => {
|
||||
await testTableDiff(
|
||||
@@ -175,37 +160,32 @@ describe('Alter table', () => {
|
||||
})
|
||||
);
|
||||
|
||||
const enginesWithDefault = engines.filter(x => !x.skipDefaultValue);
|
||||
const hasEnginesWithDefault = enginesWithDefault.length > 0;
|
||||
test.each(engines.filter(x => !x.skipDefaultValue).map(engine => [engine.label, engine]))(
|
||||
'Add default value - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testTableDiff(engine, conn, driver, tbl => {
|
||||
tbl.columns.find(x => x.columnName == 'col_std').defaultValue = '123';
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
if (hasEnginesWithDefault) {
|
||||
test.each(enginesWithDefault.map(engine => [engine.label, engine]))(
|
||||
'Add default value - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testTableDiff(engine, conn, driver, tbl => {
|
||||
tbl.columns.find(x => x.columnName == 'col_std').defaultValue = '123';
|
||||
});
|
||||
})
|
||||
);
|
||||
test.each(engines.filter(x => !x.skipDefaultValue).map(engine => [engine.label, engine]))(
|
||||
'Unset default value - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testTableDiff(engine, conn, driver, tbl => {
|
||||
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = undefined;
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
test.each(enginesWithDefault.map(engine => [engine.label, engine]))(
|
||||
'Unset default value - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testTableDiff(engine, conn, driver, tbl => {
|
||||
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = undefined;
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
test.each(enginesWithDefault.map(engine => [engine.label, engine]))(
|
||||
'Change default value - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testTableDiff(engine, conn, driver, tbl => {
|
||||
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = '567';
|
||||
});
|
||||
})
|
||||
);
|
||||
}
|
||||
test.each(engines.filter(x => !x.skipDefaultValue).map(engine => [engine.label, engine]))(
|
||||
'Change default value - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testTableDiff(engine, conn, driver, tbl => {
|
||||
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = '567';
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
// test.each(engines.map(engine => [engine.label, engine]))(
|
||||
// 'Change autoincrement - %s',
|
||||
|
||||
@@ -1,160 +0,0 @@
|
||||
const engines = require('../engines');
|
||||
const stream = require('stream');
|
||||
const { testWrapper } = require('../tools');
|
||||
const dataDuplicator = require('dbgate-api/src/shell/dataDuplicator');
|
||||
const { runCommandOnDriver, runQueryOnDriver } = require('dbgate-tools');
|
||||
|
||||
describe('Data duplicator', () => {
|
||||
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
|
||||
'Insert simple data - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't1',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't2',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
{ columnName: 'valfk', dataType: 'int', notNull: true },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
|
||||
})
|
||||
);
|
||||
|
||||
const gett1 = () =>
|
||||
stream.Readable.from([
|
||||
{ __isStreamHeader: true, __isDynamicStructure: true },
|
||||
{ id: 1, val: 'v1' },
|
||||
{ id: 2, val: 'v2' },
|
||||
{ id: 3, val: 'v3' },
|
||||
]);
|
||||
const gett2 = () =>
|
||||
stream.Readable.from([
|
||||
{ __isStreamHeader: true, __isDynamicStructure: true },
|
||||
{ id: 1, val: 'v1', valfk: 1 },
|
||||
{ id: 2, val: 'v2', valfk: 2 },
|
||||
{ id: 3, val: 'v3', valfk: 3 },
|
||||
]);
|
||||
|
||||
await dataDuplicator({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 't1',
|
||||
operation: 'copy',
|
||||
openStream: gett1,
|
||||
},
|
||||
{
|
||||
name: 't2',
|
||||
operation: 'copy',
|
||||
openStream: gett2,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
await dataDuplicator({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 't1',
|
||||
operation: 'copy',
|
||||
openStream: gett1,
|
||||
},
|
||||
{
|
||||
name: 't2',
|
||||
operation: 'copy',
|
||||
openStream: gett2,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
|
||||
expect(res1.rows[0].cnt.toString()).toEqual('6');
|
||||
|
||||
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
|
||||
expect(res2.rows[0].cnt.toString()).toEqual('6');
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
|
||||
'Skip nullable weak refs - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't1',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't2',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
{ columnName: 'valfk', dataType: 'int', notNull: false },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp => dmp.put("insert into ~t1 (~id, ~val) values (1, 'first')"));
|
||||
|
||||
const gett2 = () =>
|
||||
stream.Readable.from([
|
||||
{ __isStreamHeader: true, __isDynamicStructure: true },
|
||||
{ id: 1, val: 'v1', valfk: 1 },
|
||||
{ id: 2, val: 'v2', valfk: 2 },
|
||||
]);
|
||||
|
||||
await dataDuplicator({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 't2',
|
||||
operation: 'copy',
|
||||
openStream: gett2,
|
||||
},
|
||||
],
|
||||
options: {
|
||||
setNullForUnresolvedNullableRefs: true,
|
||||
},
|
||||
});
|
||||
|
||||
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
|
||||
expect(res1.rows[0].cnt.toString()).toEqual('1');
|
||||
|
||||
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
|
||||
expect(res2.rows[0].cnt.toString()).toEqual('2');
|
||||
|
||||
const res3 = await runQueryOnDriver(conn, driver, dmp =>
|
||||
dmp.put(`select count(*) as ~cnt from ~t2 where ~valfk is not null`)
|
||||
);
|
||||
expect(res3.rows[0].cnt.toString()).toEqual('1');
|
||||
})
|
||||
);
|
||||
});
|
||||
306
integration-tests/__tests__/data-replicator.spec.js
Normal file
306
integration-tests/__tests__/data-replicator.spec.js
Normal file
@@ -0,0 +1,306 @@
|
||||
const engines = require('../engines');
|
||||
const stream = require('stream');
|
||||
const { testWrapper } = require('../tools');
|
||||
const dataReplicator = require('dbgate-api/src/shell/dataReplicator');
|
||||
const deployDb = require('dbgate-api/src/shell/deployDb');
|
||||
const storageModel = require('dbgate-api/src/storageModel');
|
||||
const { runCommandOnDriver, runQueryOnDriver } = require('dbgate-tools');
|
||||
|
||||
describe('Data replicator', () => {
|
||||
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
|
||||
'Insert simple data - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't1',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't2',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
{ columnName: 'valfk', dataType: 'int', notNull: true },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
|
||||
})
|
||||
);
|
||||
|
||||
const gett1 = () =>
|
||||
stream.Readable.from([
|
||||
{ __isStreamHeader: true, __isDynamicStructure: true },
|
||||
{ id: 1, val: 'v1' },
|
||||
{ id: 2, val: 'v2' },
|
||||
{ id: 3, val: 'v3' },
|
||||
]);
|
||||
const gett2 = () =>
|
||||
stream.Readable.from([
|
||||
{ __isStreamHeader: true, __isDynamicStructure: true },
|
||||
{ id: 1, val: 'v1', valfk: 1 },
|
||||
{ id: 2, val: 'v2', valfk: 2 },
|
||||
{ id: 3, val: 'v3', valfk: 3 },
|
||||
]);
|
||||
|
||||
await dataReplicator({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 't1',
|
||||
createNew: true,
|
||||
openStream: gett1,
|
||||
},
|
||||
{
|
||||
name: 't2',
|
||||
createNew: true,
|
||||
openStream: gett2,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
await dataReplicator({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 't1',
|
||||
createNew: true,
|
||||
openStream: gett1,
|
||||
},
|
||||
{
|
||||
name: 't2',
|
||||
createNew: true,
|
||||
openStream: gett2,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
|
||||
expect(res1.rows[0].cnt.toString()).toEqual('6');
|
||||
|
||||
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
|
||||
expect(res2.rows[0].cnt.toString()).toEqual('6');
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
|
||||
'Skip nullable weak refs - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't1',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't2',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
{ columnName: 'valfk', dataType: 'int', notNull: false },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp => dmp.put("insert into ~t1 (~id, ~val) values (1, 'first')"));
|
||||
|
||||
await dataReplicator({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 't2',
|
||||
createNew: true,
|
||||
jsonArray: [
|
||||
{ id: 1, val: 'v1', valfk: 1 },
|
||||
{ id: 2, val: 'v2', valfk: 2 },
|
||||
],
|
||||
},
|
||||
],
|
||||
options: {
|
||||
setNullForUnresolvedNullableRefs: true,
|
||||
},
|
||||
});
|
||||
|
||||
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
|
||||
expect(res1.rows[0].cnt.toString()).toEqual('1');
|
||||
|
||||
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
|
||||
expect(res2.rows[0].cnt.toString()).toEqual('2');
|
||||
|
||||
const res3 = await runQueryOnDriver(conn, driver, dmp =>
|
||||
dmp.put(`select count(*) as ~cnt from ~t2 where ~valfk is not null`)
|
||||
);
|
||||
expect(res3.rows[0].cnt.toString()).toEqual('1');
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
|
||||
'Import storage DB - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await deployDb({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
loadedDbModel: storageModel,
|
||||
targetSchema: engine.defaultSchemaName,
|
||||
});
|
||||
|
||||
async function queryValue(sql) {
|
||||
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(sql));
|
||||
return res1.rows[0].val?.toString();
|
||||
}
|
||||
|
||||
expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('2');
|
||||
expect(
|
||||
await queryValue(
|
||||
`select ~is_disabled as ~val from ~auth_methods where ~amoid='790ca4d2-7f01-4800-955b-d691b890cc50'`
|
||||
)
|
||||
).toBeFalsy();
|
||||
|
||||
const DB1 = {
|
||||
auth_methods: [
|
||||
{ id: -1, name: 'Anonymous', amoid: '790ca4d2-7f01-4800-955b-d691b890cc50', is_disabled: 1 },
|
||||
{ id: 10, name: 'OAuth', amoid: '4269b660-54b6-11ef-a3aa-a9021250bf4b' },
|
||||
],
|
||||
auth_methods_config: [{ id: 20, auth_method_id: 10, key: 'oauthClient', value: 'dbgate' }],
|
||||
config: [
|
||||
{ group: 'admin', key: 'encyptKey', value: '1234' },
|
||||
{ group: 'admin', key: 'adminPasswordState', value: 'set' },
|
||||
{ group: 'license', key: 'licenseKey', value: '123467' },
|
||||
],
|
||||
roles: [
|
||||
{ id: -3, name: 'superadmin' },
|
||||
{ id: -2, name: 'logged-user' },
|
||||
{ id: -1, name: 'anonymous-user' },
|
||||
],
|
||||
role_permissions: [
|
||||
{ id: 14, role_id: -1, permission: 'perm1' },
|
||||
{ id: 29, role_id: -1, permission: 'perm2' },
|
||||
{ id: 1, role_id: -1, permission: 'perm3' },
|
||||
],
|
||||
};
|
||||
|
||||
const DB2 = {
|
||||
auth_methods: [{ id: 10, name: 'My Auth', amoid: 'myauth1' }],
|
||||
auth_methods_config: [{ id: 20, auth_method_id: 10, key: 'my authClient', value: 'mydbgate' }],
|
||||
config: [],
|
||||
roles: [{ id: 1, name: 'test' }],
|
||||
role_permissions: [{ id: 14, role_id: 1, permission: 'permxx' }],
|
||||
};
|
||||
|
||||
function createDuplConfig(db) {
|
||||
return {
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 'auth_methods',
|
||||
findExisting: true,
|
||||
updateExisting: true,
|
||||
createNew: true,
|
||||
matchColumns: ['amoid'],
|
||||
jsonArray: db.auth_methods,
|
||||
},
|
||||
{
|
||||
name: 'auth_methods_config',
|
||||
findExisting: true,
|
||||
updateExisting: true,
|
||||
createNew: true,
|
||||
matchColumns: ['auth_method_id', 'key'],
|
||||
jsonArray: db.auth_methods_config,
|
||||
},
|
||||
{
|
||||
name: 'config',
|
||||
findExisting: true,
|
||||
updateExisting: true,
|
||||
createNew: true,
|
||||
matchColumns: ['group', 'key'],
|
||||
jsonArray: db.config,
|
||||
},
|
||||
{
|
||||
name: 'roles',
|
||||
findExisting: true,
|
||||
updateExisting: true,
|
||||
createNew: true,
|
||||
matchColumns: ['name'],
|
||||
jsonArray: db.roles,
|
||||
},
|
||||
{
|
||||
name: 'role_permissions',
|
||||
findExisting: true,
|
||||
updateExisting: true,
|
||||
createNew: true,
|
||||
deleteMissing: true,
|
||||
matchColumns: ['role_id', 'permission'],
|
||||
deleteRestrictionColumns: ['role_id'],
|
||||
jsonArray: db.role_permissions,
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
await dataReplicator(createDuplConfig(DB1));
|
||||
|
||||
expect(
|
||||
await queryValue(
|
||||
`select ~is_disabled as ~val from ~auth_methods where ~amoid='790ca4d2-7f01-4800-955b-d691b890cc50'`
|
||||
)
|
||||
).toBeTruthy();
|
||||
|
||||
expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('3');
|
||||
expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('1');
|
||||
expect(await queryValue(`select count(*) as ~val from ~config`)).toEqual('3');
|
||||
expect(await queryValue(`select ~value as ~val from ~auth_methods_config`)).toEqual('dbgate');
|
||||
expect(
|
||||
await queryValue(`select ~value as ~val from ~config where ~group='license' and ~key='licenseKey'`)
|
||||
).toEqual('123467');
|
||||
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('3');
|
||||
|
||||
DB1.auth_methods_config[0].value = 'dbgate2';
|
||||
DB1.config[2].value = '567';
|
||||
DB1.role_permissions.splice(2, 1);
|
||||
|
||||
await dataReplicator(createDuplConfig(DB1));
|
||||
expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('1');
|
||||
expect(await queryValue(`select count(*) as ~val from ~config`)).toEqual('3');
|
||||
expect(await queryValue(`select ~value as ~val from ~auth_methods_config`)).toEqual('dbgate2');
|
||||
expect(
|
||||
await queryValue(`select ~value as ~val from ~config where ~group='license' and ~key='licenseKey'`)
|
||||
).toEqual('567');
|
||||
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('2');
|
||||
|
||||
// now add DB2
|
||||
await dataReplicator(createDuplConfig(DB2));
|
||||
|
||||
expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('4');
|
||||
expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('2');
|
||||
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('3');
|
||||
|
||||
DB1.role_permissions.splice(1, 1);
|
||||
await dataReplicator(createDuplConfig(DB1));
|
||||
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('2');
|
||||
}),
|
||||
15 * 1000
|
||||
);
|
||||
});
|
||||
@@ -51,7 +51,8 @@ describe('DB Import/export', () => {
|
||||
await copyStream(reader, writer);
|
||||
|
||||
const res = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
|
||||
expect(res.rows[0].cnt.toString()).toEqual('6');
|
||||
const cnt = parseInt(res.rows[0].cnt.toString());
|
||||
expect(cnt).toEqual(6);
|
||||
})
|
||||
);
|
||||
|
||||
@@ -75,7 +76,8 @@ describe('DB Import/export', () => {
|
||||
await copyStream(reader, writer);
|
||||
|
||||
const res = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
|
||||
expect(res.rows[0].cnt.toString()).toEqual('6');
|
||||
const cnt = parseInt(res.rows[0].cnt.toString());
|
||||
expect(cnt).toEqual(6);
|
||||
})
|
||||
);
|
||||
|
||||
@@ -103,10 +105,12 @@ describe('DB Import/export', () => {
|
||||
await copyStream(reader2, writer2);
|
||||
|
||||
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
|
||||
expect(res1.rows[0].cnt.toString()).toEqual('6');
|
||||
const cnt = parseInt(res1.rows[0].cnt.toString());
|
||||
expect(cnt).toEqual(6);
|
||||
|
||||
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
|
||||
expect(res2.rows[0].cnt.toString()).toEqual('6');
|
||||
const cnt2 = parseInt(res2.rows[0].cnt.toString());
|
||||
expect(cnt2).toEqual(6);
|
||||
})
|
||||
);
|
||||
const enginesWithDumpFile = engines.filter(x => x.dumpFile);
|
||||
@@ -192,7 +196,8 @@ describe('DB Import/export', () => {
|
||||
});
|
||||
|
||||
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~categories`));
|
||||
expect(res1.rows[0].cnt.toString()).toEqual('4');
|
||||
const cnt1 = parseInt(res1.rows[0].cnt.toString());
|
||||
expect(cnt1).toEqual(4);
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
@@ -20,7 +20,11 @@ function flatSourceParameters() {
|
||||
}
|
||||
|
||||
function flatSourceTriggers() {
|
||||
return _.flatten(engines.map(engine => (engine.triggers || []).map(trigger => [engine.label, trigger, engine])));
|
||||
return _.flatten(
|
||||
engines
|
||||
.filter(engine => !engine.skipTriggers)
|
||||
.map(engine => (engine.triggers || []).map(trigger => [engine.label, trigger, engine]))
|
||||
);
|
||||
}
|
||||
|
||||
function flatSourceSchedulerEvents() {
|
||||
|
||||
@@ -183,12 +183,12 @@ describe('Query', () => {
|
||||
{ discardResult: true }
|
||||
);
|
||||
const res = await runQueryOnDriver(conn, driver, dmp => dmp.put('SELECT COUNT(*) AS ~cnt FROM ~t1'));
|
||||
// console.log(res);
|
||||
expect(res.rows[0].cnt == 3).toBeTruthy();
|
||||
const cnt = parseInt(res.rows[0].cnt);
|
||||
expect(cnt).toEqual(3);
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
|
||||
'Select scope identity - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await runCommandOnDriver(conn, driver, dmp =>
|
||||
|
||||
@@ -8,14 +8,14 @@ services:
|
||||
# ports:
|
||||
# - 15000:5432
|
||||
#
|
||||
# mariadb:
|
||||
# image: mariadb
|
||||
# command: --default-authentication-plugin=mysql_native_password
|
||||
# restart: always
|
||||
# ports:
|
||||
# - 15004:3306
|
||||
# environment:
|
||||
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
|
||||
mariadb:
|
||||
image: mariadb
|
||||
command: --default-authentication-plugin=mysql_native_password
|
||||
restart: always
|
||||
ports:
|
||||
- 15004:3306
|
||||
environment:
|
||||
- MYSQL_ROOT_PASSWORD=Pwd2020Db
|
||||
|
||||
# mysql:
|
||||
# image: mysql:8.0.18
|
||||
@@ -25,7 +25,7 @@ services:
|
||||
# - 15001:3306
|
||||
# environment:
|
||||
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
|
||||
#
|
||||
|
||||
|
||||
# cassandradb:
|
||||
# image: cassandra:5.0.2
|
||||
@@ -81,11 +81,11 @@ services:
|
||||
# ports:
|
||||
# - 15006:1521
|
||||
|
||||
libsql:
|
||||
image: ghcr.io/tursodatabase/libsql-server:latest
|
||||
platform: linux/amd64
|
||||
ports:
|
||||
- '8080:8080'
|
||||
- '5002:5001'
|
||||
volumes:
|
||||
- ./data/libsql:/var/lib/sqld
|
||||
# libsql:
|
||||
# image: ghcr.io/tursodatabase/libsql-server:latest
|
||||
# platform: linux/amd64
|
||||
# ports:
|
||||
# - '8080:8080'
|
||||
# - '5002:5001'
|
||||
# volumes:
|
||||
# - ./data/libsql:/var/lib/sqld
|
||||
|
||||
@@ -551,7 +551,7 @@ const clickhouseEngine = {
|
||||
skipUnique: true,
|
||||
skipAutoIncrement: true,
|
||||
skipPkColumnTesting: true,
|
||||
skipDataDuplicator: true,
|
||||
skipDataReplicator: true,
|
||||
skipStringLength: true,
|
||||
alterTableAddColumnSyntax: true,
|
||||
dbSnapshotBySeconds: true,
|
||||
@@ -643,7 +643,7 @@ const cassandraEngine = {
|
||||
skipOrderBy: true,
|
||||
skipAutoIncrement: true,
|
||||
skipDataModifications: true,
|
||||
skipDataDuplicator: true,
|
||||
skipDataReplicator: true,
|
||||
skipDeploy: true,
|
||||
skipImportModel: true,
|
||||
|
||||
@@ -654,6 +654,32 @@ const cassandraEngine = {
|
||||
objects: [],
|
||||
};
|
||||
|
||||
/** @type {import('dbgate-types').TestEngineInfo} */
|
||||
const duckdbEngine = {
|
||||
label: 'DuckDB',
|
||||
generateDbFile: true,
|
||||
defaultSchemaName: 'main',
|
||||
connection: {
|
||||
engine: 'duckdb@dbgate-plugin-duckdb',
|
||||
},
|
||||
objects: [views],
|
||||
skipOnCI: false,
|
||||
skipChangeColumn: true,
|
||||
// skipIndexes: true,
|
||||
skipStringLength: true,
|
||||
skipTriggers: true,
|
||||
skipDataReplicator: true,
|
||||
skipAutoIncrement: true,
|
||||
skipDropColumn: true,
|
||||
skipRenameColumn: true,
|
||||
skipChangeNullability: true,
|
||||
skipDeploy: true,
|
||||
supportRenameSqlObject: true,
|
||||
skipIncrementalAnalysis: true,
|
||||
skipDefaultValue: true,
|
||||
skipDropReferences: true,
|
||||
};
|
||||
|
||||
const enginesOnCi = [
|
||||
// all engines, which would be run on GitHub actions
|
||||
mysqlEngine,
|
||||
@@ -667,6 +693,7 @@ const enginesOnCi = [
|
||||
clickhouseEngine,
|
||||
oracleEngine,
|
||||
cassandraEngine,
|
||||
duckdbEngine,
|
||||
];
|
||||
|
||||
const enginesOnLocal = [
|
||||
@@ -680,8 +707,9 @@ const enginesOnLocal = [
|
||||
// cockroachDbEngine,
|
||||
// clickhouseEngine,
|
||||
// libsqlFileEngine,
|
||||
libsqlWsEngine,
|
||||
// libsqlWsEngine,
|
||||
// oracleEngine,
|
||||
duckdbEngine,
|
||||
];
|
||||
|
||||
/** @type {import('dbgate-types').TestEngineInfo[] & Record<string, import('dbgate-types').TestEngineInfo>} */
|
||||
@@ -696,3 +724,6 @@ module.exports.cockroachDbEngine = cockroachDbEngine;
|
||||
module.exports.clickhouseEngine = clickhouseEngine;
|
||||
module.exports.oracleEngine = oracleEngine;
|
||||
module.exports.cassandraEngine = cassandraEngine;
|
||||
module.exports.libsqlFileEngine = libsqlFileEngine;
|
||||
module.exports.libsqlWsEngine = libsqlWsEngine;
|
||||
module.exports.duckdbEngine = duckdbEngine;
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
"wait:local": "cross-env DEVMODE=1 LOCALTEST=1 node wait.js",
|
||||
"wait:ci": "cross-env DEVMODE=1 CITEST=1 node wait.js",
|
||||
"test:local": "cross-env DEVMODE=1 LOCALTEST=1 jest --testTimeout=5000",
|
||||
"test:local:path": "cross-env DEVMODE=1 LOCALTEST=1 jest --runTestsByPath __tests__/data-duplicator.spec.js",
|
||||
"test:local:path": "cross-env DEVMODE=1 LOCALTEST=1 jest --runTestsByPath __tests__/alter-database.spec.js",
|
||||
"test:ci": "cross-env DEVMODE=1 CITEST=1 jest --runInBand --json --outputFile=result.json --testLocationInResults --detectOpenHandles --forceExit --testTimeout=10000",
|
||||
"run:local": "docker-compose down && docker-compose up -d && yarn wait:local && yarn test:local"
|
||||
},
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"private": true,
|
||||
"version": "6.2.2-packer-beta.4",
|
||||
"version": "6.4.3-beta.3",
|
||||
"name": "dbgate-all",
|
||||
"workspaces": [
|
||||
"packages/*",
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
DEVMODE=1
|
||||
SHELL_SCRIPTING=1
|
||||
# LOCAL_DBGATE_CLOUD=1
|
||||
# LOCAL_DBGATE_IDENTITY=1
|
||||
|
||||
# CLOUD_UPGRADE_FILE=c:\test\upg\upgrade.zip
|
||||
|
||||
|
||||
@@ -22,6 +22,7 @@
|
||||
"dependencies": {
|
||||
"@aws-sdk/rds-signer": "^3.665.0",
|
||||
"activedirectory2": "^2.1.0",
|
||||
"archiver": "^7.0.1",
|
||||
"async-lock": "^1.2.6",
|
||||
"axios": "^0.21.1",
|
||||
"body-parser": "^1.19.0",
|
||||
@@ -30,7 +31,7 @@
|
||||
"cors": "^2.8.5",
|
||||
"cross-env": "^6.0.3",
|
||||
"dbgate-datalib": "^6.0.0-alpha.1",
|
||||
"dbgate-query-splitter": "^4.11.3",
|
||||
"dbgate-query-splitter": "^4.11.5",
|
||||
"dbgate-sqltree": "^6.0.0-alpha.1",
|
||||
"dbgate-tools": "^6.0.0-alpha.1",
|
||||
"debug": "^4.3.4",
|
||||
@@ -62,7 +63,8 @@
|
||||
"simple-encryptor": "^4.0.0",
|
||||
"ssh2": "^1.16.0",
|
||||
"stream-json": "^1.8.0",
|
||||
"tar": "^6.0.5"
|
||||
"tar": "^6.0.5",
|
||||
"yauzl": "^3.2.0"
|
||||
},
|
||||
"scripts": {
|
||||
"start": "env-cmd -f .env node src/index.js --listen-api",
|
||||
@@ -77,10 +79,12 @@
|
||||
"start:azure": "env-cmd -f env/azure/.env node src/index.js --listen-api",
|
||||
"start:e2e:team": "cross-env DEVWEB=1 DEVMODE=1 env-cmd -f ../../e2e-tests/env/team/.env node src/index.js --listen-api",
|
||||
"ts": "tsc",
|
||||
"build": "webpack",
|
||||
"build": "rspack",
|
||||
"build:doc": "jsdoc2md --template doctpl.hbs ./src/shell/* > ../../../dbgate.github.io/_docs/apidoc.md"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@rspack/cli": "^1.3.11",
|
||||
"@rspack/core": "^1.3.11",
|
||||
"@types/fs-extra": "^9.0.11",
|
||||
"@types/lodash": "^4.14.149",
|
||||
"dbgate-types": "^6.0.0-alpha.1",
|
||||
@@ -88,8 +92,6 @@
|
||||
"jsdoc-to-markdown": "^9.0.5",
|
||||
"node-loader": "^1.0.2",
|
||||
"nodemon": "^2.0.2",
|
||||
"typescript": "^4.4.3",
|
||||
"webpack": "^5.91.0",
|
||||
"webpack-cli": "^5.1.4"
|
||||
"typescript": "^4.4.3"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
var webpack = require('webpack');
|
||||
const rspack = require('@rspack/core');
|
||||
var path = require('path');
|
||||
var getBundleExternals = require('../../common/getBundleExternals');
|
||||
|
||||
@@ -2,14 +2,20 @@ const fs = require('fs-extra');
|
||||
const readline = require('readline');
|
||||
const crypto = require('crypto');
|
||||
const path = require('path');
|
||||
const { archivedir, clearArchiveLinksCache, resolveArchiveFolder } = require('../utility/directories');
|
||||
const { archivedir, clearArchiveLinksCache, resolveArchiveFolder, uploadsdir } = require('../utility/directories');
|
||||
const socket = require('../utility/socket');
|
||||
const loadFilesRecursive = require('../utility/loadFilesRecursive');
|
||||
const getJslFileName = require('../utility/getJslFileName');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const { getLogger, extractErrorLogData, jsonLinesParse } = require('dbgate-tools');
|
||||
const dbgateApi = require('../shell');
|
||||
const jsldata = require('./jsldata');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const { isProApp } = require('../utility/checkLicense');
|
||||
const listZipEntries = require('../utility/listZipEntries');
|
||||
const unzipJsonLinesFile = require('../shell/unzipJsonLinesFile');
|
||||
const { zip } = require('lodash');
|
||||
const zipDirectory = require('../shell/zipDirectory');
|
||||
const unzipDirectory = require('../shell/unzipDirectory');
|
||||
|
||||
const logger = getLogger('archive');
|
||||
|
||||
@@ -47,9 +53,31 @@ module.exports = {
|
||||
return folder;
|
||||
},
|
||||
|
||||
async getZipFiles({ file }) {
|
||||
const entries = await listZipEntries(path.join(archivedir(), file));
|
||||
const files = entries.map(entry => {
|
||||
let name = entry.fileName;
|
||||
if (isProApp() && entry.fileName.endsWith('.jsonl')) {
|
||||
name = entry.fileName.slice(0, -6);
|
||||
}
|
||||
return {
|
||||
name: name,
|
||||
label: name,
|
||||
type: isProApp() && entry.fileName.endsWith('.jsonl') ? 'jsonl' : 'other',
|
||||
};
|
||||
});
|
||||
return files;
|
||||
},
|
||||
|
||||
files_meta: true,
|
||||
async files({ folder }) {
|
||||
try {
|
||||
if (folder.endsWith('.zip')) {
|
||||
if (await fs.exists(path.join(archivedir(), folder))) {
|
||||
return this.getZipFiles({ file: folder });
|
||||
}
|
||||
return [];
|
||||
}
|
||||
const dir = resolveArchiveFolder(folder);
|
||||
if (!(await fs.exists(dir))) return [];
|
||||
const files = await loadFilesRecursive(dir); // fs.readdir(dir);
|
||||
@@ -91,6 +119,16 @@ module.exports = {
|
||||
return true;
|
||||
},
|
||||
|
||||
createFile_meta: true,
|
||||
async createFile({ folder, file, fileType, tableInfo }) {
|
||||
await fs.writeFile(
|
||||
path.join(resolveArchiveFolder(folder), `${file}.${fileType}`),
|
||||
tableInfo ? JSON.stringify({ __isStreamHeader: true, tableInfo }) : ''
|
||||
);
|
||||
socket.emitChanged(`archive-files-changed`, { folder });
|
||||
return true;
|
||||
},
|
||||
|
||||
deleteFile_meta: true,
|
||||
async deleteFile({ folder, file, fileType }) {
|
||||
await fs.unlink(path.join(resolveArchiveFolder(folder), `${file}.${fileType}`));
|
||||
@@ -158,7 +196,7 @@ module.exports = {
|
||||
deleteFolder_meta: true,
|
||||
async deleteFolder({ folder }) {
|
||||
if (!folder) throw new Error('Missing folder parameter');
|
||||
if (folder.endsWith('.link')) {
|
||||
if (folder.endsWith('.link') || folder.endsWith('.zip')) {
|
||||
await fs.unlink(path.join(archivedir(), folder));
|
||||
} else {
|
||||
await fs.rmdir(path.join(archivedir(), folder), { recursive: true });
|
||||
@@ -204,9 +242,10 @@ module.exports = {
|
||||
},
|
||||
|
||||
async getNewArchiveFolder({ database }) {
|
||||
const isLink = database.endsWith(database);
|
||||
const name = isLink ? database.slice(0, -5) : database;
|
||||
const suffix = isLink ? '.link' : '';
|
||||
const isLink = database.endsWith('.link');
|
||||
const isZip = database.endsWith('.zip');
|
||||
const name = isLink ? database.slice(0, -5) : isZip ? database.slice(0, -4) : database;
|
||||
const suffix = isLink ? '.link' : isZip ? '.zip' : '';
|
||||
if (!(await fs.exists(path.join(archivedir(), database)))) return database;
|
||||
let index = 2;
|
||||
while (await fs.exists(path.join(archivedir(), `${name}${index}${suffix}`))) {
|
||||
@@ -214,4 +253,58 @@ module.exports = {
|
||||
}
|
||||
return `${name}${index}${suffix}`;
|
||||
},
|
||||
|
||||
getArchiveData_meta: true,
|
||||
async getArchiveData({ folder, file }) {
|
||||
let rows;
|
||||
if (folder.endsWith('.zip')) {
|
||||
rows = await unzipJsonLinesFile(path.join(archivedir(), folder), `${file}.jsonl`);
|
||||
} else {
|
||||
rows = jsonLinesParse(await fs.readFile(path.join(archivedir(), folder, `${file}.jsonl`), { encoding: 'utf8' }));
|
||||
}
|
||||
return rows.filter(x => !x.__isStreamHeader);
|
||||
},
|
||||
|
||||
saveUploadedZip_meta: true,
|
||||
async saveUploadedZip({ filePath, fileName }) {
|
||||
if (!fileName?.endsWith('.zip')) {
|
||||
throw new Error(`${fileName} is not a ZIP file`);
|
||||
}
|
||||
|
||||
const folder = await this.getNewArchiveFolder({ database: fileName });
|
||||
await fs.copyFile(filePath, path.join(archivedir(), folder));
|
||||
socket.emitChanged(`archive-folders-changed`);
|
||||
|
||||
return null;
|
||||
},
|
||||
|
||||
zip_meta: true,
|
||||
async zip({ folder }) {
|
||||
const newFolder = await this.getNewArchiveFolder({ database: folder + '.zip' });
|
||||
await zipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
|
||||
socket.emitChanged(`archive-folders-changed`);
|
||||
|
||||
return null;
|
||||
},
|
||||
|
||||
unzip_meta: true,
|
||||
async unzip({ folder }) {
|
||||
const newFolder = await this.getNewArchiveFolder({ database: folder.slice(0, -4) });
|
||||
await unzipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
|
||||
socket.emitChanged(`archive-folders-changed`);
|
||||
|
||||
return null;
|
||||
},
|
||||
|
||||
getZippedPath_meta: true,
|
||||
async getZippedPath({ folder }) {
|
||||
if (folder.endsWith('.zip')) {
|
||||
return { filePath: path.join(archivedir(), folder) };
|
||||
}
|
||||
|
||||
const uploadName = crypto.randomUUID();
|
||||
const filePath = path.join(uploadsdir(), uploadName);
|
||||
await zipDirectory(path.join(archivedir(), folder), filePath);
|
||||
return { filePath };
|
||||
},
|
||||
};
|
||||
|
||||
@@ -12,6 +12,9 @@ const {
|
||||
getAuthProviderById,
|
||||
} = require('../auth/authProvider');
|
||||
const storage = require('./storage');
|
||||
const { decryptPasswordString } = require('../utility/crypting');
|
||||
const { createDbGateIdentitySession, startCloudTokenChecking } = require('../utility/cloudIntf');
|
||||
const socket = require('../utility/socket');
|
||||
|
||||
const logger = getLogger('auth');
|
||||
|
||||
@@ -43,6 +46,8 @@ function authMiddleware(req, res, next) {
|
||||
'/connections/dblogin-app',
|
||||
'/connections/dblogin-auth',
|
||||
'/connections/dblogin-auth-token',
|
||||
'/health',
|
||||
'/__health',
|
||||
];
|
||||
|
||||
// console.log('********************* getAuthProvider()', getAuthProvider());
|
||||
@@ -94,7 +99,7 @@ module.exports = {
|
||||
let adminPassword = process.env.ADMIN_PASSWORD;
|
||||
if (!adminPassword) {
|
||||
const adminConfig = await storage.readConfig({ group: 'admin' });
|
||||
adminPassword = adminConfig?.adminPassword;
|
||||
adminPassword = decryptPasswordString(adminConfig?.adminPassword);
|
||||
}
|
||||
if (adminPassword && adminPassword == password) {
|
||||
return {
|
||||
@@ -132,5 +137,14 @@ module.exports = {
|
||||
return getAuthProviderById(amoid).redirect(params);
|
||||
},
|
||||
|
||||
createCloudLoginSession_meta: true,
|
||||
async createCloudLoginSession({ client }) {
|
||||
const res = await createDbGateIdentitySession(client);
|
||||
startCloudTokenChecking(res.sid, tokenHolder => {
|
||||
socket.emit('got-cloud-token', tokenHolder);
|
||||
});
|
||||
return res;
|
||||
},
|
||||
|
||||
authMiddleware,
|
||||
};
|
||||
|
||||
250
packages/api/src/controllers/cloud.js
Normal file
250
packages/api/src/controllers/cloud.js
Normal file
@@ -0,0 +1,250 @@
|
||||
const {
|
||||
getPublicCloudFiles,
|
||||
getPublicFileData,
|
||||
refreshPublicFiles,
|
||||
callCloudApiGet,
|
||||
callCloudApiPost,
|
||||
getCloudFolderEncryptor,
|
||||
getCloudContent,
|
||||
putCloudContent,
|
||||
removeCloudCachedConnection,
|
||||
} = require('../utility/cloudIntf');
|
||||
const connections = require('./connections');
|
||||
const socket = require('../utility/socket');
|
||||
const { recryptConnection, getInternalEncryptor, encryptConnection } = require('../utility/crypting');
|
||||
const { getConnectionLabel, getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const logger = getLogger('cloud');
|
||||
const _ = require('lodash');
|
||||
const fs = require('fs-extra');
|
||||
|
||||
module.exports = {
|
||||
publicFiles_meta: true,
|
||||
async publicFiles() {
|
||||
const res = await getPublicCloudFiles();
|
||||
return res;
|
||||
},
|
||||
|
||||
publicFileData_meta: true,
|
||||
async publicFileData({ path }) {
|
||||
const res = getPublicFileData(path);
|
||||
return res;
|
||||
},
|
||||
|
||||
refreshPublicFiles_meta: true,
|
||||
async refreshPublicFiles({ isRefresh }) {
|
||||
await refreshPublicFiles(isRefresh);
|
||||
return {
|
||||
status: 'ok',
|
||||
};
|
||||
},
|
||||
|
||||
contentList_meta: true,
|
||||
async contentList() {
|
||||
try {
|
||||
const resp = await callCloudApiGet('content-list');
|
||||
return resp;
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting cloud content list');
|
||||
|
||||
return [];
|
||||
}
|
||||
},
|
||||
|
||||
getContent_meta: true,
|
||||
async getContent({ folid, cntid }) {
|
||||
const resp = await getCloudContent(folid, cntid);
|
||||
return resp;
|
||||
},
|
||||
|
||||
putContent_meta: true,
|
||||
async putContent({ folid, cntid, content, name, type }) {
|
||||
const resp = await putCloudContent(folid, cntid, content, name, type);
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
createFolder_meta: true,
|
||||
async createFolder({ name }) {
|
||||
const resp = await callCloudApiPost(`folders/create`, { name });
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
grantFolder_meta: true,
|
||||
async grantFolder({ inviteLink }) {
|
||||
const m = inviteLink.match(/^dbgate\:\/\/folder\/v1\/([a-zA-Z0-9]+)\?mode=(read|write|admin)$/);
|
||||
if (!m) {
|
||||
throw new Error('Invalid invite link format');
|
||||
}
|
||||
const invite = m[1];
|
||||
const mode = m[2];
|
||||
|
||||
const resp = await callCloudApiPost(`folders/grant/${mode}`, { invite });
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
renameFolder_meta: true,
|
||||
async renameFolder({ folid, name }) {
|
||||
const resp = await callCloudApiPost(`folders/rename`, { folid, name });
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
deleteFolder_meta: true,
|
||||
async deleteFolder({ folid }) {
|
||||
const resp = await callCloudApiPost(`folders/delete`, { folid });
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
getInviteToken_meta: true,
|
||||
async getInviteToken({ folid, role }) {
|
||||
const resp = await callCloudApiGet(`invite-token/${folid}/${role}`);
|
||||
return resp;
|
||||
},
|
||||
|
||||
refreshContent_meta: true,
|
||||
async refreshContent() {
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return {
|
||||
status: 'ok',
|
||||
};
|
||||
},
|
||||
|
||||
copyConnectionCloud_meta: true,
|
||||
async copyConnectionCloud({ conid, folid }) {
|
||||
const conn = await connections.getCore({ conid });
|
||||
const folderEncryptor = await getCloudFolderEncryptor(folid);
|
||||
const recryptedConn = recryptConnection(conn, getInternalEncryptor(), folderEncryptor);
|
||||
const connToSend = _.omit(recryptedConn, ['_id']);
|
||||
const resp = await putCloudContent(
|
||||
folid,
|
||||
undefined,
|
||||
JSON.stringify(connToSend),
|
||||
getConnectionLabel(conn),
|
||||
'connection'
|
||||
);
|
||||
return resp;
|
||||
},
|
||||
|
||||
saveConnection_meta: true,
|
||||
async saveConnection({ folid, connection }) {
|
||||
let cntid = undefined;
|
||||
if (connection._id) {
|
||||
const m = connection._id.match(/^cloud\:\/\/(.+)\/(.+)$/);
|
||||
if (!m) {
|
||||
throw new Error('Invalid cloud connection ID format');
|
||||
}
|
||||
folid = m[1];
|
||||
cntid = m[2];
|
||||
}
|
||||
|
||||
if (!folid) {
|
||||
throw new Error('Missing cloud folder ID');
|
||||
}
|
||||
|
||||
const folderEncryptor = await getCloudFolderEncryptor(folid);
|
||||
const recryptedConn = encryptConnection(connection, folderEncryptor);
|
||||
const resp = await putCloudContent(
|
||||
folid,
|
||||
cntid,
|
||||
JSON.stringify(recryptedConn),
|
||||
getConnectionLabel(recryptedConn),
|
||||
'connection'
|
||||
);
|
||||
|
||||
if (resp.apiErrorMessage) {
|
||||
return resp;
|
||||
}
|
||||
|
||||
removeCloudCachedConnection(folid, resp.cntid);
|
||||
cntid = resp.cntid;
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return {
|
||||
...recryptedConn,
|
||||
_id: `cloud://${folid}/${cntid}`,
|
||||
};
|
||||
},
|
||||
|
||||
duplicateConnection_meta: true,
|
||||
async duplicateConnection({ conid }) {
|
||||
const m = conid.match(/^cloud\:\/\/(.+)\/(.+)$/);
|
||||
if (!m) {
|
||||
throw new Error('Invalid cloud connection ID format');
|
||||
}
|
||||
const folid = m[1];
|
||||
const cntid = m[2];
|
||||
const respGet = await getCloudContent(folid, cntid);
|
||||
const conn = JSON.parse(respGet.content);
|
||||
const conn2 = {
|
||||
...conn,
|
||||
displayName: getConnectionLabel(conn) + ' - copy',
|
||||
};
|
||||
const respPut = await putCloudContent(folid, undefined, JSON.stringify(conn2), conn2.displayName, 'connection');
|
||||
return respPut;
|
||||
},
|
||||
|
||||
deleteConnection_meta: true,
|
||||
async deleteConnection({ conid }) {
|
||||
const m = conid.match(/^cloud\:\/\/(.+)\/(.+)$/);
|
||||
if (!m) {
|
||||
throw new Error('Invalid cloud connection ID format');
|
||||
}
|
||||
const folid = m[1];
|
||||
const cntid = m[2];
|
||||
const resp = await callCloudApiPost(`content/delete/${folid}/${cntid}`);
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
deleteContent_meta: true,
|
||||
async deleteContent({ folid, cntid }) {
|
||||
const resp = await callCloudApiPost(`content/delete/${folid}/${cntid}`);
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
renameContent_meta: true,
|
||||
async renameContent({ folid, cntid, name }) {
|
||||
const resp = await callCloudApiPost(`content/rename/${folid}/${cntid}`, { name });
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
saveFile_meta: true,
|
||||
async saveFile({ folid, cntid, fileName, data, contentFolder, format }) {
|
||||
const resp = await putCloudContent(folid, cntid, data, fileName, 'file', contentFolder, format);
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
copyFile_meta: true,
|
||||
async copyFile({ folid, cntid, name }) {
|
||||
const resp = await callCloudApiPost(`content/duplicate/${folid}/${cntid}`, { name });
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
exportFile_meta: true,
|
||||
async exportFile({ folid, cntid, filePath }, req) {
|
||||
const { content } = await getCloudContent(folid, cntid);
|
||||
if (!content) {
|
||||
throw new Error('File not found');
|
||||
}
|
||||
await fs.writeFile(filePath, content);
|
||||
return true;
|
||||
},
|
||||
};
|
||||
@@ -19,6 +19,14 @@ const storage = require('./storage');
|
||||
const { getAuthProxyUrl } = require('../utility/authProxy');
|
||||
const { getPublicHardwareFingerprint } = require('../utility/hardwareFingerprint');
|
||||
const { extractErrorMessage } = require('dbgate-tools');
|
||||
const {
|
||||
generateTransportEncryptionKey,
|
||||
createTransportEncryptor,
|
||||
recryptConnection,
|
||||
getInternalEncryptor,
|
||||
recryptUser,
|
||||
recryptObjectPasswordFieldInPlace,
|
||||
} = require('../utility/crypting');
|
||||
|
||||
const lock = new AsyncLock();
|
||||
|
||||
@@ -107,6 +115,7 @@ module.exports = {
|
||||
datadir(),
|
||||
processArgs.runE2eTests ? 'connections-e2etests.jsonl' : 'connections.jsonl'
|
||||
),
|
||||
supportCloudAutoUpgrade: !!process.env.CLOUD_UPGRADE_FILE,
|
||||
...currentVersion,
|
||||
};
|
||||
|
||||
@@ -144,7 +153,7 @@ module.exports = {
|
||||
const res = {
|
||||
...value,
|
||||
};
|
||||
if (value['app.useNativeMenu'] !== true && value['app.useNativeMenu'] !== false) {
|
||||
if (platformInfo.isElectron && value['app.useNativeMenu'] !== true && value['app.useNativeMenu'] !== false) {
|
||||
// res['app.useNativeMenu'] = os.platform() == 'darwin' ? true : false;
|
||||
res['app.useNativeMenu'] = false;
|
||||
}
|
||||
@@ -161,14 +170,19 @@ module.exports = {
|
||||
|
||||
async loadSettings() {
|
||||
try {
|
||||
const settingsText = await fs.readFile(
|
||||
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
|
||||
{ encoding: 'utf-8' }
|
||||
);
|
||||
return {
|
||||
...this.fillMissingSettings(JSON.parse(settingsText)),
|
||||
'other.licenseKey': platformInfo.isElectron ? await this.loadLicenseKey() : undefined,
|
||||
};
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
const settings = await storage.readConfig({ group: 'settings' });
|
||||
return this.fillMissingSettings(settings);
|
||||
} else {
|
||||
const settingsText = await fs.readFile(
|
||||
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
|
||||
{ encoding: 'utf-8' }
|
||||
);
|
||||
return {
|
||||
...this.fillMissingSettings(JSON.parse(settingsText)),
|
||||
'other.licenseKey': platformInfo.isElectron ? await this.loadLicenseKey() : undefined,
|
||||
};
|
||||
}
|
||||
} catch (err) {
|
||||
return this.fillMissingSettings({});
|
||||
}
|
||||
@@ -246,19 +260,31 @@ module.exports = {
|
||||
const res = await lock.acquire('settings', async () => {
|
||||
const currentValue = await this.loadSettings();
|
||||
try {
|
||||
const updated = {
|
||||
...currentValue,
|
||||
..._.omit(values, ['other.licenseKey']),
|
||||
};
|
||||
await fs.writeFile(
|
||||
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
|
||||
JSON.stringify(updated, undefined, 2)
|
||||
);
|
||||
// this.settingsValue = updated;
|
||||
let updated = currentValue;
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
updated = {
|
||||
...currentValue,
|
||||
...values,
|
||||
};
|
||||
await storage.writeConfig({
|
||||
group: 'settings',
|
||||
config: updated,
|
||||
});
|
||||
} else {
|
||||
updated = {
|
||||
...currentValue,
|
||||
..._.omit(values, ['other.licenseKey']),
|
||||
};
|
||||
await fs.writeFile(
|
||||
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
|
||||
JSON.stringify(updated, undefined, 2)
|
||||
);
|
||||
// this.settingsValue = updated;
|
||||
|
||||
if (currentValue['other.licenseKey'] != values['other.licenseKey']) {
|
||||
await this.saveLicenseKey({ licenseKey: values['other.licenseKey'] });
|
||||
socket.emitChanged(`config-changed`);
|
||||
if (currentValue['other.licenseKey'] != values['other.licenseKey']) {
|
||||
await this.saveLicenseKey({ licenseKey: values['other.licenseKey'] });
|
||||
socket.emitChanged(`config-changed`);
|
||||
}
|
||||
}
|
||||
|
||||
socket.emitChanged(`settings-changed`);
|
||||
@@ -272,8 +298,12 @@ module.exports = {
|
||||
|
||||
changelog_meta: true,
|
||||
async changelog() {
|
||||
const resp = await axios.default.get('https://raw.githubusercontent.com/dbgate/dbgate/master/CHANGELOG.md');
|
||||
return resp.data;
|
||||
try {
|
||||
const resp = await axios.default.get('https://raw.githubusercontent.com/dbgate/dbgate/master/CHANGELOG.md');
|
||||
return resp.data;
|
||||
} catch (err) {
|
||||
return ''
|
||||
}
|
||||
},
|
||||
|
||||
checkLicense_meta: true,
|
||||
@@ -281,4 +311,91 @@ module.exports = {
|
||||
const resp = await checkLicenseKey(licenseKey);
|
||||
return resp;
|
||||
},
|
||||
|
||||
recryptDatabaseForExport(db) {
|
||||
const encryptionKey = generateTransportEncryptionKey();
|
||||
const transportEncryptor = createTransportEncryptor(encryptionKey);
|
||||
|
||||
const config = _.cloneDeep([
|
||||
...(db.config?.filter(c => !(c.group == 'admin' && c.key == 'encryptionKey')) || []),
|
||||
{ group: 'admin', key: 'encryptionKey', value: encryptionKey },
|
||||
]);
|
||||
const adminPassword = config.find(c => c.group == 'admin' && c.key == 'adminPassword');
|
||||
recryptObjectPasswordFieldInPlace(adminPassword, 'value', getInternalEncryptor(), transportEncryptor);
|
||||
|
||||
return {
|
||||
...db,
|
||||
connections: db.connections?.map(conn => recryptConnection(conn, getInternalEncryptor(), transportEncryptor)),
|
||||
users: db.users?.map(conn => recryptUser(conn, getInternalEncryptor(), transportEncryptor)),
|
||||
config,
|
||||
};
|
||||
},
|
||||
|
||||
recryptDatabaseFromImport(db) {
|
||||
const encryptionKey = db.config?.find(c => c.group == 'admin' && c.key == 'encryptionKey')?.value;
|
||||
if (!encryptionKey) {
|
||||
throw new Error('Missing encryption key in the database');
|
||||
}
|
||||
const config = _.cloneDeep(db.config || []).filter(c => !(c.group == 'admin' && c.key == 'encryptionKey'));
|
||||
const transportEncryptor = createTransportEncryptor(encryptionKey);
|
||||
|
||||
const adminPassword = config.find(c => c.group == 'admin' && c.key == 'adminPassword');
|
||||
recryptObjectPasswordFieldInPlace(adminPassword, 'value', transportEncryptor, getInternalEncryptor());
|
||||
|
||||
return {
|
||||
...db,
|
||||
connections: db.connections?.map(conn => recryptConnection(conn, transportEncryptor, getInternalEncryptor())),
|
||||
users: db.users?.map(conn => recryptUser(conn, transportEncryptor, getInternalEncryptor())),
|
||||
config,
|
||||
};
|
||||
},
|
||||
|
||||
exportConnectionsAndSettings_meta: true,
|
||||
async exportConnectionsAndSettings(_params, req) {
|
||||
if (!hasPermission(`admin/config`, req)) {
|
||||
throw new Error('Permission denied: admin/config');
|
||||
}
|
||||
|
||||
if (connections.portalConnections) {
|
||||
throw new Error('Not allowed');
|
||||
}
|
||||
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
const db = await storage.getExportedDatabase();
|
||||
return this.recryptDatabaseForExport(db);
|
||||
}
|
||||
|
||||
return this.recryptDatabaseForExport({
|
||||
connections: (await connections.list(null, req)).map((conn, index) => ({
|
||||
..._.omit(conn, ['_id']),
|
||||
id: index + 1,
|
||||
conid: conn._id,
|
||||
})),
|
||||
});
|
||||
},
|
||||
|
||||
importConnectionsAndSettings_meta: true,
|
||||
async importConnectionsAndSettings({ db }, req) {
|
||||
if (!hasPermission(`admin/config`, req)) {
|
||||
throw new Error('Permission denied: admin/config');
|
||||
}
|
||||
|
||||
if (connections.portalConnections) {
|
||||
throw new Error('Not allowed');
|
||||
}
|
||||
|
||||
const recryptedDb = this.recryptDatabaseFromImport(db);
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
await storage.replicateImportedDatabase(recryptedDb);
|
||||
} else {
|
||||
await connections.importFromArray(
|
||||
recryptedDb.connections.map(conn => ({
|
||||
..._.omit(conn, ['conid', 'id']),
|
||||
_id: conn.conid,
|
||||
}))
|
||||
);
|
||||
}
|
||||
|
||||
return true;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -38,6 +38,11 @@ function getNamedArgs() {
|
||||
res.databaseFile = name;
|
||||
res.engine = 'sqlite@dbgate-plugin-sqlite';
|
||||
}
|
||||
|
||||
if (name.endsWith('.duckdb')) {
|
||||
res.databaseFile = name;
|
||||
res.engine = 'duckdb@dbgate-plugin-duckdb';
|
||||
}
|
||||
}
|
||||
}
|
||||
return res;
|
||||
@@ -102,12 +107,21 @@ function getPortalCollections() {
|
||||
trustServerCertificate: process.env[`SSL_TRUST_CERTIFICATE_${id}`],
|
||||
}));
|
||||
|
||||
for (const conn of connections) {
|
||||
for (const prop in process.env) {
|
||||
if (prop.startsWith(`CONNECTION_${conn._id}_`)) {
|
||||
const name = prop.substring(`CONNECTION_${conn._id}_`.length);
|
||||
conn[name] = process.env[prop];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.info({ connections: connections.map(pickSafeConnectionInfo) }, 'Using connections from ENV variables');
|
||||
const noengine = connections.filter(x => !x.engine);
|
||||
if (noengine.length > 0) {
|
||||
logger.warn(
|
||||
{ connections: noengine.map(x => x._id) },
|
||||
'Invalid CONNECTIONS configutation, missing ENGINE for connection ID'
|
||||
'Invalid CONNECTIONS configuration, missing ENGINE for connection ID'
|
||||
);
|
||||
}
|
||||
return connections;
|
||||
@@ -225,6 +239,19 @@ module.exports = {
|
||||
return (await this.datastore.find()).filter(x => connectionHasPermission(x, req));
|
||||
},
|
||||
|
||||
async getUsedEngines() {
|
||||
const storage = require('./storage');
|
||||
|
||||
const storageEngines = await storage.getUsedEngines();
|
||||
if (storageEngines) {
|
||||
return storageEngines;
|
||||
}
|
||||
if (portalConnections) {
|
||||
return _.uniq(_.compact(portalConnections.map(x => x.engine)));
|
||||
}
|
||||
return _.uniq((await this.datastore.find()).map(x => x.engine));
|
||||
},
|
||||
|
||||
test_meta: true,
|
||||
test({ connection, requestDbList = false }) {
|
||||
const subprocess = fork(
|
||||
@@ -307,6 +334,18 @@ module.exports = {
|
||||
return res;
|
||||
},
|
||||
|
||||
importFromArray(list) {
|
||||
this.datastore.transformAll(connections => {
|
||||
const mapped = connections.map(x => {
|
||||
const found = list.find(y => y._id == x._id);
|
||||
if (found) return found;
|
||||
return x;
|
||||
});
|
||||
return [...mapped, ...list.filter(x => !connections.find(y => y._id == x._id))];
|
||||
});
|
||||
socket.emitChanged('connection-list-changed');
|
||||
},
|
||||
|
||||
async checkUnsavedConnectionsLimit() {
|
||||
if (!this.datastore) {
|
||||
return;
|
||||
@@ -384,6 +423,13 @@ module.exports = {
|
||||
return volatile;
|
||||
}
|
||||
|
||||
const cloudMatch = conid.match(/^cloud\:\/\/(.+)\/(.+)$/);
|
||||
if (cloudMatch) {
|
||||
const { loadCachedCloudConnection } = require('../utility/cloudIntf');
|
||||
const conn = await loadCachedCloudConnection(cloudMatch[1], cloudMatch[2]);
|
||||
return conn;
|
||||
}
|
||||
|
||||
const storage = require('./storage');
|
||||
|
||||
const storageConnection = await storage.getConnection({ conid });
|
||||
@@ -426,6 +472,22 @@ module.exports = {
|
||||
return res;
|
||||
},
|
||||
|
||||
newDuckdbDatabase_meta: true,
|
||||
async newDuckdbDatabase({ file }) {
|
||||
const duckdbDir = path.join(filesdir(), 'duckdb');
|
||||
if (!(await fs.exists(duckdbDir))) {
|
||||
await fs.mkdir(duckdbDir);
|
||||
}
|
||||
const databaseFile = path.join(duckdbDir, `${file}.duckdb`);
|
||||
const res = await this.save({
|
||||
engine: 'duckdb@dbgate-plugin-duckdb',
|
||||
databaseFile,
|
||||
singleDatabase: true,
|
||||
defaultDatabase: `${file}.duckdb`,
|
||||
});
|
||||
return res;
|
||||
},
|
||||
|
||||
dbloginWeb_meta: {
|
||||
raw: true,
|
||||
method: 'get',
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
const connections = require('./connections');
|
||||
const runners = require('./runners');
|
||||
const archive = require('./archive');
|
||||
const socket = require('../utility/socket');
|
||||
const { fork } = require('child_process');
|
||||
@@ -36,6 +37,10 @@ const loadModelTransform = require('../utility/loadModelTransform');
|
||||
const exportDbModelSql = require('../utility/exportDbModelSql');
|
||||
const axios = require('axios');
|
||||
const { callTextToSqlApi, callCompleteOnCursorApi, callRefactorSqlQueryApi } = require('../utility/authProxy');
|
||||
const { decryptConnection } = require('../utility/crypting');
|
||||
const { getSshTunnel } = require('../utility/sshTunnel');
|
||||
const sessions = require('./sessions');
|
||||
const jsldata = require('./jsldata');
|
||||
|
||||
const logger = getLogger('databaseConnections');
|
||||
|
||||
@@ -93,10 +98,59 @@ module.exports = {
|
||||
|
||||
handle_ping() {},
|
||||
|
||||
// session event handlers
|
||||
|
||||
handle_info(conid, database, props) {
|
||||
const { sesid, info } = props;
|
||||
sessions.dispatchMessage(sesid, info);
|
||||
},
|
||||
|
||||
handle_done(conid, database, props) {
|
||||
const { sesid } = props;
|
||||
socket.emit(`session-done-${sesid}`);
|
||||
sessions.dispatchMessage(sesid, 'Query execution finished');
|
||||
},
|
||||
|
||||
handle_recordset(conid, database, props) {
|
||||
const { jslid, resultIndex } = props;
|
||||
socket.emit(`session-recordset-${props.sesid}`, { jslid, resultIndex });
|
||||
},
|
||||
|
||||
handle_stats(conid, database, stats) {
|
||||
jsldata.notifyChangedStats(stats);
|
||||
},
|
||||
|
||||
handle_initializeFile(conid, database, props) {
|
||||
const { jslid } = props;
|
||||
socket.emit(`session-initialize-file-${jslid}`);
|
||||
},
|
||||
|
||||
// eval event handler
|
||||
handle_runnerDone(conid, database, props) {
|
||||
const { runid } = props;
|
||||
socket.emit(`runner-done-${runid}`);
|
||||
},
|
||||
|
||||
handle_progress(conid, database, progressData) {
|
||||
const { progressName } = progressData;
|
||||
const { name, runid } = progressName;
|
||||
socket.emit(`runner-progress-${runid}`, { ...progressData, progressName: name });
|
||||
},
|
||||
|
||||
handle_copyStreamError(conid, database, { copyStreamError }) {
|
||||
const { progressName } = copyStreamError;
|
||||
const { runid } = progressName;
|
||||
logger.error(`Error in database connection ${conid}, database ${database}: ${copyStreamError}`);
|
||||
socket.emit(`runner-done-${runid}`);
|
||||
},
|
||||
|
||||
async ensureOpened(conid, database) {
|
||||
const existing = this.opened.find(x => x.conid == conid && x.database == database);
|
||||
if (existing) return existing;
|
||||
const connection = await connections.getCore({ conid });
|
||||
if (!connection) {
|
||||
throw new Error(`databaseConnections: Connection with conid="${conid}" not found`);
|
||||
}
|
||||
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
|
||||
throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
|
||||
}
|
||||
@@ -133,12 +187,23 @@ module.exports = {
|
||||
const { msgtype } = message;
|
||||
if (handleProcessCommunication(message, subprocess)) return;
|
||||
if (newOpened.disconnected) return;
|
||||
this[`handle_${msgtype}`](conid, database, message);
|
||||
const funcName = `handle_${msgtype}`;
|
||||
if (!this[funcName]) {
|
||||
logger.error(`Unknown message type ${msgtype} from subprocess databaseConnectionProcess`);
|
||||
return;
|
||||
}
|
||||
|
||||
this[funcName](conid, database, message);
|
||||
});
|
||||
subprocess.on('exit', () => {
|
||||
if (newOpened.disconnected) return;
|
||||
this.close(conid, database, false);
|
||||
});
|
||||
subprocess.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), 'Error in database connection subprocess');
|
||||
if (newOpened.disconnected) return;
|
||||
this.close(conid, database, false);
|
||||
});
|
||||
|
||||
subprocess.send({
|
||||
msgtype: 'connect',
|
||||
@@ -242,6 +307,12 @@ module.exports = {
|
||||
return this.loadDataCore('loadKeys', { conid, database, root, filter, limit });
|
||||
},
|
||||
|
||||
scanKeys_meta: true,
|
||||
async scanKeys({ conid, database, root, pattern, cursor, count }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
return this.loadDataCore('scanKeys', { conid, database, root, pattern, cursor, count });
|
||||
},
|
||||
|
||||
exportKeys_meta: true,
|
||||
async exportKeys({ conid, database, options }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
@@ -613,4 +684,167 @@ module.exports = {
|
||||
|
||||
return res;
|
||||
},
|
||||
|
||||
async getNativeOpCommandArgs(
|
||||
command,
|
||||
{ conid, database, outputFile, inputFile, options, selectedTables, skippedTables, argsFormat }
|
||||
) {
|
||||
const sourceConnection = await connections.getCore({ conid });
|
||||
const connection = {
|
||||
...decryptConnection(sourceConnection),
|
||||
};
|
||||
const driver = requireEngineDriver(connection);
|
||||
|
||||
if (!connection.port && driver.defaultPort) {
|
||||
connection.port = driver.defaultPort.toString();
|
||||
}
|
||||
|
||||
if (connection.useSshTunnel) {
|
||||
const tunnel = await getSshTunnel(connection);
|
||||
if (tunnel.state == 'error') {
|
||||
throw new Error(tunnel.message);
|
||||
}
|
||||
|
||||
connection.server = tunnel.localHost;
|
||||
connection.port = tunnel.localPort;
|
||||
}
|
||||
|
||||
const settingsValue = await config.getSettings();
|
||||
|
||||
const externalTools = {};
|
||||
for (const pair of Object.entries(settingsValue || {})) {
|
||||
const [name, value] = pair;
|
||||
if (name.startsWith('externalTools.')) {
|
||||
externalTools[name.substring('externalTools.'.length)] = value;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
...(command == 'backup'
|
||||
? driver.backupDatabaseCommand(
|
||||
connection,
|
||||
{ outputFile, database, options, selectedTables, skippedTables, argsFormat },
|
||||
// @ts-ignore
|
||||
externalTools
|
||||
)
|
||||
: driver.restoreDatabaseCommand(
|
||||
connection,
|
||||
{ inputFile, database, options, argsFormat },
|
||||
// @ts-ignore
|
||||
externalTools
|
||||
)),
|
||||
transformMessage: driver.transformNativeCommandMessage
|
||||
? message => driver.transformNativeCommandMessage(message, command)
|
||||
: null,
|
||||
};
|
||||
},
|
||||
|
||||
commandArgsToCommandLine(commandArgs) {
|
||||
const { command, args, stdinFilePath } = commandArgs;
|
||||
let res = `${command} ${args.join(' ')}`;
|
||||
if (stdinFilePath) {
|
||||
res += ` < ${stdinFilePath}`;
|
||||
}
|
||||
return res;
|
||||
},
|
||||
|
||||
nativeBackup_meta: true,
|
||||
async nativeBackup({ conid, database, outputFile, runid, options, selectedTables, skippedTables }) {
|
||||
const commandArgs = await this.getNativeOpCommandArgs('backup', {
|
||||
conid,
|
||||
database,
|
||||
inputFile: undefined,
|
||||
outputFile,
|
||||
options,
|
||||
selectedTables,
|
||||
skippedTables,
|
||||
argsFormat: 'spawn',
|
||||
});
|
||||
|
||||
return runners.nativeRunCore(runid, {
|
||||
...commandArgs,
|
||||
onFinished: () => {
|
||||
socket.emitChanged(`files-changed`, { folder: 'sql' });
|
||||
socket.emitChanged(`all-files-changed`);
|
||||
},
|
||||
});
|
||||
},
|
||||
|
||||
nativeBackupCommand_meta: true,
|
||||
async nativeBackupCommand({ conid, database, outputFile, options, selectedTables, skippedTables }) {
|
||||
const commandArgs = await this.getNativeOpCommandArgs('backup', {
|
||||
conid,
|
||||
database,
|
||||
outputFile,
|
||||
inputFile: undefined,
|
||||
options,
|
||||
selectedTables,
|
||||
skippedTables,
|
||||
argsFormat: 'shell',
|
||||
});
|
||||
|
||||
return {
|
||||
...commandArgs,
|
||||
transformMessage: null,
|
||||
commandLine: this.commandArgsToCommandLine(commandArgs),
|
||||
};
|
||||
},
|
||||
|
||||
nativeRestore_meta: true,
|
||||
async nativeRestore({ conid, database, inputFile, runid }) {
|
||||
const commandArgs = await this.getNativeOpCommandArgs('restore', {
|
||||
conid,
|
||||
database,
|
||||
inputFile,
|
||||
outputFile: undefined,
|
||||
options: undefined,
|
||||
argsFormat: 'spawn',
|
||||
});
|
||||
|
||||
return runners.nativeRunCore(runid, {
|
||||
...commandArgs,
|
||||
onFinished: () => {
|
||||
this.syncModel({ conid, database, isFullRefresh: true });
|
||||
},
|
||||
});
|
||||
},
|
||||
|
||||
nativeRestoreCommand_meta: true,
|
||||
async nativeRestoreCommand({ conid, database, inputFile }) {
|
||||
const commandArgs = await this.getNativeOpCommandArgs('restore', {
|
||||
conid,
|
||||
database,
|
||||
inputFile,
|
||||
outputFile: undefined,
|
||||
options: undefined,
|
||||
argsFormat: 'shell',
|
||||
});
|
||||
|
||||
return {
|
||||
...commandArgs,
|
||||
transformMessage: null,
|
||||
commandLine: this.commandArgsToCommandLine(commandArgs),
|
||||
};
|
||||
},
|
||||
|
||||
executeSessionQuery_meta: true,
|
||||
async executeSessionQuery({ sesid, conid, database, sql }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
logger.info({ sesid, sql }, 'Processing query');
|
||||
sessions.dispatchMessage(sesid, 'Query execution started');
|
||||
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
opened.subprocess.send({ msgtype: 'executeSessionQuery', sql, sesid });
|
||||
|
||||
return { state: 'ok' };
|
||||
},
|
||||
|
||||
evalJsonScript_meta: true,
|
||||
async evalJsonScript({ conid, database, script, runid }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
|
||||
opened.subprocess.send({ msgtype: 'evalJsonScript', script, runid });
|
||||
return { state: 'ok' };
|
||||
},
|
||||
};
|
||||
|
||||
@@ -9,6 +9,9 @@ const scheduler = require('./scheduler');
|
||||
const getDiagramExport = require('../utility/getDiagramExport');
|
||||
const apps = require('./apps');
|
||||
const getMapExport = require('../utility/getMapExport');
|
||||
const dbgateApi = require('../shell');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const logger = getLogger('files');
|
||||
|
||||
function serialize(format, data) {
|
||||
if (format == 'text') return data;
|
||||
@@ -195,8 +198,8 @@ module.exports = {
|
||||
},
|
||||
|
||||
exportDiagram_meta: true,
|
||||
async exportDiagram({ filePath, html, css, themeType, themeClassName }) {
|
||||
await fs.writeFile(filePath, getDiagramExport(html, css, themeType, themeClassName));
|
||||
async exportDiagram({ filePath, html, css, themeType, themeClassName, watermark }) {
|
||||
await fs.writeFile(filePath, getDiagramExport(html, css, themeType, themeClassName, watermark));
|
||||
return true;
|
||||
},
|
||||
|
||||
@@ -219,4 +222,60 @@ module.exports = {
|
||||
return path.join(dir, file);
|
||||
}
|
||||
},
|
||||
|
||||
createZipFromJsons_meta: true,
|
||||
async createZipFromJsons({ db, filePath }) {
|
||||
logger.info(`Creating zip file from JSONS ${filePath}`);
|
||||
await dbgateApi.zipJsonLinesData(db, filePath);
|
||||
return true;
|
||||
},
|
||||
|
||||
getJsonsFromZip_meta: true,
|
||||
async getJsonsFromZip({ filePath }) {
|
||||
const res = await dbgateApi.unzipJsonLinesData(filePath);
|
||||
return res;
|
||||
},
|
||||
|
||||
downloadText_meta: true,
|
||||
async downloadText({ uri }, req) {
|
||||
if (!uri) return null;
|
||||
const filePath = await dbgateApi.download(uri);
|
||||
const text = await fs.readFile(filePath, {
|
||||
encoding: 'utf-8',
|
||||
});
|
||||
return text;
|
||||
},
|
||||
|
||||
saveUploadedFile_meta: true,
|
||||
async saveUploadedFile({ filePath, fileName }) {
|
||||
const FOLDERS = ['sql', 'sqlite'];
|
||||
for (const folder of FOLDERS) {
|
||||
if (fileName.toLowerCase().endsWith('.' + folder)) {
|
||||
logger.info(`Saving ${folder} file ${fileName}`);
|
||||
await fs.copyFile(filePath, path.join(filesdir(), folder, fileName));
|
||||
|
||||
socket.emitChanged(`files-changed`, { folder: folder });
|
||||
socket.emitChanged(`all-files-changed`);
|
||||
return {
|
||||
name: path.basename(filePath),
|
||||
folder: folder,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`${fileName} doesn't have one of supported extensions: ${FOLDERS.join(', ')}`);
|
||||
},
|
||||
|
||||
exportFile_meta: true,
|
||||
async exportFile({ folder, file, filePath }, req) {
|
||||
if (!hasPermission(`files/${folder}/read`, req)) return false;
|
||||
await fs.copyFile(path.join(filesdir(), folder, file), filePath);
|
||||
return true;
|
||||
},
|
||||
|
||||
simpleCopy_meta: true,
|
||||
async simpleCopy({ sourceFilePath, targetFilePath }, req) {
|
||||
await fs.copyFile(sourceFilePath, targetFilePath);
|
||||
return true;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -8,6 +8,8 @@ const getJslFileName = require('../utility/getJslFileName');
|
||||
const JsonLinesDatastore = require('../utility/JsonLinesDatastore');
|
||||
const requirePluginFunction = require('../utility/requirePluginFunction');
|
||||
const socket = require('../utility/socket');
|
||||
const crypto = require('crypto');
|
||||
const dbgateApi = require('../shell');
|
||||
|
||||
function readFirstLine(file) {
|
||||
return new Promise((resolve, reject) => {
|
||||
@@ -293,4 +295,11 @@ module.exports = {
|
||||
})),
|
||||
};
|
||||
},
|
||||
|
||||
downloadJslData_meta: true,
|
||||
async downloadJslData({ uri }) {
|
||||
const jslid = crypto.randomUUID();
|
||||
await dbgateApi.download(uri, { targetFile: getJslFileName(jslid) });
|
||||
return { jslid };
|
||||
},
|
||||
};
|
||||
|
||||
@@ -4,15 +4,17 @@ const path = require('path');
|
||||
const fs = require('fs-extra');
|
||||
const byline = require('byline');
|
||||
const socket = require('../utility/socket');
|
||||
const { fork } = require('child_process');
|
||||
const { fork, spawn } = require('child_process');
|
||||
const { rundir, uploadsdir, pluginsdir, getPluginBackendPath, packagedPluginList } = require('../utility/directories');
|
||||
const {
|
||||
extractShellApiPlugins,
|
||||
extractShellApiFunctionName,
|
||||
compileShellApiFunctionName,
|
||||
jsonScriptToJavascript,
|
||||
getLogger,
|
||||
safeJsonParse,
|
||||
pinoLogRecordToMessageRecord,
|
||||
extractErrorMessage,
|
||||
extractErrorLogData,
|
||||
} = require('dbgate-tools');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
const processArgs = require('../utility/processArgs');
|
||||
@@ -56,7 +58,7 @@ dbgateApi.initializeApiEnvironment();
|
||||
${requirePluginsTemplate(extractShellApiPlugins(functionName, props))}
|
||||
require=null;
|
||||
async function run() {
|
||||
const reader=await ${extractShellApiFunctionName(functionName)}(${JSON.stringify(props)});
|
||||
const reader=await ${compileShellApiFunctionName(functionName)}(${JSON.stringify(props)});
|
||||
const writer=await dbgateApi.collectorWriter({runid: '${runid}'});
|
||||
await dbgateApi.copyStream(reader, writer);
|
||||
}
|
||||
@@ -80,6 +82,7 @@ module.exports = {
|
||||
}
|
||||
: {
|
||||
message,
|
||||
severity: 'info',
|
||||
time: new Date(),
|
||||
};
|
||||
|
||||
@@ -93,9 +96,9 @@ module.exports = {
|
||||
|
||||
handle_ping() {},
|
||||
|
||||
handle_freeData(runid, { freeData }) {
|
||||
handle_dataResult(runid, { dataResult }) {
|
||||
const { resolve } = this.requests[runid];
|
||||
resolve(freeData);
|
||||
resolve(dataResult);
|
||||
delete this.requests[runid];
|
||||
},
|
||||
|
||||
@@ -168,15 +171,17 @@ module.exports = {
|
||||
this.rejectRequest(runid, { message: 'No data returned, maybe input data source is too big' });
|
||||
logger.info({ code, pid: subprocess.pid }, 'Exited process');
|
||||
socket.emit(`runner-done-${runid}`, code);
|
||||
this.opened = this.opened.filter(x => x.runid != runid);
|
||||
});
|
||||
subprocess.on('error', error => {
|
||||
// console.log('... ERROR subprocess', error);
|
||||
this.rejectRequest(runid, { message: error && (error.message || error.toString()) });
|
||||
console.error('... ERROR subprocess', error);
|
||||
this.dispatchMessage({
|
||||
this.dispatchMessage(runid, {
|
||||
severity: 'error',
|
||||
message: error.toString(),
|
||||
});
|
||||
this.opened = this.opened.filter(x => x.runid != runid);
|
||||
});
|
||||
const newOpened = {
|
||||
runid,
|
||||
@@ -192,12 +197,83 @@ module.exports = {
|
||||
return _.pick(newOpened, ['runid']);
|
||||
},
|
||||
|
||||
nativeRunCore(runid, commandArgs) {
|
||||
const { command, args, env, transformMessage, stdinFilePath, onFinished } = commandArgs;
|
||||
const pipeDispatcher = severity => data => {
|
||||
let messageObject = {
|
||||
message: data.toString().trim(),
|
||||
severity,
|
||||
};
|
||||
if (transformMessage) {
|
||||
messageObject = transformMessage(messageObject);
|
||||
}
|
||||
|
||||
if (messageObject) {
|
||||
return this.dispatchMessage(runid, messageObject);
|
||||
}
|
||||
};
|
||||
|
||||
const subprocess = spawn(command, args, { env: { ...process.env, ...env } });
|
||||
|
||||
byline(subprocess.stdout).on('data', pipeDispatcher('info'));
|
||||
byline(subprocess.stderr).on('data', pipeDispatcher('error'));
|
||||
|
||||
subprocess.on('exit', code => {
|
||||
console.log('... EXITED', code);
|
||||
logger.info({ code, pid: subprocess.pid }, 'Exited process');
|
||||
this.dispatchMessage(runid, `Finished external process with code ${code}`);
|
||||
socket.emit(`runner-done-${runid}`, code);
|
||||
if (onFinished) {
|
||||
onFinished();
|
||||
}
|
||||
this.opened = this.opened.filter(x => x.runid != runid);
|
||||
});
|
||||
subprocess.on('spawn', () => {
|
||||
this.dispatchMessage(runid, `Started external process ${command}`);
|
||||
});
|
||||
subprocess.on('error', error => {
|
||||
console.log('... ERROR subprocess', error);
|
||||
this.dispatchMessage(runid, {
|
||||
severity: 'error',
|
||||
message: error.toString(),
|
||||
});
|
||||
if (error['code'] == 'ENOENT') {
|
||||
this.dispatchMessage(runid, {
|
||||
severity: 'error',
|
||||
message: `Command ${command} not found, please install it and configure its location in DbGate settings, Settings/External tools, if ${command} is not in system PATH`,
|
||||
});
|
||||
}
|
||||
socket.emit(`runner-done-${runid}`);
|
||||
this.opened = this.opened.filter(x => x.runid != runid);
|
||||
});
|
||||
|
||||
if (stdinFilePath) {
|
||||
const inputStream = fs.createReadStream(stdinFilePath);
|
||||
inputStream.pipe(subprocess.stdin);
|
||||
|
||||
subprocess.stdin.on('error', err => {
|
||||
this.dispatchMessage(runid, {
|
||||
severity: 'error',
|
||||
message: extractErrorMessage(err),
|
||||
});
|
||||
logger.error(extractErrorLogData(err), 'Caught error on stdin');
|
||||
});
|
||||
}
|
||||
|
||||
const newOpened = {
|
||||
runid,
|
||||
subprocess,
|
||||
};
|
||||
this.opened.push(newOpened);
|
||||
return _.pick(newOpened, ['runid']);
|
||||
},
|
||||
|
||||
start_meta: true,
|
||||
async start({ script }) {
|
||||
const runid = crypto.randomUUID();
|
||||
|
||||
if (script.type == 'json') {
|
||||
const js = jsonScriptToJavascript(script);
|
||||
const js = await jsonScriptToJavascript(script);
|
||||
return this.startCore(runid, scriptTemplate(js, false));
|
||||
}
|
||||
|
||||
@@ -252,4 +328,24 @@ module.exports = {
|
||||
});
|
||||
return promise;
|
||||
},
|
||||
|
||||
scriptResult_meta: true,
|
||||
async scriptResult({ script }) {
|
||||
if (script.type != 'json') {
|
||||
return { errorMessage: 'Only JSON scripts are allowed' };
|
||||
}
|
||||
|
||||
const promise = new Promise(async (resolve, reject) => {
|
||||
const runid = crypto.randomUUID();
|
||||
this.requests[runid] = { resolve, reject, exitOnStreamError: true };
|
||||
const cloned = _.cloneDeepWith(script, node => {
|
||||
if (node?.$replace == 'runid') {
|
||||
return runid;
|
||||
}
|
||||
});
|
||||
const js = await jsonScriptToJavascript(cloned);
|
||||
this.startCore(runid, scriptTemplate(js, false));
|
||||
});
|
||||
return promise;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -52,7 +52,10 @@ module.exports = {
|
||||
if (existing) return existing;
|
||||
const connection = await connections.getCore({ conid });
|
||||
if (!connection) {
|
||||
throw new Error(`Connection with conid="${conid}" not found`);
|
||||
throw new Error(`serverConnections: Connection with conid="${conid}" not found`);
|
||||
}
|
||||
if (connection.singleDatabase) {
|
||||
return null;
|
||||
}
|
||||
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
|
||||
throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
|
||||
@@ -98,6 +101,11 @@ module.exports = {
|
||||
if (newOpened.disconnected) return;
|
||||
this.close(conid, false);
|
||||
});
|
||||
subprocess.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), 'Error in server connection subprocess');
|
||||
if (newOpened.disconnected) return;
|
||||
this.close(conid, false);
|
||||
});
|
||||
subprocess.send({ msgtype: 'connect', ...connection, globalSettings: await config.getSettings() });
|
||||
return newOpened;
|
||||
});
|
||||
@@ -137,14 +145,14 @@ module.exports = {
|
||||
if (conid == '__model') return [];
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid);
|
||||
return opened.databases;
|
||||
return opened?.databases ?? [];
|
||||
},
|
||||
|
||||
version_meta: true,
|
||||
async version({ conid }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid);
|
||||
return opened.version;
|
||||
return opened?.version ?? null;
|
||||
},
|
||||
|
||||
serverStatus_meta: true,
|
||||
@@ -165,6 +173,9 @@ module.exports = {
|
||||
}
|
||||
this.lastPinged[conid] = new Date().getTime();
|
||||
const opened = await this.ensureOpened(conid);
|
||||
if (!opened) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
try {
|
||||
opened.subprocess.send({ msgtype: 'ping' });
|
||||
} catch (err) {
|
||||
@@ -189,6 +200,9 @@ module.exports = {
|
||||
async sendDatabaseOp({ conid, msgtype, name }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid);
|
||||
if (!opened) {
|
||||
return null;
|
||||
}
|
||||
if (opened.connection.isReadOnly) return false;
|
||||
const res = await this.sendRequest(opened, { msgtype, name });
|
||||
if (res.errorMessage) {
|
||||
@@ -228,6 +242,9 @@ module.exports = {
|
||||
async loadDataCore(msgtype, { conid, ...args }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid);
|
||||
if (!opened) {
|
||||
return null;
|
||||
}
|
||||
const res = await this.sendRequest(opened, { msgtype, ...args });
|
||||
if (res.errorMessage) {
|
||||
console.error(res.errorMessage);
|
||||
@@ -249,6 +266,9 @@ module.exports = {
|
||||
async summaryCommand({ conid, command, row }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid);
|
||||
if (!opened) {
|
||||
return null;
|
||||
}
|
||||
if (opened.connection.isReadOnly) return false;
|
||||
return this.loadDataCore('summaryCommand', { conid, command, row });
|
||||
},
|
||||
|
||||
@@ -127,6 +127,9 @@ module.exports = {
|
||||
this.dispatchMessage(sesid, 'Query session closed');
|
||||
socket.emit(`session-closed-${sesid}`);
|
||||
});
|
||||
subprocess.on('error', () => {
|
||||
this.opened = this.opened.filter(x => x.sesid != sesid);
|
||||
});
|
||||
|
||||
subprocess.send({
|
||||
msgtype: 'connect',
|
||||
@@ -138,7 +141,7 @@ module.exports = {
|
||||
},
|
||||
|
||||
executeQuery_meta: true,
|
||||
async executeQuery({ sesid, sql, autoCommit }) {
|
||||
async executeQuery({ sesid, sql, autoCommit, limitRows }) {
|
||||
const session = this.opened.find(x => x.sesid == sesid);
|
||||
if (!session) {
|
||||
throw new Error('Invalid session');
|
||||
@@ -146,7 +149,7 @@ module.exports = {
|
||||
|
||||
logger.info({ sesid, sql }, 'Processing query');
|
||||
this.dispatchMessage(sesid, 'Query execution started');
|
||||
session.subprocess.send({ msgtype: 'executeQuery', sql, autoCommit });
|
||||
session.subprocess.send({ msgtype: 'executeQuery', sql, autoCommit, limitRows });
|
||||
|
||||
return { state: 'ok' };
|
||||
},
|
||||
|
||||
@@ -4,6 +4,10 @@ module.exports = {
|
||||
return null;
|
||||
},
|
||||
|
||||
async getExportedDatabase() {
|
||||
return {};
|
||||
},
|
||||
|
||||
getConnection_meta: true,
|
||||
async getConnection({ conid }) {
|
||||
return null;
|
||||
@@ -28,4 +32,8 @@ module.exports = {
|
||||
},
|
||||
|
||||
startRefreshLicense() {},
|
||||
|
||||
async getUsedEngines() {
|
||||
return null;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
const crypto = require('crypto');
|
||||
const path = require('path');
|
||||
const { uploadsdir, getLogsFilePath } = require('../utility/directories');
|
||||
const { uploadsdir, getLogsFilePath, filesdir } = require('../utility/directories');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const logger = getLogger('uploads');
|
||||
const axios = require('axios');
|
||||
@@ -13,6 +13,7 @@ const serverConnections = require('./serverConnections');
|
||||
const config = require('./config');
|
||||
const gistSecret = require('../gistSecret');
|
||||
const currentVersion = require('../currentVersion');
|
||||
const socket = require('../utility/socket');
|
||||
|
||||
module.exports = {
|
||||
upload_meta: {
|
||||
|
||||
@@ -27,10 +27,11 @@ const plugins = require('./controllers/plugins');
|
||||
const files = require('./controllers/files');
|
||||
const scheduler = require('./controllers/scheduler');
|
||||
const queryHistory = require('./controllers/queryHistory');
|
||||
const cloud = require('./controllers/cloud');
|
||||
const onFinished = require('on-finished');
|
||||
const processArgs = require('./utility/processArgs');
|
||||
|
||||
const { rundir } = require('./utility/directories');
|
||||
const { rundir, filesdir } = require('./utility/directories');
|
||||
const platformInfo = require('./utility/platformInfo');
|
||||
const getExpressPath = require('./utility/getExpressPath');
|
||||
const _ = require('lodash');
|
||||
@@ -38,6 +39,8 @@ const { getLogger } = require('dbgate-tools');
|
||||
const { getDefaultAuthProvider } = require('./auth/authProvider');
|
||||
const startCloudUpgradeTimer = require('./utility/cloudUpgrade');
|
||||
const { isProApp } = require('./utility/checkLicense');
|
||||
const { getHealthStatus, getHealthStatusSprinx } = require('./utility/healthStatus');
|
||||
const { startCloudFiles } = require('./utility/cloudIntf');
|
||||
|
||||
const logger = getLogger('main');
|
||||
|
||||
@@ -117,6 +120,18 @@ function start() {
|
||||
});
|
||||
});
|
||||
|
||||
app.get(getExpressPath('/health'), async function (req, res) {
|
||||
res.setHeader('Content-Type', 'application/json');
|
||||
const health = await getHealthStatus();
|
||||
res.end(JSON.stringify(health, null, 2));
|
||||
});
|
||||
|
||||
app.get(getExpressPath('/__health'), async function (req, res) {
|
||||
res.setHeader('Content-Type', 'application/json');
|
||||
const health = await getHealthStatusSprinx();
|
||||
res.end(JSON.stringify(health, null, 2));
|
||||
});
|
||||
|
||||
app.use(bodyParser.json({ limit: '50mb' }));
|
||||
|
||||
app.use(
|
||||
@@ -133,6 +148,7 @@ function start() {
|
||||
// }
|
||||
|
||||
app.use(getExpressPath('/runners/data'), express.static(rundir()));
|
||||
app.use(getExpressPath('/files/data'), express.static(filesdir()));
|
||||
|
||||
if (platformInfo.isDocker) {
|
||||
const port = process.env.PORT || 3000;
|
||||
@@ -186,6 +202,8 @@ function start() {
|
||||
if (process.env.CLOUD_UPGRADE_FILE) {
|
||||
startCloudUpgradeTimer();
|
||||
}
|
||||
|
||||
startCloudFiles();
|
||||
}
|
||||
|
||||
function useAllControllers(app, electron) {
|
||||
@@ -206,6 +224,7 @@ function useAllControllers(app, electron) {
|
||||
useController(app, electron, '/query-history', queryHistory);
|
||||
useController(app, electron, '/apps', apps);
|
||||
useController(app, electron, '/auth', auth);
|
||||
useController(app, electron, '/cloud', cloud);
|
||||
}
|
||||
|
||||
function setElectronSender(electronSender) {
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
|
||||
// this file is generated automatically by script fillNativeModules.js, do not edit it manually
|
||||
const content = {};
|
||||
|
||||
content['better-sqlite3'] = () => require('better-sqlite3');
|
||||
content['oracledb'] = () => require('oracledb');
|
||||
|
||||
|
||||
module.exports = content;
|
||||
@@ -4,6 +4,8 @@ const { connectUtility } = require('../utility/connectUtility');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
const { pickSafeConnectionInfo } = require('../utility/crypting');
|
||||
const _ = require('lodash');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const logger = getLogger('connectProcess');
|
||||
|
||||
const formatErrorDetail = (e, connection) => `${e.stack}
|
||||
|
||||
@@ -23,12 +25,15 @@ function start() {
|
||||
try {
|
||||
const driver = requireEngineDriver(connection);
|
||||
const dbhan = await connectUtility(driver, connection, 'app');
|
||||
const res = await driver.getVersion(dbhan);
|
||||
let version = {
|
||||
version: 'Unknown',
|
||||
};
|
||||
version = await driver.getVersion(dbhan);
|
||||
let databases = undefined;
|
||||
if (requestDbList) {
|
||||
databases = await driver.listDatabases(dbhan);
|
||||
}
|
||||
process.send({ msgtype: 'connected', ...res, databases });
|
||||
process.send({ msgtype: 'connected', ...version, databases });
|
||||
await driver.close(dbhan);
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
@@ -38,6 +43,8 @@ function start() {
|
||||
detail: formatErrorDetail(e, connection),
|
||||
});
|
||||
}
|
||||
|
||||
process.exit(0);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -9,13 +9,22 @@ const {
|
||||
dbNameLogCategory,
|
||||
extractErrorMessage,
|
||||
extractErrorLogData,
|
||||
ScriptWriterEval,
|
||||
SqlGenerator,
|
||||
playJsonScriptWriter,
|
||||
serializeJsTypesForJsonStringify,
|
||||
} = require('dbgate-tools');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const { connectUtility } = require('../utility/connectUtility');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
const { SqlGenerator } = require('dbgate-tools');
|
||||
const generateDeploySql = require('../shell/generateDeploySql');
|
||||
const { dumpSqlSelect } = require('dbgate-sqltree');
|
||||
const { allowExecuteCustomScript, handleQueryStream } = require('../utility/handleQueryStream');
|
||||
const dbgateApi = require('../shell');
|
||||
const requirePlugin = require('../shell/requirePlugin');
|
||||
const path = require('path');
|
||||
const { rundir } = require('../utility/directories');
|
||||
const fs = require('fs-extra');
|
||||
|
||||
const logger = getLogger('dbconnProcess');
|
||||
|
||||
@@ -120,10 +129,15 @@ function setStatusName(name) {
|
||||
|
||||
async function readVersion() {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
const version = await driver.getVersion(dbhan);
|
||||
logger.debug(`Got server version: ${version.version}`);
|
||||
process.send({ msgtype: 'version', version });
|
||||
serverVersion = version;
|
||||
try {
|
||||
const version = await driver.getVersion(dbhan);
|
||||
logger.debug(`Got server version: ${version.version}`);
|
||||
serverVersion = version;
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting DB server version');
|
||||
serverVersion = { version: 'Unknown' };
|
||||
}
|
||||
process.send({ msgtype: 'version', version: serverVersion });
|
||||
}
|
||||
|
||||
async function handleConnect({ connection, structure, globalSettings }) {
|
||||
@@ -219,7 +233,7 @@ async function handleQueryData({ msgid, sql, range }, skipReadonlyCheck = false)
|
||||
try {
|
||||
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
|
||||
const res = await driver.query(dbhan, sql, { range });
|
||||
process.send({ msgtype: 'response', msgid, ...res });
|
||||
process.send({ msgtype: 'response', msgid, ...serializeJsTypesForJsonStringify(res) });
|
||||
} catch (err) {
|
||||
process.send({
|
||||
msgtype: 'response',
|
||||
@@ -241,7 +255,7 @@ async function handleDriverDataCore(msgid, callMethod, { logName }) {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
const result = await callMethod(driver);
|
||||
process.send({ msgtype: 'response', msgid, result });
|
||||
process.send({ msgtype: 'response', msgid, result: serializeJsTypesForJsonStringify(result) });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err, { logName }), `Error when handling message ${logName}`);
|
||||
process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error executing DB data') });
|
||||
@@ -261,6 +275,10 @@ async function handleLoadKeys({ msgid, root, filter, limit }) {
|
||||
return handleDriverDataCore(msgid, driver => driver.loadKeys(dbhan, root, filter, limit), { logName: 'loadKeys' });
|
||||
}
|
||||
|
||||
async function handleScanKeys({ msgid, pattern, cursor, count }) {
|
||||
return handleDriverDataCore(msgid, driver => driver.scanKeys(dbhan, pattern, cursor, count), { logName: 'scanKeys' });
|
||||
}
|
||||
|
||||
async function handleExportKeys({ msgid, options }) {
|
||||
return handleDriverDataCore(msgid, driver => driver.exportKeys(dbhan, options), { logName: 'exportKeys' });
|
||||
}
|
||||
@@ -370,6 +388,56 @@ async function handleGenerateDeploySql({ msgid, modelFolder }) {
|
||||
}
|
||||
}
|
||||
|
||||
async function handleExecuteSessionQuery({ sesid, sql }) {
|
||||
await waitConnected();
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
|
||||
if (!allowExecuteCustomScript(storedConnection, driver)) {
|
||||
process.send({
|
||||
msgtype: 'info',
|
||||
info: {
|
||||
message: 'Connection without read-only sessions is read only',
|
||||
severity: 'error',
|
||||
},
|
||||
sesid,
|
||||
});
|
||||
process.send({ msgtype: 'done', sesid, skipFinishedMessage: true });
|
||||
return;
|
||||
//process.send({ msgtype: 'error', error: e.message });
|
||||
}
|
||||
|
||||
const queryStreamInfoHolder = {
|
||||
resultIndex: 0,
|
||||
canceled: false,
|
||||
};
|
||||
for (const sqlItem of splitQuery(sql, {
|
||||
...driver.getQuerySplitterOptions('stream'),
|
||||
returnRichInfo: true,
|
||||
})) {
|
||||
await handleQueryStream(dbhan, driver, queryStreamInfoHolder, sqlItem, sesid);
|
||||
if (queryStreamInfoHolder.canceled) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
process.send({ msgtype: 'done', sesid });
|
||||
}
|
||||
|
||||
async function handleEvalJsonScript({ script, runid }) {
|
||||
const directory = path.join(rundir(), runid);
|
||||
fs.mkdirSync(directory);
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
try {
|
||||
process.chdir(directory);
|
||||
|
||||
const evalWriter = new ScriptWriterEval(dbgateApi, requirePlugin, dbhan, runid);
|
||||
await playJsonScriptWriter(script, evalWriter);
|
||||
process.send({ msgtype: 'runnerDone', runid });
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
}
|
||||
|
||||
// async function handleRunCommand({ msgid, sql }) {
|
||||
// await waitConnected();
|
||||
// const driver = engines(storedConnection);
|
||||
@@ -389,6 +457,7 @@ const messageHandlers = {
|
||||
updateCollection: handleUpdateCollection,
|
||||
collectionData: handleCollectionData,
|
||||
loadKeys: handleLoadKeys,
|
||||
scanKeys: handleScanKeys,
|
||||
loadKeyInfo: handleLoadKeyInfo,
|
||||
callMethod: handleCallMethod,
|
||||
loadKeyTableRange: handleLoadKeyTableRange,
|
||||
@@ -400,6 +469,8 @@ const messageHandlers = {
|
||||
sqlSelect: handleSqlSelect,
|
||||
exportKeys: handleExportKeys,
|
||||
schemaList: handleSchemaList,
|
||||
executeSessionQuery: handleExecuteSessionQuery,
|
||||
evalJsonScript: handleEvalJsonScript,
|
||||
// runCommand: handleRunCommand,
|
||||
};
|
||||
|
||||
|
||||
@@ -46,7 +46,13 @@ async function handleRefresh() {
|
||||
|
||||
async function readVersion() {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
const version = await driver.getVersion(dbhan);
|
||||
let version;
|
||||
try {
|
||||
version = await driver.getVersion(dbhan);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting DB server version');
|
||||
version = { version: 'Unknown' };
|
||||
}
|
||||
process.send({ msgtype: 'version', version });
|
||||
}
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ const { decryptConnection } = require('../utility/crypting');
|
||||
const { connectUtility } = require('../utility/connectUtility');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
const { getLogger, extractIntSettingsValue, extractBoolSettingsValue } = require('dbgate-tools');
|
||||
const { handleQueryStream, QueryStreamTableWriter, allowExecuteCustomScript } = require('../utility/handleQueryStream');
|
||||
|
||||
const logger = getLogger('sessionProcess');
|
||||
|
||||
@@ -23,175 +24,6 @@ let lastActivity = null;
|
||||
let currentProfiler = null;
|
||||
let executingScripts = 0;
|
||||
|
||||
class TableWriter {
|
||||
constructor() {
|
||||
this.currentRowCount = 0;
|
||||
this.currentChangeIndex = 1;
|
||||
this.initializedFile = false;
|
||||
}
|
||||
|
||||
initializeFromQuery(structure, resultIndex) {
|
||||
this.jslid = crypto.randomUUID();
|
||||
this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
|
||||
fs.writeFileSync(
|
||||
this.currentFile,
|
||||
JSON.stringify({
|
||||
...structure,
|
||||
__isStreamHeader: true,
|
||||
}) + '\n'
|
||||
);
|
||||
this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
|
||||
this.writeCurrentStats(false, false);
|
||||
this.resultIndex = resultIndex;
|
||||
this.initializedFile = true;
|
||||
process.send({ msgtype: 'recordset', jslid: this.jslid, resultIndex });
|
||||
}
|
||||
|
||||
initializeFromReader(jslid) {
|
||||
this.jslid = jslid;
|
||||
this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
|
||||
this.writeCurrentStats(false, false);
|
||||
}
|
||||
|
||||
row(row) {
|
||||
// console.log('ACCEPT ROW', row);
|
||||
this.currentStream.write(JSON.stringify(row) + '\n');
|
||||
this.currentRowCount += 1;
|
||||
|
||||
if (!this.plannedStats) {
|
||||
this.plannedStats = true;
|
||||
process.nextTick(() => {
|
||||
if (this.currentStream) this.currentStream.uncork();
|
||||
process.nextTick(() => this.writeCurrentStats(false, true));
|
||||
this.plannedStats = false;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
rowFromReader(row) {
|
||||
if (!this.initializedFile) {
|
||||
process.send({ msgtype: 'initializeFile', jslid: this.jslid });
|
||||
this.initializedFile = true;
|
||||
|
||||
fs.writeFileSync(this.currentFile, JSON.stringify(row) + '\n');
|
||||
this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
|
||||
this.writeCurrentStats(false, false);
|
||||
this.initializedFile = true;
|
||||
return;
|
||||
}
|
||||
|
||||
this.row(row);
|
||||
}
|
||||
|
||||
writeCurrentStats(isFinished = false, emitEvent = false) {
|
||||
const stats = {
|
||||
rowCount: this.currentRowCount,
|
||||
changeIndex: this.currentChangeIndex,
|
||||
isFinished,
|
||||
jslid: this.jslid,
|
||||
};
|
||||
fs.writeFileSync(`${this.currentFile}.stats`, JSON.stringify(stats));
|
||||
this.currentChangeIndex += 1;
|
||||
if (emitEvent) {
|
||||
process.send({ msgtype: 'stats', ...stats });
|
||||
}
|
||||
}
|
||||
|
||||
close(afterClose) {
|
||||
if (this.currentStream) {
|
||||
this.currentStream.end(() => {
|
||||
this.writeCurrentStats(true, true);
|
||||
if (afterClose) afterClose();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class StreamHandler {
|
||||
constructor(resultIndexHolder, resolve, startLine) {
|
||||
this.recordset = this.recordset.bind(this);
|
||||
this.startLine = startLine;
|
||||
this.row = this.row.bind(this);
|
||||
// this.error = this.error.bind(this);
|
||||
this.done = this.done.bind(this);
|
||||
this.info = this.info.bind(this);
|
||||
|
||||
// use this for cancelling - not implemented
|
||||
// this.stream = null;
|
||||
|
||||
this.plannedStats = false;
|
||||
this.resultIndexHolder = resultIndexHolder;
|
||||
this.resolve = resolve;
|
||||
// currentHandlers = [...currentHandlers, this];
|
||||
}
|
||||
|
||||
closeCurrentWriter() {
|
||||
if (this.currentWriter) {
|
||||
this.currentWriter.close();
|
||||
this.currentWriter = null;
|
||||
}
|
||||
}
|
||||
|
||||
recordset(columns) {
|
||||
this.closeCurrentWriter();
|
||||
this.currentWriter = new TableWriter();
|
||||
this.currentWriter.initializeFromQuery(
|
||||
Array.isArray(columns) ? { columns } : columns,
|
||||
this.resultIndexHolder.value
|
||||
);
|
||||
this.resultIndexHolder.value += 1;
|
||||
|
||||
// this.writeCurrentStats();
|
||||
|
||||
// this.onRow = _.throttle((jslid) => {
|
||||
// if (jslid == this.jslid) {
|
||||
// this.writeCurrentStats(false, true);
|
||||
// }
|
||||
// }, 500);
|
||||
}
|
||||
row(row) {
|
||||
if (this.currentWriter) this.currentWriter.row(row);
|
||||
else if (row.message) process.send({ msgtype: 'info', info: { message: row.message } });
|
||||
// this.onRow(this.jslid);
|
||||
}
|
||||
// error(error) {
|
||||
// process.send({ msgtype: 'error', error });
|
||||
// }
|
||||
done(result) {
|
||||
this.closeCurrentWriter();
|
||||
// currentHandlers = currentHandlers.filter((x) => x != this);
|
||||
this.resolve();
|
||||
}
|
||||
info(info) {
|
||||
if (info && info.line != null) {
|
||||
info = {
|
||||
...info,
|
||||
line: this.startLine + info.line,
|
||||
};
|
||||
}
|
||||
process.send({ msgtype: 'info', info });
|
||||
}
|
||||
}
|
||||
|
||||
function handleStream(driver, resultIndexHolder, sqlItem) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const start = sqlItem.trimStart || sqlItem.start;
|
||||
const handler = new StreamHandler(resultIndexHolder, resolve, start && start.line);
|
||||
driver.stream(dbhan, sqlItem.text, handler);
|
||||
});
|
||||
}
|
||||
|
||||
function allowExecuteCustomScript(driver) {
|
||||
if (driver.readOnlySessions) {
|
||||
return true;
|
||||
}
|
||||
if (storedConnection.isReadOnly) {
|
||||
return false;
|
||||
// throw new Error('Connection is read only');
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
async function handleConnect(connection) {
|
||||
storedConnection = connection;
|
||||
|
||||
@@ -222,12 +54,12 @@ async function handleStartProfiler({ jslid }) {
|
||||
await waitConnected();
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
|
||||
if (!allowExecuteCustomScript(driver)) {
|
||||
if (!allowExecuteCustomScript(storedConnection, driver)) {
|
||||
process.send({ msgtype: 'done' });
|
||||
return;
|
||||
}
|
||||
|
||||
const writer = new TableWriter();
|
||||
const writer = new QueryStreamTableWriter();
|
||||
writer.initializeFromReader(jslid);
|
||||
|
||||
currentProfiler = await driver.startProfiler(dbhan, {
|
||||
@@ -251,7 +83,7 @@ async function handleExecuteControlCommand({ command }) {
|
||||
await waitConnected();
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
|
||||
if (command == 'commitTransaction' && !allowExecuteCustomScript(driver)) {
|
||||
if (command == 'commitTransaction' && !allowExecuteCustomScript(storedConnection, driver)) {
|
||||
process.send({
|
||||
msgtype: 'info',
|
||||
info: {
|
||||
@@ -285,13 +117,13 @@ async function handleExecuteControlCommand({ command }) {
|
||||
}
|
||||
}
|
||||
|
||||
async function handleExecuteQuery({ sql, autoCommit }) {
|
||||
async function handleExecuteQuery({ sql, autoCommit, limitRows }) {
|
||||
lastActivity = new Date().getTime();
|
||||
|
||||
await waitConnected();
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
|
||||
if (!allowExecuteCustomScript(driver)) {
|
||||
if (!allowExecuteCustomScript(storedConnection, driver)) {
|
||||
process.send({
|
||||
msgtype: 'info',
|
||||
info: {
|
||||
@@ -306,18 +138,23 @@ async function handleExecuteQuery({ sql, autoCommit }) {
|
||||
|
||||
executingScripts++;
|
||||
try {
|
||||
const resultIndexHolder = {
|
||||
value: 0,
|
||||
const queryStreamInfoHolder = {
|
||||
resultIndex: 0,
|
||||
canceled: false,
|
||||
};
|
||||
for (const sqlItem of splitQuery(sql, {
|
||||
...driver.getQuerySplitterOptions('stream'),
|
||||
returnRichInfo: true,
|
||||
})) {
|
||||
await handleStream(driver, resultIndexHolder, sqlItem);
|
||||
await handleQueryStream(dbhan, driver, queryStreamInfoHolder, sqlItem, undefined, limitRows);
|
||||
// const handler = new StreamHandler(resultIndex);
|
||||
// const stream = await driver.stream(systemConnection, sqlItem, handler);
|
||||
// handler.stream = stream;
|
||||
// resultIndex = handler.resultIndex;
|
||||
|
||||
if (queryStreamInfoHolder.canceled) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
process.send({ msgtype: 'done', autoCommit });
|
||||
} finally {
|
||||
@@ -335,13 +172,13 @@ async function handleExecuteReader({ jslid, sql, fileName }) {
|
||||
if (fileName) {
|
||||
sql = fs.readFileSync(fileName, 'utf-8');
|
||||
} else {
|
||||
if (!allowExecuteCustomScript(driver)) {
|
||||
if (!allowExecuteCustomScript(storedConnection, driver)) {
|
||||
process.send({ msgtype: 'done' });
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const writer = new TableWriter();
|
||||
const writer = new QueryStreamTableWriter();
|
||||
writer.initializeFromReader(jslid);
|
||||
|
||||
const reader = await driver.readQuery(dbhan, sql);
|
||||
|
||||
@@ -3,7 +3,9 @@ const { archivedir, resolveArchiveFolder } = require('../utility/directories');
|
||||
const jsonLinesReader = require('./jsonLinesReader');
|
||||
|
||||
function archiveReader({ folderName, fileName, ...other }) {
|
||||
const jsonlFile = path.join(resolveArchiveFolder(folderName), `${fileName}.jsonl`);
|
||||
const jsonlFile = folderName.endsWith('.zip')
|
||||
? `zip://archive:${folderName}//${fileName}.jsonl`
|
||||
: path.join(resolveArchiveFolder(folderName), `${fileName}.jsonl`);
|
||||
const res = jsonLinesReader({ fileName: jsonlFile, ...other });
|
||||
return res;
|
||||
}
|
||||
|
||||
@@ -15,9 +15,9 @@ class CollectorWriterStream extends stream.Writable {
|
||||
|
||||
_final(callback) {
|
||||
process.send({
|
||||
msgtype: 'freeData',
|
||||
msgtype: 'dataResult',
|
||||
runid: this.runid,
|
||||
freeData: { rows: this.rows, structure: this.structure },
|
||||
dataResult: { rows: this.rows, structure: this.structure },
|
||||
});
|
||||
callback();
|
||||
}
|
||||
|
||||
@@ -69,6 +69,7 @@ async function copyStream(input, output, options) {
|
||||
msgtype: 'copyStreamError',
|
||||
copyStreamError: {
|
||||
message: extractErrorMessage(err),
|
||||
progressName,
|
||||
...err,
|
||||
},
|
||||
});
|
||||
|
||||
@@ -1,61 +0,0 @@
|
||||
const stream = require('stream');
|
||||
const path = require('path');
|
||||
const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const { connectUtility } = require('../utility/connectUtility');
|
||||
const logger = getLogger('dataDuplicator');
|
||||
const { DataDuplicator } = require('dbgate-datalib');
|
||||
const copyStream = require('./copyStream');
|
||||
const jsonLinesReader = require('./jsonLinesReader');
|
||||
const { resolveArchiveFolder } = require('../utility/directories');
|
||||
|
||||
async function dataDuplicator({
|
||||
connection,
|
||||
archive,
|
||||
folder,
|
||||
items,
|
||||
options,
|
||||
analysedStructure = null,
|
||||
driver,
|
||||
systemConnection,
|
||||
}) {
|
||||
if (!driver) driver = requireEngineDriver(connection);
|
||||
|
||||
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
|
||||
|
||||
try {
|
||||
if (!analysedStructure) {
|
||||
analysedStructure = await driver.analyseFull(dbhan);
|
||||
}
|
||||
|
||||
const sourceDir = archive
|
||||
? resolveArchiveFolder(archive)
|
||||
: folder?.startsWith('archive:')
|
||||
? resolveArchiveFolder(folder.substring('archive:'.length))
|
||||
: folder;
|
||||
|
||||
const dupl = new DataDuplicator(
|
||||
dbhan,
|
||||
driver,
|
||||
analysedStructure,
|
||||
items.map(item => ({
|
||||
name: item.name,
|
||||
operation: item.operation,
|
||||
matchColumns: item.matchColumns,
|
||||
openStream:
|
||||
item.openStream || (() => jsonLinesReader({ fileName: path.join(sourceDir, `${item.name}.jsonl`) })),
|
||||
})),
|
||||
stream,
|
||||
copyStream,
|
||||
options
|
||||
);
|
||||
|
||||
await dupl.run();
|
||||
} finally {
|
||||
if (!systemConnection) {
|
||||
await driver.close(dbhan);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = dataDuplicator;
|
||||
96
packages/api/src/shell/dataReplicator.js
Normal file
96
packages/api/src/shell/dataReplicator.js
Normal file
@@ -0,0 +1,96 @@
|
||||
const stream = require('stream');
|
||||
const path = require('path');
|
||||
const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const { connectUtility } = require('../utility/connectUtility');
|
||||
const logger = getLogger('datareplicator');
|
||||
const { DataReplicator } = require('dbgate-datalib');
|
||||
const { compileCompoudEvalCondition } = require('dbgate-filterparser');
|
||||
const copyStream = require('./copyStream');
|
||||
const jsonLinesReader = require('./jsonLinesReader');
|
||||
const { resolveArchiveFolder } = require('../utility/directories');
|
||||
const { evaluateCondition } = require('dbgate-sqltree');
|
||||
|
||||
function compileOperationFunction(enabled, condition) {
|
||||
if (!enabled) return _row => false;
|
||||
const conditionCompiled = compileCompoudEvalCondition(condition);
|
||||
if (condition) {
|
||||
return row => evaluateCondition(conditionCompiled, row);
|
||||
}
|
||||
return _row => true;
|
||||
}
|
||||
|
||||
/**
 * Replicates data from JSON-lines sources (archive folder, plain folder,
 * ZIP archive, in-memory arrays or custom streams) into a database.
 *
 * @param {object} params
 * @param {object} [params.connection] - connection object (used when no driver/systemConnection given)
 * @param {string} [params.archive] - archive name; a `.zip` suffix switches to zip:// addressing
 * @param {string} [params.folder] - source folder; the `archive:` prefix resolves against the archive dir
 * @param {Array} params.items - per-table replication descriptors
 * @param {object} [params.options] - replicator options; `runid` enables progress reporting to the parent process
 * @param {object} [params.analysedStructure] - pre-analysed DB structure; analysed on demand when null
 * @param {object} [params.driver] - engine driver; loaded from connection when omitted
 * @param {object} [params.systemConnection] - existing connection handle; when given, it is not closed here
 * @returns {Promise<object>} the replicator result summary
 */
async function dataReplicator({
  connection,
  archive,
  folder,
  items,
  options,
  analysedStructure = null,
  driver,
  systemConnection,
}) {
  if (!driver) driver = requireEngineDriver(connection);

  const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));

  try {
    if (!analysedStructure) {
      analysedStructure = await driver.analyseFull(dbhan);
    }

    // Decide how per-table data files are addressed: inside a ZIP archive,
    // or as plain files in a resolved source directory.
    let joinPath;
    if (archive?.endsWith('.zip')) {
      joinPath = file => `zip://archive:${archive}//${file}`;
    } else {
      const sourceDir = archive
        ? resolveArchiveFolder(archive)
        : folder?.startsWith('archive:')
        ? resolveArchiveFolder(folder.substring('archive:'.length))
        : folder;
      joinPath = file => path.join(sourceDir, file);
    }

    const repl = new DataReplicator(
      dbhan,
      driver,
      analysedStructure,
      items.map(item => ({
        name: item.name,
        matchColumns: item.matchColumns,
        findExisting: compileOperationFunction(item.findExisting, item.findCondition),
        createNew: compileOperationFunction(item.createNew, item.createCondition),
        updateExisting: compileOperationFunction(item.updateExisting, item.updateCondition),
        deleteMissing: !!item.deleteMissing,
        deleteRestrictionColumns: item.deleteRestrictionColumns ?? [],
        // Source priority: explicit stream factory > in-memory array > JSONL file.
        openStream: item.openStream
          ? item.openStream
          : item.jsonArray
          ? () => stream.Readable.from(item.jsonArray)
          : () => jsonLinesReader({ fileName: joinPath(`${item.name}.jsonl`) }),
      })),
      stream,
      copyStream,
      options
    );

    await repl.run();

    // BUGFIX: guard on process.send — it is only defined when this script runs
    // in a forked child process; calling it standalone would throw.
    if (options?.runid && process.send) {
      process.send({
        msgtype: 'dataResult',
        runid: options.runid,
        dataResult: repl.result,
      });
    }
    return repl.result;
  } finally {
    // Close only connections opened by this function, never caller-owned ones.
    if (!systemConnection) {
      await driver.close(dbhan);
    }
  }
}
|
||||
|
||||
module.exports = dataReplicator;
|
||||
@@ -1,14 +1,30 @@
|
||||
const crypto = require('crypto');
|
||||
const path = require('path');
|
||||
const { uploadsdir } = require('../utility/directories');
|
||||
const { uploadsdir, archivedir } = require('../utility/directories');
|
||||
const { downloadFile } = require('../utility/downloader');
|
||||
const extractSingleFileFromZip = require('../utility/extractSingleFileFromZip');
|
||||
|
||||
/**
 * Resolves a source specifier into a local file path.
 * - http(s) URLs are downloaded into `targetFile` (or a random uploads file).
 * - `zip://<zipPath>//<fileInZip>` specifiers extract a single ZIP entry;
 *   an `archive:` prefix in <zipPath> resolves against the archive directory.
 * - anything else (an existing local path, or a falsy value) is returned unchanged.
 *
 * @param {string} url - source specifier
 * @param {object} [options]
 * @param {string} [options.targetFile] - explicit destination path
 * @returns {Promise<string>} local file path (or the original value untouched)
 */
async function download(url, options = {}) {
  const { targetFile } = options || {};
  if (url) {
    if (url.match(/(^http:\/\/)|(^https:\/\/)/)) {
      const destFile = targetFile || path.join(uploadsdir(), crypto.randomUUID());
      await downloadFile(url, destFile);
      return destFile;
    }
    const zipMatch = url.match(/^zip\:\/\/(.*)\/\/(.*)$/);
    if (zipMatch) {
      const destFile = targetFile || path.join(uploadsdir(), crypto.randomUUID());
      let zipFile = zipMatch[1];
      if (zipFile.startsWith('archive:')) {
        zipFile = path.join(archivedir(), zipFile.substring('archive:'.length));
      }

      await extractSingleFileFromZip(zipFile, zipMatch[2], destFile);
      return destFile;
    }
  }

  return url;
}
|
||||
|
||||
|
||||
@@ -1,47 +0,0 @@
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const { connectUtility } = require('../utility/connectUtility');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
|
||||
const logger = getLogger('dumpDb');
|
||||
|
||||
/**
 * Wraps a backup dumper's event interface in a promise.
 * Resolves with `true` when the dumper emits 'end', rejects with the
 * emitted error on 'error'. The dumper is started via `run()` only after
 * both handlers are attached, so no event can be missed.
 *
 * @param {object} dumper - object exposing once('end'|'error', cb) and run()
 * @returns {Promise<boolean>}
 */
function doDump(dumper) {
  return new Promise((resolve, reject) => {
    const handleEnd = () => resolve(true);
    const handleError = err => reject(err);
    dumper.once('end', handleEnd);
    dumper.once('error', handleError);
    dumper.run();
  });
}
|
||||
|
||||
/**
 * Creates a native backup dump of a database into a file.
 *
 * @param {object} params
 * @param {object} [params.connection] - connection object (used when driver/systemConnection missing)
 * @param {object} [params.systemConnection] - existing connection handle; not closed here when provided
 * @param {object} [params.driver] - engine driver; loaded from connection when omitted
 * @param {string} params.outputFile - destination dump file
 * @param {string} params.databaseName - database to dump
 * @param {string} [params.schemaName] - optional schema restriction
 * @returns {Promise<void>}
 */
async function dumpDatabase({
  connection = undefined,
  systemConnection = undefined,
  driver = undefined,
  outputFile,
  databaseName,
  schemaName,
}) {
  logger.info(`Dumping database`);

  if (!driver) {
    driver = requireEngineDriver(connection);
  }

  const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));

  try {
    const dumperOptions = { outputFile, databaseName, schemaName };
    const dumper = await driver.createBackupDumper(dbhan, dumperOptions);
    await doDump(dumper);
  } finally {
    // Only close connections this function opened itself.
    if (!systemConnection) {
      await driver.close(dbhan);
    }
  }
}
|
||||
|
||||
module.exports = dumpDatabase;
|
||||
@@ -14,6 +14,7 @@ const logger = getLogger('execQuery');
|
||||
* @param {string} [options.sql] - SQL query
|
||||
* @param {string} [options.sqlFile] - SQL file
|
||||
* @param {boolean} [options.logScriptItems] - whether to log script items instead of whole script
|
||||
* @param {boolean} [options.skipLogging] - whether to skip logging
|
||||
*/
|
||||
async function executeQuery({
|
||||
connection = undefined,
|
||||
@@ -22,8 +23,9 @@ async function executeQuery({
|
||||
sql,
|
||||
sqlFile = undefined,
|
||||
logScriptItems = false,
|
||||
skipLogging = false,
|
||||
}) {
|
||||
if (!logScriptItems) {
|
||||
if (!logScriptItems && !skipLogging) {
|
||||
logger.info({ sql: getLimitedQuery(sql) }, `Execute query`);
|
||||
}
|
||||
|
||||
@@ -36,7 +38,9 @@ async function executeQuery({
|
||||
}
|
||||
|
||||
try {
|
||||
logger.debug(`Running SQL query, length: ${sql.length}`);
|
||||
if (!skipLogging) {
|
||||
logger.debug(`Running SQL query, length: ${sql.length}`);
|
||||
}
|
||||
|
||||
await driver.script(dbhan, sql, { logScriptItems });
|
||||
} finally {
|
||||
|
||||
@@ -52,7 +52,10 @@ async function generateDeploySql({
|
||||
dbdiffOptionsExtra?.['schemaMode'] !== 'ignore' &&
|
||||
dbdiffOptionsExtra?.['schemaMode'] !== 'ignoreImplicit'
|
||||
) {
|
||||
throw new Error('targetSchema is required for databases with multiple schemas');
|
||||
if (!driver?.dialect?.defaultSchemaName) {
|
||||
throw new Error('targetSchema is required for databases with multiple schemas');
|
||||
}
|
||||
targetSchema = driver.dialect.defaultSchemaName;
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
@@ -21,12 +21,11 @@ const executeQuery = require('./executeQuery');
|
||||
const loadFile = require('./loadFile');
|
||||
const deployDb = require('./deployDb');
|
||||
const initializeApiEnvironment = require('./initializeApiEnvironment');
|
||||
const dumpDatabase = require('./dumpDatabase');
|
||||
const importDatabase = require('./importDatabase');
|
||||
const loadDatabase = require('./loadDatabase');
|
||||
const generateModelSql = require('./generateModelSql');
|
||||
const modifyJsonLinesReader = require('./modifyJsonLinesReader');
|
||||
const dataDuplicator = require('./dataDuplicator');
|
||||
const dataReplicator = require('./dataReplicator');
|
||||
const dbModelToJson = require('./dbModelToJson');
|
||||
const jsonToDbModel = require('./jsonToDbModel');
|
||||
const jsonReader = require('./jsonReader');
|
||||
@@ -36,6 +35,11 @@ const autoIndexForeignKeysTransform = require('./autoIndexForeignKeysTransform')
|
||||
const generateDeploySql = require('./generateDeploySql');
|
||||
const dropAllDbObjects = require('./dropAllDbObjects');
|
||||
const importDbFromFolder = require('./importDbFromFolder');
|
||||
const zipDirectory = require('./zipDirectory');
|
||||
const unzipDirectory = require('./unzipDirectory');
|
||||
const zipJsonLinesData = require('./zipJsonLinesData');
|
||||
const unzipJsonLinesData = require('./unzipJsonLinesData');
|
||||
const unzipJsonLinesFile = require('./unzipJsonLinesFile');
|
||||
|
||||
const dbgateApi = {
|
||||
queryReader,
|
||||
@@ -61,12 +65,11 @@ const dbgateApi = {
|
||||
loadFile,
|
||||
deployDb,
|
||||
initializeApiEnvironment,
|
||||
dumpDatabase,
|
||||
importDatabase,
|
||||
loadDatabase,
|
||||
generateModelSql,
|
||||
modifyJsonLinesReader,
|
||||
dataDuplicator,
|
||||
dataReplicator,
|
||||
dbModelToJson,
|
||||
jsonToDbModel,
|
||||
dataTypeMapperTransform,
|
||||
@@ -75,6 +78,11 @@ const dbgateApi = {
|
||||
generateDeploySql,
|
||||
dropAllDbObjects,
|
||||
importDbFromFolder,
|
||||
zipDirectory,
|
||||
unzipDirectory,
|
||||
zipJsonLinesData,
|
||||
unzipJsonLinesData,
|
||||
unzipJsonLinesFile,
|
||||
};
|
||||
|
||||
requirePlugin.initializeDbgateApi(dbgateApi);
|
||||
|
||||
@@ -36,9 +36,10 @@ async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true })
|
||||
logger.info(`Writing file ${fileName}`);
|
||||
const stringify = new StringifyStream({ header });
|
||||
const fileStream = fs.createWriteStream(fileName, encoding);
|
||||
stringify.pipe(fileStream);
|
||||
stringify['finisher'] = fileStream;
|
||||
return stringify;
|
||||
return [stringify, fileStream];
|
||||
// stringify.pipe(fileStream);
|
||||
// stringify['finisher'] = fileStream;
|
||||
// return stringify;
|
||||
}
|
||||
|
||||
module.exports = jsonLinesWriter;
|
||||
|
||||
@@ -7,6 +7,8 @@ const logger = getLogger('queryReader');
|
||||
* Returns reader object for {@link copyStream} function. This reader object reads data from query.
|
||||
* @param {object} options
|
||||
* @param {connectionType} options.connection - connection object
|
||||
* @param {object} options.systemConnection - system connection (result of driver.connect). If not provided, new connection will be created
|
||||
* @param {object} options.driver - driver object. If not provided, it will be loaded from connection
|
||||
* @param {string} options.query - SQL query
|
||||
* @param {string} [options.queryType] - query type
|
||||
* @param {string} [options.sql] - SQL query. obsolete; use query instead
|
||||
@@ -16,6 +18,8 @@ async function queryReader({
|
||||
connection,
|
||||
query,
|
||||
queryType,
|
||||
systemConnection,
|
||||
driver,
|
||||
// obsolete; use query instead
|
||||
sql,
|
||||
}) {
|
||||
@@ -28,10 +32,13 @@ async function queryReader({
|
||||
logger.info({ sql: query || sql }, `Reading query`);
|
||||
// else console.log(`Reading query ${JSON.stringify(json)}`);
|
||||
|
||||
const driver = requireEngineDriver(connection);
|
||||
const pool = await connectUtility(driver, connection, queryType == 'json' ? 'read' : 'script');
|
||||
if (!driver) {
|
||||
driver = requireEngineDriver(connection);
|
||||
}
|
||||
const dbhan = systemConnection || (await connectUtility(driver, connection, queryType == 'json' ? 'read' : 'script'));
|
||||
|
||||
const reader =
|
||||
queryType == 'json' ? await driver.readJsonQuery(pool, query) : await driver.readQuery(pool, query || sql);
|
||||
queryType == 'json' ? await driver.readJsonQuery(dbhan, query) : await driver.readQuery(dbhan, query || sql);
|
||||
return reader;
|
||||
}
|
||||
|
||||
|
||||
91
packages/api/src/shell/unzipDirectory.js
Normal file
91
packages/api/src/shell/unzipDirectory.js
Normal file
@@ -0,0 +1,91 @@
|
||||
const yauzl = require('yauzl');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
|
||||
const logger = getLogger('unzipDirectory');
|
||||
|
||||
/**
|
||||
* Extracts an entire ZIP file, preserving its internal directory layout.
|
||||
*
|
||||
* @param {string} zipPath Path to the ZIP file on disk.
|
||||
* @param {string} outputDirectory Folder to create / overwrite with the contents.
|
||||
* @returns {Promise<boolean>} Resolves `true` on success, rejects on error.
|
||||
*/
|
||||
function unzipDirectory(zipPath, outputDirectory) {
  return new Promise((resolve, reject) => {
    // lazyEntries: entries are pulled one at a time via zipFile.readEntry(),
    // so we control pacing and never buffer the whole archive listing.
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
      if (err) return reject(err);

      /** Pending per-file extractions – we resolve the main promise after they're all done */
      const pending = [];

      // kick things off
      zipFile.readEntry();

      zipFile.on('entry', entry => {
        // NOTE(review): entry.fileName is joined without sanitization — a crafted
        // ZIP containing '../' entries could escape outputDirectory (zip-slip).
        // Verify archives are trusted, or normalize/validate destPath.
        const destPath = path.join(outputDirectory, entry.fileName);

        // Handle directories (their names always end with "/" in ZIPs)
        if (/\/$/.test(entry.fileName)) {
          // Ensure directory exists, then continue to next entry
          fs.promises
            .mkdir(destPath, { recursive: true })
            .then(() => zipFile.readEntry())
            .catch(reject);
          return;
        }

        // Handle files: create parent dirs, then stream the entry to disk.
        const filePromise = fs.promises
          .mkdir(path.dirname(destPath), { recursive: true }) // make sure parent dirs exist
          .then(
            () =>
              new Promise((res, rej) => {
                zipFile.openReadStream(entry, (err, readStream) => {
                  if (err) return rej(err);

                  const writeStream = fs.createWriteStream(destPath);
                  readStream.pipe(writeStream);

                  // proceed to next entry once we've consumed *this* one;
                  // 'end' (read side) may fire before 'finish' (write side),
                  // which is why completion is tracked via the pending array
                  readStream.on('end', () => zipFile.readEntry());

                  writeStream.on('finish', () => {
                    logger.info(`Extracted "${entry.fileName}" → "${destPath}".`);
                    res();
                  });

                  writeStream.on('error', writeErr => {
                    logger.error(
                      extractErrorLogData(writeErr),
                      `Error extracting "${entry.fileName}" from "${zipPath}".`
                    );
                    rej(writeErr);
                  });
                });
              })
          );

        pending.push(filePromise);
      });

      // Entire archive enumerated; wait for all streams to finish
      zipFile.on('end', () => {
        Promise.all(pending)
          .then(() => {
            logger.info(`Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
            resolve(true);
          })
          .catch(reject);
      });

      zipFile.on('error', err => {
        logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
        reject(err);
      });
    });
  });
}
|
||||
|
||||
module.exports = unzipDirectory;
|
||||
60
packages/api/src/shell/unzipJsonLinesData.js
Normal file
60
packages/api/src/shell/unzipJsonLinesData.js
Normal file
@@ -0,0 +1,60 @@
|
||||
const yauzl = require('yauzl');
|
||||
const fs = require('fs');
|
||||
const { jsonLinesParse } = require('dbgate-tools');
|
||||
|
||||
/**
 * Reads a ZIP archive and parses every *.jsonl entry into memory.
 *
 * @param {string} zipPath - path of the ZIP file on disk
 * @returns {Promise<object>} map of entry name (without the .jsonl suffix)
 *   to the parsed array of JSON-lines records
 */
function unzipJsonLinesData(zipPath) {
  return new Promise((resolve, reject) => {
    // Open the zip file (lazyEntries: entries are pulled one by one on demand)
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
      if (err) {
        return reject(err);
      }

      const results = {};

      // Start reading entries
      zipfile.readEntry();

      zipfile.on('entry', entry => {
        // Only process .jsonl files; everything else is skipped below
        if (/\.jsonl$/i.test(entry.fileName)) {
          zipfile.openReadStream(entry, (err, readStream) => {
            if (err) {
              return reject(err);
            }

            // Whole entry is buffered in memory before parsing — fine for
            // JSONL data files, but not suitable for very large archives.
            const chunks = [];
            readStream.on('data', chunk => chunks.push(chunk));
            readStream.on('end', () => {
              try {
                const fileContent = Buffer.concat(chunks).toString('utf-8');
                const parsedJson = jsonLinesParse(fileContent);
                results[entry.fileName.replace(/\.jsonl$/, '')] = parsedJson;
              } catch (parseError) {
                return reject(parseError);
              }

              // Move to the next entry
              zipfile.readEntry();
            });
          });
        } else {
          // Not a .jsonl file, skip
          zipfile.readEntry();
        }
      });

      // Resolve when no more entries
      zipfile.on('end', () => {
        resolve(results);
      });

      // Catch errors from zipfile
      zipfile.on('error', zipErr => {
        reject(zipErr);
      });
    });
  });
}
|
||||
|
||||
module.exports = unzipJsonLinesData;
|
||||
59
packages/api/src/shell/unzipJsonLinesFile.js
Normal file
59
packages/api/src/shell/unzipJsonLinesFile.js
Normal file
@@ -0,0 +1,59 @@
|
||||
const yauzl = require('yauzl');
|
||||
const fs = require('fs');
|
||||
const { jsonLinesParse } = require('dbgate-tools');
|
||||
|
||||
/**
 * Extracts and parses a single JSON-lines entry from a ZIP archive.
 *
 * @param {string} zipPath - path of the ZIP file on disk
 * @param {string} fileInZip - exact entry name to read
 * @returns {Promise<object[]|null>} parsed records, or null when the entry
 *   is not present in the archive
 */
function unzipJsonLinesFile(zipPath, fileInZip) {
  return new Promise((resolve, reject) => {
    // Open the zip file (lazyEntries: entries are pulled one by one on demand)
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
      if (err) {
        return reject(err);
      }

      let result = null;

      // Start reading entries
      zipfile.readEntry();

      zipfile.on('entry', entry => {
        if (entry.fileName == fileInZip) {
          zipfile.openReadStream(entry, (err, readStream) => {
            if (err) {
              return reject(err);
            }

            // Whole entry is buffered in memory before parsing.
            const chunks = [];
            readStream.on('data', chunk => chunks.push(chunk));
            readStream.on('end', () => {
              try {
                const fileContent = Buffer.concat(chunks).toString('utf-8');
                const parsedJson = jsonLinesParse(fileContent);
                result = parsedJson;
              } catch (parseError) {
                return reject(parseError);
              }

              // Move to the next entry. NOTE(review): scanning continues even
              // after a match; with duplicate entry names the last one wins.
              zipfile.readEntry();
            });
          });
        } else {
          // Not the requested entry, skip
          zipfile.readEntry();
        }
      });

      // Resolve when no more entries (result stays null if nothing matched)
      zipfile.on('end', () => {
        resolve(result);
      });

      // Catch errors from zipfile
      zipfile.on('error', zipErr => {
        reject(zipErr);
      });
    });
  });
}
|
||||
|
||||
module.exports = unzipJsonLinesFile;
|
||||
49
packages/api/src/shell/zipDirectory.js
Normal file
49
packages/api/src/shell/zipDirectory.js
Normal file
@@ -0,0 +1,49 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const archiver = require('archiver');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const { archivedir } = require('../utility/directories');
|
||||
const logger = getLogger('compressDirectory');
|
||||
|
||||
/**
 * Compresses the contents of a directory into a ZIP file.
 * Nested *.zip files inside the input directory are skipped, so previously
 * created archives are never re-packed into the new one.
 *
 * @param {string} inputDirectory - folder whose contents will be zipped
 * @param {string} outputFile - target ZIP path; the 'archive:' prefix is
 *   resolved relative to the DbGate archive directory
 * @returns {Promise<void>} resolves once the ZIP is fully written
 */
function zipDirectory(inputDirectory, outputFile) {
  if (outputFile.startsWith('archive:')) {
    outputFile = path.join(archivedir(), outputFile.substring('archive:'.length));
  }

  return new Promise((resolve, reject) => {
    const output = fs.createWriteStream(outputFile);
    const archive = archiver('zip', { zlib: { level: 9 } }); // level: 9 => best compression

    // Listen for all archive data to be written
    output.on('close', () => {
      logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
      resolve();
    });

    // BUGFIX: without this handler a failed write (bad path, disk full, …)
    // left the returned promise pending forever.
    output.on('error', err => {
      logger.error(extractErrorLogData(err), `Error writing ZIP file: ${err.message}`);
      reject(err);
    });

    archive.on('warning', err => {
      logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
    });

    archive.on('error', err => {
      logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
      reject(err);
    });

    // Pipe archive data to the file
    archive.pipe(output);

    // Append files from a folder
    archive.directory(inputDirectory, false, entryData => {
      if (entryData.name.endsWith('.zip')) {
        return false; // returning false means "do not include"
      }
      // otherwise, include it
      return entryData;
    });

    // Finalize the archive
    archive.finalize();
  });
}
|
||||
|
||||
module.exports = zipDirectory;
|
||||
49
packages/api/src/shell/zipJsonLinesData.js
Normal file
49
packages/api/src/shell/zipJsonLinesData.js
Normal file
@@ -0,0 +1,49 @@
|
||||
const fs = require('fs');
|
||||
const _ = require('lodash');
|
||||
const path = require('path');
|
||||
const archiver = require('archiver');
|
||||
const { getLogger, extractErrorLogData, jsonLinesStringify } = require('dbgate-tools');
|
||||
const { archivedir } = require('../utility/directories');
|
||||
const logger = getLogger('compressDirectory');
|
||||
|
||||
/**
 * Serializes an in-memory "JSON database" into a ZIP file.
 * Every array-valued key of `jsonDb` becomes a `<key>.jsonl` entry
 * (one JSON document per line); non-array values are ignored.
 *
 * @param {object} jsonDb - map of table name -> array of row objects
 * @param {string} outputFile - target ZIP path; the 'archive:' prefix is
 *   resolved relative to the DbGate archive directory
 * @returns {Promise<void>} resolves once the ZIP is fully written
 */
function zipJsonLinesData(jsonDb, outputFile) {
  // BUGFIX: was copy-paste-named `zipDirectory`, shadowing the real
  // zipDirectory shell command's name and confusing stack traces.
  if (outputFile.startsWith('archive:')) {
    outputFile = path.join(archivedir(), outputFile.substring('archive:'.length));
  }

  return new Promise((resolve, reject) => {
    const output = fs.createWriteStream(outputFile);
    const archive = archiver('zip', { zlib: { level: 9 } }); // level: 9 => best compression

    // Listen for all archive data to be written
    output.on('close', () => {
      logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
      resolve();
    });

    // BUGFIX: without this handler a failed write (bad path, disk full, …)
    // left the returned promise pending forever.
    output.on('error', err => {
      logger.error(extractErrorLogData(err), `Error writing ZIP file: ${err.message}`);
      reject(err);
    });

    archive.on('warning', err => {
      logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
    });

    archive.on('error', err => {
      logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
      reject(err);
    });

    // Pipe archive data to the file
    archive.pipe(output);

    // Append one .jsonl entry per array-valued key (Array.isArray replaces
    // the lodash _.isArray call — identical semantics, stdlib only).
    for (const key in jsonDb) {
      const data = jsonDb[key];
      if (Array.isArray(data)) {
        const jsonString = jsonLinesStringify(data);
        archive.append(jsonString, { name: `${key}.jsonl` });
      }
    }

    // Finalize the archive
    archive.finalize();
  });
}

module.exports = zipJsonLinesData;
|
||||
819
packages/api/src/storageModel.js
Normal file
819
packages/api/src/storageModel.js
Normal file
@@ -0,0 +1,819 @@
|
||||
module.exports = {
|
||||
"tables": [
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columnName": "name",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columnName": "type",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columnName": "amoid",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columnName": "is_disabled",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columnName": "is_default",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columnName": "is_collapsed",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [],
|
||||
"primaryKey": {
|
||||
"pureName": "auth_methods",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
},
|
||||
"preloadedRows": [
|
||||
{
|
||||
"id": -1,
|
||||
"amoid": "790ca4d2-7f01-4800-955b-d691b890cc50",
|
||||
"name": "Anonymous",
|
||||
"type": "none"
|
||||
},
|
||||
{
|
||||
"id": -2,
|
||||
"amoid": "53db1cbf-f488-44d9-8670-7162510eb09c",
|
||||
"name": "Local",
|
||||
"type": "local"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods_config",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "auth_methods_config",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods_config",
|
||||
"columnName": "auth_method_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods_config",
|
||||
"columnName": "key",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods_config",
|
||||
"columnName": "value",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "auth_methods_config",
|
||||
"refTableName": "auth_methods",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "auth_method_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "auth_methods_config",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "config",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "config",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "config",
|
||||
"columnName": "group",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "config",
|
||||
"columnName": "key",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "config",
|
||||
"columnName": "value",
|
||||
"dataType": "varchar(1000)",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [],
|
||||
"primaryKey": {
|
||||
"pureName": "config",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "conid",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "displayName",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "connectionColor",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "engine",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "server",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "databaseFile",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "useDatabaseUrl",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "databaseUrl",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "authType",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "port",
|
||||
"dataType": "varchar(20)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "serviceName",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "serviceNameType",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "socketPath",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "user",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "password",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "passwordMode",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "treeKeySeparator",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "windowsDomain",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "isReadOnly",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "trustServerCertificate",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "defaultDatabase",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "singleDatabase",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "useSshTunnel",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshHost",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshPort",
|
||||
"dataType": "varchar(20)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshMode",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshKeyFile",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshKeyfilePassword",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshLogin",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshPassword",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshBastionHost",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "useSsl",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sslCaFile",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sslCertFilePassword",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sslKeyFile",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sslRejectUnauthorized",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "clientLibraryPath",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "useRedirectDbLogin",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "allowedDatabases",
|
||||
"dataType": "varchar(500)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "allowedDatabasesRegex",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "endpoint",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "endpointKey",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "accessKeyId",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "secretAccessKey",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "awsRegion",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [],
|
||||
"primaryKey": {
|
||||
"pureName": "connections",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "roles",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "roles",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "roles",
|
||||
"columnName": "name",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [],
|
||||
"primaryKey": {
|
||||
"pureName": "roles",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
},
|
||||
"preloadedRows": [
|
||||
{
|
||||
"id": -1,
|
||||
"name": "anonymous-user"
|
||||
},
|
||||
{
|
||||
"id": -2,
|
||||
"name": "logged-user"
|
||||
},
|
||||
{
|
||||
"id": -3,
|
||||
"name": "superadmin"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"pureName": "role_connections",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "role_connections",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "role_connections",
|
||||
"columnName": "role_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "role_connections",
|
||||
"columnName": "connection_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "role_connections",
|
||||
"refTableName": "roles",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "role_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "role_connections",
|
||||
"refTableName": "connections",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "connection_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "role_connections",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "role_permissions",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "role_permissions",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "role_permissions",
|
||||
"columnName": "role_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "role_permissions",
|
||||
"columnName": "permission",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": true
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "role_permissions",
|
||||
"refTableName": "roles",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "role_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "role_permissions",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "users",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "users",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "users",
|
||||
"columnName": "login",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "users",
|
||||
"columnName": "password",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "users",
|
||||
"columnName": "email",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [],
|
||||
"primaryKey": {
|
||||
"pureName": "users",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "user_connections",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "user_connections",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_connections",
|
||||
"columnName": "user_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_connections",
|
||||
"columnName": "connection_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "user_connections",
|
||||
"refTableName": "users",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "user_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "user_connections",
|
||||
"refTableName": "connections",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "connection_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "user_connections",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "user_permissions",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "user_permissions",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_permissions",
|
||||
"columnName": "user_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_permissions",
|
||||
"columnName": "permission",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": true
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "user_permissions",
|
||||
"refTableName": "users",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "user_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "user_permissions",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "user_roles",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "user_roles",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_roles",
|
||||
"columnName": "user_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_roles",
|
||||
"columnName": "role_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "user_roles",
|
||||
"refTableName": "users",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "user_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "user_roles",
|
||||
"refTableName": "roles",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "role_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "user_roles",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"collections": [],
|
||||
"views": [],
|
||||
"matviews": [],
|
||||
"functions": [],
|
||||
"procedures": [],
|
||||
"triggers": []
|
||||
};
|
||||
@@ -60,6 +60,10 @@ class DatastoreProxy {
|
||||
// if (this.disconnected) return;
|
||||
this.subprocess = null;
|
||||
});
|
||||
this.subprocess.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), 'Error in data store subprocess');
|
||||
this.subprocess = null;
|
||||
});
|
||||
this.subprocess.send({ msgtype: 'open', file: this.file });
|
||||
}
|
||||
return this.subprocess;
|
||||
|
||||
@@ -36,6 +36,10 @@ async function callRefactorSqlQueryApi(query, task, structure, dialect) {
|
||||
return null;
|
||||
}
|
||||
|
||||
function getLicenseHttpHeaders() {
|
||||
return {};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
isAuthProxySupported,
|
||||
authProxyGetRedirectUrl,
|
||||
@@ -47,4 +51,5 @@ module.exports = {
|
||||
callTextToSqlApi,
|
||||
callCompleteOnCursorApi,
|
||||
callRefactorSqlQueryApi,
|
||||
getLicenseHttpHeaders,
|
||||
};
|
||||
|
||||
380
packages/api/src/utility/cloudIntf.js
Normal file
380
packages/api/src/utility/cloudIntf.js
Normal file
@@ -0,0 +1,380 @@
|
||||
const axios = require('axios');
|
||||
const fs = require('fs-extra');
|
||||
const _ = require('lodash');
|
||||
const path = require('path');
|
||||
const { getLicenseHttpHeaders } = require('./authProxy');
|
||||
const { getLogger, extractErrorLogData, jsonLinesParse } = require('dbgate-tools');
|
||||
const { datadir } = require('./directories');
|
||||
const platformInfo = require('./platformInfo');
|
||||
const connections = require('../controllers/connections');
|
||||
const { isProApp } = require('./checkLicense');
|
||||
const socket = require('./socket');
|
||||
const config = require('../controllers/config');
|
||||
const simpleEncryptor = require('simple-encryptor');
|
||||
const currentVersion = require('../currentVersion');
|
||||
const { getPublicIpInfo } = require('./hardwareFingerprint');
|
||||
|
||||
const logger = getLogger('cloudIntf');

// In-memory cache of public cloud file metadata; null until loadCloudFiles()
// has populated it from the cloud-files.jsonl snapshot in datadir().
let cloudFiles = null;

// Identity server base URL. LOCAL_DBGATE_IDENTITY selects a local instance,
// DEVWEB/DEVMODE the staging environment, otherwise production.
const DBGATE_IDENTITY_URL = process.env.LOCAL_DBGATE_IDENTITY
  ? 'http://localhost:3103'
  : process.env.DEVWEB || process.env.DEVMODE
  ? 'https://identity.dbgate.udolni.net'
  : 'https://identity.dbgate.io';

// Cloud content server base URL, selected the same way as the identity URL.
const DBGATE_CLOUD_URL = process.env.LOCAL_DBGATE_CLOUD
  ? 'http://localhost:3110'
  : process.env.DEVWEB || process.env.DEVMODE
  ? 'https://cloud.dbgate.udolni.net'
  : 'https://cloud.dbgate.io';
|
||||
|
||||
/**
 * Asks the DbGate identity server to open a new signin session.
 *
 * @param client - identifier of the client requesting the session
 * @returns {{ sid: string, url: string }} session id plus the browser URL the
 *   user must visit to complete the signin flow
 */
async function createDbGateIdentitySession(client) {
  const requestHeaders = {
    ...getLicenseHttpHeaders(),
    'Content-Type': 'application/json',
  };

  const resp = await axios.default.post(
    `${DBGATE_IDENTITY_URL}/api/create-session`,
    { client },
    { headers: requestHeaders }
  );

  const { sid } = resp.data;
  return {
    sid,
    url: `${DBGATE_IDENTITY_URL}/api/signin/${sid}`,
  };
}
|
||||
|
||||
/**
 * Polls the identity server every 500 ms for a signin token belonging to the
 * given session, invoking `callback` with the token data once the user has
 * completed signin. Gives up silently after one minute.
 *
 * @param sid - session id returned by createDbGateIdentitySession
 * @param callback - invoked once with the token payload (contains `email`)
 */
function startCloudTokenChecking(sid, callback) {
  const startedAt = Date.now();

  const timer = setInterval(async () => {
    // Stop polling after one minute without a completed signin.
    if (Date.now() - startedAt > 60 * 1000) {
      clearInterval(timer);
      return;
    }

    try {
      const resp = await axios.default.get(`${DBGATE_IDENTITY_URL}/api/get-token/${sid}`, {
        headers: {
          ...getLicenseHttpHeaders(),
        },
      });

      // A populated email means the user finished the signin flow.
      if (resp.data.email) {
        clearInterval(timer);
        callback(resp.data);
      }
    } catch (err) {
      // Transient network errors are logged and polling continues.
      logger.error(extractErrorLogData(err), 'Error checking cloud token');
    }
  }, 500);
}
|
||||
|
||||
/**
 * Populates the in-memory `cloudFiles` cache from the on-disk JSONL snapshot.
 * A missing or unreadable snapshot simply yields an empty cache.
 */
async function loadCloudFiles() {
  try {
    const raw = await fs.readFile(path.join(datadir(), 'cloud-files.jsonl'), 'utf-8');
    const entries = jsonLinesParse(raw);
    // Keep the list ordered by folder, then title, for stable UI presentation.
    cloudFiles = _.sortBy(entries, x => `${x.folder}/${x.title}`);
  } catch (err) {
    cloudFiles = [];
  }
}
|
||||
|
||||
/**
 * Builds the list of search tags sent to the cloud server so it can filter
 * which public files apply to this installation: distribution channel,
 * platform flags, database engines in use, and license tier.
 *
 * @returns {Promise<string[]>} tags in a stable, deterministic order
 */
async function collectCloudFilesSearchTags() {
  const res = [];

  // Distribution channel: desktop app vs. browser.
  res.push(platformInfo.isElectron ? 'app' : 'web');

  // Platform/layout flags, emitted in the same order as before.
  const platformFlagTags = [
    ['isWindows', 'windows'],
    ['isMac', 'mac'],
    ['isLinux', 'linux'],
    ['isAwsUbuntuLayout', 'aws'],
    ['isAzureUbuntuLayout', 'azure'],
    ['isSnap', 'snap'],
    ['isDocker', 'docker'],
    ['isNpmDist', 'npm'],
  ];
  for (const [flag, tag] of platformFlagTags) {
    if (platformInfo[flag]) {
      res.push(tag);
    }
  }

  // One tag per used database engine; engine ids look like "name@package",
  // only the part before '@' is sent.
  const engines = await connections.getUsedEngines();
  for (const engine of engines) {
    res.push(engine.split('@')[0]);
  }

  // team-premium and trials will return the same cloud files as premium - no need to check
  res.push(isProApp() ? 'premium' : 'community');

  return res;
}
|
||||
|
||||
/**
 * Reads the persisted cloud signin token holder from application settings.
 *
 * @returns the holder object (token, encryptionKey, kehid, ...) or undefined
 *   when no user is signed in
 */
async function getCloudSigninHolder() {
  const settings = await config.getSettings();
  return settings['cloudSigninTokenHolder'];
}
|
||||
|
||||
/**
 * Builds the HTTP headers proving cloud signin.
 *
 * @param holder - optional signin holder; looked up from settings when omitted
 * @returns header object with `x-cloud-login`, or null when not signed in
 */
async function getCloudSigninHeaders(holder = null) {
  const effectiveHolder = holder ? holder : await getCloudSigninHolder();
  if (!effectiveHolder) {
    return null;
  }
  return { 'x-cloud-login': effectiveHolder.token };
}
|
||||
|
||||
// Downloads public cloud file updates from the server and merges them into
// the in-memory cache and the on-disk snapshot.
// NOTE(review): assumes `cloudFiles` is already loaded — callers reach this
// through refreshPublicFiles(), which loads it first; confirm for new callers.
async function updateCloudFiles(isRefresh) {
  // Tags used for the previous successful download; if they differ from the
  // current tags we must fetch the full list instead of an incremental delta.
  let lastCloudFilesTags;
  try {
    lastCloudFilesTags = await fs.readFile(path.join(datadir(), 'cloud-files-tags.txt'), 'utf-8');
  } catch (err) {
    // Missing/unreadable tag file -> treat as "tags changed", full refresh.
    lastCloudFilesTags = '';
  }

  const ipInfo = await getPublicIpInfo();

  const tags = (await collectCloudFilesSearchTags()).join(',');
  // lastCheckedTm == 0 requests the complete list; a nonzero value asks only
  // for files modified after the newest cached entry.
  let lastCheckedTm = 0;
  if (tags == lastCloudFilesTags && cloudFiles.length > 0) {
    lastCheckedTm = _.max(cloudFiles.map(x => parseInt(x.modifiedTm)));
  }

  logger.info({ tags, lastCheckedTm }, 'Downloading cloud files');

  const resp = await axios.default.get(
    `${DBGATE_CLOUD_URL}/public-cloud-updates?lastCheckedTm=${lastCheckedTm}&tags=${tags}&isRefresh=${
      isRefresh ? 1 : 0
    }&country=${ipInfo?.country || ''}`,
    {
      headers: {
        ...getLicenseHttpHeaders(),
        ...(await getCloudSigninHeaders()),
        'x-app-version': currentVersion.version,
      },
    }
  );

  logger.info(`Downloaded ${resp.data.length} cloud files`);

  // Merge the server's response into the cached set, keyed by path:
  // deleted files are removed, everything else inserted or replaced.
  // On a full fetch (lastCheckedTm == 0) the old cache is discarded.
  const filesByPath = lastCheckedTm == 0 ? {} : _.keyBy(cloudFiles, 'path');
  for (const file of resp.data) {
    if (file.isDeleted) {
      delete filesByPath[file.path];
    } else {
      filesByPath[file.path] = file;
    }
  }

  cloudFiles = Object.values(filesByPath);

  // Persist the merged list (JSONL, one object per line) together with the
  // tags that produced it, so the next run can choose delta vs. full fetch.
  await fs.writeFile(path.join(datadir(), 'cloud-files.jsonl'), cloudFiles.map(x => JSON.stringify(x)).join('\n'));
  await fs.writeFile(path.join(datadir(), 'cloud-files-tags.txt'), tags);

  // Tell connected clients the public file list changed.
  socket.emitChanged(`public-cloud-changed`);
}
|
||||
|
||||
/**
 * Initializes the cloud files cache at application startup.
 *
 * FIX: the original fired `loadCloudFiles()` without awaiting it (a floating
 * promise), so callers awaiting startCloudFiles() could observe `cloudFiles`
 * still null. Awaiting makes the returned promise resolve only after the
 * cache is populated — backward compatible, the signature is unchanged.
 */
async function startCloudFiles() {
  await loadCloudFiles();
}
|
||||
|
||||
/**
 * Returns the cached public cloud file list, lazily loading it from disk on
 * first use.
 *
 * BUG FIX: the original tested `if (!loadCloudFiles)` — the function
 * reference, which is always truthy — so the lazy load never ran and this
 * could return null before startCloudFiles() completed. The intended check
 * is on the cache variable itself.
 */
async function getPublicCloudFiles() {
  if (!cloudFiles) {
    await loadCloudFiles();
  }
  return cloudFiles;
}
|
||||
|
||||
/**
 * Downloads the raw data of a public cloud file.
 *
 * FIX: the parameter was named `path`, shadowing the `path` module required
 * at the top of this file; renamed to `filePath`. Positional call sites are
 * unaffected.
 *
 * @param filePath - server-side path of the public file
 * @returns the response body as returned by the cloud endpoint
 */
async function getPublicFileData(filePath) {
  const resp = await axios.default.get(`${DBGATE_CLOUD_URL}/public/${filePath}`, {
    headers: {
      ...getLicenseHttpHeaders(),
    },
  });
  return resp.data;
}
|
||||
|
||||
/**
 * Ensures the local cache exists, then pulls updates from the cloud server.
 * Update failures are logged and never propagated to the caller.
 *
 * @param isRefresh - true when triggered by an explicit user refresh
 */
async function refreshPublicFiles(isRefresh) {
  // Lazily initialize the cache before applying incremental updates.
  if (!cloudFiles) {
    await loadCloudFiles();
  }

  await updateCloudFiles(isRefresh).catch(err => {
    logger.error(extractErrorLogData(err), 'Error updating cloud files');
  });
}
|
||||
|
||||
/**
 * Performs an authenticated GET against the cloud API.
 *
 * @param endpoint - path relative to DBGATE_CLOUD_URL
 * @param signinHolder - optional signin holder; looked up when omitted
 * @param additionalHeaders - extra headers merged into the request
 * @returns response data, `{ apiErrorMessage }` on application errors, or
 *   null when no user is signed in
 */
async function callCloudApiGet(endpoint, signinHolder = null, additionalHeaders = {}) {
  if (!signinHolder) {
    signinHolder = await getCloudSigninHolder();
  }
  if (!signinHolder) {
    return null;
  }
  const signinHeaders = await getCloudSigninHeaders(signinHolder);

  const headers = {
    ...getLicenseHttpHeaders(),
    ...signinHeaders,
    ...additionalHeaders,
  };
  // 4xx responses carry application-level errors in the body; only 5xx throws.
  const resp = await axios.default.get(`${DBGATE_CLOUD_URL}/${endpoint}`, {
    headers,
    validateStatus: status => status < 500,
  });

  const { errorMessage } = resp.data;
  return errorMessage ? { apiErrorMessage: errorMessage } : resp.data;
}
|
||||
|
||||
/**
 * Performs an authenticated POST against the cloud API.
 *
 * @param endpoint - path relative to DBGATE_CLOUD_URL
 * @param body - JSON request body
 * @param signinHolder - optional signin holder; looked up when omitted
 * @returns response data, an `apiError*` object on application errors, or
 *   null when no user is signed in
 */
async function callCloudApiPost(endpoint, body, signinHolder = null) {
  if (!signinHolder) {
    signinHolder = await getCloudSigninHolder();
  }
  if (!signinHolder) {
    return null;
  }
  const signinHeaders = await getCloudSigninHeaders(signinHolder);

  // 4xx responses carry application-level errors in the body; only 5xx throws.
  const resp = await axios.default.post(`${DBGATE_CLOUD_URL}/${endpoint}`, body, {
    headers: {
      ...getLicenseHttpHeaders(),
      ...signinHeaders,
    },
    validateStatus: status => status < 500,
  });

  const { errorMessage, isLicenseLimit, limitedLicenseLimits } = resp.data;
  if (!errorMessage) {
    return resp.data;
  }
  return {
    apiErrorMessage: errorMessage,
    apiErrorIsLicenseLimit: isLicenseLimit,
    apiErrorLimitedLicenseLimits: limitedLicenseLimits,
  };
}
|
||||
|
||||
/**
 * Fetches the encryption key of a cloud folder and wraps it in an encryptor.
 *
 * FIX: callCloudApiGet returns null when no user is signed in and an
 * `{ apiErrorMessage }` object on API errors; the original destructured the
 * response unconditionally, so a null response crashed with an unhelpful
 * TypeError instead of the intended Error below.
 *
 * @param folid - cloud folder id
 * @throws Error when the folder key is unavailable
 */
async function getCloudFolderEncryptor(folid) {
  const resp = await callCloudApiGet(`folder-key/${folid}`);
  const encryptionKey = resp?.encryptionKey;
  if (!encryptionKey) {
    throw new Error('No encryption key for folder: ' + folid);
  }
  return simpleEncryptor.createEncryptor(encryptionKey);
}
|
||||
|
||||
/**
 * Downloads and decrypts a single content item from a cloud folder.
 *
 * @param folid - cloud folder id
 * @param cntid - content id inside the folder
 * @returns decrypted content plus metadata, or `{ apiErrorMessage }` on
 *   application errors
 * @throws Error when no user is signed in
 */
async function getCloudContent(folid, cntid) {
  const signinHolder = await getCloudSigninHolder();
  if (!signinHolder) {
    throw new Error('No signed in');
  }

  // Content is encrypted with the user's key; decrypt client-side.
  const encryptor = simpleEncryptor.createEncryptor(signinHolder.encryptionKey);

  const response = await callCloudApiGet(`content/${folid}/${cntid}`, signinHolder, {
    'x-kehid': signinHolder.kehid,
  });

  if (response.apiErrorMessage) {
    return { apiErrorMessage: response.apiErrorMessage };
  }

  return {
    content: encryptor.decrypt(response.content),
    name: response.name,
    type: response.type,
    contentFolder: response.contentFolder,
    contentType: response.contentType,
  };
}
|
||||
|
||||
/**
 * Uploads (creates or overwrites) an encrypted content item in a cloud folder
 * and notifies listeners about the change.
 *
 * @returns Promise<{ cntid: string } | { apiErrorMessage: string }>
 * @throws Error when no user is signed in
 */
async function putCloudContent(folid, cntid, content, name, type, contentFolder = null, contentType = null) {
  const signinHolder = await getCloudSigninHolder();
  if (!signinHolder) {
    throw new Error('No signed in');
  }

  // Encrypt client-side with the user's key before upload.
  const encryptor = simpleEncryptor.createEncryptor(signinHolder.encryptionKey);
  const payload = {
    folid,
    cntid,
    name,
    type,
    kehid: signinHolder.kehid,
    content: encryptor.encrypt(content),
    contentFolder,
    contentType,
  };

  const resp = await callCloudApiPost(`put-content`, payload, signinHolder);

  socket.emitChanged('cloud-content-changed');
  socket.emit('cloud-content-updated');
  return resp;
}
|
||||
|
||||
// Cache of connection objects downloaded from the cloud, keyed by "folid|cntid".
const cloudConnectionCache = {};

/**
 * Loads a connection definition stored in the cloud, caching the parsed
 * result so repeated lookups do not re-download and re-decrypt the content.
 *
 * @returns the connection object with a synthetic `_id` of the form
 *   `cloud://folid/cntid`
 */
async function loadCachedCloudConnection(folid, cntid) {
  const cacheKey = `${folid}|${cntid}`;
  const cached = cloudConnectionCache[cacheKey];
  if (cached) {
    return cached;
  }

  const { content } = await getCloudContent(folid, cntid);
  const connection = {
    ...JSON.parse(content),
    _id: `cloud://${folid}/${cntid}`,
  };
  cloudConnectionCache[cacheKey] = connection;
  return connection;
}

/** Drops a cached cloud connection, forcing the next load to re-download it. */
function removeCloudCachedConnection(folid, cntid) {
  delete cloudConnectionCache[`${folid}|${cntid}`];
}
|
||||
|
||||
// Public interface of the cloud integration module: identity signin session
// handling, public cloud file cache, generic cloud API calls, and encrypted
// folder content access.
module.exports = {
  createDbGateIdentitySession,
  startCloudTokenChecking,
  startCloudFiles,
  getPublicCloudFiles,
  getPublicFileData,
  refreshPublicFiles,
  callCloudApiGet,
  callCloudApiPost,
  getCloudFolderEncryptor,
  getCloudContent,
  loadCachedCloudConnection,
  putCloudContent,
  removeCloudCachedConnection,
};
|
||||
@@ -1,61 +1,3 @@
|
||||
const axios = require('axios');
|
||||
const fs = require('fs');
|
||||
const fsp = require('fs/promises');
|
||||
const semver = require('semver');
|
||||
const currentVersion = require('../currentVersion');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
|
||||
const logger = getLogger('cloudUpgrade');
|
||||
|
||||
async function checkCloudUpgrade() {
|
||||
try {
|
||||
const resp = await axios.default.get('https://api.github.com/repos/dbgate/dbgate/releases/latest');
|
||||
const json = resp.data;
|
||||
const version = json.name.substring(1);
|
||||
let cloudDownloadedVersion = null;
|
||||
try {
|
||||
cloudDownloadedVersion = await fsp.readFile(process.env.CLOUD_UPGRADE_FILE + '.version', 'utf-8');
|
||||
} catch (err) {
|
||||
cloudDownloadedVersion = null;
|
||||
}
|
||||
if (
|
||||
semver.gt(version, currentVersion.version) &&
|
||||
(!cloudDownloadedVersion || semver.gt(version, cloudDownloadedVersion))
|
||||
) {
|
||||
logger.info(`New version available: ${version}`);
|
||||
const zipUrl = json.assets.find(x => x.name == 'cloud-build.zip').browser_download_url;
|
||||
|
||||
const writer = fs.createWriteStream(process.env.CLOUD_UPGRADE_FILE);
|
||||
|
||||
const response = await axios.default({
|
||||
url: zipUrl,
|
||||
method: 'GET',
|
||||
responseType: 'stream',
|
||||
});
|
||||
|
||||
response.data.pipe(writer);
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
writer.on('finish', resolve);
|
||||
writer.on('error', reject);
|
||||
});
|
||||
await fsp.writeFile(process.env.CLOUD_UPGRADE_FILE + '.version', version);
|
||||
|
||||
logger.info(`Downloaded new version from ${zipUrl}`);
|
||||
} else {
|
||||
logger.info(`Checked version ${version} is not newer than ${cloudDownloadedVersion ?? currentVersion.version}, upgrade skippped`);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error checking cloud upgrade');
|
||||
}
|
||||
}
|
||||
|
||||
function startCloudUpgradeTimer() {
|
||||
// at first in 5 seconds
|
||||
setTimeout(checkCloudUpgrade, 5000);
|
||||
|
||||
// hourly
|
||||
setInterval(checkCloudUpgrade, 60 * 60 * 1000);
|
||||
}
|
||||
function startCloudUpgradeTimer() {}
|
||||
|
||||
module.exports = startCloudUpgradeTimer;
|
||||
|
||||
@@ -96,7 +96,9 @@ async function connectUtility(driver, storedConnection, connectionMode, addition
|
||||
...decryptConnection(connectionLoaded),
|
||||
};
|
||||
|
||||
if (!connection.port && driver.defaultPort) connection.port = driver.defaultPort.toString();
|
||||
if (!connection.port && driver.defaultPort) {
|
||||
connection.port = driver.defaultPort.toString();
|
||||
}
|
||||
|
||||
if (connection.useSshTunnel) {
|
||||
const tunnel = await getSshTunnelProxy(connection);
|
||||
|
||||
@@ -5,12 +5,16 @@ const path = require('path');
|
||||
const _ = require('lodash');
|
||||
|
||||
const { datadir } = require('./directories');
|
||||
const { encryptionKeyArg } = require('./processArgs');
|
||||
|
||||
const defaultEncryptionKey = 'mQAUaXhavRGJDxDTXSCg7Ej0xMmGCrx6OKA07DIMBiDcYYkvkaXjTAzPUEHEHEf9';
|
||||
|
||||
let _encryptionKey = null;
|
||||
|
||||
function loadEncryptionKey() {
|
||||
if (encryptionKeyArg) {
|
||||
return encryptionKeyArg;
|
||||
}
|
||||
if (_encryptionKey) {
|
||||
return _encryptionKey;
|
||||
}
|
||||
@@ -33,9 +37,29 @@ function loadEncryptionKey() {
|
||||
return _encryptionKey;
|
||||
}
|
||||
|
||||
async function loadEncryptionKeyFromExternal(storedValue, setStoredValue) {
|
||||
const encryptor = simpleEncryptor.createEncryptor(defaultEncryptionKey);
|
||||
|
||||
if (!storedValue) {
|
||||
const generatedKey = crypto.randomBytes(32);
|
||||
const newKey = generatedKey.toString('hex');
|
||||
const result = {
|
||||
encryptionKey: newKey,
|
||||
};
|
||||
await setStoredValue(encryptor.encrypt(result));
|
||||
|
||||
setEncryptionKey(newKey);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const data = encryptor.decrypt(storedValue);
|
||||
setEncryptionKey(data['encryptionKey']);
|
||||
}
|
||||
|
||||
let _encryptor = null;
|
||||
|
||||
function getEncryptor() {
|
||||
function getInternalEncryptor() {
|
||||
if (_encryptor) {
|
||||
return _encryptor;
|
||||
}
|
||||
@@ -43,35 +67,46 @@ function getEncryptor() {
|
||||
return _encryptor;
|
||||
}
|
||||
|
||||
function encryptPasswordField(connection, field) {
|
||||
if (
|
||||
connection &&
|
||||
connection[field] &&
|
||||
!connection[field].startsWith('crypt:') &&
|
||||
connection.passwordMode != 'saveRaw'
|
||||
) {
|
||||
return {
|
||||
...connection,
|
||||
[field]: 'crypt:' + getEncryptor().encrypt(connection[field]),
|
||||
};
|
||||
function encryptPasswordString(password) {
|
||||
if (password && !password.startsWith('crypt:')) {
|
||||
return 'crypt:' + getInternalEncryptor().encrypt(password);
|
||||
}
|
||||
return connection;
|
||||
return password;
|
||||
}
|
||||
|
||||
function decryptPasswordField(connection, field) {
|
||||
if (connection && connection[field] && connection[field].startsWith('crypt:')) {
|
||||
return {
|
||||
...connection,
|
||||
[field]: getEncryptor().decrypt(connection[field].substring('crypt:'.length)),
|
||||
};
|
||||
function decryptPasswordString(password) {
|
||||
if (password && password.startsWith('crypt:')) {
|
||||
return getInternalEncryptor().decrypt(password.substring('crypt:'.length));
|
||||
}
|
||||
return connection;
|
||||
return password;
|
||||
}
|
||||
|
||||
function encryptConnection(connection) {
|
||||
connection = encryptPasswordField(connection, 'password');
|
||||
connection = encryptPasswordField(connection, 'sshPassword');
|
||||
connection = encryptPasswordField(connection, 'sshKeyfilePassword');
|
||||
function encryptObjectPasswordField(obj, field, encryptor = null) {
|
||||
if (obj && obj[field] && !obj[field].startsWith('crypt:')) {
|
||||
return {
|
||||
...obj,
|
||||
[field]: 'crypt:' + (encryptor || getInternalEncryptor()).encrypt(obj[field]),
|
||||
};
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
||||
function decryptObjectPasswordField(obj, field) {
|
||||
if (obj && obj[field] && obj[field].startsWith('crypt:')) {
|
||||
return {
|
||||
...obj,
|
||||
[field]: getInternalEncryptor().decrypt(obj[field].substring('crypt:'.length)),
|
||||
};
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
||||
function encryptConnection(connection, encryptor = null) {
|
||||
if (connection.passwordMode != 'saveRaw') {
|
||||
connection = encryptObjectPasswordField(connection, 'password', encryptor);
|
||||
connection = encryptObjectPasswordField(connection, 'sshPassword', encryptor);
|
||||
connection = encryptObjectPasswordField(connection, 'sshKeyfilePassword', encryptor);
|
||||
}
|
||||
return connection;
|
||||
}
|
||||
|
||||
@@ -81,12 +116,24 @@ function maskConnection(connection) {
|
||||
}
|
||||
|
||||
function decryptConnection(connection) {
|
||||
connection = decryptPasswordField(connection, 'password');
|
||||
connection = decryptPasswordField(connection, 'sshPassword');
|
||||
connection = decryptPasswordField(connection, 'sshKeyfilePassword');
|
||||
connection = decryptObjectPasswordField(connection, 'password');
|
||||
connection = decryptObjectPasswordField(connection, 'sshPassword');
|
||||
connection = decryptObjectPasswordField(connection, 'sshKeyfilePassword');
|
||||
return connection;
|
||||
}
|
||||
|
||||
function encryptUser(user) {
|
||||
if (user.encryptPassword) {
|
||||
user = encryptObjectPasswordField(user, 'password');
|
||||
}
|
||||
return user;
|
||||
}
|
||||
|
||||
function decryptUser(user) {
|
||||
user = decryptObjectPasswordField(user, 'password');
|
||||
return user;
|
||||
}
|
||||
|
||||
function pickSafeConnectionInfo(connection) {
|
||||
if (process.env.LOG_CONNECTION_SENSITIVE_VALUES) {
|
||||
return connection;
|
||||
@@ -99,10 +146,78 @@ function pickSafeConnectionInfo(connection) {
|
||||
});
|
||||
}
|
||||
|
||||
function setEncryptionKey(encryptionKey) {
|
||||
_encryptionKey = encryptionKey;
|
||||
_encryptor = null;
|
||||
global.ENCRYPTION_KEY = encryptionKey;
|
||||
}
|
||||
|
||||
function getEncryptionKey() {
|
||||
return _encryptionKey;
|
||||
}
|
||||
|
||||
function generateTransportEncryptionKey() {
|
||||
const encryptor = simpleEncryptor.createEncryptor(defaultEncryptionKey);
|
||||
const result = {
|
||||
encryptionKey: crypto.randomBytes(32).toString('hex'),
|
||||
};
|
||||
return encryptor.encrypt(result);
|
||||
}
|
||||
|
||||
function createTransportEncryptor(encryptionData) {
|
||||
const encryptor = simpleEncryptor.createEncryptor(defaultEncryptionKey);
|
||||
const data = encryptor.decrypt(encryptionData);
|
||||
const res = simpleEncryptor.createEncryptor(data['encryptionKey']);
|
||||
return res;
|
||||
}
|
||||
|
||||
function recryptObjectPasswordField(obj, field, decryptEncryptor, encryptEncryptor) {
|
||||
if (obj && obj[field] && obj[field].startsWith('crypt:')) {
|
||||
return {
|
||||
...obj,
|
||||
[field]: 'crypt:' + encryptEncryptor.encrypt(decryptEncryptor.decrypt(obj[field].substring('crypt:'.length))),
|
||||
};
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
||||
function recryptObjectPasswordFieldInPlace(obj, field, decryptEncryptor, encryptEncryptor) {
|
||||
if (obj && obj[field] && obj[field].startsWith('crypt:')) {
|
||||
obj[field] = 'crypt:' + encryptEncryptor.encrypt(decryptEncryptor.decrypt(obj[field].substring('crypt:'.length)));
|
||||
}
|
||||
}
|
||||
|
||||
function recryptConnection(connection, decryptEncryptor, encryptEncryptor) {
|
||||
connection = recryptObjectPasswordField(connection, 'password', decryptEncryptor, encryptEncryptor);
|
||||
connection = recryptObjectPasswordField(connection, 'sshPassword', decryptEncryptor, encryptEncryptor);
|
||||
connection = recryptObjectPasswordField(connection, 'sshKeyfilePassword', decryptEncryptor, encryptEncryptor);
|
||||
return connection;
|
||||
}
|
||||
|
||||
function recryptUser(user, decryptEncryptor, encryptEncryptor) {
|
||||
user = recryptObjectPasswordField(user, 'password', decryptEncryptor, encryptEncryptor);
|
||||
return user;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
loadEncryptionKey,
|
||||
encryptConnection,
|
||||
encryptUser,
|
||||
decryptUser,
|
||||
decryptConnection,
|
||||
maskConnection,
|
||||
pickSafeConnectionInfo,
|
||||
loadEncryptionKeyFromExternal,
|
||||
getEncryptionKey,
|
||||
setEncryptionKey,
|
||||
encryptPasswordString,
|
||||
decryptPasswordString,
|
||||
|
||||
getInternalEncryptor,
|
||||
recryptConnection,
|
||||
recryptUser,
|
||||
generateTransportEncryptionKey,
|
||||
createTransportEncryptor,
|
||||
recryptObjectPasswordField,
|
||||
recryptObjectPasswordFieldInPlace,
|
||||
};
|
||||
|
||||
77
packages/api/src/utility/extractSingleFileFromZip.js
Normal file
77
packages/api/src/utility/extractSingleFileFromZip.js
Normal file
@@ -0,0 +1,77 @@
|
||||
const yauzl = require('yauzl');
|
||||
const fs = require('fs');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const logger = getLogger('extractSingleFileFromZip');
|
||||
/**
 * Extracts a single file from a ZIP using yauzl.
 * Stops reading the rest of the archive once the file is found.
 *
 * @param {string} zipPath - Path to the ZIP file on disk.
 * @param {string} fileInZip - The file path *inside* the ZIP to extract.
 * @param {string} outputPath - Where to write the extracted file on disk.
 * @returns {Promise<boolean>} - Resolves true when the file was extracted,
 *   false when it is not present in the archive. (The original doc claimed a
 *   message was resolved; the code has always resolved a boolean.)
 */
function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
  return new Promise((resolve, reject) => {
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
      if (err) return reject(err);

      let fileFound = false;

      // Start reading the first entry
      zipFile.readEntry();

      zipFile.on('entry', entry => {
        // Compare the entry name to the file we want
        if (entry.fileName === fileInZip) {
          fileFound = true;

          // Open a read stream for this entry
          zipFile.openReadStream(entry, (err, readStream) => {
            if (err) return reject(err);

            // Create a write stream to outputPath
            const writeStream = fs.createWriteStream(outputPath);
            readStream.pipe(writeStream);

            // FIX: the original attached no 'error' handler to the read
            // stream, so a truncated/corrupted entry left the promise
            // pending forever (pipe() does not forward errors).
            readStream.on('error', readErr => {
              logger.error(extractErrorLogData(readErr), `Error reading "${fileInZip}" from "${zipPath}".`);
              writeStream.destroy();
              reject(readErr);
            });

            // When the read stream ends, we can close the zipFile
            readStream.on('end', () => {
              // We won't read further entries
              zipFile.close();
            });

            // When the file is finished writing, resolve
            writeStream.on('finish', () => {
              logger.info(`File "${fileInZip}" extracted to "${outputPath}".`);
              resolve(true);
            });

            // Handle write errors
            writeStream.on('error', writeErr => {
              logger.error(extractErrorLogData(writeErr), `Error extracting "${fileInZip}" from "${zipPath}".`);
              reject(writeErr);
            });
          });
        } else {
          // Not the file we want; skip to the next entry
          zipFile.readEntry();
        }
      });

      // If we reach the end without finding the file
      zipFile.on('end', () => {
        if (!fileFound) {
          resolve(false);
        }
      });

      // Handle general errors
      zipFile.on('error', err => {
        logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
        reject(err);
      });
    });
  });
}
|
||||
|
||||
// Single-function module export.
module.exports = extractSingleFileFromZip;
|
||||
@@ -1,4 +1,11 @@
|
||||
const getDiagramExport = (html, css, themeType, themeClassName) => {
|
||||
const getDiagramExport = (html, css, themeType, themeClassName, watermark) => {
|
||||
const watermarkHtml = watermark
|
||||
? `
|
||||
<div style="position: fixed; bottom: 0; right: 0; padding: 5px; font-size: 12px; color: var(--theme-font-2); background-color: var(--theme-bg-2); border-top-left-radius: 5px; border: 1px solid var(--theme-border);">
|
||||
${watermark}
|
||||
</div>
|
||||
`
|
||||
: '';
|
||||
return `<html>
|
||||
<meta charset='utf-8'>
|
||||
|
||||
@@ -13,10 +20,44 @@ const getDiagramExport = (html, css, themeType, themeClassName) => {
|
||||
</style>
|
||||
|
||||
<link rel="stylesheet" href='https://cdn.jsdelivr.net/npm/@mdi/font@6.5.95/css/materialdesignicons.css' />
|
||||
|
||||
<script>
|
||||
let lastX = null;
|
||||
let lastY = null;
|
||||
|
||||
const handleMoveDown = e => {
|
||||
lastX = e.clientX;
|
||||
lastY = e.clientY;
|
||||
document.addEventListener('mousemove', handleMoveMove, true);
|
||||
document.addEventListener('mouseup', handleMoveEnd, true);
|
||||
};
|
||||
|
||||
const handleMoveMove = e => {
|
||||
e.preventDefault();
|
||||
|
||||
document.body.scrollLeft -= e.clientX - lastX;
|
||||
document.body.scrollTop -= e.clientY - lastY;
|
||||
|
||||
lastX = e.clientX;
|
||||
lastY = e.clientY;
|
||||
};
|
||||
const handleMoveEnd = e => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
|
||||
lastX = null;
|
||||
lastY = null;
|
||||
document.removeEventListener('mousemove', handleMoveMove, true);
|
||||
document.removeEventListener('mouseup', handleMoveEnd, true);
|
||||
};
|
||||
|
||||
document.addEventListener('mousedown', handleMoveDown);
|
||||
</script>
|
||||
</head>
|
||||
|
||||
<body class='${themeType == 'dark' ? 'theme-type-dark' : 'theme-type-light'} ${themeClassName}'>
|
||||
<body class='${themeType == 'dark' ? 'theme-type-dark' : 'theme-type-light'} ${themeClassName}' style='user-select:none; cursor:pointer'>
|
||||
${html}
|
||||
${watermarkHtml}
|
||||
</body>
|
||||
|
||||
</html>`;
|
||||
|
||||
@@ -22,6 +22,8 @@ const getMapExport = (geoJson) => {
|
||||
})
|
||||
.addTo(map);
|
||||
|
||||
leaflet.control.scale().addTo(map);
|
||||
|
||||
const geoJsonObj = leaflet
|
||||
.geoJSON(${JSON.stringify(geoJson)}, {
|
||||
style: function () {
|
||||
|
||||
227
packages/api/src/utility/handleQueryStream.js
Normal file
227
packages/api/src/utility/handleQueryStream.js
Normal file
@@ -0,0 +1,227 @@
|
||||
const crypto = require('crypto');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const _ = require('lodash');
|
||||
|
||||
const { jsldir } = require('../utility/directories');
|
||||
const { serializeJsTypesReplacer } = require('dbgate-tools');
|
||||
|
||||
// Writes the rows of one query result set into a JSONL file under jsldir(),
// maintaining a companion "<file>.stats" file and notifying the parent
// process (via process.send IPC) about new recordsets and row statistics.
class QueryStreamTableWriter {
  constructor(sesid = undefined) {
    this.currentRowCount = 0;
    this.currentChangeIndex = 1;
    this.initializedFile = false;
    // session id forwarded with every IPC message so the parent can route it
    this.sesid = sesid;
  }

  // Creates the JSONL file for a new recordset; the first line is the column
  // structure marked with __isStreamHeader, then announces the recordset.
  initializeFromQuery(structure, resultIndex) {
    this.jslid = crypto.randomUUID();
    this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
    fs.writeFileSync(
      this.currentFile,
      JSON.stringify({
        ...structure,
        __isStreamHeader: true,
      }) + '\n'
    );
    this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
    this.writeCurrentStats(false, false);
    this.resultIndex = resultIndex;
    this.initializedFile = true;
    process.send({ msgtype: 'recordset', jslid: this.jslid, resultIndex, sesid: this.sesid });
  }

  // Reuses an existing jslid; the data file itself is created lazily by
  // rowFromReader when the first row (the stream header) arrives.
  initializeFromReader(jslid) {
    this.jslid = jslid;
    this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
    this.writeCurrentStats(false, false);
  }

  // Appends one data row; stats writes are coalesced to at most one per
  // process tick to avoid hammering the filesystem on fast streams.
  row(row) {
    this.currentStream.write(JSON.stringify(row, serializeJsTypesReplacer) + '\n');
    this.currentRowCount += 1;

    if (!this.plannedStats) {
      this.plannedStats = true;
      process.nextTick(() => {
        if (this.currentStream) this.currentStream.uncork();
        process.nextTick(() => this.writeCurrentStats(false, true));
        this.plannedStats = false;
      });
    }
  }

  // Row callback for the reader-based flow: the first row creates the file.
  rowFromReader(row) {
    if (!this.initializedFile) {
      process.send({ msgtype: 'initializeFile', jslid: this.jslid, sesid: this.sesid });
      // BUGFIX(cleanup): initializedFile was assigned twice in the original;
      // one assignment before returning is sufficient
      fs.writeFileSync(this.currentFile, JSON.stringify(row) + '\n');
      this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
      this.writeCurrentStats(false, false);
      this.initializedFile = true;
      return;
    }

    this.row(row);
  }

  // Persists { rowCount, changeIndex, isFinished, jslid } into "<file>.stats";
  // optionally emits a 'stats' IPC message to the parent process.
  writeCurrentStats(isFinished = false, emitEvent = false) {
    const stats = {
      rowCount: this.currentRowCount,
      changeIndex: this.currentChangeIndex,
      isFinished,
      jslid: this.jslid,
    };
    fs.writeFileSync(`${this.currentFile}.stats`, JSON.stringify(stats));
    this.currentChangeIndex += 1;
    if (emitEvent) {
      process.send({ msgtype: 'stats', sesid: this.sesid, ...stats });
    }
  }

  // Flushes and closes the underlying write stream, writes final stats and
  // invokes the optional afterClose callback; resolves when done.
  close(afterClose) {
    return new Promise(resolve => {
      if (this.currentStream) {
        this.currentStream.end(() => {
          this.writeCurrentStats(true, true);
          if (afterClose) afterClose();
          resolve();
        });
      } else {
        resolve();
      }
    });
  }
}
|
||||
|
||||
// Receives driver streaming callbacks (recordset/row/done/info) for one SQL
// query: routes data rows into QueryStreamTableWriter instances and forwards
// informational messages to the parent process.
class StreamHandler {
  constructor(queryStreamInfoHolder, resolve, startLine, sesid = undefined, limitRows = undefined) {
    // bind callbacks - drivers invoke them detached from `this`
    this.recordset = this.recordset.bind(this);
    this.startLine = startLine;
    this.sesid = sesid;
    this.limitRows = limitRows;
    this.rowsLimitOverflow = false;
    this.row = this.row.bind(this);
    // this.error = this.error.bind(this);
    this.done = this.done.bind(this);
    this.info = this.info.bind(this);

    // use this for cancelling - not implemented
    // this.stream = null;

    this.plannedStats = false;
    this.queryStreamInfoHolder = queryStreamInfoHolder;
    this.resolve = resolve;
    this.rowCounter = 0;
  }

  // Finishes the writer of the previous recordset, if any.
  closeCurrentWriter() {
    if (this.currentWriter) {
      this.currentWriter.close();
      this.currentWriter = null;
    }
  }

  // Driver callback: a new result set begins; start a fresh JSONL writer.
  recordset(columns) {
    if (this.rowsLimitOverflow) {
      return;
    }
    this.closeCurrentWriter();
    this.currentWriter = new QueryStreamTableWriter(this.sesid);
    this.currentWriter.initializeFromQuery(
      Array.isArray(columns) ? { columns } : columns,
      this.queryStreamInfoHolder.resultIndex
    );
    this.queryStreamInfoHolder.resultIndex += 1;
    this.rowCounter = 0;
  }

  // Driver callback: one data row, or an informational row carrying `message`.
  row(row) {
    if (this.rowsLimitOverflow) {
      return;
    }

    if (this.limitRows && this.rowCounter >= this.limitRows) {
      // row limit reached - report it, mark the script canceled and terminate
      // this worker process after flushing the current writer
      process.send({
        msgtype: 'info',
        info: { message: `Rows limit overflow, loaded ${this.rowCounter} rows, canceling query`, severity: 'error' },
        sesid: this.sesid,
      });
      this.rowsLimitOverflow = true;

      this.queryStreamInfoHolder.canceled = true;
      if (this.currentWriter) {
        this.currentWriter.close().then(() => {
          process.exit(0);
        });
      } else {
        process.exit(0);
      }

      return;
    }

    if (this.currentWriter) {
      this.currentWriter.row(row);
      this.rowCounter += 1;
    } else if (row.message) {
      process.send({ msgtype: 'info', info: { message: row.message }, sesid: this.sesid });
    }
  }

  // Driver callback: query finished; resolves the handleQueryStream promise.
  done(result) {
    this.closeCurrentWriter();
    this.resolve();
  }

  // Driver callback: server message (print/notice/error).
  info(info) {
    if (info && info.line != null) {
      // shift reported line numbers by this statement's offset in the script
      info = {
        ...info,
        line: this.startLine + info.line,
      };
    }
    // BUGFIX: `info` was dereferenced unconditionally here although the guard
    // above already treats it as possibly null/undefined
    if (info && info.severity == 'error') {
      this.queryStreamInfoHolder.canceled = true;
    }
    process.send({ msgtype: 'info', info, sesid: this.sesid });
  }
}
|
||||
|
||||
// Runs one SQL item through the driver's streaming API; the returned promise
// resolves when the driver reports completion through the StreamHandler.
function handleQueryStream(dbhan, driver, queryStreamInfoHolder, sqlItem, sesid = undefined, limitRows = undefined) {
  return new Promise(resolve => {
    const startPosition = sqlItem.trimStart || sqlItem.start;
    const startLine = startPosition ? startPosition.line : startPosition;
    const handler = new StreamHandler(queryStreamInfoHolder, resolve, startLine, sesid, limitRows);
    driver.stream(dbhan, sqlItem.text, handler);
  });
}
|
||||
|
||||
// Decides whether a custom SQL script may run on the given stored connection.
// Drivers supporting read-only sessions enforce read-only at the session
// level, so scripts are always allowed there; otherwise a read-only
// connection blocks custom script execution.
function allowExecuteCustomScript(storedConnection, driver) {
  if (driver.readOnlySessions) {
    return true;
  }
  return !storedConnection.isReadOnly;
}
|
||||
|
||||
// Streaming query execution helpers consumed by session/runner controllers.
module.exports = {
  handleQueryStream,
  QueryStreamTableWriter,
  allowExecuteCustomScript,
};
|
||||
@@ -87,4 +87,5 @@ module.exports = {
|
||||
getHardwareFingerprint,
|
||||
getHardwareFingerprintHash,
|
||||
getPublicHardwareFingerprint,
|
||||
getPublicIpInfo,
|
||||
};
|
||||
|
||||
38
packages/api/src/utility/healthStatus.js
Normal file
38
packages/api/src/utility/healthStatus.js
Normal file
@@ -0,0 +1,38 @@
|
||||
const os = require('os');
|
||||
|
||||
const databaseConnections = require('../controllers/databaseConnections');
|
||||
const serverConnections = require('../controllers/serverConnections');
|
||||
const sessions = require('../controllers/sessions');
|
||||
const runners = require('../controllers/runners');
|
||||
|
||||
// Collects a health snapshot: counts of open connections/sessions/runner
// processes from the controllers plus process and system memory/CPU stats.
async function getHealthStatus() {
  const systemMemory = {
    total: os.totalmem(),
    free: os.freemem(),
  };

  return {
    status: 'ok',
    databaseConnectionCount: databaseConnections.opened.length,
    serverConnectionCount: serverConnections.opened.length,
    sessionCount: sessions.opened.length,
    runProcessCount: runners.opened.length,
    memory: process.memoryUsage(),
    cpuUsage: process.cpuUsage(),
    systemMemory,
  };
}
|
||||
|
||||
// Minimal health response in the format expected by the Sprinx monitoring
// integration: overall status plus the current time (ISO and unix seconds).
async function getHealthStatusSprinx() {
  const now = new Date();
  return {
    overallStatus: 'OK',
    timeStamp: now.toISOString(),
    timeStampUnix: Math.floor(now.getTime() / 1000),
  };
}
|
||||
|
||||
// Health-check endpoints consume these helpers.
module.exports = {
  getHealthStatus,
  getHealthStatusSprinx,
};
|
||||
41
packages/api/src/utility/listZipEntries.js
Normal file
41
packages/api/src/utility/listZipEntries.js
Normal file
@@ -0,0 +1,41 @@
|
||||
const yauzl = require('yauzl');
|
||||
const path = require('path');
|
||||
|
||||
/**
 * Lists the files in a ZIP archive using yauzl,
 * returning an array of { fileName, uncompressedSize } objects.
 *
 * @param {string} zipPath - The path to the ZIP file.
 * @returns {Promise<Array<{fileName: string, uncompressedSize: number}>>}
 */
function listZipEntries(zipPath) {
  return new Promise((resolve, reject) => {
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
      if (err) {
        reject(err);
        return;
      }

      const collected = [];

      // Collect each entry's name and size; we only list, we never open the
      // entry data streams, so readEntry() immediately advances.
      zipfile.on('entry', entry => {
        const { fileName, uncompressedSize } = entry;
        collected.push({ fileName, uncompressedSize });
        zipfile.readEntry();
      });

      // All entries seen - hand the list back
      zipfile.on('end', () => resolve(collected));

      // Surface archive-level errors
      zipfile.on('error', zipErr => reject(zipErr));

      // Kick off reading of the first entry
      zipfile.readEntry();
    });
  });
}
|
||||
|
||||
// Single-function module export.
module.exports = listZipEntries;
|
||||
@@ -17,6 +17,7 @@ const processDisplayName = getNamedArg('--process-display-name');
|
||||
const listenApi = process.argv.includes('--listen-api');
|
||||
const listenApiChild = process.argv.includes('--listen-api-child') || listenApi;
|
||||
const runE2eTests = process.argv.includes('--run-e2e-tests');
|
||||
const encryptionKeyArg = getNamedArg('--encryption-key');
|
||||
|
||||
function getPassArgs() {
|
||||
const res = [];
|
||||
@@ -31,6 +32,9 @@ function getPassArgs() {
|
||||
if (runE2eTests) {
|
||||
res.push('--run-e2e-tests');
|
||||
}
|
||||
if (global['ENCRYPTION_KEY']) {
|
||||
res.push('--encryption-key', global['ENCRYPTION_KEY']);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
@@ -45,4 +49,5 @@ module.exports = {
|
||||
listenApiChild,
|
||||
processDisplayName,
|
||||
runE2eTests,
|
||||
encryptionKeyArg,
|
||||
};
|
||||
|
||||
@@ -57,10 +57,21 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
|
||||
}
|
||||
});
|
||||
subprocess.on('exit', code => {
|
||||
logger.info('SSH forward process exited');
|
||||
logger.info(`SSH forward process exited with code ${code}`);
|
||||
delete sshTunnelCache[tunnelCacheKey];
|
||||
if (!promiseHandled) {
|
||||
reject(new Error('SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'));
|
||||
reject(
|
||||
new Error(
|
||||
'SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'
|
||||
)
|
||||
);
|
||||
}
|
||||
});
|
||||
subprocess.on('error', error => {
|
||||
logger.error(extractErrorLogData(error), 'SSH forward process error');
|
||||
delete sshTunnelCache[tunnelCacheKey];
|
||||
if (!promiseHandled) {
|
||||
reject(error);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -572,6 +572,27 @@ export function changeSetInsertDocuments(
|
||||
};
|
||||
}
|
||||
|
||||
export function createMergedRowsChangeSet(
|
||||
table: TableInfo,
|
||||
updatedRows: any[],
|
||||
insertedRows: any[],
|
||||
mergeKey: string[]
|
||||
): ChangeSet {
|
||||
const res = createChangeSet();
|
||||
res.updates = updatedRows.map(row => ({
|
||||
pureName: table.pureName,
|
||||
schemaName: table.schemaName,
|
||||
fields: _.omit(row, mergeKey),
|
||||
condition: _.pick(row, mergeKey),
|
||||
}));
|
||||
res.inserts = insertedRows.map(row => ({
|
||||
pureName: table.pureName,
|
||||
schemaName: table.schemaName,
|
||||
fields: row,
|
||||
}));
|
||||
return res;
|
||||
}
|
||||
|
||||
export function changeSetContainsChanges(changeSet: ChangeSet) {
|
||||
if (!changeSet) return false;
|
||||
return (
|
||||
|
||||
@@ -1,326 +0,0 @@
|
||||
import {
|
||||
createAsyncWriteStream,
|
||||
extractErrorLogData,
|
||||
getLogger,
|
||||
runCommandOnDriver,
|
||||
runQueryOnDriver,
|
||||
} from 'dbgate-tools';
|
||||
import { DatabaseInfo, EngineDriver, ForeignKeyInfo, TableInfo } from 'dbgate-types';
|
||||
import _pick from 'lodash/pick';
|
||||
import _omit from 'lodash/omit';
|
||||
|
||||
const logger = getLogger('dataDuplicator');
|
||||
|
||||
// One table participating in the duplication job.
export interface DataDuplicatorItem {
  // opens the JSONL source stream with the rows to import
  openStream: () => Promise<ReadableStream>;
  // table (pure) name; matched case-insensitively against the DB model
  name: string;
  // copy: always insert; lookup: only map existing rows by matchColumns;
  // insertMissing: lookup first, insert when no match is found
  operation: 'copy' | 'lookup' | 'insertMissing';
  // columns used to find an existing row for lookup/insertMissing
  matchColumns: string[];
}
|
||||
|
||||
// Behaviour switches for DataDuplicator.run().
export interface DataDuplicatorOptions {
  // roll back the whole import transaction at the end (dry-run mode)
  rollbackAfterFinish?: boolean;
  // skip rows whose mandatory references cannot be remapped instead of throwing
  skipRowsWithUnresolvedRefs?: boolean;
  // set unresolved nullable (weak) references to NULL instead of failing
  setNullForUnresolvedNullableRefs?: boolean;
}
|
||||
|
||||
class DuplicatorReference {
|
||||
constructor(
|
||||
public base: DuplicatorItemHolder,
|
||||
public ref: DuplicatorItemHolder,
|
||||
public isMandatory: boolean,
|
||||
public foreignKey: ForeignKeyInfo
|
||||
) {}
|
||||
|
||||
get columnName() {
|
||||
return this.foreignKey.columns[0].columnName;
|
||||
}
|
||||
}
|
||||
|
||||
class DuplicatorWeakReference {
|
||||
constructor(public base: DuplicatorItemHolder, public ref: TableInfo, public foreignKey: ForeignKeyInfo) {}
|
||||
|
||||
get columnName() {
|
||||
return this.foreignKey.columns[0].columnName;
|
||||
}
|
||||
}
|
||||
|
||||
class DuplicatorItemHolder {
|
||||
references: DuplicatorReference[] = [];
|
||||
backReferences: DuplicatorReference[] = [];
|
||||
// not mandatory references to entities out of the model
|
||||
weakReferences: DuplicatorWeakReference[] = [];
|
||||
table: TableInfo;
|
||||
isPlanned = false;
|
||||
idMap = {};
|
||||
autoColumn: string;
|
||||
refByColumn: { [columnName: string]: DuplicatorReference } = {};
|
||||
isReferenced: boolean;
|
||||
|
||||
get name() {
|
||||
return this.item.name;
|
||||
}
|
||||
|
||||
constructor(public item: DataDuplicatorItem, public duplicator: DataDuplicator) {
|
||||
this.table = duplicator.db.tables.find(x => x.pureName.toUpperCase() == item.name.toUpperCase());
|
||||
this.autoColumn = this.table.columns.find(x => x.autoIncrement)?.columnName;
|
||||
if (
|
||||
this.table.primaryKey?.columns?.length != 1 ||
|
||||
this.table.primaryKey?.columns?.[0]?.columnName != this.autoColumn
|
||||
) {
|
||||
this.autoColumn = null;
|
||||
}
|
||||
}
|
||||
|
||||
initializeReferences() {
|
||||
for (const fk of this.table.foreignKeys) {
|
||||
if (fk.columns?.length != 1) continue;
|
||||
const refHolder = this.duplicator.itemHolders.find(y => y.name.toUpperCase() == fk.refTableName.toUpperCase());
|
||||
const isMandatory = this.table.columns.find(x => x.columnName == fk.columns[0]?.columnName)?.notNull;
|
||||
if (refHolder == null) {
|
||||
if (!isMandatory) {
|
||||
const weakref = new DuplicatorWeakReference(
|
||||
this,
|
||||
this.duplicator.db.tables.find(x => x.pureName == fk.refTableName),
|
||||
fk
|
||||
);
|
||||
this.weakReferences.push(weakref);
|
||||
}
|
||||
} else {
|
||||
const newref = new DuplicatorReference(this, refHolder, isMandatory, fk);
|
||||
this.references.push(newref);
|
||||
this.refByColumn[newref.columnName] = newref;
|
||||
|
||||
refHolder.isReferenced = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
createInsertObject(chunk, weakrefcols: string[]) {
|
||||
const res = _omit(
|
||||
_pick(
|
||||
chunk,
|
||||
this.table.columns.map(x => x.columnName)
|
||||
),
|
||||
[this.autoColumn, ...this.backReferences.map(x => x.columnName), ...weakrefcols]
|
||||
);
|
||||
|
||||
for (const key in res) {
|
||||
const ref = this.refByColumn[key];
|
||||
if (ref) {
|
||||
// remap id
|
||||
res[key] = ref.ref.idMap[res[key]];
|
||||
if (ref.isMandatory && res[key] == null) {
|
||||
// mandatory refertence not matched
|
||||
if (this.duplicator.options.skipRowsWithUnresolvedRefs) {
|
||||
return null;
|
||||
}
|
||||
throw new Error(`Unresolved reference, base=${ref.base.name}, ref=${ref.ref.name}, ${key}=${chunk[key]}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
// returns list of columns that are weak references and are not resolved
|
||||
async getMissingWeakRefsForRow(row): Promise<string[]> {
|
||||
if (!this.duplicator.options.setNullForUnresolvedNullableRefs || !this.weakReferences?.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const qres = await runQueryOnDriver(this.duplicator.pool, this.duplicator.driver, dmp => {
|
||||
dmp.put('^select ');
|
||||
dmp.putCollection(',', this.weakReferences, weakref => {
|
||||
dmp.put(
|
||||
'(^case ^when ^exists (^select * ^from %f where %i = %v) ^then 1 ^else 0 ^end) as %i',
|
||||
weakref.ref,
|
||||
weakref.foreignKey.columns[0].refColumnName,
|
||||
row[weakref.foreignKey.columns[0].columnName],
|
||||
weakref.foreignKey.columns[0].columnName
|
||||
);
|
||||
});
|
||||
if (this.duplicator.driver.dialect.requireFromDual) {
|
||||
dmp.put(' ^from ^dual');
|
||||
}
|
||||
});
|
||||
const qrow = qres.rows[0];
|
||||
return this.weakReferences.filter(x => qrow[x.columnName] == 0).map(x => x.columnName);
|
||||
}
|
||||
|
||||
async runImport() {
|
||||
const readStream = await this.item.openStream();
|
||||
const driver = this.duplicator.driver;
|
||||
const pool = this.duplicator.pool;
|
||||
let inserted = 0;
|
||||
let mapped = 0;
|
||||
let missing = 0;
|
||||
let skipped = 0;
|
||||
let lastLogged = new Date();
|
||||
|
||||
const existingWeakRefs = {};
|
||||
|
||||
const writeStream = createAsyncWriteStream(this.duplicator.stream, {
|
||||
processItem: async chunk => {
|
||||
if (chunk.__isStreamHeader) {
|
||||
return;
|
||||
}
|
||||
|
||||
const doCopy = async () => {
|
||||
// console.log('chunk', this.name, JSON.stringify(chunk));
|
||||
const weakrefcols = await this.getMissingWeakRefsForRow(chunk);
|
||||
const insertedObj = this.createInsertObject(chunk, weakrefcols);
|
||||
// console.log('insertedObj', this.name, JSON.stringify(insertedObj));
|
||||
if (insertedObj == null) {
|
||||
skipped += 1;
|
||||
return;
|
||||
}
|
||||
let res = await runQueryOnDriver(pool, driver, dmp => {
|
||||
dmp.put(
|
||||
'^insert ^into %f (%,i) ^values (%,v)',
|
||||
this.table,
|
||||
Object.keys(insertedObj),
|
||||
Object.values(insertedObj)
|
||||
);
|
||||
|
||||
if (
|
||||
this.autoColumn &&
|
||||
this.isReferenced &&
|
||||
!this.duplicator.driver.dialect.requireStandaloneSelectForScopeIdentity
|
||||
) {
|
||||
dmp.selectScopeIdentity(this.table);
|
||||
}
|
||||
});
|
||||
inserted += 1;
|
||||
if (this.autoColumn && this.isReferenced) {
|
||||
if (this.duplicator.driver.dialect.requireStandaloneSelectForScopeIdentity) {
|
||||
res = await runQueryOnDriver(pool, driver, dmp => dmp.selectScopeIdentity(this.table));
|
||||
}
|
||||
// console.log('IDRES', JSON.stringify(res));
|
||||
// console.log('*********** ENTRIES OF', res?.rows?.[0]);
|
||||
const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
|
||||
if (resId != null) {
|
||||
this.idMap[chunk[this.autoColumn]] = resId;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
switch (this.item.operation) {
|
||||
case 'copy': {
|
||||
await doCopy();
|
||||
break;
|
||||
}
|
||||
case 'insertMissing':
|
||||
case 'lookup': {
|
||||
const res = await runQueryOnDriver(pool, driver, dmp =>
|
||||
dmp.put(
|
||||
'^select %i ^from %f ^where %i = %v',
|
||||
this.autoColumn,
|
||||
this.table,
|
||||
this.item.matchColumns[0],
|
||||
chunk[this.item.matchColumns[0]]
|
||||
)
|
||||
);
|
||||
const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
|
||||
if (resId != null) {
|
||||
mapped += 1;
|
||||
this.idMap[chunk[this.autoColumn]] = resId;
|
||||
} else if (this.item.operation == 'insertMissing') {
|
||||
await doCopy();
|
||||
} else {
|
||||
missing += 1;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (new Date().getTime() - lastLogged.getTime() > 5000) {
|
||||
logger.info(
|
||||
`Duplicating ${this.item.name} in progress, inserted ${inserted} rows, mapped ${mapped} rows, missing ${missing} rows, skipped ${skipped} rows`
|
||||
);
|
||||
lastLogged = new Date();
|
||||
}
|
||||
// this.idMap[oldId] = newId;
|
||||
},
|
||||
});
|
||||
|
||||
await this.duplicator.copyStream(readStream, writeStream);
|
||||
|
||||
// await this.duplicator.driver.writeQueryStream(this.duplicator.pool, {
|
||||
// mapResultId: (oldId, newId) => {
|
||||
// this.idMap[oldId] = newId;
|
||||
// },
|
||||
// });
|
||||
|
||||
return { inserted, mapped, missing, skipped };
|
||||
}
|
||||
}
|
||||
|
||||
// Orchestrates a duplication job: builds per-table holders, orders them so
// mandatory references always point to already-imported tables, then runs
// every import inside a single transaction.
export class DataDuplicator {
  itemHolders: DuplicatorItemHolder[];
  // tables in import order (filled by createPlan)
  itemPlan: DuplicatorItemHolder[] = [];

  constructor(
    public pool: any,
    public driver: EngineDriver,
    public db: DatabaseInfo,
    public items: DataDuplicatorItem[],
    public stream,
    public copyStream: (input, output) => Promise<void>,
    public options: DataDuplicatorOptions = {}
  ) {
    this.itemHolders = items.map(x => new DuplicatorItemHolder(x, this));
    this.itemHolders.forEach(x => x.initializeReferences());
  }

  // Picks the next table to plan: first a table whose references are all
  // planned; failing that, one whose unplanned references are all optional
  // (those become backReferences). Throws on a cycle of mandatory FKs.
  findItemToPlan(): DuplicatorItemHolder {
    for (const item of this.itemHolders) {
      if (item.isPlanned) continue;
      if (item.references.every(x => x.ref.isPlanned)) {
        return item;
      }
    }
    for (const item of this.itemHolders) {
      if (item.isPlanned) continue;
      if (item.references.every(x => x.ref.isPlanned || !x.isMandatory)) {
        const backReferences = item.references.filter(x => !x.ref.isPlanned);
        item.backReferences = backReferences;
        return item;
      }
    }
    throw new Error('Cycle in mandatory references');
  }

  // Orders all tables into itemPlan using findItemToPlan.
  createPlan() {
    while (this.itemPlan.length < this.itemHolders.length) {
      const item = this.findItemToPlan();
      item.isPlanned = true;
      this.itemPlan.push(item);
    }
  }

  // Runs all planned imports in one transaction; rolls back on error or when
  // options.rollbackAfterFinish is set (dry run), commits otherwise.
  async run() {
    this.createPlan();

    await runCommandOnDriver(this.pool, this.driver, dmp => dmp.beginTransaction());
    try {
      for (const item of this.itemPlan) {
        const stats = await item.runImport();
        logger.info(
          `Duplicated ${item.name}, inserted ${stats.inserted} rows, mapped ${stats.mapped} rows, missing ${stats.missing} rows, skipped ${stats.skipped} rows`
        );
      }
    } catch (err) {
      logger.error(extractErrorLogData(err), `Failed duplicator job, rollbacking. ${err.message}`);
      await runCommandOnDriver(this.pool, this.driver, dmp => dmp.rollbackTransaction());
      return;
    }
    if (this.options.rollbackAfterFinish) {
      logger.info('Rollbacking transaction, nothing was changed');
      await runCommandOnDriver(this.pool, this.driver, dmp => dmp.rollbackTransaction());
    } else {
      logger.info('Committing duplicator transaction');
      await runCommandOnDriver(this.pool, this.driver, dmp => dmp.commitTransaction());
    }
  }
}
|
||||
510
packages/datalib/src/DataReplicator.ts
Normal file
510
packages/datalib/src/DataReplicator.ts
Normal file
@@ -0,0 +1,510 @@
|
||||
import {
|
||||
createAsyncWriteStream,
|
||||
extractErrorLogData,
|
||||
getLogger,
|
||||
isTypeNumber,
|
||||
runCommandOnDriver,
|
||||
runQueryOnDriver,
|
||||
SqlDumper,
|
||||
} from 'dbgate-tools';
|
||||
import { DatabaseInfo, EngineDriver, ForeignKeyInfo, NamedObjectInfo, QueryResult, TableInfo } from 'dbgate-types';
|
||||
import _pick from 'lodash/pick';
|
||||
import _omit from 'lodash/omit';
|
||||
import stableStringify from 'json-stable-stringify';
|
||||
|
||||
const logger = getLogger('dataReplicator');
|
||||
|
||||
// One table participating in the replication job.
export interface DataReplicatorItem {
  // opens the JSONL source stream with the rows to replicate
  openStream: () => Promise<ReadableStream>;
  // table (pure) name; matched case-insensitively against the DB model
  name: string;
  // per-row predicates - presumably decide whether to look up, insert or
  // update a given row; TODO confirm against the (not visible) replicator core
  findExisting: (row: any) => boolean;
  createNew: (row: any) => boolean;
  updateExisting: (row: any) => boolean;
  // delete target rows not present in the source stream
  deleteMissing: boolean;
  // NOTE(review): presumably restricts which rows deleteMissing may touch -
  // verify against the consuming code
  deleteRestrictionColumns: string[];
  // columns used to match source rows against existing target rows
  matchColumns: string[];
}
|
||||
|
||||
// Behaviour switches for the DataReplicator.
export interface DataReplicatorOptions {
  // roll back the whole transaction at the end (dry-run mode)
  rollbackAfterFinish?: boolean;
  // skip rows whose mandatory references cannot be remapped instead of throwing
  skipRowsWithUnresolvedRefs?: boolean;
  // set unresolved nullable (weak) references to NULL instead of failing
  setNullForUnresolvedNullableRefs?: boolean;
  // generate a SQL script instead of executing against the database
  generateSqlScript?: boolean;
  // identifier of the run - presumably used for logging/temp artifacts; confirm
  runid?: string;
}
|
||||
|
||||
class ReplicatorReference {
|
||||
constructor(
|
||||
public base: ReplicatorItemHolder,
|
||||
public ref: ReplicatorItemHolder,
|
||||
public isMandatory: boolean,
|
||||
public foreignKey: ForeignKeyInfo
|
||||
) {}
|
||||
|
||||
get columnName() {
|
||||
return this.foreignKey.columns[0].columnName;
|
||||
}
|
||||
}
|
||||
|
||||
class ReplicatorWeakReference {
|
||||
constructor(public base: ReplicatorItemHolder, public ref: TableInfo, public foreignKey: ForeignKeyInfo) {}
|
||||
|
||||
get columnName() {
|
||||
return this.foreignKey.columns[0].columnName;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Holds one replicated table (DataReplicatorItem) together with its resolved
 * table metadata and the reference graph to the other items of the model.
 * Performs the actual per-table import in runImport().
 */
class ReplicatorItemHolder {
  // single-column FK references to tables that are part of the replication model
  references: ReplicatorReference[] = [];
  // references excluded from insert because their target is planned later
  // (assigned by DataReplicator.findItemToPlan when breaking reference cycles)
  backReferences: ReplicatorReference[] = [];
  // not mandatory references to entities out of the model
  weakReferences: ReplicatorWeakReference[] = [];
  // metadata of the target table, matched by case-insensitive pure name
  table: TableInfo;
  // true once this item was added to DataReplicator.itemPlan
  isPlanned = false;
  // source id -> target id mapping, filled while importing rows
  idMap = {};
  // name of the single-column identity/PK column used for id mapping (or null)
  autoColumn: string;
  // true when identity values are generated on the client instead of by the DB
  isManualAutoColumn: boolean;
  // model references indexed by their FK column name, for id remapping
  refByColumn: { [columnName: string]: ReplicatorReference } = {};
  // true when another item of the model references this table
  isReferenced: boolean;

  /** Logical name of the item (the table name given in the replicator input). */
  get name() {
    return this.item.name;
  }

  constructor(public item: DataReplicatorItem, public replicator: DataReplicator) {
    this.table = replicator.db.tables.find(x => x.pureName.toUpperCase() == item.name.toUpperCase());
    this.autoColumn = this.table.columns.find(x => x.autoIncrement)?.columnName;
    // accept the auto-increment column only when it is exactly the single PK column
    if (
      this.table.primaryKey?.columns?.length != 1 ||
      this.table.primaryKey?.columns?.[0]?.columnName != this.autoColumn
    ) {
      this.autoColumn = null;
    }
    // fallback: numeric single-column PK without auto-increment -> ids are generated manually
    if (!this.autoColumn && this.table.primaryKey?.columns?.length == 1) {
      const name = this.table.primaryKey.columns[0].columnName;
      const column = this.table.columns.find(x => x.columnName == name);
      if (isTypeNumber(column?.dataType)) {
        this.autoColumn = name;
        this.isManualAutoColumn = true;
      }
    }
    // when only generating an SQL script, scope identity cannot be queried back,
    // so identity values must always be generated manually
    if (this.autoColumn && this.replicator.options.generateSqlScript) {
      this.isManualAutoColumn = true;
    }
  }

  /**
   * Scans the table's single-column foreign keys and classifies each one as a
   * model reference (target table is a replicated item) or, when nullable and
   * pointing outside the model, a weak reference. Multi-column FKs are skipped.
   */
  initializeReferences() {
    for (const fk of this.table.foreignKeys) {
      if (fk.columns?.length != 1) continue;
      const refHolder = this.replicator.itemHolders.find(y => y.name.toUpperCase() == fk.refTableName.toUpperCase());
      const isMandatory = this.table.columns.find(x => x.columnName == fk.columns[0]?.columnName)?.notNull;
      if (refHolder == null) {
        // target table not replicated; only nullable FKs become weak references
        if (!isMandatory) {
          const weakref = new ReplicatorWeakReference(
            this,
            this.replicator.db.tables.find(x => x.pureName == fk.refTableName),
            fk
          );
          this.weakReferences.push(weakref);
        }
      } else {
        const newref = new ReplicatorReference(this, refHolder, isMandatory, fk);
        this.references.push(newref);
        this.refByColumn[newref.columnName] = newref;

        refHolder.isReferenced = true;
      }
    }
  }

  /**
   * Builds the column/value object for INSERT from a source row: keeps only
   * real table columns, drops the identity column, back-reference columns and
   * the given unresolved weak-reference columns, then remaps FK values through
   * the referenced item's idMap.
   * Returns null when a mandatory reference cannot be resolved and
   * options.skipRowsWithUnresolvedRefs is set; throws otherwise.
   */
  createInsertObject(chunk, weakrefcols?: string[]) {
    const res = _omit(
      _pick(
        chunk,
        this.table.columns.map(x => x.columnName)
      ),
      [this.autoColumn, ...this.backReferences.map(x => x.columnName), ...(weakrefcols ? weakrefcols : [])]
    );

    for (const key in res) {
      const ref = this.refByColumn[key];
      if (ref) {
        // remap id
        const oldId = res[key];
        res[key] = ref.ref.idMap[oldId];
        if (ref.isMandatory && res[key] == null) {
          // mandatory reference not matched
          if (this.replicator.options.skipRowsWithUnresolvedRefs) {
            return null;
          }
          throw new Error(`Unresolved reference, base=${ref.base.name}, ref=${ref.ref.name}, ${key}=${chunk[key]}`);
        }
      }
    }

    return res;
  }

  /**
   * Builds the column/value object for UPDATE: like createInsertObject, but
   * drops ALL reference columns (no id remapping is performed on update).
   */
  createUpdateObject(chunk) {
    const res = _omit(
      _pick(
        chunk,
        this.table.columns.map(x => x.columnName)
      ),
      [this.autoColumn, ...this.backReferences.map(x => x.columnName), ...this.references.map(x => x.columnName)]
    );

    return res;
  }

  // returns list of columns that are weak references and are not resolved
  async getMissingWeakRefsForRow(row): Promise<string[]> {
    if (!this.replicator.options.setNullForUnresolvedNullableRefs || !this.weakReferences?.length) {
      return [];
    }

    // single SELECT producing one EXISTS flag (1/0) per weak-reference column
    const qres = await runQueryOnDriver(this.replicator.pool, this.replicator.driver, dmp => {
      dmp.put('^select ');
      dmp.putCollection(',', this.weakReferences, weakref => {
        dmp.put(
          '(^case ^when ^exists (^select * ^from %f where %i = %v) ^then 1 ^else 0 ^end) as %i',
          weakref.ref,
          weakref.foreignKey.columns[0].refColumnName,
          row[weakref.foreignKey.columns[0].columnName],
          weakref.foreignKey.columns[0].columnName
        );
      });
      if (this.replicator.driver.dialect.requireFromDual) {
        dmp.put(' ^from ^dual');
      }
    });
    const qrow = qres.rows[0];
    return this.weakReferences.filter(x => qrow[x.columnName] == 0).map(x => x.columnName);
  }

  /**
   * Streams the item's source rows into the target table. For each row it may
   * find an existing record (by matchColumns), update it, or insert a new one,
   * maintaining idMap for id remapping; finally, when deleteMissing is set,
   * deletes target rows that were not present in the source.
   * Returns counters: { inserted, mapped, missing, skipped, updated, deleted }.
   */
  async runImport() {
    const readStream = await this.item.openStream();
    const driver = this.replicator.driver;
    const pool = this.replicator.pool;
    let inserted = 0;
    let mapped = 0;
    let updated = 0;
    let deleted = 0;
    let missing = 0;
    let skipped = 0;
    let lastLogged = new Date();

    const { deleteMissing, deleteRestrictionColumns } = this.item;
    // restriction rows (keyed by stable JSON) limiting the scope of the final delete
    const deleteRestrictions = {};
    // match-column key rows of all processed source rows; these survive the delete
    const usedKeyRows = {};

    const writeStream = createAsyncWriteStream(this.replicator.stream, {
      processItem: async chunk => {
        if (chunk.__isStreamHeader) {
          return;
        }

        // look up an existing target row by matchColumns; remembers its id in idMap
        const doFind = async () => {
          // NOTE(review): createInsertObject may return null when
          // skipRowsWithUnresolvedRefs is set - confirm insertedObj[x] below cannot hit that case
          let insertedObj = this.createInsertObject(chunk);

          const res = await runQueryOnDriver(pool, driver, dmp => {
            dmp.put('^select %i ^from %f ^where ', this.autoColumn, this.table);
            dmp.putCollection(' and ', this.item.matchColumns, x => {
              dmp.put('%i = %v', x, insertedObj[x]);
            });
          });
          // first column of the first row, whatever alias the driver returned
          const resId = Object.entries(res?.rows?.[0] || {})?.[0]?.[1];
          if (resId != null) {
            mapped += 1;
            this.idMap[chunk[this.autoColumn]] = resId;
          }
          return resId;
        };

        // update the existing record; no-op (counted as skipped) when nothing remains to update
        const doUpdate = async recordId => {
          const updateObj = this.createUpdateObject(chunk);
          if (Object.keys(updateObj).length == 0) {
            skipped += 1;
            return;
          }

          await this.replicator.runDumperCommand(dmp => {
            // NOTE(review): '^ set ' (space after ^) differs from the usual '^select'
            // style used elsewhere - confirm against the SqlDumper format syntax
            dmp.put('^update %f ^ set ', this.table);
            dmp.putCollection(',', Object.keys(updateObj), x => {
              dmp.put('%i = %v', x, updateObj[x]);
            });
            dmp.put(' ^where %i = %v', this.autoColumn, recordId);
            dmp.endCommand();
          });
          updated += 1;
        };

        // insert a new record; returns the new id when this table is referenced by others
        const doInsert = async () => {
          // console.log('chunk', this.name, JSON.stringify(chunk));
          const weakrefcols = await this.getMissingWeakRefsForRow(chunk);
          let insertedObj = this.createInsertObject(chunk, weakrefcols);
          // console.log('insertedObj', this.name, JSON.stringify(insertedObj));
          if (insertedObj == null) {
            skipped += 1;
            return;
          }

          // client-generated identity: allocate the next id and record the mapping up front
          if (this.isManualAutoColumn) {
            const maxId = await this.replicator.generateIdentityValue(this.autoColumn, this.table);
            insertedObj = {
              ...insertedObj,
              [this.autoColumn]: maxId,
            };
            this.idMap[chunk[this.autoColumn]] = maxId;
          }

          let res = await this.replicator.runDumperQuery(dmp => {
            dmp.put(
              '^insert ^into %f (%,i) ^values (%,v)',
              this.table,
              Object.keys(insertedObj),
              Object.values(insertedObj)
            );
            dmp.endCommand();

            // append the scope-identity SELECT to the same batch when the dialect allows it
            if (
              this.autoColumn &&
              this.isReferenced &&
              !this.replicator.driver.dialect.requireStandaloneSelectForScopeIdentity &&
              !this.isManualAutoColumn
            ) {
              dmp.selectScopeIdentity(this.table);
            }
          });
          inserted += 1;
          if (this.autoColumn && this.isReferenced && !this.isManualAutoColumn) {
            if (this.replicator.driver.dialect.requireStandaloneSelectForScopeIdentity) {
              res = await runQueryOnDriver(pool, driver, dmp => dmp.selectScopeIdentity(this.table));
            }
            // console.log('IDRES', JSON.stringify(res));
            // console.log('*********** ENTRIES OF', res?.rows?.[0]);
            const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
            if (resId != null) {
              this.idMap[chunk[this.autoColumn]] = resId;
            }
            return resId;
          }
        };

        // remember this row's keys so the final delete keeps it (and its restriction scope)
        const doMarkDelete = () => {
          const insertedObj = this.createInsertObject(chunk);
          if (deleteRestrictionColumns?.length > 0) {
            const restriction = _pick(insertedObj, deleteRestrictionColumns);
            const key = stableStringify(restriction);
            deleteRestrictions[key] = restriction;
          }

          const usedKey = _pick(insertedObj, this.item.matchColumns);
          usedKeyRows[stableStringify(usedKey)] = usedKey;
        };

        // per-row behavior flags supplied by the item
        const findExisting = this.item.findExisting(chunk);
        const updateExisting = this.item.updateExisting(chunk);
        const createNew = this.item.createNew(chunk);

        if (deleteMissing) {
          doMarkDelete();
        }

        let recordId = null;
        if (findExisting) {
          recordId = await doFind();
        }

        if (updateExisting && recordId != null) {
          await doUpdate(recordId);
        }

        if (createNew && recordId == null) {
          recordId = await doInsert();
        }

        // row was expected to exist but was neither found nor created
        if (recordId == null && findExisting) {
          missing += 1;
        }

        // progress log at most once per 5 seconds
        if (new Date().getTime() - lastLogged.getTime() > 5000) {
          logger.info(
            `Replicating ${this.item.name} in progress, inserted ${inserted} rows, mapped ${mapped} rows, missing ${missing} rows, skipped ${skipped} rows, updated ${updated} rows`
          );
          lastLogged = new Date();
        }
        // this.idMap[oldId] = newId;
      },
    });

    // dumps OR/AND-joined equality conditions for a list of key-value rows;
    // positive=true matches the rows, positive=false excludes them
    const dumpConditionArray = (dmp: SqlDumper, array: any[], positive: boolean) => {
      dmp.putCollection(positive ? ' or ' : ' and ', array, x => {
        dmp.put('(');
        dmp.putCollection(positive ? ' and ' : ' or ', Object.keys(x), y => {
          dmp.put(positive ? '%i = %v' : 'not (%i = %v)', y, x[y]);
        });
        dmp.put(')');
      });
    };
    // WHERE clause of the delete: inside the restriction scope, but excluding processed keys
    const dumpDeleteCondition = (dmp: SqlDumper) => {
      const deleteRestrictionValues = Object.values(deleteRestrictions);
      const usedKeyRowsValues = Object.values(usedKeyRows);

      // no restrictions and no keys -> no WHERE at all
      if (deleteRestrictionValues.length == 0 && usedKeyRowsValues.length == 0) {
        return;
      }

      dmp.put(' ^where ');
      if (deleteRestrictionColumns?.length > 0) {
        dmp.put('(');
        dumpConditionArray(dmp, deleteRestrictionValues, true);
        dmp.put(')');
        if (usedKeyRowsValues.length > 0) {
          dmp.put(' ^and ');
        }
      }
      dumpConditionArray(dmp, Object.values(usedKeyRows), false);
    };
    // delete target rows missing from the source; counts first so the DELETE
    // (and the deleted counter) is skipped when nothing matches
    const doDelete = async () => {
      const countRes = await runQueryOnDriver(pool, driver, dmp => {
        dmp.put('^select count(*) as ~cnt ^from %f', this.table);
        dumpDeleteCondition(dmp);
        dmp.endCommand();
      });
      const count = parseInt(countRes.rows[0].cnt);
      if (count > 0) {
        await this.replicator.runDumperCommand(dmp => {
          dmp.put('^delete ^from %f', this.table);
          dumpDeleteCondition(dmp);
          dmp.endCommand();
        });
        deleted += count;
      }
    };

    await this.replicator.copyStream(readStream, writeStream, {});

    if (deleteMissing) {
      await doDelete();
    }

    // await this.replicator.driver.writeQueryStream(this.replicator.pool, {
    //   mapResultId: (oldId, newId) => {
    //     this.idMap[oldId] = newId;
    //   },
    // });

    return { inserted, mapped, missing, skipped, updated, deleted };
  }
}
|
||||
|
||||
/**
 * Replicates (imports) a set of related tables into a target database.
 * Items are ordered so that referenced tables are imported first (with cycle
 * breaking on nullable references), the whole job runs inside a single
 * transaction, and with options.generateSqlScript the SQL is collected into
 * `result` instead of being executed.
 */
export class DataReplicator {
  // one holder per input item, same order as `items`
  itemHolders: ReplicatorItemHolder[];
  // itemHolders in dependency order (filled by createPlan)
  itemPlan: ReplicatorItemHolder[] = [];
  // generated SQL script (only meaningful with options.generateSqlScript)
  result: string = '';
  // dumper collecting the script in generateSqlScript mode
  dumper: SqlDumper;
  // next client-generated identity value per 'schema.table' key
  identityValues: { [fullTableName: string]: number } = {};

  constructor(
    public pool: any,
    public driver: EngineDriver,
    public db: DatabaseInfo,
    public items: DataReplicatorItem[],
    public stream,
    public copyStream: (input, output, options) => Promise<void>,
    public options: DataReplicatorOptions = {}
  ) {
    this.itemHolders = items.map(x => new ReplicatorItemHolder(x, this));
    this.itemHolders.forEach(x => x.initializeReferences());
    // @ts-ignore
    this.dumper = driver.createDumper();
  }

  /**
   * Picks the next unplanned item: preferably one whose references are all
   * planned; otherwise one whose unplanned references are all optional (those
   * become backReferences, i.e. their columns are omitted on insert).
   * Throws when only items with cyclic mandatory references remain.
   */
  findItemToPlan(): ReplicatorItemHolder {
    for (const item of this.itemHolders) {
      if (item.isPlanned) continue;
      if (item.references.every(x => x.ref.isPlanned)) {
        return item;
      }
    }
    for (const item of this.itemHolders) {
      if (item.isPlanned) continue;
      if (item.references.every(x => x.ref.isPlanned || !x.isMandatory)) {
        const backReferences = item.references.filter(x => !x.ref.isPlanned);
        item.backReferences = backReferences;
        return item;
      }
    }
    throw new Error('Cycle in mandatory references');
  }

  /** Orders all items into itemPlan so that referenced tables come first. */
  createPlan() {
    while (this.itemPlan.length < this.itemHolders.length) {
      const item = this.findItemToPlan();
      item.isPlanned = true;
      this.itemPlan.push(item);
    }
  }

  /** Runs a command on the target DB, or appends it to the script dumper. */
  async runDumperCommand(cmd: (dmp: SqlDumper) => void | string): Promise<void> {
    if (this.options.generateSqlScript) {
      cmd(this.dumper);
    } else {
      await runCommandOnDriver(this.pool, this.driver, cmd);
    }
  }

  /**
   * Runs a query on the target DB; in script mode the query is only dumped
   * and an empty result set is returned.
   */
  async runDumperQuery(cmd: (dmp: SqlDumper) => void | string): Promise<QueryResult> {
    if (this.options.generateSqlScript) {
      cmd(this.dumper);
      return {
        rows: [],
      };
    } else {
      return await runQueryOnDriver(this.pool, this.driver, cmd);
    }
  }

  /**
   * Returns the next free identity value for the given column, seeded from
   * SELECT MAX(column) on first use and incremented locally afterwards.
   */
  async generateIdentityValue(column: string, table: NamedObjectInfo): Promise<number> {
    const tableKey = `${table.schemaName}.${table.pureName}`;
    if (!(tableKey in this.identityValues)) {
      const max = await runQueryOnDriver(this.pool, this.driver, dmp => {
        dmp.put('^select max(%i) as ~maxid ^from %f', column, table);
      });
      // NOTE(review): assumes the driver returns the alias as lowercase 'maxid' - verify per engine
      const maxId = Math.max(max.rows[0]['maxid'] ?? 0, 0) + 1;
      this.identityValues[tableKey] = maxId;
      return maxId;
    }

    this.identityValues[tableKey] += 1;
    return this.identityValues[tableKey];
  }

  /**
   * Executes the whole replication: plans items, imports each one inside a
   * single transaction, then commits - or rolls back on error / when
   * options.rollbackAfterFinish is set. Any generated script ends up in `result`.
   */
  async run() {
    this.createPlan();

    await this.runDumperCommand(dmp => dmp.beginTransaction());
    try {
      for (const item of this.itemPlan) {
        const stats = await item.runImport();
        logger.info(
          `Replicated ${item.name}, inserted ${stats.inserted} rows, mapped ${stats.mapped} rows, missing ${stats.missing} rows, skipped ${stats.skipped} rows, updated ${stats.updated} rows, deleted ${stats.deleted} rows`
        );
      }
    } catch (err) {
      logger.error(extractErrorLogData(err), `Failed replicator job, rollbacking. ${err.message}`);
      await this.runDumperCommand(dmp => dmp.rollbackTransaction());
      return;
    }
    if (this.options.rollbackAfterFinish) {
      logger.info('Rollbacking transaction, nothing was changed');
      await this.runDumperCommand(dmp => dmp.rollbackTransaction());
    } else {
      logger.info('Committing replicator transaction');
      await this.runDumperCommand(dmp => dmp.commitTransaction());
    }

    this.result = this.dumper.s;
  }
}
|
||||
@@ -29,8 +29,8 @@ export interface GridConfig extends GridConfigColumns {
|
||||
isFormView?: boolean;
|
||||
formViewRecordNumber?: number;
|
||||
formFilterColumns: string[];
|
||||
formColumnFilterText?: string;
|
||||
multiColumnFilter?: string;
|
||||
searchInColumns?: string;
|
||||
}
|
||||
|
||||
export interface GridCache {
|
||||
|
||||
@@ -196,9 +196,24 @@ export abstract class GridDisplay {
|
||||
}));
|
||||
}
|
||||
|
||||
/** Stores the column-search text into the grid configuration. */
setSearchInColumns(searchInColumns: string) {
  this.setConfig(currentConfig => ({ ...currentConfig, searchInColumns }));
}
|
||||
|
||||
/**
 * Indexes (into allColumns) of all columns that should be hidden: columns the
 * user hid explicitly (config.hiddenColumns, matched by uniqueName) plus,
 * when a column-search text is set, every column whose name does not match it.
 * Result is deduplicated and sorted ascending.
 *
 * Fix: a stale unconditional `return` on the first line made the
 * searchInColumns filtering below it unreachable dead code; removed it so the
 * filtering actually applies.
 */
get hiddenColumnIndexes() {
  const res = (this.config.hiddenColumns || []).map(x => _.findIndex(this.allColumns, y => y.uniqueName == x));
  if (this.config.searchInColumns) {
    for (let i = 0; i < this.allColumns.length; i++) {
      // hide columns not matching the search text
      if (!filterName(this.config.searchInColumns, this.allColumns[i].columnName)) {
        res.push(i);
      }
    }
  }
  return _.sortBy(_.uniq(res));
}
|
||||
|
||||
isColumnChecked(column: DisplayColumn) {
|
||||
|
||||
@@ -18,7 +18,7 @@ export * from './processPerspectiveDefaultColunns';
|
||||
export * from './PerspectiveDataPattern';
|
||||
export * from './PerspectiveDataLoader';
|
||||
export * from './perspectiveTools';
|
||||
export * from './DataDuplicator';
|
||||
export * from './DataReplicator';
|
||||
export * from './FreeTableGridDisplay';
|
||||
export * from './FreeTableModel';
|
||||
export * from './CustomGridDisplay';
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
import { arrayToHexString, isTypeDateTime } from 'dbgate-tools';
|
||||
import { arrayToHexString, evalFilterBehaviour, isTypeDateTime } from 'dbgate-tools';
|
||||
import { format, toDate } from 'date-fns';
|
||||
import _isString from 'lodash/isString';
|
||||
import _cloneDeepWith from 'lodash/cloneDeepWith';
|
||||
import { Condition, Expression } from 'dbgate-sqltree';
|
||||
import { parseFilter } from './parseFilter';
|
||||
|
||||
export type FilterMultipleValuesMode = 'is' | 'is_not' | 'contains' | 'begins' | 'ends';
|
||||
|
||||
@@ -17,6 +20,7 @@ export function getFilterValueExpression(value, dataType?) {
|
||||
if (value === true) return 'TRUE';
|
||||
if (value === false) return 'FALSE';
|
||||
if (value.$oid) return `ObjectId("${value.$oid}")`;
|
||||
if (value.$bigint) return value.$bigint;
|
||||
if (value.type == 'Buffer' && Array.isArray(value.data)) {
|
||||
return '0x' + arrayToHexString(value.data);
|
||||
}
|
||||
@@ -61,3 +65,29 @@ export function createMultiLineFilter(mode: FilterMultipleValuesMode, text: stri
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
export function compileCompoudEvalCondition(filters: { [column: string]: string }): Condition {
|
||||
if (!filters) return null;
|
||||
const conditions = [];
|
||||
for (const name in filters) {
|
||||
try {
|
||||
const condition = parseFilter(filters[name], evalFilterBehaviour);
|
||||
const replaced = _cloneDeepWith(condition, (expr: Expression) => {
|
||||
if (expr.exprType == 'placeholder')
|
||||
return {
|
||||
exprType: 'column',
|
||||
columnName: name,
|
||||
};
|
||||
});
|
||||
conditions.push(replaced);
|
||||
} catch (err) {
|
||||
// filter parse error - ignore filter
|
||||
}
|
||||
}
|
||||
|
||||
if (conditions.length == 0) return null;
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -2,14 +2,18 @@ import P from 'parsimmon';
|
||||
import moment from 'moment';
|
||||
import { Condition } from 'dbgate-sqltree';
|
||||
import { interpretEscapes, token, word, whitespace } from './common';
|
||||
import { hexStringToArray } from 'dbgate-tools';
|
||||
import { hexStringToArray, parseNumberSafe } from 'dbgate-tools';
|
||||
import { FilterBehaviour, TransformType } from 'dbgate-types';
|
||||
|
||||
const binaryCondition =
|
||||
(operator, numberDualTesting = false) =>
|
||||
value => {
|
||||
const numValue = parseFloat(value);
|
||||
if (numberDualTesting && !isNaN(numValue)) {
|
||||
const numValue = parseNumberSafe(value);
|
||||
if (
|
||||
numberDualTesting &&
|
||||
// @ts-ignore
|
||||
!isNaN(numValue)
|
||||
) {
|
||||
return {
|
||||
conditionType: 'or',
|
||||
conditions: [
|
||||
@@ -52,6 +56,18 @@ const binaryCondition =
|
||||
};
|
||||
};
|
||||
|
||||
const simpleEqualCondition = () => value => ({
|
||||
conditionType: 'binary',
|
||||
operator: '=',
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value,
|
||||
},
|
||||
});
|
||||
|
||||
const likeCondition = (conditionType, likeString) => value => ({
|
||||
conditionType,
|
||||
left: {
|
||||
@@ -333,21 +349,23 @@ const createParser = (filterBehaviour: FilterBehaviour) => {
|
||||
|
||||
string1Num: () =>
|
||||
token(P.regexp(/"-?(0|[1-9][0-9]*)([.][0-9]+)?([eE][+-]?[0-9]+)?"/, 1))
|
||||
.map(Number)
|
||||
.map(parseNumberSafe)
|
||||
.desc('numer quoted'),
|
||||
|
||||
string2Num: () =>
|
||||
token(P.regexp(/'-?(0|[1-9][0-9]*)([.][0-9]+)?([eE][+-]?[0-9]+)?'/, 1))
|
||||
.map(Number)
|
||||
.map(parseNumberSafe)
|
||||
.desc('numer quoted'),
|
||||
|
||||
number: () =>
|
||||
token(P.regexp(/-?(0|[1-9][0-9]*)([.][0-9]+)?([eE][+-]?[0-9]+)?/))
|
||||
.map(Number)
|
||||
.map(parseNumberSafe)
|
||||
.desc('number'),
|
||||
|
||||
objectid: () => token(P.regexp(/ObjectId\(['"]?[0-9a-f]{24}['"]?\)/)).desc('ObjectId'),
|
||||
|
||||
objectidstr: () => token(P.regexp(/[0-9a-f]{24}/)).desc('ObjectId string'),
|
||||
|
||||
hexstring: () =>
|
||||
token(P.regexp(/0x(([0-9a-fA-F][0-9a-fA-F])+)/, 1))
|
||||
.map(x => ({
|
||||
@@ -366,6 +384,7 @@ const createParser = (filterBehaviour: FilterBehaviour) => {
|
||||
value: r => P.alt(...allowedValues.map(x => r[x])),
|
||||
valueTestEq: r => r.value.map(binaryCondition('=')),
|
||||
hexTestEq: r => r.hexstring.map(binaryCondition('=')),
|
||||
valueTestObjectIdStr: r => r.objectidstr.map(simpleEqualCondition()),
|
||||
valueTestStr: r => r.value.map(likeCondition('like', '%#VALUE#%')),
|
||||
valueTestNum: r => r.number.map(numberTestCondition()),
|
||||
valueTestObjectId: r => r.objectid.map(objectIdTestCondition()),
|
||||
@@ -546,12 +565,13 @@ const createParser = (filterBehaviour: FilterBehaviour) => {
|
||||
}
|
||||
}
|
||||
|
||||
if (filterBehaviour.allowNumberDualTesting) {
|
||||
allowedElements.push('valueTestNum');
|
||||
if (filterBehaviour.allowObjectIdTesting) {
|
||||
allowedElements.push('valueTestObjectIdStr');
|
||||
allowedElements.push('valueTestObjectId');
|
||||
}
|
||||
|
||||
if (filterBehaviour.allowObjectIdTesting) {
|
||||
allowedElements.push('valueTestObjectId');
|
||||
if (filterBehaviour.allowNumberDualTesting) {
|
||||
allowedElements.push('valueTestNum');
|
||||
}
|
||||
|
||||
// must be last
|
||||
|
||||
@@ -16,11 +16,17 @@ function isLike(value, test) {
|
||||
return res;
|
||||
}
|
||||
|
||||
function extractRawValue(value) {
|
||||
if (value?.$bigint) return value.$bigint;
|
||||
if (value?.$oid) return value.$oid;
|
||||
return value;
|
||||
}
|
||||
|
||||
export function evaluateCondition(condition: Condition, values) {
|
||||
switch (condition.conditionType) {
|
||||
case 'binary':
|
||||
const left = evaluateExpression(condition.left, values);
|
||||
const right = evaluateExpression(condition.right, values);
|
||||
const left = extractRawValue(evaluateExpression(condition.left, values));
|
||||
const right = extractRawValue(evaluateExpression(condition.right, values));
|
||||
switch (condition.operator) {
|
||||
case '=':
|
||||
return left == right;
|
||||
@@ -50,10 +56,15 @@ export function evaluateCondition(condition: Condition, values) {
|
||||
case 'or':
|
||||
return condition.conditions.some(cond => evaluateCondition(cond, values));
|
||||
case 'like':
|
||||
return isLike(evaluateExpression(condition.left, values), evaluateExpression(condition.right, values));
|
||||
break;
|
||||
return isLike(
|
||||
extractRawValue(evaluateExpression(condition.left, values)),
|
||||
extractRawValue(evaluateExpression(condition.right, values))
|
||||
);
|
||||
case 'notLike':
|
||||
return !isLike(evaluateExpression(condition.left, values), evaluateExpression(condition.right, values));
|
||||
return !isLike(
|
||||
extractRawValue(evaluateExpression(condition.left, values)),
|
||||
extractRawValue(evaluateExpression(condition.right, values))
|
||||
);
|
||||
case 'not':
|
||||
return !evaluateCondition(condition.condition, values);
|
||||
case 'anyColumnPass':
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user