Compare commits: v6.3.2...check-2025 (338 commits)
Commits in this range (SHA1):

c9444c5318, d1925945b4, 6625080fde, 1110609e39, f21d2c7253, 9c1d330945,
cd7800056c, 7ff4bec3bc, d305cf2167, e77b83bd92, 171d58658a, a6f6bc4c0a,
f03cffe3f8, 809dca184e, ecda226949, ff1b58ebd8, 5760ada3b4, b74b6b3284,
e4cc4b6f58, dd90851477, da9b127468, d6b05e44cb, 58c1b5b98d, c270cba8d6,
58f1f749fc, 38d87a7c8f, 4e13598708, 4177448d32, e4911a6f82, 159224700f,
696d4e7342, ffb6cfaa4a, b8899fcafa, aba829c991, 8f4c61c259, a19648a6e8,
d5c0f7045e, 6f69205818, 8166da548c, d54f7293b7, 225520a765, af1eccde8e,
5d37280643, 80597039f5, 2766aedc01, da3e12cb7e, 8baff1b0d2, 4ff5f9204e,
515339bbd8, 943634b0e2, 212b26b960, c0b41987aa, b4ef640052, db6b7f52eb,
55b4b9e02a, 4e6ae93b13, c9a5fe5676, a5adfb7c7f, 1794b86041, f405124ce4,
25060c1477, 6ad218f354, d1c52548b0, 9dc847b72f, 5b04adb21f, 356d25e548,
a9958af818, fb359b7f87, 7f087819a6, e836fa3d38, 9a69f1108d, 2c5c58dc90,
cb50d2838a, aff7125914, 45d82dce04, 7a3b27227a, 7b50a19b2c, 741b942dea,
f7ca64a49d, 2f7b3455e5, 1568dfc183, d3a5df0007, c20cac621a, 74560c3289,
f94bf3f8ce, d26db7096d, afde0a7423, cc930a3ff9, 60ecdadc74, 82fc1850cf,
88f937f73e, b3497c7306, 366ab2e0cd, 98e4fabd2e, 716c3573fd, 842d8dd780,
c767dfb22e, f94901c3b2, c9638aefe9, 8bd4721686, 808f7504c3, 8ea7d3d5e8,
d4931890ae, f4a879a452, 78521ffdb4, 8ea3f80b97, 0d8d87857c, 3a3a261d9c,
2e00daf63c, 1b8bb0c1fd, 5c33579544, f8081ff09e, 01b7eeeecf, d2f4c374a9,
07073eebe9, 590a4ae476, b553a81d47, 7d4e53e413, 839b0f6f5e, 893c5da4ef,
f9b893edfa, b4fadb39bf, 310f8bf6f7, 903a26a330, 41ebd39810, 281de5196e,
a9ab864cbb, f3ff910821, ba5179f1e8, 05e8f6ed78, 23150815a0, a50f223fe3,
9329345d98, c71c32b363, 5590aa7234, 4a3491e0b5, e8cb87ae3d, 2f6427af32,
5564047001, 22577c5f87, 4dc2627da2, 05aaf0de9f, 951bfa23f3, 7ac6cfcf25,
06055a7c4c, b33198d1bf, f826b9eb6e, 2b58121552, 762547d0e9, 727523eb3f,
5bbdb66eb2, c6eff4f90d, 2559173c2c, 8adea132ef, 9d924f8d1c, 1b297fed90,
2c2a93c440, bb076cce5d, b16b02c3f1, 3e0f834796, 0af38c6e0e, 85f7011e03,
e8d5412e14, 170cf4753e, 660e76145e, 31a6f7b621, f6699ad93b, 6e508e4454,
2b101844e9, fb036935e6, c3e09ddab0, 36ae07074d, a4518ce261, 861ea7ef94,
541af0b77e, 3fd3de1828, 3e2840ca15, 839ec9a456, bac8bd0006, 8c1b51b7e9,
a71c4fe7ec, b9d4197b5c, ce7559087e, 110d87e512, cd817714cd, 23db345756,
16990bd0c3, 2e3b770bea, 28f62623bf, c9f3e8cb9f, 6b751eb715, 5d953da267,
e87ae31a51, 9f029b892b, 40a9ced0f7, 87fbd7e5da, bca5514a76, 62ddbb20ac,
9d376961f4, e77aa00bcd, 465330820d, ec9cbba67e, 8961ea6fc9, ca7ca9da81,
2a234f14df, 3ff97bf628, 416d6f2aef, ecaafaca69, fdb14cd49b, 070e955b89,
9390ab3c6c, f67221ee01, e09294d9aa, 5675acb71a, 0305a5dcef, 5f03340454,
0f69ba46c5, 71ecb6bd4e, 2b712cc808, 3a68b7b554, e41727a1fc, 857d0f3316,
1f68f62689, 09b43a8e95, b8d765d229, 06a919ff8d, ea1e7769b1, d5e6f99819,
a5ab9726dd, 6464fc56d8, 156e1b928c, edf17b8100, 5ab980ce1a, f1d80fadc4,
d331d48ca2, e740db11ed, 6cff7b3c30, 88cdd2fcbf, 55896be694, c4f17e42e1,
e8b11bd42a, a566fb3988, de071b37eb, 7b4d408733, 06478d89ea, 7cedf4c620,
ef1ea5eeee, 96f222db94, 065eb9b878, ed583d80a3, 7752db3916, e18254e4d8,
555fc97e8f, f135adfdba, 72b766f32a, 5278861ccd, ebcf88070c, 273811bbb8,
26b53e725d, 5434ec85e2, 91f438aeff, 501f67ebe7, be52fef0bb, 2c0cf40a15,
8566070d9b, 8f4118a6b8, 54c53f0b56, eafcee8c67, 5c8e1e0f4a, 76043d5876,
936f3b0752, 9380608781, 544b75bcd5, 2ae63d2323, f24717a3a3, f0f5558f3e,
3d8c732258, f312237bd5, 952dde3fef, e5387f6d06, 93d6697b4e, bad2f3415a,
d2f5d97282, f8c3fef839, 14b47a929f, 4b3c0466eb, 041397b137, 6548c286a6,
ccf78285b6, 5e9366fa92, 0e30cb1439, b8d86518e7, 725399ac7c, ca18994092,
caefc438b9, 0ece8c7dec, e7c42f3623, 18faf89b89, 3a750ae6a2, 48c614d8c3,
c37d502c27, 3fed7a081d, e75497d03b, 6d1421f1b7, dd210be037, 7c7d6ad548,
2f471c0e3f, cff219674f, e35f9eb75b, ef1eff2ecb, 244ff61fb3, 50e2623f19,
fd0b997c13, 13d057e4f7, d1a6be6ca6, edece02c13, d68cf4e44d, 9d85a58634,
c4a4cd0957, 5af7615054, a68a1334fc, dd3e38355c, 890461bcf8, 750265cb79,
004de824ba, fc43b35628, 696653f945, 157dca50e9, 7d2130b229, 2cc81211af,
ea9a5b0eb0, e60cee6a73, a3ee60a464, 28595cbeb3, dbdbf5210e, 83f69d89ff,
fe9f1146ce, d8405feab3
.github/workflows/build-app-beta.yaml (34 changed lines)

```diff
@@ -5,10 +5,10 @@ name: Electron app BETA
 'on':
   push:
     tags:
-      - v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+
+      - 'v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+'
 jobs:
   build:
-    runs-on: ${{ matrix.os }}
+    runs-on: '${{ matrix.os }}'
     strategy:
       fail-fast: false
       matrix:
@@ -24,7 +24,7 @@ jobs:
           echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
       - name: Context
         env:
-          GITHUB_CONTEXT: ${{ toJson(github) }}
+          GITHUB_CONTEXT: '${{ toJson(github) }}'
         run: echo "$GITHUB_CONTEXT"
       - uses: actions/checkout@v2
         with:
@@ -58,7 +58,7 @@ jobs:
 
           yarn printSecrets
         env:
-          GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
+          GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
       - name: fillPackagedPlugins
         run: |
 
@@ -71,16 +71,16 @@ jobs:
 
           yarn run build:app
         env:
-          GH_TOKEN: ${{ secrets.GH_TOKEN }}
-          WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
-          WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
-          CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
-          CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
-          APPLE_ID: ${{ secrets.APPLE_ID }}
-          APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
-          APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
-          SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
-          APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
+          GH_TOKEN: '${{ secrets.GH_TOKEN }}'
+          WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
+          WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
+          CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
+          CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
+          APPLE_ID: '${{ secrets.APPLE_ID }}'
+          APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
+          APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
+          SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
+          APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
       - name: Copy artifacts
         run: |
           mkdir artifacts
@@ -111,16 +111,16 @@ jobs:
       - name: Upload artifacts
         uses: actions/upload-artifact@v4
         with:
-          name: ${{ matrix.os }}
+          name: '${{ matrix.os }}'
           path: artifacts
       - name: Release
         uses: softprops/action-gh-release@v1
-        if: startsWith(github.ref, 'refs/tags/')
+        if: 'startsWith(github.ref, ''refs/tags/'')'
        with:
           files: artifacts/**
           prerelease: true
         env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
       - name: Print content of notarization-error.log
         if: failure() && matrix.os == 'macos-14'
         run: |
 
```
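The one change repeated across these workflow files is wrapping scalar values, most of them GitHub Actions `${{ ... }}` expressions, in single quotes. A minimal YAML sketch of the rules likely behind the change; the snippet is illustrative, not a file from the repository:

```yaml
# Unquoted `on` is the boolean true in YAML 1.1, which is why generated
# workflow files quote the key:
'on':
  push: {}
# Quoted and unquoted forms yield the same string for GitHub Actions;
# quoting just makes the value an unambiguous plain scalar for YAML tooling:
runs-on: '${{ matrix.os }}'
# Inside a single-quoted scalar a literal single quote is written twice,
# so this parses to: startsWith(github.ref, 'refs/tags/')
if: 'startsWith(github.ref, ''refs/tags/'')'
```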
.github/workflows/build-app-check.yaml (new file, 116 lines)

```yaml
# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
name: Electron app check build
'on':
  push:
    tags:
      - check-[0-9]+-[0-9]+-[0-9]+.[0-9]+
jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os:
          - macos-14
          - windows-2022
          - ubuntu-22.04
    steps:
      - name: Install python 3.11 (MacOS)
        if: matrix.os == 'macos-14'
        run: |
          brew install python@3.11
          echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
      - name: Context
        env:
          GITHUB_CONTEXT: ${{ toJson(github) }}
        run: echo "$GITHUB_CONTEXT"
      - uses: actions/checkout@v2
        with:
          fetch-depth: 1
      - name: Use Node.js 22.x
        uses: actions/setup-node@v1
        with:
          node-version: 22.x
      - name: adjustPackageJson
        run: |

          node adjustPackageJson --community
      - name: yarn set timeout
        run: |

          yarn config set network-timeout 100000
      - name: yarn install
        run: |

          yarn install
      - name: setCurrentVersion
        run: |

          yarn setCurrentVersion
      - name: printSecrets
        run: |

          yarn printSecrets
        env:
          GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
      - name: fillPackagedPlugins
        run: |

          yarn fillPackagedPlugins
      - name: Install Snapcraft
        if: matrix.os == 'ubuntu-22.04'
        uses: samuelmeuli/action-snapcraft@v1
      - name: Publish
        run: |

          yarn run build:app
        env:
          GH_TOKEN: ${{ secrets.GH_TOKEN }}
          WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
          WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
          CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
          CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
          APPLE_ID: ${{ secrets.APPLE_ID }}
          APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
          APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
          SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
          APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
      - name: Copy artifacts
        run: |
          mkdir artifacts

          cp app/dist/*.deb artifacts/dbgate-check.deb || true
          cp app/dist/*x86*.AppImage artifacts/dbgate-check.AppImage || true
          cp app/dist/*arm64*.AppImage artifacts/dbgate-check-arm64.AppImage || true
          cp app/dist/*armv7l*.AppImage artifacts/dbgate-check-armv7l.AppImage || true
          cp app/dist/*win*.exe artifacts/dbgate-check.exe || true
          cp app/dist/*win_x64.zip artifacts/dbgate-windows-check.zip || true
          cp app/dist/*win_arm64.zip artifacts/dbgate-windows-check-arm64.zip || true
          cp app/dist/*-mac_universal.dmg artifacts/dbgate-check.dmg || true
          cp app/dist/*-mac_x64.dmg artifacts/dbgate-check-x64.dmg || true
          cp app/dist/*-mac_arm64.dmg artifacts/dbgate-check-arm64.dmg || true
          mv app/dist/*.snap artifacts/dbgate-check.snap || true

          mv app/dist/*.exe artifacts/ || true
          mv app/dist/*.zip artifacts/ || true
          mv app/dist/*.tar.gz artifacts/ || true
          mv app/dist/*.AppImage artifacts/ || true
          mv app/dist/*.deb artifacts/ || true
          mv app/dist/*.snap artifacts/ || true
          mv app/dist/*.dmg artifacts/ || true
          mv app/dist/*.blockmap artifacts/ || true

          mv app/dist/*.yml artifacts/ || true
          rm artifacts/builder-debug.yml
      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.os }}
          path: artifacts
      - name: Print content of notarization-error.log
        if: failure() && matrix.os == 'macos-14'
        run: |

          find . -type f -name "notarization-error.log" -exec echo "=== Start of {} ===" \; -exec cat {} \; -exec echo "=== End of {} ===" \;
```
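This new workflow runs only when a tag matching its pattern is pushed. A small sketch of the trigger with an example tag name; the concrete tag is illustrative, not taken from the repository:

```yaml
'on':
  push:
    tags:
      # matches date-stamped check tags, e.g. check-2025-06-30.1 (example value)
      - check-[0-9]+-[0-9]+-[0-9]+.[0-9]+
```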
.github/workflows/build-app-pro-beta.yaml (38 changed lines)

```diff
@@ -5,10 +5,10 @@ name: Electron app PREMIUM BETA
 'on':
   push:
     tags:
-      - v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+
+      - 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
 jobs:
   build:
-    runs-on: ${{ matrix.os }}
+    runs-on: '${{ matrix.os }}'
     strategy:
       fail-fast: false
       matrix:
@@ -24,7 +24,7 @@ jobs:
           echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
       - name: Context
         env:
-          GITHUB_CONTEXT: ${{ toJson(github) }}
+          GITHUB_CONTEXT: '${{ toJson(github) }}'
         run: echo "$GITHUB_CONTEXT"
      - uses: actions/checkout@v2
         with:
@@ -37,9 +37,9 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: dbgate/dbgate-pro
-          token: ${{ secrets.GH_TOKEN }}
+          token: '${{ secrets.GH_TOKEN }}'
           path: dbgate-pro
-          ref: 5cc577af44faaf6ec61cff778cae6fbe35a6c510
+          ref: ecea1eef17c69c56b0633317e24a68c5220a4810
       - name: Merge dbgate/dbgate-pro
         run: |
           mkdir ../dbgate-pro
@@ -88,7 +88,7 @@ jobs:
 
           yarn printSecrets
         env:
-          GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
+          GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
       - name: fillPackagedPlugins
         run: |
           cd ..
@@ -102,16 +102,16 @@ jobs:
 
           yarn run build:app
         env:
-          GH_TOKEN: ${{ secrets.GH_TOKEN }}
-          WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
-          WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
-          CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
-          CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
-          APPLE_ID: ${{ secrets.APPLE_ID }}
-          APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
-          APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
-          SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
-          APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
+          GH_TOKEN: '${{ secrets.GH_TOKEN }}'
+          WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
+          WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
+          CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
+          CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
+          APPLE_ID: '${{ secrets.APPLE_ID }}'
+          APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
+          APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
+          SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
+          APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
       - name: Copy artifacts
         run: |
           mkdir artifacts
@@ -142,16 +142,16 @@ jobs:
       - name: Upload artifacts
         uses: actions/upload-artifact@v4
         with:
-          name: ${{ matrix.os }}
+          name: '${{ matrix.os }}'
           path: artifacts
       - name: Release
         uses: softprops/action-gh-release@v1
-        if: startsWith(github.ref, 'refs/tags/')
+        if: 'startsWith(github.ref, ''refs/tags/'')'
         with:
           files: artifacts/**
           prerelease: true
         env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
       - name: Print content of notarization-error.log
         if: failure() && matrix.os == 'macos-14'
         run: |
 
```
.github/workflows/build-app-pro.yaml (38 changed lines)

```diff
@@ -5,10 +5,10 @@ name: Electron app PREMIUM
 'on':
   push:
     tags:
-      - v[0-9]+.[0-9]+.[0-9]+
+      - 'v[0-9]+.[0-9]+.[0-9]+'
 jobs:
   build:
-    runs-on: ${{ matrix.os }}
+    runs-on: '${{ matrix.os }}'
     strategy:
       fail-fast: false
       matrix:
@@ -24,7 +24,7 @@ jobs:
           echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
       - name: Context
         env:
-          GITHUB_CONTEXT: ${{ toJson(github) }}
+          GITHUB_CONTEXT: '${{ toJson(github) }}'
         run: echo "$GITHUB_CONTEXT"
       - uses: actions/checkout@v2
         with:
@@ -37,9 +37,9 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: dbgate/dbgate-pro
-          token: ${{ secrets.GH_TOKEN }}
+          token: '${{ secrets.GH_TOKEN }}'
           path: dbgate-pro
-          ref: 5cc577af44faaf6ec61cff778cae6fbe35a6c510
+          ref: ecea1eef17c69c56b0633317e24a68c5220a4810
       - name: Merge dbgate/dbgate-pro
         run: |
           mkdir ../dbgate-pro
@@ -88,7 +88,7 @@ jobs:
 
           yarn printSecrets
         env:
-          GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
+          GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
       - name: fillPackagedPlugins
         run: |
           cd ..
@@ -102,16 +102,16 @@ jobs:
 
           yarn run build:app
         env:
-          GH_TOKEN: ${{ secrets.GH_TOKEN }}
-          WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
-          WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
-          CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
-          CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
-          APPLE_ID: ${{ secrets.APPLE_ID }}
-          APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
-          APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
-          SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
-          APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
+          GH_TOKEN: '${{ secrets.GH_TOKEN }}'
+          WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
+          WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
+          CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
+          CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
+          APPLE_ID: '${{ secrets.APPLE_ID }}'
+          APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
+          APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
+          SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
+          APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
       - name: Copy artifacts
         run: |
           mkdir artifacts
@@ -142,16 +142,16 @@ jobs:
       - name: Upload artifacts
         uses: actions/upload-artifact@v4
         with:
-          name: ${{ matrix.os }}
+          name: '${{ matrix.os }}'
           path: artifacts
       - name: Release
         uses: softprops/action-gh-release@v1
-        if: startsWith(github.ref, 'refs/tags/')
+        if: 'startsWith(github.ref, ''refs/tags/'')'
         with:
           files: artifacts/**
           prerelease: false
         env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
       - name: Print content of notarization-error.log
         if: failure() && matrix.os == 'macos-14'
         run: |
 
```
.github/workflows/build-app.yaml (34 changed lines)

```diff
@@ -5,10 +5,10 @@ name: Electron app
 'on':
   push:
     tags:
-      - v[0-9]+.[0-9]+.[0-9]+
+      - 'v[0-9]+.[0-9]+.[0-9]+'
 jobs:
   build:
-    runs-on: ${{ matrix.os }}
+    runs-on: '${{ matrix.os }}'
     strategy:
       fail-fast: false
       matrix:
@@ -24,7 +24,7 @@ jobs:
           echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
       - name: Context
         env:
-          GITHUB_CONTEXT: ${{ toJson(github) }}
+          GITHUB_CONTEXT: '${{ toJson(github) }}'
         run: echo "$GITHUB_CONTEXT"
       - uses: actions/checkout@v2
         with:
@@ -54,7 +54,7 @@ jobs:
 
           yarn printSecrets
         env:
-          GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
+          GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
       - name: fillPackagedPlugins
         run: |
 
@@ -67,16 +67,16 @@ jobs:
 
           yarn run build:app
         env:
-          GH_TOKEN: ${{ secrets.GH_TOKEN }}
-          WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
-          WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
-          CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
-          CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
-          APPLE_ID: ${{ secrets.APPLE_ID }}
-          APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
-          APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
-          SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
-          APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
+          GH_TOKEN: '${{ secrets.GH_TOKEN }}'
+          WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
+          WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
+          CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
+          CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
+          APPLE_ID: '${{ secrets.APPLE_ID }}'
+          APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
+          APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
+          SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
+          APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
       - name: generatePadFile
         run: |
           yarn generatePadFile
@@ -114,16 +114,16 @@ jobs:
       - name: Upload artifacts
         uses: actions/upload-artifact@v4
         with:
-          name: ${{ matrix.os }}
+          name: '${{ matrix.os }}'
           path: artifacts
       - name: Release
         uses: softprops/action-gh-release@v1
-        if: startsWith(github.ref, 'refs/tags/')
+        if: 'startsWith(github.ref, ''refs/tags/'')'
         with:
           files: artifacts/**
           prerelease: false
         env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
       - name: Print content of notarization-error.log
         if: failure() && matrix.os == 'macos-14'
         run: |
 
```
.github/workflows/build-cloud-pro.yaml (46 changed lines)

```diff
@@ -5,11 +5,11 @@ name: Cloud images PREMIUM
 'on':
   push:
     tags:
-      - v[0-9]+.[0-9]+.[0-9]+
-      - v[0-9]+.[0-9]+.[0-9]+-packer-beta.[0-9]+
+      - 'v[0-9]+.[0-9]+.[0-9]+'
+      - 'v[0-9]+.[0-9]+.[0-9]+-packer-beta.[0-9]+'
 jobs:
   build:
-    runs-on: ${{ matrix.os }}
+    runs-on: '${{ matrix.os }}'
     strategy:
       matrix:
         os:
@@ -17,7 +17,7 @@ jobs:
     steps:
       - name: Context
         env:
-          GITHUB_CONTEXT: ${{ toJson(github) }}
+          GITHUB_CONTEXT: '${{ toJson(github) }}'
         run: echo "$GITHUB_CONTEXT"
       - uses: actions/checkout@v2
         with:
@@ -37,9 +37,9 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: dbgate/dbgate-pro
-          token: ${{ secrets.GH_TOKEN }}
+          token: '${{ secrets.GH_TOKEN }}'
           path: dbgate-pro
-          ref: 5cc577af44faaf6ec61cff778cae6fbe35a6c510
+          ref: ecea1eef17c69c56b0633317e24a68c5220a4810
       - name: Merge dbgate/dbgate-pro
         run: |
           mkdir ../dbgate-pro
@@ -72,7 +72,7 @@ jobs:
           cd dbgate-merged
           yarn printSecrets
         env:
-          GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
+          GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
       - name: Prepare packer build
         run: |
           cd ..
@@ -87,16 +87,16 @@ jobs:
       - name: Upload artifacts
         uses: actions/upload-artifact@v4
         with:
-          name: ${{ matrix.os }}
+          name: '${{ matrix.os }}'
           path: artifacts
       - name: Release
         uses: softprops/action-gh-release@v1
-        if: startsWith(github.ref, 'refs/tags/')
+        if: 'startsWith(github.ref, ''refs/tags/'')'
         with:
           files: artifacts/**
           prerelease: true
         env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
       - name: Run `packer init` for Azure
         run: |
           cd ../dbgate-merged/packer
@@ -110,33 +110,33 @@ jobs:
           cd ../dbgate-merged/packer
           packer init ./aws-ubuntu.pkr.hcl
         env:
-          AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
-          AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
-          AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}}
+          AWS_ACCESS_KEY_ID: '${{secrets.AWS_ACCESS_KEY_ID}}'
+          AWS_SECRET_ACCESS_KEY: '${{secrets.AWS_SECRET_ACCESS_KEY}}'
+          AWS_DEFAULT_REGION: '${{secrets.AWS_DEFAULT_REGION}}'
       - name: Run `packer build` for AWS
         run: |
           cd ../dbgate-merged/packer
           packer build ./aws-ubuntu.pkr.hcl
         env:
-          AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
-          AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
-          AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}}
+          AWS_ACCESS_KEY_ID: '${{secrets.AWS_ACCESS_KEY_ID}}'
+          AWS_SECRET_ACCESS_KEY: '${{secrets.AWS_SECRET_ACCESS_KEY}}'
+          AWS_DEFAULT_REGION: '${{secrets.AWS_DEFAULT_REGION}}'
       - name: Delete old Azure VMs
         run: |
           cd ../dbgate-merged/packer
           chmod +x delete-old-azure-images.sh
           ./delete-old-azure-images.sh
         env:
-          AZURE_CLIENT_ID: ${{secrets.AZURE_CLIENT_ID}}
-          AZURE_CLIENT_SECRET: ${{secrets.AZURE_CLIENT_SECRET}}
-          AZURE_TENANT_ID: ${{secrets.AZURE_TENANT_ID}}
-          AZURE_SUBSCRIPTION_ID: ${{secrets.AZURE_SUBSCRIPTION_ID}}
+          AZURE_CLIENT_ID: '${{secrets.AZURE_CLIENT_ID}}'
+          AZURE_CLIENT_SECRET: '${{secrets.AZURE_CLIENT_SECRET}}'
+          AZURE_TENANT_ID: '${{secrets.AZURE_TENANT_ID}}'
+          AZURE_SUBSCRIPTION_ID: '${{secrets.AZURE_SUBSCRIPTION_ID}}'
       - name: Delete old AMIs (AWS)
         run: |
           cd ../dbgate-merged/packer
           chmod +x delete-old-amis.sh
           ./delete-old-amis.sh
         env:
-          AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
-          AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
-          AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}}
+          AWS_ACCESS_KEY_ID: '${{secrets.AWS_ACCESS_KEY_ID}}'
+          AWS_SECRET_ACCESS_KEY: '${{secrets.AWS_SECRET_ACCESS_KEY}}'
+          AWS_DEFAULT_REGION: '${{secrets.AWS_DEFAULT_REGION}}'
```
.github/workflows/build-docker-pro.yaml (22 changed lines)

```diff
@@ -5,11 +5,11 @@ name: Docker image PREMIUM
 'on':
   push:
     tags:
-      - v[0-9]+.[0-9]+.[0-9]+
-      - v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+
+      - 'v[0-9]+.[0-9]+.[0-9]+'
+      - 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
 jobs:
   build:
-    runs-on: ${{ matrix.os }}
+    runs-on: '${{ matrix.os }}'
     strategy:
       matrix:
         os:
@@ -17,7 +17,7 @@ jobs:
     steps:
       - name: Context
         env:
-          GITHUB_CONTEXT: ${{ toJson(github) }}
+          GITHUB_CONTEXT: '${{ toJson(github) }}'
         run: echo "$GITHUB_CONTEXT"
       - uses: actions/checkout@v2
         with:
@@ -42,9 +42,9 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: dbgate/dbgate-pro
-          token: ${{ secrets.GH_TOKEN }}
+          token: '${{ secrets.GH_TOKEN }}'
           path: dbgate-pro
-          ref: 5cc577af44faaf6ec61cff778cae6fbe35a6c510
+          ref: ecea1eef17c69c56b0633317e24a68c5220a4810
       - name: Merge dbgate/dbgate-pro
         run: |
           mkdir ../dbgate-pro
@@ -83,7 +83,7 @@ jobs:
 
           yarn printSecrets
         env:
-          GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
+          GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
       - name: Prepare docker image
         run: |
           cd ..
@@ -97,12 +97,12 @@ jobs:
       - name: Login to DockerHub
         uses: docker/login-action@v2
         with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
+          username: '${{ secrets.DOCKER_USERNAME }}'
+          password: '${{ secrets.DOCKER_PASSWORD }}'
       - name: Build and push
         uses: docker/build-push-action@v3
         with:
           push: true
           context: ../dbgate-merged/docker
-          tags: ${{ steps.meta.outputs.tags }}
-          platforms: linux/amd64,linux/arm64
+          tags: '${{ steps.meta.outputs.tags }}'
+          platforms: 'linux/amd64,linux/arm64'
```
.github/workflows/build-docker.yaml (22 changed lines)

```diff
@@ -5,11 +5,11 @@ name: Docker image Community
 'on':
   push:
     tags:
-      - v[0-9]+.[0-9]+.[0-9]+
-      - v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+
+      - 'v[0-9]+.[0-9]+.[0-9]+'
+      - 'v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+'
 jobs:
   build:
-    runs-on: ${{ matrix.os }}
+    runs-on: '${{ matrix.os }}'
     strategy:
       matrix:
         os:
@@ -17,7 +17,7 @@ jobs:
     steps:
       - name: Context
         env:
-          GITHUB_CONTEXT: ${{ toJson(github) }}
+          GITHUB_CONTEXT: '${{ toJson(github) }}'
         run: echo "$GITHUB_CONTEXT"
       - uses: actions/checkout@v2
         with:
@@ -70,7 +70,7 @@ jobs:
 
           yarn printSecrets
         env:
-          GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
+          GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
       - name: Prepare docker image
         run: |
 
@@ -82,20 +82,20 @@ jobs:
       - name: Login to DockerHub
         uses: docker/login-action@v2
         with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
+          username: '${{ secrets.DOCKER_USERNAME }}'
+          password: '${{ secrets.DOCKER_PASSWORD }}'
       - name: Build and push
         uses: docker/build-push-action@v3
         with:
           push: true
           context: ./docker
-          tags: ${{ steps.meta.outputs.tags }}
-          platforms: linux/amd64,linux/arm64,linux/arm/v7
+          tags: '${{ steps.meta.outputs.tags }}'
+          platforms: 'linux/amd64,linux/arm64,linux/arm/v7'
       - name: Build and push alpine
         uses: docker/build-push-action@v3
         with:
           push: true
           context: ./docker
           file: ./docker/Dockerfile-alpine
-          tags: ${{ steps.alpmeta.outputs.tags }}
-          platforms: linux/amd64,linux/arm64,linux/arm/v7
+          tags: '${{ steps.alpmeta.outputs.tags }}'
+          platforms: 'linux/amd64,linux/arm64,linux/arm/v7'
```
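Both Docker workflows push with `tags: '${{ steps.meta.outputs.tags }}'` (and `steps.alpmeta` for the Alpine image), but the steps that produce those outputs sit outside the hunks shown. A sketch of how such a `meta` step is commonly wired with docker/metadata-action; the `images` value and the tag rule are assumptions, not taken from the repository:

```yaml
- name: Docker meta
  id: meta                            # referenced later as steps.meta.outputs.tags
  uses: docker/metadata-action@v4
  with:
    images: dbgate/dbgate             # assumed image name
    tags: |
      type=semver,pattern={{version}}
- name: Build and push
  uses: docker/build-push-action@v3
  with:
    push: true
    tags: '${{ steps.meta.outputs.tags }}'
```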
.github/workflows/build-npm-pro.yaml (16 changed lines)

```diff
@@ -5,11 +5,11 @@ name: NPM packages PREMIUM
 'on':
   push:
     tags:
-      - v[0-9]+.[0-9]+.[0-9]+
-      - v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+
+      - 'v[0-9]+.[0-9]+.[0-9]+'
+      - 'v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+'
 jobs:
   build:
-    runs-on: ${{ matrix.os }}
+    runs-on: '${{ matrix.os }}'
     strategy:
       matrix:
         os:
@@ -17,7 +17,7 @@ jobs:
     steps:
       - name: Context
         env:
-          GITHUB_CONTEXT: ${{ toJson(github) }}
+          GITHUB_CONTEXT: '${{ toJson(github) }}'
         run: echo "$GITHUB_CONTEXT"
       - uses: actions/checkout@v2
         with:
@@ -30,9 +30,9 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: dbgate/dbgate-pro
-          token: ${{ secrets.GH_TOKEN }}
+          token: '${{ secrets.GH_TOKEN }}'
           path: dbgate-pro
-          ref: 5cc577af44faaf6ec61cff778cae6fbe35a6c510
+          ref: ecea1eef17c69c56b0633317e24a68c5220a4810
       - name: Merge dbgate/dbgate-pro
         run: |
           mkdir ../dbgate-pro
@@ -51,7 +51,7 @@ jobs:
           node adjustNpmPackageJsonPremium
       - name: Configure NPM token
         env:
-          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+          NPM_TOKEN: '${{ secrets.NPM_TOKEN }}'
         run: |
           cd ..
           cd dbgate-merged
@@ -77,7 +77,7 @@ jobs:
           cd dbgate-merged
           yarn printSecrets
         env:
-          GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
+          GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
       - name: Publish dbgate-api-premium
         run: |
           cd ..
```
.github/workflows/build-npm.yaml (12 changed lines)

```diff
@@ -5,11 +5,11 @@ name: NPM packages
 'on':
   push:
     tags:
-      - v[0-9]+.[0-9]+.[0-9]+
-      - v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+
+      - 'v[0-9]+.[0-9]+.[0-9]+'
+      - 'v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+'
 jobs:
   build:
-    runs-on: ${{ matrix.os }}
+    runs-on: '${{ matrix.os }}'
     strategy:
       matrix:
         os:
@@ -17,7 +17,7 @@ jobs:
     steps:
       - name: Context
         env:
-          GITHUB_CONTEXT: ${{ toJson(github) }}
+          GITHUB_CONTEXT: '${{ toJson(github) }}'
         run: echo "$GITHUB_CONTEXT"
       - uses: actions/checkout@v2
         with:
@@ -28,7 +28,7 @@ jobs:
           node-version: 18.x
       - name: Configure NPM token
         env:
-          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+          NPM_TOKEN: '${{ secrets.NPM_TOKEN }}'
         run: |
           npm config set '//registry.npmjs.org/:_authToken' "${NPM_TOKEN}"
       - name: yarn install
@@ -41,7 +41,7 @@ jobs:
         run: |
           yarn printSecrets
         env:
-          GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
+          GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
       - name: Publish types
         working-directory: packages/types
         run: |
```
.github/workflows/build-test-containers.yaml (4 changed lines)

```diff
@@ -30,8 +30,8 @@ jobs:
         uses: docker/login-action@v2
         with:
           registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
+          username: '${{ github.actor }}'
+          password: '${{ secrets.GITHUB_TOKEN }}'
       - name: Push mysql-ssh-login to GHCR
         run: |
           docker tag dbgate/mysql-ssh-login:latest ghcr.io/dbgate/mysql-ssh-login:latest
```
.github/workflows/diflow.yaml (2 changed lines)

```diff
@@ -33,4 +33,4 @@ jobs:
           cd diflow
           node dist/diflow.js sync -r https://DIFLOW_GIT_SECRET@github.com/dbgate/dbgate-diflow-config.git -b master
         env:
-          DIFLOW_GIT_SECRET: ${{ secrets.DIFLOW_GIT_SECRET }}
+          DIFLOW_GIT_SECRET: '${{ secrets.DIFLOW_GIT_SECRET }}'
```
.github/workflows/e2e-pro.yaml (18 changed lines)

```diff
@@ -24,9 +24,9 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: dbgate/dbgate-pro
-          token: ${{ secrets.GH_TOKEN }}
+          token: '${{ secrets.GH_TOKEN }}'
           path: dbgate-pro
-          ref: 5cc577af44faaf6ec61cff778cae6fbe35a6c510
+          ref: ecea1eef17c69c56b0633317e24a68c5220a4810
       - name: Merge dbgate/dbgate-pro
         run: |
           mkdir ../dbgate-pro
@@ -70,7 +70,7 @@ jobs:
           name: screenshots
           path: screenshots
       - name: Push E2E screenshots
-        if: ${{ github.ref_name == 'master' }}
+        if: '${{ github.ref_name == ''master'' }}'
         run: |
           git config --global user.email "info@dbgate.info"
           git config --global user.name "GitHub Actions"
@@ -89,25 +89,25 @@ jobs:
         ports:
           - '16000:5432'
       mysql-cypress:
-        image: mysql:8.0.18
+        image: 'mysql:8.0.18'
         ports:
           - '16004:3306'
         env:
           MYSQL_ROOT_PASSWORD: Pwd2020Db
       mysql-ssh-login:
-        image: ghcr.io/dbgate/mysql-ssh-login:latest
+        image: 'ghcr.io/dbgate/mysql-ssh-login:latest'
         ports:
           - '16012:22'
       mysql-ssh-keyfile:
-        image: ghcr.io/dbgate/mysql-ssh-keyfile:latest
+        image: 'ghcr.io/dbgate/mysql-ssh-keyfile:latest'
         ports:
           - '16008:22'
       dex:
-        image: ghcr.io/dbgate/dex:latest
+        image: 'ghcr.io/dbgate/dex:latest'
         ports:
           - '16009:5556'
       mongo:
-        image: mongo:4.0.12
+        image: 'mongo:4.0.12'
         env:
           MONGO_INITDB_ROOT_USERNAME: root
           MONGO_INITDB_ROOT_PASSWORD: Pwd2020Db
@@ -126,7 +126,7 @@ jobs:
           SA_PASSWORD: Pwd2020Db
           MSSQL_PID: Express
       oracle:
-        image: gvenzl/oracle-xe:21-slim
+        image: 'gvenzl/oracle-xe:21-slim'
         env:
           ORACLE_PASSWORD: Pwd2020Db
         ports:
```
.github/workflows/process-templates.yaml (4 changed lines)

```diff
@@ -15,7 +15,7 @@ jobs:
       - name: Check out repository
         uses: actions/checkout@v3
         with:
-          token: ${{ secrets.WORKFLOW_CHANGE_ACCESS_TOKEN }}
+          token: '${{ secrets.WORKFLOW_CHANGE_ACCESS_TOKEN }}'
       - name: git pull
         run: |
           git pull
@@ -47,5 +47,5 @@ jobs:
       - name: Push changes
         uses: ad-m/github-push-action@v0.6.0
         with:
-          github_token: ${{ secrets.WORKFLOW_CHANGE_ACCESS_TOKEN }}
+          github_token: '${{ secrets.WORKFLOW_CHANGE_ACCESS_TOKEN }}'
           branch: master
```
.github/workflows/run-tests.yaml (27 changed lines)

```diff
@@ -45,19 +45,19 @@ jobs:
       - uses: tanmen/jest-reporter@v1
         if: always()
         with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
+          github-token: '${{ secrets.GITHUB_TOKEN }}'
           result-file: integration-tests/result.json
           action-name: Integration tests
       - uses: tanmen/jest-reporter@v1
         if: always()
         with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
+          github-token: '${{ secrets.GITHUB_TOKEN }}'
           result-file: packages/filterparser/result.json
           action-name: Filter parser test results
       - uses: tanmen/jest-reporter@v1
         if: always()
         with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
+          github-token: '${{ secrets.GITHUB_TOKEN }}'
           result-file: packages/datalib/result.json
           action-name: Datalib (perspectives) test results
     services:
@@ -69,7 +69,7 @@ jobs:
         ports:
           - '15000:5432'
       mysql-integr:
-        image: mysql:8.0.18
+        image: 'mysql:8.0.18'
         env:
           MYSQL_ROOT_PASSWORD: Pwd2020Db
         ports:
@@ -83,22 +83,33 @@ jobs:
         ports:
           - '15002:1433'
       clickhouse-integr:
-        image: bitnami/clickhouse:24.8.4
+        image: 'bitnami/clickhouse:24.8.4'
         env:
           CLICKHOUSE_ADMIN_PASSWORD: Pwd2020Db
         ports:
           - '15005:8123'
       oracle-integr:
-        image: gvenzl/oracle-xe:21-slim
+        image: 'gvenzl/oracle-xe:21-slim'
         env:
           ORACLE_PASSWORD: Pwd2020Db
         ports:
           - '15006:1521'
       cassandradb:
-        image: cassandra:5.0.2
+        image: 'cassandra:5.0.2'
         ports:
           - '15942:9042'
       libsql:
-        image: ghcr.io/tursodatabase/libsql-server:latest
+        image: 'ghcr.io/tursodatabase/libsql-server:latest'
         ports:
           - '8080:8080'
+      firebird:
+        image: 'firebirdsql/firebird:latest'
+        env:
+          FIREBIRD_DATABASE: mydatabase.fdb
+          FIREBIRD_USER: dbuser
+          FIREBIRD_PASSWORD: dbpassword
+          ISC_PASSWORD: masterkey
+          FIREBIRD_TRACE: false
+          FIREBIRD_USE_LEGACY_AUTH: true
+        ports:
+          - '3050:3050'
```
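Each entry under `services:` starts a container next to the test job, and the quoted `ports:` values map host ports to container ports, so test code running on the runner reaches the databases via localhost. A sketch with a single service; the host port number here is illustrative, not read from the hunks above:

```yaml
services:
  mysql-integr:
    image: 'mysql:8.0.18'
    env:
      MYSQL_ROOT_PASSWORD: Pwd2020Db
    ports:
      - '15001:3306'   # host 15001 -> container 3306; tests connect to 127.0.0.1:15001
```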
CHANGELOG.md (41 changed lines)

```diff
@@ -8,6 +8,47 @@ Builds:
 - linux - application for linux
 - win - application for Windows
 
+## 6.4.2
+
+- ADDED: Source label to docker container #1105
+- FIXED: DbGate restart needed to take effect after trigger is created/deleted on mariadb #1112
+- ADDED: View PostgreSQL query console output #1108
+- FIXED: Single quote generete MySql error #1107
+- ADDED: Ability to limit query result count #1098
+- CHANGED: Correct processing of bigint columns #1087 #1055 #583
+- CHANGED: Improved and optimalized algorithm of loading redis keys #1062, #1034
+- FIXED: Fixed loading Redis keys with :: in key name
+
+## 6.4.0
+- ADDED: DuckDB support
+- ADDED: Data deployer (Premium)
+- ADDED: Compare data between JSON lines file in archive and database table
+- CHANGED: Data Duplicator => Data Replicator (suitable for update, create and delete data, much more customizable)
+- REMOVED: Data duplicator GUI (replaced with Data Deployer)
+- ADDED: Exporting to ZIP file
+- ADDED: Download SQL and SQLite files
+- ADDED: Upload SQLite files
+- ADDED: Upload archive as ZIP folder (Premium)
+- ADDED: Compress, uncompress archive folder (Premium)
+- ADDED: Export connections and settings #357
+- ADDED: Filtering by MongoDB ObjectId works now also without ObjectId(...) wrapper
+- ADDED: Split queries using blank lines #1089
+- FIXED: JSON-to-Grid only works if there is no newline #1085
+- CHANGED: When running multiple commands in script, stop execution after first error #1070
+- FIXED: Selection rectangle remains visible after closing JSONB edit cell value form #1031
+- FIXED: Diplaying numeric FK column with right alignement #1021
+- ADDED: Additional arguments for MySQL and PostgreSQL backup #1092
+- CHANGED: Amazon and Azure instalations are not auto-upgraded by default
+
+## 6.3.3
+- CHANGED: New administration UI, redesigned administration of users, connections and roles
+- ADDED: Encrypting passwords in team-premium edition
+- ADDED: Show scale bar on map #1090
+- FIXED: Fixed native backup/restore for MySQL+PostgreSQL over SSH tunnel #1092
+- CHANGED: Column mapping dialog - fixes and improvements for copying from one existing table into another
+- ADDED: Search in columns in table editor
+- ADDED: Line Wrap for JSON viewer #768
+
 ### 6.3.2
 - ADDED: "Use system theme" switch, use changed system theme without restart #1084
 - ADDED: "Skip SETNAME instruction" option for Redis #1077
```
README.md

```diff
@@ -20,6 +20,7 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
 * Run web version as [NPM package](https://www.npmjs.com/package/dbgate-serve) or as [docker image](https://hub.docker.com/r/dbgate/dbgate)
 * Use nodeJs [scripting interface](https://docs.dbgate.io/scripting) ([API documentation](https://docs.dbgate.io/apidoc))
+* [Recommend DbGate](https://testimonial.to/dbgate) | [Rate on G2](https://www.g2.com/products/dbgate/reviews)
 * [Give us feedback](https://dbgate.org/feedback) - it will help us to decide, how to improve DbGate in future
 
 ## Supported databases
 * MySQL
@@ -35,6 +36,8 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
 * CosmosDB (Premium)
 * ClickHouse
 * Apache Cassandra
+* libSQL/Turso (Premium)
+* DuckDB
 
 
 <a href="https://raw.githubusercontent.com/dbgate/dbgate/master/img/screenshot1.png">
@@ -184,4 +187,4 @@ yarn plugin # this compiles plugin and copies it into existing DbGate installation
 After restarting DbGate, you could use your new plugin from DbGate.
 
 ## Logging
-DbGate uses [pinomin logger](https://github.com/dbgate/pinomin). So by default, it produces JSON log messages into console and log files. If you want to see formatted logs, please use [pino-pretty](https://github.com/pinojs/pino-pretty) log formatter.
+DbGate uses [pinomin logger](https://github.com/dbgate/pinomin). So by default, it produces JSON log messages into console and log files. If you want to see formatted logs, please use [pino-pretty](https://github.com/pinojs/pino-pretty) log formatter.
```
Application menu definition (Electron main menu)

```diff
@@ -4,6 +4,7 @@ module.exports = ({ editMenu, isMac }) => [
     submenu: [
       { command: 'new.connection', hideDisabled: true },
       { command: 'new.sqliteDatabase', hideDisabled: true },
+      { command: 'new.duckdbDatabase', hideDisabled: true },
       { divider: true },
       { command: 'new.query', hideDisabled: true },
       { command: 'new.queryDesign', hideDisabled: true },
@@ -87,6 +88,9 @@ module.exports = ({ editMenu, isMac }) => [
       { command: 'folder.showData', hideDisabled: true },
       { command: 'new.gist', hideDisabled: true },
       { command: 'app.resetSettings', hideDisabled: true },
+      { divider: true },
+      { command: 'app.exportConnections', hideDisabled: true },
+      { command: 'app.importConnections', hideDisabled: true },
     ],
   },
   ...(isMac
@@ -104,6 +108,7 @@ module.exports = ({ editMenu, isMac }) => [
       { command: 'app.openWeb', hideDisabled: true },
       { command: 'app.openIssue', hideDisabled: true },
      { command: 'app.openSponsoring', hideDisabled: true },
+      { command: 'app.giveFeedback', hideDisabled: true },
       { divider: true },
       { command: 'settings.commands', hideDisabled: true },
       { command: 'tabs.changelog', hideDisabled: true },
```
volatilePackages module

```diff
@@ -21,6 +21,7 @@ const volatilePackages = [
   'axios',
   'ssh2',
   'wkx',
+  '@duckdb/node-api',
 ];
 
 module.exports = volatilePackages;
```
Dockerfile (node:22 image)

```diff
@@ -1,5 +1,7 @@
 FROM node:22
 
+LABEL org.opencontainers.image.source="https://github.com/dbgate/dbgate"
+
 RUN apt-get update && apt-get install -y \
   iputils-ping \
   iproute2 \
```
Dockerfile-alpine (node:18-alpine image)

```diff
@@ -1,5 +1,7 @@
 FROM node:18-alpine
 
+LABEL org.opencontainers.image.source="https://github.com/dbgate/dbgate"
+
 WORKDIR /home/dbgate-docker
 
 RUN apk --no-cache upgrade \
```
E2E test: add connection

```diff
@@ -112,4 +112,11 @@ describe('Add connection', () => {
 
     cy.contains('performance_schema');
   });
+
+  it('export connections', () => {
+    cy.testid('WidgetIconPanel_menu').click();
+    cy.contains('Tools').click();
+    cy.contains('Export connections').click();
+    cy.themeshot('export-connections');
+  });
 });
```
E2E test: data browser

```diff
@@ -126,7 +126,7 @@ describe('Data browser data', () => {
     cy.themeshot('data-browser-form-view');
   });
 
-  it.only('Column search', () => {
+  it('Column search', () => {
     cy.contains('MySql-connection').click();
     cy.contains('MyChinook').click();
     cy.contains('Customer').click();
@@ -248,14 +248,14 @@ describe('Data browser data', () => {
     cy.themeshot('database-diagram');
   });
 
-  it('Charts', () => {
-    cy.testid('WidgetIconPanel_file').click();
-    cy.contains('pie-chart').click();
-    cy.contains('line-chart').click();
-    cy.testid('TabsPanel_buttonSplit').click();
-    cy.testid('WidgetIconPanel_file').click();
-    cy.themeshot('view-split-charts');
-  });
+  // it('Charts', () => {
+  //   cy.testid('WidgetIconPanel_file').click();
+  //   cy.contains('pie-chart').click();
+  //   cy.contains('line-chart').click();
+  //   cy.testid('TabsPanel_buttonSplit').click();
+  //   cy.testid('WidgetIconPanel_file').click();
+  //   cy.themeshot('view-split-charts');
+  // });
 
   it('Keyboard configuration', () => {
     cy.testid('WidgetIconPanel_settings').click();
@@ -468,15 +468,24 @@ describe('Data browser data', () => {
     cy.themeshot('database-model-table-yaml');
   });
 
-  it('Data duplicator', () => {
+  it('Data replicator', () => {
     cy.contains('MySql-connection').click();
     cy.contains('MyChinook').click();
     cy.testid('WidgetIconPanel_archive').click();
     cy.contains('chinook-archive').rightclick();
-    cy.contains('Data duplicator').click();
-    cy.contains('Dry run').click();
-    cy.testid('DataDuplicatorTab_importIntoDb').click();
-    cy.contains('Duplicated Album, inserted 347 rows, mapped 0 rows, missing 0 rows, skipped 0 rows');
-    cy.themeshot('data-duplicator');
+    cy.contains('Data deployer').click();
+    cy.testid('TableControl_row_2_checkbox').click();
+    cy.testid('TableControl_row_2').click();
+    cy.testid('DataDeploySettings_find_checkbox').click();
+    cy.testid('DataDeploySettings_create_checkbox').click();
+    cy.testid('WidgetIconPanel_archive').click();
+    cy.themeshot('data-deployer');
+    cy.testid('DataDeployTab_importIntoDb').click();
+    cy.testid('ConfirmDataDeployModal_okButton').click();
+    cy.contains('Replicated Customer, inserted 59 rows');
+    cy.contains('Finished job script');
+    cy.testid('DataDeployTab_importIntoDb').click();
+    cy.themeshot('data-replicator');
   });
 });
```
E2E test: team edition

```diff
@@ -11,15 +11,17 @@ describe('Team edition tests', () => {
 
     cy.testid('AdminMenuWidget_itemConnections').click();
     cy.contains('New connection').click();
+    cy.contains('New connection').click();
+    cy.contains('New connection').click();
     cy.testid('ConnectionDriverFields_connectionType').select('PostgreSQL');
     cy.themeshot('connection-administration');
 
     cy.testid('AdminMenuWidget_itemRoles').click();
+    cy.contains('Permissions').click();
     cy.contains('logged-user').click();
     cy.themeshot('role-administration');
 
     cy.testid('AdminMenuWidget_itemUsers').click();
     cy.contains('New user').click();
     cy.themeshot('user-administration');
 
     cy.testid('AdminMenuWidget_itemAuthentication').click();
     cy.contains('Add authentication').click();
     cy.contains('Use database login').click();
@@ -77,6 +79,5 @@ describe('Team edition tests', () => {
     cy.testid('LoginPage_submitLogin').click();
     cy.testid('AdminMenuWidget_itemUsers').click();
     cy.contains('test@example.com');
-    cy.contains('Rows: 1');
   });
 });
```
docker-compose for test containers

```diff
@@ -21,8 +21,8 @@ services:
     build: containers/mysql-ssh-login
     restart: always
     ports:
-      - 16005:3306
-      - "16015:22"
+      - 16017:3306
+      - "16012:22"
 
   mysql-ssh-keyfile:
     build: containers/mysql-ssh-keyfile
```
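The mixed quoting of these port mappings follows from YAML 1.1 scalar rules: a value like `16012:22`, whose segment after the colon is below 60, matches the base-60 integer syntax and could be read as a number, while `16017:3306` cannot and stays a string. A minimal sketch:

```yaml
ports:
  - 16017:3306    # 3306 > 59: not valid base-60, parsed as the string "16017:3306"
  - "16012:22"    # unquoted, YAML 1.1 could parse this as 16012*60+22, so it is quoted
```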
Integration test: alter database

```diff
@@ -29,7 +29,7 @@ async function testDatabaseDiff(conn, driver, mangle, createObject = null) {
       driver,
       `create table ~t2 (
     ~id int not null primary key,
-    ~t1_id int null references ~t1(~id)
+    ~t1_id int ${driver.dialect.implicitNullDeclaration ? '' : 'null'} references ~t1(~id)
   )`
     );
 
@@ -52,7 +52,7 @@ async function testDatabaseDiff(conn, driver, mangle, createObject = null) {
 }
 
 describe('Alter database', () => {
-  test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
+  test.each(engines.filter(x => !x.skipReferences && !x.skipDropReferences).map(engine => [engine.label, engine]))(
     'Drop referenced table - %s',
     testWrapper(async (conn, driver, engine) => {
       await testDatabaseDiff(conn, driver, db => {
```
@@ -60,7 +60,9 @@ async function testTableDiff(engine, conn, driver, mangle) {
|
||||
if (!engine.skipReferences) {
|
||||
const query = formatQueryWithoutParams(
|
||||
driver,
|
||||
`create table ~t2 (~id int not null primary key, ~fkval int null references ~t1(~col_ref))`
|
||||
`create table ~t2 (~id int not null primary key, ~fkval int ${
|
||||
driver.dialect.implicitNullDeclaration ? '' : 'null'
|
||||
} references ~t1(~col_ref))`
|
||||
);
|
||||
|
||||
await driver.query(conn, transformSqlForEngine(engine, query));
|
||||
@@ -90,7 +92,7 @@ const TESTED_COLUMNS = ['col_pk', 'col_std', 'col_def', 'col_fk', 'col_ref', 'co
|
||||
// const TESTED_COLUMNS = ['col_std'];
|
||||
// const TESTED_COLUMNS = ['col_ref'];
|
||||
|
||||
function create_engines_columns_source(engines) {
|
||||
function createEnginesColumnsSource(engines) {
|
||||
return _.flatten(
|
||||
    engines.map(engine =>
      TESTED_COLUMNS.filter(col => col.endsWith('_pk') || !engine.skipNonPkRename)
@@ -116,45 +118,30 @@ describe('Alter table', () => {
    })
  );

  const columnsSource = create_engines_columns_source(engines);
  const dropableColumnsSrouce = columnsSource.filter(
    ([_label, col, engine]) => !engine.skipPkDrop || !col.endsWith('_pk')
  test.each(
    createEnginesColumnsSource(engines.filter(x => !x.skipDropColumn)).filter(
      ([_label, col, engine]) => !engine.skipPkDrop || !col.endsWith('_pk')
    )
  )(
    'Drop column - %s - %s',
    testWrapper(async (conn, driver, column, engine) => {
      await testTableDiff(engine, conn, driver, tbl => (tbl.columns = tbl.columns.filter(x => x.columnName != column)));
    })
  );
  const hasDropableColumns = dropableColumnsSrouce.length > 0;

  if (hasDropableColumns) {
    test.each(dropableColumnsSrouce)(
      'Drop column - %s - %s',
      testWrapper(async (conn, driver, column, engine) => {
        await testTableDiff(
          engine,
          conn,
          driver,
          tbl => (tbl.columns = tbl.columns.filter(x => x.columnName != column))
        );
      })
    );
  }
  test.each(createEnginesColumnsSource(engines.filter(x => !x.skipNullable && !x.skipChangeNullability)))(
    'Change nullability - %s - %s',
    testWrapper(async (conn, driver, column, engine) => {
      await testTableDiff(
        engine,
        conn,
        driver,
        tbl => (tbl.columns = tbl.columns.map(x => (x.columnName == column ? { ...x, notNull: true } : x)))
      );
    })
  );

  const hasEnginesWithNullable = engines.filter(x => !x.skipNullable).length > 0;

  if (hasEnginesWithNullable) {
    const source = create_engines_columns_source(engines.filter(x => !x.skipNullable));

    test.each(source)(
      'Change nullability - %s - %s',
      testWrapper(async (conn, driver, column, engine) => {
        await testTableDiff(
          engine,
          conn,
          driver,
          tbl => (tbl.columns = tbl.columns.map(x => (x.columnName == column ? { ...x, notNull: true } : x)))
        );
      })
    );
  }

  test.each(columnsSource)(
  test.each(createEnginesColumnsSource(engines.filter(x => !x.skipRenameColumn)))(
    'Rename column - %s - %s',
    testWrapper(async (conn, driver, column, engine) => {
      await testTableDiff(
@@ -175,37 +162,32 @@ describe('Alter table', () => {
    })
  );

  const enginesWithDefault = engines.filter(x => !x.skipDefaultValue);
  const hasEnginesWithDefault = enginesWithDefault.length > 0;
  test.each(engines.filter(x => !x.skipDefaultValue).map(engine => [engine.label, engine]))(
    'Add default value - %s',
    testWrapper(async (conn, driver, engine) => {
      await testTableDiff(engine, conn, driver, tbl => {
        tbl.columns.find(x => x.columnName == 'col_std').defaultValue = '123';
      });
    })
  );

  if (hasEnginesWithDefault) {
    test.each(enginesWithDefault.map(engine => [engine.label, engine]))(
      'Add default value - %s',
      testWrapper(async (conn, driver, engine) => {
        await testTableDiff(engine, conn, driver, tbl => {
          tbl.columns.find(x => x.columnName == 'col_std').defaultValue = '123';
        });
      })
    );
  test.each(engines.filter(x => !x.skipDefaultValue).map(engine => [engine.label, engine]))(
    'Unset default value - %s',
    testWrapper(async (conn, driver, engine) => {
      await testTableDiff(engine, conn, driver, tbl => {
        tbl.columns.find(x => x.columnName == 'col_def').defaultValue = undefined;
      });
    })
  );

    test.each(enginesWithDefault.map(engine => [engine.label, engine]))(
      'Unset default value - %s',
      testWrapper(async (conn, driver, engine) => {
        await testTableDiff(engine, conn, driver, tbl => {
          tbl.columns.find(x => x.columnName == 'col_def').defaultValue = undefined;
        });
      })
    );

    test.each(enginesWithDefault.map(engine => [engine.label, engine]))(
      'Change default value - %s',
      testWrapper(async (conn, driver, engine) => {
        await testTableDiff(engine, conn, driver, tbl => {
          tbl.columns.find(x => x.columnName == 'col_def').defaultValue = '567';
        });
      })
    );
  }
  test.each(engines.filter(x => !x.skipDefaultValue).map(engine => [engine.label, engine]))(
    'Change default value - %s',
    testWrapper(async (conn, driver, engine) => {
      await testTableDiff(engine, conn, driver, tbl => {
        tbl.columns.find(x => x.columnName == 'col_def').defaultValue = '567';
      });
    })
  );

  // test.each(engines.map(engine => [engine.label, engine]))(
  //   'Change autoincrement - %s',

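The hunks above replace the hand-rolled guard blocks (`hasDropableColumns`, `hasEnginesWithNullable`, `hasEnginesWithDefault`) with per-engine skip flags filtered directly in `test.each`. The `createEnginesColumnsSource` helper itself is not shown in this diff; a minimal sketch of what it plausibly returns, assuming the `TESTED_COLUMNS` list used above:

// Sketch only - the real createEnginesColumnsSource is defined elsewhere in the
// test suite. It yields one [label, column, engine] tuple per engine/column
// pair, which is the shape the testWrapper callbacks above destructure.
function createEnginesColumnsSource(engines) {
  return engines.flatMap(engine => TESTED_COLUMNS.map(col => [engine.label, col, engine]));
}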
@@ -1,160 +0,0 @@
const engines = require('../engines');
const stream = require('stream');
const { testWrapper } = require('../tools');
const dataDuplicator = require('dbgate-api/src/shell/dataDuplicator');
const { runCommandOnDriver, runQueryOnDriver } = require('dbgate-tools');

describe('Data duplicator', () => {
  test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
    'Insert simple data - %s',
    testWrapper(async (conn, driver, engine) => {
      runCommandOnDriver(conn, driver, dmp =>
        dmp.createTable({
          pureName: 't1',
          columns: [
            { columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
            { columnName: 'val', dataType: 'varchar(50)' },
          ],
          primaryKey: {
            columns: [{ columnName: 'id' }],
          },
        })
      );
      runCommandOnDriver(conn, driver, dmp =>
        dmp.createTable({
          pureName: 't2',
          columns: [
            { columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
            { columnName: 'val', dataType: 'varchar(50)' },
            { columnName: 'valfk', dataType: 'int', notNull: true },
          ],
          primaryKey: {
            columns: [{ columnName: 'id' }],
          },
          foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
        })
      );

      const gett1 = () =>
        stream.Readable.from([
          { __isStreamHeader: true, __isDynamicStructure: true },
          { id: 1, val: 'v1' },
          { id: 2, val: 'v2' },
          { id: 3, val: 'v3' },
        ]);
      const gett2 = () =>
        stream.Readable.from([
          { __isStreamHeader: true, __isDynamicStructure: true },
          { id: 1, val: 'v1', valfk: 1 },
          { id: 2, val: 'v2', valfk: 2 },
          { id: 3, val: 'v3', valfk: 3 },
        ]);

      await dataDuplicator({
        systemConnection: conn,
        driver,
        items: [
          {
            name: 't1',
            operation: 'copy',
            openStream: gett1,
          },
          {
            name: 't2',
            operation: 'copy',
            openStream: gett2,
          },
        ],
      });

      await dataDuplicator({
        systemConnection: conn,
        driver,
        items: [
          {
            name: 't1',
            operation: 'copy',
            openStream: gett1,
          },
          {
            name: 't2',
            operation: 'copy',
            openStream: gett2,
          },
        ],
      });

      const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
      expect(res1.rows[0].cnt.toString()).toEqual('6');

      const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
      expect(res2.rows[0].cnt.toString()).toEqual('6');
    })
  );

  test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
    'Skip nullable weak refs - %s',
    testWrapper(async (conn, driver, engine) => {
      runCommandOnDriver(conn, driver, dmp =>
        dmp.createTable({
          pureName: 't1',
          columns: [
            { columnName: 'id', dataType: 'int', notNull: true },
            { columnName: 'val', dataType: 'varchar(50)' },
          ],
          primaryKey: {
            columns: [{ columnName: 'id' }],
          },
        })
      );
      runCommandOnDriver(conn, driver, dmp =>
        dmp.createTable({
          pureName: 't2',
          columns: [
            { columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
            { columnName: 'val', dataType: 'varchar(50)' },
            { columnName: 'valfk', dataType: 'int', notNull: false },
          ],
          primaryKey: {
            columns: [{ columnName: 'id' }],
          },
          foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
        })
      );
      runCommandOnDriver(conn, driver, dmp => dmp.put("insert into ~t1 (~id, ~val) values (1, 'first')"));

      const gett2 = () =>
        stream.Readable.from([
          { __isStreamHeader: true, __isDynamicStructure: true },
          { id: 1, val: 'v1', valfk: 1 },
          { id: 2, val: 'v2', valfk: 2 },
        ]);

      await dataDuplicator({
        systemConnection: conn,
        driver,
        items: [
          {
            name: 't2',
            operation: 'copy',
            openStream: gett2,
          },
        ],
        options: {
          setNullForUnresolvedNullableRefs: true,
        },
      });

      const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
      expect(res1.rows[0].cnt.toString()).toEqual('1');

      const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
      expect(res2.rows[0].cnt.toString()).toEqual('2');

      const res3 = await runQueryOnDriver(conn, driver, dmp =>
        dmp.put(`select count(*) as ~cnt from ~t2 where ~valfk is not null`)
      );
      expect(res3.rows[0].cnt.toString()).toEqual('1');
    })
  );
});
integration-tests/__tests__/data-replicator.spec.js (new file, 306 lines)
@@ -0,0 +1,306 @@
const engines = require('../engines');
const stream = require('stream');
const { testWrapper } = require('../tools');
const dataReplicator = require('dbgate-api/src/shell/dataReplicator');
const deployDb = require('dbgate-api/src/shell/deployDb');
const storageModel = require('dbgate-api/src/storageModel');
const { runCommandOnDriver, runQueryOnDriver } = require('dbgate-tools');

describe('Data replicator', () => {
  test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
    'Insert simple data - %s',
    testWrapper(async (conn, driver, engine) => {
      runCommandOnDriver(conn, driver, dmp =>
        dmp.createTable({
          pureName: 't1',
          columns: [
            { columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
            { columnName: 'val', dataType: 'varchar(50)' },
          ],
          primaryKey: {
            columns: [{ columnName: 'id' }],
          },
        })
      );
      runCommandOnDriver(conn, driver, dmp =>
        dmp.createTable({
          pureName: 't2',
          columns: [
            { columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
            { columnName: 'val', dataType: 'varchar(50)' },
            { columnName: 'valfk', dataType: 'int', notNull: true },
          ],
          primaryKey: {
            columns: [{ columnName: 'id' }],
          },
          foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
        })
      );

      const gett1 = () =>
        stream.Readable.from([
          { __isStreamHeader: true, __isDynamicStructure: true },
          { id: 1, val: 'v1' },
          { id: 2, val: 'v2' },
          { id: 3, val: 'v3' },
        ]);
      const gett2 = () =>
        stream.Readable.from([
          { __isStreamHeader: true, __isDynamicStructure: true },
          { id: 1, val: 'v1', valfk: 1 },
          { id: 2, val: 'v2', valfk: 2 },
          { id: 3, val: 'v3', valfk: 3 },
        ]);

      await dataReplicator({
        systemConnection: conn,
        driver,
        items: [
          {
            name: 't1',
            createNew: true,
            openStream: gett1,
          },
          {
            name: 't2',
            createNew: true,
            openStream: gett2,
          },
        ],
      });

      await dataReplicator({
        systemConnection: conn,
        driver,
        items: [
          {
            name: 't1',
            createNew: true,
            openStream: gett1,
          },
          {
            name: 't2',
            createNew: true,
            openStream: gett2,
          },
        ],
      });

      const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
      expect(res1.rows[0].cnt.toString()).toEqual('6');

      const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
      expect(res2.rows[0].cnt.toString()).toEqual('6');
    })
  );

  test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
    'Skip nullable weak refs - %s',
    testWrapper(async (conn, driver, engine) => {
      runCommandOnDriver(conn, driver, dmp =>
        dmp.createTable({
          pureName: 't1',
          columns: [
            { columnName: 'id', dataType: 'int', notNull: true },
            { columnName: 'val', dataType: 'varchar(50)' },
          ],
          primaryKey: {
            columns: [{ columnName: 'id' }],
          },
        })
      );
      runCommandOnDriver(conn, driver, dmp =>
        dmp.createTable({
          pureName: 't2',
          columns: [
            { columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
            { columnName: 'val', dataType: 'varchar(50)' },
            { columnName: 'valfk', dataType: 'int', notNull: false },
          ],
          primaryKey: {
            columns: [{ columnName: 'id' }],
          },
          foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
        })
      );
      runCommandOnDriver(conn, driver, dmp => dmp.put("insert into ~t1 (~id, ~val) values (1, 'first')"));

      await dataReplicator({
        systemConnection: conn,
        driver,
        items: [
          {
            name: 't2',
            createNew: true,
            jsonArray: [
              { id: 1, val: 'v1', valfk: 1 },
              { id: 2, val: 'v2', valfk: 2 },
            ],
          },
        ],
        options: {
          setNullForUnresolvedNullableRefs: true,
        },
      });

      const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
      expect(res1.rows[0].cnt.toString()).toEqual('1');

      const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
      expect(res2.rows[0].cnt.toString()).toEqual('2');

      const res3 = await runQueryOnDriver(conn, driver, dmp =>
        dmp.put(`select count(*) as ~cnt from ~t2 where ~valfk is not null`)
      );
      expect(res3.rows[0].cnt.toString()).toEqual('1');
    })
  );

  test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
    'Import storage DB - %s',
    testWrapper(async (conn, driver, engine) => {
      await deployDb({
        systemConnection: conn,
        driver,
        loadedDbModel: storageModel,
        targetSchema: engine.defaultSchemaName,
      });

      async function queryValue(sql) {
        const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(sql));
        return res1.rows[0].val?.toString();
      }

      expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('2');
      expect(
        await queryValue(
          `select ~is_disabled as ~val from ~auth_methods where ~amoid='790ca4d2-7f01-4800-955b-d691b890cc50'`
        )
      ).toBeFalsy();

      const DB1 = {
        auth_methods: [
          { id: -1, name: 'Anonymous', amoid: '790ca4d2-7f01-4800-955b-d691b890cc50', is_disabled: 1 },
          { id: 10, name: 'OAuth', amoid: '4269b660-54b6-11ef-a3aa-a9021250bf4b' },
        ],
        auth_methods_config: [{ id: 20, auth_method_id: 10, key: 'oauthClient', value: 'dbgate' }],
        config: [
          { group: 'admin', key: 'encyptKey', value: '1234' },
          { group: 'admin', key: 'adminPasswordState', value: 'set' },
          { group: 'license', key: 'licenseKey', value: '123467' },
        ],
        roles: [
          { id: -3, name: 'superadmin' },
          { id: -2, name: 'logged-user' },
          { id: -1, name: 'anonymous-user' },
        ],
        role_permissions: [
          { id: 14, role_id: -1, permission: 'perm1' },
          { id: 29, role_id: -1, permission: 'perm2' },
          { id: 1, role_id: -1, permission: 'perm3' },
        ],
      };

      const DB2 = {
        auth_methods: [{ id: 10, name: 'My Auth', amoid: 'myauth1' }],
        auth_methods_config: [{ id: 20, auth_method_id: 10, key: 'my authClient', value: 'mydbgate' }],
        config: [],
        roles: [{ id: 1, name: 'test' }],
        role_permissions: [{ id: 14, role_id: 1, permission: 'permxx' }],
      };

      function createDuplConfig(db) {
        return {
          systemConnection: conn,
          driver,
          items: [
            {
              name: 'auth_methods',
              findExisting: true,
              updateExisting: true,
              createNew: true,
              matchColumns: ['amoid'],
              jsonArray: db.auth_methods,
            },
            {
              name: 'auth_methods_config',
              findExisting: true,
              updateExisting: true,
              createNew: true,
              matchColumns: ['auth_method_id', 'key'],
              jsonArray: db.auth_methods_config,
            },
            {
              name: 'config',
              findExisting: true,
              updateExisting: true,
              createNew: true,
              matchColumns: ['group', 'key'],
              jsonArray: db.config,
            },
            {
              name: 'roles',
              findExisting: true,
              updateExisting: true,
              createNew: true,
              matchColumns: ['name'],
              jsonArray: db.roles,
            },
            {
              name: 'role_permissions',
              findExisting: true,
              updateExisting: true,
              createNew: true,
              deleteMissing: true,
              matchColumns: ['role_id', 'permission'],
              deleteRestrictionColumns: ['role_id'],
              jsonArray: db.role_permissions,
            },
          ],
        };
      }

      await dataReplicator(createDuplConfig(DB1));

      expect(
        await queryValue(
          `select ~is_disabled as ~val from ~auth_methods where ~amoid='790ca4d2-7f01-4800-955b-d691b890cc50'`
        )
      ).toBeTruthy();

      expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('3');
      expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('1');
      expect(await queryValue(`select count(*) as ~val from ~config`)).toEqual('3');
      expect(await queryValue(`select ~value as ~val from ~auth_methods_config`)).toEqual('dbgate');
      expect(
        await queryValue(`select ~value as ~val from ~config where ~group='license' and ~key='licenseKey'`)
      ).toEqual('123467');
      expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('3');

      DB1.auth_methods_config[0].value = 'dbgate2';
      DB1.config[2].value = '567';
      DB1.role_permissions.splice(2, 1);

      await dataReplicator(createDuplConfig(DB1));
      expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('1');
      expect(await queryValue(`select count(*) as ~val from ~config`)).toEqual('3');
      expect(await queryValue(`select ~value as ~val from ~auth_methods_config`)).toEqual('dbgate2');
      expect(
        await queryValue(`select ~value as ~val from ~config where ~group='license' and ~key='licenseKey'`)
      ).toEqual('567');
      expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('2');

      // now add DB2
      await dataReplicator(createDuplConfig(DB2));

      expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('4');
      expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('2');
      expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('3');

      DB1.role_permissions.splice(1, 1);
      await dataReplicator(createDuplConfig(DB1));
      expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('2');
    }),
    15 * 1000
  );
});
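Compared with the deleted data-duplicator spec, each replicator item describes its merge behaviour instead of a single `operation: 'copy'`. A side-by-side sketch of the two item shapes, inferred only from the specs above (the `rows` array is an illustrative placeholder):

// Old duplicator item: one mode, rows always copied in.
const duplicatorItem = { name: 't1', operation: 'copy', openStream: gett1 };

// Replicator item: per-table merge policy against existing rows.
const replicatorItem = {
  name: 'auth_methods',
  findExisting: true,      // look up rows via matchColumns before inserting
  updateExisting: true,    // update rows that matched
  createNew: true,         // insert rows that did not match
  matchColumns: ['amoid'], // business key used for matching
  jsonArray: rows,         // rows may come from jsonArray or openStream
};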
@@ -51,7 +51,8 @@ describe('DB Import/export', () => {
      await copyStream(reader, writer);

      const res = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
      expect(res.rows[0].cnt.toString()).toEqual('6');
      const cnt = parseInt(res.rows[0].cnt.toString());
      expect(cnt).toEqual(6);
    })
  );

@@ -75,7 +76,8 @@ describe('DB Import/export', () => {
      await copyStream(reader, writer);

      const res = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
      expect(res.rows[0].cnt.toString()).toEqual('6');
      const cnt = parseInt(res.rows[0].cnt.toString());
      expect(cnt).toEqual(6);
    })
  );

@@ -103,10 +105,12 @@ describe('DB Import/export', () => {
      await copyStream(reader2, writer2);

      const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
      expect(res1.rows[0].cnt.toString()).toEqual('6');
      const cnt = parseInt(res1.rows[0].cnt.toString());
      expect(cnt).toEqual(6);

      const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
      expect(res2.rows[0].cnt.toString()).toEqual('6');
      const cnt2 = parseInt(res2.rows[0].cnt.toString());
      expect(cnt2).toEqual(6);
    })
  );
  const enginesWithDumpFile = engines.filter(x => x.dumpFile);
@@ -189,10 +193,12 @@ describe('DB Import/export', () => {
        systemConnection: conn,
        driver,
        folder: path.join(__dirname, '../../e2e-tests/data/my-guitar-shop'),
        transformRow: engine.transformModelRow,
      });

      const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~categories`));
      expect(res1.rows[0].cnt.toString()).toEqual('4');
      const cnt1 = parseInt(res1.rows[0].cnt.toString());
      expect(cnt1).toEqual(4);
    })
  );
});

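The count assertions are loosened here because `count(*)` can surface as a JS number, a string, or a BigInt depending on the driver, so comparing `toString()` output against a fixed string is brittle. Normalizing first keeps the expectation driver-agnostic:

// Works whether cnt arrives as 6, '6', or 6n:
const cnt = parseInt(res.rows[0].cnt.toString());
expect(cnt).toEqual(6);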
@@ -106,7 +106,9 @@ async function testDatabaseDeploy(engine, conn, driver, dbModelsYaml, options) {

  for (const loadedDbModel of dbModelsYaml) {
    if (_.isString(loadedDbModel)) {
      await driver.script(conn, formatQueryWithoutParams(driver, loadedDbModel));
      await driver.script(conn, formatQueryWithoutParams(driver, loadedDbModel), {
        useTransaction: engine.runDeployInTransaction,
      });
    } else {
      const { sql, isEmpty } = await generateDeploySql({
        systemConnection: conn.isPreparedOnly ? undefined : conn,
@@ -131,6 +133,7 @@ async function testDatabaseDeploy(engine, conn, driver, dbModelsYaml, options) {
        driver,
        loadedDbModel: convertModelToEngine(loadedDbModel, driver),
        dbdiffOptionsExtra,
        useTransaction: engine.runDeployInTransaction,
      });
    }

@@ -606,7 +609,7 @@ describe('Deploy database', () => {
    })
  );

  test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
  test.each(engines.filter(i => !i.skipDeploy && !i.skipRenameTable).map(engine => [engine.label, engine]))(
    'Mark table removed - %s',
    testWrapper(async (conn, driver, engine) => {
      await testDatabaseDeploy(engine, conn, driver, [[T1], [], []], {
@@ -822,7 +825,7 @@ describe('Deploy database', () => {
    })
  );

  test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
  test.each(engines.filter(i => !i.skipDeploy && !i.skipRenameTable).map(engine => [engine.label, engine]))(
    'Mark table removed, one remains - %s',
    testWrapper(async (conn, driver, engine) => {
      await testDatabaseDeploy(engine, conn, driver, [[T1, T2], [T2], [T2]], {

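The new `useTransaction: engine.runDeployInTransaction` option lets engines such as Firebird run deploy scripts transactionally. A sketch of how a driver might honour it; this wiring is assumed, not shown in the diff, and `runQuery` is a hypothetical helper:

async function script(conn, sql, options = {}) {
  if (options.useTransaction) {
    await runQuery(conn, 'BEGIN');
    try {
      await runQuery(conn, sql);
      await runQuery(conn, 'COMMIT');
    } catch (err) {
      await runQuery(conn, 'ROLLBACK'); // undo partial deploy on failure
      throw err;
    }
  } else {
    await runQuery(conn, sql); // previous behaviour: autocommit per statement
  }
}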
@@ -20,7 +20,11 @@ function flatSourceParameters() {
}

function flatSourceTriggers() {
  return _.flatten(engines.map(engine => (engine.triggers || []).map(trigger => [engine.label, trigger, engine])));
  return _.flatten(
    engines
      .filter(engine => !engine.skipTriggers)
      .map(engine => (engine.triggers || []).map(trigger => [engine.label, trigger, engine]))
  );
}

function flatSourceSchedulerEvents() {

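The filtered flatten yields one `test.each` row per engine/trigger pair, so engines with `skipTriggers` set contribute no rows at all. Shape of the result, with illustrative values only:

// [
//   ['Firebird', { testName: 'triggers after each row', ... }, firebirdEngine],
//   ['MySQL',    { testName: '...', ... },                     mysqlEngine],
// ]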
@@ -183,12 +183,12 @@ describe('Query', () => {
        { discardResult: true }
      );
      const res = await runQueryOnDriver(conn, driver, dmp => dmp.put('SELECT COUNT(*) AS ~cnt FROM ~t1'));
      // console.log(res);
      expect(res.rows[0].cnt == 3).toBeTruthy();
      const cnt = parseInt(res.rows[0].cnt);
      expect(cnt).toEqual(3);
    })
  );

  test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
  test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
    'Select scope identity - %s',
    testWrapper(async (conn, driver, engine) => {
      await runCommandOnDriver(conn, driver, dmp =>

@@ -94,7 +94,7 @@ describe('Table analyse', () => {
    })
  );

  test.each(engines.map(engine => [engine.label, engine]))(
  test.each(engines.filter(x => !x.skipIncrementalAnalysis).map(engine => [engine.label, engine]))(
    'Table add - incremental analysis - %s',
    testWrapper(async (conn, driver, engine) => {
      await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
@@ -112,7 +112,7 @@ describe('Table analyse', () => {
    })
  );

  test.each(engines.map(engine => [engine.label, engine]))(
  test.each(engines.filter(x => !x.skipIncrementalAnalysis).map(engine => [engine.label, engine]))(
    'Table remove - incremental analysis - %s',
    testWrapper(async (conn, driver, engine) => {
      await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));
@@ -130,7 +130,7 @@ describe('Table analyse', () => {
    })
  );

  test.each(engines.map(engine => [engine.label, engine]))(
  test.each(engines.filter(x => !x.skipIncrementalAnalysis).map(engine => [engine.label, engine]))(
    'Table change - incremental analysis - %s',
    testWrapper(async (conn, driver, engine) => {
      await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));

@@ -8,14 +8,25 @@ services:
  #   ports:
  #     - 15000:5432
  #
  # mariadb:
  #   image: mariadb
  #   command: --default-authentication-plugin=mysql_native_password
  #   restart: always
  #   ports:
  #     - 15004:3306
  #   environment:
  #     - MYSQL_ROOT_PASSWORD=Pwd2020Db
  mariadb:
    image: mariadb
    command: --default-authentication-plugin=mysql_native_password
    restart: always
    ports:
      - 15004:3306
    environment:
      - MYSQL_ROOT_PASSWORD=Pwd2020Db

  db2:
    image: icr.io/db2_community/db2:11.5.8.0
    privileged: true
    ports:
      - "15055:50000"
    environment:
      LICENSE: accept
      DB2INST1_PASSWORD: Pwd2020Db
      DBNAME: testdb
      DB2INSTANCE: db2inst1

  # mysql:
  #   image: mysql:8.0.18
@@ -25,7 +36,7 @@ services:
  #     - 15001:3306
  #   environment:
  #     - MYSQL_ROOT_PASSWORD=Pwd2020Db
  #


  # cassandradb:
  #   image: cassandra:5.0.2
@@ -81,11 +92,36 @@ services:
  #   ports:
  #     - 15006:1521

  libsql:
    image: ghcr.io/tursodatabase/libsql-server:latest
    platform: linux/amd64
  # libsql:
  #   image: ghcr.io/tursodatabase/libsql-server:latest
  #   platform: linux/amd64
  #   ports:
  #     - '8080:8080'
  #     - '5002:5001'
  #   volumes:
  #     - ./data/libsql:/var/lib/sqld

  firebird:
    image: firebirdsql/firebird:latest
    container_name: firebird-db
    environment:
      - FIREBIRD_DATABASE=mydatabase.fdb
      - FIREBIRD_USER=dbuser
      - FIREBIRD_PASSWORD=dbpassword
      - ISC_PASSWORD=masterkey
      - FIREBIRD_TRACE=false
      - FIREBIRD_USE_LEGACY_AUTH=true
    ports:
      - '8080:8080'
      - '5002:5001'
      - '3050:3050'
    volumes:
      - ./data/libsql:/var/lib/sqld
      - firebird-data:/firebird/data
      - ./firebird.conf:/firebird/firebird.conf # Mount custom config file
    healthcheck:
      test: ['CMD', 'nc', '-z', 'localhost', '3050']
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s

volumes:
  firebird-data:

@@ -551,7 +551,7 @@ const clickhouseEngine = {
  skipUnique: true,
  skipAutoIncrement: true,
  skipPkColumnTesting: true,
  skipDataDuplicator: true,
  skipDataReplicator: true,
  skipStringLength: true,
  alterTableAddColumnSyntax: true,
  dbSnapshotBySeconds: true,
@@ -643,7 +643,7 @@ const cassandraEngine = {
  skipOrderBy: true,
  skipAutoIncrement: true,
  skipDataModifications: true,
  skipDataDuplicator: true,
  skipDataReplicator: true,
  skipDeploy: true,
  skipImportModel: true,

@@ -654,6 +654,92 @@ const cassandraEngine = {
  objects: [],
};

/** @type {import('dbgate-types').TestEngineInfo} */
const duckdbEngine = {
  label: 'DuckDB',
  generateDbFile: true,
  defaultSchemaName: 'main',
  connection: {
    engine: 'duckdb@dbgate-plugin-duckdb',
  },
  objects: [views],
  skipOnCI: false,
  skipChangeColumn: true,
  // skipIndexes: true,
  skipStringLength: true,
  skipTriggers: true,
  skipDataReplicator: true,
  skipAutoIncrement: true,
  skipDropColumn: true,
  skipRenameColumn: true,
  skipChangeNullability: true,
  skipDeploy: true,
  supportRenameSqlObject: true,
  skipIncrementalAnalysis: true,
  skipDefaultValue: true,
  skipDropReferences: true,
};

/** @type {import('dbgate-types').TestEngineInfo} */
const firebirdEngine = {
  label: 'Firebird',
  generateDbFile: true,
  databaseFileLocationOnServer: '/var/lib/firebird/data/',
  defaultSchemaName: 'main',
  connection: {
    engine: 'firebird@dbgate-plugin-firebird',
    server: 'localhost',
    port: 3050,
    // databaseUrl: '/var/lib/firebird/data/mydatabase.fdb',
    // databaseFile: '/var/lib/firebird/data/mydatabase.fdb',
    user: 'SYSDBA',
    password: 'masterkey',
  },
  objects: [],
  triggers: [
    {
      testName: 'triggers after each row',
      create: `CREATE OR ALTER TRIGGER ~obj1 AFTER INSERT ON ~t1 AS BEGIN END;`,
      drop: 'DROP TRIGGER ~obj1;',
      objectTypeField: 'triggers',
      expected: {
        pureName: 'obj1',
        tableName: 't1',
        eventType: 'INSERT',
        triggerTiming: 'AFTER',
      },
    },
  ],
  skipOnCI: false,
  runDeployInTransaction: true,
  skipDataModifications: true,
  skipChangeColumn: true,
  // skipIndexes: true,
  // skipStringLength: true,
  // skipTriggers: true,
  skipDataReplicator: true,
  skipAutoIncrement: true,
  // skipDropColumn: true,
  skipRenameColumn: true,
  // skipChangeNullability: true,
  // skipDeploy: true,
  // supportRenameSqlObject: true,
  skipIncrementalAnalysis: true,
  skipRenameTable: true,
  transformModelRow: row => {
    return Object.fromEntries(
      Object.entries(row).map(([key, value]) => {
        if (/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}$/.test(value)) {
          return [key, value.replace('T', ' ')];
        }
        return [key, value];
      })
    );
  },
  // skipDefaultValue: true,
  skipDropReferences: true,
};

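The `transformModelRow` hook above rewrites ISO-looking timestamp strings so imported model data matches what Firebird hands back. Its effect on an invented sample row:

// transformModelRow({ id: 1, created: '2020-01-02T03:04:05' })
// -> { id: 1, created: '2020-01-02 03:04:05' }
// Non-matching values (numbers, other strings) pass through unchanged.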
const enginesOnCi = [
  // all engines, which would be run on GitHub actions
  mysqlEngine,
@@ -667,6 +753,8 @@ const enginesOnCi = [
  clickhouseEngine,
  oracleEngine,
  cassandraEngine,
  duckdbEngine,
  firebirdEngine,
];

const enginesOnLocal = [
@@ -680,8 +768,10 @@ const enginesOnLocal = [
  // cockroachDbEngine,
  // clickhouseEngine,
  // libsqlFileEngine,
  libsqlWsEngine,
  // libsqlWsEngine,
  // oracleEngine,
  // duckdbEngine,
  firebirdEngine,
];

/** @type {import('dbgate-types').TestEngineInfo[] & Record<string, import('dbgate-types').TestEngineInfo>} */
@@ -696,3 +786,7 @@ module.exports.cockroachDbEngine = cockroachDbEngine;
module.exports.clickhouseEngine = clickhouseEngine;
module.exports.oracleEngine = oracleEngine;
module.exports.cassandraEngine = cassandraEngine;
module.exports.libsqlFileEngine = libsqlFileEngine;
module.exports.libsqlWsEngine = libsqlWsEngine;
module.exports.duckdbEngine = duckdbEngine;
module.exports.firebirdEngine = firebirdEngine;

integration-tests/firebird.conf (new file, 45 lines)
@@ -0,0 +1,45 @@
# Custom Firebird Configuration

# Wire encryption settings
# Options: Enabled, Required, Disabled
WireCrypt = Disabled

# Authentication settings
# Add Legacy_Auth to support older clients
AuthServer = Legacy_Auth

# User manager plugin
UserManager = Legacy_UserManager

# Default character set
DefaultCharSet = UTF8

# Buffer settings for better performance
DefaultDbCachePages = 2048
TempCacheLimit = 512M

# Connection settings
ConnectionTimeout = 180
DatabaseGrowthIncrement = 128M

# TCP Protocol settings
TcpRemoteBufferSize = 8192
TcpNoNagle = 1

# Security settings
RemoteServiceName = gds_db
RemoteServicePort = 3050
RemoteAuxPort = 0
RemotePipeName = firebird

# Lock settings
LockMemSize = 1M
LockHashSlots = 8191
LockAcquireSpins = 0

# Log settings
FileSystemCacheThreshold = 65536
FileSystemCacheSize = 0

# Compatibility settings for older clients
CompatiblityDialect = 3
@@ -12,7 +12,7 @@
    "wait:local": "cross-env DEVMODE=1 LOCALTEST=1 node wait.js",
    "wait:ci": "cross-env DEVMODE=1 CITEST=1 node wait.js",
    "test:local": "cross-env DEVMODE=1 LOCALTEST=1 jest --testTimeout=5000",
    "test:local:path": "cross-env DEVMODE=1 LOCALTEST=1 jest --runTestsByPath __tests__/data-duplicator.spec.js",
    "test:local:path": "cross-env DEVMODE=1 LOCALTEST=1 jest --runTestsByPath __tests__/alter-database.spec.js",
    "test:ci": "cross-env DEVMODE=1 CITEST=1 jest --runInBand --json --outputFile=result.json --testLocationInResults --detectOpenHandles --forceExit --testTimeout=10000",
    "run:local": "docker-compose down && docker-compose up -d && yarn wait:local && yarn test:local"
  },

@@ -5,7 +5,12 @@ const crypto = require('crypto');
function randomDbName(dialect) {
  const generatedKey = crypto.randomBytes(6);
  const newKey = generatedKey.toString('hex');
  const res = `db${newKey}`;
  let res = `db${newKey}`;

  if (dialect.dbFileExtension) {
    res += dialect.dbFileExtension;
  }

  if (dialect.upperCaseAllDbObjectNames) return res.toUpperCase();
  return res;
}
@@ -17,7 +22,7 @@ async function connect(engine, database) {
  if (engine.generateDbFile) {
    const conn = await driver.connect({
      ...connection,
      databaseFile: `dbtemp/${database}`,
      databaseFile: (engine.databaseFileLocationOnServer ?? 'dbtemp/') + database,
    });
    return conn;
  } else {
@@ -42,7 +47,7 @@ async function prepareConnection(engine, database) {
  if (engine.generateDbFile) {
    return {
      ...connection,
      databaseFile: `dbtemp/${database}`,
      databaseFile: (engine.databaseFileLocationOnServer ?? 'dbtemp/') + database,
      isPreparedOnly: true,
    };
  } else {

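With the fixed `randomDbName`, file-based engines get a proper extension and, where the dialect demands it, upper-cased names. Example outputs (the hex key is invented; real keys are random):

// randomDbName({})                          -> 'db3f9c0a1b2c4d'
// randomDbName({ dbFileExtension: '.fdb' }) -> 'db3f9c0a1b2c4d.fdb'
// randomDbName({ dbFileExtension: '.fdb', upperCaseAllDbObjectNames: true })
//                                           -> 'DB3F9C0A1B2C4D.FDB'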
@@ -1,6 +1,6 @@
{
  "private": true,
  "version": "6.3.2",
  "version": "6.4.3-premium-beta.4",
  "name": "dbgate-all",
  "workspaces": [
    "packages/*",
@@ -9,6 +9,7 @@
  ],
  "scripts": {
    "start:api": "yarn workspace dbgate-api start | pino-pretty",
    "start:api:watch": "nodemon --watch 'src/**' --ext 'ts,json,js' --exec yarn start:api",
    "start:api:json": "yarn workspace dbgate-api start",
    "start:app": "cd app && yarn start | pino-pretty",
    "start:app:singledb": "CONNECTIONS=con1 SERVER_con1=localhost ENGINE_con1=mysql@dbgate-plugin-mysql USER_con1=root PASSWORD_con1=Pwd2020Db SINGLE_CONNECTION=con1 SINGLE_DATABASE=Chinook yarn start:app",

@@ -1,5 +1,7 @@
DEVMODE=1
SHELL_SCRIPTING=1
# LOCAL_DBGATE_CLOUD=1
# LOCAL_DBGATE_IDENTITY=1

# CLOUD_UPGRADE_FILE=c:\test\upg\upgrade.zip


@@ -22,6 +22,7 @@
  "dependencies": {
    "@aws-sdk/rds-signer": "^3.665.0",
    "activedirectory2": "^2.1.0",
    "archiver": "^7.0.1",
    "async-lock": "^1.2.6",
    "axios": "^0.21.1",
    "body-parser": "^1.19.0",
@@ -30,7 +31,7 @@
    "cors": "^2.8.5",
    "cross-env": "^6.0.3",
    "dbgate-datalib": "^6.0.0-alpha.1",
    "dbgate-query-splitter": "^4.11.3",
    "dbgate-query-splitter": "^4.11.5",
    "dbgate-sqltree": "^6.0.0-alpha.1",
    "dbgate-tools": "^6.0.0-alpha.1",
    "debug": "^4.3.4",
@@ -62,7 +63,8 @@
    "simple-encryptor": "^4.0.0",
    "ssh2": "^1.16.0",
    "stream-json": "^1.8.0",
    "tar": "^6.0.5"
    "tar": "^6.0.5",
    "yauzl": "^3.2.0"
  },
  "scripts": {
    "start": "env-cmd -f .env node src/index.js --listen-api",

@@ -2,14 +2,20 @@ const fs = require('fs-extra');
const readline = require('readline');
const crypto = require('crypto');
const path = require('path');
const { archivedir, clearArchiveLinksCache, resolveArchiveFolder } = require('../utility/directories');
const { archivedir, clearArchiveLinksCache, resolveArchiveFolder, uploadsdir } = require('../utility/directories');
const socket = require('../utility/socket');
const loadFilesRecursive = require('../utility/loadFilesRecursive');
const getJslFileName = require('../utility/getJslFileName');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const { getLogger, extractErrorLogData, jsonLinesParse } = require('dbgate-tools');
const dbgateApi = require('../shell');
const jsldata = require('./jsldata');
const platformInfo = require('../utility/platformInfo');
const { isProApp } = require('../utility/checkLicense');
const listZipEntries = require('../utility/listZipEntries');
const unzipJsonLinesFile = require('../shell/unzipJsonLinesFile');
const { zip } = require('lodash');
const zipDirectory = require('../shell/zipDirectory');
const unzipDirectory = require('../shell/unzipDirectory');

const logger = getLogger('archive');

@@ -47,9 +53,31 @@ module.exports = {
    return folder;
  },

  async getZipFiles({ file }) {
    const entries = await listZipEntries(path.join(archivedir(), file));
    const files = entries.map(entry => {
      let name = entry.fileName;
      if (isProApp() && entry.fileName.endsWith('.jsonl')) {
        name = entry.fileName.slice(0, -6);
      }
      return {
        name: name,
        label: name,
        type: isProApp() && entry.fileName.endsWith('.jsonl') ? 'jsonl' : 'other',
      };
    });
    return files;
  },

  files_meta: true,
  async files({ folder }) {
    try {
      if (folder.endsWith('.zip')) {
        if (await fs.exists(path.join(archivedir(), folder))) {
          return this.getZipFiles({ file: folder });
        }
        return [];
      }
      const dir = resolveArchiveFolder(folder);
      if (!(await fs.exists(dir))) return [];
      const files = await loadFilesRecursive(dir); // fs.readdir(dir);
@@ -91,6 +119,16 @@ module.exports = {
    return true;
  },

  createFile_meta: true,
  async createFile({ folder, file, fileType, tableInfo }) {
    await fs.writeFile(
      path.join(resolveArchiveFolder(folder), `${file}.${fileType}`),
      tableInfo ? JSON.stringify({ __isStreamHeader: true, tableInfo }) : ''
    );
    socket.emitChanged(`archive-files-changed`, { folder });
    return true;
  },

  deleteFile_meta: true,
  async deleteFile({ folder, file, fileType }) {
    await fs.unlink(path.join(resolveArchiveFolder(folder), `${file}.${fileType}`));
@@ -158,7 +196,7 @@ module.exports = {
  deleteFolder_meta: true,
  async deleteFolder({ folder }) {
    if (!folder) throw new Error('Missing folder parameter');
    if (folder.endsWith('.link')) {
    if (folder.endsWith('.link') || folder.endsWith('.zip')) {
      await fs.unlink(path.join(archivedir(), folder));
    } else {
      await fs.rmdir(path.join(archivedir(), folder), { recursive: true });
@@ -204,9 +242,10 @@ module.exports = {
  },

  async getNewArchiveFolder({ database }) {
    const isLink = database.endsWith(database);
    const name = isLink ? database.slice(0, -5) : database;
    const suffix = isLink ? '.link' : '';
    const isLink = database.endsWith('.link');
    const isZip = database.endsWith('.zip');
    const name = isLink ? database.slice(0, -5) : isZip ? database.slice(0, -4) : database;
    const suffix = isLink ? '.link' : isZip ? '.zip' : '';
    if (!(await fs.exists(path.join(archivedir(), database)))) return database;
    let index = 2;
    while (await fs.exists(path.join(archivedir(), `${name}${index}${suffix}`))) {
@@ -214,4 +253,58 @@ module.exports = {
    }
    return `${name}${index}${suffix}`;
  },

  getArchiveData_meta: true,
  async getArchiveData({ folder, file }) {
    let rows;
    if (folder.endsWith('.zip')) {
      rows = await unzipJsonLinesFile(path.join(archivedir(), folder), `${file}.jsonl`);
    } else {
      rows = jsonLinesParse(await fs.readFile(path.join(archivedir(), folder, `${file}.jsonl`), { encoding: 'utf8' }));
    }
    return rows.filter(x => !x.__isStreamHeader);
  },

  saveUploadedZip_meta: true,
  async saveUploadedZip({ filePath, fileName }) {
    if (!fileName?.endsWith('.zip')) {
      throw new Error(`${fileName} is not a ZIP file`);
    }

    const folder = await this.getNewArchiveFolder({ database: fileName });
    await fs.copyFile(filePath, path.join(archivedir(), folder));
    socket.emitChanged(`archive-folders-changed`);

    return null;
  },

  zip_meta: true,
  async zip({ folder }) {
    const newFolder = await this.getNewArchiveFolder({ database: folder + '.zip' });
    await zipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
    socket.emitChanged(`archive-folders-changed`);

    return null;
  },

  unzip_meta: true,
  async unzip({ folder }) {
    const newFolder = await this.getNewArchiveFolder({ database: folder.slice(0, -4) });
    await unzipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
    socket.emitChanged(`archive-folders-changed`);

    return null;
  },

  getZippedPath_meta: true,
  async getZippedPath({ folder }) {
    if (folder.endsWith('.zip')) {
      return { filePath: path.join(archivedir(), folder) };
    }

    const uploadName = crypto.randomUUID();
    const filePath = path.join(uploadsdir(), uploadName);
    await zipDirectory(path.join(archivedir(), folder), filePath);
    return { filePath };
  },
};

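The `getNewArchiveFolder` fix matters: the old test `database.endsWith(database)` is always true, so every name was treated as a `.link` and had five characters chopped off. With the fix, collision suffixes keep the extension intact. Expected behaviour on invented names, assuming 'archive.zip' already exists in archivedir():

// getNewArchiveFolder({ database: 'archive.zip' })  -> 'archive2.zip'
// getNewArchiveFolder({ database: 'archive.link' }) -> 'archive2.link'
// getNewArchiveFolder({ database: 'archive' })      -> 'archive2'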
@@ -12,6 +12,9 @@ const {
  getAuthProviderById,
} = require('../auth/authProvider');
const storage = require('./storage');
const { decryptPasswordString } = require('../utility/crypting');
const { createDbGateIdentitySession, startCloudTokenChecking } = require('../utility/cloudIntf');
const socket = require('../utility/socket');

const logger = getLogger('auth');

@@ -44,6 +47,7 @@ function authMiddleware(req, res, next) {
    '/connections/dblogin-auth',
    '/connections/dblogin-auth-token',
    '/health',
    '/__health',
  ];

  // console.log('********************* getAuthProvider()', getAuthProvider());
@@ -95,7 +99,7 @@ module.exports = {
    let adminPassword = process.env.ADMIN_PASSWORD;
    if (!adminPassword) {
      const adminConfig = await storage.readConfig({ group: 'admin' });
      adminPassword = adminConfig?.adminPassword;
      adminPassword = decryptPasswordString(adminConfig?.adminPassword);
    }
    if (adminPassword && adminPassword == password) {
      return {
@@ -133,5 +137,14 @@ module.exports = {
    return getAuthProviderById(amoid).redirect(params);
  },

  createCloudLoginSession_meta: true,
  async createCloudLoginSession({ client }) {
    const res = await createDbGateIdentitySession(client);
    startCloudTokenChecking(res.sid, tokenHolder => {
      socket.emit('got-cloud-token', tokenHolder);
    });
    return res;
  },

  authMiddleware,
};

packages/api/src/controllers/cloud.js (new file, 250 lines)
@@ -0,0 +1,250 @@
const {
  getPublicCloudFiles,
  getPublicFileData,
  refreshPublicFiles,
  callCloudApiGet,
  callCloudApiPost,
  getCloudFolderEncryptor,
  getCloudContent,
  putCloudContent,
  removeCloudCachedConnection,
} = require('../utility/cloudIntf');
const connections = require('./connections');
const socket = require('../utility/socket');
const { recryptConnection, getInternalEncryptor, encryptConnection } = require('../utility/crypting');
const { getConnectionLabel, getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('cloud');
const _ = require('lodash');
const fs = require('fs-extra');

module.exports = {
  publicFiles_meta: true,
  async publicFiles() {
    const res = await getPublicCloudFiles();
    return res;
  },

  publicFileData_meta: true,
  async publicFileData({ path }) {
    const res = getPublicFileData(path);
    return res;
  },

  refreshPublicFiles_meta: true,
  async refreshPublicFiles({ isRefresh }) {
    await refreshPublicFiles(isRefresh);
    return {
      status: 'ok',
    };
  },

  contentList_meta: true,
  async contentList() {
    try {
      const resp = await callCloudApiGet('content-list');
      return resp;
    } catch (err) {
      logger.error(extractErrorLogData(err), 'Error getting cloud content list');

      return [];
    }
  },

  getContent_meta: true,
  async getContent({ folid, cntid }) {
    const resp = await getCloudContent(folid, cntid);
    return resp;
  },

  putContent_meta: true,
  async putContent({ folid, cntid, content, name, type }) {
    const resp = await putCloudContent(folid, cntid, content, name, type);
    socket.emitChanged('cloud-content-changed');
    socket.emit('cloud-content-updated');
    return resp;
  },

  createFolder_meta: true,
  async createFolder({ name }) {
    const resp = await callCloudApiPost(`folders/create`, { name });
    socket.emitChanged('cloud-content-changed');
    socket.emit('cloud-content-updated');
    return resp;
  },

  grantFolder_meta: true,
  async grantFolder({ inviteLink }) {
    const m = inviteLink.match(/^dbgate\:\/\/folder\/v1\/([a-zA-Z0-9]+)\?mode=(read|write|admin)$/);
    if (!m) {
      throw new Error('Invalid invite link format');
    }
    const invite = m[1];
    const mode = m[2];

    const resp = await callCloudApiPost(`folders/grant/${mode}`, { invite });
    socket.emitChanged('cloud-content-changed');
    socket.emit('cloud-content-updated');
    return resp;
  },

  renameFolder_meta: true,
  async renameFolder({ folid, name }) {
    const resp = await callCloudApiPost(`folders/rename`, { folid, name });
    socket.emitChanged('cloud-content-changed');
    socket.emit('cloud-content-updated');
    return resp;
  },

  deleteFolder_meta: true,
  async deleteFolder({ folid }) {
    const resp = await callCloudApiPost(`folders/delete`, { folid });
    socket.emitChanged('cloud-content-changed');
    socket.emit('cloud-content-updated');
    return resp;
  },

  getInviteToken_meta: true,
  async getInviteToken({ folid, role }) {
    const resp = await callCloudApiGet(`invite-token/${folid}/${role}`);
    return resp;
  },

  refreshContent_meta: true,
  async refreshContent() {
    socket.emitChanged('cloud-content-changed');
    socket.emit('cloud-content-updated');
    return {
      status: 'ok',
    };
  },

  copyConnectionCloud_meta: true,
  async copyConnectionCloud({ conid, folid }) {
    const conn = await connections.getCore({ conid });
    const folderEncryptor = await getCloudFolderEncryptor(folid);
    const recryptedConn = recryptConnection(conn, getInternalEncryptor(), folderEncryptor);
    const connToSend = _.omit(recryptedConn, ['_id']);
    const resp = await putCloudContent(
      folid,
      undefined,
      JSON.stringify(connToSend),
      getConnectionLabel(conn),
      'connection'
    );
    return resp;
  },

  saveConnection_meta: true,
  async saveConnection({ folid, connection }) {
    let cntid = undefined;
    if (connection._id) {
      const m = connection._id.match(/^cloud\:\/\/(.+)\/(.+)$/);
      if (!m) {
        throw new Error('Invalid cloud connection ID format');
      }
      folid = m[1];
      cntid = m[2];
    }

    if (!folid) {
      throw new Error('Missing cloud folder ID');
    }

    const folderEncryptor = await getCloudFolderEncryptor(folid);
    const recryptedConn = encryptConnection(connection, folderEncryptor);
    const resp = await putCloudContent(
      folid,
      cntid,
      JSON.stringify(recryptedConn),
      getConnectionLabel(recryptedConn),
      'connection'
    );

    if (resp.apiErrorMessage) {
      return resp;
    }

    removeCloudCachedConnection(folid, resp.cntid);
    cntid = resp.cntid;
    socket.emitChanged('cloud-content-changed');
    socket.emit('cloud-content-updated');
    return {
      ...recryptedConn,
      _id: `cloud://${folid}/${cntid}`,
    };
  },

  duplicateConnection_meta: true,
  async duplicateConnection({ conid }) {
    const m = conid.match(/^cloud\:\/\/(.+)\/(.+)$/);
    if (!m) {
      throw new Error('Invalid cloud connection ID format');
    }
    const folid = m[1];
    const cntid = m[2];
    const respGet = await getCloudContent(folid, cntid);
    const conn = JSON.parse(respGet.content);
    const conn2 = {
      ...conn,
      displayName: getConnectionLabel(conn) + ' - copy',
    };
    const respPut = await putCloudContent(folid, undefined, JSON.stringify(conn2), conn2.displayName, 'connection');
    return respPut;
  },

  deleteConnection_meta: true,
  async deleteConnection({ conid }) {
    const m = conid.match(/^cloud\:\/\/(.+)\/(.+)$/);
    if (!m) {
      throw new Error('Invalid cloud connection ID format');
    }
    const folid = m[1];
    const cntid = m[2];
    const resp = await callCloudApiPost(`content/delete/${folid}/${cntid}`);
    socket.emitChanged('cloud-content-changed');
    socket.emit('cloud-content-updated');
    return resp;
  },

  deleteContent_meta: true,
  async deleteContent({ folid, cntid }) {
    const resp = await callCloudApiPost(`content/delete/${folid}/${cntid}`);
    socket.emitChanged('cloud-content-changed');
    socket.emit('cloud-content-updated');
    return resp;
  },

  renameContent_meta: true,
  async renameContent({ folid, cntid, name }) {
    const resp = await callCloudApiPost(`content/rename/${folid}/${cntid}`, { name });
    socket.emitChanged('cloud-content-changed');
    socket.emit('cloud-content-updated');
    return resp;
  },

  saveFile_meta: true,
  async saveFile({ folid, cntid, fileName, data, contentFolder, format }) {
    const resp = await putCloudContent(folid, cntid, data, fileName, 'file', contentFolder, format);
    socket.emitChanged('cloud-content-changed');
    socket.emit('cloud-content-updated');
    return resp;
  },

  copyFile_meta: true,
  async copyFile({ folid, cntid, name }) {
    const resp = await callCloudApiPost(`content/duplicate/${folid}/${cntid}`, { name });
    socket.emitChanged('cloud-content-changed');
    socket.emit('cloud-content-updated');
    return resp;
  },

  exportFile_meta: true,
  async exportFile({ folid, cntid, filePath }, req) {
    const { content } = await getCloudContent(folid, cntid);
    if (!content) {
      throw new Error('File not found');
    }
    await fs.writeFile(filePath, content);
    return true;
  },
};
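The `cloud://<folid>/<cntid>` parse is repeated verbatim in saveConnection, duplicateConnection and deleteConnection above; a shared helper would remove the duplication (sketch only, not part of this diff):

function parseCloudContentId(conid) {
  const m = conid.match(/^cloud:\/\/(.+)\/(.+)$/);
  if (!m) throw new Error('Invalid cloud connection ID format');
  return { folid: m[1], cntid: m[2] };
}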
@@ -19,6 +19,14 @@ const storage = require('./storage');
|
||||
const { getAuthProxyUrl } = require('../utility/authProxy');
|
||||
const { getPublicHardwareFingerprint } = require('../utility/hardwareFingerprint');
|
||||
const { extractErrorMessage } = require('dbgate-tools');
|
||||
const {
|
||||
generateTransportEncryptionKey,
|
||||
createTransportEncryptor,
|
||||
recryptConnection,
|
||||
getInternalEncryptor,
|
||||
recryptUser,
|
||||
recryptObjectPasswordFieldInPlace,
|
||||
} = require('../utility/crypting');
|
||||
|
||||
const lock = new AsyncLock();
|
||||
|
||||
@@ -107,6 +115,7 @@ module.exports = {
|
||||
datadir(),
|
||||
processArgs.runE2eTests ? 'connections-e2etests.jsonl' : 'connections.jsonl'
|
||||
),
|
||||
supportCloudAutoUpgrade: !!process.env.CLOUD_UPGRADE_FILE,
|
||||
...currentVersion,
|
||||
};
|
||||
|
||||
@@ -144,7 +153,7 @@ module.exports = {
|
||||
const res = {
|
||||
...value,
|
||||
};
|
||||
if (value['app.useNativeMenu'] !== true && value['app.useNativeMenu'] !== false) {
|
||||
if (platformInfo.isElectron && value['app.useNativeMenu'] !== true && value['app.useNativeMenu'] !== false) {
|
||||
// res['app.useNativeMenu'] = os.platform() == 'darwin' ? true : false;
|
||||
res['app.useNativeMenu'] = false;
|
||||
}
|
||||
@@ -161,14 +170,19 @@ module.exports = {
|
||||
|
||||
async loadSettings() {
|
||||
try {
|
||||
const settingsText = await fs.readFile(
|
||||
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
|
||||
{ encoding: 'utf-8' }
|
||||
);
|
||||
return {
|
||||
...this.fillMissingSettings(JSON.parse(settingsText)),
|
||||
'other.licenseKey': platformInfo.isElectron ? await this.loadLicenseKey() : undefined,
|
||||
};
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
const settings = await storage.readConfig({ group: 'settings' });
|
||||
return this.fillMissingSettings(settings);
|
||||
} else {
|
||||
const settingsText = await fs.readFile(
|
||||
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
|
||||
{ encoding: 'utf-8' }
|
||||
);
|
||||
return {
|
||||
...this.fillMissingSettings(JSON.parse(settingsText)),
|
||||
'other.licenseKey': platformInfo.isElectron ? await this.loadLicenseKey() : undefined,
|
||||
};
|
||||
}
|
||||
} catch (err) {
|
||||
return this.fillMissingSettings({});
|
||||
}
|
||||
@@ -246,19 +260,31 @@ module.exports = {
|
||||
const res = await lock.acquire('settings', async () => {
|
||||
const currentValue = await this.loadSettings();
|
||||
try {
|
||||
const updated = {
|
||||
...currentValue,
|
||||
..._.omit(values, ['other.licenseKey']),
|
||||
};
|
||||
await fs.writeFile(
|
||||
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
|
||||
JSON.stringify(updated, undefined, 2)
|
||||
);
|
||||
// this.settingsValue = updated;
|
||||
let updated = currentValue;
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
updated = {
|
||||
...currentValue,
|
||||
...values,
|
||||
};
|
||||
await storage.writeConfig({
|
||||
group: 'settings',
|
||||
config: updated,
|
||||
});
|
||||
} else {
|
||||
updated = {
|
||||
...currentValue,
|
||||
..._.omit(values, ['other.licenseKey']),
|
||||
};
|
||||
await fs.writeFile(
|
||||
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
|
||||
JSON.stringify(updated, undefined, 2)
|
||||
);
|
||||
// this.settingsValue = updated;
|
||||
|
||||
if (currentValue['other.licenseKey'] != values['other.licenseKey']) {
|
||||
await this.saveLicenseKey({ licenseKey: values['other.licenseKey'] });
|
||||
socket.emitChanged(`config-changed`);
|
||||
if (currentValue['other.licenseKey'] != values['other.licenseKey']) {
|
||||
await this.saveLicenseKey({ licenseKey: values['other.licenseKey'] });
|
||||
socket.emitChanged(`config-changed`);
|
||||
}
|
||||
}
|
||||
|
||||
socket.emitChanged(`settings-changed`);
|
||||
@@ -272,8 +298,12 @@ module.exports = {
|
||||
|
||||
changelog_meta: true,
|
||||
async changelog() {
|
||||
const resp = await axios.default.get('https://raw.githubusercontent.com/dbgate/dbgate/master/CHANGELOG.md');
|
||||
return resp.data;
|
||||
try {
|
||||
const resp = await axios.default.get('https://raw.githubusercontent.com/dbgate/dbgate/master/CHANGELOG.md');
|
||||
return resp.data;
|
||||
} catch (err) {
|
||||
return ''
|
||||
}
|
||||
},
|
||||
|
||||
checkLicense_meta: true,

@@ -281,4 +311,91 @@ module.exports = {
     const resp = await checkLicenseKey(licenseKey);
     return resp;
   },
+
+  recryptDatabaseForExport(db) {
+    const encryptionKey = generateTransportEncryptionKey();
+    const transportEncryptor = createTransportEncryptor(encryptionKey);
+
+    const config = _.cloneDeep([
+      ...(db.config?.filter(c => !(c.group == 'admin' && c.key == 'encryptionKey')) || []),
+      { group: 'admin', key: 'encryptionKey', value: encryptionKey },
+    ]);
+    const adminPassword = config.find(c => c.group == 'admin' && c.key == 'adminPassword');
+    recryptObjectPasswordFieldInPlace(adminPassword, 'value', getInternalEncryptor(), transportEncryptor);
+
+    return {
+      ...db,
+      connections: db.connections?.map(conn => recryptConnection(conn, getInternalEncryptor(), transportEncryptor)),
+      users: db.users?.map(conn => recryptUser(conn, getInternalEncryptor(), transportEncryptor)),
+      config,
+    };
+  },
+
+  recryptDatabaseFromImport(db) {
+    const encryptionKey = db.config?.find(c => c.group == 'admin' && c.key == 'encryptionKey')?.value;
+    if (!encryptionKey) {
+      throw new Error('Missing encryption key in the database');
+    }
+    const config = _.cloneDeep(db.config || []).filter(c => !(c.group == 'admin' && c.key == 'encryptionKey'));
+    const transportEncryptor = createTransportEncryptor(encryptionKey);
+
+    const adminPassword = config.find(c => c.group == 'admin' && c.key == 'adminPassword');
+    recryptObjectPasswordFieldInPlace(adminPassword, 'value', transportEncryptor, getInternalEncryptor());
+
+    return {
+      ...db,
+      connections: db.connections?.map(conn => recryptConnection(conn, transportEncryptor, getInternalEncryptor())),
+      users: db.users?.map(conn => recryptUser(conn, transportEncryptor, getInternalEncryptor())),
+      config,
+    };
+  },
+
+  exportConnectionsAndSettings_meta: true,
+  async exportConnectionsAndSettings(_params, req) {
+    if (!hasPermission(`admin/config`, req)) {
+      throw new Error('Permission denied: admin/config');
+    }
+
+    if (connections.portalConnections) {
+      throw new Error('Not allowed');
+    }
+
+    if (process.env.STORAGE_DATABASE) {
+      const db = await storage.getExportedDatabase();
+      return this.recryptDatabaseForExport(db);
+    }
+
+    return this.recryptDatabaseForExport({
+      connections: (await connections.list(null, req)).map((conn, index) => ({
+        ..._.omit(conn, ['_id']),
+        id: index + 1,
+        conid: conn._id,
+      })),
+    });
+  },
+
+  importConnectionsAndSettings_meta: true,
+  async importConnectionsAndSettings({ db }, req) {
+    if (!hasPermission(`admin/config`, req)) {
+      throw new Error('Permission denied: admin/config');
+    }
+
+    if (connections.portalConnections) {
+      throw new Error('Not allowed');
+    }
+
+    const recryptedDb = this.recryptDatabaseFromImport(db);
+    if (process.env.STORAGE_DATABASE) {
+      await storage.replicateImportedDatabase(recryptedDb);
+    } else {
+      await connections.importFromArray(
+        recryptedDb.connections.map(conn => ({
+          ..._.omit(conn, ['conid', 'id']),
+          _id: conn.conid,
+        }))
+      );
+    }
+
+    return true;
+  },
 };
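
A hedged sketch of how the export/import pair above is meant to round-trip (assuming this controller is required as `config`; variable names illustrative): secrets stored under the internal encryptor are re-encrypted with a freshly generated transport key that travels inside the exported config, and the import path reads that embedded key back and re-encrypts everything internally.

  const exported = config.recryptDatabaseForExport(db);
  // ...transfer `exported` to another instance...
  const restored = config.recryptDatabaseFromImport(exported);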

@@ -38,6 +38,11 @@ function getNamedArgs() {
         res.databaseFile = name;
         res.engine = 'sqlite@dbgate-plugin-sqlite';
       }
+
+      if (name.endsWith('.duckdb')) {
+        res.databaseFile = name;
+        res.engine = 'duckdb@dbgate-plugin-duckdb';
+      }
     }
   }
   return res;

@@ -102,8 +107,8 @@ function getPortalCollections() {
     trustServerCertificate: process.env[`SSL_TRUST_CERTIFICATE_${id}`],
   }));
 
-  for(const conn of connections) {
-    for(const prop in process.env) {
+  for (const conn of connections) {
+    for (const prop in process.env) {
       if (prop.startsWith(`CONNECTION_${conn._id}_`)) {
         const name = prop.substring(`CONNECTION_${conn._id}_`.length);
         conn[name] = process.env[prop];

@@ -234,6 +239,19 @@ module.exports = {
     return (await this.datastore.find()).filter(x => connectionHasPermission(x, req));
   },
 
+  async getUsedEngines() {
+    const storage = require('./storage');
+
+    const storageEngines = await storage.getUsedEngines();
+    if (storageEngines) {
+      return storageEngines;
+    }
+    if (portalConnections) {
+      return _.uniq(_.compact(portalConnections.map(x => x.engine)));
+    }
+    return _.uniq((await this.datastore.find()).map(x => x.engine));
+  },
+
   test_meta: true,
   test({ connection, requestDbList = false }) {
     const subprocess = fork(

@@ -316,6 +334,18 @@ module.exports = {
     return res;
   },
 
+  importFromArray(list) {
+    this.datastore.transformAll(connections => {
+      const mapped = connections.map(x => {
+        const found = list.find(y => y._id == x._id);
+        if (found) return found;
+        return x;
+      });
+      return [...mapped, ...list.filter(x => !connections.find(y => y._id == x._id))];
+    });
+    socket.emitChanged('connection-list-changed');
+  },
+
   async checkUnsavedConnectionsLimit() {
     if (!this.datastore) {
       return;
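
The importFromArray method added above implements upsert-by-_id semantics; a minimal standalone model of the merge (data illustrative):

  const connections = [{ _id: 'a', server: 'old' }, { _id: 'b' }];
  const list = [{ _id: 'a', server: 'new' }, { _id: 'c' }];
  const mapped = connections.map(x => list.find(y => y._id == x._id) || x);
  const merged = [...mapped, ...list.filter(x => !connections.find(y => y._id == x._id))];
  // merged: existing 'a' replaced, 'b' kept, 'c' appended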

@@ -393,6 +423,13 @@ module.exports = {
     return volatile;
   }
 
+  const cloudMatch = conid.match(/^cloud\:\/\/(.+)\/(.+)$/);
+  if (cloudMatch) {
+    const { loadCachedCloudConnection } = require('../utility/cloudIntf');
+    const conn = await loadCachedCloudConnection(cloudMatch[1], cloudMatch[2]);
+    return conn;
+  }
+
   const storage = require('./storage');
 
   const storageConnection = await storage.getConnection({ conid });
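
The branch added above resolves connection ids of the form cloud://<folder>/<connection>; a quick check of the regex (values illustrative):

  const m = 'cloud://myfolder/myconn'.match(/^cloud\:\/\/(.+)\/(.+)$/);
  // m[1] === 'myfolder', m[2] === 'myconn'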

@@ -435,6 +472,22 @@ module.exports = {
     return res;
   },
 
+  newDuckdbDatabase_meta: true,
+  async newDuckdbDatabase({ file }) {
+    const duckdbDir = path.join(filesdir(), 'duckdb');
+    if (!(await fs.exists(duckdbDir))) {
+      await fs.mkdir(duckdbDir);
+    }
+    const databaseFile = path.join(duckdbDir, `${file}.duckdb`);
+    const res = await this.save({
+      engine: 'duckdb@dbgate-plugin-duckdb',
+      databaseFile,
+      singleDatabase: true,
+      defaultDatabase: `${file}.duckdb`,
+    });
+    return res;
+  },
+
   dbloginWeb_meta: {
     raw: true,
     method: 'get',
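
A hedged usage sketch of the newDuckdbDatabase endpoint added above (assuming this controller is required as `connections`; run inside an async context): the file is created under the duckdb subfolder of filesdir() and saved as a single-database connection.

  const conn = await connections.newDuckdbDatabase({ file: 'analytics' });
  // conn.engine === 'duckdb@dbgate-plugin-duckdb'
  // conn.databaseFile ends with 'duckdb/analytics.duckdb'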

@@ -37,6 +37,10 @@ const loadModelTransform = require('../utility/loadModelTransform');
 const exportDbModelSql = require('../utility/exportDbModelSql');
 const axios = require('axios');
 const { callTextToSqlApi, callCompleteOnCursorApi, callRefactorSqlQueryApi } = require('../utility/authProxy');
+const { decryptConnection } = require('../utility/crypting');
+const { getSshTunnel } = require('../utility/sshTunnel');
+const sessions = require('./sessions');
+const jsldata = require('./jsldata');
 
 const logger = getLogger('databaseConnections');
 

@@ -94,10 +98,59 @@ module.exports = {
 
   handle_ping() {},
 
+  // session event handlers
+
+  handle_info(conid, database, props) {
+    const { sesid, info } = props;
+    sessions.dispatchMessage(sesid, info);
+  },
+
+  handle_done(conid, database, props) {
+    const { sesid } = props;
+    socket.emit(`session-done-${sesid}`);
+    sessions.dispatchMessage(sesid, 'Query execution finished');
+  },
+
+  handle_recordset(conid, database, props) {
+    const { jslid, resultIndex } = props;
+    socket.emit(`session-recordset-${props.sesid}`, { jslid, resultIndex });
+  },
+
+  handle_stats(conid, database, stats) {
+    jsldata.notifyChangedStats(stats);
+  },
+
+  handle_initializeFile(conid, database, props) {
+    const { jslid } = props;
+    socket.emit(`session-initialize-file-${jslid}`);
+  },
+
+  // eval event handler
+  handle_runnerDone(conid, database, props) {
+    const { runid } = props;
+    socket.emit(`runner-done-${runid}`);
+  },
+
+  handle_progress(conid, database, progressData) {
+    const { progressName } = progressData;
+    const { name, runid } = progressName;
+    socket.emit(`runner-progress-${runid}`, { ...progressData, progressName: name });
+  },
+
+  handle_copyStreamError(conid, database, { copyStreamError }) {
+    const { progressName } = copyStreamError;
+    const { runid } = progressName;
+    logger.error(`Error in database connection ${conid}, database ${database}: ${copyStreamError}`);
+    socket.emit(`runner-done-${runid}`);
+  },
+
   async ensureOpened(conid, database) {
     const existing = this.opened.find(x => x.conid == conid && x.database == database);
     if (existing) return existing;
     const connection = await connections.getCore({ conid });
     if (!connection) {
       throw new Error(`databaseConnections: Connection with conid="${conid}" not found`);
     }
     if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
       throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
     }

@@ -134,12 +187,23 @@ module.exports = {
       const { msgtype } = message;
       if (handleProcessCommunication(message, subprocess)) return;
       if (newOpened.disconnected) return;
-      this[`handle_${msgtype}`](conid, database, message);
+      const funcName = `handle_${msgtype}`;
+      if (!this[funcName]) {
+        logger.error(`Unknown message type ${msgtype} from subprocess databaseConnectionProcess`);
+        return;
+      }
+
+      this[funcName](conid, database, message);
     });
     subprocess.on('exit', () => {
       if (newOpened.disconnected) return;
       this.close(conid, database, false);
     });
+    subprocess.on('error', err => {
+      logger.error(extractErrorLogData(err), 'Error in database connection subprocess');
+      if (newOpened.disconnected) return;
+      this.close(conid, database, false);
+    });
 
     subprocess.send({
       msgtype: 'connect',
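
The funcName guard above hardens the dynamic dispatch of subprocess messages; a minimal standalone model of the pattern (names illustrative):

  const handlers = {
    handle_stats(conid, database, message) { /* forward stats to listeners */ },
  };
  const message = { msgtype: 'stats' };
  const funcName = `handle_${message.msgtype}`;
  if (handlers[funcName]) handlers[funcName]('conid1', 'db1', message);
  else console.error(`Unknown message type ${message.msgtype}`);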

@@ -243,6 +307,12 @@ module.exports = {
     return this.loadDataCore('loadKeys', { conid, database, root, filter, limit });
   },
 
+  scanKeys_meta: true,
+  async scanKeys({ conid, database, root, pattern, cursor, count }, req) {
+    testConnectionPermission(conid, req);
+    return this.loadDataCore('scanKeys', { conid, database, root, pattern, cursor, count });
+  },
+
   exportKeys_meta: true,
   async exportKeys({ conid, database, options }, req) {
     testConnectionPermission(conid, req);

@@ -619,9 +689,26 @@ module.exports = {
     command,
     { conid, database, outputFile, inputFile, options, selectedTables, skippedTables, argsFormat }
   ) {
-    const connection = await connections.getCore({ conid });
+    const sourceConnection = await connections.getCore({ conid });
+    const connection = {
+      ...decryptConnection(sourceConnection),
+    };
     const driver = requireEngineDriver(connection);
 
+    if (!connection.port && driver.defaultPort) {
+      connection.port = driver.defaultPort.toString();
+    }
+
+    if (connection.useSshTunnel) {
+      const tunnel = await getSshTunnel(connection);
+      if (tunnel.state == 'error') {
+        throw new Error(tunnel.message);
+      }
+
+      connection.server = tunnel.localHost;
+      connection.port = tunnel.localPort;
+    }
+
     const settingsValue = await config.getSettings();
 
     const externalTools = {};

@@ -739,4 +826,25 @@ module.exports = {
       commandLine: this.commandArgsToCommandLine(commandArgs),
     };
   },
+
+  executeSessionQuery_meta: true,
+  async executeSessionQuery({ sesid, conid, database, sql }, req) {
+    testConnectionPermission(conid, req);
+    logger.info({ sesid, sql }, 'Processing query');
+    sessions.dispatchMessage(sesid, 'Query execution started');
+
+    const opened = await this.ensureOpened(conid, database);
+    opened.subprocess.send({ msgtype: 'executeSessionQuery', sql, sesid });
+
+    return { state: 'ok' };
+  },
+
+  evalJsonScript_meta: true,
+  async evalJsonScript({ conid, database, script, runid }, req) {
+    testConnectionPermission(conid, req);
+    const opened = await this.ensureOpened(conid, database);
+
+    opened.subprocess.send({ msgtype: 'evalJsonScript', script, runid });
+    return { state: 'ok' };
+  },
 };

@@ -9,6 +9,9 @@ const scheduler = require('./scheduler');
 const getDiagramExport = require('../utility/getDiagramExport');
 const apps = require('./apps');
 const getMapExport = require('../utility/getMapExport');
+const dbgateApi = require('../shell');
+const { getLogger } = require('dbgate-tools');
+const logger = getLogger('files');
 
 function serialize(format, data) {
   if (format == 'text') return data;

@@ -219,4 +222,60 @@ module.exports = {
       return path.join(dir, file);
     }
   },
+
+  createZipFromJsons_meta: true,
+  async createZipFromJsons({ db, filePath }) {
+    logger.info(`Creating zip file from JSONS ${filePath}`);
+    await dbgateApi.zipJsonLinesData(db, filePath);
+    return true;
+  },
+
+  getJsonsFromZip_meta: true,
+  async getJsonsFromZip({ filePath }) {
+    const res = await dbgateApi.unzipJsonLinesData(filePath);
+    return res;
+  },
+
+  downloadText_meta: true,
+  async downloadText({ uri }, req) {
+    if (!uri) return null;
+    const filePath = await dbgateApi.download(uri);
+    const text = await fs.readFile(filePath, {
+      encoding: 'utf-8',
+    });
+    return text;
+  },
+
+  saveUploadedFile_meta: true,
+  async saveUploadedFile({ filePath, fileName }) {
+    const FOLDERS = ['sql', 'sqlite'];
+    for (const folder of FOLDERS) {
+      if (fileName.toLowerCase().endsWith('.' + folder)) {
+        logger.info(`Saving ${folder} file ${fileName}`);
+        await fs.copyFile(filePath, path.join(filesdir(), folder, fileName));
+
+        socket.emitChanged(`files-changed`, { folder: folder });
+        socket.emitChanged(`all-files-changed`);
+        return {
+          name: path.basename(filePath),
+          folder: folder,
+        };
+      }
+    }
+
+    throw new Error(`${fileName} doesn't have one of supported extensions: ${FOLDERS.join(', ')}`);
+  },
+
+  exportFile_meta: true,
+  async exportFile({ folder, file, filePath }, req) {
+    if (!hasPermission(`files/${folder}/read`, req)) return false;
+    await fs.copyFile(path.join(filesdir(), folder, file), filePath);
+    return true;
+  },
+
+  simpleCopy_meta: true,
+  async simpleCopy({ sourceFilePath, targetFilePath }, req) {
+    await fs.copyFile(sourceFilePath, targetFilePath);
+    return true;
+  },
 };

@@ -8,6 +8,9 @@ const getJslFileName = require('../utility/getJslFileName');
 const JsonLinesDatastore = require('../utility/JsonLinesDatastore');
 const requirePluginFunction = require('../utility/requirePluginFunction');
 const socket = require('../utility/socket');
+const crypto = require('crypto');
+const dbgateApi = require('../shell');
+const { ChartProcessor } = require('dbgate-datalib');
 
 function readFirstLine(file) {
   return new Promise((resolve, reject) => {

@@ -293,4 +296,36 @@ module.exports = {
       })),
     };
   },
+
+  downloadJslData_meta: true,
+  async downloadJslData({ uri }) {
+    const jslid = crypto.randomUUID();
+    await dbgateApi.download(uri, { targetFile: getJslFileName(jslid) });
+    return { jslid };
+  },
+
+  buildChart_meta: true,
+  async buildChart({ jslid, definition }) {
+    const datastore = new JsonLinesDatastore(getJslFileName(jslid));
+    const processor = new ChartProcessor(definition ? [definition] : undefined);
+    await datastore.enumRows(row => {
+      processor.addRow(row);
+      return true;
+    });
+    processor.finalize();
+    return processor.charts;
+  },
+
+  detectChartColumns_meta: true,
+  async detectChartColumns({ jslid }) {
+    const datastore = new JsonLinesDatastore(getJslFileName(jslid));
+    const processor = new ChartProcessor();
+    processor.autoDetectCharts = false;
+    await datastore.enumRows(row => {
+      processor.addRow(row);
+      return true;
+    });
+    processor.finalize();
+    return processor.availableColumns;
+  },
 };
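
Both chart methods above stream a stored JSONL result row-by-row through a ChartProcessor; a hedged sketch of the intended call sequence (assuming this controller is required as `jsldata`; URI illustrative):

  const { jslid } = await jsldata.downloadJslData({ uri: 'https://example.com/data.jsonl' });
  const columns = await jsldata.detectChartColumns({ jslid }); // candidate columns, auto-detection off
  const charts = await jsldata.buildChart({ jslid });          // auto-detected chart definitions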

@@ -8,7 +8,7 @@ const { fork, spawn } = require('child_process');
 const { rundir, uploadsdir, pluginsdir, getPluginBackendPath, packagedPluginList } = require('../utility/directories');
 const {
   extractShellApiPlugins,
-  extractShellApiFunctionName,
+  compileShellApiFunctionName,
   jsonScriptToJavascript,
   getLogger,
   safeJsonParse,

@@ -58,7 +58,7 @@ dbgateApi.initializeApiEnvironment();
 ${requirePluginsTemplate(extractShellApiPlugins(functionName, props))}
 require=null;
 async function run() {
-const reader=await ${extractShellApiFunctionName(functionName)}(${JSON.stringify(props)});
+const reader=await ${compileShellApiFunctionName(functionName)}(${JSON.stringify(props)});
 const writer=await dbgateApi.collectorWriter({runid: '${runid}'});
 await dbgateApi.copyStream(reader, writer);
 }

@@ -96,9 +96,9 @@ module.exports = {
 
   handle_ping() {},
 
-  handle_freeData(runid, { freeData }) {
+  handle_dataResult(runid, { dataResult }) {
     const { resolve } = this.requests[runid];
-    resolve(freeData);
+    resolve(dataResult);
     delete this.requests[runid];
   },
 

@@ -273,7 +273,7 @@ module.exports = {
     const runid = crypto.randomUUID();
 
     if (script.type == 'json') {
-      const js = jsonScriptToJavascript(script);
+      const js = await jsonScriptToJavascript(script);
       return this.startCore(runid, scriptTemplate(js, false));
     }
 

@@ -328,4 +328,24 @@ module.exports = {
     });
     return promise;
   },
+
+  scriptResult_meta: true,
+  async scriptResult({ script }) {
+    if (script.type != 'json') {
+      return { errorMessage: 'Only JSON scripts are allowed' };
+    }
+
+    const promise = new Promise(async (resolve, reject) => {
+      const runid = crypto.randomUUID();
+      this.requests[runid] = { resolve, reject, exitOnStreamError: true };
+      const cloned = _.cloneDeepWith(script, node => {
+        if (node?.$replace == 'runid') {
+          return runid;
+        }
+      });
+      const js = await jsonScriptToJavascript(cloned);
+      this.startCore(runid, scriptTemplate(js, false));
+    });
+    return promise;
+  },
 };
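
scriptResult above relies on a { $replace: 'runid' } placeholder convention: the JSON script is deep-cloned and each placeholder node is swapped for the generated run id before compilation, which ties the script's collectorWriter output back to this request. The substitution itself is plain lodash (run id illustrative):

  const _ = require('lodash');
  const cloned = _.cloneDeepWith(script, node => (node?.$replace == 'runid' ? 'RUN-123' : undefined));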

@@ -52,7 +52,10 @@ module.exports = {
     if (existing) return existing;
     const connection = await connections.getCore({ conid });
     if (!connection) {
-      throw new Error(`Connection with conid="${conid}" not found`);
+      throw new Error(`serverConnections: Connection with conid="${conid}" not found`);
     }
+    if (connection.singleDatabase) {
+      return null;
+    }
     if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
       throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });

@@ -98,6 +101,11 @@ module.exports = {
       if (newOpened.disconnected) return;
       this.close(conid, false);
     });
+    subprocess.on('error', err => {
+      logger.error(extractErrorLogData(err), 'Error in server connection subprocess');
+      if (newOpened.disconnected) return;
+      this.close(conid, false);
+    });
     subprocess.send({ msgtype: 'connect', ...connection, globalSettings: await config.getSettings() });
     return newOpened;
   });

@@ -137,14 +145,14 @@ module.exports = {
     if (conid == '__model') return [];
     testConnectionPermission(conid, req);
     const opened = await this.ensureOpened(conid);
-    return opened.databases;
+    return opened?.databases ?? [];
   },
 
   version_meta: true,
   async version({ conid }, req) {
     testConnectionPermission(conid, req);
     const opened = await this.ensureOpened(conid);
-    return opened.version;
+    return opened?.version ?? null;
   },
 
   serverStatus_meta: true,

@@ -165,6 +173,9 @@ module.exports = {
     }
     this.lastPinged[conid] = new Date().getTime();
     const opened = await this.ensureOpened(conid);
+    if (!opened) {
+      return Promise.resolve();
+    }
     try {
       opened.subprocess.send({ msgtype: 'ping' });
     } catch (err) {

@@ -189,6 +200,9 @@ module.exports = {
   async sendDatabaseOp({ conid, msgtype, name }, req) {
     testConnectionPermission(conid, req);
     const opened = await this.ensureOpened(conid);
+    if (!opened) {
+      return null;
+    }
     if (opened.connection.isReadOnly) return false;
     const res = await this.sendRequest(opened, { msgtype, name });
     if (res.errorMessage) {

@@ -228,6 +242,9 @@ module.exports = {
   async loadDataCore(msgtype, { conid, ...args }, req) {
     testConnectionPermission(conid, req);
     const opened = await this.ensureOpened(conid);
+    if (!opened) {
+      return null;
+    }
     const res = await this.sendRequest(opened, { msgtype, ...args });
     if (res.errorMessage) {
       console.error(res.errorMessage);

@@ -249,6 +266,9 @@ module.exports = {
   async summaryCommand({ conid, command, row }, req) {
     testConnectionPermission(conid, req);
     const opened = await this.ensureOpened(conid);
+    if (!opened) {
+      return null;
+    }
     if (opened.connection.isReadOnly) return false;
     return this.loadDataCore('summaryCommand', { conid, command, row });
   },

@@ -83,6 +83,11 @@ module.exports = {
     jsldata.notifyChangedStats(stats);
   },
 
+  handle_charts(sesid, props) {
+    const { jslid, charts, resultIndex } = props;
+    socket.emit(`session-charts-${sesid}`, { jslid, resultIndex, charts });
+  },
+
   handle_initializeFile(sesid, props) {
     const { jslid } = props;
     socket.emit(`session-initialize-file-${jslid}`);

@@ -141,7 +146,7 @@ module.exports = {
   },
 
   executeQuery_meta: true,
-  async executeQuery({ sesid, sql, autoCommit }) {
+  async executeQuery({ sesid, sql, autoCommit, limitRows, frontMatter }) {
     const session = this.opened.find(x => x.sesid == sesid);
     if (!session) {
       throw new Error('Invalid session');

@@ -149,7 +154,7 @@ module.exports = {
 
     logger.info({ sesid, sql }, 'Processing query');
     this.dispatchMessage(sesid, 'Query execution started');
-    session.subprocess.send({ msgtype: 'executeQuery', sql, autoCommit });
+    session.subprocess.send({ msgtype: 'executeQuery', sql, autoCommit, limitRows, frontMatter });
 
     return { state: 'ok' };
   },

@@ -4,6 +4,10 @@ module.exports = {
     return null;
   },
 
+  async getExportedDatabase() {
+    return {};
+  },
+
   getConnection_meta: true,
   async getConnection({ conid }) {
     return null;

@@ -28,4 +32,8 @@ module.exports = {
   },
 
   startRefreshLicense() {},
+
+  async getUsedEngines() {
+    return null;
+  },
 };

@@ -39,52 +39,6 @@ module.exports = {
     });
   },
 
-  uploadDataFile_meta: {
-    method: 'post',
-    raw: true,
-  },
-  uploadDataFile(req, res) {
-    const { data } = req.files || {};
-
-    if (!data) {
-      res.json(null);
-      return;
-    }
-
-    if (data.name.toLowerCase().endsWith('.sql')) {
-      logger.info(`Uploading SQL file ${data.name}, size=${data.size}`);
-      data.mv(path.join(filesdir(), 'sql', data.name), () => {
-        res.json({
-          name: data.name,
-          folder: 'sql',
-        });
-
-        socket.emitChanged(`files-changed`, { folder: 'sql' });
-        socket.emitChanged(`all-files-changed`);
-      });
-      return;
-    }
-
-    res.json(null);
-  },
-
-  saveDataFile_meta: true,
-  async saveDataFile({ filePath }) {
-    if (filePath.toLowerCase().endsWith('.sql')) {
-      logger.info(`Saving SQL file ${filePath}`);
-      await fs.copyFile(filePath, path.join(filesdir(), 'sql', path.basename(filePath)));
-
-      socket.emitChanged(`files-changed`, { folder: 'sql' });
-      socket.emitChanged(`all-files-changed`);
-      return {
-        name: path.basename(filePath),
-        folder: 'sql',
-      };
-    }
-
-    return null;
-  },
-
   get_meta: {
     method: 'get',
     raw: true,

@@ -27,6 +27,7 @@ const plugins = require('./controllers/plugins');
 const files = require('./controllers/files');
 const scheduler = require('./controllers/scheduler');
 const queryHistory = require('./controllers/queryHistory');
+const cloud = require('./controllers/cloud');
 const onFinished = require('on-finished');
 const processArgs = require('./utility/processArgs');
 

@@ -38,7 +39,8 @@ const { getLogger } = require('dbgate-tools');
 const { getDefaultAuthProvider } = require('./auth/authProvider');
 const startCloudUpgradeTimer = require('./utility/cloudUpgrade');
 const { isProApp } = require('./utility/checkLicense');
-const getHealthStatus = require('./utility/healthStatus');
+const { getHealthStatus, getHealthStatusSprinx } = require('./utility/healthStatus');
+const { startCloudFiles } = require('./utility/cloudIntf');
 
 const logger = getLogger('main');
 

@@ -124,6 +126,12 @@ function start() {
       res.end(JSON.stringify(health, null, 2));
     });
 
+    app.get(getExpressPath('/__health'), async function (req, res) {
+      res.setHeader('Content-Type', 'application/json');
+      const health = await getHealthStatusSprinx();
+      res.end(JSON.stringify(health, null, 2));
+    });
+
     app.use(bodyParser.json({ limit: '50mb' }));
 
     app.use(

@@ -194,6 +202,8 @@ function start() {
   if (process.env.CLOUD_UPGRADE_FILE) {
     startCloudUpgradeTimer();
   }
+
+  startCloudFiles();
 }
 
 function useAllControllers(app, electron) {

@@ -214,6 +224,7 @@ function useAllControllers(app, electron) {
   useController(app, electron, '/query-history', queryHistory);
   useController(app, electron, '/apps', apps);
   useController(app, electron, '/auth', auth);
+  useController(app, electron, '/cloud', cloud);
 }
 
 function setElectronSender(electronSender) {

@@ -4,6 +4,8 @@ const { connectUtility } = require('../utility/connectUtility');
 const { handleProcessCommunication } = require('../utility/processComm');
 const { pickSafeConnectionInfo } = require('../utility/crypting');
+const _ = require('lodash');
 const { getLogger, extractErrorLogData } = require('dbgate-tools');
+const logger = getLogger('connectProcess');
 
 const formatErrorDetail = (e, connection) => `${e.stack}

@@ -23,12 +25,15 @@ function start() {
   try {
     const driver = requireEngineDriver(connection);
     const dbhan = await connectUtility(driver, connection, 'app');
-    const res = await driver.getVersion(dbhan);
+    let version = {
+      version: 'Unknown',
+    };
+    version = await driver.getVersion(dbhan);
     let databases = undefined;
     if (requestDbList) {
       databases = await driver.listDatabases(dbhan);
     }
-    process.send({ msgtype: 'connected', ...res, databases });
+    process.send({ msgtype: 'connected', ...version, databases });
     await driver.close(dbhan);
   } catch (e) {
     console.error(e);

@@ -9,13 +9,22 @@ const {
   dbNameLogCategory,
   extractErrorMessage,
   extractErrorLogData,
+  ScriptWriterEval,
+  SqlGenerator,
+  playJsonScriptWriter,
+  serializeJsTypesForJsonStringify,
 } = require('dbgate-tools');
 const requireEngineDriver = require('../utility/requireEngineDriver');
 const { connectUtility } = require('../utility/connectUtility');
 const { handleProcessCommunication } = require('../utility/processComm');
-const { SqlGenerator } = require('dbgate-tools');
 const generateDeploySql = require('../shell/generateDeploySql');
 const { dumpSqlSelect } = require('dbgate-sqltree');
+const { allowExecuteCustomScript, handleQueryStream } = require('../utility/handleQueryStream');
+const dbgateApi = require('../shell');
+const requirePlugin = require('../shell/requirePlugin');
+const path = require('path');
+const { rundir } = require('../utility/directories');
+const fs = require('fs-extra');
 
 const logger = getLogger('dbconnProcess');
 

@@ -120,10 +129,15 @@ function setStatusName(name) {
 
 async function readVersion() {
   const driver = requireEngineDriver(storedConnection);
-  const version = await driver.getVersion(dbhan);
-  logger.debug(`Got server version: ${version.version}`);
-  process.send({ msgtype: 'version', version });
-  serverVersion = version;
+  try {
+    const version = await driver.getVersion(dbhan);
+    logger.debug(`Got server version: ${version.version}`);
+    serverVersion = version;
+  } catch (err) {
+    logger.error(extractErrorLogData(err), 'Error getting DB server version');
+    serverVersion = { version: 'Unknown' };
+  }
+  process.send({ msgtype: 'version', version: serverVersion });
 }
 
 async function handleConnect({ connection, structure, globalSettings }) {

@@ -219,7 +233,7 @@ async function handleQueryData({ msgid, sql, range }, skipReadonlyCheck = false)
   try {
     if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
     const res = await driver.query(dbhan, sql, { range });
-    process.send({ msgtype: 'response', msgid, ...res });
+    process.send({ msgtype: 'response', msgid, ...serializeJsTypesForJsonStringify(res) });
   } catch (err) {
     process.send({
       msgtype: 'response',

@@ -241,7 +255,7 @@ async function handleDriverDataCore(msgid, callMethod, { logName }) {
   const driver = requireEngineDriver(storedConnection);
   try {
     const result = await callMethod(driver);
-    process.send({ msgtype: 'response', msgid, result });
+    process.send({ msgtype: 'response', msgid, result: serializeJsTypesForJsonStringify(result) });
   } catch (err) {
     logger.error(extractErrorLogData(err, { logName }), `Error when handling message ${logName}`);
     process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error executing DB data') });

@@ -261,6 +275,10 @@ async function handleLoadKeys({ msgid, root, filter, limit }) {
   return handleDriverDataCore(msgid, driver => driver.loadKeys(dbhan, root, filter, limit), { logName: 'loadKeys' });
 }
 
+async function handleScanKeys({ msgid, pattern, cursor, count }) {
+  return handleDriverDataCore(msgid, driver => driver.scanKeys(dbhan, pattern, cursor, count), { logName: 'scanKeys' });
+}
+
 async function handleExportKeys({ msgid, options }) {
   return handleDriverDataCore(msgid, driver => driver.exportKeys(dbhan, options), { logName: 'exportKeys' });
 }

@@ -370,6 +388,56 @@ async function handleGenerateDeploySql({ msgid, modelFolder }) {
   }
 }
 
+async function handleExecuteSessionQuery({ sesid, sql }) {
+  await waitConnected();
+  const driver = requireEngineDriver(storedConnection);
+
+  if (!allowExecuteCustomScript(storedConnection, driver)) {
+    process.send({
+      msgtype: 'info',
+      info: {
+        message: 'Connection without read-only sessions is read only',
+        severity: 'error',
+      },
+      sesid,
+    });
+    process.send({ msgtype: 'done', sesid, skipFinishedMessage: true });
+    return;
+    //process.send({ msgtype: 'error', error: e.message });
+  }
+
+  const queryStreamInfoHolder = {
+    resultIndex: 0,
+    canceled: false,
+  };
+  for (const sqlItem of splitQuery(sql, {
+    ...driver.getQuerySplitterOptions('stream'),
+    returnRichInfo: true,
+  })) {
+    await handleQueryStream(dbhan, driver, queryStreamInfoHolder, sqlItem, sesid);
+    if (queryStreamInfoHolder.canceled) {
+      break;
+    }
+  }
+  process.send({ msgtype: 'done', sesid });
+}
+
+async function handleEvalJsonScript({ script, runid }) {
+  const directory = path.join(rundir(), runid);
+  fs.mkdirSync(directory);
+  const originalCwd = process.cwd();
+
+  try {
+    process.chdir(directory);
+
+    const evalWriter = new ScriptWriterEval(dbgateApi, requirePlugin, dbhan, runid);
+    await playJsonScriptWriter(script, evalWriter);
+    process.send({ msgtype: 'runnerDone', runid });
+  } finally {
+    process.chdir(originalCwd);
+  }
+}
+
 // async function handleRunCommand({ msgid, sql }) {
 //   await waitConnected();
 //   const driver = engines(storedConnection);
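
Worth noting for handleEvalJsonScript above: process.chdir is process-global, which is safe here only because every database connection runs in its own forked subprocess; the per-run directory under rundir()/<runid> keeps relative file outputs of separate runs apart, and the finally block restores the original cwd even when the script throws. Compressed (illustrative):

  const originalCwd = process.cwd();
  try {
    process.chdir(directory); // per-run scratch directory
    // ...play the script...
  } finally {
    process.chdir(originalCwd);
  }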

@@ -389,6 +457,7 @@ const messageHandlers = {
   updateCollection: handleUpdateCollection,
   collectionData: handleCollectionData,
   loadKeys: handleLoadKeys,
+  scanKeys: handleScanKeys,
   loadKeyInfo: handleLoadKeyInfo,
   callMethod: handleCallMethod,
   loadKeyTableRange: handleLoadKeyTableRange,

@@ -400,6 +469,8 @@ const messageHandlers = {
   sqlSelect: handleSqlSelect,
   exportKeys: handleExportKeys,
   schemaList: handleSchemaList,
+  executeSessionQuery: handleExecuteSessionQuery,
+  evalJsonScript: handleEvalJsonScript,
   // runCommand: handleRunCommand,
 };

@@ -46,7 +46,13 @@ async function handleRefresh() {
 
 async function readVersion() {
   const driver = requireEngineDriver(storedConnection);
-  const version = await driver.getVersion(dbhan);
+  let version;
+  try {
+    version = await driver.getVersion(dbhan);
+  } catch (err) {
+    logger.error(extractErrorLogData(err), 'Error getting DB server version');
+    version = { version: 'Unknown' };
+  }
   process.send({ msgtype: 'version', version });
 }

@@ -11,6 +11,7 @@ const { decryptConnection } = require('../utility/crypting');
 const { connectUtility } = require('../utility/connectUtility');
 const { handleProcessCommunication } = require('../utility/processComm');
 const { getLogger, extractIntSettingsValue, extractBoolSettingsValue } = require('dbgate-tools');
+const { handleQueryStream, QueryStreamTableWriter, allowExecuteCustomScript } = require('../utility/handleQueryStream');
 
 const logger = getLogger('sessionProcess');
 

@@ -23,175 +24,6 @@ let lastActivity = null;
 let currentProfiler = null;
 let executingScripts = 0;
 
-class TableWriter {
-  constructor() {
-    this.currentRowCount = 0;
-    this.currentChangeIndex = 1;
-    this.initializedFile = false;
-  }
-
-  initializeFromQuery(structure, resultIndex) {
-    this.jslid = crypto.randomUUID();
-    this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
-    fs.writeFileSync(
-      this.currentFile,
-      JSON.stringify({
-        ...structure,
-        __isStreamHeader: true,
-      }) + '\n'
-    );
-    this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
-    this.writeCurrentStats(false, false);
-    this.resultIndex = resultIndex;
-    this.initializedFile = true;
-    process.send({ msgtype: 'recordset', jslid: this.jslid, resultIndex });
-  }
-
-  initializeFromReader(jslid) {
-    this.jslid = jslid;
-    this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
-    this.writeCurrentStats(false, false);
-  }
-
-  row(row) {
-    // console.log('ACCEPT ROW', row);
-    this.currentStream.write(JSON.stringify(row) + '\n');
-    this.currentRowCount += 1;
-
-    if (!this.plannedStats) {
-      this.plannedStats = true;
-      process.nextTick(() => {
-        if (this.currentStream) this.currentStream.uncork();
-        process.nextTick(() => this.writeCurrentStats(false, true));
-        this.plannedStats = false;
-      });
-    }
-  }
-
-  rowFromReader(row) {
-    if (!this.initializedFile) {
-      process.send({ msgtype: 'initializeFile', jslid: this.jslid });
-      this.initializedFile = true;
-
-      fs.writeFileSync(this.currentFile, JSON.stringify(row) + '\n');
-      this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
-      this.writeCurrentStats(false, false);
-      this.initializedFile = true;
-      return;
-    }
-
-    this.row(row);
-  }
-
-  writeCurrentStats(isFinished = false, emitEvent = false) {
-    const stats = {
-      rowCount: this.currentRowCount,
-      changeIndex: this.currentChangeIndex,
-      isFinished,
-      jslid: this.jslid,
-    };
-    fs.writeFileSync(`${this.currentFile}.stats`, JSON.stringify(stats));
-    this.currentChangeIndex += 1;
-    if (emitEvent) {
-      process.send({ msgtype: 'stats', ...stats });
-    }
-  }
-
-  close(afterClose) {
-    if (this.currentStream) {
-      this.currentStream.end(() => {
-        this.writeCurrentStats(true, true);
-        if (afterClose) afterClose();
-      });
-    }
-  }
-}
-
-class StreamHandler {
-  constructor(resultIndexHolder, resolve, startLine) {
-    this.recordset = this.recordset.bind(this);
-    this.startLine = startLine;
-    this.row = this.row.bind(this);
-    // this.error = this.error.bind(this);
-    this.done = this.done.bind(this);
-    this.info = this.info.bind(this);
-
-    // use this for cancelling - not implemented
-    // this.stream = null;
-
-    this.plannedStats = false;
-    this.resultIndexHolder = resultIndexHolder;
-    this.resolve = resolve;
-    // currentHandlers = [...currentHandlers, this];
-  }
-
-  closeCurrentWriter() {
-    if (this.currentWriter) {
-      this.currentWriter.close();
-      this.currentWriter = null;
-    }
-  }
-
-  recordset(columns) {
-    this.closeCurrentWriter();
-    this.currentWriter = new TableWriter();
-    this.currentWriter.initializeFromQuery(
-      Array.isArray(columns) ? { columns } : columns,
-      this.resultIndexHolder.value
-    );
-    this.resultIndexHolder.value += 1;
-
-    // this.writeCurrentStats();
-
-    // this.onRow = _.throttle((jslid) => {
-    //   if (jslid == this.jslid) {
-    //     this.writeCurrentStats(false, true);
-    //   }
-    // }, 500);
-  }
-  row(row) {
-    if (this.currentWriter) this.currentWriter.row(row);
-    else if (row.message) process.send({ msgtype: 'info', info: { message: row.message } });
-    // this.onRow(this.jslid);
-  }
-  // error(error) {
-  //   process.send({ msgtype: 'error', error });
-  // }
-  done(result) {
-    this.closeCurrentWriter();
-    // currentHandlers = currentHandlers.filter((x) => x != this);
-    this.resolve();
-  }
-  info(info) {
-    if (info && info.line != null) {
-      info = {
-        ...info,
-        line: this.startLine + info.line,
-      };
-    }
-    process.send({ msgtype: 'info', info });
-  }
-}
-
-function handleStream(driver, resultIndexHolder, sqlItem) {
-  return new Promise((resolve, reject) => {
-    const start = sqlItem.trimStart || sqlItem.start;
-    const handler = new StreamHandler(resultIndexHolder, resolve, start && start.line);
-    driver.stream(dbhan, sqlItem.text, handler);
-  });
-}
-
-function allowExecuteCustomScript(driver) {
-  if (driver.readOnlySessions) {
-    return true;
-  }
-  if (storedConnection.isReadOnly) {
-    return false;
-    // throw new Error('Connection is read only');
-  }
-  return true;
-}
 
 async function handleConnect(connection) {
   storedConnection = connection;

@@ -222,12 +54,12 @@ async function handleStartProfiler({ jslid }) {
   await waitConnected();
   const driver = requireEngineDriver(storedConnection);
 
-  if (!allowExecuteCustomScript(driver)) {
+  if (!allowExecuteCustomScript(storedConnection, driver)) {
     process.send({ msgtype: 'done' });
     return;
   }
 
-  const writer = new TableWriter();
+  const writer = new QueryStreamTableWriter();
   writer.initializeFromReader(jslid);
 
   currentProfiler = await driver.startProfiler(dbhan, {

@@ -251,7 +83,7 @@ async function handleExecuteControlCommand({ command }) {
   await waitConnected();
   const driver = requireEngineDriver(storedConnection);
 
-  if (command == 'commitTransaction' && !allowExecuteCustomScript(driver)) {
+  if (command == 'commitTransaction' && !allowExecuteCustomScript(storedConnection, driver)) {
     process.send({
       msgtype: 'info',
       info: {

@@ -285,13 +117,13 @@ async function handleExecuteControlCommand({ command }) {
   }
 }
 
-async function handleExecuteQuery({ sql, autoCommit }) {
+async function handleExecuteQuery({ sql, autoCommit, limitRows, frontMatter }) {
   lastActivity = new Date().getTime();
 
   await waitConnected();
   const driver = requireEngineDriver(storedConnection);
 
-  if (!allowExecuteCustomScript(driver)) {
+  if (!allowExecuteCustomScript(storedConnection, driver)) {
     process.send({
       msgtype: 'info',
       info: {

@@ -306,18 +138,23 @@ async function handleExecuteQuery({ sql, autoCommit }) {
 
   executingScripts++;
   try {
-    const resultIndexHolder = {
-      value: 0,
+    const queryStreamInfoHolder = {
+      resultIndex: 0,
+      canceled: false,
     };
     for (const sqlItem of splitQuery(sql, {
       ...driver.getQuerySplitterOptions('stream'),
       returnRichInfo: true,
     })) {
-      await handleStream(driver, resultIndexHolder, sqlItem);
+      await handleQueryStream(dbhan, driver, queryStreamInfoHolder, sqlItem, undefined, limitRows, frontMatter);
       // const handler = new StreamHandler(resultIndex);
       // const stream = await driver.stream(systemConnection, sqlItem, handler);
       // handler.stream = stream;
       // resultIndex = handler.resultIndex;
+
+      if (queryStreamInfoHolder.canceled) {
+        break;
+      }
     }
     process.send({ msgtype: 'done', autoCommit });
   } finally {

@@ -335,13 +172,13 @@ async function handleExecuteReader({ jslid, sql, fileName }) {
   if (fileName) {
     sql = fs.readFileSync(fileName, 'utf-8');
   } else {
-    if (!allowExecuteCustomScript(driver)) {
+    if (!allowExecuteCustomScript(storedConnection, driver)) {
      process.send({ msgtype: 'done' });
      return;
    }
   }
 
-  const writer = new TableWriter();
+  const writer = new QueryStreamTableWriter();
   writer.initializeFromReader(jslid);
 
   const reader = await driver.readQuery(dbhan, sql);

@@ -3,7 +3,9 @@ const { archivedir, resolveArchiveFolder } = require('../utility/directories');
 const jsonLinesReader = require('./jsonLinesReader');
 
 function archiveReader({ folderName, fileName, ...other }) {
-  const jsonlFile = path.join(resolveArchiveFolder(folderName), `${fileName}.jsonl`);
+  const jsonlFile = folderName.endsWith('.zip')
+    ? `zip://archive:${folderName}//${fileName}.jsonl`
+    : path.join(resolveArchiveFolder(folderName), `${fileName}.jsonl`);
   const res = jsonLinesReader({ fileName: jsonlFile, ...other });
   return res;
 }

@@ -15,9 +15,9 @@ class CollectorWriterStream extends stream.Writable {
 
   _final(callback) {
     process.send({
-      msgtype: 'freeData',
+      msgtype: 'dataResult',
       runid: this.runid,
-      freeData: { rows: this.rows, structure: this.structure },
+      dataResult: { rows: this.rows, structure: this.structure },
     });
     callback();
   }

@@ -69,6 +69,7 @@ async function copyStream(input, output, options) {
       msgtype: 'copyStreamError',
       copyStreamError: {
+        message: extractErrorMessage(err),
         progressName,
         ...err,
       },
     });

@@ -1,61 +0,0 @@
-const stream = require('stream');
-const path = require('path');
-const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
-const requireEngineDriver = require('../utility/requireEngineDriver');
-const { connectUtility } = require('../utility/connectUtility');
-const logger = getLogger('dataDuplicator');
-const { DataDuplicator } = require('dbgate-datalib');
-const copyStream = require('./copyStream');
-const jsonLinesReader = require('./jsonLinesReader');
-const { resolveArchiveFolder } = require('../utility/directories');
-
-async function dataDuplicator({
-  connection,
-  archive,
-  folder,
-  items,
-  options,
-  analysedStructure = null,
-  driver,
-  systemConnection,
-}) {
-  if (!driver) driver = requireEngineDriver(connection);
-
-  const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
-
-  try {
-    if (!analysedStructure) {
-      analysedStructure = await driver.analyseFull(dbhan);
-    }
-
-    const sourceDir = archive
-      ? resolveArchiveFolder(archive)
-      : folder?.startsWith('archive:')
-      ? resolveArchiveFolder(folder.substring('archive:'.length))
-      : folder;
-
-    const dupl = new DataDuplicator(
-      dbhan,
-      driver,
-      analysedStructure,
-      items.map(item => ({
-        name: item.name,
-        operation: item.operation,
-        matchColumns: item.matchColumns,
-        openStream:
-          item.openStream || (() => jsonLinesReader({ fileName: path.join(sourceDir, `${item.name}.jsonl`) })),
-      })),
-      stream,
-      copyStream,
-      options
-    );
-
-    await dupl.run();
-  } finally {
-    if (!systemConnection) {
-      await driver.close(dbhan);
-    }
-  }
-}
-
-module.exports = dataDuplicator;

packages/api/src/shell/dataReplicator.js (new file, 96 lines)
@@ -0,0 +1,96 @@
+const stream = require('stream');
+const path = require('path');
+const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
+const requireEngineDriver = require('../utility/requireEngineDriver');
+const { connectUtility } = require('../utility/connectUtility');
+const logger = getLogger('datareplicator');
+const { DataReplicator } = require('dbgate-datalib');
+const { compileCompoudEvalCondition } = require('dbgate-filterparser');
+const copyStream = require('./copyStream');
+const jsonLinesReader = require('./jsonLinesReader');
+const { resolveArchiveFolder } = require('../utility/directories');
+const { evaluateCondition } = require('dbgate-sqltree');
+
+function compileOperationFunction(enabled, condition) {
+  if (!enabled) return _row => false;
+  const conditionCompiled = compileCompoudEvalCondition(condition);
+  if (condition) {
+    return row => evaluateCondition(conditionCompiled, row);
+  }
+  return _row => true;
+}
+
+async function dataReplicator({
+  connection,
+  archive,
+  folder,
+  items,
+  options,
+  analysedStructure = null,
+  driver,
+  systemConnection,
+}) {
+  if (!driver) driver = requireEngineDriver(connection);
+
+  const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
+
+  try {
+    if (!analysedStructure) {
+      analysedStructure = await driver.analyseFull(dbhan);
+    }
+
+    let joinPath;
+
+    if (archive?.endsWith('.zip')) {
+      joinPath = file => `zip://archive:${archive}//${file}`;
+    } else {
+      const sourceDir = archive
+        ? resolveArchiveFolder(archive)
+        : folder?.startsWith('archive:')
+        ? resolveArchiveFolder(folder.substring('archive:'.length))
+        : folder;
+      joinPath = file => path.join(sourceDir, file);
+    }
+
+    const repl = new DataReplicator(
+      dbhan,
+      driver,
+      analysedStructure,
+      items.map(item => {
+        return {
+          name: item.name,
+          matchColumns: item.matchColumns,
+          findExisting: compileOperationFunction(item.findExisting, item.findCondition),
+          createNew: compileOperationFunction(item.createNew, item.createCondition),
+          updateExisting: compileOperationFunction(item.updateExisting, item.updateCondition),
+          deleteMissing: !!item.deleteMissing,
+          deleteRestrictionColumns: item.deleteRestrictionColumns ?? [],
+          openStream: item.openStream
+            ? item.openStream
+            : item.jsonArray
+            ? () => stream.Readable.from(item.jsonArray)
+            : () => jsonLinesReader({ fileName: joinPath(`${item.name}.jsonl`) }),
+        };
+      }),
+      stream,
+      copyStream,
+      options
+    );
+
+    await repl.run();
+    if (options?.runid) {
+      process.send({
+        msgtype: 'dataResult',
+        runid: options?.runid,
+        dataResult: repl.result,
+      });
+    }
+    return repl.result;
+  } finally {
+    if (!systemConnection) {
+      await driver.close(dbhan);
+    }
+  }
+}
+
+module.exports = dataReplicator;
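
A hedged usage sketch of the new dataReplicator shell function (connection and item values illustrative): each boolean flag (findExisting, createNew, updateExisting) pairs with an optional *Condition that compileOperationFunction turns into a per-row predicate, and a .zip archive is read through the zip:// URL scheme.

  const dataReplicator = require('./dataReplicator');
  await dataReplicator({
    connection: { engine: 'sqlite@dbgate-plugin-sqlite', databaseFile: '/tmp/target.sqlite' },
    archive: 'backup.zip', // items read via zip://archive:backup.zip//<name>.jsonl
    items: [{ name: 'customers', matchColumns: ['id'], findExisting: true, createNew: true, updateExisting: true }],
    options: {},
  });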

@@ -20,6 +20,7 @@ const crypto = require('crypto');
  * @param {string} options.ignoreNameRegex - regex for ignoring objects by name
  * @param {string} options.targetSchema - target schema for deployment
  * @param {number} options.maxMissingTablesRatio - maximum ratio of missing tables in database. Safety check, if missing ratio is higher, deploy is stopped (preventing accidental drop of all tables)
+ * @param {boolean} options.useTransaction - run deploy in transaction. If not provided, it will be set to true if driver supports transactions
  */
 async function deployDb({

@@ -33,6 +34,7 @@ async function deployDb({
   ignoreNameRegex = '',
   targetSchema = null,
   maxMissingTablesRatio = undefined,
+  useTransaction,
 }) {
   if (!driver) driver = requireEngineDriver(connection);
   const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));

@@ -60,7 +62,14 @@ async function deployDb({
       maxMissingTablesRatio,
     });
     // console.log('RUNNING DEPLOY SCRIPT:', sql);
-    await executeQuery({ connection, systemConnection: dbhan, driver, sql, logScriptItems: true });
+    await executeQuery({
+      connection,
+      systemConnection: dbhan,
+      driver,
+      sql,
+      logScriptItems: true,
+      useTransaction,
+    });
 
     await scriptDeployer.runPost();
   } finally {

@@ -1,14 +1,30 @@
 const crypto = require('crypto');
 const path = require('path');
-const { uploadsdir } = require('../utility/directories');
+const { uploadsdir, archivedir } = require('../utility/directories');
 const { downloadFile } = require('../utility/downloader');
+const extractSingleFileFromZip = require('../utility/extractSingleFileFromZip');
 
-async function download(url) {
-  if (url && url.match(/(^http:\/\/)|(^https:\/\/)/)) {
-    const tmpFile = path.join(uploadsdir(), crypto.randomUUID());
-    await downloadFile(url, tmpFile);
-    return tmpFile;
+async function download(url, options = {}) {
+  const { targetFile } = options || {};
+  if (url) {
+    if (url.match(/(^http:\/\/)|(^https:\/\/)/)) {
+      const destFile = targetFile || path.join(uploadsdir(), crypto.randomUUID());
+      await downloadFile(url, destFile);
+      return destFile;
+    }
+    const zipMatch = url.match(/^zip\:\/\/(.*)\/\/(.*)$/);
+    if (zipMatch) {
+      const destFile = targetFile || path.join(uploadsdir(), crypto.randomUUID());
+      let zipFile = zipMatch[1];
+      if (zipFile.startsWith('archive:')) {
+        zipFile = path.join(archivedir(), zipFile.substring('archive:'.length));
+      }
+
+      await extractSingleFileFromZip(zipFile, zipMatch[2], destFile);
+      return destFile;
+    }
   }
 
   return url;
 }
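
The reworked download helper above now understands three URL shapes (sketch; file names illustrative):

  await download('https://example.com/data.jsonl');            // fetched into uploadsdir(), or options.targetFile
  await download('zip://archive:backup.zip//customers.jsonl'); // extracted from a zip under archivedir()
  await download('/local/path/data.jsonl');                    // anything else is returned unchanged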
|
||||
|
||||
|
||||
@@ -14,6 +14,8 @@ const logger = getLogger('execQuery');
|
||||
* @param {string} [options.sql] - SQL query
|
||||
* @param {string} [options.sqlFile] - SQL file
|
||||
* @param {boolean} [options.logScriptItems] - whether to log script items instead of whole script
|
||||
* @param {boolean} [options.useTransaction] - run query in transaction
|
||||
* @param {boolean} [options.skipLogging] - whether to skip logging
|
||||
*/
|
||||
async function executeQuery({
|
||||
connection = undefined,
|
||||
@@ -22,8 +24,10 @@ async function executeQuery({
|
||||
sql,
|
||||
sqlFile = undefined,
|
||||
logScriptItems = false,
|
||||
skipLogging = false,
|
||||
useTransaction,
|
||||
}) {
|
||||
if (!logScriptItems) {
|
||||
if (!logScriptItems && !skipLogging) {
|
||||
logger.info({ sql: getLimitedQuery(sql) }, `Execute query`);
|
||||
}
|
||||
|
||||
@@ -36,9 +40,11 @@ async function executeQuery({
|
||||
}
|
||||
|
||||
try {
|
||||
logger.debug(`Running SQL query, length: ${sql.length}`);
|
||||
if (!skipLogging) {
|
||||
logger.debug(`Running SQL query, length: ${sql.length}`);
|
||||
}
|
||||
|
||||
await driver.script(dbhan, sql, { logScriptItems });
|
||||
await driver.script(dbhan, sql, { logScriptItems, useTransaction });
|
||||
} finally {
|
||||
if (!systemConnection) {
|
||||
await driver.close(dbhan);
|
||||
|
||||
@@ -52,7 +52,10 @@ async function generateDeploySql({
|
||||
dbdiffOptionsExtra?.['schemaMode'] !== 'ignore' &&
|
||||
dbdiffOptionsExtra?.['schemaMode'] !== 'ignoreImplicit'
|
||||
) {
|
||||
throw new Error('targetSchema is required for databases with multiple schemas');
|
||||
if (!driver?.dialect?.defaultSchemaName) {
|
||||
throw new Error('targetSchema is required for databases with multiple schemas');
|
||||
}
|
||||
targetSchema = driver.dialect.defaultSchemaName;
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
@@ -17,8 +17,9 @@ const copyStream = require('./copyStream');
 * @param {object} options.driver - driver object. If not provided, it will be loaded from connection
 * @param {string} options.folder - folder with model files (YAML files for tables, SQL files for views, procedures, ...)
 * @param {function[]} options.modelTransforms - array of functions for transforming model
+ * @param {((row: Record<string, any>) => Record<string, any>) | undefined} options.transformRow - function to transform each row
 */
-async function importDbFromFolder({ connection, systemConnection, driver, folder, modelTransforms }) {
+async function importDbFromFolder({ connection, systemConnection, driver, folder, modelTransforms, transformRow }) {
  if (!driver) driver = requireEngineDriver(connection);
  const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));

@@ -77,7 +78,7 @@ async function importDbFromFolder({ connection, systemConnection, driver, folder
  for (const table of modelAdapted.tables) {
    const fileName = path.join(folder, `${table.pureName}.jsonl`);
    if (await fs.exists(fileName)) {
-      const src = await jsonLinesReader({ fileName });
+      const src = await jsonLinesReader({ fileName, transformRow });
      const dst = await tableWriter({
        systemConnection: dbhan,
        pureName: table.pureName,
@@ -105,7 +106,7 @@ async function importDbFromFolder({ connection, systemConnection, driver, folder
  for (const file of fs.readdirSync(folder)) {
    if (!file.endsWith('.jsonl')) continue;
    const pureName = path.parse(file).name;
-    const src = await jsonLinesReader({ fileName: path.join(folder, file) });
+    const src = await jsonLinesReader({ fileName: path.join(folder, file), transformRow });
    const dst = await tableWriter({
      systemConnection: dbhan,
      pureName,
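The transformRow hook is applied to every data row streamed from the folder's .jsonl files before it reaches tableWriter. A minimal sketch, with an illustrative connection and a hypothetical added column:

const dbgateApi = require('dbgate-api');

async function run() {
  await dbgateApi.importDbFromFolder({
    connection: { engine: 'mysql@dbgate-plugin-mysql', server: 'localhost', user: 'root' }, // illustrative
    folder: '/tmp/model', // YAML table definitions plus <table>.jsonl data files
    // hypothetical transform: stamp every imported row
    transformRow: row => ({ ...row, imported_at: new Date().toISOString() }),
  });
}

run();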
@@ -25,7 +25,7 @@ const importDatabase = require('./importDatabase');
const loadDatabase = require('./loadDatabase');
const generateModelSql = require('./generateModelSql');
const modifyJsonLinesReader = require('./modifyJsonLinesReader');
-const dataDuplicator = require('./dataDuplicator');
+const dataReplicator = require('./dataReplicator');
const dbModelToJson = require('./dbModelToJson');
const jsonToDbModel = require('./jsonToDbModel');
const jsonReader = require('./jsonReader');
@@ -35,6 +35,11 @@ const autoIndexForeignKeysTransform = require('./autoIndexForeignKeysTransform')
const generateDeploySql = require('./generateDeploySql');
const dropAllDbObjects = require('./dropAllDbObjects');
const importDbFromFolder = require('./importDbFromFolder');
+const zipDirectory = require('./zipDirectory');
+const unzipDirectory = require('./unzipDirectory');
+const zipJsonLinesData = require('./zipJsonLinesData');
+const unzipJsonLinesData = require('./unzipJsonLinesData');
+const unzipJsonLinesFile = require('./unzipJsonLinesFile');

const dbgateApi = {
  queryReader,
@@ -64,7 +69,7 @@ const dbgateApi = {
  loadDatabase,
  generateModelSql,
  modifyJsonLinesReader,
-  dataDuplicator,
+  dataReplicator,
  dbModelToJson,
  jsonToDbModel,
  dataTypeMapperTransform,
@@ -73,6 +78,11 @@ const dbgateApi = {
  generateDeploySql,
  dropAllDbObjects,
  importDbFromFolder,
+  zipDirectory,
+  unzipDirectory,
+  zipJsonLinesData,
+  unzipJsonLinesData,
+  unzipJsonLinesFile,
};

requirePlugin.initializeDbgateApi(dbgateApi);
@@ -6,10 +6,11 @@ const download = require('./download');
const logger = getLogger('jsonLinesReader');

class ParseStream extends stream.Transform {
-  constructor({ limitRows }) {
+  constructor({ limitRows, transformRow }) {
    super({ objectMode: true });
    this.wasHeader = false;
    this.limitRows = limitRows;
+    this.transformRow = transformRow;
    this.rowsWritten = 0;
  }
  _transform(chunk, encoding, done) {
@@ -26,7 +27,11 @@ class ParseStream extends stream.Transform {
      this.wasHeader = true;
    }
    if (!this.limitRows || this.rowsWritten < this.limitRows) {
-      this.push(obj);
+      if (this.transformRow) {
+        this.push(this.transformRow(obj));
+      } else {
+        this.push(obj);
+      }
      this.rowsWritten += 1;
    }
    done();
@@ -39,9 +44,10 @@ class ParseStream extends stream.Transform {
 * @param {string} options.fileName - file name or URL
 * @param {string} options.encoding - encoding of the file
 * @param {number} options.limitRows - maximum number of rows to read
+ * @param {((row: Record<string, any>) => Record<string, any>) | undefined} options.transformRow - function to transform each row
 * @returns {Promise<readerType>} - reader object
 */
-async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) {
+async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined, transformRow }) {
  logger.info(`Reading file ${fileName}`);

  const downloadedFile = await download(fileName);
@@ -52,7 +58,7 @@ async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undef
    encoding
  );
  const liner = byline(fileStream);
-  const parser = new ParseStream({ limitRows });
+  const parser = new ParseStream({ limitRows, transformRow });
  return [liner, parser];
}
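Because the transform runs inside ParseStream, it also applies when the reader is used standalone. A sketch that copies a JSON-lines file while dropping a column, assuming jsonLinesWriter and copyStream are available on the same API object (the writer change is shown next in this diff):

const dbgateApi = require('dbgate-api');

async function run() {
  const src = await dbgateApi.jsonLinesReader({
    fileName: '/tmp/model/customers.jsonl',
    limitRows: 1000,
    // hypothetical transform: strip a column before rows reach the writer
    transformRow: ({ internal_note, ...rest }) => rest,
  });
  const dst = await dbgateApi.jsonLinesWriter({ fileName: '/tmp/customers-clean.jsonl' });
  await dbgateApi.copyStream(src, dst);
}

run();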
@@ -36,9 +36,10 @@ async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true })
  logger.info(`Writing file ${fileName}`);
  const stringify = new StringifyStream({ header });
  const fileStream = fs.createWriteStream(fileName, encoding);
-  stringify.pipe(fileStream);
-  stringify['finisher'] = fileStream;
-  return stringify;
+  return [stringify, fileStream];
+  // stringify.pipe(fileStream);
+  // stringify['finisher'] = fileStream;
+  // return stringify;
}

module.exports = jsonLinesWriter;
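Returning the [stringify, fileStream] pair, instead of pre-piping and stashing the writable under the 'finisher' property, moves piping and completion handling to the consumer, which can wait on the real file stream. A sketch of manual consumption under that reading; someObjectModeReadable is a placeholder:

const [stringify, fileStream] = await jsonLinesWriter({ fileName: '/tmp/out.jsonl' });
someObjectModeReadable.pipe(stringify).pipe(fileStream);
await new Promise((resolve, reject) => {
  fileStream.on('finish', resolve);
  fileStream.on('error', reject);
});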
@@ -7,6 +7,8 @@ const logger = getLogger('queryReader');
 * Returns reader object for {@link copyStream} function. This reader object reads data from query.
 * @param {object} options
 * @param {connectionType} options.connection - connection object
+ * @param {object} options.systemConnection - system connection (result of driver.connect). If not provided, a new connection will be created
+ * @param {object} options.driver - driver object. If not provided, it will be loaded from connection
 * @param {string} options.query - SQL query
 * @param {string} [options.queryType] - query type
 * @param {string} [options.sql] - SQL query; obsolete, use query instead
@@ -16,6 +18,8 @@ async function queryReader({
  connection,
  query,
  queryType,
+  systemConnection,
+  driver,
  // obsolete; use query instead
  sql,
}) {
@@ -28,10 +32,13 @@ async function queryReader({
  logger.info({ sql: query || sql }, `Reading query`);
  // else console.log(`Reading query ${JSON.stringify(json)}`);

-  const driver = requireEngineDriver(connection);
-  const pool = await connectUtility(driver, connection, queryType == 'json' ? 'read' : 'script');
+  if (!driver) {
+    driver = requireEngineDriver(connection);
+  }
+  const dbhan = systemConnection || (await connectUtility(driver, connection, queryType == 'json' ? 'read' : 'script'));

  const reader =
-    queryType == 'json' ? await driver.readJsonQuery(pool, query) : await driver.readQuery(pool, query || sql);
+    queryType == 'json' ? await driver.readJsonQuery(dbhan, query) : await driver.readQuery(dbhan, query || sql);
  return reader;
}
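queryReader can now reuse an already opened driver and system connection (dbhan) instead of always creating its own; without them it behaves as before. A sketch of the simple form, with an illustrative connection:

const dbgateApi = require('dbgate-api');

async function run() {
  const src = await dbgateApi.queryReader({
    connection: { engine: 'mysql@dbgate-plugin-mysql', server: 'localhost', user: 'root' }, // illustrative
    query: 'SELECT id, name FROM customers',
  });
  const dst = await dbgateApi.jsonLinesWriter({ fileName: '/tmp/customers.jsonl' });
  await dbgateApi.copyStream(src, dst);
}

run();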
packages/api/src/shell/unzipDirectory.js (new file, 91 lines)
@@ -0,0 +1,91 @@
const yauzl = require('yauzl');
const fs = require('fs');
const path = require('path');
const { getLogger, extractErrorLogData } = require('dbgate-tools');

const logger = getLogger('unzipDirectory');

/**
 * Extracts an entire ZIP file, preserving its internal directory layout.
 *
 * @param {string} zipPath Path to the ZIP file on disk.
 * @param {string} outputDirectory Folder to create / overwrite with the contents.
 * @returns {Promise<boolean>} Resolves `true` on success, rejects on error.
 */
function unzipDirectory(zipPath, outputDirectory) {
  return new Promise((resolve, reject) => {
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
      if (err) return reject(err);

      /** Pending per-file extractions; we resolve the main promise after they're all done */
      const pending = [];

      // kick things off
      zipFile.readEntry();

      zipFile.on('entry', entry => {
        const destPath = path.join(outputDirectory, entry.fileName);

        // Handle directories (their names always end with "/" in ZIPs)
        if (/\/$/.test(entry.fileName)) {
          // Ensure the directory exists, then continue to the next entry
          fs.promises
            .mkdir(destPath, { recursive: true })
            .then(() => zipFile.readEntry())
            .catch(reject);
          return;
        }

        // Handle files
        const filePromise = fs.promises
          .mkdir(path.dirname(destPath), { recursive: true }) // make sure parent dirs exist
          .then(
            () =>
              new Promise((res, rej) => {
                zipFile.openReadStream(entry, (err, readStream) => {
                  if (err) return rej(err);

                  const writeStream = fs.createWriteStream(destPath);
                  readStream.pipe(writeStream);

                  // proceed to the next entry once we've consumed *this* one
                  readStream.on('end', () => zipFile.readEntry());

                  writeStream.on('finish', () => {
                    logger.info(`Extracted "${entry.fileName}" -> "${destPath}".`);
                    res();
                  });

                  writeStream.on('error', writeErr => {
                    logger.error(
                      extractErrorLogData(writeErr),
                      `Error extracting "${entry.fileName}" from "${zipPath}".`
                    );
                    rej(writeErr);
                  });
                });
              })
          );

        pending.push(filePromise);
      });

      // Entire archive enumerated; wait for all streams to finish
      zipFile.on('end', () => {
        Promise.all(pending)
          .then(() => {
            logger.info(`Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
            resolve(true);
          })
          .catch(reject);
      });

      zipFile.on('error', err => {
        logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
        reject(err);
      });
    });
  });
}

module.exports = unzipDirectory;
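Per-file extractions are collected as promises in the pending array, so the function resolves only after every write stream has finished, not merely after the archive has been enumerated. Usage is a single call:

const unzipDirectory = require('./unzipDirectory'); // path as in this diff

await unzipDirectory('/tmp/backup.zip', '/tmp/backup-extracted');
// resolves true after all entries are written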
packages/api/src/shell/unzipJsonLinesData.js (new file, 60 lines)
@@ -0,0 +1,60 @@
const yauzl = require('yauzl');
const fs = require('fs');
const { jsonLinesParse } = require('dbgate-tools');

function unzipJsonLinesData(zipPath) {
  return new Promise((resolve, reject) => {
    // Open the zip file
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
      if (err) {
        return reject(err);
      }

      const results = {};

      // Start reading entries
      zipfile.readEntry();

      zipfile.on('entry', entry => {
        // Only process .jsonl files
        if (/\.jsonl$/i.test(entry.fileName)) {
          zipfile.openReadStream(entry, (err, readStream) => {
            if (err) {
              return reject(err);
            }

            const chunks = [];
            readStream.on('data', chunk => chunks.push(chunk));
            readStream.on('end', () => {
              try {
                const fileContent = Buffer.concat(chunks).toString('utf-8');
                const parsedJson = jsonLinesParse(fileContent);
                results[entry.fileName.replace(/\.jsonl$/, '')] = parsedJson;
              } catch (parseError) {
                return reject(parseError);
              }

              // Move to the next entry
              zipfile.readEntry();
            });
          });
        } else {
          // Not a .jsonl file, skip
          zipfile.readEntry();
        }
      });

      // Resolve when no more entries
      zipfile.on('end', () => {
        resolve(results);
      });

      // Catch errors from zipfile
      zipfile.on('error', zipErr => {
        reject(zipErr);
      });
    });
  });
}

module.exports = unzipJsonLinesData;
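The resolved value maps each .jsonl entry name (without extension) to its parsed rows. A sketch:

const unzipJsonLinesData = require('./unzipJsonLinesData');

const db = await unzipJsonLinesData('/tmp/export.zip');
// e.g. { customers: [{ id: 1, ... }], orders: [{ id: 10, ... }] }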
packages/api/src/shell/unzipJsonLinesFile.js (new file, 59 lines)
@@ -0,0 +1,59 @@
const yauzl = require('yauzl');
const fs = require('fs');
const { jsonLinesParse } = require('dbgate-tools');

function unzipJsonLinesFile(zipPath, fileInZip) {
  return new Promise((resolve, reject) => {
    // Open the zip file
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
      if (err) {
        return reject(err);
      }

      let result = null;

      // Start reading entries
      zipfile.readEntry();

      zipfile.on('entry', entry => {
        if (entry.fileName == fileInZip) {
          zipfile.openReadStream(entry, (err, readStream) => {
            if (err) {
              return reject(err);
            }

            const chunks = [];
            readStream.on('data', chunk => chunks.push(chunk));
            readStream.on('end', () => {
              try {
                const fileContent = Buffer.concat(chunks).toString('utf-8');
                const parsedJson = jsonLinesParse(fileContent);
                result = parsedJson;
              } catch (parseError) {
                return reject(parseError);
              }

              // Move to the next entry
              zipfile.readEntry();
            });
          });
        } else {
          // Not the requested file, skip
          zipfile.readEntry();
        }
      });

      // Resolve when no more entries
      zipfile.on('end', () => {
        resolve(result);
      });

      // Catch errors from zipfile
      zipfile.on('error', zipErr => {
        reject(zipErr);
      });
    });
  });
}

module.exports = unzipJsonLinesFile;
packages/api/src/shell/zipDirectory.js (new file, 49 lines)
@@ -0,0 +1,49 @@
const fs = require('fs');
const path = require('path');
const archiver = require('archiver');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const { archivedir } = require('../utility/directories');
const logger = getLogger('compressDirectory');

function zipDirectory(inputDirectory, outputFile) {
  if (outputFile.startsWith('archive:')) {
    outputFile = path.join(archivedir(), outputFile.substring('archive:'.length));
  }

  return new Promise((resolve, reject) => {
    const output = fs.createWriteStream(outputFile);
    const archive = archiver('zip', { zlib: { level: 9 } }); // level: 9 => best compression

    // Listen for all archive data to be written
    output.on('close', () => {
      logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
      resolve();
    });

    archive.on('warning', err => {
      logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
    });

    archive.on('error', err => {
      logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
      reject(err);
    });

    // Pipe archive data to the file
    archive.pipe(output);

    // Append files from a folder
    archive.directory(inputDirectory, false, entryData => {
      if (entryData.name.endsWith('.zip')) {
        return false; // returning false means "do not include"
      }
      // otherwise, include it
      return entryData;
    });

    // Finalize the archive
    archive.finalize();
  });
}

module.exports = zipDirectory;
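Note that .zip entries inside the input folder are filtered out, which avoids recursively archiving a previous output. A sketch of both target forms:

const zipDirectory = require('./zipDirectory');

// plain filesystem target
await zipDirectory('/tmp/backup-extracted', '/tmp/backup.zip');

// 'archive:' prefix resolves inside DbGate's archive directory,
// the same convention handled by the download helper earlier in this diff
await zipDirectory('/tmp/backup-extracted', 'archive:backup.zip');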
packages/api/src/shell/zipJsonLinesData.js (new file, 49 lines)
@@ -0,0 +1,49 @@
const fs = require('fs');
const _ = require('lodash');
const path = require('path');
const archiver = require('archiver');
const { getLogger, extractErrorLogData, jsonLinesStringify } = require('dbgate-tools');
const { archivedir } = require('../utility/directories');
const logger = getLogger('compressDirectory');

function zipJsonLinesData(jsonDb, outputFile) {
  if (outputFile.startsWith('archive:')) {
    outputFile = path.join(archivedir(), outputFile.substring('archive:'.length));
  }

  return new Promise((resolve, reject) => {
    const output = fs.createWriteStream(outputFile);
    const archive = archiver('zip', { zlib: { level: 9 } }); // level: 9 => best compression

    // Listen for all archive data to be written
    output.on('close', () => {
      logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
      resolve();
    });

    archive.on('warning', err => {
      logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
    });

    archive.on('error', err => {
      logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
      reject(err);
    });

    // Pipe archive data to the file
    archive.pipe(output);

    for (const key in jsonDb) {
      const data = jsonDb[key];
      if (_.isArray(data)) {
        const jsonString = jsonLinesStringify(data);
        archive.append(jsonString, { name: `${key}.jsonl` });
      }
    }

    // Finalize the archive
    archive.finalize();
  });
}

module.exports = zipJsonLinesData;
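Only array-valued keys of the input object are serialized; each becomes a <key>.jsonl entry, making this the inverse of unzipJsonLinesData above. A sketch:

const zipJsonLinesData = require('./zipJsonLinesData');

await zipJsonLinesData(
  { customers: [{ id: 1, name: 'Alice' }], orders: [{ id: 10, customer_id: 1 }] },
  'archive:export.zip'
);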
packages/api/src/storageModel.js (new file, 819 lines)
@@ -0,0 +1,819 @@
module.exports = {
  "tables": [
    {
      "pureName": "auth_methods",
      "columns": [
        { "pureName": "auth_methods", "columnName": "id", "dataType": "int", "notNull": true },
        { "pureName": "auth_methods", "columnName": "name", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "auth_methods", "columnName": "type", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "auth_methods", "columnName": "amoid", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "auth_methods", "columnName": "is_disabled", "dataType": "int", "notNull": false },
        { "pureName": "auth_methods", "columnName": "is_default", "dataType": "int", "notNull": false },
        { "pureName": "auth_methods", "columnName": "is_collapsed", "dataType": "int", "notNull": false }
      ],
      "foreignKeys": [],
      "primaryKey": { "pureName": "auth_methods", "constraintType": "primaryKey", "columns": [{ "columnName": "id" }] },
      "preloadedRows": [
        { "id": -1, "amoid": "790ca4d2-7f01-4800-955b-d691b890cc50", "name": "Anonymous", "type": "none" },
        { "id": -2, "amoid": "53db1cbf-f488-44d9-8670-7162510eb09c", "name": "Local", "type": "local" }
      ]
    },
    {
      "pureName": "auth_methods_config",
      "columns": [
        { "pureName": "auth_methods_config", "columnName": "id", "dataType": "int", "autoIncrement": true, "notNull": true },
        { "pureName": "auth_methods_config", "columnName": "auth_method_id", "dataType": "int", "notNull": true },
        { "pureName": "auth_methods_config", "columnName": "key", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "auth_methods_config", "columnName": "value", "dataType": "varchar(250)", "notNull": false }
      ],
      "foreignKeys": [
        { "constraintType": "foreignKey", "pureName": "auth_methods_config", "refTableName": "auth_methods", "deleteAction": "CASCADE", "columns": [{ "columnName": "auth_method_id", "refColumnName": "id" }] }
      ],
      "primaryKey": { "pureName": "auth_methods_config", "constraintType": "primaryKey", "columns": [{ "columnName": "id" }] }
    },
    {
      "pureName": "config",
      "columns": [
        { "pureName": "config", "columnName": "id", "dataType": "int", "autoIncrement": true, "notNull": true },
        { "pureName": "config", "columnName": "group", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "config", "columnName": "key", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "config", "columnName": "value", "dataType": "varchar(1000)", "notNull": false }
      ],
      "foreignKeys": [],
      "primaryKey": { "pureName": "config", "constraintType": "primaryKey", "columns": [{ "columnName": "id" }] }
    },
    {
      "pureName": "connections",
      "columns": [
        { "pureName": "connections", "columnName": "id", "dataType": "int", "autoIncrement": true, "notNull": true },
        { "pureName": "connections", "columnName": "conid", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "displayName", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "connectionColor", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "engine", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "server", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "databaseFile", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "useDatabaseUrl", "dataType": "int", "notNull": false },
        { "pureName": "connections", "columnName": "databaseUrl", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "authType", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "port", "dataType": "varchar(20)", "notNull": false },
        { "pureName": "connections", "columnName": "serviceName", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "serviceNameType", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "socketPath", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "user", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "password", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "passwordMode", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "treeKeySeparator", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "windowsDomain", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "isReadOnly", "dataType": "int", "notNull": false },
        { "pureName": "connections", "columnName": "trustServerCertificate", "dataType": "int", "notNull": false },
        { "pureName": "connections", "columnName": "defaultDatabase", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "singleDatabase", "dataType": "int", "notNull": false },
        { "pureName": "connections", "columnName": "useSshTunnel", "dataType": "int", "notNull": false },
        { "pureName": "connections", "columnName": "sshHost", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "sshPort", "dataType": "varchar(20)", "notNull": false },
        { "pureName": "connections", "columnName": "sshMode", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "sshKeyFile", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "sshKeyfilePassword", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "sshLogin", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "sshPassword", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "sshBastionHost", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "useSsl", "dataType": "int", "notNull": false },
        { "pureName": "connections", "columnName": "sslCaFile", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "sslCertFilePassword", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "sslKeyFile", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "sslRejectUnauthorized", "dataType": "int", "notNull": false },
        { "pureName": "connections", "columnName": "clientLibraryPath", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "useRedirectDbLogin", "dataType": "int", "notNull": false },
        { "pureName": "connections", "columnName": "allowedDatabases", "dataType": "varchar(500)", "notNull": false },
        { "pureName": "connections", "columnName": "allowedDatabasesRegex", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "endpoint", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "endpointKey", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "accessKeyId", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "secretAccessKey", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "connections", "columnName": "awsRegion", "dataType": "varchar(250)", "notNull": false }
      ],
      "foreignKeys": [],
      "primaryKey": { "pureName": "connections", "constraintType": "primaryKey", "columns": [{ "columnName": "id" }] }
    },
    {
      "pureName": "roles",
      "columns": [
        { "pureName": "roles", "columnName": "id", "dataType": "int", "autoIncrement": true, "notNull": true },
        { "pureName": "roles", "columnName": "name", "dataType": "varchar(250)", "notNull": false }
      ],
      "foreignKeys": [],
      "primaryKey": { "pureName": "roles", "constraintType": "primaryKey", "columns": [{ "columnName": "id" }] },
      "preloadedRows": [
        { "id": -1, "name": "anonymous-user" },
        { "id": -2, "name": "logged-user" },
        { "id": -3, "name": "superadmin" }
      ]
    },
    {
      "pureName": "role_connections",
      "columns": [
        { "pureName": "role_connections", "columnName": "id", "dataType": "int", "autoIncrement": true, "notNull": true },
        { "pureName": "role_connections", "columnName": "role_id", "dataType": "int", "notNull": true },
        { "pureName": "role_connections", "columnName": "connection_id", "dataType": "int", "notNull": true }
      ],
      "foreignKeys": [
        { "constraintType": "foreignKey", "pureName": "role_connections", "refTableName": "roles", "deleteAction": "CASCADE", "columns": [{ "columnName": "role_id", "refColumnName": "id" }] },
        { "constraintType": "foreignKey", "pureName": "role_connections", "refTableName": "connections", "deleteAction": "CASCADE", "columns": [{ "columnName": "connection_id", "refColumnName": "id" }] }
      ],
      "primaryKey": { "pureName": "role_connections", "constraintType": "primaryKey", "columns": [{ "columnName": "id" }] }
    },
    {
      "pureName": "role_permissions",
      "columns": [
        { "pureName": "role_permissions", "columnName": "id", "dataType": "int", "autoIncrement": true, "notNull": true },
        { "pureName": "role_permissions", "columnName": "role_id", "dataType": "int", "notNull": true },
        { "pureName": "role_permissions", "columnName": "permission", "dataType": "varchar(250)", "notNull": true }
      ],
      "foreignKeys": [
        { "constraintType": "foreignKey", "pureName": "role_permissions", "refTableName": "roles", "deleteAction": "CASCADE", "columns": [{ "columnName": "role_id", "refColumnName": "id" }] }
      ],
      "primaryKey": { "pureName": "role_permissions", "constraintType": "primaryKey", "columns": [{ "columnName": "id" }] }
    },
    {
      "pureName": "users",
      "columns": [
        { "pureName": "users", "columnName": "id", "dataType": "int", "autoIncrement": true, "notNull": true },
        { "pureName": "users", "columnName": "login", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "users", "columnName": "password", "dataType": "varchar(250)", "notNull": false },
        { "pureName": "users", "columnName": "email", "dataType": "varchar(250)", "notNull": false }
      ],
      "foreignKeys": [],
      "primaryKey": { "pureName": "users", "constraintType": "primaryKey", "columns": [{ "columnName": "id" }] }
    },
    {
      "pureName": "user_connections",
      "columns": [
        { "pureName": "user_connections", "columnName": "id", "dataType": "int", "autoIncrement": true, "notNull": true },
        { "pureName": "user_connections", "columnName": "user_id", "dataType": "int", "notNull": true },
        { "pureName": "user_connections", "columnName": "connection_id", "dataType": "int", "notNull": true }
      ],
      "foreignKeys": [
        { "constraintType": "foreignKey", "pureName": "user_connections", "refTableName": "users", "deleteAction": "CASCADE", "columns": [{ "columnName": "user_id", "refColumnName": "id" }] },
        { "constraintType": "foreignKey", "pureName": "user_connections", "refTableName": "connections", "deleteAction": "CASCADE", "columns": [{ "columnName": "connection_id", "refColumnName": "id" }] }
      ],
      "primaryKey": { "pureName": "user_connections", "constraintType": "primaryKey", "columns": [{ "columnName": "id" }] }
    },
    {
      "pureName": "user_permissions",
      "columns": [
        { "pureName": "user_permissions", "columnName": "id", "dataType": "int", "autoIncrement": true, "notNull": true },
        { "pureName": "user_permissions", "columnName": "user_id", "dataType": "int", "notNull": true },
        { "pureName": "user_permissions", "columnName": "permission", "dataType": "varchar(250)", "notNull": true }
      ],
      "foreignKeys": [
        { "constraintType": "foreignKey", "pureName": "user_permissions", "refTableName": "users", "deleteAction": "CASCADE", "columns": [{ "columnName": "user_id", "refColumnName": "id" }] }
      ],
      "primaryKey": { "pureName": "user_permissions", "constraintType": "primaryKey", "columns": [{ "columnName": "id" }] }
    },
    {
      "pureName": "user_roles",
      "columns": [
        { "pureName": "user_roles", "columnName": "id", "dataType": "int", "autoIncrement": true, "notNull": true },
        { "pureName": "user_roles", "columnName": "user_id", "dataType": "int", "notNull": true },
        { "pureName": "user_roles", "columnName": "role_id", "dataType": "int", "notNull": true }
      ],
      "foreignKeys": [
        { "constraintType": "foreignKey", "pureName": "user_roles", "refTableName": "users", "deleteAction": "CASCADE", "columns": [{ "columnName": "user_id", "refColumnName": "id" }] },
        { "constraintType": "foreignKey", "pureName": "user_roles", "refTableName": "roles", "deleteAction": "CASCADE", "columns": [{ "columnName": "role_id", "refColumnName": "id" }] }
      ],
      "primaryKey": { "pureName": "user_roles", "constraintType": "primaryKey", "columns": [{ "columnName": "id" }] }
    }
  ],
  "collections": [],
  "views": [],
  "matviews": [],
  "functions": [],
  "procedures": [],
  "triggers": []
};
@@ -60,6 +60,10 @@ class DatastoreProxy {
      // if (this.disconnected) return;
      this.subprocess = null;
    });
+    this.subprocess.on('error', err => {
+      logger.error(extractErrorLogData(err), 'Error in data store subprocess');
+      this.subprocess = null;
+    });
    this.subprocess.send({ msgtype: 'open', file: this.file });
  }
  return this.subprocess;
@@ -36,6 +36,10 @@ async function callRefactorSqlQueryApi(query, task, structure, dialect) {
  return null;
}

+function getLicenseHttpHeaders() {
+  return {};
+}
+
module.exports = {
  isAuthProxySupported,
  authProxyGetRedirectUrl,
@@ -47,4 +51,5 @@ module.exports = {
  callTextToSqlApi,
  callCompleteOnCursorApi,
  callRefactorSqlQueryApi,
+  getLicenseHttpHeaders,
};
packages/api/src/utility/cloudIntf.js (new file, 380 lines)
@@ -0,0 +1,380 @@
const axios = require('axios');
const fs = require('fs-extra');
const _ = require('lodash');
const path = require('path');
const { getLicenseHttpHeaders } = require('./authProxy');
const { getLogger, extractErrorLogData, jsonLinesParse } = require('dbgate-tools');
const { datadir } = require('./directories');
const platformInfo = require('./platformInfo');
const connections = require('../controllers/connections');
const { isProApp } = require('./checkLicense');
const socket = require('./socket');
const config = require('../controllers/config');
const simpleEncryptor = require('simple-encryptor');
const currentVersion = require('../currentVersion');
const { getPublicIpInfo } = require('./hardwareFingerprint');

const logger = getLogger('cloudIntf');

let cloudFiles = null;

const DBGATE_IDENTITY_URL = process.env.LOCAL_DBGATE_IDENTITY
  ? 'http://localhost:3103'
  : process.env.DEVWEB || process.env.DEVMODE
  ? 'https://identity.dbgate.udolni.net'
  : 'https://identity.dbgate.io';

const DBGATE_CLOUD_URL = process.env.LOCAL_DBGATE_CLOUD
  ? 'http://localhost:3110'
  : process.env.DEVWEB || process.env.DEVMODE
  ? 'https://cloud.dbgate.udolni.net'
  : 'https://cloud.dbgate.io';

async function createDbGateIdentitySession(client) {
  const resp = await axios.default.post(
    `${DBGATE_IDENTITY_URL}/api/create-session`,
    {
      client,
    },
    {
      headers: {
        ...getLicenseHttpHeaders(),
        'Content-Type': 'application/json',
      },
    }
  );
  return {
    sid: resp.data.sid,
    url: `${DBGATE_IDENTITY_URL}/api/signin/${resp.data.sid}`,
  };
}

function startCloudTokenChecking(sid, callback) {
  const started = Date.now();
  const interval = setInterval(async () => {
    if (Date.now() - started > 60 * 1000) {
      clearInterval(interval);
      return;
    }

    try {
      // console.log(`Checking cloud token for session: ${DBGATE_IDENTITY_URL}/api/get-token/${sid}`);
      const resp = await axios.default.get(`${DBGATE_IDENTITY_URL}/api/get-token/${sid}`, {
        headers: {
          ...getLicenseHttpHeaders(),
        },
      });
      // console.log('CHECK RESP:', resp.data);

      if (resp.data.email) {
        clearInterval(interval);
        callback(resp.data);
      }
    } catch (err) {
      logger.error(extractErrorLogData(err), 'Error checking cloud token');
    }
  }, 500);
}

async function loadCloudFiles() {
  try {
    const fileContent = await fs.readFile(path.join(datadir(), 'cloud-files.jsonl'), 'utf-8');
    const parsedJson = jsonLinesParse(fileContent);
    cloudFiles = _.sortBy(parsedJson, x => `${x.folder}/${x.title}`);
  } catch (err) {
    cloudFiles = [];
  }
}

async function collectCloudFilesSearchTags() {
  const res = [];
  if (platformInfo.isElectron) {
    res.push('app');
  } else {
    res.push('web');
  }
  if (platformInfo.isWindows) {
    res.push('windows');
  }
  if (platformInfo.isMac) {
    res.push('mac');
  }
  if (platformInfo.isLinux) {
    res.push('linux');
  }
  if (platformInfo.isAwsUbuntuLayout) {
    res.push('aws');
  }
  if (platformInfo.isAzureUbuntuLayout) {
    res.push('azure');
  }
  if (platformInfo.isSnap) {
    res.push('snap');
  }
  if (platformInfo.isDocker) {
    res.push('docker');
  }
  if (platformInfo.isNpmDist) {
    res.push('npm');
  }
  const engines = await connections.getUsedEngines();
  const engineTags = engines.map(engine => engine.split('@')[0]);
  res.push(...engineTags);

  // team-premium and trials will return the same cloud files as premium - no need to check
  res.push(isProApp() ? 'premium' : 'community');

  return res;
}

async function getCloudSigninHolder() {
  const settingsValue = await config.getSettings();
  const holder = settingsValue['cloudSigninTokenHolder'];
  return holder;
}

async function getCloudSigninHeaders(holder = null) {
  if (!holder) {
    holder = await getCloudSigninHolder();
  }
  if (holder) {
    return {
      'x-cloud-login': holder.token,
    };
  }
  return null;
}

async function updateCloudFiles(isRefresh) {
  let lastCloudFilesTags;
  try {
    lastCloudFilesTags = await fs.readFile(path.join(datadir(), 'cloud-files-tags.txt'), 'utf-8');
  } catch (err) {
    lastCloudFilesTags = '';
  }

  const ipInfo = await getPublicIpInfo();

  const tags = (await collectCloudFilesSearchTags()).join(',');
  let lastCheckedTm = 0;
  if (tags == lastCloudFilesTags && cloudFiles.length > 0) {
    lastCheckedTm = _.max(cloudFiles.map(x => parseInt(x.modifiedTm)));
  }

  logger.info({ tags, lastCheckedTm }, 'Downloading cloud files');

  const resp = await axios.default.get(
    `${DBGATE_CLOUD_URL}/public-cloud-updates?lastCheckedTm=${lastCheckedTm}&tags=${tags}&isRefresh=${
      isRefresh ? 1 : 0
    }&country=${ipInfo?.country || ''}`,
    {
      headers: {
        ...getLicenseHttpHeaders(),
        ...(await getCloudSigninHeaders()),
        'x-app-version': currentVersion.version,
      },
    }
  );

  logger.info(`Downloaded ${resp.data.length} cloud files`);

  const filesByPath = lastCheckedTm == 0 ? {} : _.keyBy(cloudFiles, 'path');
  for (const file of resp.data) {
    if (file.isDeleted) {
      delete filesByPath[file.path];
    } else {
      filesByPath[file.path] = file;
    }
  }

  cloudFiles = Object.values(filesByPath);

  await fs.writeFile(path.join(datadir(), 'cloud-files.jsonl'), cloudFiles.map(x => JSON.stringify(x)).join('\n'));
  await fs.writeFile(path.join(datadir(), 'cloud-files-tags.txt'), tags);

  socket.emitChanged(`public-cloud-changed`);
}

async function startCloudFiles() {
  loadCloudFiles();
}

async function getPublicCloudFiles() {
  if (!cloudFiles) {
    await loadCloudFiles();
  }
  return cloudFiles;
}

async function getPublicFileData(path) {
  const resp = await axios.default.get(`${DBGATE_CLOUD_URL}/public/${path}`, {
    headers: {
      ...getLicenseHttpHeaders(),
    },
  });
  return resp.data;
}

async function refreshPublicFiles(isRefresh) {
  if (!cloudFiles) {
    await loadCloudFiles();
  }
  try {
    await updateCloudFiles(isRefresh);
  } catch (err) {
    logger.error(extractErrorLogData(err), 'Error updating cloud files');
  }
}

async function callCloudApiGet(endpoint, signinHolder = null, additionalHeaders = {}) {
  if (!signinHolder) {
    signinHolder = await getCloudSigninHolder();
  }
  if (!signinHolder) {
    return null;
  }
  const signinHeaders = await getCloudSigninHeaders(signinHolder);

  const resp = await axios.default.get(`${DBGATE_CLOUD_URL}/${endpoint}`, {
    headers: {
      ...getLicenseHttpHeaders(),
      ...signinHeaders,
      ...additionalHeaders,
    },
    validateStatus: status => status < 500,
  });
  const { errorMessage } = resp.data;
  if (errorMessage) {
    return { apiErrorMessage: errorMessage };
  }
  return resp.data;
}

async function callCloudApiPost(endpoint, body, signinHolder = null) {
  if (!signinHolder) {
    signinHolder = await getCloudSigninHolder();
  }
  if (!signinHolder) {
    return null;
  }
  const signinHeaders = await getCloudSigninHeaders(signinHolder);

  const resp = await axios.default.post(`${DBGATE_CLOUD_URL}/${endpoint}`, body, {
    headers: {
      ...getLicenseHttpHeaders(),
      ...signinHeaders,
    },
    validateStatus: status => status < 500,
  });
  const { errorMessage, isLicenseLimit, limitedLicenseLimits } = resp.data;
  if (errorMessage) {
    return {
      apiErrorMessage: errorMessage,
      apiErrorIsLicenseLimit: isLicenseLimit,
      apiErrorLimitedLicenseLimits: limitedLicenseLimits,
    };
  }
  return resp.data;
}

async function getCloudFolderEncryptor(folid) {
  const { encryptionKey } = await callCloudApiGet(`folder-key/${folid}`);
  if (!encryptionKey) {
    throw new Error('No encryption key for folder: ' + folid);
  }
  return simpleEncryptor.createEncryptor(encryptionKey);
}

async function getCloudContent(folid, cntid) {
  const signinHolder = await getCloudSigninHolder();
  if (!signinHolder) {
    throw new Error('Not signed in');
  }

  const encryptor = simpleEncryptor.createEncryptor(signinHolder.encryptionKey);

  const { content, name, type, contentFolder, contentType, apiErrorMessage } = await callCloudApiGet(
    `content/${folid}/${cntid}`,
    signinHolder,
    {
      'x-kehid': signinHolder.kehid,
    }
  );

  if (apiErrorMessage) {
    return { apiErrorMessage };
  }

  return {
    content: encryptor.decrypt(content),
    name,
    type,
    contentFolder,
    contentType,
  };
}

/**
 *
 * @returns Promise<{ cntid: string } | { apiErrorMessage: string }>
 */
async function putCloudContent(folid, cntid, content, name, type, contentFolder = null, contentType = null) {
  const signinHolder = await getCloudSigninHolder();
  if (!signinHolder) {
    throw new Error('Not signed in');
  }

  const encryptor = simpleEncryptor.createEncryptor(signinHolder.encryptionKey);

  const resp = await callCloudApiPost(
    `put-content`,
    {
      folid,
      cntid,
      name,
      type,
      kehid: signinHolder.kehid,
      content: encryptor.encrypt(content),
      contentFolder,
      contentType,
    },
    signinHolder
  );
  socket.emitChanged('cloud-content-changed');
  socket.emit('cloud-content-updated');
  return resp;
}

const cloudConnectionCache = {};
async function loadCachedCloudConnection(folid, cntid) {
  const cacheKey = `${folid}|${cntid}`;
  if (!cloudConnectionCache[cacheKey]) {
    const { content } = await getCloudContent(folid, cntid);
    cloudConnectionCache[cacheKey] = {
      ...JSON.parse(content),
      _id: `cloud://${folid}/${cntid}`,
    };
  }
  return cloudConnectionCache[cacheKey];
}

function removeCloudCachedConnection(folid, cntid) {
  const cacheKey = `${folid}|${cntid}`;
  delete cloudConnectionCache[cacheKey];
}

module.exports = {
  createDbGateIdentitySession,
  startCloudTokenChecking,
  startCloudFiles,
  getPublicCloudFiles,
  getPublicFileData,
  refreshPublicFiles,
  callCloudApiGet,
  callCloudApiPost,
  getCloudFolderEncryptor,
  getCloudContent,
  loadCachedCloudConnection,
  putCloudContent,
  removeCloudCachedConnection,
};
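A sketch of the sign-in handshake built from these functions; the 'app' client value and the console output are illustrative, and in the real flow the token holder is persisted into settings rather than logged:

const { createDbGateIdentitySession, startCloudTokenChecking } = require('./cloudIntf');

async function signIn() {
  // 1. create an identity session and send the user to its URL
  const { sid, url } = await createDbGateIdentitySession('app'); // client value assumed
  console.log('Open in browser:', url);

  // 2. poll for up to a minute until the sign-in completes
  startCloudTokenChecking(sid, tokenData => {
    console.log('Signed in as', tokenData.email);
  });
}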
@@ -1,61 +1,3 @@
-const axios = require('axios');
-const fs = require('fs');
-const fsp = require('fs/promises');
-const semver = require('semver');
-const currentVersion = require('../currentVersion');
-const { getLogger, extractErrorLogData } = require('dbgate-tools');
-
-const logger = getLogger('cloudUpgrade');
-
-async function checkCloudUpgrade() {
-  try {
-    const resp = await axios.default.get('https://api.github.com/repos/dbgate/dbgate/releases/latest');
-    const json = resp.data;
-    const version = json.name.substring(1);
-    let cloudDownloadedVersion = null;
-    try {
-      cloudDownloadedVersion = await fsp.readFile(process.env.CLOUD_UPGRADE_FILE + '.version', 'utf-8');
-    } catch (err) {
-      cloudDownloadedVersion = null;
-    }
-    if (
-      semver.gt(version, currentVersion.version) &&
-      (!cloudDownloadedVersion || semver.gt(version, cloudDownloadedVersion))
-    ) {
-      logger.info(`New version available: ${version}`);
-      const zipUrl = json.assets.find(x => x.name == 'cloud-build.zip').browser_download_url;
-
-      const writer = fs.createWriteStream(process.env.CLOUD_UPGRADE_FILE);
-
-      const response = await axios.default({
-        url: zipUrl,
-        method: 'GET',
-        responseType: 'stream',
-      });
-
-      response.data.pipe(writer);
-
-      await new Promise((resolve, reject) => {
-        writer.on('finish', resolve);
-        writer.on('error', reject);
-      });
-      await fsp.writeFile(process.env.CLOUD_UPGRADE_FILE + '.version', version);
-
-      logger.info(`Downloaded new version from ${zipUrl}`);
-    } else {
-      logger.info(`Checked version ${version} is not newer than ${cloudDownloadedVersion ?? currentVersion.version}, upgrade skipped`);
-    }
-  } catch (err) {
-    logger.error(extractErrorLogData(err), 'Error checking cloud upgrade');
-  }
-}
-
-function startCloudUpgradeTimer() {
-  // at first in 5 seconds
-  setTimeout(checkCloudUpgrade, 5000);
-
-  // hourly
-  setInterval(checkCloudUpgrade, 60 * 60 * 1000);
-}
+function startCloudUpgradeTimer() {}

module.exports = startCloudUpgradeTimer;
@@ -96,7 +96,9 @@ async function connectUtility(driver, storedConnection, connectionMode, additional
    ...decryptConnection(connectionLoaded),
  };

-  if (!connection.port && driver.defaultPort) connection.port = driver.defaultPort.toString();
+  if (!connection.port && driver.defaultPort) {
+    connection.port = driver.defaultPort.toString();
+  }

  if (connection.useSshTunnel) {
    const tunnel = await getSshTunnelProxy(connection);
@@ -5,12 +5,16 @@ const path = require('path');
|
||||
const _ = require('lodash');
|
||||
|
||||
const { datadir } = require('./directories');
|
||||
const { encryptionKeyArg } = require('./processArgs');
|
||||
|
||||
const defaultEncryptionKey = 'mQAUaXhavRGJDxDTXSCg7Ej0xMmGCrx6OKA07DIMBiDcYYkvkaXjTAzPUEHEHEf9';
|
||||
|
||||
let _encryptionKey = null;
|
||||
|
||||
function loadEncryptionKey() {
|
||||
if (encryptionKeyArg) {
|
||||
return encryptionKeyArg;
|
||||
}
|
||||
if (_encryptionKey) {
|
||||
return _encryptionKey;
|
||||
}
|
||||
@@ -33,9 +37,29 @@ function loadEncryptionKey() {
|
||||
return _encryptionKey;
|
||||
}
|
||||
|
||||
async function loadEncryptionKeyFromExternal(storedValue, setStoredValue) {
|
||||
const encryptor = simpleEncryptor.createEncryptor(defaultEncryptionKey);
|
||||
|
||||
if (!storedValue) {
|
||||
const generatedKey = crypto.randomBytes(32);
|
||||
const newKey = generatedKey.toString('hex');
|
||||
const result = {
|
||||
encryptionKey: newKey,
|
||||
};
|
||||
await setStoredValue(encryptor.encrypt(result));
|
||||
|
||||
setEncryptionKey(newKey);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const data = encryptor.decrypt(storedValue);
|
||||
setEncryptionKey(data['encryptionKey']);
|
||||
}
|
||||
|
||||
let _encryptor = null;
|
||||
|
||||
function getEncryptor() {
|
||||
function getInternalEncryptor() {
|
||||
if (_encryptor) {
|
||||
return _encryptor;
|
||||
}
|
||||
@@ -43,35 +67,46 @@ function getEncryptor() {
|
||||
return _encryptor;
|
||||
}
|
||||
|
||||
function encryptPasswordField(connection, field) {
|
||||
if (
|
||||
connection &&
|
||||
connection[field] &&
|
||||
!connection[field].startsWith('crypt:') &&
|
||||
connection.passwordMode != 'saveRaw'
|
||||
) {
|
||||
return {
|
||||
...connection,
|
||||
[field]: 'crypt:' + getEncryptor().encrypt(connection[field]),
|
||||
};
|
||||
function encryptPasswordString(password) {
|
||||
if (password && !password.startsWith('crypt:')) {
|
||||
return 'crypt:' + getInternalEncryptor().encrypt(password);
|
||||
}
|
||||
return connection;
|
||||
return password;
|
||||
}
|
||||
|
||||
function decryptPasswordField(connection, field) {
|
||||
if (connection && connection[field] && connection[field].startsWith('crypt:')) {
|
||||
return {
|
||||
...connection,
|
||||
[field]: getEncryptor().decrypt(connection[field].substring('crypt:'.length)),
|
||||
};
|
||||
function decryptPasswordString(password) {
|
||||
if (password && password.startsWith('crypt:')) {
|
||||
return getInternalEncryptor().decrypt(password.substring('crypt:'.length));
|
||||
}
|
||||
return connection;
|
||||
return password;
|
||||
}
|
||||
|
||||
function encryptConnection(connection) {
|
||||
connection = encryptPasswordField(connection, 'password');
|
||||
connection = encryptPasswordField(connection, 'sshPassword');
|
||||
connection = encryptPasswordField(connection, 'sshKeyfilePassword');
|
||||
function encryptObjectPasswordField(obj, field, encryptor = null) {
|
||||
if (obj && obj[field] && !obj[field].startsWith('crypt:')) {
|
||||
return {
|
||||
...obj,
|
||||
[field]: 'crypt:' + (encryptor || getInternalEncryptor()).encrypt(obj[field]),
|
||||
};
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
||||
function decryptObjectPasswordField(obj, field) {
|
||||
if (obj && obj[field] && obj[field].startsWith('crypt:')) {
|
||||
return {
|
||||
...obj,
|
||||
[field]: getInternalEncryptor().decrypt(obj[field].substring('crypt:'.length)),
|
||||
};
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
||||
function encryptConnection(connection, encryptor = null) {
|
||||
if (connection.passwordMode != 'saveRaw') {
|
||||
connection = encryptObjectPasswordField(connection, 'password', encryptor);
|
||||
connection = encryptObjectPasswordField(connection, 'sshPassword', encryptor);
|
||||
connection = encryptObjectPasswordField(connection, 'sshKeyfilePassword', encryptor);
|
||||
}
|
||||
return connection;
|
||||
}
|
||||
|
||||
@@ -81,12 +116,24 @@ function maskConnection(connection) {
|
||||
}
|
||||
|
||||
function decryptConnection(connection) {
|
||||
connection = decryptPasswordField(connection, 'password');
|
||||
connection = decryptPasswordField(connection, 'sshPassword');
|
||||
connection = decryptPasswordField(connection, 'sshKeyfilePassword');
|
||||
connection = decryptObjectPasswordField(connection, 'password');
|
||||
connection = decryptObjectPasswordField(connection, 'sshPassword');
|
||||
connection = decryptObjectPasswordField(connection, 'sshKeyfilePassword');
|
||||
return connection;
|
||||
}
|
||||
|
||||
function encryptUser(user) {
|
||||
if (user.encryptPassword) {
|
||||
user = encryptObjectPasswordField(user, 'password');
|
||||
}
|
||||
return user;
|
||||
}
|
||||
|
||||
function decryptUser(user) {
|
||||
user = decryptObjectPasswordField(user, 'password');
|
||||
return user;
|
||||
}
|
||||
|
||||
function pickSafeConnectionInfo(connection) {
|
||||
if (process.env.LOG_CONNECTION_SENSITIVE_VALUES) {
|
||||
return connection;
|
||||
@@ -99,10 +146,78 @@ function pickSafeConnectionInfo(connection) {
|
||||
});
|
||||
}
|
||||
|
||||
function setEncryptionKey(encryptionKey) {
  _encryptionKey = encryptionKey;
  _encryptor = null;
  global.ENCRYPTION_KEY = encryptionKey;
}

function getEncryptionKey() {
  return _encryptionKey;
}

function generateTransportEncryptionKey() {
  const encryptor = simpleEncryptor.createEncryptor(defaultEncryptionKey);
  const result = {
    encryptionKey: crypto.randomBytes(32).toString('hex'),
  };
  return encryptor.encrypt(result);
}

function createTransportEncryptor(encryptionData) {
  const encryptor = simpleEncryptor.createEncryptor(defaultEncryptionKey);
  const data = encryptor.decrypt(encryptionData);
  const res = simpleEncryptor.createEncryptor(data['encryptionKey']);
  return res;
}

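For orientation, this is how the two transport-key helpers above are meant to compose: one side wraps a random key under the shared default key, the other side unwraps it and derives the same encryptor. A minimal sketch, assuming the `simpleEncryptor` and `defaultEncryptionKey` values from the surrounding module:

// Sketch only: round-trip of the transport encryption helpers above.
const keyBlob = generateTransportEncryptionKey(); // opaque string, safe to hand to the peer
const transportEncryptor = createTransportEncryptor(keyBlob); // both sides derive the same encryptor
const wire = transportEncryptor.encrypt({ secret: 'value' }); // simple-encryptor accepts objects
console.log(transportEncryptor.decrypt(wire)); // => { secret: 'value' }
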
function recryptObjectPasswordField(obj, field, decryptEncryptor, encryptEncryptor) {
  if (obj && obj[field] && obj[field].startsWith('crypt:')) {
    return {
      ...obj,
      [field]: 'crypt:' + encryptEncryptor.encrypt(decryptEncryptor.decrypt(obj[field].substring('crypt:'.length))),
    };
  }
  return obj;
}

function recryptObjectPasswordFieldInPlace(obj, field, decryptEncryptor, encryptEncryptor) {
  if (obj && obj[field] && obj[field].startsWith('crypt:')) {
    obj[field] = 'crypt:' + encryptEncryptor.encrypt(decryptEncryptor.decrypt(obj[field].substring('crypt:'.length)));
  }
}

function recryptConnection(connection, decryptEncryptor, encryptEncryptor) {
  connection = recryptObjectPasswordField(connection, 'password', decryptEncryptor, encryptEncryptor);
  connection = recryptObjectPasswordField(connection, 'sshPassword', decryptEncryptor, encryptEncryptor);
  connection = recryptObjectPasswordField(connection, 'sshKeyfilePassword', decryptEncryptor, encryptEncryptor);
  return connection;
}

function recryptUser(user, decryptEncryptor, encryptEncryptor) {
  user = recryptObjectPasswordField(user, 'password', decryptEncryptor, encryptEncryptor);
  return user;
}

module.exports = {
  loadEncryptionKey,
  encryptConnection,
  encryptUser,
  decryptUser,
  decryptConnection,
  maskConnection,
  pickSafeConnectionInfo,
  loadEncryptionKeyFromExternal,
  getEncryptionKey,
  setEncryptionKey,
  encryptPasswordString,
  decryptPasswordString,

  getInternalEncryptor,
  recryptConnection,
  recryptUser,
  generateTransportEncryptionKey,
  createTransportEncryptor,
  recryptObjectPasswordField,
  recryptObjectPasswordFieldInPlace,
};

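To show how the exported helpers compose in practice, here is a minimal sketch; the require path and field values are illustrative, not taken from this diff:

// Sketch only: typical round-trips through the exported helpers above.
const { encryptConnection, decryptConnection, encryptPasswordString, decryptPasswordString } = require('./crypting'); // path illustrative
const stored = encryptConnection({ server: 'localhost', password: 'secret' });
// stored.password is now 'crypt:...' unless connection.passwordMode == 'saveRaw'
const usable = decryptConnection(stored); // plaintext restored before opening the connection
const token = encryptPasswordString('secret');
decryptPasswordString(token); // => 'secret'; values without the 'crypt:' prefix pass through unchanged
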
77
packages/api/src/utility/extractSingleFileFromZip.js
Normal file
@@ -0,0 +1,77 @@
const yauzl = require('yauzl');
const fs = require('fs');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('extractSingleFileFromZip');
/**
 * Extracts a single file from a ZIP using yauzl.
 * Stops reading the rest of the archive once the file is found.
 *
 * @param {string} zipPath - Path to the ZIP file on disk.
 * @param {string} fileInZip - The file path *inside* the ZIP to extract.
 * @param {string} outputPath - Where to write the extracted file on disk.
 * @returns {Promise<boolean>} - Resolves true when the file was found and extracted, false when it is not present in the archive.
 */
function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
  return new Promise((resolve, reject) => {
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
      if (err) return reject(err);

      let fileFound = false;

      // Start reading the first entry
      zipFile.readEntry();

      zipFile.on('entry', entry => {
        // Compare the entry name to the file we want
        if (entry.fileName === fileInZip) {
          fileFound = true;

          // Open a read stream for this entry
          zipFile.openReadStream(entry, (err, readStream) => {
            if (err) return reject(err);

            // Create a write stream to outputPath
            const writeStream = fs.createWriteStream(outputPath);
            readStream.pipe(writeStream);

            // When the read stream ends, we can close the zipFile
            readStream.on('end', () => {
              // We won't read further entries
              zipFile.close();
            });

            // When the file is finished writing, resolve
            writeStream.on('finish', () => {
              logger.info(`File "${fileInZip}" extracted to "${outputPath}".`);
              resolve(true);
            });

            // Handle write errors
            writeStream.on('error', writeErr => {
              logger.error(extractErrorLogData(writeErr), `Error extracting "${fileInZip}" from "${zipPath}".`);
              reject(writeErr);
            });
          });
        } else {
          // Not the file we want; skip to the next entry
          zipFile.readEntry();
        }
      });

      // If we reach the end without finding the file
      zipFile.on('end', () => {
        if (!fileFound) {
          resolve(false);
        }
      });

      // Handle general errors
      zipFile.on('error', err => {
        logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
        reject(err);
      });
    });
  });
}

module.exports = extractSingleFileFromZip;
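A small usage sketch of the helper above; the archive path and entry name are illustrative:

// Sketch only: extract one entry, falling back when it is absent.
const extractSingleFileFromZip = require('./extractSingleFileFromZip');

async function readManifest() {
  const found = await extractSingleFileFromZip('/tmp/backup.zip', 'manifest.json', '/tmp/manifest.json'); // paths illustrative
  if (!found) {
    console.log('manifest.json is not present in the archive');
  }
}
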
@@ -22,6 +22,8 @@ const getMapExport = (geoJson) => {
     })
     .addTo(map);

+  leaflet.control.scale().addTo(map);
+
   const geoJsonObj = leaflet
     .geoJSON(${JSON.stringify(geoJson)}, {
       style: function () {

285
packages/api/src/utility/handleQueryStream.js
Normal file
@@ -0,0 +1,285 @@
const crypto = require('crypto');
const path = require('path');
const fs = require('fs');
const _ = require('lodash');

const { jsldir } = require('../utility/directories');
const { serializeJsTypesReplacer } = require('dbgate-tools');
const { ChartProcessor } = require('dbgate-datalib');
const { isProApp } = require('./checkLicense');

class QueryStreamTableWriter {
  constructor(sesid = undefined) {
    this.currentRowCount = 0;
    this.currentChangeIndex = 1;
    this.initializedFile = false;
    this.sesid = sesid;
    if (isProApp()) {
      this.chartProcessor = new ChartProcessor();
    }
  }

  initializeFromQuery(structure, resultIndex, chartDefinition) {
    this.jslid = crypto.randomUUID();
    this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
    fs.writeFileSync(
      this.currentFile,
      JSON.stringify({
        ...structure,
        __isStreamHeader: true,
      }) + '\n'
    );
    this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
    this.writeCurrentStats(false, false);
    this.resultIndex = resultIndex;
    this.initializedFile = true;
    if (isProApp() && chartDefinition) {
      this.chartProcessor = new ChartProcessor([chartDefinition]);
    }
    process.send({ msgtype: 'recordset', jslid: this.jslid, resultIndex, sesid: this.sesid });
  }

  initializeFromReader(jslid) {
    this.jslid = jslid;
    this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
    this.writeCurrentStats(false, false);
  }

  row(row) {
    // console.log('ACCEPT ROW', row);
    this.currentStream.write(JSON.stringify(row, serializeJsTypesReplacer) + '\n');
    try {
      if (this.chartProcessor) {
        this.chartProcessor.addRow(row);
      }
    } catch (e) {
      console.error('Error processing chart row', e);
      this.chartProcessor = null;
    }

    this.currentRowCount += 1;

    if (!this.plannedStats) {
      this.plannedStats = true;
      process.nextTick(() => {
        if (this.currentStream) this.currentStream.uncork();
        process.nextTick(() => this.writeCurrentStats(false, true));
        this.plannedStats = false;
      });
    }
  }

  rowFromReader(row) {
    if (!this.initializedFile) {
      process.send({ msgtype: 'initializeFile', jslid: this.jslid, sesid: this.sesid });
      this.initializedFile = true;

      fs.writeFileSync(this.currentFile, JSON.stringify(row) + '\n');
      this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
      this.writeCurrentStats(false, false);
      this.initializedFile = true;
      return;
    }

    this.row(row);
  }

  writeCurrentStats(isFinished = false, emitEvent = false) {
    const stats = {
      rowCount: this.currentRowCount,
      changeIndex: this.currentChangeIndex,
      isFinished,
      jslid: this.jslid,
    };
    fs.writeFileSync(`${this.currentFile}.stats`, JSON.stringify(stats));
    this.currentChangeIndex += 1;
    if (emitEvent) {
      process.send({ msgtype: 'stats', sesid: this.sesid, ...stats });
    }
  }

  close(afterClose) {
    return new Promise(resolve => {
      if (this.currentStream) {
        this.currentStream.end(() => {
          this.writeCurrentStats(true, true);
          if (afterClose) afterClose();
          if (this.chartProcessor) {
            try {
              this.chartProcessor.finalize();
              if (this.chartProcessor.charts.length > 0) {
                process.send({
                  msgtype: 'charts',
                  sesid: this.sesid,
                  jslid: this.jslid,
                  charts: this.chartProcessor.charts,
                  resultIndex: this.resultIndex,
                });
              }
            } catch (e) {
              console.error('Error finalizing chart processor', e);
              this.chartProcessor = null;
            }
          }
          resolve();
        });
      } else {
        resolve();
      }
    });
  }
}

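// Usage sketch (illustration only, not part of the original file): the writer is
// driven from a forked session process, so each result set lands in its own
// .jsonl file under jsldir() and progress is reported to the parent via process.send.
//   const writer = new QueryStreamTableWriter(sesid);
//   writer.initializeFromQuery({ columns }, resultIndex, chartDefinition);
//   rows.forEach(row => writer.row(row));
//   await writer.close();
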
class StreamHandler {
  constructor(
    queryStreamInfoHolder,
    resolve,
    startLine,
    sesid = undefined,
    limitRows = undefined,
    frontMatter = undefined
  ) {
    this.recordset = this.recordset.bind(this);
    this.startLine = startLine;
    this.sesid = sesid;
    this.frontMatter = frontMatter;
    this.limitRows = limitRows;
    this.rowsLimitOverflow = false;
    this.row = this.row.bind(this);
    // this.error = this.error.bind(this);
    this.done = this.done.bind(this);
    this.info = this.info.bind(this);

    // use this for cancelling - not implemented
    // this.stream = null;

    this.plannedStats = false;
    this.queryStreamInfoHolder = queryStreamInfoHolder;
    this.resolve = resolve;
    this.rowCounter = 0;
    // currentHandlers = [...currentHandlers, this];
  }

  closeCurrentWriter() {
    if (this.currentWriter) {
      this.currentWriter.close();
      this.currentWriter = null;
    }
  }

  recordset(columns) {
    if (this.rowsLimitOverflow) {
      return;
    }
    this.closeCurrentWriter();
    this.currentWriter = new QueryStreamTableWriter(this.sesid);
    this.currentWriter.initializeFromQuery(
      Array.isArray(columns) ? { columns } : columns,
      this.queryStreamInfoHolder.resultIndex,
      this.frontMatter?.[`chart-${this.queryStreamInfoHolder.resultIndex + 1}`]
    );
    this.queryStreamInfoHolder.resultIndex += 1;
    this.rowCounter = 0;

    // this.writeCurrentStats();

    // this.onRow = _.throttle((jslid) => {
    //   if (jslid == this.jslid) {
    //     this.writeCurrentStats(false, true);
    //   }
    // }, 500);
  }
  row(row) {
    if (this.rowsLimitOverflow) {
      return;
    }

    if (this.limitRows && this.rowCounter >= this.limitRows) {
      process.send({
        msgtype: 'info',
        info: { message: `Rows limit overflow, loaded ${this.rowCounter} rows, canceling query`, severity: 'error' },
        sesid: this.sesid,
      });
      this.rowsLimitOverflow = true;

      this.queryStreamInfoHolder.canceled = true;
      if (this.currentWriter) {
        this.currentWriter.close().then(() => {
          process.exit(0);
        });
      } else {
        process.exit(0);
      }

      return;
    }

    if (this.currentWriter) {
      this.currentWriter.row(row);
      this.rowCounter += 1;
    } else if (row.message) {
      process.send({ msgtype: 'info', info: { message: row.message }, sesid: this.sesid });
    }
    // this.onRow(this.jslid);
  }
  // error(error) {
  //   process.send({ msgtype: 'error', error });
  // }
  done(result) {
    this.closeCurrentWriter();
    // currentHandlers = currentHandlers.filter((x) => x != this);
    this.resolve();
  }
  info(info) {
    if (info && info.line != null) {
      info = {
        ...info,
        line: this.startLine + info.line,
      };
    }
    if (info.severity == 'error') {
      this.queryStreamInfoHolder.canceled = true;
    }
    process.send({ msgtype: 'info', info, sesid: this.sesid });
  }
}

function handleQueryStream(
  dbhan,
  driver,
  queryStreamInfoHolder,
  sqlItem,
  sesid = undefined,
  limitRows = undefined,
  frontMatter = undefined
) {
  return new Promise((resolve, reject) => {
    const start = sqlItem.trimStart || sqlItem.start;
    const handler = new StreamHandler(
      queryStreamInfoHolder,
      resolve,
      start && start.line,
      sesid,
      limitRows,
      frontMatter
    );
    driver.stream(dbhan, sqlItem.text, handler);
  });
}

function allowExecuteCustomScript(storedConnection, driver) {
  if (driver.readOnlySessions) {
    return true;
  }
  if (storedConnection.isReadOnly) {
    return false;
    // throw new Error('Connection is read only');
  }
  return true;
}

module.exports = {
  handleQueryStream,
  QueryStreamTableWriter,
  allowExecuteCustomScript,
};
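For context, this is roughly how the stream handler is driven from a session child process; a sketch only, where `dbhan`, `driver`, and `sqlItem` come from the surrounding session code and are assumed here:

// Sketch only: run one SQL item through the streaming pipeline above.
const { handleQueryStream } = require('./handleQueryStream'); // path illustrative

async function runItem(dbhan, driver, sqlItem, sesid) {
  const queryStreamInfoHolder = { resultIndex: 0, canceled: false };
  // Resolves when driver.stream() calls handler.done(); recordsets, rows and
  // stats are reported back to the parent via process.send along the way.
  await handleQueryStream(dbhan, driver, queryStreamInfoHolder, sqlItem, sesid);
  return queryStreamInfoHolder;
}
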
@@ -87,4 +87,5 @@ module.exports = {
   getHardwareFingerprint,
   getHardwareFingerprintHash,
   getPublicHardwareFingerprint,
+  getPublicIpInfo,
 };

@@ -24,4 +24,15 @@ async function getHealthStatus() {
   };
 }

-module.exports = getHealthStatus;
+async function getHealthStatusSprinx() {
+  return {
+    overallStatus: 'OK',
+    timeStamp: new Date().toISOString(),
+    timeStampUnix: Math.floor(Date.now() / 1000),
+  };
+}
+
+module.exports = {
+  getHealthStatus,
+  getHealthStatusSprinx,
+};

41
packages/api/src/utility/listZipEntries.js
Normal file
@@ -0,0 +1,41 @@
const yauzl = require('yauzl');
const path = require('path');

/**
 * Lists the files in a ZIP archive using yauzl,
 * returning an array of { fileName, uncompressedSize } objects.
 *
 * @param {string} zipPath - The path to the ZIP file.
 * @returns {Promise<Array<{fileName: string, uncompressedSize: number}>>}
 */
function listZipEntries(zipPath) {
  return new Promise((resolve, reject) => {
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
      if (err) return reject(err);

      const entries = [];

      // Start reading entries
      zipfile.readEntry();

      // Handle each entry
      zipfile.on('entry', entry => {
        entries.push({
          fileName: entry.fileName,
          uncompressedSize: entry.uncompressedSize,
        });

        // Move on to the next entry (we’re only listing, not reading file data)
        zipfile.readEntry();
      });

      // Finished reading all entries
      zipfile.on('end', () => resolve(entries));

      // Handle errors
      zipfile.on('error', err => reject(err));
    });
  });
}

module.exports = listZipEntries;
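A usage sketch of the helper above; the archive path is illustrative:

// Sketch only: enumerate entries and compute the archive's uncompressed size.
const listZipEntries = require('./listZipEntries');

async function printZipSummary() {
  const entries = await listZipEntries('/tmp/backup.zip'); // path illustrative
  const total = entries.reduce((sum, e) => sum + e.uncompressedSize, 0);
  console.log(`${entries.length} entries, ${total} bytes uncompressed`);
}
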
@@ -17,6 +17,7 @@ const processDisplayName = getNamedArg('--process-display-name');
 const listenApi = process.argv.includes('--listen-api');
 const listenApiChild = process.argv.includes('--listen-api-child') || listenApi;
 const runE2eTests = process.argv.includes('--run-e2e-tests');
+const encryptionKeyArg = getNamedArg('--encryption-key');

 function getPassArgs() {
   const res = [];
@@ -31,6 +32,9 @@ function getPassArgs() {
   if (runE2eTests) {
     res.push('--run-e2e-tests');
   }
+  if (global['ENCRYPTION_KEY']) {
+    res.push('--encryption-key', global['ENCRYPTION_KEY']);
+  }
   return res;
 }

@@ -45,4 +49,5 @@ module.exports = {
   listenApiChild,
   processDisplayName,
   runE2eTests,
+  encryptionKeyArg,
 };

@@ -57,10 +57,21 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
       }
     });
     subprocess.on('exit', code => {
-      logger.info('SSH forward process exited');
+      logger.info(`SSH forward process exited with code ${code}`);
       delete sshTunnelCache[tunnelCacheKey];
       if (!promiseHandled) {
-        reject(new Error('SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'));
+        reject(
+          new Error(
+            'SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'
+          )
+        );
       }
     });
+    subprocess.on('error', error => {
+      logger.error(extractErrorLogData(error), 'SSH forward process error');
+      delete sshTunnelCache[tunnelCacheKey];
+      if (!promiseHandled) {
+        reject(error);
+      }
+    });
   });

@@ -6,6 +6,7 @@
   "scripts": {
     "build": "tsc",
     "test": "jest",
+    "test:charts": "jest -t \"Chart processor\"",
     "test:ci": "jest --json --outputFile=result.json --testLocationInResults",
     "start": "tsc --watch"
   },
@@ -13,16 +14,17 @@
     "lib"
   ],
   "dependencies": {
+    "date-fns": "^4.1.0",
+    "dbgate-filterparser": "^6.0.0-alpha.1",
     "dbgate-sqltree": "^6.0.0-alpha.1",
     "dbgate-tools": "^6.0.0-alpha.1",
-    "dbgate-filterparser": "^6.0.0-alpha.1",
     "uuid": "^3.4.0"
   },
   "devDependencies": {
+    "dbgate-types": "^6.0.0-alpha.1",
     "@types/node": "^13.7.0",
-    "dbgate-types": "^6.0.0-alpha.1",
     "jest": "^28.1.3",
     "ts-jest": "^28.0.7",
     "typescript": "^4.4.3"
   }
 }

@@ -572,6 +572,27 @@ export function changeSetInsertDocuments(
   };
 }

+export function createMergedRowsChangeSet(
+  table: TableInfo,
+  updatedRows: any[],
+  insertedRows: any[],
+  mergeKey: string[]
+): ChangeSet {
+  const res = createChangeSet();
+  res.updates = updatedRows.map(row => ({
+    pureName: table.pureName,
+    schemaName: table.schemaName,
+    fields: _.omit(row, mergeKey),
+    condition: _.pick(row, mergeKey),
+  }));
+  res.inserts = insertedRows.map(row => ({
+    pureName: table.pureName,
+    schemaName: table.schemaName,
+    fields: row,
+  }));
+  return res;
+}
+
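// Usage sketch (illustration only, not part of the original source): split rows
// keyed by a hypothetical `id` column into updates (existing) and inserts (new).
//   const changeSet = createMergedRowsChangeSet(
//     tableInfo,
//     [{ id: 1, name: 'renamed' }], // updated rows, matched by 'id'
//     [{ name: 'brand new' }],      // inserted rows
//     ['id']
//   );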
 export function changeSetContainsChanges(changeSet: ChangeSet) {
   if (!changeSet) return false;
   return (

@@ -1,326 +0,0 @@
import {
  createAsyncWriteStream,
  extractErrorLogData,
  getLogger,
  runCommandOnDriver,
  runQueryOnDriver,
} from 'dbgate-tools';
import { DatabaseInfo, EngineDriver, ForeignKeyInfo, TableInfo } from 'dbgate-types';
import _pick from 'lodash/pick';
import _omit from 'lodash/omit';

const logger = getLogger('dataDuplicator');

export interface DataDuplicatorItem {
  openStream: () => Promise<ReadableStream>;
  name: string;
  operation: 'copy' | 'lookup' | 'insertMissing';
  matchColumns: string[];
}

export interface DataDuplicatorOptions {
  rollbackAfterFinish?: boolean;
  skipRowsWithUnresolvedRefs?: boolean;
  setNullForUnresolvedNullableRefs?: boolean;
}

class DuplicatorReference {
  constructor(
    public base: DuplicatorItemHolder,
    public ref: DuplicatorItemHolder,
    public isMandatory: boolean,
    public foreignKey: ForeignKeyInfo
  ) {}

  get columnName() {
    return this.foreignKey.columns[0].columnName;
  }
}

class DuplicatorWeakReference {
  constructor(public base: DuplicatorItemHolder, public ref: TableInfo, public foreignKey: ForeignKeyInfo) {}

  get columnName() {
    return this.foreignKey.columns[0].columnName;
  }
}

class DuplicatorItemHolder {
  references: DuplicatorReference[] = [];
  backReferences: DuplicatorReference[] = [];
  // not mandatory references to entities out of the model
  weakReferences: DuplicatorWeakReference[] = [];
  table: TableInfo;
  isPlanned = false;
  idMap = {};
  autoColumn: string;
  refByColumn: { [columnName: string]: DuplicatorReference } = {};
  isReferenced: boolean;

  get name() {
    return this.item.name;
  }

  constructor(public item: DataDuplicatorItem, public duplicator: DataDuplicator) {
    this.table = duplicator.db.tables.find(x => x.pureName.toUpperCase() == item.name.toUpperCase());
    this.autoColumn = this.table.columns.find(x => x.autoIncrement)?.columnName;
    if (
      this.table.primaryKey?.columns?.length != 1 ||
      this.table.primaryKey?.columns?.[0]?.columnName != this.autoColumn
    ) {
      this.autoColumn = null;
    }
  }

  initializeReferences() {
    for (const fk of this.table.foreignKeys) {
      if (fk.columns?.length != 1) continue;
      const refHolder = this.duplicator.itemHolders.find(y => y.name.toUpperCase() == fk.refTableName.toUpperCase());
      const isMandatory = this.table.columns.find(x => x.columnName == fk.columns[0]?.columnName)?.notNull;
      if (refHolder == null) {
        if (!isMandatory) {
          const weakref = new DuplicatorWeakReference(
            this,
            this.duplicator.db.tables.find(x => x.pureName == fk.refTableName),
            fk
          );
          this.weakReferences.push(weakref);
        }
      } else {
        const newref = new DuplicatorReference(this, refHolder, isMandatory, fk);
        this.references.push(newref);
        this.refByColumn[newref.columnName] = newref;

        refHolder.isReferenced = true;
      }
    }
  }

  createInsertObject(chunk, weakrefcols: string[]) {
    const res = _omit(
      _pick(
        chunk,
        this.table.columns.map(x => x.columnName)
      ),
      [this.autoColumn, ...this.backReferences.map(x => x.columnName), ...weakrefcols]
    );

    for (const key in res) {
      const ref = this.refByColumn[key];
      if (ref) {
        // remap id
        res[key] = ref.ref.idMap[res[key]];
        if (ref.isMandatory && res[key] == null) {
          // mandatory refertence not matched
          if (this.duplicator.options.skipRowsWithUnresolvedRefs) {
            return null;
          }
          throw new Error(`Unresolved reference, base=${ref.base.name}, ref=${ref.ref.name}, ${key}=${chunk[key]}`);
        }
      }
    }

    return res;
  }

  // returns list of columns that are weak references and are not resolved
  async getMissingWeakRefsForRow(row): Promise<string[]> {
    if (!this.duplicator.options.setNullForUnresolvedNullableRefs || !this.weakReferences?.length) {
      return [];
    }

    const qres = await runQueryOnDriver(this.duplicator.pool, this.duplicator.driver, dmp => {
      dmp.put('^select ');
      dmp.putCollection(',', this.weakReferences, weakref => {
        dmp.put(
          '(^case ^when ^exists (^select * ^from %f where %i = %v) ^then 1 ^else 0 ^end) as %i',
          weakref.ref,
          weakref.foreignKey.columns[0].refColumnName,
          row[weakref.foreignKey.columns[0].columnName],
          weakref.foreignKey.columns[0].columnName
        );
      });
      if (this.duplicator.driver.dialect.requireFromDual) {
        dmp.put(' ^from ^dual');
      }
    });
    const qrow = qres.rows[0];
    return this.weakReferences.filter(x => qrow[x.columnName] == 0).map(x => x.columnName);
  }

  async runImport() {
    const readStream = await this.item.openStream();
    const driver = this.duplicator.driver;
    const pool = this.duplicator.pool;
    let inserted = 0;
    let mapped = 0;
    let missing = 0;
    let skipped = 0;
    let lastLogged = new Date();

    const existingWeakRefs = {};

    const writeStream = createAsyncWriteStream(this.duplicator.stream, {
      processItem: async chunk => {
        if (chunk.__isStreamHeader) {
          return;
        }

        const doCopy = async () => {
          // console.log('chunk', this.name, JSON.stringify(chunk));
          const weakrefcols = await this.getMissingWeakRefsForRow(chunk);
          const insertedObj = this.createInsertObject(chunk, weakrefcols);
          // console.log('insertedObj', this.name, JSON.stringify(insertedObj));
          if (insertedObj == null) {
            skipped += 1;
            return;
          }
          let res = await runQueryOnDriver(pool, driver, dmp => {
            dmp.put(
              '^insert ^into %f (%,i) ^values (%,v)',
              this.table,
              Object.keys(insertedObj),
              Object.values(insertedObj)
            );

            if (
              this.autoColumn &&
              this.isReferenced &&
              !this.duplicator.driver.dialect.requireStandaloneSelectForScopeIdentity
            ) {
              dmp.selectScopeIdentity(this.table);
            }
          });
          inserted += 1;
          if (this.autoColumn && this.isReferenced) {
            if (this.duplicator.driver.dialect.requireStandaloneSelectForScopeIdentity) {
              res = await runQueryOnDriver(pool, driver, dmp => dmp.selectScopeIdentity(this.table));
            }
            // console.log('IDRES', JSON.stringify(res));
            // console.log('*********** ENTRIES OF', res?.rows?.[0]);
            const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
            if (resId != null) {
              this.idMap[chunk[this.autoColumn]] = resId;
            }
          }
        };

        switch (this.item.operation) {
          case 'copy': {
            await doCopy();
            break;
          }
          case 'insertMissing':
          case 'lookup': {
            const res = await runQueryOnDriver(pool, driver, dmp =>
              dmp.put(
                '^select %i ^from %f ^where %i = %v',
                this.autoColumn,
                this.table,
                this.item.matchColumns[0],
                chunk[this.item.matchColumns[0]]
              )
            );
            const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
            if (resId != null) {
              mapped += 1;
              this.idMap[chunk[this.autoColumn]] = resId;
            } else if (this.item.operation == 'insertMissing') {
              await doCopy();
            } else {
              missing += 1;
            }
            break;
          }
        }

        if (new Date().getTime() - lastLogged.getTime() > 5000) {
          logger.info(
            `Duplicating ${this.item.name} in progress, inserted ${inserted} rows, mapped ${mapped} rows, missing ${missing} rows, skipped ${skipped} rows`
          );
          lastLogged = new Date();
        }
        // this.idMap[oldId] = newId;
      },
    });

    await this.duplicator.copyStream(readStream, writeStream);

    // await this.duplicator.driver.writeQueryStream(this.duplicator.pool, {
    //   mapResultId: (oldId, newId) => {
    //     this.idMap[oldId] = newId;
    //   },
    // });

    return { inserted, mapped, missing, skipped };
  }
}

export class DataDuplicator {
  itemHolders: DuplicatorItemHolder[];
  itemPlan: DuplicatorItemHolder[] = [];

  constructor(
    public pool: any,
    public driver: EngineDriver,
    public db: DatabaseInfo,
    public items: DataDuplicatorItem[],
    public stream,
    public copyStream: (input, output) => Promise<void>,
    public options: DataDuplicatorOptions = {}
  ) {
    this.itemHolders = items.map(x => new DuplicatorItemHolder(x, this));
    this.itemHolders.forEach(x => x.initializeReferences());
  }

  findItemToPlan(): DuplicatorItemHolder {
    for (const item of this.itemHolders) {
      if (item.isPlanned) continue;
      if (item.references.every(x => x.ref.isPlanned)) {
        return item;
      }
    }
    for (const item of this.itemHolders) {
      if (item.isPlanned) continue;
      if (item.references.every(x => x.ref.isPlanned || !x.isMandatory)) {
        const backReferences = item.references.filter(x => !x.ref.isPlanned);
        item.backReferences = backReferences;
        return item;
      }
    }
    throw new Error('Cycle in mandatory references');
  }

  createPlan() {
    while (this.itemPlan.length < this.itemHolders.length) {
      const item = this.findItemToPlan();
      item.isPlanned = true;
      this.itemPlan.push(item);
    }
  }

  async run() {
    this.createPlan();

    await runCommandOnDriver(this.pool, this.driver, dmp => dmp.beginTransaction());
    try {
      for (const item of this.itemPlan) {
        const stats = await item.runImport();
        logger.info(
          `Duplicated ${item.name}, inserted ${stats.inserted} rows, mapped ${stats.mapped} rows, missing ${stats.missing} rows, skipped ${stats.skipped} rows`
        );
      }
    } catch (err) {
      logger.error(extractErrorLogData(err), `Failed duplicator job, rollbacking. ${err.message}`);
      await runCommandOnDriver(this.pool, this.driver, dmp => dmp.rollbackTransaction());
      return;
    }
    if (this.options.rollbackAfterFinish) {
      logger.info('Rollbacking transaction, nothing was changed');
      await runCommandOnDriver(this.pool, this.driver, dmp => dmp.rollbackTransaction());
    } else {
      logger.info('Committing duplicator transaction');
      await runCommandOnDriver(this.pool, this.driver, dmp => dmp.commitTransaction());
    }
  }
}
510
packages/datalib/src/DataReplicator.ts
Normal file
@@ -0,0 +1,510 @@
import {
  createAsyncWriteStream,
  extractErrorLogData,
  getLogger,
  isTypeNumber,
  runCommandOnDriver,
  runQueryOnDriver,
  SqlDumper,
} from 'dbgate-tools';
import { DatabaseInfo, EngineDriver, ForeignKeyInfo, NamedObjectInfo, QueryResult, TableInfo } from 'dbgate-types';
import _pick from 'lodash/pick';
import _omit from 'lodash/omit';
import stableStringify from 'json-stable-stringify';

const logger = getLogger('dataReplicator');

export interface DataReplicatorItem {
  openStream: () => Promise<ReadableStream>;
  name: string;
  findExisting: (row: any) => boolean;
  createNew: (row: any) => boolean;
  updateExisting: (row: any) => boolean;
  deleteMissing: boolean;
  deleteRestrictionColumns: string[];
  matchColumns: string[];
}

export interface DataReplicatorOptions {
  rollbackAfterFinish?: boolean;
  skipRowsWithUnresolvedRefs?: boolean;
  setNullForUnresolvedNullableRefs?: boolean;
  generateSqlScript?: boolean;
  runid?: string;
}

class ReplicatorReference {
  constructor(
    public base: ReplicatorItemHolder,
    public ref: ReplicatorItemHolder,
    public isMandatory: boolean,
    public foreignKey: ForeignKeyInfo
  ) {}

  get columnName() {
    return this.foreignKey.columns[0].columnName;
  }
}

class ReplicatorWeakReference {
  constructor(public base: ReplicatorItemHolder, public ref: TableInfo, public foreignKey: ForeignKeyInfo) {}

  get columnName() {
    return this.foreignKey.columns[0].columnName;
  }
}

class ReplicatorItemHolder {
  references: ReplicatorReference[] = [];
  backReferences: ReplicatorReference[] = [];
  // not mandatory references to entities out of the model
  weakReferences: ReplicatorWeakReference[] = [];
  table: TableInfo;
  isPlanned = false;
  idMap = {};
  autoColumn: string;
  isManualAutoColumn: boolean;
  refByColumn: { [columnName: string]: ReplicatorReference } = {};
  isReferenced: boolean;

  get name() {
    return this.item.name;
  }

  constructor(public item: DataReplicatorItem, public replicator: DataReplicator) {
    this.table = replicator.db.tables.find(x => x.pureName.toUpperCase() == item.name.toUpperCase());
    this.autoColumn = this.table.columns.find(x => x.autoIncrement)?.columnName;
    if (
      this.table.primaryKey?.columns?.length != 1 ||
      this.table.primaryKey?.columns?.[0]?.columnName != this.autoColumn
    ) {
      this.autoColumn = null;
    }
    if (!this.autoColumn && this.table.primaryKey?.columns?.length == 1) {
      const name = this.table.primaryKey.columns[0].columnName;
      const column = this.table.columns.find(x => x.columnName == name);
      if (isTypeNumber(column?.dataType)) {
        this.autoColumn = name;
        this.isManualAutoColumn = true;
      }
    }
    if (this.autoColumn && this.replicator.options.generateSqlScript) {
      this.isManualAutoColumn = true;
    }
  }

  initializeReferences() {
    for (const fk of this.table.foreignKeys) {
      if (fk.columns?.length != 1) continue;
      const refHolder = this.replicator.itemHolders.find(y => y.name.toUpperCase() == fk.refTableName.toUpperCase());
      const isMandatory = this.table.columns.find(x => x.columnName == fk.columns[0]?.columnName)?.notNull;
      if (refHolder == null) {
        if (!isMandatory) {
          const weakref = new ReplicatorWeakReference(
            this,
            this.replicator.db.tables.find(x => x.pureName == fk.refTableName),
            fk
          );
          this.weakReferences.push(weakref);
        }
      } else {
        const newref = new ReplicatorReference(this, refHolder, isMandatory, fk);
        this.references.push(newref);
        this.refByColumn[newref.columnName] = newref;

        refHolder.isReferenced = true;
      }
    }
  }

  createInsertObject(chunk, weakrefcols?: string[]) {
    const res = _omit(
      _pick(
        chunk,
        this.table.columns.map(x => x.columnName)
      ),
      [this.autoColumn, ...this.backReferences.map(x => x.columnName), ...(weakrefcols ? weakrefcols : [])]
    );

    for (const key in res) {
      const ref = this.refByColumn[key];
      if (ref) {
        // remap id
        const oldId = res[key];
        res[key] = ref.ref.idMap[oldId];
        if (ref.isMandatory && res[key] == null) {
          // mandatory reference not matched
          if (this.replicator.options.skipRowsWithUnresolvedRefs) {
            return null;
          }
          throw new Error(`Unresolved reference, base=${ref.base.name}, ref=${ref.ref.name}, ${key}=${chunk[key]}`);
        }
      }
    }

    return res;
  }

  createUpdateObject(chunk) {
    const res = _omit(
      _pick(
        chunk,
        this.table.columns.map(x => x.columnName)
      ),
      [this.autoColumn, ...this.backReferences.map(x => x.columnName), ...this.references.map(x => x.columnName)]
    );

    return res;
  }

  // returns list of columns that are weak references and are not resolved
  async getMissingWeakRefsForRow(row): Promise<string[]> {
    if (!this.replicator.options.setNullForUnresolvedNullableRefs || !this.weakReferences?.length) {
      return [];
    }

    const qres = await runQueryOnDriver(this.replicator.pool, this.replicator.driver, dmp => {
      dmp.put('^select ');
      dmp.putCollection(',', this.weakReferences, weakref => {
        dmp.put(
          '(^case ^when ^exists (^select * ^from %f where %i = %v) ^then 1 ^else 0 ^end) as %i',
          weakref.ref,
          weakref.foreignKey.columns[0].refColumnName,
          row[weakref.foreignKey.columns[0].columnName],
          weakref.foreignKey.columns[0].columnName
        );
      });
      if (this.replicator.driver.dialect.requireFromDual) {
        dmp.put(' ^from ^dual');
      }
    });
    const qrow = qres.rows[0];
    return this.weakReferences.filter(x => qrow[x.columnName] == 0).map(x => x.columnName);
  }

  async runImport() {
    const readStream = await this.item.openStream();
    const driver = this.replicator.driver;
    const pool = this.replicator.pool;
    let inserted = 0;
    let mapped = 0;
    let updated = 0;
    let deleted = 0;
    let missing = 0;
    let skipped = 0;
    let lastLogged = new Date();

    const { deleteMissing, deleteRestrictionColumns } = this.item;
    const deleteRestrictions = {};
    const usedKeyRows = {};

    const writeStream = createAsyncWriteStream(this.replicator.stream, {
      processItem: async chunk => {
        if (chunk.__isStreamHeader) {
          return;
        }

        const doFind = async () => {
          let insertedObj = this.createInsertObject(chunk);

          const res = await runQueryOnDriver(pool, driver, dmp => {
            dmp.put('^select %i ^from %f ^where ', this.autoColumn, this.table);
            dmp.putCollection(' and ', this.item.matchColumns, x => {
              dmp.put('%i = %v', x, insertedObj[x]);
            });
          });
          const resId = Object.entries(res?.rows?.[0] || {})?.[0]?.[1];
          if (resId != null) {
            mapped += 1;
            this.idMap[chunk[this.autoColumn]] = resId;
          }
          return resId;
        };

        const doUpdate = async recordId => {
          const updateObj = this.createUpdateObject(chunk);
          if (Object.keys(updateObj).length == 0) {
            skipped += 1;
            return;
          }

          await this.replicator.runDumperCommand(dmp => {
            dmp.put('^update %f ^ set ', this.table);
            dmp.putCollection(',', Object.keys(updateObj), x => {
              dmp.put('%i = %v', x, updateObj[x]);
            });
            dmp.put(' ^where %i = %v', this.autoColumn, recordId);
            dmp.endCommand();
          });
          updated += 1;
        };

        const doInsert = async () => {
          // console.log('chunk', this.name, JSON.stringify(chunk));
          const weakrefcols = await this.getMissingWeakRefsForRow(chunk);
          let insertedObj = this.createInsertObject(chunk, weakrefcols);
          // console.log('insertedObj', this.name, JSON.stringify(insertedObj));
          if (insertedObj == null) {
            skipped += 1;
            return;
          }

          if (this.isManualAutoColumn) {
            const maxId = await this.replicator.generateIdentityValue(this.autoColumn, this.table);
            insertedObj = {
              ...insertedObj,
              [this.autoColumn]: maxId,
            };
            this.idMap[chunk[this.autoColumn]] = maxId;
          }

          let res = await this.replicator.runDumperQuery(dmp => {
            dmp.put(
              '^insert ^into %f (%,i) ^values (%,v)',
              this.table,
              Object.keys(insertedObj),
              Object.values(insertedObj)
            );
            dmp.endCommand();

            if (
              this.autoColumn &&
              this.isReferenced &&
              !this.replicator.driver.dialect.requireStandaloneSelectForScopeIdentity &&
              !this.isManualAutoColumn
            ) {
              dmp.selectScopeIdentity(this.table);
            }
          });
          inserted += 1;
          if (this.autoColumn && this.isReferenced && !this.isManualAutoColumn) {
            if (this.replicator.driver.dialect.requireStandaloneSelectForScopeIdentity) {
              res = await runQueryOnDriver(pool, driver, dmp => dmp.selectScopeIdentity(this.table));
            }
            // console.log('IDRES', JSON.stringify(res));
            // console.log('*********** ENTRIES OF', res?.rows?.[0]);
            const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
            if (resId != null) {
              this.idMap[chunk[this.autoColumn]] = resId;
            }
            return resId;
          }
        };

        const doMarkDelete = () => {
          const insertedObj = this.createInsertObject(chunk);
          if (deleteRestrictionColumns?.length > 0) {
            const restriction = _pick(insertedObj, deleteRestrictionColumns);
            const key = stableStringify(restriction);
            deleteRestrictions[key] = restriction;
          }

          const usedKey = _pick(insertedObj, this.item.matchColumns);
          usedKeyRows[stableStringify(usedKey)] = usedKey;
        };

        const findExisting = this.item.findExisting(chunk);
        const updateExisting = this.item.updateExisting(chunk);
        const createNew = this.item.createNew(chunk);

        if (deleteMissing) {
          doMarkDelete();
        }

        let recordId = null;
        if (findExisting) {
          recordId = await doFind();
        }

        if (updateExisting && recordId != null) {
          await doUpdate(recordId);
        }

        if (createNew && recordId == null) {
          recordId = await doInsert();
        }

        if (recordId == null && findExisting) {
          missing += 1;
        }

        if (new Date().getTime() - lastLogged.getTime() > 5000) {
          logger.info(
            `Replicating ${this.item.name} in progress, inserted ${inserted} rows, mapped ${mapped} rows, missing ${missing} rows, skipped ${skipped} rows, updated ${updated} rows`
          );
          lastLogged = new Date();
        }
        // this.idMap[oldId] = newId;
      },
    });

    const dumpConditionArray = (dmp: SqlDumper, array: any[], positive: boolean) => {
      dmp.putCollection(positive ? ' or ' : ' and ', array, x => {
        dmp.put('(');
        dmp.putCollection(positive ? ' and ' : ' or ', Object.keys(x), y => {
          dmp.put(positive ? '%i = %v' : 'not (%i = %v)', y, x[y]);
        });
        dmp.put(')');
      });
    };
    const dumpDeleteCondition = (dmp: SqlDumper) => {
      const deleteRestrictionValues = Object.values(deleteRestrictions);
      const usedKeyRowsValues = Object.values(usedKeyRows);

      if (deleteRestrictionValues.length == 0 && usedKeyRowsValues.length == 0) {
        return;
      }

      dmp.put(' ^where ');
      if (deleteRestrictionColumns?.length > 0) {
        dmp.put('(');
        dumpConditionArray(dmp, deleteRestrictionValues, true);
        dmp.put(')');
        if (usedKeyRowsValues.length > 0) {
          dmp.put(' ^and ');
        }
      }
      dumpConditionArray(dmp, Object.values(usedKeyRows), false);
    };
    const doDelete = async () => {
      const countRes = await runQueryOnDriver(pool, driver, dmp => {
        dmp.put('^select count(*) as ~cnt ^from %f', this.table);
        dumpDeleteCondition(dmp);
        dmp.endCommand();
      });
      const count = parseInt(countRes.rows[0].cnt);
      if (count > 0) {
        await this.replicator.runDumperCommand(dmp => {
          dmp.put('^delete ^from %f', this.table);
          dumpDeleteCondition(dmp);
          dmp.endCommand();
        });
        deleted += count;
      }
    };

    await this.replicator.copyStream(readStream, writeStream, {});

    if (deleteMissing) {
      await doDelete();
    }

    // await this.replicator.driver.writeQueryStream(this.replicator.pool, {
    //   mapResultId: (oldId, newId) => {
    //     this.idMap[oldId] = newId;
    //   },
    // });

    return { inserted, mapped, missing, skipped, updated, deleted };
  }
}

export class DataReplicator {
  itemHolders: ReplicatorItemHolder[];
  itemPlan: ReplicatorItemHolder[] = [];
  result: string = '';
  dumper: SqlDumper;
  identityValues: { [fullTableName: string]: number } = {};

  constructor(
    public pool: any,
    public driver: EngineDriver,
    public db: DatabaseInfo,
    public items: DataReplicatorItem[],
    public stream,
    public copyStream: (input, output, options) => Promise<void>,
    public options: DataReplicatorOptions = {}
  ) {
    this.itemHolders = items.map(x => new ReplicatorItemHolder(x, this));
    this.itemHolders.forEach(x => x.initializeReferences());
    // @ts-ignore
    this.dumper = driver.createDumper();
  }

  findItemToPlan(): ReplicatorItemHolder {
    for (const item of this.itemHolders) {
      if (item.isPlanned) continue;
      if (item.references.every(x => x.ref.isPlanned)) {
        return item;
      }
    }
    for (const item of this.itemHolders) {
      if (item.isPlanned) continue;
      if (item.references.every(x => x.ref.isPlanned || !x.isMandatory)) {
        const backReferences = item.references.filter(x => !x.ref.isPlanned);
        item.backReferences = backReferences;
        return item;
      }
    }
    throw new Error('Cycle in mandatory references');
  }

  createPlan() {
    while (this.itemPlan.length < this.itemHolders.length) {
      const item = this.findItemToPlan();
      item.isPlanned = true;
      this.itemPlan.push(item);
    }
  }

  async runDumperCommand(cmd: (dmp: SqlDumper) => void | string): Promise<void> {
    if (this.options.generateSqlScript) {
      cmd(this.dumper);
    } else {
      await runCommandOnDriver(this.pool, this.driver, cmd);
    }
  }

  async runDumperQuery(cmd: (dmp: SqlDumper) => void | string): Promise<QueryResult> {
    if (this.options.generateSqlScript) {
      cmd(this.dumper);
      return {
        rows: [],
      };
    } else {
      return await runQueryOnDriver(this.pool, this.driver, cmd);
    }
  }

  async generateIdentityValue(column: string, table: NamedObjectInfo): Promise<number> {
    const tableKey = `${table.schemaName}.${table.pureName}`;
    if (!(tableKey in this.identityValues)) {
      const max = await runQueryOnDriver(this.pool, this.driver, dmp => {
        dmp.put('^select max(%i) as ~maxid ^from %f', column, table);
      });
      const maxId = Math.max(max.rows[0]['maxid'] ?? 0, 0) + 1;
      this.identityValues[tableKey] = maxId;
      return maxId;
    }

    this.identityValues[tableKey] += 1;
    return this.identityValues[tableKey];
  }

  async run() {
    this.createPlan();

    await this.runDumperCommand(dmp => dmp.beginTransaction());
    try {
      for (const item of this.itemPlan) {
        const stats = await item.runImport();
        logger.info(
          `Replicated ${item.name}, inserted ${stats.inserted} rows, mapped ${stats.mapped} rows, missing ${stats.missing} rows, skipped ${stats.skipped} rows, updated ${stats.updated} rows, deleted ${stats.deleted} rows`
        );
      }
    } catch (err) {
      logger.error(extractErrorLogData(err), `Failed replicator job, rollbacking. ${err.message}`);
      await this.runDumperCommand(dmp => dmp.rollbackTransaction());
      return;
    }
    if (this.options.rollbackAfterFinish) {
      logger.info('Rollbacking transaction, nothing was changed');
      await this.runDumperCommand(dmp => dmp.rollbackTransaction());
    } else {
      logger.info('Committing replicator transaction');
      await this.runDumperCommand(dmp => dmp.commitTransaction());
    }

    this.result = this.dumper.s;
  }
}
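To show how the pieces fit, here is a rough construction of a replicator run. A sketch only: the pool, driver, database model, streams, and per-item callbacks all come from the caller and are stand-ins here, and `openUsersJsonlStream` is a hypothetical helper.

// Sketch only: replicate one table, updating matches on 'email' and inserting the rest.
const replicator = new DataReplicator(
  pool,                                       // driver connection handle (stand-in)
  driver,                                     // EngineDriver (stand-in)
  databaseInfo,                               // analysed DatabaseInfo (stand-in)
  [
    {
      name: 'users',
      openStream: () => openUsersJsonlStream(), // hypothetical helper
      matchColumns: ['email'],
      findExisting: () => true,
      updateExisting: () => true,
      createNew: () => true,
      deleteMissing: false,
      deleteRestrictionColumns: [],
    },
  ],
  stream,                                     // stream module (stand-in)
  copyStreamFn,                               // (input, output, options) => Promise (stand-in)
  { generateSqlScript: true }                 // collect SQL into replicator.result instead of executing
);
await replicator.run();
console.log(replicator.result);               // generated SQL script
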
88
packages/datalib/src/chartDefinitions.ts
Normal file
@@ -0,0 +1,88 @@
export type ChartTypeEnum = 'bar' | 'line' | 'pie' | 'polarArea';
export type ChartXTransformFunction =
  | 'identity'
  | 'date:minute'
  | 'date:hour'
  | 'date:day'
  | 'date:month'
  | 'date:year';
export type ChartYAggregateFunction = 'sum' | 'first' | 'last' | 'min' | 'max' | 'count' | 'avg';
export type ChartDataLabelFormatter = 'number' | 'size:bytes' | 'size:kb' | 'size:mb' | 'size:gb';

export const ChartConstDefaults = {
  sortOrder: ' asc',
  windowAlign: 'end',
  windowSize: 100,
  parentAggregateLimit: 200,
};

export const ChartLimits = {
  AUTODETECT_CHART_LIMIT: 10, // limit for auto-detecting charts, to avoid too many charts
  AUTODETECT_MEASURES_LIMIT: 10, // limit for auto-detecting measures, to avoid too many measures
  APPLY_LIMIT_AFTER_ROWS: 100,
  MAX_DISTINCT_VALUES: 10, // max number of distinct values to keep in topDistinctValues
  VALID_VALUE_RATIO_LIMIT: 0.5, // limit for valid value ratio, y defs below this will not be used in auto-detect
  PIE_RATIO_LIMIT: 0.05, // limit for other values in pie chart, if the value is below this, it will be grouped into "Other"
  PIE_COUNT_LIMIT: 10, // limit for number of pie chart slices, if the number of slices is above this, it will be grouped into "Other"
};

export interface ChartXFieldDefinition {
  field: string;
  title?: string;
  transformFunction: ChartXTransformFunction;
  sortOrder?: 'natural' | 'ascKeys' | 'descKeys' | 'ascValues' | 'descValues';
  windowAlign?: 'start' | 'end';
  windowSize?: number;
  parentAggregateLimit?: number;
}

export interface ChartYFieldDefinition {
  field: string;
  title?: string;
  aggregateFunction: ChartYAggregateFunction;
}

export interface ChartDefinition {
  chartType: ChartTypeEnum;
  title?: string;
  pieRatioLimit?: number; // limit for pie chart, if the value is below this, it will be grouped into "Other"
  pieCountLimit?: number; // limit for number of pie chart slices, if the number of slices is above this, it will be grouped into "Other"

  xdef: ChartXFieldDefinition;
  ydefs: ChartYFieldDefinition[];

  useDataLabels?: boolean;
  dataLabelFormatter?: ChartDataLabelFormatter;
}

export interface ChartDateParsed {
  year: number;
  month?: number;
  day?: number;
  hour?: number;
  minute?: number;
  second?: number;
  fraction?: string;
}

export interface ChartAvailableColumn {
  field: string;
}

export interface ProcessedChart {
  minX?: string;
  maxX?: string;
  rowsAdded: number;
  buckets: { [key: string]: any }; // key is the bucket key, value is aggregated data
  bucketKeysOrdered: string[];
  bucketKeyDateParsed: { [key: string]: ChartDateParsed }; // key is the bucket key, value is parsed date
  isGivenDefinition: boolean; // true if the chart was created with a given definition, false if it was created from raw data
  invalidXRows: number;
  invalidYRows: { [key: string]: number }; // key is the y field, value is the count of invalid rows
  validYRows: { [key: string]: number }; // key is the field, value is the count of valid rows

  topDistinctValues: { [key: string]: Set<any> }; // key is the field, value is the set of distinct values
  availableColumns: ChartAvailableColumn[];

  definition: ChartDefinition;
}
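An example instance, to make the shape of these definitions concrete; the field names are illustrative, not taken from the diff:

// Sketch only: a daily-sum line chart over hypothetical 'created' / 'amount' columns.
// The object satisfies the ChartDefinition interface above.
const exampleDefinition = {
  chartType: 'line',
  title: 'Orders per day',
  xdef: { field: 'created', transformFunction: 'date:day', sortOrder: 'ascKeys' },
  ydefs: [{ field: 'amount', aggregateFunction: 'sum' }],
  useDataLabels: false,
};
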
374
packages/datalib/src/chartProcessor.ts
Normal file
374
packages/datalib/src/chartProcessor.ts
Normal file
@@ -0,0 +1,374 @@
|
||||
import {
|
||||
ChartAvailableColumn,
|
||||
ChartDateParsed,
|
||||
ChartDefinition,
|
||||
ChartLimits,
|
||||
ProcessedChart,
|
||||
} from './chartDefinitions';
|
||||
import _sortBy from 'lodash/sortBy';
|
||||
import _sum from 'lodash/sum';
|
||||
import {
|
||||
aggregateChartNumericValuesFromSource,
|
||||
autoAggregateCompactTimelineChart,
|
||||
computeChartBucketCardinality,
|
||||
computeChartBucketKey,
|
||||
fillChartTimelineBuckets,
|
||||
tryParseChartDate,
|
||||
} from './chartTools';
|
||||
import { getChartScore, getChartYFieldScore } from './chartScoring';
|
||||
|
||||
export class ChartProcessor {
|
||||
chartsProcessing: ProcessedChart[] = [];
|
||||
charts: ProcessedChart[] = [];
|
||||
availableColumnsDict: { [field: string]: ChartAvailableColumn } = {};
|
||||
availableColumns: ChartAvailableColumn[] = [];
|
||||
autoDetectCharts = false;
|
||||
rowsAdded = 0;
|
||||
|
||||
constructor(public givenDefinitions: ChartDefinition[] = []) {
|
||||
for (const definition of givenDefinitions) {
|
||||
this.chartsProcessing.push({
|
||||
definition,
|
||||
rowsAdded: 0,
|
||||
bucketKeysOrdered: [],
|
||||
buckets: {},
|
||||
bucketKeyDateParsed: {},
|
||||
isGivenDefinition: true,
|
||||
invalidXRows: 0,
|
||||
invalidYRows: {},
|
||||
availableColumns: [],
|
||||
validYRows: {},
|
||||
topDistinctValues: {},
|
||||
});
|
||||
}
|
||||
this.autoDetectCharts = this.givenDefinitions.length == 0;
|
||||
}
|
||||
|
||||
// findOrCreateChart(definition: ChartDefinition, isGivenDefinition: boolean): ProcessedChart {
|
||||
// const signatureItems = [
|
||||
// definition.chartType,
|
||||
// definition.xdef.field,
|
||||
// definition.xdef.transformFunction,
|
||||
// definition.ydefs.map(y => y.field).join(','),
|
||||
// ];
|
||||
// const signature = signatureItems.join('::');
|
||||
|
||||
// if (this.chartsBySignature[signature]) {
|
||||
// return this.chartsBySignature[signature];
|
||||
// }
|
||||
// const chart: ProcessedChart = {
|
||||
// definition,
|
||||
// rowsAdded: 0,
|
||||
// bucketKeysOrdered: [],
|
||||
// buckets: {},
|
||||
// bucketKeyDateParsed: {},
|
||||
// isGivenDefinition,
|
||||
// };
|
||||
// this.chartsBySignature[signature] = chart;
|
||||
// return chart;
|
||||
// }
|
||||
|
||||
addRow(row: any) {
|
||||
const dateColumns: { [key: string]: ChartDateParsed } = {};
|
||||
const numericColumns: { [key: string]: number } = {};
|
||||
const numericColumnsForAutodetect: { [key: string]: number } = {};
|
||||
const stringColumns: { [key: string]: string } = {};
|
||||
|
||||
for (const [key, value] of Object.entries(row)) {
|
||||
const number: number = typeof value == 'string' ? Number(value) : typeof value == 'number' ? value : NaN;
|
||||
this.availableColumnsDict[key] = {
|
||||
field: key,
|
||||
};
|
||||
|
||||
const keyLower = key.toLowerCase();
|
||||
const keyIsId = keyLower.endsWith('_id') || keyLower == 'id' || key.endsWith('Id');
|
||||
|
||||
const parsedDate = tryParseChartDate(value);
|
||||
if (parsedDate) {
|
||||
dateColumns[key] = parsedDate;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!isNaN(number) && isFinite(number)) {
|
||||
numericColumns[key] = number;
|
||||
if (!keyIsId) {
|
||||
numericColumnsForAutodetect[key] = number; // for auto-detecting charts
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (typeof value === 'string' && isNaN(number) && value.length < 100) {
|
||||
stringColumns[key] = value;
|
||||
}
|
||||
}

    // const sortedNumericColumnns = Object.keys(numericColumns).sort();

    if (this.autoDetectCharts) {
      // create charts from data, if there are no given definitions
      for (const datecol in dateColumns) {
        let usedChart = this.chartsProcessing.find(
          chart =>
            !chart.isGivenDefinition &&
            chart.definition.xdef.field === datecol &&
            chart.definition.xdef.transformFunction?.startsWith('date:')
        );

        if (
          !usedChart &&
          (this.rowsAdded < ChartLimits.APPLY_LIMIT_AFTER_ROWS ||
            this.chartsProcessing.length < ChartLimits.AUTODETECT_CHART_LIMIT)
        ) {
          usedChart = {
            definition: {
              chartType: 'line',
              xdef: {
                field: datecol,
                transformFunction: 'date:day',
              },
              ydefs: [],
            },
            rowsAdded: 0,
            bucketKeysOrdered: [],
            buckets: {},
            bucketKeyDateParsed: {},
            isGivenDefinition: false,
            invalidXRows: 0,
            invalidYRows: {},
            availableColumns: [],
            validYRows: {},
            topDistinctValues: {},
          };
          this.chartsProcessing.push(usedChart);
        }

        if (!usedChart) {
          continue; // chart limit reached and no existing chart for this date column
        }

        for (const [key, value] of Object.entries(row)) {
          if (value == null) continue;
          if (key == datecol) continue; // skip the date column itself
          let existingYDef = usedChart.definition.ydefs.find(y => y.field === key);
          if (
            !existingYDef &&
            (this.rowsAdded < ChartLimits.APPLY_LIMIT_AFTER_ROWS ||
              usedChart.definition.ydefs.length < ChartLimits.AUTODETECT_MEASURES_LIMIT)
          ) {
            existingYDef = {
              field: key,
              aggregateFunction: 'sum',
            };
            usedChart.definition.ydefs.push(existingYDef);
          }
        }
      }
    }
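
    // An auto-detected chart created above has a definition like (illustrative
    // field names):
    //   { chartType: 'line',
    //     xdef: { field: 'created', transformFunction: 'date:day' },
    //     ydefs: [{ field: 'amount', aggregateFunction: 'sum' }] }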

    // apply the row to all charts, given and auto-detected
    for (const chart of this.chartsProcessing) {
      this.applyRawData(
        chart,
        row,
        dateColumns[chart.definition.xdef?.field],
        chart.isGivenDefinition ? numericColumns : numericColumnsForAutodetect,
        stringColumns
      );
    }

    for (let i = 0; i < this.chartsProcessing.length; i++) {
      this.chartsProcessing[i] = autoAggregateCompactTimelineChart(this.chartsProcessing[i]);
    }

    this.rowsAdded += 1;
    if (this.rowsAdded == ChartLimits.APPLY_LIMIT_AFTER_ROWS) {
      this.applyLimitsOnCharts();
    }
  }

  applyLimitsOnCharts() {
    const autodetectProcessingCharts = this.chartsProcessing.filter(chart => !chart.isGivenDefinition);
    if (autodetectProcessingCharts.length > ChartLimits.AUTODETECT_CHART_LIMIT) {
      // keep only the best-scored auto-detected charts
      const newAutodetectProcessingCharts = _sortBy(autodetectProcessingCharts, chart => -getChartScore(chart)).slice(
        0,
        ChartLimits.AUTODETECT_CHART_LIMIT
      );

      // within each chart, keep only the best-scored measures
      for (const chart of autodetectProcessingCharts) {
        chart.definition.ydefs = _sortBy(chart.definition.ydefs, yfield => -getChartYFieldScore(chart, yfield)).slice(
          0,
          ChartLimits.AUTODETECT_MEASURES_LIMIT
        );
      }

      this.chartsProcessing = [
        ...this.chartsProcessing.filter(chart => chart.isGivenDefinition),
        ...newAutodetectProcessingCharts,
      ];
    }
  }
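
  // Example (illustrative): with AUTODETECT_CHART_LIMIT = 3 and five
  // auto-detected charts scoring [5, 9, 2, 7, 4], the charts scoring 9, 7
  // and 5 survive; given-definition charts are never pruned.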

  addRows(...rows: any[]) {
    for (const row of rows) {
      this.addRow(row);
    }
  }

  finalize() {
    this.applyLimitsOnCharts();
    this.availableColumns = Object.values(this.availableColumnsDict);
    for (const chart of this.chartsProcessing) {
      let addedChart: ProcessedChart = chart;
      if (chart.rowsAdded == 0) {
        continue; // skip empty charts
      }
      const sortOrder = chart.definition.xdef.sortOrder ?? 'ascKeys';
      if (sortOrder != 'natural') {
        if (sortOrder == 'ascKeys' || sortOrder == 'descKeys') {
          if (chart.definition.xdef.transformFunction?.startsWith('date:')) {
            addedChart = autoAggregateCompactTimelineChart(addedChart);
            fillChartTimelineBuckets(addedChart);
          }

          addedChart.bucketKeysOrdered = _sortBy(Object.keys(addedChart.buckets));
          if (sortOrder == 'descKeys') {
            addedChart.bucketKeysOrdered.reverse();
          }
        }

        if (sortOrder == 'ascValues' || sortOrder == 'descValues') {
          addedChart.bucketKeysOrdered = _sortBy(Object.keys(addedChart.buckets), key =>
            computeChartBucketCardinality(addedChart.buckets[key])
          );
          if (sortOrder == 'descValues') {
            addedChart.bucketKeysOrdered.reverse();
          }
        }
      }

      if (!addedChart.isGivenDefinition) {
        // drop auto-detected measures with too many invalid or missing values
        addedChart = {
          ...addedChart,
          definition: {
            ...addedChart.definition,
            ydefs: addedChart.definition.ydefs.filter(
              y =>
                !addedChart.invalidYRows[y.field] &&
                addedChart.validYRows[y.field] / addedChart.rowsAdded >= ChartLimits.VALID_VALUE_RATIO_LIMIT
            ),
          },
        };
      }

      if (addedChart) {
        addedChart.availableColumns = this.availableColumns;
        this.charts.push(addedChart);
        this.groupPieOtherBuckets(addedChart);
      }
    }

    // given definitions first, then auto-detected charts by descending score
    this.charts = [
      ...this.charts.filter(x => x.isGivenDefinition),
      ..._sortBy(
        this.charts.filter(x => !x.isGivenDefinition),
        chart => -getChartScore(chart)
      ),
    ];
  }

  groupPieOtherBuckets(chart: ProcessedChart) {
    if (chart.definition.chartType !== 'pie') {
      return; // only for pie charts
    }
    const ratioLimit = chart.definition.pieRatioLimit ?? ChartLimits.PIE_RATIO_LIMIT;
    const countLimit = chart.definition.pieCountLimit ?? ChartLimits.PIE_COUNT_LIMIT;
    if (ratioLimit == 0 && countLimit == 0) {
      return; // no grouping if both limits are 0
    }
    const otherBucket: any = {};
    let newBuckets: any = {};
    const cardSum = _sum(Object.values(chart.buckets).map(bucket => computeChartBucketCardinality(bucket)));

    if (cardSum == 0) {
      return; // no buckets to process
    }

    // fold buckets below the ratio limit into the 'Other' bucket
    for (const [bucketKey, bucket] of Object.entries(chart.buckets)) {
      if (computeChartBucketCardinality(bucket) / cardSum < ratioLimit) {
        for (const field in bucket) {
          otherBucket[field] = (otherBucket[field] ?? 0) + bucket[field];
        }
      } else {
        newBuckets[bucketKey] = bucket;
      }
    }

    // keep only the countLimit largest buckets, fold the rest into 'Other'
    if (Object.keys(newBuckets).length > countLimit) {
      const sortedBucketKeys = _sortBy(
        Object.entries(newBuckets),
        ([, bucket]) => -computeChartBucketCardinality(bucket)
      ).map(([key]) => key);
      const newBuckets2: any = {};
      sortedBucketKeys.forEach((key, index) => {
        if (index < countLimit) {
          newBuckets2[key] = newBuckets[key];
        } else {
          for (const field in newBuckets[key]) {
            otherBucket[field] = (otherBucket[field] ?? 0) + newBuckets[key][field];
          }
        }
      });
      newBuckets = newBuckets2;
    }

    if (Object.keys(otherBucket).length > 0) {
      newBuckets['Other'] = otherBucket;
    }
    chart.buckets = newBuckets;
    chart.bucketKeysOrdered = [...chart.bucketKeysOrdered, 'Other'].filter(key => key in newBuckets);
  }
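
  // Example (illustrative, treating each bucket's cardinality as one number):
  // with ratioLimit = 0.1, countLimit = 2 and bucket cardinalities
  // { A: 60, B: 30, C: 6, D: 4 } (cardSum = 100), C and D fall below the 10%
  // ratio limit and are folded into 'Other'; A and B fit within the count
  // limit, so the result is { A: 60, B: 30, Other: 10 }.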

  applyRawData(
    chart: ProcessedChart,
    row: any,
    dateParsed: ChartDateParsed,
    numericColumns: { [key: string]: number },
    stringColumns: { [key: string]: string }
  ) {
    if (chart.definition.xdef == null) {
      return;
    }

    if (row[chart.definition.xdef.field] == null) {
      return;
    }

    if (dateParsed == null && chart.definition.xdef.transformFunction?.startsWith('date:')) {
      chart.invalidXRows += 1;
      return; // skip if date is invalid
    }

    const [bucketKey, bucketKeyParsed] = computeChartBucketKey(dateParsed, chart, row);

    if (!bucketKey) {
      return; // skip if no bucket key
    }

    if (bucketKeyParsed) {
      chart.bucketKeyDateParsed[bucketKey] = bucketKeyParsed;
    }

    // track the x-axis range
    if (chart.minX == null || bucketKey < chart.minX) {
      chart.minX = bucketKey;
    }
    if (chart.maxX == null || bucketKey > chart.maxX) {
      chart.maxX = bucketKey;
    }

    if (!chart.buckets[bucketKey]) {
      chart.buckets[bucketKey] = {};
      if (chart.definition.xdef.sortOrder == 'natural') {
        chart.bucketKeysOrdered.push(bucketKey);
      }
    }

    aggregateChartNumericValuesFromSource(chart, bucketKey, numericColumns, row);
    chart.rowsAdded += 1;
  }
}
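
// Usage sketch (illustrative only): the class name and constructor signature
// below are assumptions; this fragment does not show the class declaration.
//
// const processor = new ChartProcessor(/* givenDefinitions: */ []);
// processor.addRows(
//   { created: '2025-01-01', amount: 10 },
//   { created: '2025-01-02', amount: 25 }
// );
// processor.finalize();
// // processor.charts now holds given-definition charts first, followed by
// // auto-detected charts ordered by descending getChartScore.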