Compare commits
376 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| a467bbcadc | |||
| 1ad0ce8603 | |||
| cde972ac2b | |||
| 2d9a24d8a0 | |||
| 983d7909c1 | |||
| 98f1dcfa23 | |||
| 5ebeb25005 | |||
| 8774aee502 | |||
| d300c82fe5 | |||
| 8018177f24 | |||
| c64875b2e0 | |||
| f9ada72057 | |||
| 42bd1ea256 | |||
| dfa875159f | |||
| 2697e8b26d | |||
| 6dcf358870 | |||
| f48230205e | |||
| 8086b56170 | |||
| abc26278b3 | |||
| 558df69411 | |||
| 18e2f3a7a1 | |||
| ccbac05853 | |||
| f86402f192 | |||
| 05046f9df5 | |||
| 7bf9fb22e8 | |||
| 4803c98460 | |||
| ed25785772 | |||
| bf85a922ca | |||
| 00525f6b81 | |||
| 6dd27eb34f | |||
| 0b30386fee | |||
| 1f7f0ea8a2 | |||
| 0ae0cee766 | |||
| 4cbc7f3ae5 | |||
| f1cd0ab689 | |||
| 7f367a1f84 | |||
| 5405b9bf72 | |||
| 75f75d95a6 | |||
| ae5c539e31 | |||
| e7797cedc1 | |||
| f3c3ddd73a | |||
| c201f06103 | |||
| 111a3a678f | |||
| 51c4964003 | |||
| 5fac064a48 | |||
| e84f45ae39 | |||
| 7e119d40a4 | |||
| 22b47d1066 | |||
| ac0ea6a937 | |||
| ed7bfe0c21 | |||
| 55cac14ecf | |||
| 691635c5d1 | |||
| 9209a2d7d3 | |||
| 05b044d965 | |||
| e231a3a41a | |||
| 216614a4b1 | |||
| ae19d14951 | |||
| 7f639361b8 | |||
| ba706b85d3 | |||
| 3b91d921e8 | |||
| 660555d664 | |||
| 3550710f23 | |||
| 1429c29537 | |||
| 95113490f1 | |||
| 1eafdb944a | |||
| a32cd0b2ae | |||
| 249fbf3f96 | |||
| 64877af64a | |||
| ff94e46179 | |||
| 921bd4613a | |||
| 6f4a49ea97 | |||
| 9029fccad4 | |||
| edf3a072c5 | |||
| 0fde8c49a7 | |||
| 767c835a8e | |||
| 38f0223dc0 | |||
| ec707b5af3 | |||
| 462d5e3187 | |||
| 5cc4c07941 | |||
| 20de78f88a | |||
| a63d70ca7e | |||
| 672d6b88b2 | |||
| add1612c92 | |||
| 96b964e609 | |||
| 6b40190097 | |||
| f5b0bc5605 | |||
| 64a58252e5 | |||
| 46571684f6 | |||
| 710022539b | |||
| bc75d559b0 | |||
| a82ee5cc65 | |||
| 6647dd16f8 | |||
| 6f4e1e07f7 | |||
| 5cd951a9c1 | |||
| f492a215b4 | |||
| bbd2d74a28 | |||
| 2b697e21ba | |||
| 3a4e4ecbdc | |||
| 05cbb915d6 | |||
| 26a2bb75fa | |||
| 71b0bb78ec | |||
| 4e4eb39a19 | |||
| b6399c8271 | |||
| eb4a764407 | |||
| 27188eb2c5 | |||
| 57a997adc3 | |||
| a72a03cc3a | |||
| bb185d9e9f | |||
| 0298660714 | |||
| 8bf1dbb10d | |||
| 8e5ef98a7c | |||
| 72bd536aec | |||
| 1a4009a6b2 | |||
| e40357c052 | |||
| 222ea07cf2 | |||
| 6b3f398de3 | |||
| d796fa7ff4 | |||
| 6fdfd8717f | |||
| 81cea4c0f2 | |||
| 6a64633650 | |||
| 21702f1593 | |||
| 32ddb9c4c7 | |||
| 10fc62ceb7 | |||
| d3018a3136 | |||
| f9bcbd588b | |||
| 5c7d2bfd85 | |||
| 788f0ebf77 | |||
| 0eca5dd95d | |||
| 47322b0bbb | |||
| 518a05a6f0 | |||
| 352e426e17 | |||
| 666122f265 | |||
| 61e32f6d95 | |||
| 7aabc8f0be | |||
| a66dc03b99 | |||
| cf5ecb3150 | |||
| 46c365c5cd | |||
| ea76751e4a | |||
| 0bef3f8e71 | |||
| c26c9fae12 | |||
| 446c615bb8 | |||
| c516873541 | |||
| d4326de087 | |||
| eca966bb90 | |||
| 262b4732e3 | |||
| 7f9a30f568 | |||
| a89d2e1365 | |||
| fcd6f6c8fc | |||
| 8313d7f9f1 | |||
| 3a12601103 | |||
| 926949dc89 | |||
| 6b155083ef | |||
| 2b2ecac3ab | |||
| 35e9ff607d | |||
| ae037834f2 | |||
| 3ac24436ba | |||
| 2ca17e826c | |||
| 4fb6128499 | |||
| c359332746 | |||
| 1cd8e8e376 | |||
| 48ec2bdac8 | |||
| 2283e91532 | |||
| 647894ad60 | |||
| 574573abbb | |||
| a735a03cd7 | |||
| 83881a0dac | |||
| c04c6bbd2c | |||
| 42bbbc7ff4 | |||
| 1ecffeda71 | |||
| 92992d1e95 | |||
| bc9df9750f | |||
| 27e70e8031 | |||
| c823b8d19a | |||
| 170ff77eec | |||
| c241f5c562 | |||
| e06d964de4 | |||
| dfdb86de6f | |||
| a37b74f693 | |||
| 398d9f15df | |||
| ab7c2d7a31 | |||
| 090549ff91 | |||
| 10330c6597 | |||
| da2fe6a891 | |||
| 01b88221c5 | |||
| 46d25710b8 | |||
| a40ec7e66b | |||
| c8d2031d24 | |||
| 4f838e0ae3 | |||
| c7cc1b7611 | |||
| bf67a5f13d | |||
| e6bbe66873 | |||
| 1a930acf0a | |||
| a497467137 | |||
| b463416633 | |||
| ccf6240d65 | |||
| 5ccc12019d | |||
| f57fa9aee9 | |||
| 80e841a43d | |||
| e1d759041d | |||
| fd6df055c0 | |||
| 669d0b9dac | |||
| b9f9501e67 | |||
| 4b1c021871 | |||
| 1f79627dbe | |||
| dc18be07ce | |||
| 83ac45f8cf | |||
| c6cd865663 | |||
| 477636e0d7 | |||
| 77414ba934 | |||
| 86186072ed | |||
| 1216bcf9bf | |||
| 788ea70d32 | |||
| 18de37c4e4 | |||
| aeb81bd97f | |||
| a68660f1ab | |||
| 5abfa85a0e | |||
| 794f43d9ae | |||
| 4b2f762200 | |||
| fc3664571b | |||
| 5db8f11fd6 | |||
| 598674a7e0 | |||
| af17eceb27 | |||
| 90946c582d | |||
| d619e0f961 | |||
| 08311145c8 | |||
| a80e37a208 | |||
| c88114cabe | |||
| e33f3a1492 | |||
| 8328fdad33 | |||
| 8035380e7b | |||
| b4ea528643 | |||
| b0012872fa | |||
| 274fb595a2 | |||
| c7ef4b9231 | |||
| e64cfce423 | |||
| c0c9c7be20 | |||
| 2ae98d0c2d | |||
| a129834c16 | |||
| 71a9d6c5c0 | |||
| 1ce8f6bd1f | |||
| 85c4821606 | |||
| 06ed9d7dfc | |||
| 90de5edc99 | |||
| 72786e5dbb | |||
| d92c08548b | |||
| b1893234c7 | |||
| 534deff274 | |||
| bdeffea79c | |||
| ab2fdf26d2 | |||
| 0c902f037b | |||
| c0595aec0a | |||
| 6fdb20fc34 | |||
| 8e74031fb1 | |||
| 81e4b947b6 | |||
| c5d23410f4 | |||
| f23575c405 | |||
| 8cb98cf643 | |||
| b09a558670 | |||
| abc3c5f880 | |||
| 1c0d966c3c | |||
| 20d7264aab | |||
| 4cf987b89a | |||
| 5bcd3f807d | |||
| 9e04f2b9c6 | |||
| a7ceb8951c | |||
| 7392b223f4 | |||
| 51ce4f1bb5 | |||
| 059c310aaf | |||
| 11dad8ced3 | |||
| 18ed0fc020 | |||
| eaa1b73851 | |||
| 5c2c24e009 | |||
| 1957531600 | |||
| e47a165e11 | |||
| 3ca1adcb48 | |||
| 66bf1c847a | |||
| 47eea9b9b3 | |||
| 5c5a5f3b53 | |||
| 1c7729a797 | |||
| cd06f13fcb | |||
| 632870d448 | |||
| 762055379a | |||
| 7374749340 | |||
| e379be0107 | |||
| 2784053b83 | |||
| 12c4a0d498 | |||
| 35b5ea138d | |||
| d75e1fc660 | |||
| 3ab6df5da2 | |||
| 8d4fc391a4 | |||
| 35f9fc3741 | |||
| b465f3eb99 | |||
| c4e6a90722 | |||
| 9cc4af2b56 | |||
| 5ccdd7633b | |||
| 880f4403cb | |||
| cba391904a | |||
| 0fc397ace5 | |||
| 56a241b7f4 | |||
| 97eb999e4c | |||
| 9deaa89f21 | |||
| 74bae65e32 | |||
| 7091917578 | |||
| bbf72d9ed7 | |||
| 552d10ef48 | |||
| 7c5479157a | |||
| 2463dba380 | |||
| 2f9209a92d | |||
| 370bd92518 | |||
| ec083924fc | |||
| 22b450f7e0 | |||
| 1dd73e7319 | |||
| c4fe4b40dd | |||
| 251137ac60 | |||
| 0ad7c99274 | |||
| f53142d98a | |||
| 1f868523b0 | |||
| 94db02db2e | |||
| 9f0e06e663 | |||
| c6dab85fc2 | |||
| 59aa2e3f33 | |||
| 21b26773e6 | |||
| f308c5f6b0 | |||
| 2763b6028a | |||
| a3df6d6e7d | |||
| 473bfcbec5 | |||
| 8976c9e653 | |||
| 86795dcc63 | |||
| 9b90f15621 | |||
| 7d0d9d3e22 | |||
| 17f0248a3e | |||
| 25d3dcad59 | |||
| cbd857422f | |||
| e65b4d0c2a | |||
| 6bcebb63e4 | |||
| ac7708138c | |||
| 9d8ec9cc6b | |||
| 1b8a2cb923 | |||
| a97ab9c09e | |||
| 9a73eb3620 | |||
| f50e460335 | |||
| fa72d9a39f | |||
| 75b4f49e31 | |||
| a069093f6b | |||
| 62c741198a | |||
| 0266d912e0 | |||
| 55bc0fc93f | |||
| 47c00d7eb0 | |||
| ad9fac861e | |||
| 14afd08fcb | |||
| 319580554f | |||
| c750bd04ad | |||
| bdd55d8432 | |||
| 98464e414b | |||
| 2f2d9c45a3 | |||
| 3665a0d064 | |||
| c19c69266a | |||
| bafa2c2fff | |||
| 2d823140b9 | |||
| 1fb4a06092 | |||
| cb450a0313 | |||
| 7aaf6bb024 | |||
| 6a02ba3220 | |||
| 83610783e0 | |||
| cec26b0614 | |||
| fbf288198b | |||
| 193940fd63 | |||
| bd169c316a | |||
| 5315f65cfb | |||
| 5a859d81d3 | |||
| 904fc4d500 | |||
| 634fe18127 | |||
| 89a9cc4380 | |||
| 57c62fbe27 | |||
| 590eff1e3b | |||
| 2c0b76fb3f |
@@ -12,7 +12,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [macos-12, windows-2022, ubuntu-22.04]
|
||||
os: [macos-14, windows-2022, ubuntu-22.04]
|
||||
# os: [macOS-10.15]
|
||||
|
||||
steps:
|
||||
@@ -47,9 +47,6 @@ jobs:
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
- name: fillNativeModulesElectron
|
||||
run: |
|
||||
yarn fillNativeModulesElectron
|
||||
- name: fillPackagedPlugins
|
||||
run: |
|
||||
yarn fillPackagedPlugins
|
||||
|
||||
@@ -15,7 +15,7 @@ jobs:
|
||||
# os: [windows-2022]
|
||||
# os: [ubuntu-22.04]
|
||||
# os: [windows-2022, ubuntu-22.04]
|
||||
os: [macos-12, windows-2022, ubuntu-22.04]
|
||||
os: [macos-14, windows-2022, ubuntu-22.04]
|
||||
# os: [macOS-10.15]
|
||||
|
||||
steps:
|
||||
@@ -55,11 +55,11 @@ jobs:
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn adjustPackageJson
|
||||
- name: adjustPackageJsonPremium
|
||||
- name: adjustAppPackageJsonPremium
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
node adjustPackageJsonPremium
|
||||
node adjustAppPackageJsonPremium
|
||||
- name: setUpdaterChannel premium-beta
|
||||
run: |
|
||||
cd ..
|
||||
@@ -87,11 +87,6 @@ jobs:
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
- name: fillNativeModulesElectron
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn fillNativeModulesElectron
|
||||
- name: fillPackagedPlugins
|
||||
run: |
|
||||
cd ..
|
||||
|
||||
@@ -17,7 +17,7 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# os: [ubuntu-22.04, windows-2016]
|
||||
os: [macos-12, windows-2022, ubuntu-22.04]
|
||||
os: [macos-14, windows-2022, ubuntu-22.04]
|
||||
|
||||
steps:
|
||||
- name: Context
|
||||
@@ -56,11 +56,11 @@ jobs:
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn adjustPackageJson
|
||||
- name: yarn adjustPackageJsonPremium
|
||||
- name: adjustAppPackageJsonPremium
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
node adjustPackageJsonPremium
|
||||
node adjustAppPackageJsonPremium
|
||||
- name: setUpdaterChannel premium
|
||||
run: |
|
||||
cd ..
|
||||
@@ -88,11 +88,6 @@ jobs:
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
- name: fillNativeModulesElectron
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn fillNativeModulesElectron
|
||||
- name: fillPackagedPlugins
|
||||
run: |
|
||||
cd ..
|
||||
|
||||
@@ -17,7 +17,7 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# os: [ubuntu-22.04, windows-2016]
|
||||
os: [macos-12, windows-2022, ubuntu-22.04]
|
||||
os: [macos-14, windows-2022, ubuntu-22.04]
|
||||
|
||||
steps:
|
||||
- name: Context
|
||||
@@ -50,9 +50,6 @@ jobs:
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
- name: fillNativeModulesElectron
|
||||
run: |
|
||||
yarn fillNativeModulesElectron
|
||||
- name: fillPackagedPlugins
|
||||
run: |
|
||||
yarn fillPackagedPlugins
|
||||
|
||||
@@ -4,7 +4,7 @@ on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+'
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
|
||||
# - 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-packer-beta.[0-9]+'
|
||||
|
||||
jobs:
|
||||
@@ -54,6 +54,12 @@ jobs:
|
||||
node sync.js --nowatch
|
||||
cd ..
|
||||
|
||||
- name: yarn adjustPackageJson
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn adjustPackageJson
|
||||
|
||||
- name: yarn install
|
||||
run: |
|
||||
cd ..
|
||||
|
||||
@@ -62,6 +62,12 @@ jobs:
|
||||
node sync.js --nowatch
|
||||
cd ..
|
||||
|
||||
- name: yarn adjustPackageJson
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn adjustPackageJson
|
||||
|
||||
- name: yarn install
|
||||
run: |
|
||||
cd ..
|
||||
|
||||
@@ -56,6 +56,11 @@ jobs:
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 18.x
|
||||
|
||||
- name: yarn adjustPackageJson
|
||||
run: |
|
||||
yarn adjustPackageJson
|
||||
|
||||
- name: yarn install
|
||||
run: |
|
||||
# yarn --version
|
||||
|
||||
@@ -0,0 +1,119 @@
|
||||
name: NPM packages PREMIUM
|
||||
|
||||
# on: [push]
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+'
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+'
|
||||
|
||||
# on:
|
||||
# push:
|
||||
# branches:
|
||||
# - production
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-22.04]
|
||||
|
||||
steps:
|
||||
- name: Context
|
||||
env:
|
||||
GITHUB_CONTEXT: ${{ toJson(github) }}
|
||||
run: echo "$GITHUB_CONTEXT"
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
- name: Use Node.js 18.x
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 18.x
|
||||
|
||||
- name: Checkout dbgate/dbgate-pro
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
mv dbgate-pro/* ../dbgate-pro/
|
||||
cd ..
|
||||
mkdir dbgate-merged
|
||||
cd dbgate-pro
|
||||
cd sync
|
||||
yarn
|
||||
node sync.js --nowatch
|
||||
cd ..
|
||||
|
||||
- name: adjustNpmPackageJsonPremium
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
node adjustNpmPackageJsonPremium
|
||||
|
||||
- name: Configure NPM token
|
||||
env:
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
npm config set '//registry.npmjs.org/:_authToken' "${NPM_TOKEN}"
|
||||
|
||||
- name: Remove dbmodel - should be not published
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
rm -rf packages/dbmodel
|
||||
|
||||
- name: yarn install
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn install
|
||||
|
||||
- name: setCurrentVersion
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn setCurrentVersion
|
||||
|
||||
- name: printSecrets
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
|
||||
- name: Publish dbgate-api-premium
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged/packages/api
|
||||
npm publish
|
||||
|
||||
- name: Publish dbgate-web-premium
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged/packages/web
|
||||
npm publish
|
||||
|
||||
- name: Publish dbgate-serve-premium
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged/packages/serve
|
||||
npm publish
|
||||
|
||||
- name: Publish dbgate-plugin-cosmosdb
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged/plugins/dbgate-plugin-cosmosdb
|
||||
npm publish
|
||||
@@ -90,11 +90,6 @@ jobs:
|
||||
run: |
|
||||
npm publish
|
||||
|
||||
- name: Publish dbgate (obsolete)
|
||||
working-directory: packages/dbgate
|
||||
run: |
|
||||
npm publish
|
||||
|
||||
- name: Publish dbgate-serve
|
||||
working-directory: packages/serve
|
||||
run: |
|
||||
|
||||
@@ -12,16 +12,27 @@ jobs:
|
||||
container: node:18
|
||||
|
||||
steps:
|
||||
- name: Context
|
||||
env:
|
||||
GITHUB_CONTEXT: ${{ toJson(github) }}
|
||||
run: echo "$GITHUB_CONTEXT"
|
||||
- name: Install dependencies for cypress
|
||||
run: |
|
||||
apt-get update
|
||||
apt-get install -y xvfb libgtk2.0-0 libgtk-3-0 libgbm-dev libnotify-dev libnss3 libxss1 libasound2 libxtst6
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
- name: yarn install
|
||||
run: |
|
||||
yarn install
|
||||
- name: Build packer dist for cypress
|
||||
run: |
|
||||
yarn prepare:packer
|
||||
- name: yarn install cypress
|
||||
run: |
|
||||
cd e2e-tests
|
||||
yarn install
|
||||
- name: Run Cypress tests
|
||||
run: |
|
||||
cd e2e-tests
|
||||
yarn test:ci
|
||||
- name: Integration tests
|
||||
run: |
|
||||
cd integration-tests
|
||||
@@ -84,5 +95,10 @@ jobs:
|
||||
env:
|
||||
CLICKHOUSE_ADMIN_PASSWORD: Pwd2020Db
|
||||
|
||||
oracle:
|
||||
image: gvenzl/oracle-xe:21-slim
|
||||
env:
|
||||
ORACLE_PASSWORD: Pwd2020Db
|
||||
|
||||
# cockroachdb:
|
||||
# image: cockroachdb/cockroach
|
||||
|
||||
@@ -28,8 +28,6 @@ docker/plugins
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
app/src/nativeModulesContent.js
|
||||
packages/api/src/nativeModulesContent.js
|
||||
packages/api/src/packagedPluginsContent.js
|
||||
.VSCodeCounter
|
||||
|
||||
|
||||
+28
-2
@@ -8,11 +8,37 @@ Builds:
|
||||
- linux - application for linux
|
||||
- win - application for Windows
|
||||
|
||||
### Not published
|
||||
### 6.0.0
|
||||
- ADDED: Order or filter the indexes for huge tables #922
|
||||
- ADDED: Empty string filters
|
||||
- CHANGED: (Premium) Workflow for new installation (used in Docker and AWS distribution)
|
||||
|
||||
- ADDED: Show stored procedure and function parameters (MySQL, PostgreSQL, SQL Server, MariaDB) #348
|
||||
- FIXED: Selected database has changed when closing database grouped tab #983
|
||||
- ADDED: Add line break option to editor #823
|
||||
- ADDED: Order or filter the indexes for huge tables #922
|
||||
- ADDED: Preview mode for the top bar tab like vscode #767
|
||||
- ADDED: Keyboard navigatioon between connections, databases and tables
|
||||
- FIXED: Fixed some issues in connection search
|
||||
- FIXED: Schema selection in Export does not provide all schemas #924
|
||||
- CHANGED: Standardized Window menu in MacOS app
|
||||
- FIXED: Typecast ::date is treated as a parameter #925
|
||||
- FIXED: App crashes when trying to 'Open Structure' in a readonly connection #926
|
||||
- FIXED: Selected database has changed when closing database grouped tab #938
|
||||
- CHANGED: (Premium) Query designer and Query perspective designer moved to Premium editioin
|
||||
- CHANGED: (Premium) Compare database tool - many improvements, moved to Premium edition
|
||||
- ADDED: (Premium) Export DB model - exporting model to YAML folder, JSON or SQL folder
|
||||
- CHANGED: Model deployer - many improvements, support of rename missing objects
|
||||
- ADDED: (Premium) Premium NPM distribution
|
||||
- CHANGED: (Premium) Amazon Redshift driver moved to Premium edition
|
||||
- ADDED: Generated API documentation https://dbgate.org/docs/apidoc.html
|
||||
- ADDED: NPM distribution now supports all dbgate database connectors, many improvements NPM packages
|
||||
- CHANGED: Optimalized size of NPM plugins (eg. dbgate-plugin-mssql from 1.34 MB to 71 kB)
|
||||
- CHANGED: Unsaved connections are now shown in "Recent and unsaved" folder after disconnect
|
||||
- FIXED: Correctly show focused control, as defined by UX standards
|
||||
- ADDED: Data duplicator - weak references
|
||||
- ADDED: View JSON detail of log messages from export/import jobs and query executions
|
||||
- ADDED: Rename procedure/function context menu
|
||||
- ADDED: Show SQL quick view
|
||||
|
||||
### 5.5.6
|
||||
- FIXED: DbGate process consumes 100% after UI closed - Mac, Linux (#917, #915)
|
||||
|
||||
@@ -17,6 +17,8 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
|
||||
* Try it online - [demo.dbgate.org](https://demo.dbgate.org) - online demo application
|
||||
* **Download** application for Windows, Linux or Mac from [dbgate.org](https://dbgate.org/download/)
|
||||
* Run web version as [NPM package](https://www.npmjs.com/package/dbgate-serve) or as [docker image](https://hub.docker.com/r/dbgate/dbgate)
|
||||
* Use nodeJs [scripting interface](https://dbgate.org/docs/scripting.html) ([API documentation](https://dbgate.org/docs/apidoc.html))
|
||||
* [Recommend DbGate](https://testimonial.to/dbgate) | [Rate on G2](https://www.g2.com/products/dbgate/reviews)
|
||||
|
||||
## Supported databases
|
||||
* MySQL
|
||||
|
||||
+40
-2
@@ -1,12 +1,50 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const volatilePackages = require('./common/volatilePackages');
|
||||
|
||||
function adjustFile(file) {
|
||||
function adjustFile(file, isApp = false) {
|
||||
const json = JSON.parse(fs.readFileSync(file, { encoding: 'utf-8' }));
|
||||
|
||||
function processPackageFile(packageFile) {
|
||||
const pluginJson = JSON.parse(fs.readFileSync(packageFile, { encoding: 'utf-8' }));
|
||||
for (const depkey of ['dependencies', 'optionalDependencies']) {
|
||||
for (const dependency of Object.keys(pluginJson[depkey] || {})) {
|
||||
if (!volatilePackages.includes(dependency)) {
|
||||
// add only voletile packages
|
||||
continue;
|
||||
}
|
||||
if (!json[depkey]) {
|
||||
json[depkey] = {};
|
||||
}
|
||||
if (json[depkey][dependency]) {
|
||||
if (json[depkey][dependency] != pluginJson[depkey][dependency]) {
|
||||
console.log(`Dependency ${dependency} in ${packageName} is different from ${file}`);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
json[depkey][dependency] = pluginJson[depkey][dependency];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const packageName of fs.readdirSync('plugins')) {
|
||||
if (!packageName.startsWith('dbgate-plugin-')) continue;
|
||||
processPackageFile(path.join('plugins', packageName, 'package.json'));
|
||||
}
|
||||
|
||||
if (isApp) {
|
||||
// add volatile dependencies from api to app
|
||||
processPackageFile(path.join('packages', 'api', 'package.json'));
|
||||
}
|
||||
|
||||
if (process.platform != 'win32') {
|
||||
delete json.optionalDependencies.msnodesqlv8;
|
||||
}
|
||||
|
||||
fs.writeFileSync(file, JSON.stringify(json, null, 2), 'utf-8');
|
||||
}
|
||||
|
||||
adjustFile('packages/api/package.json');
|
||||
adjustFile('app/package.json');
|
||||
adjustFile('app/package.json', true);
|
||||
|
||||
fs.writeFileSync('common/useBundleExternals.js', "module.exports = 'true';", 'utf-8');
|
||||
|
||||
+3
-7
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "dbgate",
|
||||
"version": "5.0.0-alpha.1",
|
||||
"version": "6.0.0-alpha.1",
|
||||
"private": true,
|
||||
"author": "Jan Prochazka <jenasoft.database@gmail.com>",
|
||||
"description": "Opensource database administration tool",
|
||||
@@ -109,7 +109,8 @@
|
||||
"files": [
|
||||
"packages",
|
||||
"src",
|
||||
"icon.png"
|
||||
"icon.png",
|
||||
"!node_modules/cpu-features/build/**"
|
||||
]
|
||||
},
|
||||
"homepage": "./",
|
||||
@@ -130,10 +131,5 @@
|
||||
"electron": "30.0.2",
|
||||
"electron-builder": "23.1.0",
|
||||
"electron-builder-notarize": "^1.5.2"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"better-sqlite3": "9.6.0",
|
||||
"msnodesqlv8": "^4.2.1",
|
||||
"oracledb": "^6.6.0"
|
||||
}
|
||||
}
|
||||
|
||||
+1
-2
@@ -108,7 +108,7 @@ function commandItem(item) {
|
||||
}
|
||||
|
||||
function buildMenu() {
|
||||
let template = _cloneDeepWith(mainMenuDefinition({ editMenu: true }), item => {
|
||||
let template = _cloneDeepWith(mainMenuDefinition({ editMenu: true, isMac: isMac() }), item => {
|
||||
if (item.divider) {
|
||||
return { type: 'separator' };
|
||||
}
|
||||
@@ -430,7 +430,6 @@ function createWindow() {
|
||||
);
|
||||
|
||||
global.API_PACKAGE = apiPackage;
|
||||
global.NATIVE_MODULES = path.join(__dirname, 'nativeModules');
|
||||
|
||||
// console.log('global.API_PACKAGE', global.API_PACKAGE);
|
||||
const api = require(apiPackage);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
module.exports = ({ editMenu }) => [
|
||||
module.exports = ({ editMenu, isMac }) => [
|
||||
{
|
||||
label: 'File',
|
||||
submenu: [
|
||||
@@ -24,20 +24,6 @@ module.exports = ({ editMenu }) => [
|
||||
{ command: 'app.disconnect', hideDisabled: true, skipInApp: true },
|
||||
],
|
||||
},
|
||||
{
|
||||
label: 'Window',
|
||||
submenu: [
|
||||
{ command: 'tabs.closeTab', hideDisabled: false },
|
||||
{ command: 'tabs.closeAll', hideDisabled: false },
|
||||
{ command: 'tabs.closeTabsWithCurrentDb', hideDisabled: false },
|
||||
{ command: 'tabs.closeTabsButCurrentDb', hideDisabled: false },
|
||||
{ divider: true },
|
||||
{ command: 'app.zoomIn', hideDisabled: true },
|
||||
{ command: 'app.zoomOut', hideDisabled: true },
|
||||
{ command: 'app.zoomReset', hideDisabled: true },
|
||||
],
|
||||
},
|
||||
|
||||
editMenu
|
||||
? {
|
||||
label: 'Edit',
|
||||
@@ -75,6 +61,15 @@ module.exports = ({ editMenu }) => [
|
||||
{ divider: true },
|
||||
{ command: 'theme.changeTheme', hideDisabled: true },
|
||||
{ command: 'settings.show' },
|
||||
{ divider: true },
|
||||
{ command: 'tabs.closeTab', hideDisabled: false },
|
||||
{ command: 'tabs.closeAll', hideDisabled: false },
|
||||
{ command: 'tabs.closeTabsWithCurrentDb', hideDisabled: false },
|
||||
{ command: 'tabs.closeTabsButCurrentDb', hideDisabled: false },
|
||||
{ divider: true },
|
||||
{ command: 'app.zoomIn', hideDisabled: true },
|
||||
{ command: 'app.zoomOut', hideDisabled: true },
|
||||
{ command: 'app.zoomReset', hideDisabled: true },
|
||||
],
|
||||
},
|
||||
{
|
||||
@@ -94,6 +89,14 @@ module.exports = ({ editMenu }) => [
|
||||
{ command: 'app.resetSettings', hideDisabled: true },
|
||||
],
|
||||
},
|
||||
...(isMac
|
||||
? [
|
||||
{
|
||||
role: 'window',
|
||||
submenu: [{ role: 'minimize' }, { role: 'zoom' }, { type: 'separator' }, { role: 'front' }],
|
||||
},
|
||||
]
|
||||
: []),
|
||||
{
|
||||
label: 'Help',
|
||||
submenu: [
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
const content = require('./nativeModulesContent');
|
||||
|
||||
module.exports = content;
|
||||
@@ -0,0 +1,9 @@
|
||||
|
||||
// this file is generated automatically by script fillNativeModules.js, do not edit it manually
|
||||
const content = {};
|
||||
|
||||
content['better-sqlite3'] = () => require('better-sqlite3');
|
||||
content['oracledb'] = () => require('oracledb');
|
||||
|
||||
|
||||
module.exports = content;
|
||||
@@ -0,0 +1,19 @@
|
||||
const useBundleExternals = require('./useBundleExternals');
|
||||
const getBundleExternals = require('./getBundleExternals');
|
||||
|
||||
function buildExternalsFromDependencies(packageJson) {
|
||||
if (useBundleExternals == 'true') {
|
||||
return getBundleExternals();
|
||||
}
|
||||
const { dependencies, optionalDependencies } = packageJson;
|
||||
const externals = {};
|
||||
for (let dep in dependencies || {}) {
|
||||
externals[dep] = `commonjs ${dep}`;
|
||||
}
|
||||
for (let dep in optionalDependencies || {}) {
|
||||
externals[dep] = `commonjs ${dep}`;
|
||||
}
|
||||
return externals;
|
||||
}
|
||||
|
||||
module.exports = buildExternalsFromDependencies;
|
||||
@@ -0,0 +1,33 @@
|
||||
const directory = process.argv[2];
|
||||
const fs = require('fs');
|
||||
|
||||
const volatilePackages = require('./volatilePackages');
|
||||
const apiPackageJson = JSON.parse(fs.readFileSync(`packages/api/package.json`, { encoding: 'utf-8' }));
|
||||
|
||||
const dependencies = {};
|
||||
const optionalDependencies = {};
|
||||
for (const pkg of volatilePackages) {
|
||||
if (pkg == 'msnodesqlv8' && process.platform != 'win32') {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (apiPackageJson.dependencies[pkg]) {
|
||||
dependencies[pkg] = apiPackageJson.dependencies[pkg];
|
||||
}
|
||||
if (apiPackageJson.optionalDependencies?.[pkg]) {
|
||||
optionalDependencies[pkg] = apiPackageJson.optionalDependencies[pkg];
|
||||
}
|
||||
}
|
||||
|
||||
fs.writeFileSync(
|
||||
`${directory}/package.json`,
|
||||
JSON.stringify(
|
||||
{
|
||||
dependencies,
|
||||
optionalDependencies,
|
||||
},
|
||||
null,
|
||||
2
|
||||
),
|
||||
'utf-8'
|
||||
);
|
||||
@@ -0,0 +1,10 @@
|
||||
const volatilePackages = require('./volatilePackages');
|
||||
|
||||
function getBundleExternals() {
|
||||
return volatilePackages.reduce((acc, item) => {
|
||||
acc[item] = `commonjs ${item}`;
|
||||
return acc;
|
||||
}, {});
|
||||
}
|
||||
|
||||
module.exports = getBundleExternals;
|
||||
@@ -0,0 +1 @@
|
||||
module.exports = 'false';
|
||||
@@ -0,0 +1,24 @@
|
||||
// these packages will be never bundled with webpack
|
||||
|
||||
const volatilePackages = [
|
||||
'@clickhouse/client',
|
||||
'bson', // this package is already bundled and is used in mongodb
|
||||
'mongodb',
|
||||
'mongodb-client-encryption',
|
||||
'tedious',
|
||||
'msnodesqlv8',
|
||||
'mysql2',
|
||||
'oracledb',
|
||||
'pg-copy-streams',
|
||||
'pg',
|
||||
'ioredis',
|
||||
'node-redis-dump2',
|
||||
'better-sqlite3',
|
||||
'@azure/cosmos',
|
||||
'@aws-sdk/rds-signer',
|
||||
'activedirectory2',
|
||||
'axios',
|
||||
'ssh2',
|
||||
];
|
||||
|
||||
module.exports = volatilePackages;
|
||||
@@ -0,0 +1,29 @@
|
||||
const { defineConfig } = require('cypress');
|
||||
const killPort = require('kill-port');
|
||||
const { clearTestingData } = require('./e2eTestTools');
|
||||
const waitOn = require('wait-on');
|
||||
const { exec } = require('child_process');
|
||||
|
||||
module.exports = defineConfig({
|
||||
e2e: {
|
||||
setupNodeEvents(on, config) {
|
||||
// implement node event listeners here
|
||||
|
||||
on('before:spec', async details => {
|
||||
await clearTestingData();
|
||||
|
||||
if (config.isInteractive) {
|
||||
await killPort(3000);
|
||||
serverProcess = exec('yarn start');
|
||||
await waitOn({ resources: ['http://localhost:3000'] });
|
||||
serverProcess.stdout.on('data', data => {
|
||||
console.log(data.toString());
|
||||
});
|
||||
serverProcess.stderr.on('data', data => {
|
||||
console.error(data.toString());
|
||||
});
|
||||
}
|
||||
});
|
||||
},
|
||||
},
|
||||
});
|
||||
@@ -0,0 +1,31 @@
|
||||
describe('Initialization', () => {
|
||||
it('successfully loads', () => {
|
||||
cy.visit('http://localhost:3000');
|
||||
cy.contains('Database not selected');
|
||||
});
|
||||
|
||||
it('adds connection', () => {
|
||||
const runOnCI = Cypress.env('runOnCI');
|
||||
|
||||
cy.visit('http://localhost:3000');
|
||||
// cy.get('[data-testid=ConnectionList_buttonNewConnection]').click();
|
||||
cy.get('[data-testid=ConnectionDriverFields_connectionType]').select('MySQL');
|
||||
cy.get('[data-testid=ConnectionDriverFields_user]').clear().type('root');
|
||||
cy.get('[data-testid=ConnectionDriverFields_password]').clear().type('Pwd2020Db');
|
||||
if (runOnCI) {
|
||||
cy.get('[data-testid=ConnectionDriverFields_server]').clear().type('mysql');
|
||||
} else {
|
||||
cy.get('[data-testid=ConnectionDriverFields_port]').clear().type('16004');
|
||||
}
|
||||
cy.get('[data-testid=ConnectionDriverFields_displayName]').clear().type('test-mysql-1');
|
||||
cy.get('[data-testid=ConnectionTab_buttonSave]').click();
|
||||
cy.get('[data-testid=ConnectionTab_buttonConnect]').click();
|
||||
cy.contains('performance_schema');
|
||||
});
|
||||
|
||||
// it('import chinook DB', () => {
|
||||
// cy.visit('http://localhost:3000');
|
||||
// cy.get('[data-testid=ConnectionTab_buttonConnect]').click();
|
||||
// });
|
||||
|
||||
});
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"name": "Using fixtures to represent data",
|
||||
"email": "hello@cypress.io",
|
||||
"body": "Fixtures are a great way to mock data for responses to routes"
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
// ***********************************************
|
||||
// This example commands.js shows you how to
|
||||
// create various custom commands and overwrite
|
||||
// existing commands.
|
||||
//
|
||||
// For more comprehensive examples of custom
|
||||
// commands please read more here:
|
||||
// https://on.cypress.io/custom-commands
|
||||
// ***********************************************
|
||||
//
|
||||
//
|
||||
// -- This is a parent command --
|
||||
// Cypress.Commands.add('login', (email, password) => { ... })
|
||||
//
|
||||
//
|
||||
// -- This is a child command --
|
||||
// Cypress.Commands.add('drag', { prevSubject: 'element'}, (subject, options) => { ... })
|
||||
//
|
||||
//
|
||||
// -- This is a dual command --
|
||||
// Cypress.Commands.add('dismiss', { prevSubject: 'optional'}, (subject, options) => { ... })
|
||||
//
|
||||
//
|
||||
// -- This will overwrite an existing command --
|
||||
// Cypress.Commands.overwrite('visit', (originalFn, url, options) => { ... })
|
||||
@@ -0,0 +1,20 @@
|
||||
// ***********************************************************
|
||||
// This example support/e2e.js is processed and
|
||||
// loaded automatically before your test files.
|
||||
//
|
||||
// This is a great place to put global configuration and
|
||||
// behavior that modifies Cypress.
|
||||
//
|
||||
// You can change the location of this file or turn off
|
||||
// automatically serving support files with the
|
||||
// 'supportFile' configuration option.
|
||||
//
|
||||
// You can read more here:
|
||||
// https://on.cypress.io/configuration
|
||||
// ***********************************************************
|
||||
|
||||
// Import commands.js using ES2015 syntax:
|
||||
import './commands'
|
||||
|
||||
// Alternatively you can use CommonJS syntax:
|
||||
// require('./commands')
|
||||
@@ -0,0 +1,18 @@
|
||||
version: '3'
|
||||
services:
|
||||
postgres:
|
||||
image: postgres
|
||||
restart: always
|
||||
environment:
|
||||
POSTGRES_PASSWORD: Pwd2020Db
|
||||
ports:
|
||||
- 16000:5432
|
||||
|
||||
mariadb:
|
||||
image: mariadb
|
||||
command: --default-authentication-plugin=mysql_native_password
|
||||
restart: always
|
||||
ports:
|
||||
- 16004:3306
|
||||
environment:
|
||||
- MYSQL_ROOT_PASSWORD=Pwd2020Db
|
||||
@@ -0,0 +1,29 @@
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const fs = require('fs');
|
||||
|
||||
const baseDir = path.join(os.homedir(), '.dbgate');
|
||||
|
||||
// function createTimeStamp() {
|
||||
// const now = new Date();
|
||||
// const year = now.getFullYear();
|
||||
// const month = String(now.getMonth() + 1).padStart(2, '0'); // měsíc je 0-indexovaný
|
||||
// const day = String(now.getDate()).padStart(2, '0');
|
||||
// const hours = String(now.getHours()).padStart(2, '0');
|
||||
// const minutes = String(now.getMinutes()).padStart(2, '0');
|
||||
// const seconds = String(now.getSeconds()).padStart(2, '0');
|
||||
|
||||
// // Poskládáme datum a čas do názvu souboru
|
||||
// const ts = `${year}-${month}-${day}_${hours}-${minutes}-${seconds}`;
|
||||
// return ts;
|
||||
// }
|
||||
|
||||
function clearTestingData() {
|
||||
if (fs.existsSync(path.join(baseDir, 'connections-e2etests.jsonl'))) {
|
||||
fs.unlinkSync(path.join(baseDir, 'connections-e2etests.jsonl'));
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
clearTestingData,
|
||||
};
|
||||
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"name": "e2e-tests",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"license": "GPL",
|
||||
"devDependencies": {
|
||||
"axios": "^1.7.9",
|
||||
"cross-env": "^7.0.3",
|
||||
"cypress": "^13.16.1",
|
||||
"kill-port": "^2.0.1",
|
||||
"start-server-and-test": "^2.0.8"
|
||||
},
|
||||
"scripts": {
|
||||
"cy:open": "cypress open --config experimentalInteractiveRunEvents=true",
|
||||
"cy:run": "cypress run",
|
||||
"cy:run:ci": "cypress run --env runOnCI=true",
|
||||
"start": "cd .. && node packer/build/bundle.js --listen-api --run-e2e-tests",
|
||||
"test:ci": "start-server-and-test start http://localhost:3000 cy:run:ci",
|
||||
"test": "start-server-and-test start http://localhost:3000 cy:run"
|
||||
}
|
||||
}
|
||||
+1363
File diff suppressed because it is too large
Load Diff
@@ -1,24 +0,0 @@
|
||||
const fs = require('fs');
|
||||
|
||||
let fillContent = '';
|
||||
|
||||
if (process.platform == 'win32') {
|
||||
fillContent += `content.msnodesqlv8 = () => require('msnodesqlv8');\n`;
|
||||
}
|
||||
fillContent += `content['better-sqlite3'] = () => require('better-sqlite3');\n`;
|
||||
fillContent += `content['oracledb'] = () => require('oracledb');\n`;
|
||||
|
||||
const getContent = empty => `
|
||||
// this file is generated automatically by script fillNativeModules.js, do not edit it manually
|
||||
const content = {};
|
||||
|
||||
${empty ? '' : fillContent}
|
||||
|
||||
module.exports = content;
|
||||
`;
|
||||
|
||||
fs.writeFileSync(
|
||||
'packages/api/src/nativeModulesContent.js',
|
||||
getContent(process.argv.includes('--electron') ? true : false)
|
||||
);
|
||||
fs.writeFileSync('app/src/nativeModulesContent.js', getContent(false));
|
||||
@@ -3,9 +3,15 @@ const _ = require('lodash');
|
||||
const fp = require('lodash/fp');
|
||||
const { testWrapper } = require('../tools');
|
||||
const engines = require('../engines');
|
||||
const { getAlterDatabaseScript, extendDatabaseInfo, generateDbPairingId } = require('dbgate-tools');
|
||||
const {
|
||||
getAlterDatabaseScript,
|
||||
extendDatabaseInfo,
|
||||
generateDbPairingId,
|
||||
formatQueryWithoutParams,
|
||||
runCommandOnDriver,
|
||||
} = require('dbgate-tools');
|
||||
|
||||
const initSql = ['CREATE TABLE t1 (id int primary key)', 'CREATE TABLE t2 (id int primary key)'];
|
||||
const initSql = ['CREATE TABLE ~t1 (~id int primary key)', 'CREATE TABLE ~t2 (~id int primary key)'];
|
||||
|
||||
function flatSource(engineCond = x => !x.skipReferences) {
|
||||
return _.flatten(
|
||||
@@ -16,13 +22,14 @@ function flatSource(engineCond = x => !x.skipReferences) {
|
||||
}
|
||||
|
||||
async function testDatabaseDiff(conn, driver, mangle, createObject = null) {
|
||||
await driver.query(conn, `create table t1 (id int not null primary key)`);
|
||||
await runCommandOnDriver(conn, driver, `create table ~t1 (~id int not null primary key)`);
|
||||
|
||||
await driver.query(
|
||||
await runCommandOnDriver(
|
||||
conn,
|
||||
`create table t2 (
|
||||
id int not null primary key,
|
||||
t1_id int null references t1(id)
|
||||
driver,
|
||||
`create table ~t2 (
|
||||
~id int not null primary key,
|
||||
~t1_id int null references ~t1(~id)
|
||||
)`
|
||||
);
|
||||
|
||||
@@ -63,7 +70,7 @@ describe('Alter database', () => {
|
||||
db => {
|
||||
_.remove(db[type], x => x.pureName == 'obj1');
|
||||
},
|
||||
object.create1
|
||||
formatQueryWithoutParams(driver, object.create1)
|
||||
);
|
||||
expect(db[type].length).toEqual(0);
|
||||
})
|
||||
@@ -72,9 +79,9 @@ describe('Alter database', () => {
|
||||
test.each(flatSource(x => x.supportRenameSqlObject))(
|
||||
'Rename object - %s - %s',
|
||||
testWrapper(async (conn, driver, type, object, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
|
||||
|
||||
await driver.query(conn, object.create1, { discardResult: true });
|
||||
await runCommandOnDriver(conn, driver, object.create1);
|
||||
|
||||
const structure = extendDatabaseInfo(await driver.analyseFull(conn));
|
||||
|
||||
|
||||
@@ -4,7 +4,12 @@ const fp = require('lodash/fp');
|
||||
const { testWrapper } = require('../tools');
|
||||
const engines = require('../engines');
|
||||
const crypto = require('crypto');
|
||||
const { getAlterTableScript, extendDatabaseInfo, generateDbPairingId } = require('dbgate-tools');
|
||||
const {
|
||||
getAlterTableScript,
|
||||
extendDatabaseInfo,
|
||||
generateDbPairingId,
|
||||
formatQueryWithoutParams,
|
||||
} = require('dbgate-tools');
|
||||
|
||||
function pickImportantTableInfo(engine, table) {
|
||||
const props = ['columnName', 'defaultValue'];
|
||||
@@ -15,7 +20,10 @@ function pickImportantTableInfo(engine, table) {
|
||||
columns: table.columns
|
||||
.filter(x => x.columnName != 'rowid')
|
||||
.map(fp.pick(props))
|
||||
.map(props => _.omitBy(props, x => x == null)),
|
||||
.map(props => _.omitBy(props, x => x == null))
|
||||
.map(props =>
|
||||
_.omitBy(props, (v, k) => k == 'defaultValue' && v == 'NULL' && engine.setNullDefaultInsteadOfDrop)
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -25,27 +33,36 @@ function checkTableStructure(engine, t1, t2) {
|
||||
}
|
||||
|
||||
async function testTableDiff(engine, conn, driver, mangle) {
|
||||
await driver.query(conn, `create table t0 (id int not null primary key)`);
|
||||
await driver.query(conn, formatQueryWithoutParams(driver, `create table ~t0 (~id int not null primary key)`));
|
||||
|
||||
await driver.query(
|
||||
conn,
|
||||
`create table t1 (
|
||||
col_pk int not null primary key,
|
||||
col_std int,
|
||||
col_def int default 12,
|
||||
${engine.skipReferences ? '' : 'col_fk int references t0(id),'}
|
||||
col_idx int,
|
||||
col_uq int ${engine.skipUnique ? '' : 'unique'} ,
|
||||
col_ref int ${engine.skipUnique ? '' : 'unique'}
|
||||
formatQueryWithoutParams(
|
||||
driver,
|
||||
`create table ~t1 (
|
||||
~col_pk int not null primary key,
|
||||
~col_std int,
|
||||
~col_def int default 12,
|
||||
${engine.skipReferences ? '' : '~col_fk int references ~t0(~id),'}
|
||||
~col_idx int,
|
||||
~col_uq int ${engine.skipUnique ? '' : 'unique'} ,
|
||||
~col_ref int ${engine.skipUnique ? '' : 'unique'}
|
||||
)`
|
||||
)
|
||||
);
|
||||
|
||||
if (!engine.skipIndexes) {
|
||||
await driver.query(conn, `create index idx1 on t1(col_idx)`);
|
||||
await driver.query(conn, formatQueryWithoutParams(driver, `create index ~idx1 on ~t1(~col_idx)`));
|
||||
}
|
||||
|
||||
if (!engine.skipReferences) {
|
||||
await driver.query(conn, `create table t2 (id int not null primary key, fkval int null references t1(col_ref))`);
|
||||
await driver.query(
|
||||
conn,
|
||||
formatQueryWithoutParams(
|
||||
driver,
|
||||
`create table ~t2 (~id int not null primary key, ~fkval int null references ~t1(~col_ref))`
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
const tget = x => x.tables.find(y => y.pureName == 't1');
|
||||
@@ -166,4 +183,13 @@ describe('Alter table', () => {
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
// test.each(engines.map(engine => [engine.label, engine]))(
|
||||
// 'Change autoincrement - %s',
|
||||
// testWrapper(async (conn, driver, engine) => {
|
||||
// await testTableDiff(engine, conn, driver, tbl => {
|
||||
// tbl.columns.find(x => x.columnName == 'col_pk').autoIncrement = true;
|
||||
// });
|
||||
// })
|
||||
// );
|
||||
});
|
||||
|
||||
@@ -2,7 +2,7 @@ const engines = require('../engines');
|
||||
const stream = require('stream');
|
||||
const { testWrapper } = require('../tools');
|
||||
const dataDuplicator = require('dbgate-api/src/shell/dataDuplicator');
|
||||
const { runCommandOnDriver } = require('dbgate-tools');
|
||||
const { runCommandOnDriver, runQueryOnDriver } = require('dbgate-tools');
|
||||
|
||||
describe('Data duplicator', () => {
|
||||
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
|
||||
@@ -84,11 +84,77 @@ describe('Data duplicator', () => {
|
||||
],
|
||||
});
|
||||
|
||||
const res1 = await driver.query(conn, `select count(*) as cnt from t1`);
|
||||
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
|
||||
expect(res1.rows[0].cnt.toString()).toEqual('6');
|
||||
|
||||
const res2 = await driver.query(conn, `select count(*) as cnt from t2`);
|
||||
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
|
||||
expect(res2.rows[0].cnt.toString()).toEqual('6');
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
|
||||
'Skip nullable weak refs - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't1',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't2',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
{ columnName: 'valfk', dataType: 'int', notNull: false },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp => dmp.put("insert into ~t1 (~id, ~val) values (1, 'first')"));
|
||||
|
||||
const gett2 = () =>
|
||||
stream.Readable.from([
|
||||
{ __isStreamHeader: true, __isDynamicStructure: true },
|
||||
{ id: 1, val: 'v1', valfk: 1 },
|
||||
{ id: 2, val: 'v2', valfk: 2 },
|
||||
]);
|
||||
|
||||
await dataDuplicator({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 't2',
|
||||
operation: 'copy',
|
||||
openStream: gett2,
|
||||
},
|
||||
],
|
||||
options: {
|
||||
setNullForUnresolvedNullableRefs: true,
|
||||
},
|
||||
});
|
||||
|
||||
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
|
||||
expect(res1.rows[0].cnt.toString()).toEqual('1');
|
||||
|
||||
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
|
||||
expect(res2.rows[0].cnt.toString()).toEqual('2');
|
||||
|
||||
const res3 = await runQueryOnDriver(conn, driver, dmp =>
|
||||
dmp.put(`select count(*) as ~cnt from ~t2 where ~valfk is not null`)
|
||||
);
|
||||
expect(res3.rows[0].cnt.toString()).toEqual('1');
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
@@ -5,6 +5,7 @@ const tableWriter = require('dbgate-api/src/shell/tableWriter');
|
||||
const copyStream = require('dbgate-api/src/shell/copyStream');
|
||||
const importDatabase = require('dbgate-api/src/shell/importDatabase');
|
||||
const fakeObjectReader = require('dbgate-api/src/shell/fakeObjectReader');
|
||||
const { runQueryOnDriver } = require('dbgate-tools');
|
||||
|
||||
function createImportStream() {
|
||||
const pass = new stream.PassThrough({
|
||||
@@ -37,7 +38,7 @@ describe('DB Import', () => {
|
||||
});
|
||||
await copyStream(reader, writer);
|
||||
|
||||
const res = await driver.query(conn, `select count(*) as cnt from t1`);
|
||||
const res = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
|
||||
expect(res.rows[0].cnt.toString()).toEqual('6');
|
||||
})
|
||||
);
|
||||
@@ -65,10 +66,10 @@ describe('DB Import', () => {
|
||||
});
|
||||
await copyStream(reader2, writer2);
|
||||
|
||||
const res1 = await driver.query(conn, `select count(*) as cnt from t1`);
|
||||
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
|
||||
expect(res1.rows[0].cnt.toString()).toEqual('6');
|
||||
|
||||
const res2 = await driver.query(conn, `select count(*) as cnt from t2`);
|
||||
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
|
||||
expect(res2.rows[0].cnt.toString()).toEqual('6');
|
||||
})
|
||||
);
|
||||
|
||||
@@ -4,7 +4,7 @@ const { testWrapper, testWrapperPrepareOnly } = require('../tools');
|
||||
const _ = require('lodash');
|
||||
const engines = require('../engines');
|
||||
const deployDb = require('dbgate-api/src/shell/deployDb');
|
||||
const { databaseInfoFromYamlModel } = require('dbgate-tools');
|
||||
const { databaseInfoFromYamlModel, runQueryOnDriver, formatQueryWithoutParams } = require('dbgate-tools');
|
||||
const generateDeploySql = require('dbgate-api/src/shell/generateDeploySql');
|
||||
const connectUtility = require('dbgate-api/src/utility/connectUtility');
|
||||
|
||||
@@ -69,6 +69,29 @@ function checkStructure(
|
||||
}
|
||||
}
|
||||
|
||||
// function convertObjectText(text, driver) {
|
||||
// if (!text) return undefined;
|
||||
// text = formatQueryWithoutParams(driver, text);
|
||||
// if (driver.dialect.requireFromDual && text.startsWith('create view ') && !text.includes('from')) {
|
||||
// text = text + ' from dual';
|
||||
// }
|
||||
// return text;
|
||||
// }
|
||||
|
||||
// function convertModelToEngine(model, driver) {
|
||||
// return model.map(x => ({
|
||||
// ...x,
|
||||
// text: convertObjectText(x.text, driver),
|
||||
// }));
|
||||
// }
|
||||
|
||||
function convertModelToEngine(model, driver) {
|
||||
return model.map(x => ({
|
||||
...x,
|
||||
text: x.text ? formatQueryWithoutParams(driver, x.text) : undefined,
|
||||
}));
|
||||
}
|
||||
|
||||
async function testDatabaseDeploy(engine, conn, driver, dbModelsYaml, options) {
|
||||
const { testEmptyLastScript, finalCheckAgainstModel, markDeleted, allowDropStatements } = options || {};
|
||||
let index = 0;
|
||||
@@ -79,32 +102,37 @@ async function testDatabaseDeploy(engine, conn, driver, dbModelsYaml, options) {
|
||||
deletedSqlObjectPrefix: '_deleted_',
|
||||
}
|
||||
: {};
|
||||
dbdiffOptionsExtra.schemaMode = 'ignore';
|
||||
|
||||
for (const loadedDbModel of dbModelsYaml) {
|
||||
const { sql, isEmpty } = await generateDeploySql({
|
||||
systemConnection: conn.isPreparedOnly ? undefined : conn,
|
||||
connection: conn.isPreparedOnly ? conn : undefined,
|
||||
driver,
|
||||
loadedDbModel,
|
||||
dbdiffOptionsExtra,
|
||||
});
|
||||
console.debug('Generated deploy script:', sql);
|
||||
if (!allowDropStatements) {
|
||||
expect(sql.toUpperCase().includes('DROP ')).toBeFalsy();
|
||||
}
|
||||
if (_.isString(loadedDbModel)) {
|
||||
await driver.script(conn, formatQueryWithoutParams(driver, loadedDbModel));
|
||||
} else {
|
||||
const { sql, isEmpty } = await generateDeploySql({
|
||||
systemConnection: conn.isPreparedOnly ? undefined : conn,
|
||||
connection: conn.isPreparedOnly ? conn : undefined,
|
||||
driver,
|
||||
loadedDbModel: convertModelToEngine(loadedDbModel, driver),
|
||||
dbdiffOptionsExtra,
|
||||
});
|
||||
console.debug('Generated deploy script:', sql);
|
||||
if (!allowDropStatements) {
|
||||
expect(sql.toUpperCase().includes('DROP ')).toBeFalsy();
|
||||
}
|
||||
|
||||
console.log('dbModelsYaml.length', dbModelsYaml.length, index);
|
||||
if (testEmptyLastScript && index == dbModelsYaml.length - 1) {
|
||||
expect(isEmpty).toBeTruthy();
|
||||
}
|
||||
console.log('dbModelsYaml.length', dbModelsYaml.length, index);
|
||||
if (testEmptyLastScript && index == dbModelsYaml.length - 1) {
|
||||
expect(isEmpty).toBeTruthy();
|
||||
}
|
||||
|
||||
await deployDb({
|
||||
systemConnection: conn.isPreparedOnly ? undefined : conn,
|
||||
connection: conn.isPreparedOnly ? conn : undefined,
|
||||
driver,
|
||||
loadedDbModel,
|
||||
dbdiffOptionsExtra,
|
||||
});
|
||||
await deployDb({
|
||||
systemConnection: conn.isPreparedOnly ? undefined : conn,
|
||||
connection: conn.isPreparedOnly ? conn : undefined,
|
||||
driver,
|
||||
loadedDbModel: convertModelToEngine(loadedDbModel, driver),
|
||||
dbdiffOptionsExtra,
|
||||
});
|
||||
}
|
||||
|
||||
index++;
|
||||
}
|
||||
@@ -112,7 +140,12 @@ async function testDatabaseDeploy(engine, conn, driver, dbModelsYaml, options) {
|
||||
const dbhan = conn.isPreparedOnly ? await connectUtility(driver, conn, 'read') : conn;
|
||||
const structure = await driver.analyseFull(dbhan);
|
||||
if (conn.isPreparedOnly) await driver.close(dbhan);
|
||||
checkStructure(engine, structure, finalCheckAgainstModel ?? dbModelsYaml[dbModelsYaml.length - 1], options);
|
||||
checkStructure(
|
||||
engine,
|
||||
structure,
|
||||
convertModelToEngine(finalCheckAgainstModel ?? _.findLast(dbModelsYaml, x => _.isArray(x)), driver),
|
||||
options
|
||||
);
|
||||
}
|
||||
|
||||
describe('Deploy database', () => {
|
||||
@@ -334,7 +367,7 @@ describe('Deploy database', () => {
|
||||
],
|
||||
]);
|
||||
|
||||
const res = await driver.query(conn, `select count(*) as cnt from t1`);
|
||||
const res = await runQueryOnDriver(conn, driver, `select count(*) as ~cnt from ~t1`);
|
||||
expect(res.rows[0].cnt.toString()).toEqual('3');
|
||||
})
|
||||
);
|
||||
@@ -381,7 +414,7 @@ describe('Deploy database', () => {
|
||||
],
|
||||
]);
|
||||
|
||||
const res = await driver.query(conn, `select val from t1 where id = 2`);
|
||||
const res = await runQueryOnDriver(conn, driver, `select ~val from ~t1 where ~id = 2`);
|
||||
expect(res.rows[0].val.toString()).toEqual('5');
|
||||
})
|
||||
);
|
||||
@@ -409,12 +442,58 @@ describe('Deploy database', () => {
|
||||
],
|
||||
]);
|
||||
|
||||
await driver.query(conn, `insert into t1 (id) values (1)`);
|
||||
const res = await driver.query(conn, ` select val from t1 where id = 1`);
|
||||
await runQueryOnDriver(conn, driver, `insert into ~t1 (~id) values (1)`);
|
||||
const res = await runQueryOnDriver(conn, driver, ` select ~val from ~t1 where ~id = 1`);
|
||||
expect(res.rows[0].val.toString().substring(0, 2)).toEqual('20');
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipChangeColumn).map(engine => [engine.label, engine]))(
|
||||
'Change column to NOT NULL column with default - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [
|
||||
[
|
||||
{
|
||||
name: 't1.table.yaml',
|
||||
json: {
|
||||
name: 't1',
|
||||
columns: [
|
||||
{ name: 'id', type: 'int', notNull: true },
|
||||
{ name: 'val', type: 'int' },
|
||||
],
|
||||
|
||||
primaryKey: ['id'],
|
||||
},
|
||||
},
|
||||
],
|
||||
'insert into ~t1 (~id, ~val) values (1, 1); insert into ~t1 (~id) values (2)',
|
||||
[
|
||||
{
|
||||
name: 't1.table.yaml',
|
||||
json: {
|
||||
name: 't1',
|
||||
columns: [
|
||||
{ name: 'id', type: 'int', notNull: true },
|
||||
{ name: 'val', type: 'int', notNull: true, default: '20' },
|
||||
],
|
||||
primaryKey: ['id'],
|
||||
},
|
||||
},
|
||||
],
|
||||
'insert into ~t1 (~id) values (3);',
|
||||
]);
|
||||
|
||||
const res1 = await runQueryOnDriver(conn, driver, `select ~val from ~t1 where ~id = 1`);
|
||||
expect(res1.rows[0].val).toEqual(1);
|
||||
|
||||
const res2 = await runQueryOnDriver(conn, driver, `select ~val from ~t1 where ~id = 2`);
|
||||
expect(res2.rows[0].val).toEqual(20);
|
||||
|
||||
const res3 = await runQueryOnDriver(conn, driver, `select ~val from ~t1 where ~id = 3`);
|
||||
expect(res2.rows[0].val).toEqual(20);
|
||||
})
|
||||
);
|
||||
|
||||
const T1 = {
|
||||
name: 't1.table.yaml',
|
||||
json: {
|
||||
@@ -427,6 +506,18 @@ describe('Deploy database', () => {
|
||||
},
|
||||
};
|
||||
|
||||
const T2 = {
|
||||
name: 't2.table.yaml',
|
||||
json: {
|
||||
name: 't2',
|
||||
columns: [
|
||||
{ name: 'id', type: 'int' },
|
||||
{ name: 'val', type: 'int' },
|
||||
],
|
||||
primaryKey: ['id'],
|
||||
},
|
||||
};
|
||||
|
||||
const T1_DELETED = {
|
||||
name: '_deleted_t1.table.yaml',
|
||||
json: {
|
||||
@@ -462,17 +553,17 @@ describe('Deploy database', () => {
|
||||
|
||||
const V1 = {
|
||||
name: 'v1.view.sql',
|
||||
text: 'create view v1 as select * from t1',
|
||||
text: 'create view ~v1 as select * from ~t1',
|
||||
};
|
||||
|
||||
const V1_VARIANT2 = {
|
||||
name: 'v1.view.sql',
|
||||
text: 'create view v1 as select 1 as c1',
|
||||
text: 'create view ~v1 as select ~id + ~id ~idsum from ~t1',
|
||||
};
|
||||
|
||||
const V1_DELETED = {
|
||||
name: '_deleted_v1.view.sql',
|
||||
text: 'create view _deleted_v1 as select * from t1',
|
||||
text: 'create view ~_deleted_v1 as select * from ~t1',
|
||||
};
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
@@ -611,4 +702,94 @@ describe('Deploy database', () => {
|
||||
);
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
|
||||
'Script drived deploy - basic predeploy - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [
|
||||
[
|
||||
{
|
||||
name: '1.predeploy.sql',
|
||||
text: 'create table ~t1 (~id int primary key); insert into ~t1 (~id) values (1);',
|
||||
},
|
||||
],
|
||||
]);
|
||||
|
||||
const res1 = await runQueryOnDriver(conn, driver, 'SELECT COUNT(*) AS ~cnt FROM ~t1');
|
||||
expect(res1.rows[0].cnt == 1).toBeTruthy();
|
||||
|
||||
const res2 = await runQueryOnDriver(conn, driver, 'SELECT COUNT(*) AS ~cnt FROM ~dbgate_deploy_journal');
|
||||
expect(res2.rows[0].cnt == 1).toBeTruthy();
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
|
||||
'Script drived deploy - install+uninstall - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [
|
||||
[
|
||||
{
|
||||
name: 't1.uninstall.sql',
|
||||
text: 'drop table ~t1',
|
||||
},
|
||||
{
|
||||
name: 't1.install.sql',
|
||||
text: 'create table ~t1 (~id int primary key); insert into ~t1 (~id) values (1)',
|
||||
},
|
||||
{
|
||||
name: 't2.once.sql',
|
||||
text: 'create table ~t2 (~id int primary key); insert into ~t2 (~id) values (1)',
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
name: 't1.uninstall.sql',
|
||||
text: 'drop table ~t1',
|
||||
},
|
||||
{
|
||||
name: 't1.install.sql',
|
||||
text: 'create table ~t1 (~id int primary key, ~val int); insert into ~t1 (~id, ~val) values (1, 11)',
|
||||
},
|
||||
{
|
||||
name: 't2.once.sql',
|
||||
text: 'insert into ~t2 (~id) values (2)',
|
||||
},
|
||||
],
|
||||
]);
|
||||
|
||||
const res1 = await runQueryOnDriver(conn, driver, 'SELECT ~val from ~t1 where ~id = 1');
|
||||
expect(res1.rows[0].val == 11).toBeTruthy();
|
||||
|
||||
const res2 = await runQueryOnDriver(conn, driver, 'SELECT COUNT(*) AS ~cnt FROM ~t2');
|
||||
expect(res2.rows[0].cnt == 1).toBeTruthy();
|
||||
|
||||
const res3 = await runQueryOnDriver(conn, driver, 'SELECT COUNT(*) AS ~cnt FROM ~dbgate_deploy_journal');
|
||||
expect(res3.rows[0].cnt == 3).toBeTruthy();
|
||||
|
||||
const res4 = await runQueryOnDriver(
|
||||
conn,
|
||||
driver,
|
||||
"SELECT ~run_count from ~dbgate_deploy_journal where ~name = 't2.once.sql'"
|
||||
);
|
||||
expect(res4.rows[0].run_count == 1).toBeTruthy();
|
||||
|
||||
const res5 = await runQueryOnDriver(
|
||||
conn,
|
||||
driver,
|
||||
"SELECT ~run_count from ~dbgate_deploy_journal where ~name = 't1.install.sql'"
|
||||
);
|
||||
expect(res5.rows[0].run_count == 2).toBeTruthy();
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Mark table removed, one remains - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [[T1, T2], [T2], [T2]], {
|
||||
markDeleted: true,
|
||||
disallowExtraObjects: true,
|
||||
finalCheckAgainstModel: [T1_DELETED, T2],
|
||||
});
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
const { testWrapper } = require('../tools');
|
||||
const engines = require('../engines');
|
||||
const _ = require('lodash');
|
||||
const { formatQueryWithoutParams, runCommandOnDriver } = require('dbgate-tools');
|
||||
|
||||
const initSql = ['CREATE TABLE t1 (id int primary key)', 'CREATE TABLE t2 (id int primary key)'];
|
||||
const initSql = ['CREATE TABLE ~t1 (~id int primary key)', 'CREATE TABLE ~t2 (~id int primary key)'];
|
||||
|
||||
function flatSource() {
|
||||
return _.flatten(
|
||||
@@ -10,6 +11,14 @@ function flatSource() {
|
||||
);
|
||||
}
|
||||
|
||||
function flatSourceParameters() {
|
||||
return _.flatten(
|
||||
engines.map(engine =>
|
||||
(engine.parameters || []).map(parameter => [engine.label, parameter.testName, parameter, engine])
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
const obj1Match = expect.objectContaining({
|
||||
pureName: 'obj1',
|
||||
});
|
||||
@@ -26,9 +35,9 @@ describe('Object analyse', () => {
|
||||
test.each(flatSource())(
|
||||
'Full analysis - %s - %s',
|
||||
testWrapper(async (conn, driver, type, object, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
|
||||
|
||||
await driver.query(conn, object.create1, { discardResult: true });
|
||||
await runCommandOnDriver(conn, driver, object.create1);
|
||||
const structure = await driver.analyseFull(conn);
|
||||
|
||||
expect(structure[type].length).toEqual(1);
|
||||
@@ -39,11 +48,11 @@ describe('Object analyse', () => {
|
||||
test.each(flatSource())(
|
||||
'Incremental analysis - add - %s - %s',
|
||||
testWrapper(async (conn, driver, type, object, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
|
||||
|
||||
await driver.query(conn, object.create2, { discardResult: true });
|
||||
await runCommandOnDriver(conn, driver, object.create2);
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
await driver.query(conn, object.create1, { discardResult: true });
|
||||
await runCommandOnDriver(conn, driver, object.create1);
|
||||
const structure2 = await driver.analyseIncremental(conn, structure1);
|
||||
|
||||
expect(structure2[type].length).toEqual(2);
|
||||
@@ -54,12 +63,12 @@ describe('Object analyse', () => {
|
||||
test.each(flatSource())(
|
||||
'Incremental analysis - drop - %s - %s',
|
||||
testWrapper(async (conn, driver, type, object, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
|
||||
|
||||
await driver.query(conn, object.create1, { discardResult: true });
|
||||
await driver.query(conn, object.create2, { discardResult: true });
|
||||
await runCommandOnDriver(conn, driver, object.create1);
|
||||
await runCommandOnDriver(conn, driver, object.create2);
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
await driver.query(conn, object.drop2, { discardResult: true });
|
||||
await runCommandOnDriver(conn, driver, object.drop2);
|
||||
const structure2 = await driver.analyseIncremental(conn, structure1);
|
||||
|
||||
expect(structure2[type].length).toEqual(1);
|
||||
@@ -70,15 +79,15 @@ describe('Object analyse', () => {
|
||||
test.each(flatSource())(
|
||||
'Create SQL - add - %s - %s',
|
||||
testWrapper(async (conn, driver, type, object, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
|
||||
|
||||
await driver.query(conn, object.create1, { discardResult: true });
|
||||
await runCommandOnDriver(conn, driver, object.create1);
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
await driver.query(conn, object.drop1, { discardResult: true });
|
||||
await runCommandOnDriver(conn, driver, object.drop1);
|
||||
const structure2 = await driver.analyseIncremental(conn, structure1);
|
||||
expect(structure2[type].length).toEqual(0);
|
||||
|
||||
await driver.query(conn, structure1[type][0].createSql, { discardResult: true });
|
||||
await driver.script(conn, structure1[type][0].createSql);
|
||||
|
||||
const structure3 = await driver.analyseIncremental(conn, structure2);
|
||||
|
||||
@@ -86,4 +95,45 @@ describe('Object analyse', () => {
|
||||
expect(structure3[type][0]).toEqual(type.includes('views') ? view1Match : obj1Match);
|
||||
})
|
||||
);
|
||||
|
||||
test.each(flatSourceParameters())(
|
||||
'Test parameters simple analyse - %s - %s',
|
||||
testWrapper(async (conn, driver, testName, parameter, engine) => {
|
||||
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
|
||||
for (const sql of engine.parametersOtherSql) await runCommandOnDriver(conn, driver, sql);
|
||||
|
||||
await runCommandOnDriver(conn, driver, parameter.create);
|
||||
const structure = await driver.analyseFull(conn);
|
||||
|
||||
const parameters = structure[parameter.objectTypeField].find(x => x.pureName == 'obj1').parameters;
|
||||
|
||||
expect(parameters.length).toEqual(parameter.list.length);
|
||||
for (let i = 0; i < parameters.length; i += 1) {
|
||||
expect(parameters[i]).toEqual(expect.objectContaining(parameter.list[i]));
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
test.each(flatSourceParameters())(
|
||||
'Test parameters create SQL - %s - %s',
|
||||
testWrapper(async (conn, driver, testName, parameter, engine) => {
|
||||
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
|
||||
for (const sql of engine.parametersOtherSql) await runCommandOnDriver(conn, driver, sql);
|
||||
|
||||
await runCommandOnDriver(conn, driver, parameter.create);
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
await runCommandOnDriver(conn, driver, parameter.drop);
|
||||
|
||||
const obj = structure1[parameter.objectTypeField].find(x => x.pureName == 'obj1');
|
||||
await driver.script(conn, obj.createSql, { discardResult: true });
|
||||
|
||||
const structure2 = await driver.analyseFull(conn);
|
||||
const parameters = structure2[parameter.objectTypeField].find(x => x.pureName == 'obj1').parameters;
|
||||
|
||||
expect(parameters.length).toEqual(parameter.list.length);
|
||||
for (let i = 0; i < parameters.length; i += 1) {
|
||||
expect(parameters[i]).toEqual(expect.objectContaining(parameter.list[i]));
|
||||
}
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
const engines = require('../engines');
|
||||
const { splitQuery } = require('dbgate-query-splitter');
|
||||
const { testWrapper } = require('../tools');
|
||||
const { runQueryOnDriver, runCommandOnDriver, formatQueryWithoutParams } = require('dbgate-tools');
|
||||
|
||||
const initSql = [
|
||||
'CREATE TABLE t1 (id int primary key)',
|
||||
'INSERT INTO t1 (id) VALUES (1)',
|
||||
'INSERT INTO t1 (id) VALUES (2)',
|
||||
'CREATE TABLE ~t1 (~id int primary key)',
|
||||
'INSERT INTO ~t1 (~id) VALUES (1)',
|
||||
'INSERT INTO ~t1 (~id) VALUES (2)',
|
||||
];
|
||||
|
||||
expect.extend({
|
||||
@@ -51,7 +52,7 @@ class StreamHandler {
|
||||
function executeStreamItem(driver, conn, sql) {
|
||||
return new Promise(resolve => {
|
||||
const handler = new StreamHandler(resolve);
|
||||
driver.stream(conn, sql, handler);
|
||||
driver.stream(conn, formatQueryWithoutParams(driver, sql), handler);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -68,9 +69,11 @@ describe('Query', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Simple query - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
for (const sql of initSql) {
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(sql));
|
||||
}
|
||||
|
||||
const res = await driver.query(conn, 'SELECT id FROM t1 ORDER BY id');
|
||||
const res = await runQueryOnDriver(conn, driver, dmp => dmp.put('SELECT ~id FROM ~t1 ORDER BY ~id'));
|
||||
expect(res.columns).toEqual([
|
||||
expect.objectContaining({
|
||||
columnName: 'id',
|
||||
@@ -91,8 +94,11 @@ describe('Query', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Simple stream query - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
const results = await executeStream(driver, conn, 'SELECT id FROM t1 ORDER BY id');
|
||||
for (const sql of initSql) {
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(sql));
|
||||
}
|
||||
|
||||
const results = await executeStream(driver, conn, 'SELECT ~id FROM ~t1 ORDER BY ~id');
|
||||
expect(results.length).toEqual(1);
|
||||
const res = results[0];
|
||||
|
||||
@@ -104,11 +110,14 @@ describe('Query', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'More queries - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
for (const sql of initSql) {
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(sql));
|
||||
}
|
||||
|
||||
const results = await executeStream(
|
||||
driver,
|
||||
conn,
|
||||
'SELECT id FROM t1 ORDER BY id; SELECT id FROM t1 ORDER BY id DESC'
|
||||
'SELECT ~id FROM ~t1 ORDER BY ~id; SELECT ~id FROM ~t1 ORDER BY ~id DESC'
|
||||
);
|
||||
expect(results.length).toEqual(2);
|
||||
|
||||
@@ -128,7 +137,7 @@ describe('Query', () => {
|
||||
const results = await executeStream(
|
||||
driver,
|
||||
conn,
|
||||
'CREATE TABLE t1 (id int primary key); INSERT INTO t1 (id) VALUES (1); INSERT INTO t1 (id) VALUES (2); SELECT id FROM t1 ORDER BY id; '
|
||||
'CREATE TABLE ~t1 (~id int primary key); INSERT INTO ~t1 (~id) VALUES (1); INSERT INTO ~t1 (~id) VALUES (2); SELECT ~id FROM ~t1 ORDER BY ~id; '
|
||||
);
|
||||
expect(results.length).toEqual(1);
|
||||
|
||||
@@ -144,7 +153,7 @@ describe('Query', () => {
|
||||
const results = await executeStream(
|
||||
driver,
|
||||
conn,
|
||||
'CREATE TABLE t1 (id int); INSERT INTO t1 (id) VALUES (1); INSERT INTO t1 (id) VALUES (2) '
|
||||
'CREATE TABLE ~t1 (~id int); INSERT INTO ~t1 (~id) VALUES (1); INSERT INTO ~t1 (~id) VALUES (2) '
|
||||
);
|
||||
expect(results.length).toEqual(0);
|
||||
})
|
||||
@@ -153,16 +162,57 @@ describe('Query', () => {
|
||||
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
|
||||
'Save data query - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
for (const sql of initSql) {
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(sql));
|
||||
}
|
||||
|
||||
await driver.script(
|
||||
conn,
|
||||
'INSERT INTO t1 (id) VALUES (3);INSERT INTO t1 (id) VALUES (4);UPDATE t1 SET id=10 WHERE id=1;DELETE FROM t1 WHERE id=2;',
|
||||
formatQueryWithoutParams(
|
||||
driver,
|
||||
'INSERT INTO ~t1 (~id) VALUES (3);INSERT INTO ~t1 (~id) VALUES (4);UPDATE ~t1 SET ~id=10 WHERE ~id=1;DELETE FROM ~t1 WHERE ~id=2;'
|
||||
),
|
||||
{ discardResult: true }
|
||||
);
|
||||
const res = await driver.query(conn, 'SELECT COUNT(*) AS cnt FROM t1');
|
||||
const res = await runQueryOnDriver(conn, driver, dmp => dmp.put('SELECT COUNT(*) AS ~cnt FROM ~t1'));
|
||||
// console.log(res);
|
||||
expect(res.rows[0].cnt == 3).toBeTruthy();
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
|
||||
'Select scope identity - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't1',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', notNull: true, autoIncrement: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
const structure = await driver.analyseFull(conn);
|
||||
const table = structure.tables.find(x => x.pureName == 't1');
|
||||
|
||||
let res;
|
||||
if (driver.dialect.requireStandaloneSelectForScopeIdentity) {
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put("INSERT INTO ~t1 (~val) VALUES ('aaa')"));
|
||||
res = await runQueryOnDriver(conn, driver, dmp => dmp.selectScopeIdentity(table));
|
||||
} else {
|
||||
res = await runQueryOnDriver(conn, driver, dmp => {
|
||||
dmp.putCmd("INSERT INTO ~t1 (~val) VALUES ('aaa')");
|
||||
dmp.selectScopeIdentity(table);
|
||||
});
|
||||
}
|
||||
const row = res.rows[0];
|
||||
const keys = Object.keys(row);
|
||||
expect(keys.length).toEqual(1);
|
||||
expect(row[keys[0]] == 1).toBeTruthy();
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
@@ -76,7 +76,7 @@ describe('Schema tests', () => {
|
||||
});
|
||||
|
||||
describe('Base analyser test', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipIncrementalAnalysis).map(engine => [engine.label, engine]))(
|
||||
'Structure without change - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await baseStructure(conn, driver);
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
const { runCommandOnDriver } = require('dbgate-tools');
|
||||
const engines = require('../engines');
|
||||
const { testWrapper } = require('../tools');
|
||||
|
||||
const t1Sql = 'CREATE TABLE t1 (id int not null primary key, val1 varchar(50))';
|
||||
const ix1Sql = 'CREATE index ix1 ON t1(val1, id)';
|
||||
const t1Sql = 'CREATE TABLE ~t1 (~id int not null primary key, ~val1 varchar(50))';
|
||||
const ix1Sql = 'CREATE index ~ix1 ON ~t1(~val1, ~id)';
|
||||
const t2Sql = engine =>
|
||||
`CREATE TABLE t2 (id int not null primary key, val2 varchar(50) ${engine.skipUnique ? '' : 'unique'})`;
|
||||
const t3Sql = 'CREATE TABLE t3 (id int not null primary key, valfk int, foreign key (valfk) references t2(id))';
|
||||
const t4Sql = 'CREATE TABLE t4 (id int not null primary key, valdef int not null default 12)';
|
||||
`CREATE TABLE ~t2 (~id int not null primary key, ~val2 varchar(50) ${engine.skipUnique ? '' : 'unique'})`;
|
||||
const t3Sql = 'CREATE TABLE ~t3 (~id int not null primary key, ~valfk int, foreign key (~valfk) references ~t2(~id))';
|
||||
const t4Sql = 'CREATE TABLE ~t4 (~id int not null primary key, ~valdef int default 12 not null)';
|
||||
// const fkSql = 'ALTER TABLE t3 ADD FOREIGN KEY (valfk) REFERENCES t2(id)'
|
||||
|
||||
const txMatch = (engine, tname, vcolname, nextcol, defaultValue) =>
|
||||
@@ -15,7 +16,7 @@ const txMatch = (engine, tname, vcolname, nextcol, defaultValue) =>
|
||||
columns: [
|
||||
expect.objectContaining({
|
||||
columnName: 'id',
|
||||
dataType: expect.stringMatching(/int.*/i),
|
||||
dataType: expect.stringMatching(/int.*|number/i),
|
||||
...(engine.skipNullability ? {} : { notNull: true }),
|
||||
}),
|
||||
expect.objectContaining({
|
||||
@@ -59,9 +60,10 @@ describe('Table analyse', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Table structure - full analysis - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await driver.query(conn, t1Sql);
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
|
||||
|
||||
const structure = await driver.analyseFull(conn);
|
||||
console.log(JSON.stringify(structure, null, 2));
|
||||
|
||||
expect(structure.tables.length).toEqual(1);
|
||||
expect(structure.tables[0]).toEqual(t1Match(engine));
|
||||
@@ -71,13 +73,13 @@ describe('Table analyse', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Table add - incremental analysis - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await driver.query(conn, t2Sql(engine));
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
|
||||
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
expect(structure1.tables.length).toEqual(1);
|
||||
expect(structure1.tables[0]).toEqual(t2Match(engine));
|
||||
|
||||
await driver.query(conn, t1Sql);
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
|
||||
const structure2 = await driver.analyseIncremental(conn, structure1);
|
||||
|
||||
expect(structure2.tables.length).toEqual(2);
|
||||
@@ -89,14 +91,14 @@ describe('Table analyse', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Table remove - incremental analysis - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await driver.query(conn, t1Sql);
|
||||
await driver.query(conn, t2Sql(engine));
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
expect(structure1.tables.length).toEqual(2);
|
||||
expect(structure1.tables.find(x => x.pureName == 't1')).toEqual(t1Match(engine));
|
||||
expect(structure1.tables.find(x => x.pureName == 't2')).toEqual(t2Match(engine));
|
||||
|
||||
await driver.query(conn, 'DROP TABLE t2');
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put('DROP TABLE ~t2'));
|
||||
const structure2 = await driver.analyseIncremental(conn, structure1);
|
||||
|
||||
expect(structure2.tables.length).toEqual(1);
|
||||
@@ -107,15 +109,14 @@ describe('Table analyse', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Table change - incremental analysis - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await driver.query(conn, t1Sql);
|
||||
await driver.query(conn, t2Sql(engine));
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
|
||||
if (engine.dbSnapshotBySeconds) await new Promise(resolve => setTimeout(resolve, 1100));
|
||||
|
||||
await driver.query(
|
||||
conn,
|
||||
`ALTER TABLE t2 ADD ${engine.alterTableAddColumnSyntax ? 'COLUMN' : ''} nextcol varchar(50)`
|
||||
await runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.put(`ALTER TABLE ~t2 ADD ${engine.alterTableAddColumnSyntax ? 'COLUMN' : ''} ~nextcol varchar(50)`)
|
||||
);
|
||||
const structure2 = await driver.analyseIncremental(conn, structure1);
|
||||
|
||||
@@ -130,8 +131,8 @@ describe('Table analyse', () => {
|
||||
test.each(engines.filter(x => !x.skipIndexes).map(engine => [engine.label, engine]))(
|
||||
'Index - full analysis - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await driver.query(conn, t1Sql);
|
||||
await driver.query(conn, ix1Sql);
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(ix1Sql));
|
||||
const structure = await driver.analyseFull(conn);
|
||||
|
||||
const t1 = structure.tables.find(x => x.pureName == 't1');
|
||||
@@ -145,7 +146,7 @@ describe('Table analyse', () => {
|
||||
test.each(engines.filter(x => !x.skipUnique).map(engine => [engine.label, engine]))(
|
||||
'Unique - full analysis - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await driver.query(conn, t2Sql(engine));
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
|
||||
const structure = await driver.analyseFull(conn);
|
||||
|
||||
const t2 = structure.tables.find(x => x.pureName == 't2');
|
||||
@@ -159,8 +160,8 @@ describe('Table analyse', () => {
|
||||
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
|
||||
'Foreign key - full analysis - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await driver.query(conn, t2Sql(engine));
|
||||
await driver.query(conn, t3Sql);
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(t3Sql));
|
||||
// await driver.query(conn, fkSql);
|
||||
|
||||
const structure = await driver.analyseFull(conn);
|
||||
@@ -179,7 +180,7 @@ describe('Table analyse', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Table structure - default value - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await driver.query(conn, t4Sql);
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put(t4Sql));
|
||||
|
||||
const structure = await driver.analyseFull(conn);
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ const _ = require('lodash');
|
||||
const fp = require('lodash/fp');
|
||||
const engines = require('../engines');
|
||||
const { testWrapper } = require('../tools');
|
||||
const { extendDatabaseInfo } = require('dbgate-tools');
|
||||
const { extendDatabaseInfo, runCommandOnDriver } = require('dbgate-tools');
|
||||
|
||||
function createExpector(value) {
|
||||
return _.cloneDeepWith(value, x => {
|
||||
@@ -25,7 +25,7 @@ function checkTableStructure2(t1, t2) {
|
||||
}
|
||||
|
||||
async function testTableCreate(conn, driver, table) {
|
||||
await driver.query(conn, `create table t0 (id int not null primary key)`);
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.put('create table ~t0 (~id int not null primary key)'));
|
||||
|
||||
const dmp = driver.createDumper();
|
||||
const table1 = {
|
||||
|
||||
@@ -70,3 +70,9 @@ services:
|
||||
# - cockroachdb
|
||||
# restart: on-failure
|
||||
|
||||
oracle:
|
||||
image: gvenzl/oracle-xe:21-slim
|
||||
environment:
|
||||
ORACLE_PASSWORD: Pwd2020Db
|
||||
ports:
|
||||
- 15006:1521
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
const views = {
|
||||
type: 'views',
|
||||
create1: 'CREATE VIEW obj1 AS SELECT id FROM t1',
|
||||
create2: 'CREATE VIEW obj2 AS SELECT id FROM t2',
|
||||
drop1: 'DROP VIEW obj1',
|
||||
drop2: 'DROP VIEW obj2',
|
||||
create1: 'CREATE VIEW ~obj1 AS SELECT ~id FROM ~t1',
|
||||
create2: 'CREATE VIEW ~obj2 AS SELECT ~id FROM ~t2',
|
||||
drop1: 'DROP VIEW ~obj1',
|
||||
drop2: 'DROP VIEW ~obj2',
|
||||
};
|
||||
const matviews = {
|
||||
type: 'matviews',
|
||||
@@ -28,7 +28,16 @@ const engines = [
|
||||
port: 15001,
|
||||
},
|
||||
// skipOnCI: true,
|
||||
objects: [views],
|
||||
objects: [
|
||||
views,
|
||||
{
|
||||
type: 'procedures',
|
||||
create1: 'CREATE PROCEDURE obj1() BEGIN SELECT * FROM t1; END',
|
||||
create2: 'CREATE PROCEDURE obj2() BEGIN SELECT * FROM t2; END',
|
||||
drop1: 'DROP PROCEDURE obj1',
|
||||
drop2: 'DROP PROCEDURE obj2',
|
||||
},
|
||||
],
|
||||
dbSnapshotBySeconds: true,
|
||||
dumpFile: 'data/chinook-mysql.sql',
|
||||
dumpChecks: [
|
||||
@@ -37,6 +46,68 @@ const engines = [
|
||||
res: '25',
|
||||
},
|
||||
],
|
||||
parametersOtherSql: ['CREATE PROCEDURE obj2(a int, b int) BEGIN SELECT * FROM t1; END'],
|
||||
parameters: [
|
||||
{
|
||||
testName: 'simple',
|
||||
create: 'CREATE PROCEDURE obj1(a int) BEGIN SELECT * FROM t1; END',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: 'a',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'int',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
testName: 'paramTypes',
|
||||
create: 'CREATE PROCEDURE obj1(a int, b varchar(50), c numeric(10,2)) BEGIN SELECT * FROM t1; END',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: 'a',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'int',
|
||||
},
|
||||
{
|
||||
parameterName: 'b',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'varchar(50)',
|
||||
},
|
||||
{
|
||||
parameterName: 'c',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'decimal(10,2)',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
testName: 'paramModes',
|
||||
create: 'CREATE PROCEDURE obj1(IN a int, OUT b int, INOUT c int) BEGIN SELECT * FROM t1; END',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: 'a',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'int',
|
||||
},
|
||||
{
|
||||
parameterName: 'b',
|
||||
parameterMode: 'OUT',
|
||||
dataType: 'int',
|
||||
},
|
||||
{
|
||||
parameterName: 'c',
|
||||
parameterMode: 'INOUT',
|
||||
dataType: 'int',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
label: 'MariaDB',
|
||||
@@ -105,6 +176,94 @@ const engines = [
|
||||
res: '25',
|
||||
},
|
||||
],
|
||||
|
||||
parametersOtherSql: ['CREATE PROCEDURE obj2(a integer, b integer) LANGUAGE SQL AS $$ select * from t1 $$'],
|
||||
parameters: [
|
||||
{
|
||||
testName: 'simple',
|
||||
create: 'CREATE PROCEDURE obj1(a integer) LANGUAGE SQL AS $$ select * from t1 $$',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: 'a',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'integer',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
testName: 'dataTypes',
|
||||
create:
|
||||
'CREATE PROCEDURE obj1(a integer, b varchar(20), c numeric(18,2)) LANGUAGE SQL AS $$ select * from t1 $$',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: 'a',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'integer',
|
||||
},
|
||||
{
|
||||
parameterName: 'b',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'varchar',
|
||||
},
|
||||
{
|
||||
parameterName: 'c',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'numeric',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
testName: 'paramModes',
|
||||
create: 'CREATE PROCEDURE obj1(IN a integer, INOUT b integer) LANGUAGE SQL AS $$ select * from t1 $$',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: 'a',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'integer',
|
||||
},
|
||||
{
|
||||
parameterName: 'b',
|
||||
parameterMode: 'INOUT',
|
||||
dataType: 'integer',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
testName: 'paramModesFunction',
|
||||
objectTypeField: 'functions',
|
||||
create: `
|
||||
create or replace function obj1(
|
||||
out min_len int,
|
||||
out max_len int)
|
||||
language plpgsql
|
||||
as $$
|
||||
begin
|
||||
select min(id),
|
||||
max(id)
|
||||
into min_len, max_len
|
||||
from t1;
|
||||
end;$$`,
|
||||
drop: 'DROP FUNCTION obj1',
|
||||
list: [
|
||||
{
|
||||
parameterName: 'min_len',
|
||||
parameterMode: 'OUT',
|
||||
dataType: 'integer',
|
||||
},
|
||||
{
|
||||
parameterName: 'max_len',
|
||||
parameterMode: 'OUT',
|
||||
dataType: 'integer',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
label: 'SQL Server',
|
||||
@@ -129,6 +288,63 @@ const engines = [
|
||||
drop2: 'DROP PROCEDURE obj2',
|
||||
},
|
||||
],
|
||||
parametersOtherSql: ['CREATE PROCEDURE obj2 (@p1 int, @p2 int) AS SELECT id from t1'],
|
||||
parameters: [
|
||||
{
|
||||
testName: 'simple',
|
||||
create: 'CREATE PROCEDURE obj1 (@param1 int) AS SELECT id from t1',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: '@param1',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'int',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
testName: 'dataTypes',
|
||||
create: 'CREATE PROCEDURE obj1 (@p1 bit, @p2 nvarchar(20), @p3 decimal(18,2), @p4 float) AS SELECT id from t1',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: '@p1',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'bit',
|
||||
},
|
||||
{
|
||||
parameterName: '@p2',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'nvarchar(20)',
|
||||
},
|
||||
{
|
||||
parameterName: '@p3',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'decimal(18,2)',
|
||||
},
|
||||
{
|
||||
parameterName: '@p4',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'float',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
testName: 'outputParam',
|
||||
create: 'CREATE PROCEDURE obj1 (@p1 int OUTPUT) AS SELECT id from t1',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: '@p1',
|
||||
parameterMode: 'OUT',
|
||||
dataType: 'int',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
supportSchemas: true,
|
||||
supportRenameSqlObject: true,
|
||||
defaultSchemaName: 'dbo',
|
||||
@@ -142,6 +358,7 @@ const engines = [
|
||||
},
|
||||
objects: [views],
|
||||
skipOnCI: false,
|
||||
skipChangeColumn: true,
|
||||
},
|
||||
{
|
||||
label: 'CockroachDB',
|
||||
@@ -181,6 +398,43 @@ const engines = [
|
||||
skipStringLength: true,
|
||||
alterTableAddColumnSyntax: true,
|
||||
dbSnapshotBySeconds: true,
|
||||
skipChangeColumn: true,
|
||||
},
|
||||
{
|
||||
label: 'Oracle',
|
||||
connection: {
|
||||
engine: 'oracle@dbgate-plugin-oracle',
|
||||
password: 'Pwd2020Db',
|
||||
user: 'system',
|
||||
server: 'oracle',
|
||||
port: 1521,
|
||||
serviceName: 'xe',
|
||||
},
|
||||
local: {
|
||||
server: 'localhost',
|
||||
port: 15006,
|
||||
},
|
||||
skipOnCI: false,
|
||||
dbSnapshotBySeconds: true,
|
||||
setNullDefaultInsteadOfDrop: true,
|
||||
skipIncrementalAnalysis: true,
|
||||
objects: [
|
||||
views,
|
||||
{
|
||||
type: 'procedures',
|
||||
create1: 'CREATE PROCEDURE ~obj1 AS BEGIN SELECT ~id FROM ~t1 END',
|
||||
create2: 'CREATE PROCEDURE ~obj2 AS BEGIN SELECT ~id FROM ~t2 END',
|
||||
drop1: 'DROP PROCEDURE ~obj1',
|
||||
drop2: 'DROP PROCEDURE ~obj2',
|
||||
},
|
||||
{
|
||||
type: 'functions',
|
||||
create1: 'CREATE FUNCTION ~obj1 RETURN NUMBER IS v_count NUMBER; \n BEGIN SELECT COUNT(*) INTO v_count FROM ~t1;\n RETURN v_count;\n END ~obj1',
|
||||
create2: 'CREATE FUNCTION ~obj2 RETURN NUMBER IS v_count NUMBER; \n BEGIN SELECT COUNT(*) INTO v_count FROM ~t2;\n RETURN v_count;\n END ~obj2',
|
||||
drop1: 'DROP FUNCTION ~obj1',
|
||||
drop2: 'DROP FUNCTION ~obj2',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
@@ -189,10 +443,11 @@ const filterLocal = [
|
||||
'-MySQL',
|
||||
'-MariaDB',
|
||||
'-PostgreSQL',
|
||||
'SQL Server',
|
||||
'-SQL Server',
|
||||
'-SQLite',
|
||||
'-CockroachDB',
|
||||
'-ClickHouse',
|
||||
'Oracle',
|
||||
];
|
||||
|
||||
const enginesPostgre = engines.filter(x => x.label == 'PostgreSQL');
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "dbgate-integration-tests",
|
||||
"version": "5.0.0-alpha.1",
|
||||
"version": "6.0.0-alpha.1",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
|
||||
const crypto = require('crypto');
|
||||
|
||||
function randomDbName() {
|
||||
function randomDbName(dialect) {
|
||||
const generatedKey = crypto.randomBytes(6);
|
||||
const newKey = generatedKey.toString('hex');
|
||||
return `db${newKey}`;
|
||||
const res = `db${newKey}`;
|
||||
if (dialect.upperCaseAllDbObjectNames) return res.toUpperCase();
|
||||
return res;
|
||||
}
|
||||
|
||||
function extractConnection(engine) {
|
||||
@@ -32,12 +34,14 @@ async function connect(engine, database) {
|
||||
return conn;
|
||||
} else {
|
||||
const conn = await driver.connect(connection);
|
||||
await driver.query(conn, `CREATE DATABASE ${database}`);
|
||||
const dmp = driver.createDumper();
|
||||
dmp.createDatabase(database);
|
||||
await driver.query(conn, dmp.s);
|
||||
await driver.close(conn);
|
||||
|
||||
const res = await driver.connect({
|
||||
...connection,
|
||||
database,
|
||||
database: (driver.dialect.userDatabaseNamePrefix ?? '') + database,
|
||||
});
|
||||
return res;
|
||||
}
|
||||
@@ -55,12 +59,14 @@ async function prepareConnection(engine, database) {
|
||||
};
|
||||
} else {
|
||||
const conn = await driver.connect(connection);
|
||||
await driver.query(conn, `CREATE DATABASE ${database}`);
|
||||
const dmp = driver.createDumper();
|
||||
dmp.createDatabase(database);
|
||||
await driver.query(conn, dmp.s);
|
||||
await driver.close(conn);
|
||||
|
||||
return {
|
||||
...connection,
|
||||
database,
|
||||
database: (driver.dialect.userDatabaseNamePrefix ?? '') + database,
|
||||
isPreparedOnly: true,
|
||||
};
|
||||
}
|
||||
@@ -71,7 +77,7 @@ const testWrapper =
|
||||
async (label, ...other) => {
|
||||
const engine = other[other.length - 1];
|
||||
const driver = requireEngineDriver(engine.connection);
|
||||
const conn = await connect(engine, randomDbName());
|
||||
const conn = await connect(engine, randomDbName(driver.dialect));
|
||||
try {
|
||||
await body(conn, driver, ...other);
|
||||
} finally {
|
||||
@@ -84,7 +90,7 @@ const testWrapperPrepareOnly =
|
||||
async (label, ...other) => {
|
||||
const engine = other[other.length - 1];
|
||||
const driver = requireEngineDriver(engine.connection);
|
||||
const conn = await prepareConnection(engine, randomDbName());
|
||||
const conn = await prepareConnection(engine, randomDbName(driver.dialect));
|
||||
await body(conn, driver, ...other);
|
||||
};
|
||||
|
||||
|
||||
+6
-6
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"private": true,
|
||||
"version": "5.5.7-packer-beta.21",
|
||||
"version": "6.0.1-packer-beta.2",
|
||||
"name": "dbgate-all",
|
||||
"workspaces": [
|
||||
"packages/*",
|
||||
@@ -35,6 +35,7 @@
|
||||
"build:lib": "yarn build:sqltree && yarn build:tools && yarn build:filterparser && yarn build:datalib",
|
||||
"build:app": "yarn plugins:copydist && cd app && yarn install && yarn build",
|
||||
"build:api": "yarn workspace dbgate-api build",
|
||||
"build:api:doc": "yarn workspace dbgate-api build:doc",
|
||||
"build:web": "yarn workspace dbgate-web build",
|
||||
"build:plugins:frontend": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn build:frontend",
|
||||
"build:plugins:backend": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn build:backend",
|
||||
@@ -47,14 +48,13 @@
|
||||
"printSecrets": "node printSecrets",
|
||||
"generatePadFile": "node generatePadFile",
|
||||
"adjustPackageJson": "node adjustPackageJson",
|
||||
"fillNativeModules": "node fillNativeModules",
|
||||
"fillNativeModulesElectron": "node fillNativeModules --electron",
|
||||
"fillPackagedPlugins": "node fillPackagedPlugins",
|
||||
"resetPackagedPlugins": "node resetPackagedPlugins",
|
||||
"prettier": "prettier --write packages/api/src && prettier --write packages/datalib/src && prettier --write packages/filterparser/src && prettier --write packages/sqltree/src && prettier --write packages/tools/src && prettier --write packages/types && prettier --write packages/web/src && prettier --write app/src",
|
||||
"copy:docker:build": "copyfiles packages/api/dist/* docker -f && copyfiles packages/web/public/* docker -u 2 && copyfiles \"packages/web/public/**/*\" docker -u 2 && copyfiles \"plugins/dist/**/*\" docker/plugins -u 2",
|
||||
"copy:packer:build": "copyfiles packages/api/dist/* packer/build -f && copyfiles packages/web/public/* packer/build -u 2 && copyfiles \"packages/web/public/**/*\" packer/build -u 2 && copyfiles \"plugins/dist/**/*\" packer/build/plugins -u 2 && copyfiles packer/install-packages.sh packer/build -f",
|
||||
"install:drivers:docker": "cd docker && yarn init --yes && yarn add better-sqlite3 && yarn add oracledb && cd ..",
|
||||
"copy:packer:build": "copyfiles packages/api/dist/* packer/build -f && copyfiles packages/web/public/* packer/build -u 2 && copyfiles \"packages/web/public/**/*\" packer/build -u 2 && copyfiles \"plugins/dist/**/*\" packer/build/plugins -u 2 && copyfiles packer/install-packages.sh packer/build -f && yarn install:drivers:packer",
|
||||
"install:drivers:docker": "node common/defineVolatileDependencies.js docker && cd docker && yarn install && cd ..",
|
||||
"install:drivers:packer": "node common/defineVolatileDependencies.js packer/build",
|
||||
"prepare:docker": "yarn plugins:copydist && yarn build:web && yarn build:api && yarn copy:docker:build && yarn install:drivers:docker",
|
||||
"prepare:packer": "yarn plugins:copydist && yarn build:web && yarn build:api && yarn copy:packer:build",
|
||||
"start": "concurrently --kill-others-on-fail \"yarn start:api\" \"yarn start:web\"",
|
||||
@@ -62,7 +62,7 @@
|
||||
"ts:api": "yarn workspace dbgate-api ts",
|
||||
"ts:web": "yarn workspace dbgate-web ts",
|
||||
"ts": "yarn ts:api && yarn ts:web",
|
||||
"postinstall": "yarn resetPackagedPlugins && yarn build:lib && patch-package && yarn fillNativeModules && yarn build:plugins:frontend",
|
||||
"postinstall": "yarn resetPackagedPlugins && yarn build:lib && patch-package && yarn build:plugins:frontend",
|
||||
"dbgate-serve": "node packages/dbgate/bin/dbgate-serve.js"
|
||||
},
|
||||
"dependencies": {
|
||||
|
||||
+1
-1
@@ -1,7 +1,7 @@
|
||||
DEVMODE=1
|
||||
SHELL_SCRIPTING=1
|
||||
|
||||
CLOUD_UPGRADE_FILE=c:\test\upg\upgrade.zip
|
||||
# CLOUD_UPGRADE_FILE=c:\test\upg\upgrade.zip
|
||||
|
||||
# PERMISSIONS=~widgets/app,~widgets/plugins
|
||||
# DISABLE_SHELL=1
|
||||
|
||||
@@ -12,15 +12,14 @@ This example exports table Customer info CSV file.
|
||||
|
||||
```javascript
|
||||
const dbgateApi = require('dbgate-api');
|
||||
const dbgatePluginMssql = require("dbgate-plugin-mssql");
|
||||
const dbgatePluginMysql = require("dbgate-plugin-mysql");
|
||||
const dbgatePluginCsv = require("dbgate-plugin-csv");
|
||||
|
||||
dbgateApi.registerPlugins(dbgatePluginMssql);
|
||||
dbgateApi.registerPlugins(dbgatePluginMysql);
|
||||
|
||||
async function run() {
|
||||
const reader = await dbgateApi.tableReader({
|
||||
connection: { server: 'localhost', engine: 'mssql', user: 'sa', password: 'xxxx', database: 'Chinook' },
|
||||
schemaName: 'dbo',
|
||||
connection: { server: 'localhost', engine: 'mysql@dbgate-plugin-mysql', user: 'root', password: 'xxxx', database: 'Chinook' },
|
||||
pureName: 'Customer',
|
||||
});
|
||||
const writer = await dbgatePluginCsv.shellApi.writer({ fileName: 'Customer.csv' });
|
||||
@@ -59,8 +58,8 @@ Copies data from reader into writer. Reader and writer should be created from fu
|
||||
Reads table or view.
|
||||
```js
|
||||
const reader = await dbgateApi.tableReader({
|
||||
connection: { server: 'localhost', engine: 'mssql' | 'postgres' | 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
|
||||
schemaName: 'dbo',
|
||||
connection: { server: 'localhost', engine: 'postgres@dbgate-plugin-postgres', user: 'root', password: 'xxxx', database: 'DB_NAME' },
|
||||
schemaName: 'public',
|
||||
pureName: 'Customer',
|
||||
});
|
||||
```
|
||||
@@ -69,7 +68,7 @@ Reads table or view.
|
||||
Executes query and reads its result.
|
||||
```js
|
||||
const reader = await dbgateApi.tableReader({
|
||||
connection: { server: 'localhost', engine: 'mssql' | 'postgres' | 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
|
||||
connection: { server: 'localhost', engine: 'mysql@dbgate-plugin-mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
|
||||
sql: 'SELECT * FROM Album',
|
||||
});
|
||||
```
|
||||
@@ -81,8 +80,7 @@ Imports data into table. Options are optional, default values are false.
|
||||
- createIfNotExists - create table, if not exists
|
||||
```js
|
||||
const reader = await dbgateApi.tableWriter({
|
||||
connection: { server: 'localhost', engine: 'mssql' | 'postgres' | 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
|
||||
schemaName: 'dbo',
|
||||
connection: { server: 'localhost', engine: 'mysql@dbgate-plugin-mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
|
||||
pureName: 'Customer',
|
||||
options: {
|
||||
dropIfExists: false,
|
||||
|
||||
@@ -0,0 +1,11 @@
|
||||
---
|
||||
layout: docs
|
||||
title: API documentation
|
||||
order: 21
|
||||
docs_left: true
|
||||
hide_hero: true
|
||||
---
|
||||
|
||||
# API Documentation
|
||||
|
||||
{{>main}}
|
||||
Vendored
+29
-42
@@ -1,60 +1,47 @@
|
||||
DEVMODE=1
|
||||
|
||||
CONNECTIONS=mysql,postgres,postgres1,mongo,mongo2,mysqlssh,sqlite,relational
|
||||
CONNECTIONS=mysql,postgres,mongo,redis,mssql,oracle
|
||||
|
||||
LABEL_mysql=MySql localhost
|
||||
SERVER_mysql=localhost
|
||||
LABEL_mysql=MySql
|
||||
SERVER_mysql=dbgatedckstage1.sprinx.cz
|
||||
USER_mysql=root
|
||||
PASSWORD_mysql=test
|
||||
PORT_mysql=3307
|
||||
PASSWORD_mysql=Pwd2020Db
|
||||
PORT_mysql=3306
|
||||
ENGINE_mysql=mysql@dbgate-plugin-mysql
|
||||
|
||||
LABEL_postgres=Postgres localhost
|
||||
SERVER_postgres=localhost
|
||||
LABEL_postgres=Postgres
|
||||
SERVER_postgres=dbgatedckstage1.sprinx.cz
|
||||
USER_postgres=postgres
|
||||
PASSWORD_postgres=Pwd2020Db
|
||||
PORT_postgres=5432
|
||||
ENGINE_postgres=postgres@dbgate-plugin-postgres
|
||||
|
||||
LABEL_postgres1=Postgres localhost test DB
|
||||
SERVER_postgres1=localhost
|
||||
USER_postgres1=postgres
|
||||
PASSWORD_postgres1=Pwd2020Db
|
||||
PORT_postgres1=5432
|
||||
ENGINE_postgres1=postgres@dbgate-plugin-postgres
|
||||
DATABASE_postgres1=test
|
||||
|
||||
LABEL_mongo=Mongo URL
|
||||
URL_mongo=mongodb://localhost:27017
|
||||
LABEL_mongo=Mongo
|
||||
SERVER_mongo=dbgatedckstage1.sprinx.cz
|
||||
USER_mongo=root
|
||||
PASSWORD_mongo=Pwd2020Db
|
||||
PORT_mongo=27017
|
||||
ENGINE_mongo=mongo@dbgate-plugin-mongo
|
||||
|
||||
LABEL_mongo2=Mongo Server
|
||||
SERVER_mongo2=localhost
|
||||
ENGINE_mongo2=mongo@dbgate-plugin-mongo
|
||||
LABEL_redis=Redis
|
||||
SERVER_redis=dbgatedckstage1.sprinx.cz
|
||||
ENGINE_redis=redis@dbgate-plugin-redis
|
||||
PORT_redis=6379
|
||||
|
||||
LABEL_mysqlssh=MySql SSH
|
||||
SERVER_mysqlssh=localhost
|
||||
USER_mysqlssh=root
|
||||
PASSWORD_mysqlssh=xxx
|
||||
PORT_mysqlssh=3316
|
||||
ENGINE_mysqlssh=mysql@dbgate-plugin-mysql
|
||||
USE_SSH_mysqlssh=1
|
||||
SSH_HOST_mysqlssh=demo.dbgate.org
|
||||
SSH_PORT_mysqlssh=22
|
||||
SSH_MODE_mysqlssh=userPassword
|
||||
SSH_LOGIN_mysqlssh=root
|
||||
SSH_PASSWORD_mysqlssh=xxx
|
||||
LABEL_mssql=SQL Server
|
||||
SERVER_mssql=dbgatedckstage1.sprinx.cz
|
||||
USER_mssql=sa
|
||||
PASSWORD_mssql=Pwd2020Db
|
||||
PORT_mssql=1433
|
||||
ENGINE_mssql=mssql@dbgate-plugin-mssql
|
||||
|
||||
LABEL_sqlite=sqlite
|
||||
FILE_sqlite=/home/jena/.dbgate/files/sqlite/feeds.sqlite
|
||||
ENGINE_sqlite=sqlite@dbgate-plugin-sqlite
|
||||
|
||||
LABEL_relational=Relational dataset repo
|
||||
SERVER_relational=relational.fit.cvut.cz
|
||||
USER_relational=guest
|
||||
PASSWORD_relational=relational
|
||||
ENGINE_relational=mariadb@dbgate-plugin-mysql
|
||||
READONLY_relational=1
|
||||
LABEL_oracle=Oracle
|
||||
SERVER_oracle=dbgatedckstage1.sprinx.cz
|
||||
USER_oracle=system
|
||||
PASSWORD_oracle=Pwd2020Db
|
||||
PORT_oracle=1521
|
||||
ENGINE_oracle=oracle@dbgate-plugin-oracle
|
||||
SERVICE_NAME_oracle=xe
|
||||
|
||||
# SETTINGS_dataGrid.showHintColumns=1
|
||||
|
||||
|
||||
Vendored
+1
-1
@@ -12,6 +12,6 @@ DBCONFIG_mysql=[{"name":"Chinook","connectionColor":"cyan"}]
|
||||
|
||||
|
||||
SINGLE_CONNECTION=mysql
|
||||
SINGLE_DATABASE=Chinook
|
||||
# SINGLE_DATABASE=Chinook
|
||||
|
||||
PERMISSIONS=files/charts/read
|
||||
|
||||
+12
-14
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "dbgate-api",
|
||||
"main": "src/index.js",
|
||||
"version": "5.0.0-alpha.1",
|
||||
"version": "6.0.0-alpha.1",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -16,21 +16,23 @@
|
||||
"export",
|
||||
"dbgate"
|
||||
],
|
||||
"files": [
|
||||
"src"
|
||||
],
|
||||
"dependencies": {
|
||||
"@aws-sdk/rds-signer": "^3.665.0",
|
||||
"activedirectory2": "^2.1.0",
|
||||
"async-lock": "^1.2.4",
|
||||
"async-lock": "^1.2.6",
|
||||
"axios": "^0.21.1",
|
||||
"body-parser": "^1.19.0",
|
||||
"bufferutil": "^4.0.1",
|
||||
"byline": "^5.0.0",
|
||||
"compare-versions": "^3.6.0",
|
||||
"cors": "^2.8.5",
|
||||
"cross-env": "^6.0.3",
|
||||
"dbgate-datalib": "^5.0.0-alpha.1",
|
||||
"dbgate-datalib": "^6.0.0-alpha.1",
|
||||
"dbgate-query-splitter": "^4.11.2",
|
||||
"dbgate-sqltree": "^5.0.0-alpha.1",
|
||||
"dbgate-tools": "^5.0.0-alpha.1",
|
||||
"dbgate-sqltree": "^6.0.0-alpha.1",
|
||||
"dbgate-tools": "^6.0.0-alpha.1",
|
||||
"debug": "^4.3.4",
|
||||
"diff": "^5.0.0",
|
||||
"diff2html": "^3.4.13",
|
||||
@@ -49,7 +51,6 @@
|
||||
"jsonwebtoken": "^8.5.1",
|
||||
"line-reader": "^0.4.0",
|
||||
"lodash": "^4.17.21",
|
||||
"mkdirp": "^3.0.1",
|
||||
"moment": "^2.24.0",
|
||||
"ncp": "^2.0.0",
|
||||
"node-cron": "^2.0.3",
|
||||
@@ -74,22 +75,19 @@
|
||||
"start:storage:built": "env-cmd -f env/storage/.env cross-env DEVMODE= BUILTWEBMODE=1 node dist/bundle.js --listen-api",
|
||||
"start:singleconn": "env-cmd node src/index.js --server localhost --user root --port 3307 --engine mysql@dbgate-plugin-mysql --password test --listen-api",
|
||||
"ts": "tsc",
|
||||
"build": "webpack"
|
||||
"build": "webpack",
|
||||
"build:doc": "jsdoc2md --template doctpl.hbs ./src/shell/* > ../../../dbgate.github.io/_docs/apidoc.md"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/fs-extra": "^9.0.11",
|
||||
"@types/lodash": "^4.14.149",
|
||||
"dbgate-types": "^5.0.0-alpha.1",
|
||||
"dbgate-types": "^6.0.0-alpha.1",
|
||||
"env-cmd": "^10.1.0",
|
||||
"jsdoc-to-markdown": "^9.0.5",
|
||||
"node-loader": "^1.0.2",
|
||||
"nodemon": "^2.0.2",
|
||||
"typescript": "^4.4.3",
|
||||
"webpack": "^5.91.0",
|
||||
"webpack-cli": "^5.1.4"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"better-sqlite3": "9.6.0",
|
||||
"msnodesqlv8": "^4.2.1",
|
||||
"oracledb": "^6.6.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -230,7 +230,7 @@ class LoginsProvider extends AuthProviderBase {
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
return { error: 'Invalid credentials' };
|
||||
}
|
||||
|
||||
@@ -271,11 +271,10 @@ function hasEnvLogins() {
|
||||
return false;
|
||||
}
|
||||
|
||||
function detectEnvAuthProvider() {
|
||||
function detectEnvAuthProviderCore() {
|
||||
if (process.env.AUTH_PROVIDER) {
|
||||
return process.env.AUTH_PROVIDER;
|
||||
}
|
||||
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
return 'denyall';
|
||||
}
|
||||
@@ -291,6 +290,14 @@ function detectEnvAuthProvider() {
|
||||
return 'none';
|
||||
}
|
||||
|
||||
function detectEnvAuthProvider() {
|
||||
const authProvider = detectEnvAuthProviderCore();
|
||||
if (process.env.BASIC_AUTH && authProvider != 'logins' && authProvider != 'ad') {
|
||||
throw new Error(`BASIC_AUTH is not supported with ${authProvider} auth provider`);
|
||||
}
|
||||
return authProvider;
|
||||
}
|
||||
|
||||
function createEnvAuthProvider() {
|
||||
const authProvider = detectEnvAuthProvider();
|
||||
switch (authProvider) {
|
||||
|
||||
@@ -9,6 +9,7 @@ const _ = require('lodash');
|
||||
const AsyncLock = require('async-lock');
|
||||
const jwt = require('jsonwebtoken');
|
||||
|
||||
const processArgs = require('../utility/processArgs');
|
||||
const currentVersion = require('../currentVersion');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const connections = require('../controllers/connections');
|
||||
@@ -62,6 +63,8 @@ module.exports = {
|
||||
const logoutUrl = storageConnectionError ? null : await authProvider.getLogoutUrl();
|
||||
const adminConfig = storageConnectionError ? null : await storage.readConfig({ group: 'admin' });
|
||||
|
||||
storage.startRefreshLicense();
|
||||
|
||||
const isAdminPasswordMissing = !!(
|
||||
process.env.STORAGE_DATABASE &&
|
||||
!process.env.ADMIN_PASSWORD &&
|
||||
@@ -81,7 +84,8 @@ module.exports = {
|
||||
isElectron: platformInfo.isElectron,
|
||||
isLicenseValid,
|
||||
isLicenseExpired: checkedLicense?.isExpired,
|
||||
trialDaysLeft: checkedLicense?.isGeneratedTrial && !checkedLicense?.isExpired ? checkedLicense?.daysLeft : null,
|
||||
trialDaysLeft:
|
||||
checkedLicense?.licenseTypeObj?.isTrial && !checkedLicense?.isExpired ? checkedLicense?.daysLeft : null,
|
||||
checkedLicense,
|
||||
configurationError,
|
||||
logoutUrl,
|
||||
@@ -99,7 +103,10 @@ module.exports = {
|
||||
adminPasswordState: adminConfig?.adminPasswordState,
|
||||
storageDatabase: process.env.STORAGE_DATABASE,
|
||||
logsFilePath: getLogsFilePath(),
|
||||
connectionsFilePath: path.join(datadir(), 'connections.jsonl'),
|
||||
connectionsFilePath: path.join(
|
||||
datadir(),
|
||||
processArgs.runE2eTests ? 'connections-e2etests.jsonl' : 'connections.jsonl'
|
||||
),
|
||||
...currentVersion,
|
||||
};
|
||||
|
||||
|
||||
@@ -199,8 +199,11 @@ module.exports = {
|
||||
const dir = datadir();
|
||||
if (!portalConnections) {
|
||||
// @ts-ignore
|
||||
this.datastore = new JsonLinesDatabase(path.join(dir, 'connections.jsonl'));
|
||||
this.datastore = new JsonLinesDatabase(
|
||||
path.join(dir, processArgs.runE2eTests ? 'connections-e2etests.jsonl' : 'connections.jsonl')
|
||||
);
|
||||
}
|
||||
await this.checkUnsavedConnectionsLimit();
|
||||
},
|
||||
|
||||
list_meta: true,
|
||||
@@ -300,6 +303,32 @@ module.exports = {
|
||||
return res;
|
||||
},
|
||||
|
||||
async checkUnsavedConnectionsLimit() {
|
||||
if (!this.datastore) {
|
||||
return;
|
||||
}
|
||||
const MAX_UNSAVED_CONNECTIONS = 5;
|
||||
await this.datastore.transformAll(connections => {
|
||||
const count = connections.filter(x => x.unsaved).length;
|
||||
if (count > MAX_UNSAVED_CONNECTIONS) {
|
||||
const res = [];
|
||||
let unsavedToSkip = count - MAX_UNSAVED_CONNECTIONS;
|
||||
for (const item of connections) {
|
||||
if (item.unsaved) {
|
||||
if (unsavedToSkip > 0) {
|
||||
unsavedToSkip--;
|
||||
} else {
|
||||
res.push(item);
|
||||
}
|
||||
} else {
|
||||
res.push(item);
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
update_meta: true,
|
||||
async update({ _id, values }, req) {
|
||||
if (portalConnections) return;
|
||||
@@ -483,4 +512,10 @@ module.exports = {
|
||||
}
|
||||
return null;
|
||||
},
|
||||
|
||||
reloadConnectionList_meta: true,
|
||||
async reloadConnectionList() {
|
||||
if (portalConnections) return;
|
||||
await this.datastore.unload();
|
||||
},
|
||||
};
|
||||
|
||||
@@ -13,6 +13,7 @@ const {
|
||||
modelCompareDbDiffOptions,
|
||||
getLogger,
|
||||
extractErrorLogData,
|
||||
filterStructureBySchema,
|
||||
} = require('dbgate-tools');
|
||||
const { html, parse } = require('diff2html');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
@@ -440,7 +441,7 @@ module.exports = {
|
||||
},
|
||||
|
||||
exportModel_meta: true,
|
||||
async exportModel({ conid, database, outputFolder }, req) {
|
||||
async exportModel({ conid, database, outputFolder, schema }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
|
||||
const realFolder = outputFolder.startsWith('archive:')
|
||||
@@ -448,7 +449,8 @@ module.exports = {
|
||||
: outputFolder;
|
||||
|
||||
const model = await this.structure({ conid, database });
|
||||
await exportDbModel(extendDatabaseInfo(model), realFolder);
|
||||
const filteredModel = schema ? filterStructureBySchema(model, schema) : model;
|
||||
await exportDbModel(extendDatabaseInfo(filteredModel), realFolder);
|
||||
|
||||
if (outputFolder.startsWith('archive:')) {
|
||||
socket.emitChanged(`archive-files-changed`, { folder: outputFolder.substring('archive:'.length) });
|
||||
@@ -457,14 +459,15 @@ module.exports = {
|
||||
},
|
||||
|
||||
exportModelSql_meta: true,
|
||||
async exportModelSql({ conid, database, outputFolder, outputFile }, req) {
|
||||
async exportModelSql({ conid, database, outputFolder, outputFile, schema }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
|
||||
const connection = await connections.getCore({ conid });
|
||||
const driver = requireEngineDriver(connection);
|
||||
|
||||
const model = await this.structure({ conid, database });
|
||||
await exportDbModelSql(extendDatabaseInfo(model), driver, outputFolder, outputFile);
|
||||
const filteredModel = schema ? filterStructureBySchema(model, schema) : model;
|
||||
await exportDbModelSql(extendDatabaseInfo(filteredModel), driver, outputFolder, outputFile);
|
||||
|
||||
return { status: 'ok' };
|
||||
},
|
||||
|
||||
@@ -94,7 +94,7 @@ module.exports = {
|
||||
if (!manifest.keywords) {
|
||||
continue;
|
||||
}
|
||||
if (!manifest.keywords.includes('dbgateplugin')) {
|
||||
if (!manifest.keywords.includes('dbgateplugin') && !manifest.keywords.includes('dbgatebuiltin')) {
|
||||
continue;
|
||||
}
|
||||
const readmeFile = path.join(isPackaged ? packagedPluginsDir() : pluginsdir(), packageName, 'README.md');
|
||||
|
||||
@@ -12,6 +12,7 @@ const {
|
||||
jsonScriptToJavascript,
|
||||
getLogger,
|
||||
safeJsonParse,
|
||||
pinoLogRecordToMessageRecord,
|
||||
} = require('dbgate-tools');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
const processArgs = require('../utility/processArgs');
|
||||
@@ -68,18 +69,20 @@ module.exports = {
|
||||
|
||||
dispatchMessage(runid, message) {
|
||||
if (message) {
|
||||
const json = safeJsonParse(message.message);
|
||||
if (_.isPlainObject(message)) logger.log(message);
|
||||
else logger.info(message);
|
||||
|
||||
if (json) logger.log(json);
|
||||
else logger.info(message.message);
|
||||
const toEmit = _.isPlainObject(message)
|
||||
? {
|
||||
time: new Date(),
|
||||
...message,
|
||||
}
|
||||
: {
|
||||
message,
|
||||
time: new Date(),
|
||||
};
|
||||
|
||||
const toEmit = {
|
||||
time: new Date(),
|
||||
...message,
|
||||
message: json ? json.msg : message.message,
|
||||
};
|
||||
|
||||
if (json && json.level >= 50) {
|
||||
if (toEmit.level >= 50) {
|
||||
toEmit.severity = 'error';
|
||||
}
|
||||
|
||||
@@ -108,7 +111,7 @@ module.exports = {
|
||||
const scriptFile = path.join(uploadsdir(), runid + '.js');
|
||||
fs.writeFileSync(`${scriptFile}`, scriptText);
|
||||
fs.mkdirSync(directory);
|
||||
const pluginNames = _.union(fs.readdirSync(pluginsdir()), packagedPluginList);
|
||||
const pluginNames = extractPlugins(scriptText);
|
||||
logger.info({ scriptFile }, 'Running script');
|
||||
// const subprocess = fork(scriptFile, ['--checkParent', '--max-old-space-size=8192'], {
|
||||
const subprocess = fork(
|
||||
@@ -131,7 +134,16 @@ module.exports = {
|
||||
}
|
||||
);
|
||||
const pipeDispatcher = severity => data => {
|
||||
return this.dispatchMessage(runid, { severity, message: data.toString().trim() });
|
||||
const json = safeJsonParse(data, null);
|
||||
|
||||
if (json) {
|
||||
return this.dispatchMessage(runid, pinoLogRecordToMessageRecord(json));
|
||||
} else {
|
||||
return this.dispatchMessage(runid, {
|
||||
message: json == null ? data.toString().trim() : null,
|
||||
severity,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
byline(subprocess.stdout).on('data', pipeDispatcher('info'));
|
||||
@@ -165,7 +177,7 @@ module.exports = {
|
||||
|
||||
start_meta: true,
|
||||
async start({ script }) {
|
||||
const runid = crypto.randomUUID()
|
||||
const runid = crypto.randomUUID();
|
||||
|
||||
if (script.type == 'json') {
|
||||
const js = jsonScriptToJavascript(script);
|
||||
|
||||
@@ -52,7 +52,7 @@ module.exports = {
|
||||
if (existing) return existing;
|
||||
const connection = await connections.getCore({ conid });
|
||||
if (!connection) {
|
||||
throw new Error(`Connection with conid="${conid}" not fund`);
|
||||
throw new Error(`Connection with conid="${conid}" not found`);
|
||||
}
|
||||
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
|
||||
throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
|
||||
|
||||
@@ -26,4 +26,6 @@ module.exports = {
|
||||
async readConfig({ group }) {
|
||||
return {};
|
||||
},
|
||||
|
||||
startRefreshLicense() {},
|
||||
};
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
|
||||
module.exports = {
|
||||
version: '5.0.0-alpha.1',
|
||||
buildTime: '2021-04-17T07:22:49.702Z'
|
||||
version: '6.0.0-alpha.1',
|
||||
buildTime: '2024-12-01T00:00:00Z'
|
||||
};
|
||||
|
||||
@@ -28,6 +28,7 @@ const files = require('./controllers/files');
|
||||
const scheduler = require('./controllers/scheduler');
|
||||
const queryHistory = require('./controllers/queryHistory');
|
||||
const onFinished = require('on-finished');
|
||||
const processArgs = require('./utility/processArgs');
|
||||
|
||||
const { rundir } = require('./utility/directories');
|
||||
const platformInfo = require('./utility/platformInfo');
|
||||
@@ -36,6 +37,7 @@ const _ = require('lodash');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const { getDefaultAuthProvider } = require('./auth/authProvider');
|
||||
const startCloudUpgradeTimer = require('./utility/cloudUpgrade');
|
||||
const { isProApp } = require('./utility/checkLicense');
|
||||
|
||||
const logger = getLogger('main');
|
||||
|
||||
@@ -76,8 +78,13 @@ function start() {
|
||||
app.use(getExpressPath('/'), express.static('/home/dbgate-docker/public'));
|
||||
} else if (platformInfo.isAwsUbuntuLayout) {
|
||||
app.use(getExpressPath('/'), express.static('/home/ubuntu/build/public'));
|
||||
} else if (processArgs.runE2eTests) {
|
||||
app.use(getExpressPath('/'), express.static(path.resolve('packer/build/public')));
|
||||
} else if (platformInfo.isNpmDist) {
|
||||
app.use(getExpressPath('/'), express.static(path.join(__dirname, '../../dbgate-web/public')));
|
||||
app.use(
|
||||
getExpressPath('/'),
|
||||
express.static(path.join(__dirname, isProApp() ? '../../dbgate-web-premium/public' : '../../dbgate-web/public'))
|
||||
);
|
||||
} else if (process.env.DEVWEB) {
|
||||
// console.log('__dirname', __dirname);
|
||||
// console.log(path.join(__dirname, '../../web/public/build'));
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
const argIndex = process.argv.indexOf('--native-modules');
|
||||
const redirectFile = global['NATIVE_MODULES'] || (argIndex > 0 ? process.argv[argIndex + 1] : null);
|
||||
|
||||
function requireDynamic(file) {
|
||||
try {
|
||||
// @ts-ignore
|
||||
return __non_webpack_require__(redirectFile);
|
||||
} catch (err) {
|
||||
return require(redirectFile);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = redirectFile ? requireDynamic(redirectFile) : require('./nativeModulesContent');
|
||||
@@ -0,0 +1,9 @@
|
||||
|
||||
// this file is generated automatically by script fillNativeModules.js, do not edit it manually
|
||||
const content = {};
|
||||
|
||||
content['better-sqlite3'] = () => require('better-sqlite3');
|
||||
content['oracledb'] = () => require('oracledb');
|
||||
|
||||
|
||||
module.exports = content;
|
||||
@@ -8,7 +8,7 @@ const autoIndexForeignKeysTransform = () => database => {
|
||||
...(table.indexes || []),
|
||||
...table.foreignKeys.map(fk => ({
|
||||
constraintName: `IX_${fk.constraintName}`,
|
||||
columns: fk.columns,
|
||||
columns: fk.columns.map(x => ({ columnName: x.columnName })),
|
||||
})),
|
||||
],
|
||||
};
|
||||
|
||||
@@ -2,6 +2,13 @@ const EnsureStreamHeaderStream = require('../utility/EnsureStreamHeaderStream');
|
||||
const Stream = require('stream');
|
||||
const ColumnMapTransformStream = require('../utility/ColumnMapTransformStream');
|
||||
|
||||
/**
|
||||
* Copies reader to writer. Used for import, export tables and transfer data between tables
|
||||
* @param {readerType} input - reader object
|
||||
* @param {writerType} output - writer object
|
||||
* @param {object} options - options
|
||||
* @returns {Promise}
|
||||
*/
|
||||
function copyStream(input, output, options) {
|
||||
const { columns } = options || {};
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ const { resolveArchiveFolder } = require('../utility/directories');
|
||||
async function dataDuplicator({
|
||||
connection,
|
||||
archive,
|
||||
folder,
|
||||
items,
|
||||
options,
|
||||
analysedStructure = null,
|
||||
@@ -19,7 +20,7 @@ async function dataDuplicator({
|
||||
systemConnection,
|
||||
}) {
|
||||
if (!driver) driver = requireEngineDriver(connection);
|
||||
|
||||
|
||||
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
|
||||
|
||||
try {
|
||||
@@ -29,6 +30,12 @@ async function dataDuplicator({
|
||||
analysedStructure = await driver.analyseFull(dbhan);
|
||||
}
|
||||
|
||||
const sourceDir = archive
|
||||
? resolveArchiveFolder(archive)
|
||||
: folder?.startsWith('archive:')
|
||||
? resolveArchiveFolder(folder.substring('archive:'.length))
|
||||
: folder;
|
||||
|
||||
const dupl = new DataDuplicator(
|
||||
dbhan,
|
||||
driver,
|
||||
@@ -38,8 +45,7 @@ async function dataDuplicator({
|
||||
operation: item.operation,
|
||||
matchColumns: item.matchColumns,
|
||||
openStream:
|
||||
item.openStream ||
|
||||
(() => jsonLinesReader({ fileName: path.join(resolveArchiveFolder(archive), `${item.name}.jsonl`) })),
|
||||
item.openStream || (() => jsonLinesReader({ fileName: path.join(sourceDir, `${item.name}.jsonl`) })),
|
||||
})),
|
||||
stream,
|
||||
copyStream,
|
||||
|
||||
@@ -1,6 +1,26 @@
|
||||
const generateDeploySql = require('./generateDeploySql');
|
||||
const executeQuery = require('./executeQuery');
|
||||
const { ScriptDrivedDeployer } = require('dbgate-datalib');
|
||||
const connectUtility = require('../utility/connectUtility');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const loadModelFolder = require('../utility/loadModelFolder');
|
||||
const crypto = require('crypto');
|
||||
|
||||
/**
|
||||
* Deploys database model stored in modelFolder (table as yamls) to database
|
||||
* @param {object} options
|
||||
* @param {connectionType} options.connection - connection object
|
||||
* @param {object} options.systemConnection - system connection (result of driver.connect). If not provided, new connection will be created
|
||||
* @param {object} options.driver - driver object. If not provided, it will be loaded from connection
|
||||
* @param {object} options.analysedStructure - analysed structure of the database. If not provided, it will be loaded
|
||||
* @param {string} options.modelFolder - folder with model files (YAML files for tables, SQL files for views, procedures, ...)
|
||||
* @param {import('dbgate-tools').DatabaseModelFile[]} options.loadedDbModel - loaded database model - collection of yaml and SQL files loaded into array
|
||||
* @param {function[]} options.modelTransforms - array of functions for transforming model
|
||||
* @param {object} options.dbdiffOptionsExtra - extra options for dbdiff
|
||||
* @param {string} options.ignoreNameRegex - regex for ignoring objects by name
|
||||
* @param {string} options.targetSchema - target schema for deployment
|
||||
* @param {number} options.maxMissingTablesRatio - maximum ratio of missing tables in database. Safety check, if missing ratio is highe, deploy is stopped (preventing accidental drop of all tables)
|
||||
*/
|
||||
async function deployDb({
|
||||
connection,
|
||||
systemConnection,
|
||||
@@ -10,19 +30,44 @@ async function deployDb({
|
||||
loadedDbModel,
|
||||
modelTransforms,
|
||||
dbdiffOptionsExtra,
|
||||
ignoreNameRegex = '',
|
||||
targetSchema = null,
|
||||
maxMissingTablesRatio = undefined,
|
||||
}) {
|
||||
const { sql } = await generateDeploySql({
|
||||
connection,
|
||||
systemConnection,
|
||||
driver,
|
||||
analysedStructure,
|
||||
modelFolder,
|
||||
loadedDbModel,
|
||||
modelTransforms,
|
||||
dbdiffOptionsExtra,
|
||||
});
|
||||
// console.log('RUNNING DEPLOY SCRIPT:', sql);
|
||||
await executeQuery({ connection, systemConnection, driver, sql, logScriptItems: true });
|
||||
if (!driver) driver = requireEngineDriver(connection);
|
||||
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
|
||||
|
||||
try {
|
||||
const scriptDeployer = new ScriptDrivedDeployer(
|
||||
dbhan,
|
||||
driver,
|
||||
Array.isArray(loadedDbModel) ? loadedDbModel : modelFolder ? await loadModelFolder(modelFolder) : [],
|
||||
crypto
|
||||
);
|
||||
await scriptDeployer.runPre();
|
||||
|
||||
const { sql } = await generateDeploySql({
|
||||
connection,
|
||||
systemConnection: dbhan,
|
||||
driver,
|
||||
analysedStructure,
|
||||
modelFolder,
|
||||
loadedDbModel,
|
||||
modelTransforms,
|
||||
dbdiffOptionsExtra,
|
||||
ignoreNameRegex,
|
||||
targetSchema,
|
||||
maxMissingTablesRatio,
|
||||
});
|
||||
// console.log('RUNNING DEPLOY SCRIPT:', sql);
|
||||
await executeQuery({ connection, systemConnection: dbhan, driver, sql, logScriptItems: true });
|
||||
|
||||
await scriptDeployer.runPost();
|
||||
} finally {
|
||||
if (!systemConnection) {
|
||||
await driver.close(dbhan);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = deployDb;
|
||||
|
||||
@@ -0,0 +1,51 @@
|
||||
const executeQuery = require('./executeQuery');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const connectUtility = require('../utility/connectUtility');
|
||||
const { getLogger, extendDatabaseInfo } = require('dbgate-tools');
|
||||
|
||||
const logger = getLogger('dropAllDbObjects');
|
||||
|
||||
/**
|
||||
* Drops all database objects
|
||||
* @param {object} options
|
||||
* @param {connectionType} options.connection - connection object
|
||||
* @param {object} options.systemConnection - system connection (result of driver.connect). If not provided, new connection will be created
|
||||
* @param {object} options.driver - driver object. If not provided, it will be loaded from connection
|
||||
* @param {object} options.analysedStructure - analysed structure of the database. If not provided, it will be loaded
|
||||
* @returns {Promise}
|
||||
*/
|
||||
async function dropAllDbObjects({ connection, systemConnection, driver, analysedStructure }) {
|
||||
if (!driver) driver = requireEngineDriver(connection);
|
||||
|
||||
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
|
||||
|
||||
logger.info(`Connected.`);
|
||||
|
||||
if (!analysedStructure) {
|
||||
analysedStructure = await driver.analyseFull(dbhan);
|
||||
}
|
||||
|
||||
analysedStructure = extendDatabaseInfo(analysedStructure);
|
||||
|
||||
const dmp = driver.createDumper();
|
||||
|
||||
for (const table of analysedStructure.tables) {
|
||||
for (const fk of table.foreignKeys) {
|
||||
dmp.dropForeignKey(fk);
|
||||
}
|
||||
}
|
||||
for (const table of analysedStructure.tables) {
|
||||
dmp.dropTable(table);
|
||||
}
|
||||
for (const field of Object.keys(analysedStructure)) {
|
||||
if (dmp.getSqlObjectSqlName(field)) {
|
||||
for (const obj of analysedStructure[field]) {
|
||||
dmp.dropSqlObject(obj);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await executeQuery({ connection, systemConnection, driver, sql: dmp.s, logScriptItems: true });
|
||||
}
|
||||
|
||||
module.exports = dropAllDbObjects;
|
||||
@@ -1,14 +1,26 @@
|
||||
const fs = require('fs-extra');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const connectUtility = require('../utility/connectUtility');
|
||||
const { getLogger, getLimitedQuery } = require('dbgate-tools');
|
||||
|
||||
const logger = getLogger('execQuery');
|
||||
|
||||
/**
|
||||
* Executes SQL query
|
||||
* @param {object} options
|
||||
* @param {connectionType} [options.connection] - connection object
|
||||
* @param {object} [options.systemConnection] - system connection (result of driver.connect). If not provided, new connection will be created
|
||||
* @param {object} [options.driver] - driver object. If not provided, it will be loaded from connection
|
||||
* @param {string} [options.sql] - SQL query
|
||||
* @param {string} [options.sqlFile] - SQL file
|
||||
* @param {boolean} [options.logScriptItems] - whether to log script items instead of whole script
|
||||
*/
|
||||
async function executeQuery({
|
||||
connection = undefined,
|
||||
systemConnection = undefined,
|
||||
driver = undefined,
|
||||
sql,
|
||||
sqlFile = undefined,
|
||||
logScriptItems = false,
|
||||
}) {
|
||||
if (!logScriptItems) {
|
||||
@@ -18,6 +30,11 @@ async function executeQuery({
|
||||
if (!driver) driver = requireEngineDriver(connection);
|
||||
const dbhan = systemConnection || (await connectUtility(driver, connection, 'script'));
|
||||
|
||||
if (sqlFile) {
|
||||
logger.debug(`Loading SQL file ${sqlFile}`);
|
||||
sql = await fs.readFile(sqlFile, { encoding: 'utf-8' });
|
||||
}
|
||||
|
||||
try {
|
||||
logger.info(`Connected.`);
|
||||
|
||||
|
||||
@@ -6,11 +6,30 @@ const {
|
||||
extendDatabaseInfo,
|
||||
modelCompareDbDiffOptions,
|
||||
enrichWithPreloadedRows,
|
||||
skipNamesInStructureByRegex,
|
||||
replaceSchemaInStructure,
|
||||
filterStructureBySchema,
|
||||
skipDbGateInternalObjects,
|
||||
} = require('dbgate-tools');
|
||||
const importDbModel = require('../utility/importDbModel');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const connectUtility = require('../utility/connectUtility');
|
||||
|
||||
/**
|
||||
* Generates query for deploying model into database
|
||||
* @param {object} options
|
||||
* @param {connectionType} options.connection - connection object
|
||||
* @param {object} options.systemConnection - system connection (result of driver.connect). If not provided, new connection will be created
|
||||
* @param {object} options.driver - driver object. If not provided, it will be loaded from connection
|
||||
* @param {object} options.analysedStructure - analysed structure of the database. If not provided, it will be loaded
|
||||
* @param {string} options.modelFolder - folder with model files (YAML files for tables, SQL files for views, procedures, ...)
|
||||
* @param {import('dbgate-tools').DatabaseModelFile[]} options.loadedDbModel - loaded database model - collection of yaml and SQL files loaded into array
|
||||
* @param {function[]} options.modelTransforms - array of functions for transforming model
|
||||
* @param {object} options.dbdiffOptionsExtra - extra options for dbdiff
|
||||
* @param {string} options.ignoreNameRegex - regex for ignoring objects by name
|
||||
* @param {string} options.targetSchema - target schema for deployment
|
||||
* @param {number} options.maxMissingTablesRatio - maximum ratio of missing tables in database. Safety check, if missing ratio is highe, deploy is stopped (preventing accidental drop of all tables)
|
||||
*/
|
||||
async function generateDeploySql({
|
||||
connection,
|
||||
systemConnection = undefined,
|
||||
@@ -20,10 +39,21 @@ async function generateDeploySql({
|
||||
loadedDbModel = undefined,
|
||||
modelTransforms = undefined,
|
||||
dbdiffOptionsExtra = {},
|
||||
ignoreNameRegex = '',
|
||||
targetSchema = null,
|
||||
maxMissingTablesRatio = undefined,
|
||||
}) {
|
||||
if (!driver) driver = requireEngineDriver(connection);
|
||||
|
||||
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
|
||||
if (
|
||||
driver?.dialect?.multipleSchema &&
|
||||
!targetSchema &&
|
||||
dbdiffOptionsExtra?.['schemaMode'] !== 'ignore' &&
|
||||
dbdiffOptionsExtra?.['schemaMode'] !== 'ignoreImplicit'
|
||||
) {
|
||||
throw new Error('targetSchema is required for databases with multiple schemas');
|
||||
}
|
||||
|
||||
try {
|
||||
if (!analysedStructure) {
|
||||
@@ -34,10 +64,21 @@ async function generateDeploySql({
|
||||
? databaseInfoFromYamlModel(loadedDbModel)
|
||||
: await importDbModel(modelFolder);
|
||||
|
||||
if (ignoreNameRegex) {
|
||||
analysedStructure = skipNamesInStructureByRegex(analysedStructure, new RegExp(ignoreNameRegex, 'i'));
|
||||
deployedModelSource = skipNamesInStructureByRegex(deployedModelSource, new RegExp(ignoreNameRegex, 'i'));
|
||||
}
|
||||
analysedStructure = skipDbGateInternalObjects(analysedStructure);
|
||||
|
||||
for (const transform of modelTransforms || []) {
|
||||
deployedModelSource = transform(deployedModelSource);
|
||||
}
|
||||
|
||||
if (targetSchema) {
|
||||
deployedModelSource = replaceSchemaInStructure(deployedModelSource, targetSchema);
|
||||
analysedStructure = filterStructureBySchema(analysedStructure, targetSchema);
|
||||
}
|
||||
|
||||
const deployedModel = generateDbPairingId(extendDatabaseInfo(deployedModelSource));
|
||||
const currentModel = generateDbPairingId(extendDatabaseInfo(analysedStructure));
|
||||
const opts = {
|
||||
@@ -55,6 +96,17 @@ async function generateDeploySql({
|
||||
const currentModelPaired = matchPairedObjects(deployedModel, currentModel, opts);
|
||||
const currentModelPairedPreloaded = await enrichWithPreloadedRows(deployedModel, currentModelPaired, dbhan, driver);
|
||||
|
||||
if (maxMissingTablesRatio != null) {
|
||||
const missingTables = currentModelPaired.tables.filter(
|
||||
x => !deployedModel.tables.find(y => y.pairingId == x.pairingId)
|
||||
);
|
||||
const missingTableCount = missingTables.length;
|
||||
const missingTablesRatio = missingTableCount / (currentModelPaired.tables.length || 1);
|
||||
if (missingTablesRatio > maxMissingTablesRatio) {
|
||||
throw new Error(`Too many missing tables (${missingTablesRatio * 100}%), aborting deploy`);
|
||||
}
|
||||
}
|
||||
|
||||
// console.log('currentModelPairedPreloaded', currentModelPairedPreloaded.tables[0]);
|
||||
// console.log('deployedModel', deployedModel.tables[0]);
|
||||
// console.log('currentModel', currentModel.tables[0]);
|
||||
|
||||
@@ -34,6 +34,7 @@ const dataTypeMapperTransform = require('./dataTypeMapperTransform');
|
||||
const sqlTextReplacementTransform = require('./sqlTextReplacementTransform');
|
||||
const autoIndexForeignKeysTransform = require('./autoIndexForeignKeysTransform');
|
||||
const generateDeploySql = require('./generateDeploySql');
|
||||
const dropAllDbObjects = require('./dropAllDbObjects');
|
||||
|
||||
const dbgateApi = {
|
||||
queryReader,
|
||||
@@ -71,6 +72,7 @@ const dbgateApi = {
|
||||
sqlTextReplacementTransform,
|
||||
autoIndexForeignKeysTransform,
|
||||
generateDeploySql,
|
||||
dropAllDbObjects,
|
||||
};
|
||||
|
||||
requirePlugin.initializeDbgateApi(dbgateApi);
|
||||
|
||||
@@ -33,6 +33,14 @@ class ParseStream extends stream.Transform {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reader function, which reads JSNOL file or URL. JSONL format - text file, every line is JSON encoded row.
|
||||
* @param {Object} options
|
||||
* @param {string} options.fileName - file name or URL
|
||||
* @param {string} options.encoding - encoding of the file
|
||||
* @param {number} options.limitRows - maximum number of rows to read
|
||||
* @returns {Promise<readerType>} - reader object
|
||||
*/
|
||||
async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) {
|
||||
logger.info(`Reading file ${fileName}`);
|
||||
|
||||
|
||||
@@ -24,6 +24,14 @@ class StringifyStream extends stream.Transform {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns writer object for {@link copyStream} function. This writer object writes data to JSONL file. JSONL format - text file, every line is JSON encoded row, used eg. by MongoDB.
|
||||
* @param {object} options
|
||||
* @param {string} options.fileName - file name
|
||||
* @param {string} [options.encoding] - encoding of the file
|
||||
* @param {boolean} [options.header] - whether to write header. Header is JSON describing source table structure. Header is specific to DbGate, if you want eg. to import data to MongoDB, you should not write header.
|
||||
* @returns {Promise<writerType>} - writer object
|
||||
*/
|
||||
async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true }) {
|
||||
logger.info(`Writing file ${fileName}`);
|
||||
const stringify = new StringifyStream({ header });
|
||||
|
||||
@@ -45,6 +45,17 @@ class ParseStream extends stream.Transform {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates reader object for JSON file for {@link copyStream} function.
|
||||
* @param {object} options
|
||||
* @param {string} options.fileName - file name or URL
|
||||
* @param {string} options.jsonStyle - 'object' or 'array'
|
||||
* @param {string} [options.keyField] - key field for object style
|
||||
* @param {string} [options.rootField] - root field for object style
|
||||
* @param {string} [options.encoding] - encoding of the file
|
||||
* @param {number} [options.limitRows] - maximum number of rows to read
|
||||
* @returns {Promise<readerType>} - reader object
|
||||
*/
|
||||
async function jsonReader({
|
||||
fileName,
|
||||
jsonStyle,
|
||||
|
||||
@@ -85,6 +85,16 @@ class StringifyStream extends stream.Transform {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns writer object for {@link copyStream} function. This writer object writes data to JSON file.
|
||||
* @param {object} options
|
||||
* @param {string} options.fileName - file name
|
||||
* @param {string} [options.jsonStyle] - 'object' or 'array'
|
||||
* @param {string} [options.keyField] - key field for object style
|
||||
* @param {string} [options.rootField] - root field for object style
|
||||
* @param {string} [options.encoding] - encoding of the file
|
||||
* @returns {Promise<writerType>} - writer object
|
||||
*/
|
||||
async function jsonWriter({ fileName, jsonStyle, keyField = '_key', rootField, encoding = 'utf-8' }) {
|
||||
logger.info(`Writing file ${fileName}`);
|
||||
const stringify = new StringifyStream({ jsonStyle, keyField, rootField });
|
||||
|
||||
@@ -3,6 +3,15 @@ const connectUtility = require('../utility/connectUtility');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const logger = getLogger('queryReader');
|
||||
|
||||
/**
|
||||
* Returns reader object for {@link copyStream} function. This reader object reads data from query.
|
||||
* @param {object} options
|
||||
* @param {connectionType} options.connection - connection object
|
||||
* @param {string} options.query - SQL query
|
||||
* @param {string} [options.queryType] - query type
|
||||
* @param {string} [options.sql] - SQL query. obsolete; use query instead
|
||||
* @returns {Promise<readerType>} - reader object
|
||||
*/
|
||||
async function queryReader({
|
||||
connection,
|
||||
query,
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const { pluginsdir, packagedPluginsDir, getPluginBackendPath } = require('../utility/directories');
|
||||
const nativeModules = require('../nativeModules');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const authProxy = require('../utility/authProxy');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
@@ -11,7 +10,6 @@ const loadedPlugins = {};
|
||||
|
||||
const dbgateEnv = {
|
||||
dbgateApi: null,
|
||||
nativeModules,
|
||||
platformInfo,
|
||||
authProxy,
|
||||
};
|
||||
|
||||
@@ -3,6 +3,15 @@ const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const connectUtility = require('../utility/connectUtility');
|
||||
const logger = getLogger('tableReader');
|
||||
|
||||
/**
|
||||
* Creates reader object for {@link copyStream} function. This reader object reads data from table or view.
|
||||
* @param {object} options
|
||||
* @param {connectionType} options.connection - connection object
|
||||
* @param {object} options.systemConnection - system connection (result of driver.connect). If not provided, new connection will be created
|
||||
* @param {string} options.pureName - table name
|
||||
* @param {string} options.schemaName - schema name
|
||||
* @returns {Promise<readerType>} - reader object
|
||||
*/
|
||||
async function tableReader({ connection, systemConnection, pureName, schemaName }) {
|
||||
const driver = requireEngineDriver(connection);
|
||||
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
|
||||
|
||||
@@ -3,6 +3,20 @@ const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const connectUtility = require('../utility/connectUtility');
|
||||
const logger = getLogger('tableWriter');
|
||||
|
||||
/**
|
||||
* Creates writer object for {@link copyStream} function. This writer object writes data to table. Table could be created if not exists.
|
||||
* @param {object} options
|
||||
* @param {connectionType} options.connection - connection object
|
||||
* @param {object} options.systemConnection - system connection (result of driver.connect). If not provided, new connection will be created
|
||||
* @param {object} options.driver - driver object. If not provided, it will be loaded from connection
|
||||
* @param {string} options.pureName - table name
|
||||
* @param {string} options.schemaName - schema name
|
||||
* @param {boolean} options.dropIfExists - drop table if exists
|
||||
* @param {boolean} options.truncate - truncate table before insert
|
||||
* @param {boolean} options.createIfNotExists - create table if not exists
|
||||
* @param {boolean} options.commitAfterInsert - commit transaction after insert
|
||||
* @returns {Promise<writerType>} - writer object
|
||||
*/
|
||||
async function tableWriter({ connection, schemaName, pureName, driver, systemConnection, ...options }) {
|
||||
logger.info(`Writing table ${fullNameToString({ schemaName, pureName })}`);
|
||||
|
||||
|
||||
@@ -0,0 +1,27 @@
|
||||
/**
|
||||
* Reader (input) object for {@link copyStream} function
|
||||
* @typedef {Object} readerType
|
||||
*
|
||||
*/
|
||||
|
||||
/**
|
||||
* Writer (output) object for {@link copyStream} function
|
||||
* @typedef {Object} writerType
|
||||
*
|
||||
*/
|
||||
|
||||
/**
|
||||
* Typ uživatelské role.
|
||||
* @typedef {('mysql@dbgate-plugin-mysql' | 'mariadb@dbgate-plugin-mysql' | 'postgres@dbgate-plugin-postgres'
|
||||
* |'sqlite@dbgate-plugin-sqlite' | 'oracle@dbgate-plugin-oracle' | 'cockroach@dbgate-plugin-postgres' | 'redshift@dbgate-plugin-postgres')} engineType
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} connectionType
|
||||
* @property {engineType} engine
|
||||
* @property {string} server
|
||||
* @property {string} user
|
||||
* @property {string} password
|
||||
* @property {string} database
|
||||
* @property {string} port
|
||||
*/
|
||||
@@ -23,6 +23,12 @@ class JsonLinesDatabase {
|
||||
await fs.writeFile(this.filename, this.data.map(x => JSON.stringify(x)).join('\n'));
|
||||
}
|
||||
|
||||
async unload() {
|
||||
this.data = [];
|
||||
this.loadedOk = false;
|
||||
this.loadPerformed = false;
|
||||
}
|
||||
|
||||
async _ensureLoaded() {
|
||||
if (!this.loadPerformed) {
|
||||
await lock.acquire('reader', async () => {
|
||||
@@ -111,6 +117,15 @@ class JsonLinesDatabase {
|
||||
return removed;
|
||||
}
|
||||
|
||||
async transformAll(transformFunction) {
|
||||
await this._ensureLoaded();
|
||||
const newData = transformFunction(this.data);
|
||||
if (newData) {
|
||||
this.data = newData;
|
||||
await this._save();
|
||||
}
|
||||
}
|
||||
|
||||
// async _openReader() {
|
||||
// return new Promise((resolve, reject) =>
|
||||
// lineReader.open(this.filename, (err, reader) => {
|
||||
|
||||
@@ -14,7 +14,6 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
const { Client } = require('ssh2');
|
||||
const net = require('net');
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
@@ -147,6 +146,7 @@ class SSHConnection {
|
||||
}
|
||||
|
||||
async connect(host, stream) {
|
||||
const { Client } = require('ssh2');
|
||||
this.debug('Connecting to "%s"', host);
|
||||
const connection = new Client();
|
||||
return new Promise(async (resolve, reject) => {
|
||||
|
||||
@@ -12,7 +12,12 @@ function checkLicenseKey(key) {
|
||||
};
|
||||
}
|
||||
|
||||
function isProApp() {
|
||||
return false;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
checkLicense,
|
||||
checkLicenseKey,
|
||||
isProApp,
|
||||
};
|
||||
|
||||
@@ -96,6 +96,9 @@ function packagedPluginsDir() {
|
||||
// return path.resolve(__dirname, '../../plugins');
|
||||
// }
|
||||
}
|
||||
if (processArgs.runE2eTests) {
|
||||
return path.resolve('packer/build/plugins');
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -110,7 +113,12 @@ function getPluginBackendPath(packageName) {
|
||||
return path.join(packagedPluginsDir(), packageName, 'dist', 'backend.js');
|
||||
}
|
||||
|
||||
return path.join(pluginsdir(), packageName, 'dist', 'backend.js');
|
||||
const res = path.join(pluginsdir(), packageName, 'dist', 'backend.js')
|
||||
if (fs.existsSync(res)) {
|
||||
return res;
|
||||
}
|
||||
|
||||
return require.resolve(packageName);
|
||||
}
|
||||
|
||||
let archiveLinksCache = {};
|
||||
|
||||
@@ -0,0 +1,80 @@
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
const { getSchemasUsedByStructure } = require('dbgate-tools');
|
||||
|
||||
async function exportDbModelSql(dbModel, driver, outputDir, outputFile) {
|
||||
const { tables, views, procedures, functions, triggers, matviews } = dbModel;
|
||||
|
||||
const usedSchemas = getSchemasUsedByStructure(dbModel);
|
||||
const useSchemaDir = usedSchemas.length > 1;
|
||||
|
||||
const createdDirs = new Set();
|
||||
async function ensureDir(dir) {
|
||||
if (!createdDirs.has(dir)) {
|
||||
await fs.mkdir(dir, { recursive: true });
|
||||
createdDirs.add(dir);
|
||||
}
|
||||
}
|
||||
|
||||
async function writeLists(writeList) {
|
||||
await writeList(views, 'views');
|
||||
await writeList(procedures, 'procedures');
|
||||
await writeList(functions, 'functions');
|
||||
await writeList(triggers, 'triggers');
|
||||
await writeList(matviews, 'matviews');
|
||||
}
|
||||
|
||||
if (outputFile) {
|
||||
const dmp = driver.createDumper();
|
||||
for (const table of tables || []) {
|
||||
dmp.createTable({
|
||||
...table,
|
||||
foreignKeys: [],
|
||||
dependencies: [],
|
||||
});
|
||||
}
|
||||
for (const table of tables || []) {
|
||||
for (const fk of table.foreignKeys || []) {
|
||||
dmp.createForeignKey(fk);
|
||||
}
|
||||
}
|
||||
writeLists((list, folder) => {
|
||||
for (const obj of list || []) {
|
||||
dmp.createSqlObject(obj);
|
||||
}
|
||||
});
|
||||
|
||||
const script = dmp.s;
|
||||
await fs.writeFile(outputFile, script);
|
||||
}
|
||||
|
||||
if (outputDir) {
|
||||
for (const table of tables || []) {
|
||||
const tablesDir = useSchemaDir
|
||||
? path.join(outputDir, table.schemaName ?? 'default', 'tables')
|
||||
: path.join(outputDir, 'tables');
|
||||
await ensureDir(tablesDir);
|
||||
const dmp = driver.createDumper();
|
||||
dmp.createTable({
|
||||
...table,
|
||||
foreignKeys: [],
|
||||
dependencies: [],
|
||||
});
|
||||
await fs.writeFile(path.join(tablesDir, `${table.pureName}.sql`), dmp.s);
|
||||
}
|
||||
|
||||
await writeLists(async (list, folder) => {
|
||||
for (const obj of list || []) {
|
||||
const objdir = useSchemaDir
|
||||
? path.join(outputDir, obj.schemaName ?? 'default', folder)
|
||||
: path.join(outputDir, folder);
|
||||
await ensureDir(objdir);
|
||||
const dmp = driver.createDumper();
|
||||
dmp.createSqlObject(obj);
|
||||
await fs.writeFile(path.join(objdir, `${obj.pureName}.sql`), dmp.s);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = exportDbModelSql;
|
||||
@@ -1,28 +1,8 @@
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
const yaml = require('js-yaml');
|
||||
const { databaseInfoFromYamlModel, DatabaseAnalyser } = require('dbgate-tools');
|
||||
const { startsWith } = require('lodash');
|
||||
const { archivedir, resolveArchiveFolder } = require('./directories');
|
||||
const loadFilesRecursive = require('./loadFilesRecursive');
|
||||
const loadModelFolder = require('./loadModelFolder');
|
||||
|
||||
async function importDbModel(inputDir) {
|
||||
const files = [];
|
||||
|
||||
const dir = inputDir.startsWith('archive:') ? resolveArchiveFolder(inputDir.substring('archive:'.length)) : inputDir;
|
||||
|
||||
for (const name of await loadFilesRecursive(dir)) {
|
||||
if (name.endsWith('.table.yaml') || name.endsWith('.sql')) {
|
||||
const text = await fs.readFile(path.join(dir, name), { encoding: 'utf-8' });
|
||||
|
||||
files.push({
|
||||
name: path.parse(name).base,
|
||||
text,
|
||||
json: name.endsWith('.yaml') ? yaml.load(text) : null,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const files = await loadModelFolder(inputDir);
|
||||
return databaseInfoFromYamlModel(files);
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,29 @@
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
const yaml = require('js-yaml');
|
||||
const { resolveArchiveFolder } = require('./directories');
|
||||
const loadFilesRecursive = require('./loadFilesRecursive');
|
||||
|
||||
async function loadModelFolder(inputDir) {
|
||||
const files = [];
|
||||
|
||||
const dir = inputDir.startsWith('archive:')
|
||||
? resolveArchiveFolder(inputDir.substring('archive:'.length))
|
||||
: path.resolve(inputDir);
|
||||
|
||||
for (const name of await loadFilesRecursive(dir)) {
|
||||
if (name.endsWith('.table.yaml') || name.endsWith('.sql')) {
|
||||
const text = await fs.readFile(path.join(dir, name), { encoding: 'utf-8' });
|
||||
|
||||
files.push({
|
||||
name: path.parse(name).base,
|
||||
text,
|
||||
json: name.endsWith('.yaml') ? yaml.load(text) : null,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
module.exports = loadModelFolder;
|
||||
@@ -43,9 +43,17 @@ const platformInfo = {
|
||||
platform,
|
||||
runningInWebpack: !!process.env.WEBPACK_DEV_SERVER_URL,
|
||||
allowShellConnection:
|
||||
(!processArgs.listenApiChild && !isNpmDist) || !!process.env.SHELL_CONNECTION || !!isElectron() || !!isDbModel,
|
||||
(!processArgs.listenApiChild && !isNpmDist) ||
|
||||
!!process.env.SHELL_CONNECTION ||
|
||||
!!isElectron() ||
|
||||
!!isDbModel ||
|
||||
isDevMode,
|
||||
allowShellScripting:
|
||||
(!processArgs.listenApiChild && !isNpmDist) || !!process.env.SHELL_SCRIPTING || !!isElectron() || !!isDbModel,
|
||||
(!processArgs.listenApiChild && !isNpmDist) ||
|
||||
!!process.env.SHELL_SCRIPTING ||
|
||||
!!isElectron() ||
|
||||
!!isDbModel ||
|
||||
isDevMode,
|
||||
allowConnectionFromEnvVariables: !!isDbModel,
|
||||
defaultKeyfile: path.join(os.homedir(), '.ssh/id_rsa'),
|
||||
isAwsUbuntuLayout,
|
||||
|
||||
@@ -14,18 +14,19 @@ const workspaceDir = getNamedArg('--workspace-dir');
|
||||
const processDisplayName = getNamedArg('--process-display-name');
|
||||
const listenApi = process.argv.includes('--listen-api');
|
||||
const listenApiChild = process.argv.includes('--listen-api-child') || listenApi;
|
||||
const runE2eTests = process.argv.includes('--run-e2e-tests');
|
||||
|
||||
function getPassArgs() {
|
||||
const res = [];
|
||||
if (global['NATIVE_MODULES']) {
|
||||
res.push('--native-modules', global['NATIVE_MODULES']);
|
||||
}
|
||||
if (global['PLUGINS_DIR']) {
|
||||
res.push('--plugins-dir', global['PLUGINS_DIR']);
|
||||
}
|
||||
if (listenApiChild) {
|
||||
res.push('listen-api-child');
|
||||
}
|
||||
if (runE2eTests) {
|
||||
res.push('--run-e2e-tests');
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
@@ -39,4 +40,5 @@ module.exports = {
|
||||
listenApi,
|
||||
listenApiChild,
|
||||
processDisplayName,
|
||||
runE2eTests,
|
||||
};
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
var webpack = require('webpack');
|
||||
var path = require('path');
|
||||
var getBundleExternals = require('../../common/getBundleExternals');
|
||||
|
||||
var externals = getBundleExternals();
|
||||
|
||||
var config = {
|
||||
context: __dirname + '/src',
|
||||
@@ -29,27 +32,28 @@ var config = {
|
||||
},
|
||||
],
|
||||
},
|
||||
plugins: [
|
||||
new webpack.IgnorePlugin({
|
||||
checkResource(resource) {
|
||||
const lazyImports = ['uws'];
|
||||
if (!lazyImports.includes(resource)) {
|
||||
return false;
|
||||
}
|
||||
try {
|
||||
require.resolve(resource);
|
||||
} catch (err) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
}),
|
||||
],
|
||||
externals: {
|
||||
'better-sqlite3': 'commonjs better-sqlite3',
|
||||
'oracledb': 'commonjs oracledb',
|
||||
'msnodesqlv8': 'commonjs msnodesqlv8',
|
||||
},
|
||||
// plugins: [
|
||||
// new webpack.IgnorePlugin({
|
||||
// checkResource(resource) {
|
||||
// const lazyImports = ['uws'];
|
||||
// if (!lazyImports.includes(resource)) {
|
||||
// return false;
|
||||
// }
|
||||
// try {
|
||||
// require.resolve(resource);
|
||||
// } catch (err) {
|
||||
// return true;
|
||||
// }
|
||||
// return false;
|
||||
// },
|
||||
// }),
|
||||
// ],
|
||||
// externals: {
|
||||
// 'better-sqlite3': 'commonjs better-sqlite3',
|
||||
// 'oracledb': 'commonjs oracledb',
|
||||
// 'msnodesqlv8': 'commonjs msnodesqlv8',
|
||||
// },
|
||||
externals,
|
||||
};
|
||||
|
||||
module.exports = config;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"version": "5.0.0-alpha.1",
|
||||
"version": "6.0.0-alpha.1",
|
||||
"name": "dbgate-datalib",
|
||||
"main": "lib/index.js",
|
||||
"typings": "lib/index.d.ts",
|
||||
@@ -13,13 +13,13 @@
|
||||
"lib"
|
||||
],
|
||||
"dependencies": {
|
||||
"dbgate-sqltree": "^5.0.0-alpha.1",
|
||||
"dbgate-tools": "^5.0.0-alpha.1",
|
||||
"dbgate-filterparser": "^5.0.0-alpha.1",
|
||||
"dbgate-sqltree": "^6.0.0-alpha.1",
|
||||
"dbgate-tools": "^6.0.0-alpha.1",
|
||||
"dbgate-filterparser": "^6.0.0-alpha.1",
|
||||
"uuid": "^3.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"dbgate-types": "^5.0.0-alpha.1",
|
||||
"dbgate-types": "^6.0.0-alpha.1",
|
||||
"@types/node": "^13.7.0",
|
||||
"jest": "^28.1.3",
|
||||
"ts-jest": "^28.0.7",
|
||||
|
||||
@@ -21,6 +21,7 @@ export interface DataDuplicatorItem {
|
||||
export interface DataDuplicatorOptions {
|
||||
rollbackAfterFinish?: boolean;
|
||||
skipRowsWithUnresolvedRefs?: boolean;
|
||||
setNullForUnresolvedNullableRefs?: boolean;
|
||||
}
|
||||
|
||||
class DuplicatorReference {
|
||||
@@ -36,9 +37,19 @@ class DuplicatorReference {
|
||||
}
|
||||
}
|
||||
|
||||
class DuplicatorWeakReference {
|
||||
constructor(public base: DuplicatorItemHolder, public ref: TableInfo, public foreignKey: ForeignKeyInfo) {}
|
||||
|
||||
get columnName() {
|
||||
return this.foreignKey.columns[0].columnName;
|
||||
}
|
||||
}
|
||||
|
||||
class DuplicatorItemHolder {
|
||||
references: DuplicatorReference[] = [];
|
||||
backReferences: DuplicatorReference[] = [];
|
||||
// not mandatory references to entities out of the model
|
||||
weakReferences: DuplicatorWeakReference[] = [];
|
||||
table: TableInfo;
|
||||
isPlanned = false;
|
||||
idMap = {};
|
||||
@@ -55,7 +66,7 @@ class DuplicatorItemHolder {
|
||||
this.autoColumn = this.table.columns.find(x => x.autoIncrement)?.columnName;
|
||||
if (
|
||||
this.table.primaryKey?.columns?.length != 1 ||
|
||||
this.table.primaryKey?.columns?.[0].columnName != this.autoColumn
|
||||
this.table.primaryKey?.columns?.[0]?.columnName != this.autoColumn
|
||||
) {
|
||||
this.autoColumn = null;
|
||||
}
|
||||
@@ -65,23 +76,33 @@ class DuplicatorItemHolder {
|
||||
for (const fk of this.table.foreignKeys) {
|
||||
if (fk.columns?.length != 1) continue;
|
||||
const refHolder = this.duplicator.itemHolders.find(y => y.name.toUpperCase() == fk.refTableName.toUpperCase());
|
||||
if (refHolder == null) continue;
|
||||
const isMandatory = this.table.columns.find(x => x.columnName == fk.columns[0]?.columnName)?.notNull;
|
||||
const newref = new DuplicatorReference(this, refHolder, isMandatory, fk);
|
||||
this.references.push(newref);
|
||||
this.refByColumn[newref.columnName] = newref;
|
||||
if (refHolder == null) {
|
||||
if (!isMandatory) {
|
||||
const weakref = new DuplicatorWeakReference(
|
||||
this,
|
||||
this.duplicator.db.tables.find(x => x.pureName == fk.refTableName),
|
||||
fk
|
||||
);
|
||||
this.weakReferences.push(weakref);
|
||||
}
|
||||
} else {
|
||||
const newref = new DuplicatorReference(this, refHolder, isMandatory, fk);
|
||||
this.references.push(newref);
|
||||
this.refByColumn[newref.columnName] = newref;
|
||||
|
||||
refHolder.isReferenced = true;
|
||||
refHolder.isReferenced = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
createInsertObject(chunk) {
|
||||
createInsertObject(chunk, weakrefcols: string[]) {
|
||||
const res = _omit(
|
||||
_pick(
|
||||
chunk,
|
||||
this.table.columns.map(x => x.columnName)
|
||||
),
|
||||
[this.autoColumn, ...this.backReferences.map(x => x.columnName)]
|
||||
[this.autoColumn, ...this.backReferences.map(x => x.columnName), ...weakrefcols]
|
||||
);
|
||||
|
||||
for (const key in res) {
|
||||
@@ -102,6 +123,31 @@ class DuplicatorItemHolder {
|
||||
return res;
|
||||
}
|
||||
|
||||
// returns list of columns that are weak references and are not resolved
|
||||
async getMissingWeakRefsForRow(row): Promise<string[]> {
|
||||
if (!this.duplicator.options.setNullForUnresolvedNullableRefs || !this.weakReferences?.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const qres = await runQueryOnDriver(this.duplicator.pool, this.duplicator.driver, dmp => {
|
||||
dmp.put('^select ');
|
||||
dmp.putCollection(',', this.weakReferences, weakref => {
|
||||
dmp.put(
|
||||
'(^case ^when ^exists (^select * ^from %f where %i = %v) ^then 1 ^else 0 ^end) as %i',
|
||||
weakref.ref,
|
||||
weakref.foreignKey.columns[0].refColumnName,
|
||||
row[weakref.foreignKey.columns[0].columnName],
|
||||
weakref.foreignKey.columns[0].columnName
|
||||
);
|
||||
});
|
||||
if (this.duplicator.driver.dialect.requireFromDual) {
|
||||
dmp.put(' ^from ^dual');
|
||||
}
|
||||
});
|
||||
const qrow = qres.rows[0];
|
||||
return this.weakReferences.filter(x => qrow[x.columnName] == 0).map(x => x.columnName);
|
||||
}
|
||||
|
||||
async runImport() {
|
||||
const readStream = await this.item.openStream();
|
||||
const driver = this.duplicator.driver;
|
||||
@@ -112,6 +158,8 @@ class DuplicatorItemHolder {
|
||||
let skipped = 0;
|
||||
let lastLogged = new Date();
|
||||
|
||||
const existingWeakRefs = {};
|
||||
|
||||
const writeStream = createAsyncWriteStream(this.duplicator.stream, {
|
||||
processItem: async chunk => {
|
||||
if (chunk.__isStreamHeader) {
|
||||
@@ -120,7 +168,8 @@ class DuplicatorItemHolder {
|
||||
|
||||
const doCopy = async () => {
|
||||
// console.log('chunk', this.name, JSON.stringify(chunk));
|
||||
const insertedObj = this.createInsertObject(chunk);
|
||||
const weakrefcols = await this.getMissingWeakRefsForRow(chunk);
|
||||
const insertedObj = this.createInsertObject(chunk, weakrefcols);
|
||||
// console.log('insertedObj', this.name, JSON.stringify(insertedObj));
|
||||
if (insertedObj == null) {
|
||||
skipped += 1;
|
||||
@@ -148,6 +197,7 @@ class DuplicatorItemHolder {
|
||||
res = await runQueryOnDriver(pool, driver, dmp => dmp.selectScopeIdentity(this.table));
|
||||
}
|
||||
// console.log('IDRES', JSON.stringify(res));
|
||||
// console.log('*********** ENTRIES OF', res?.rows?.[0]);
|
||||
const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
|
||||
if (resId != null) {
|
||||
this.idMap[chunk[this.autoColumn]] = resId;
|
||||
|
||||
@@ -0,0 +1,175 @@
|
||||
import { DatabaseModelFile, extractErrorLogData, getLogger, runCommandOnDriver, runQueryOnDriver } from 'dbgate-tools';
|
||||
import { EngineDriver } from 'dbgate-types';
|
||||
import _sortBy from 'lodash/sortBy';
|
||||
|
||||
const logger = getLogger('ScriptDrivedDeployer');
|
||||
|
||||
interface DeployScriptJournalItem {
|
||||
id: number;
|
||||
name: string;
|
||||
category: string;
|
||||
first_run_date: string;
|
||||
last_run_date: string;
|
||||
script_hash: string;
|
||||
}
|
||||
|
||||
export class ScriptDrivedDeployer {
|
||||
predeploy: DatabaseModelFile[] = [];
|
||||
uninstall: DatabaseModelFile[] = [];
|
||||
install: DatabaseModelFile[] = [];
|
||||
once: DatabaseModelFile[] = [];
|
||||
postdeploy: DatabaseModelFile[] = [];
|
||||
isEmpty = false;
|
||||
|
||||
journalItems: DeployScriptJournalItem[] = [];
|
||||
|
||||
constructor(public dbhan: any, public driver: EngineDriver, public files: DatabaseModelFile[], public crypto: any) {
|
||||
this.predeploy = files.filter(x => x.name.endsWith('.predeploy.sql'));
|
||||
this.uninstall = files.filter(x => x.name.endsWith('.uninstall.sql'));
|
||||
this.install = files.filter(x => x.name.endsWith('.install.sql'));
|
||||
this.once = files.filter(x => x.name.endsWith('.once.sql'));
|
||||
this.postdeploy = files.filter(x => x.name.endsWith('.postdeploy.sql'));
|
||||
this.isEmpty =
|
||||
this.predeploy.length === 0 &&
|
||||
this.uninstall.length === 0 &&
|
||||
this.install.length === 0 &&
|
||||
this.once.length === 0 &&
|
||||
this.postdeploy.length === 0;
|
||||
}
|
||||
|
||||
async loadJournalItems() {
|
||||
try {
|
||||
const { rows } = await runQueryOnDriver(this.dbhan, this.driver, dmp =>
|
||||
dmp.put('select * from ~dbgate_deploy_journal')
|
||||
);
|
||||
this.journalItems = rows;
|
||||
logger.debug(`Loaded ${rows.length} items from DbGate deploy journal`);
|
||||
} catch (err) {
|
||||
logger.warn(
|
||||
extractErrorLogData(err),
|
||||
'Error loading DbGate deploy journal, creating table dbgate_deploy_journal'
|
||||
);
|
||||
const dmp = this.driver.createDumper();
|
||||
dmp.createTable({
|
||||
pureName: 'dbgate_deploy_journal',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true, pureName: 'dbgate_deploy_journal' },
|
||||
{ columnName: 'name', dataType: 'varchar(100)', notNull: true, pureName: 'dbgate_deploy_journal' },
|
||||
{ columnName: 'category', dataType: 'varchar(100)', notNull: true, pureName: 'dbgate_deploy_journal' },
|
||||
{ columnName: 'script_hash', dataType: 'varchar(100)', notNull: true, pureName: 'dbgate_deploy_journal' },
|
||||
{ columnName: 'first_run_date', dataType: 'varchar(100)', notNull: true, pureName: 'dbgate_deploy_journal' },
|
||||
{ columnName: 'last_run_date', dataType: 'varchar(100)', notNull: true, pureName: 'dbgate_deploy_journal' },
|
||||
{ columnName: 'run_count', dataType: 'int', notNull: true, pureName: 'dbgate_deploy_journal' },
|
||||
],
|
||||
foreignKeys: [],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
constraintType: 'primaryKey',
|
||||
pureName: 'dbgate_deploy_journal',
|
||||
},
|
||||
});
|
||||
await this.driver.query(this.dbhan, dmp.s, { discardResult: true });
|
||||
}
|
||||
}
|
||||
|
||||
async runPre() {
|
||||
// don't create journal table if no scripts are present
|
||||
if (this.isEmpty) return;
|
||||
await this.loadJournalItems();
|
||||
await this.runFiles(this.predeploy, 'predeploy');
|
||||
}
|
||||
|
||||
async runPost() {
|
||||
await this.runFiles(this.install, 'install');
|
||||
await this.runFiles(this.once, 'once');
|
||||
await this.runFiles(this.postdeploy, 'postdeploy');
|
||||
}
|
||||
|
||||
async run() {
|
||||
await this.runPre();
|
||||
await this.runPost();
|
||||
}
|
||||
|
||||
async runFiles(files: DatabaseModelFile[], category: string) {
|
||||
for (const file of _sortBy(files, x => x.name)) {
|
||||
await this.runFile(file, category);
|
||||
}
|
||||
}
|
||||
|
||||
async saveToJournal(file: DatabaseModelFile, category: string, hash: string) {
|
||||
const existing = this.journalItems.find(x => x.name == file.name);
|
||||
if (existing) {
|
||||
await runCommandOnDriver(this.dbhan, this.driver, dmp => {
|
||||
dmp.put(
|
||||
'update ~dbgate_deploy_journal set ~last_run_date = %v, ~script_hash = %v, ~run_count = ~run_count + 1 where ~id = %v',
|
||||
new Date().toISOString(),
|
||||
hash,
|
||||
existing.id
|
||||
);
|
||||
});
|
||||
} else {
|
||||
await runCommandOnDriver(this.dbhan, this.driver, dmp => {
|
||||
dmp.put(
|
||||
'insert into ~dbgate_deploy_journal (~name, ~category, ~first_run_date, ~last_run_date, ~script_hash, ~run_count) values (%v, %v, %v, %v, %v, 1)',
|
||||
file.name,
|
||||
category,
|
||||
new Date().toISOString(),
|
||||
new Date().toISOString(),
|
||||
hash
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async runFileCore(file: DatabaseModelFile, category: string, hash: string) {
|
||||
if (this.driver.supportsTransactions) {
|
||||
runCommandOnDriver(this.dbhan, this.driver, dmp => dmp.beginTransaction());
|
||||
}
|
||||
|
||||
logger.debug(`Running ${category} script ${file.name}`);
|
||||
try {
|
||||
await this.driver.script(this.dbhan, file.text);
|
||||
await this.saveToJournal(file, category, hash);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), `Error running ${category} script ${file.name}`);
|
||||
if (this.driver.supportsTransactions) {
|
||||
runCommandOnDriver(this.dbhan, this.driver, dmp => dmp.rollbackTransaction());
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.driver.supportsTransactions) {
|
||||
runCommandOnDriver(this.dbhan, this.driver, dmp => dmp.commitTransaction());
|
||||
}
|
||||
}
|
||||
|
||||
async runFile(file: DatabaseModelFile, category: string) {
|
||||
const hash = this.crypto.createHash('md5').update(file.text.trim()).digest('hex');
|
||||
const journalItem = this.journalItems.find(x => x.name == file.name);
|
||||
const isEqual = journalItem && journalItem.script_hash == hash;
|
||||
|
||||
switch (category) {
|
||||
case 'predeploy':
|
||||
case 'postdeploy':
|
||||
await this.runFileCore(file, category, hash);
|
||||
break;
|
||||
case 'once':
|
||||
if (journalItem) return;
|
||||
await this.runFileCore(file, category, hash);
|
||||
break;
|
||||
case 'install':
|
||||
if (isEqual) return;
|
||||
const uninstallFile = this.uninstall.find(x => x.name == file.name.replace('.install.sql', '.uninstall.sql'));
|
||||
if (uninstallFile && journalItem) {
|
||||
// file was previously installed, uninstall first
|
||||
await this.runFileCore(
|
||||
uninstallFile,
|
||||
'uninstall',
|
||||
this.crypto.createHash('md5').update(uninstallFile.text.trim()).digest('hex')
|
||||
);
|
||||
}
|
||||
await this.runFileCore(file, category, hash);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -22,3 +22,4 @@ export * from './DataDuplicator';
|
||||
export * from './FreeTableGridDisplay';
|
||||
export * from './FreeTableModel';
|
||||
export * from './CustomGridDisplay';
|
||||
export * from './ScriptDrivedDeployer';
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
# DbGate - database administration tool
|
||||
This package is obsolete; please use the [dbgate-serve](https://www.npmjs.com/package/dbgate-serve) package instead.
|
||||
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"name": "dbgate",
|
||||
"version": "5.0.0-alpha.1",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
},
|
||||
  "description": "Open-source database administration tool - web interface",
|
||||
"author": "Jan Prochazka",
|
||||
"license": "GPL-3.0",
|
||||
"keywords": [
|
||||
"sql",
|
||||
"dbgate",
|
||||
"web"
|
||||
]
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user