Compare commits
508 Commits
multi-windows
...
v5.1.6
| Author | SHA1 | Date | |
|---|---|---|---|
| 3891e7768d | |||
| 792fa75ccd | |||
| cbd3f1bae9 | |||
| cd92231769 | |||
| ecad1ae01b | |||
| dc576e6ced | |||
| 6cca81f8f1 | |||
| a9f1f19696 | |||
| 390ddac75b | |||
| e2e7c6f06b | |||
| 3a3d0683d5 | |||
| d5534dcf07 | |||
| b0a86f9f4a | |||
| b833a30148 | |||
| d9c1bbaa39 | |||
| 4b74dbbd68 | |||
| 9bcc61551c | |||
| ed71ef312d | |||
| 4fa043b7e5 | |||
| 83725dd349 | |||
| 4e25b71b06 | |||
| 607ae7c872 | |||
| 66ade5823f | |||
| ebfa0a1939 | |||
| 909591404f | |||
| 7a5f2a70ad | |||
| d41b254058 | |||
| 435d06ffb9 | |||
| f4a4eb7f9e | |||
| 9910bbead3 | |||
| cb619a0fe0 | |||
| b0d61f974c | |||
| 8c051ff5f7 | |||
| f713a4b183 | |||
| 6c7e263f0e | |||
| ec3bfb4fae | |||
| 712ec8e6ee | |||
| 4da0b25f44 | |||
| 9b60b7a003 | |||
| 8ed73195c5 | |||
| c69fcd5eff | |||
| 310774db3b | |||
| 1dd166b563 | |||
| 0497f541cb | |||
| 42333a97b8 | |||
| 494c3c8e4a | |||
| 69a87bc076 | |||
| bf4eb19ef5 | |||
| 225518df3e | |||
| 0028240552 | |||
| 44be1bdd11 | |||
| e0703b1bae | |||
| a240681d6d | |||
| f5906587db | |||
| dc0001a8cd | |||
| f19835203f | |||
| 2a2debbb88 | |||
| 23cb3a4b12 | |||
| 13d4d34453 | |||
| 2adca64159 | |||
| 18519b5519 | |||
| 4ddea55d23 | |||
| 5858061349 | |||
| d86a5c0cb4 | |||
| c712005e33 | |||
| 7e28e2257e | |||
| d0c7d591c8 | |||
| 17b73a58c8 | |||
| d765591e8c | |||
| be0aeeb2c8 | |||
| 23b345c898 | |||
| 1d85a17533 | |||
| 7a3c46b691 | |||
| d647d30258 | |||
| 8b511a0532 | |||
| ccb52e9b58 | |||
| f60e1190c8 | |||
| da5dd7ac62 | |||
| 08abec7c3e | |||
| b3839def32 | |||
| efe15bf0bb | |||
| f9e167fc7b | |||
| b35e8fcdf4 | |||
| 4bdd988682 | |||
| 94f21472be | |||
| dd33d96ef6 | |||
| 7604889b72 | |||
| 1382461bdc | |||
| 833f029ab5 | |||
| 04d39f6646 | |||
| 4de8a5b038 | |||
| 1dfdeed018 | |||
| 4892e46795 | |||
| 5aff68d313 | |||
| cdd4382266 | |||
| bbd00ac94d | |||
| dba3183c94 | |||
| a2906cca9d | |||
| 140291696b | |||
| 975643fb24 | |||
| bf9a933fb1 | |||
| 643b792069 | |||
| b4d0ccbd8c | |||
| c9bf949d02 | |||
| 074390ac11 | |||
| 45e54475d0 | |||
| f157fc77d4 | |||
| dac1110404 | |||
| da00e1c228 | |||
| 9ed1cdf4b7 | |||
| 18b7792370 | |||
| 53b6b71a29 | |||
| b2204e1d77 | |||
| e7ac7558ca | |||
| c5a7f458ba | |||
| 8ce5e68c0d | |||
| e9256fe20e | |||
| 5913788035 | |||
| 6c9c4be311 | |||
| 1454ddacb8 | |||
| 2b26779ea8 | |||
| 7781ad69cf | |||
| 1a7f06342f | |||
| 2f820d8dac | |||
| 1535dfd407 | |||
| 3fe7d652b2 | |||
| 7fc8b2901b | |||
| a56f59ceba | |||
| 2ac1072357 | |||
| 24c26a6d87 | |||
| 83693e9f2c | |||
| 59efdd735c | |||
| 41afd177ef | |||
| 0137b191b9 | |||
| 054b90c90d | |||
| a46526cbc8 | |||
| 35c42d0a83 | |||
| 6e2ecd0b05 | |||
| a98a4617ae | |||
| 1a716f0bce | |||
| 973f64f4d7 | |||
| a89c6810aa | |||
| 3d45b00a7c | |||
| f93524e24f | |||
| 9aded740ca | |||
| 66f30ff26e | |||
| 4ced94f070 | |||
| fe61e5e631 | |||
| 24b0d278fd | |||
| de5b075ba5 | |||
| 1665c014e1 | |||
| 586a06da91 | |||
| eb1eb18163 | |||
| 1983576b2f | |||
| ffbb91678c | |||
| 0293766bad | |||
| 5eda39cb62 | |||
| b7c8a60c19 | |||
| 51101d91ea | |||
| cc9acf71ce | |||
| d27f8644d8 | |||
| 347448e3c2 | |||
| 0a008a760b | |||
| 462be9e2bd | |||
| f078872c5b | |||
| fdecef7e78 | |||
| 8acafbbd6e | |||
| 5b8d70747f | |||
| c9a9c7d0f7 | |||
| 50eb5012b1 | |||
| 917c2f49a0 | |||
| 5724067974 | |||
| 428de38b41 | |||
| 9e73e16b7f | |||
| 1e91097bf2 | |||
| 61f82be9f3 | |||
| 91e1c83a91 | |||
| e8452704eb | |||
| 357fcbdf47 | |||
| 02abb4f512 | |||
| 14f71e80d3 | |||
| fdcf1c4c9a | |||
| 97e96aaba6 | |||
| 174b0efd2e | |||
| eab5f4fe5e | |||
| a910e91a91 | |||
| 3e83a69ef7 | |||
| e3b833927d | |||
| 6582c7831e | |||
| 0d2169c996 | |||
| e64d013fee | |||
| c1627b8546 | |||
| 2f74eab048 | |||
| f7a269383f | |||
| 5f9156995b | |||
| f886b8c95d | |||
| 2284264a92 | |||
| f405db7685 | |||
| 14110cb6db | |||
| 1e347f6535 | |||
| 0813f4387d | |||
| 894a864110 | |||
| 4e799885b5 | |||
| 650f9a3db9 | |||
| 6b5e33d97e | |||
| 24923db199 | |||
| 80faf0fd68 | |||
| 33b11eef38 | |||
| b6a0fe6713 | |||
| 2b68a6e1de | |||
| e124291267 | |||
| 1a16d7c69e | |||
| 6cb2616d87 | |||
| 395863da3f | |||
| fec2df9d2f | |||
| 9e3a457ef5 | |||
| 728ad21d2f | |||
| d2f18bc048 | |||
| 0ae7939f93 | |||
| 7ac0b907e2 | |||
| 1bd4b77744 | |||
| 5e4ae3208b | |||
| daf7629f5f | |||
| aeceb34d19 | |||
| 2a98918857 | |||
| ce9d583989 | |||
| 7c87baf451 | |||
| f80c6fec99 | |||
| b04af4c5e3 | |||
| fe65193189 | |||
| a75e463ef5 | |||
| 7eb59ad3a0 | |||
| 7a9f8a460f | |||
| 289752c023 | |||
| 98f2c06c21 | |||
| 530b1cade3 | |||
| 65aa8fb4e3 | |||
| 4c0f17a0b2 | |||
| e4371c526b | |||
| e39f0a1f4b | |||
| 842f77d02b | |||
| 2571e6ac7e | |||
| 1599a7ea01 | |||
| cb1d81b586 | |||
| 339588b8a0 | |||
| 1731b7e4a3 | |||
| 5418bb932c | |||
| 6154b4c780 | |||
| 3f9bd100e1 | |||
| b5c6ddce59 | |||
| 51c72efb34 | |||
| 00df20e350 | |||
| f3a7e3af74 | |||
| 04c37c2b4f | |||
| 12df0993c0 | |||
| ac3ec5c11e | |||
| b565e981e4 | |||
| f7ada698e4 | |||
| bc4c146389 | |||
| 7c80ca1374 | |||
| 8c5cc7dcc1 | |||
| 1974243ed5 | |||
| 71c9071cb8 | |||
| c28e55132a | |||
| 2b2a4debd4 | |||
| 563a35560b | |||
| cc019281d4 | |||
| 86d7d61cc5 | |||
| aff1fe0b3d | |||
| 137631b5b5 | |||
| 090ffa064d | |||
| f77cc1023b | |||
| c6dbb31748 | |||
| ae6c486db5 | |||
| 9a2c12d558 | |||
| 1ed01e9839 | |||
| 25d2c129cd | |||
| 7dc7af0cdb | |||
| 80fea3b01b | |||
| 97dc92e413 | |||
| 9051ba2ee1 | |||
| 7dcbe6c7c1 | |||
| e6fe8a6379 | |||
| b793e4131d | |||
| b737eaac13 | |||
| cb5cce2ea3 | |||
| b05d260caa | |||
| 091e91556d | |||
| 2b4120435b | |||
| c8d031e2c4 | |||
| ac07b7e1ba | |||
| bf51f45934 | |||
| fe31cfb552 | |||
| d505be09ca | |||
| 44668b8017 | |||
| 452dba7f32 | |||
| 7694864fe7 | |||
| 37d5c6fbf9 | |||
| 802f231e43 | |||
| 53c39e6a43 | |||
| 65f550023a | |||
| abe7a20960 | |||
| d686206fe2 | |||
| 27b2fdb507 | |||
| 88f522084d | |||
| 8472c8be79 | |||
| 03f8a93dd0 | |||
| 2889f79120 | |||
| 8a312181a3 | |||
| e7236de078 | |||
| 1fe2269b11 | |||
| 10ea8ca3a6 | |||
| 491d24984d | |||
| b0279dd315 | |||
| 9d6b581809 | |||
| 3f748df1ec | |||
| 7ca835765c | |||
| a76530155d | |||
| 96b82b690e | |||
| d3a40e52fc | |||
| 513b2ba42f | |||
| d23371f642 | |||
| 5ac6e12c3e | |||
| 4468c0ed3b | |||
| 06bd9bcabe | |||
| 66d15abcab | |||
| 3bdb5c0152 | |||
| f504283002 | |||
| f07c7909ef | |||
| c809f58349 | |||
| 3e91ecd141 | |||
| 857185a78b | |||
| c189c12cae | |||
| 96106e6aac | |||
| 088ca231f3 | |||
| 5395d1343b | |||
| d48c34a4a5 | |||
| 53ee1d87c2 | |||
| b5d97c8181 | |||
| 28e06166e0 | |||
| 8f1343bc42 | |||
| 2080a23b69 | |||
| d71294621b | |||
| 0f6ec420d2 | |||
| 35152a2796 | |||
| 1abfab950e | |||
| 6e6d0bb616 | |||
| 93e264e9ec | |||
| 29257f9bf9 | |||
| 8dd90ce5e4 | |||
| f2f7421971 | |||
| 8a10beef52 | |||
| df33b43e90 | |||
| 153cba3779 | |||
| 8f110355c4 | |||
| b570f873fe | |||
| c07e26c036 | |||
| 995bc6f16a | |||
| 5b4339889f | |||
| ae963d7a3b | |||
| c426cd825f | |||
| 62c2b3f5f4 | |||
| ab3584dc23 | |||
| 3a5301af6b | |||
| 55efdef181 | |||
| e9ea1edd21 | |||
| d9b91f2122 | |||
| 15da5fb95e | |||
| d563a40d0f | |||
| a4e5630f89 | |||
| c368ad8d54 | |||
| 01d1f08597 | |||
| 8c934355ab | |||
| c6e3b52bc6 | |||
| e117caf708 | |||
| 2b4d5c026e | |||
| 93a736f1f8 | |||
| 1f8ef8e20e | |||
| bef8cdbee4 | |||
| 763391e73b | |||
| b1cd16b095 | |||
| 2ee1b3105f | |||
| 51fa652851 | |||
| 755781bca6 | |||
| 1a90729f66 | |||
| 9e520e04b2 | |||
| ded0c8398c | |||
| dc31552f9e | |||
| e0376a708c | |||
| 1becb89ff0 | |||
| 4d7365828e | |||
| 29ccb09ba6 | |||
| eadd3feba0 | |||
| 93269fe314 | |||
| 34ca4c501a | |||
| 34084d0e94 | |||
| 07fc551383 | |||
| b0eed05a1a | |||
| 8228afd725 | |||
| 301222d118 | |||
| 9b741b415a | |||
| cc8438ef66 | |||
| 179bd1f6b1 | |||
| 08b7b1870c | |||
| 2c7da1d3f8 | |||
| 2a8a2c8652 | |||
| b6b75f0743 | |||
| aca92f3889 | |||
| 4672540f82 | |||
| 261cec7ec2 | |||
| de444e8485 | |||
| f4fb92be91 | |||
| 571c928234 | |||
| 2fcc4b1ff0 | |||
| c0b0ca22aa | |||
| d862762758 | |||
| 7ca8880c3c | |||
| 21ccc55e3f | |||
| 8662353071 | |||
| faedcfa64d | |||
| 7ad1796db5 | |||
| 717ec5293b | |||
| d437e171fb | |||
| 97ae7ae0d6 | |||
| e9a8f3ee84 | |||
| 1fb237417a | |||
| cd65fa16ed | |||
| 1e5a740a52 | |||
| 42badf17eb | |||
| 2ec3c2c24f | |||
| f3ab06d3b8 | |||
| 2b78a8dcae | |||
| 389ef98c66 | |||
| 75bf0e53fc | |||
| ff4dd18c1b | |||
| 4c535289a4 | |||
| d24886c73b | |||
| 9883a2982a | |||
| 24191870e8 | |||
| b9dae8928e | |||
| 7bed880003 | |||
| e2b95ad372 | |||
| 18710bc67d | |||
| 02e8bba999 | |||
| e770ca3eef | |||
| aaa72426c3 | |||
| 53e5f1378c | |||
| 773abc6dff | |||
| 8abb311623 | |||
| 2d83fb7dc4 | |||
| ae69ca9ebd | |||
| 0cb4ec54bc | |||
| d34cff234c | |||
| 50abead104 | |||
| 3b0ed7df8b | |||
| ce925337f1 | |||
| a911f5048f | |||
| 096cbc13d8 | |||
| a2cf1cd340 | |||
| 44827ea504 | |||
| 13b549ca2c | |||
| c104122a50 | |||
| 6794b79d0e | |||
| 42200ec04a | |||
| 2944d0fa39 | |||
| 34496ced0e | |||
| fa0680a8ee | |||
| f2402cadb0 | |||
| ffe82a82fa | |||
| 6e1a1edac0 | |||
| 427e25b3c0 | |||
| fca2bf8ddb | |||
| f65c15d2e5 | |||
| 343cf84a58 | |||
| e67a94b5d7 | |||
| cc1916eba3 | |||
| 0a0ce6ad98 | |||
| fd21157c2d | |||
| 8b3697e71e | |||
| f3bebcfa8f | |||
| 4c145f1f0a | |||
| cfce4e6ece | |||
| 13d778586e | |||
| 77b85fa42b | |||
| fb89c47563 | |||
| 8ffbdfa01d | |||
| 94788454a9 | |||
| a92bd1c840 | |||
| 610e9f4e60 | |||
| 6e9dace360 | |||
| 148222e239 | |||
| 5e2279cd10 | |||
| b54026b039 | |||
| 6f3076fddb | |||
| 92c336624a | |||
| 07d4b248bf | |||
| 1534099dc4 | |||
| d483869aa6 | |||
| 8bb40e991b | |||
| 5c6989bf91 | |||
| 5b503ae802 | |||
| 5feb018e22 | |||
| 97d259cd1e | |||
| fa357cf8ce | |||
| 7a0f5e171e | |||
| 24cfb23b39 | |||
| 06b6a5d3ae | |||
| 301ba1df60 |
@@ -27,6 +27,9 @@ jobs:
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 14.x
|
||||
- name: yarn adjustPackageJson
|
||||
run: |
|
||||
yarn adjustPackageJson
|
||||
- name: yarn install
|
||||
run: |
|
||||
yarn install
|
||||
@@ -48,8 +51,10 @@ jobs:
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GH_TOKEN }} # token for electron publish
|
||||
|
||||
WIN_CSC_LINK: ${{ secrets.WINCERT_CERTIFICATE }}
|
||||
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_PASSWORD }}
|
||||
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
|
||||
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
|
||||
# WIN_CSC_LINK: ${{ secrets.WINCERT_CERTIFICATE }}
|
||||
# WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_PASSWORD }}
|
||||
|
||||
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
|
||||
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
|
||||
|
||||
@@ -31,6 +31,9 @@ jobs:
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 14.x
|
||||
- name: yarn adjustPackageJson
|
||||
run: |
|
||||
yarn adjustPackageJson
|
||||
- name: yarn install
|
||||
run: |
|
||||
# yarn --version
|
||||
@@ -54,8 +57,10 @@ jobs:
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GH_TOKEN }} # token for electron publish
|
||||
|
||||
WIN_CSC_LINK: ${{ secrets.WINCERT_CERTIFICATE }}
|
||||
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_PASSWORD }}
|
||||
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
|
||||
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
|
||||
# WIN_CSC_LINK: ${{ secrets.WINCERT_CERTIFICATE }}
|
||||
# WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_PASSWORD }}
|
||||
|
||||
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
|
||||
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
|
||||
|
||||
@@ -1,58 +0,0 @@
|
||||
name: Docker image BETA
|
||||
|
||||
# on: [push]
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+'
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-docker.[0-9]+'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-18.04]
|
||||
|
||||
steps:
|
||||
- name: Context
|
||||
env:
|
||||
GITHUB_CONTEXT: ${{ toJson(github) }}
|
||||
run: echo "$GITHUB_CONTEXT"
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
- name: Use Node.js 14.x
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 14.x
|
||||
- name: yarn install
|
||||
run: |
|
||||
# yarn --version
|
||||
# yarn config set network-timeout 300000
|
||||
yarn install
|
||||
- name: setCurrentVersion
|
||||
run: |
|
||||
yarn setCurrentVersion
|
||||
- name: Prepare docker image
|
||||
run: |
|
||||
yarn run prepare:docker
|
||||
- name: Build docker image
|
||||
run: |
|
||||
docker build ./docker -t dbgate
|
||||
- name: Push docker image
|
||||
run: |
|
||||
docker tag dbgate dbgate/dbgate:beta
|
||||
docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }}
|
||||
docker push dbgate/dbgate:beta
|
||||
- name: Build alpine docker image
|
||||
run: |
|
||||
docker build ./docker -t dbgate -f docker/Dockerfile-alpine
|
||||
- name: Push alpine docker image
|
||||
run: |
|
||||
docker tag dbgate dbgate/dbgate:beta-alpine
|
||||
docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }}
|
||||
docker push dbgate/dbgate:beta-alpine
|
||||
@@ -1,17 +1,11 @@
|
||||
name: Docker image
|
||||
|
||||
# on: [push]
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+'
|
||||
# - 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10
|
||||
|
||||
# on:
|
||||
# push:
|
||||
# branches:
|
||||
# - production
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+'
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-docker.[0-9]+'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
@@ -30,12 +24,43 @@ jobs:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: |
|
||||
dbgate/dbgate
|
||||
flavor: |
|
||||
latest=false
|
||||
tags: |
|
||||
type=raw,value=beta,enable=${{ contains(github.ref_name, '-docker.') || contains(github.ref_name, '-beta.') }}
|
||||
|
||||
type=match,pattern=\d+.\d+.\d+,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
|
||||
type=raw,value=latest,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
|
||||
|
||||
- name: Docker alpine meta
|
||||
id: alpmeta
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: |
|
||||
dbgate/dbgate
|
||||
flavor: |
|
||||
latest=false
|
||||
tags: |
|
||||
type=raw,value=beta-alpine,enable=${{ contains(github.ref_name, '-docker.') || contains(github.ref_name, '-beta.') }}
|
||||
|
||||
type=match,pattern=\d+.\d+.\d+,suffix=-alpine,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
|
||||
type=raw,value=alpine,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
|
||||
|
||||
- name: Use Node.js 14.x
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 14.x
|
||||
- name: yarn install
|
||||
run: |
|
||||
# yarn --version
|
||||
# yarn config set network-timeout 300000
|
||||
yarn install
|
||||
- name: setCurrentVersion
|
||||
run: |
|
||||
@@ -43,19 +68,28 @@ jobs:
|
||||
- name: Prepare docker image
|
||||
run: |
|
||||
yarn run prepare:docker
|
||||
- name: Build docker image
|
||||
run: |
|
||||
docker build ./docker -t dbgate
|
||||
- name: Push docker image
|
||||
run: |
|
||||
docker tag dbgate dbgate/dbgate
|
||||
docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }}
|
||||
docker push dbgate/dbgate
|
||||
- name: Build alpine docker image
|
||||
run: |
|
||||
docker build ./docker -t dbgate -f docker/Dockerfile-alpine
|
||||
- name: Push alpine docker image
|
||||
run: |
|
||||
docker tag dbgate dbgate/dbgate:alpine
|
||||
docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }}
|
||||
docker push dbgate/dbgate:alpine
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
push: true
|
||||
context: ./docker
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
||||
|
||||
- name: Build and push alpine
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
push: true
|
||||
context: ./docker
|
||||
file: ./docker/Dockerfile-alpine
|
||||
tags: ${{ steps.alpmeta.outputs.tags }}
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
||||
|
||||
@@ -31,6 +31,11 @@ jobs:
|
||||
run: |
|
||||
cd packages/filterparser
|
||||
yarn test:ci
|
||||
- name: Datalib (perspective) tests
|
||||
if: always()
|
||||
run: |
|
||||
cd packages/datalib
|
||||
yarn test:ci
|
||||
- uses: tanmen/jest-reporter@v1
|
||||
if: always()
|
||||
with:
|
||||
@@ -43,6 +48,12 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
result-file: packages/filterparser/result.json
|
||||
action-name: Filter parser test results
|
||||
- uses: tanmen/jest-reporter@v1
|
||||
if: always()
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
result-file: packages/datalib/result.json
|
||||
action-name: Datalib (perspectives) test results
|
||||
|
||||
services:
|
||||
postgres:
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
16.14.2
|
||||
Vendored
+20
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"terminals": [
|
||||
{
|
||||
"splitTerminals": [
|
||||
{
|
||||
"name": "lib",
|
||||
"commands": ["yarn lib"]
|
||||
},
|
||||
{
|
||||
"name": "web",
|
||||
"commands": ["yarn start:web"]
|
||||
},
|
||||
{
|
||||
"name": "api",
|
||||
"commands": ["yarn start:api"]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
Vendored
+4
-1
@@ -1,3 +1,6 @@
|
||||
{
|
||||
"jestrunner.jestCommand": "node_modules/.bin/cross-env DEVMODE=1 LOCALTEST=1 node_modules/.bin/jest"
|
||||
"jestrunner.jestCommand": "node_modules/.bin/cross-env DEVMODE=1 LOCALTEST=1 node_modules/.bin/jest",
|
||||
"cSpell.words": [
|
||||
"dbgate"
|
||||
]
|
||||
}
|
||||
+111
@@ -8,6 +8,117 @@ Builds:
|
||||
- linux - application for linux
|
||||
- win - application for Windows
|
||||
|
||||
### 5.1.5
|
||||
- ADDED: Support perspectives for MongoDB - MongoDB query designer
|
||||
- ADDED: Show JSON content directly in the overview #395
|
||||
- CHANGED: OSX Command H shortcut for hiding window #390
|
||||
- ADDED: Uppercase Autocomplete Suggestions #389
|
||||
- FIXED: Record view left/right arrows cause start record number to be treated as string #388
|
||||
- FIXED: MongoDb ObjectId behaviour not consistent in nested objects #387
|
||||
- FIXED: demo.dbgate.org - beta version crash 5.1.5-beta.3 #386
|
||||
- ADDED: connect via socket - configurable via environment variables #358
|
||||
|
||||
### 5.1.4
|
||||
- ADDED: Drop database commands #384
|
||||
- ADDED: Customizable Redis key separator #379
|
||||
- ADDED: ARM support for docker images
|
||||
- ADDED: Version tags for docker images
|
||||
- ADDED: Better SQL command splitting and highlighting
|
||||
- ADDED: Unsaved marker for SQL files
|
||||
|
||||
### 5.1.3
|
||||
- ADDED: Editing multiline cell values #378 #371 #359
|
||||
- ADDED: Truncate table #333
|
||||
- ADDED: Perspectives - show row count
|
||||
- ADDED: Query - error markers in gutter area
|
||||
- ADDED: Query - ability to execute query elements from gutter
|
||||
- FIXED: Correct error line numbers returned from queries
|
||||
|
||||
### 5.1.2
|
||||
- FIXED: MongoDb any export function does not work. #373
|
||||
- ADDED: Query Designer short order more flexibility #372
|
||||
- ADDED: Form View move between records #370
|
||||
- ADDED: Custom SQL conditions in query designer and table filtering #369
|
||||
- ADDED: Query Designer filter eq to X or IS NULL #368
|
||||
- FIXED: Query designer, open a saved query lost sort order #363
|
||||
- ADDED: Query designer reorder columns #362
|
||||
- ADDED: connect via socket #358
|
||||
- FIXED: Show affected rows after UPDATE/DELETE/INSERT #361
|
||||
- ADDED: Perspective cell formatters - JSON, image
|
||||
- ADDED: Perspectives - cells without joined data are gray
|
||||
|
||||
### 5.1.1
|
||||
- ADDED: Perspective designer
|
||||
- FIXED: NULL,NOT NULL filter datatime columns #356
|
||||
- FIXED: Recognize computed columns on SQL server #354
|
||||
- ADDED: Hotkey for clear filter #352
|
||||
- FIXED: Change column type on Postgres #350
|
||||
- ADDED: Ability to open qdesign file #349
|
||||
- ADDED: Custom editor font size #345
|
||||
- ADDED: Ability to open perspective files
|
||||
|
||||
|
||||
### 5.1.0
|
||||
- ADDED: Perspectives (docs: https://dbgate.org/docs/perspectives.html )
|
||||
- CHANGED: Upgraded SQLite engine version (driver better-sqlite3: 7.6.2)
|
||||
- CHANGED: Upgraded ElectronJS version (from version 13 to version 17)
|
||||
- CHANGED: Upgraded all dependencies with current available minor version updates
|
||||
- CHANGED: By default, connect on click #332˝
|
||||
- CHANGED: Improved keyboard navigation, when editing table data #331
|
||||
- ADDED: Option to skip Save changes dialog #329
|
||||
- FIXED: Unsigned column doesn't work correctly. #324
|
||||
- FIXED: Connect to MS SQL with domain user now works also under Linux and Mac #305
|
||||
|
||||
### 5.0.9
|
||||
- FIXED: Fixed problem with SSE events on web version
|
||||
- ADDED: Added menu command "New query designer"
|
||||
- ADDED: Added menu command "New ER diagram"
|
||||
|
||||
### 5.0.8
|
||||
- ADDED: SQL Server - support using domain logins under Linux and Mac #305
|
||||
- ADDED: Permissions for connections #318
|
||||
- ADDED: Ability to change editor front #308
|
||||
- ADDED: Custom expression in query designer #306
|
||||
- ADDED: OR conditions in query designer #321
|
||||
- ADDED: Ability to configure settings view environment variables #304
|
||||
|
||||
### 5.0.7
|
||||
- FIXED: Fixed some problems with SSH tunnel (upgraded SSH client) #315
|
||||
- FIXED: Fixed MognoDB executing find query #312
|
||||
- ADDED: Interval filters for date/time columns #311
|
||||
- ADDED: Ability to clone rows #309
|
||||
- ADDED: connecting option Trust server certificate for SQL Server #305
|
||||
- ADDED: Autorefresh, reload table every x second #303
|
||||
- FIXED(app): Changing editor theme and font size in Editor Themes #300
|
||||
|
||||
### 5.0.6
|
||||
- ADDED: Search in columns
|
||||
- CHANGED: Upgraded mongodb driver
|
||||
- ADDED: Ability to reset view, when data load fails
|
||||
- FIXED: Filtering works for complex types (geography, xml under MSSQL)
|
||||
- FIXED: Fixed some NPM package problems
|
||||
|
||||
### 5.0.5
|
||||
- ADDED: Visualisation geographics objects on map #288
|
||||
- ADDED: Support for native SQL as default value inside yaml files #296
|
||||
- FIXED: Postgres boolean columns don't filter correctly #298
|
||||
- FIXED: Importing dbgate-api as NPM package now works correctly
|
||||
- FIXED: Handle error when reading deleted archive
|
||||
|
||||
### 5.0.3
|
||||
- CHANGED: Optimalization of loading DB structure for PostgreSQL, MySQL #273
|
||||
- CHANGED: Upgraded mysql driver #293
|
||||
- CHANGED: Better UX when defining SSH port #291
|
||||
- ADDED: Database object menu from tab
|
||||
- CHANGED: Ability to close file uploader
|
||||
- FIXED: Correct handling of NUL values in update keys
|
||||
- CHANGED: Upgraded MS SQL tedious driver
|
||||
- ADDED: Change order of pinned tables & databases #227
|
||||
- FIXED: #294 Statusbar doesn't match active tab
|
||||
- CHANGED: Improved connection worklflow, disconnecting shws confirmations, when it leads to close any tabs
|
||||
- ADDED: Configurable object actions #255
|
||||
- ADDED: Multiple sort criteria #235
|
||||
- ADDED(app): Open JSON file
|
||||
### 5.0.2
|
||||
- FIXED: Cannot use SSH Tunnel after update #291
|
||||
|
||||
|
||||
@@ -72,6 +72,7 @@ DbGate is licensed under MIT license and is completely free.
|
||||
* Charts, export chart to HTML page
|
||||
* For detailed info, how to run DbGate in docker container, visit [docker hub](https://hub.docker.com/r/dbgate/dbgate)
|
||||
* Extensible plugin architecture
|
||||
* Perspectives - nested table view over complex relational data
|
||||
|
||||
## How to contribute
|
||||
Any contributions are welcome. If you want to contribute without coding, consider following:
|
||||
@@ -79,7 +80,8 @@ Any contributions are welcome. If you want to contribute without coding, conside
|
||||
* Tell your friends about DbGate or share on social networks - when more people will use DbGate, it will grow to be better
|
||||
* Write review on [Slant.co](https://www.slant.co/improve/options/41086/~dbgate-review) or [G2](https://www.g2.com/products/dbgate/reviews)
|
||||
* Create issue, if you find problem in app, or you have idea to new feature. If issue already exists, you could leave comment on it, to prioritise most wanted issues.
|
||||
* Become a backer on [Open collective](https://opencollective.com/dbgate)
|
||||
* Create some tutorial video on [youtube](https://www.youtube.com/playlist?list=PLCo7KjCVXhr0RfUSjM9wJMsp_ShL1q61A)
|
||||
* Become a backer on [GitHub sponsors](https://github.com/sponsors/dbgate) or [Open collective](https://opencollective.com/dbgate)
|
||||
* Where a small coding is acceptable for you, you could [create plugin](https://dbgate.org/docs/plugin-development.html). Plugins for new themes can be created actually without JS coding.
|
||||
|
||||
Thank you!
|
||||
|
||||
@@ -0,0 +1,12 @@
|
||||
const fs = require('fs');
|
||||
|
||||
function adjustFile(file) {
|
||||
const json = JSON.parse(fs.readFileSync(file, { encoding: 'utf-8' }));
|
||||
if (process.platform != 'win32') {
|
||||
delete json.optionalDependencies.msnodesqlv8;
|
||||
}
|
||||
fs.writeFileSync(file, JSON.stringify(json, null, 2), 'utf-8');
|
||||
}
|
||||
|
||||
adjustFile('packages/api/package.json');
|
||||
adjustFile('app/package.json');
|
||||
+5
-5
@@ -107,12 +107,12 @@
|
||||
"devDependencies": {
|
||||
"copyfiles": "^2.2.0",
|
||||
"cross-env": "^6.0.3",
|
||||
"electron": "13.6.3",
|
||||
"electron-builder": "22.14.5",
|
||||
"electron-builder-notarize": "^1.4.0"
|
||||
"electron": "17.4.10",
|
||||
"electron-builder": "23.1.0",
|
||||
"electron-builder-notarize": "^1.5.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"better-sqlite3": "7.5.0",
|
||||
"msnodesqlv8": "^2.4.4"
|
||||
"better-sqlite3": "7.6.2",
|
||||
"msnodesqlv8": "^2.6.0"
|
||||
}
|
||||
}
|
||||
|
||||
Binary file not shown.
@@ -6,6 +6,9 @@ module.exports = ({ editMenu }) => [
|
||||
{ command: 'new.sqliteDatabase', hideDisabled: true },
|
||||
{ divider: true },
|
||||
{ command: 'new.query', hideDisabled: true },
|
||||
{ command: 'new.queryDesign', hideDisabled: true },
|
||||
{ command: 'new.diagram', hideDisabled: true },
|
||||
{ command: 'new.perspective', hideDisabled: true },
|
||||
{ command: 'new.freetable', hideDisabled: true },
|
||||
{ command: 'new.shell', hideDisabled: true },
|
||||
{ command: 'new.jsonl', hideDisabled: true },
|
||||
@@ -66,6 +69,7 @@ module.exports = ({ editMenu }) => [
|
||||
{ command: 'app.toggleDevTools', hideDisabled: true },
|
||||
{ command: 'app.toggleFullScreen', hideDisabled: true },
|
||||
{ command: 'app.minimize', hideDisabled: true },
|
||||
{ command: 'toggle.sidebar' },
|
||||
{ divider: true },
|
||||
{ command: 'theme.changeTheme', hideDisabled: true },
|
||||
{ command: 'settings.show' },
|
||||
|
||||
+479
-510
File diff suppressed because it is too large
Load Diff
@@ -8,4 +8,4 @@ then
|
||||
echo "$HOST_IP $HOST_DOMAIN" >> /etc/hosts
|
||||
fi
|
||||
|
||||
node bundle.js
|
||||
node bundle.js --listen-api
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 156 KiB After Width: | Height: | Size: 166 KiB |
@@ -297,4 +297,33 @@ describe('Deploy database', () => {
|
||||
expect(res.rows[0].val.toString()).toEqual('5');
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.enginesPostgre.map(engine => [engine.label, engine]))(
|
||||
'Current timestamp default value - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(conn, driver, [
|
||||
[
|
||||
{
|
||||
name: 't1.table.yaml',
|
||||
json: {
|
||||
name: 't1',
|
||||
columns: [
|
||||
{ name: 'id', type: 'int' },
|
||||
{
|
||||
name: 'val',
|
||||
type: 'timestamp',
|
||||
default: 'current_timestamp',
|
||||
},
|
||||
],
|
||||
primaryKey: ['id'],
|
||||
},
|
||||
},
|
||||
],
|
||||
]);
|
||||
|
||||
await driver.query(conn, `insert into t1 (id) values (1)`);
|
||||
const res = await driver.query(conn, ` select val from t1 where id = 1`);
|
||||
expect(res.rows[0].val.toString().substring(0, 2)).toEqual('20');
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1,21 +1,21 @@
|
||||
version: '3'
|
||||
services:
|
||||
postgres:
|
||||
image: postgres
|
||||
restart: always
|
||||
environment:
|
||||
POSTGRES_PASSWORD: Pwd2020Db
|
||||
ports:
|
||||
- 15000:5432
|
||||
# postgres:
|
||||
# image: postgres
|
||||
# restart: always
|
||||
# environment:
|
||||
# POSTGRES_PASSWORD: Pwd2020Db
|
||||
# ports:
|
||||
# - 15000:5432
|
||||
|
||||
mariadb:
|
||||
image: mariadb
|
||||
command: --default-authentication-plugin=mysql_native_password
|
||||
restart: always
|
||||
ports:
|
||||
- 15004:3306
|
||||
environment:
|
||||
- MYSQL_ROOT_PASSWORD=Pwd2020Db
|
||||
# mariadb:
|
||||
# image: mariadb
|
||||
# command: --default-authentication-plugin=mysql_native_password
|
||||
# restart: always
|
||||
# ports:
|
||||
# - 15004:3306
|
||||
# environment:
|
||||
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
|
||||
|
||||
# mysql:
|
||||
# image: mysql:8.0.18
|
||||
@@ -26,15 +26,15 @@ services:
|
||||
# environment:
|
||||
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
|
||||
|
||||
# mssql:
|
||||
# image: mcr.microsoft.com/mssql/server
|
||||
# restart: always
|
||||
# ports:
|
||||
# - 15002:1433
|
||||
# environment:
|
||||
# - ACCEPT_EULA=Y
|
||||
# - SA_PASSWORD=Pwd2020Db
|
||||
# - MSSQL_PID=Express
|
||||
mssql:
|
||||
image: mcr.microsoft.com/mssql/server
|
||||
restart: always
|
||||
ports:
|
||||
- 15002:1433
|
||||
environment:
|
||||
- ACCEPT_EULA=Y
|
||||
- SA_PASSWORD=Pwd2020Db
|
||||
- MSSQL_PID=Express
|
||||
|
||||
# cockroachdb:
|
||||
# image: cockroachdb/cockroach
|
||||
|
||||
@@ -135,12 +135,16 @@ const filterLocal = [
|
||||
// filter local testing
|
||||
'-MySQL',
|
||||
'-MariaDB',
|
||||
'PostgreSQL',
|
||||
'-SQL Server',
|
||||
'-PostgreSQL',
|
||||
'SQL Server',
|
||||
'-SQLite',
|
||||
'-CockroachDB',
|
||||
];
|
||||
|
||||
const enginesPostgre = engines.filter(x => x.label == 'PostgreSQL');
|
||||
|
||||
module.exports = process.env.CITEST
|
||||
? engines.filter(x => !x.skipOnCI)
|
||||
: engines.filter(x => filterLocal.find(y => x.label == y));
|
||||
|
||||
module.exports.enginesPostgre = enginesPostgre;
|
||||
|
||||
@@ -0,0 +1,14 @@
|
||||
<?xml version="1.0" encoding="iso-8859-1"?>
|
||||
<!-- Generator: Adobe Illustrator 18.1.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Capa_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 17.804 17.804" style="enable-background:new 0 0 17.804 17.804;" xml:space="preserve">
|
||||
<g>
|
||||
<g id="c98_play">
|
||||
<path fill='#ccc' d="M2.067,0.043C2.21-0.028,2.372-0.008,2.493,0.085l13.312,8.503c0.094,0.078,0.154,0.191,0.154,0.313
|
||||
c0,0.12-0.061,0.237-0.154,0.314L2.492,17.717c-0.07,0.057-0.162,0.087-0.25,0.087l-0.176-0.04
|
||||
c-0.136-0.065-0.222-0.207-0.222-0.361V0.402C1.844,0.25,1.93,0.107,2.067,0.043z"/>
|
||||
</g>
|
||||
<g id="Capa_1_78_">
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 733 B |
@@ -0,0 +1,14 @@
|
||||
<?xml version="1.0" encoding="iso-8859-1"?>
|
||||
<!-- Generator: Adobe Illustrator 18.1.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Capa_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 17.804 17.804" style="enable-background:new 0 0 17.804 17.804;" xml:space="preserve">
|
||||
<g>
|
||||
<g id="c98_play">
|
||||
<path fill='#444' d="M2.067,0.043C2.21-0.028,2.372-0.008,2.493,0.085l13.312,8.503c0.094,0.078,0.154,0.191,0.154,0.313
|
||||
c0,0.12-0.061,0.237-0.154,0.314L2.492,17.717c-0.07,0.057-0.162,0.087-0.25,0.087l-0.176-0.04
|
||||
c-0.136-0.065-0.222-0.207-0.222-0.361V0.402C1.844,0.25,1.93,0.107,2.067,0.043z"/>
|
||||
</g>
|
||||
<g id="Capa_1_78_">
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 733 B |
+6
-1
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"private": true,
|
||||
"version": "5.0.2",
|
||||
"version": "5.1.6",
|
||||
"name": "dbgate-all",
|
||||
"workspaces": [
|
||||
"packages/*",
|
||||
@@ -10,6 +10,10 @@
|
||||
"scripts": {
|
||||
"start:api": "yarn workspace dbgate-api start",
|
||||
"start:app": "cd app && yarn start",
|
||||
"start:api:debug": "cross-env DEBUG=* yarn workspace dbgate-api start",
|
||||
"start:app:debug": "cd app && cross-env DEBUG=* yarn start",
|
||||
"start:api:debug:ssh": "cross-env DEBUG=ssh yarn workspace dbgate-api start",
|
||||
"start:app:debug:ssh": "cd app && cross-env DEBUG=ssh yarn start",
|
||||
"start:api:portal": "yarn workspace dbgate-api start:portal",
|
||||
"start:api:singledb": "yarn workspace dbgate-api start:singledb",
|
||||
"start:web": "yarn workspace dbgate-web dev",
|
||||
@@ -32,6 +36,7 @@
|
||||
"start:app:local": "cd app && yarn start:local",
|
||||
"setCurrentVersion": "node setCurrentVersion",
|
||||
"generatePadFile": "node generatePadFile",
|
||||
"adjustPackageJson": "node adjustPackageJson",
|
||||
"fillNativeModules": "node fillNativeModules",
|
||||
"fillNativeModulesElectron": "node fillNativeModules --electron",
|
||||
"fillPackagedPlugins": "node fillPackagedPlugins",
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
DEVMODE=1
|
||||
SHELL_SCRIPTING=1
|
||||
|
||||
# PERMISSIONS=~widgets/app,~widgets/plugins
|
||||
# DISABLE_SHELL=1
|
||||
# HIDE_APP_EDITOR=1
|
||||
|
||||
Vendored
+11
@@ -48,4 +48,15 @@ PASSWORD_relational=relational
|
||||
ENGINE_relational=mariadb@dbgate-plugin-mysql
|
||||
READONLY_relational=1
|
||||
|
||||
# SETTINGS_dataGrid.showHintColumns=1
|
||||
|
||||
# docker run -p 3000:3000 -e CONNECTIONS=mongo -e URL_mongo=mongodb://localhost:27017 -e ENGINE_mongo=mongo@dbgate-plugin-mongo -e LABEL_mongo=mongo dbgate/dbgate:beta
|
||||
|
||||
# LOGINS=x,y
|
||||
# LOGIN_PASSWORD_x=x
|
||||
# LOGIN_PASSWORD_y=LOGIN_PASSWORD_y
|
||||
# LOGIN_PERMISSIONS_x=~*
|
||||
# LOGIN_PERMISSIONS_y=~*
|
||||
|
||||
# PERMISSIONS=~*,connections/relational
|
||||
# PERMISSIONS=~*
|
||||
|
||||
@@ -25,9 +25,10 @@
|
||||
"compare-versions": "^3.6.0",
|
||||
"cors": "^2.8.5",
|
||||
"cross-env": "^6.0.3",
|
||||
"dbgate-query-splitter": "^4.9.0",
|
||||
"dbgate-query-splitter": "^4.9.2",
|
||||
"dbgate-sqltree": "^5.0.0-alpha.1",
|
||||
"dbgate-tools": "^5.0.0-alpha.1",
|
||||
"debug": "^4.3.4",
|
||||
"diff": "^5.0.0",
|
||||
"diff2html": "^3.4.13",
|
||||
"eslint": "^6.8.0",
|
||||
@@ -45,18 +46,19 @@
|
||||
"lodash": "^4.17.21",
|
||||
"ncp": "^2.0.0",
|
||||
"node-cron": "^2.0.3",
|
||||
"node-ssh-forward": "^0.7.2",
|
||||
"on-finished": "^2.4.1",
|
||||
"portfinder": "^1.0.28",
|
||||
"simple-encryptor": "^4.0.0",
|
||||
"ssh2": "^1.11.0",
|
||||
"tar": "^6.0.5",
|
||||
"uuid": "^3.4.0"
|
||||
},
|
||||
"scripts": {
|
||||
"start": "env-cmd node src/index.js",
|
||||
"start:portal": "env-cmd -f env/portal/.env node src/index.js",
|
||||
"start:singledb": "env-cmd -f env/singledb/.env node src/index.js",
|
||||
"start:filedb": "env-cmd node src/index.js /home/jena/test/chinook/Chinook.db",
|
||||
"start:singleconn": "env-cmd node src/index.js --server localhost --user root --port 3307 --engine mysql@dbgate-plugin-mysql --password test",
|
||||
"start": "env-cmd node src/index.js --listen-api",
|
||||
"start:portal": "env-cmd -f env/portal/.env node src/index.js --listen-api",
|
||||
"start:singledb": "env-cmd -f env/singledb/.env node src/index.js --listen-api",
|
||||
"start:filedb": "env-cmd node src/index.js /home/jena/test/chinook/Chinook.db --listen-api",
|
||||
"start:singleconn": "env-cmd node src/index.js --server localhost --user root --port 3307 --engine mysql@dbgate-plugin-mysql --password test --listen-api",
|
||||
"ts": "tsc",
|
||||
"build": "webpack"
|
||||
},
|
||||
@@ -72,7 +74,7 @@
|
||||
"webpack-cli": "^3.3.11"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"better-sqlite3": "7.5.0",
|
||||
"msnodesqlv8": "^2.4.4"
|
||||
"better-sqlite3": "7.6.2",
|
||||
"msnodesqlv8": "^2.6.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,8 @@
|
||||
const fs = require('fs-extra');
|
||||
const stream = require('stream');
|
||||
const readline = require('readline');
|
||||
const path = require('path');
|
||||
const { formatWithOptions } = require('util');
|
||||
const { archivedir, clearArchiveLinksCache, resolveArchiveFolder } = require('../utility/directories');
|
||||
const socket = require('../utility/socket');
|
||||
const JsonLinesDatastore = require('../utility/JsonLinesDatastore');
|
||||
const { saveFreeTableData } = require('../utility/freeTableStorage');
|
||||
const loadFilesRecursive = require('../utility/loadFilesRecursive');
|
||||
|
||||
@@ -45,29 +42,34 @@ module.exports = {
|
||||
|
||||
files_meta: true,
|
||||
async files({ folder }) {
|
||||
const dir = resolveArchiveFolder(folder);
|
||||
if (!(await fs.exists(dir))) return [];
|
||||
const files = await loadFilesRecursive(dir); // fs.readdir(dir);
|
||||
try {
|
||||
const dir = resolveArchiveFolder(folder);
|
||||
if (!(await fs.exists(dir))) return [];
|
||||
const files = await loadFilesRecursive(dir); // fs.readdir(dir);
|
||||
|
||||
function fileType(ext, type) {
|
||||
return files
|
||||
.filter(name => name.endsWith(ext))
|
||||
.map(name => ({
|
||||
name: name.slice(0, -ext.length),
|
||||
label: path.parse(name.slice(0, -ext.length)).base,
|
||||
type,
|
||||
}));
|
||||
function fileType(ext, type) {
|
||||
return files
|
||||
.filter(name => name.endsWith(ext))
|
||||
.map(name => ({
|
||||
name: name.slice(0, -ext.length),
|
||||
label: path.parse(name.slice(0, -ext.length)).base,
|
||||
type,
|
||||
}));
|
||||
}
|
||||
|
||||
return [
|
||||
...fileType('.jsonl', 'jsonl'),
|
||||
...fileType('.table.yaml', 'table.yaml'),
|
||||
...fileType('.view.sql', 'view.sql'),
|
||||
...fileType('.proc.sql', 'proc.sql'),
|
||||
...fileType('.func.sql', 'func.sql'),
|
||||
...fileType('.trigger.sql', 'trigger.sql'),
|
||||
...fileType('.matview.sql', 'matview.sql'),
|
||||
];
|
||||
} catch (err) {
|
||||
console.log('Error reading archive files', err.message);
|
||||
return [];
|
||||
}
|
||||
|
||||
return [
|
||||
...fileType('.jsonl', 'jsonl'),
|
||||
...fileType('.table.yaml', 'table.yaml'),
|
||||
...fileType('.view.sql', 'view.sql'),
|
||||
...fileType('.proc.sql', 'proc.sql'),
|
||||
...fileType('.func.sql', 'func.sql'),
|
||||
...fileType('.trigger.sql', 'trigger.sql'),
|
||||
...fileType('.matview.sql', 'matview.sql'),
|
||||
];
|
||||
},
|
||||
|
||||
refreshFiles_meta: true,
|
||||
|
||||
@@ -29,14 +29,14 @@ module.exports = {
|
||||
async get(_params, req) {
|
||||
const logins = getLogins();
|
||||
const login = logins ? logins.find(x => x.login == (req.auth && req.auth.user)) : null;
|
||||
const permissions = login ? login.permissions : null;
|
||||
const permissions = login ? login.permissions : process.env.PERMISSIONS;
|
||||
|
||||
return {
|
||||
runAsPortal: !!connections.portalConnections,
|
||||
singleDatabase: connections.singleDatabase,
|
||||
// hideAppEditor: !!process.env.HIDE_APP_EDITOR,
|
||||
allowShellConnection: platformInfo.allowShellConnection,
|
||||
allowShellScripting: platformInfo.allowShellConnection,
|
||||
allowShellScripting: platformInfo.allowShellScripting,
|
||||
isDocker: platformInfo.isDocker,
|
||||
permissions,
|
||||
login,
|
||||
@@ -59,13 +59,10 @@ module.exports = {
|
||||
|
||||
getSettings_meta: true,
|
||||
async getSettings() {
|
||||
try {
|
||||
return this.fillMissingSettings(
|
||||
JSON.parse(await fs.readFile(path.join(datadir(), 'settings.json'), { encoding: 'utf-8' }))
|
||||
);
|
||||
} catch (err) {
|
||||
return this.fillMissingSettings({});
|
||||
}
|
||||
const res = await lock.acquire('settings', async () => {
|
||||
return await this.loadSettings();
|
||||
});
|
||||
return res;
|
||||
},
|
||||
|
||||
fillMissingSettings(value) {
|
||||
@@ -76,15 +73,32 @@ module.exports = {
|
||||
// res['app.useNativeMenu'] = os.platform() == 'darwin' ? true : false;
|
||||
res['app.useNativeMenu'] = false;
|
||||
}
|
||||
for (const envVar in process.env) {
|
||||
if (envVar.startsWith('SETTINGS_')) {
|
||||
const key = envVar.substring('SETTINGS_'.length);
|
||||
if (!res[key]) {
|
||||
res[key] = process.env[envVar];
|
||||
}
|
||||
}
|
||||
}
|
||||
return res;
|
||||
},
|
||||
|
||||
async loadSettings() {
|
||||
try {
|
||||
const settingsText = await fs.readFile(path.join(datadir(), 'settings.json'), { encoding: 'utf-8' });
|
||||
return this.fillMissingSettings(JSON.parse(settingsText));
|
||||
} catch (err) {
|
||||
return this.fillMissingSettings({});
|
||||
}
|
||||
},
|
||||
|
||||
updateSettings_meta: true,
|
||||
async updateSettings(values, req) {
|
||||
if (!hasPermission(`settings/change`, req)) return false;
|
||||
|
||||
const res = await lock.acquire('update', async () => {
|
||||
const currentValue = await this.getSettings();
|
||||
const res = await lock.acquire('settings', async () => {
|
||||
const currentValue = await this.loadSettings();
|
||||
try {
|
||||
const updated = {
|
||||
...currentValue,
|
||||
|
||||
@@ -13,6 +13,7 @@ const JsonLinesDatabase = require('../utility/JsonLinesDatabase');
|
||||
const processArgs = require('../utility/processArgs');
|
||||
const { safeJsonParse } = require('dbgate-tools');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const { connectionHasPermission, testConnectionPermission } = require('../utility/hasPermission');
|
||||
|
||||
function getNamedArgs() {
|
||||
const res = {};
|
||||
@@ -52,6 +53,8 @@ function getPortalCollections() {
|
||||
databaseUrl: process.env[`URL_${id}`],
|
||||
useDatabaseUrl: !!process.env[`URL_${id}`],
|
||||
databaseFile: process.env[`FILE_${id}`],
|
||||
socketPath: process.env[`SOCKET_PATH_${id}`],
|
||||
authType: process.env[`AUTH_TYPE_${id}`] || (process.env[`SOCKET_PATH_${id}`] ? 'socket' : undefined),
|
||||
defaultDatabase:
|
||||
process.env[`DATABASE_${id}`] ||
|
||||
(process.env[`FILE_${id}`] ? getDatabaseFileLabel(process.env[`FILE_${id}`]) : null),
|
||||
@@ -59,6 +62,7 @@ function getPortalCollections() {
|
||||
displayName: process.env[`LABEL_${id}`],
|
||||
isReadOnly: process.env[`READONLY_${id}`],
|
||||
databases: process.env[`DBCONFIG_${id}`] ? safeJsonParse(process.env[`DBCONFIG_${id}`]) : null,
|
||||
parent: process.env[`PARENT_${id}`] || undefined,
|
||||
|
||||
// SSH tunnel
|
||||
useSshTunnel: process.env[`USE_SSH_${id}`],
|
||||
@@ -165,10 +169,12 @@ module.exports = {
|
||||
},
|
||||
|
||||
list_meta: true,
|
||||
async list() {
|
||||
return portalConnections && !platformInfo.allowShellConnection
|
||||
? portalConnections.map(maskConnection)
|
||||
: this.datastore.find();
|
||||
async list(_params, req) {
|
||||
if (portalConnections) {
|
||||
if (platformInfo.allowShellConnection) return portalConnections;
|
||||
return portalConnections.map(maskConnection).filter(x => connectionHasPermission(x, req));
|
||||
}
|
||||
return (await this.datastore.find()).filter(x => connectionHasPermission(x, req));
|
||||
},
|
||||
|
||||
test_meta: true,
|
||||
@@ -215,16 +221,18 @@ module.exports = {
|
||||
},
|
||||
|
||||
update_meta: true,
|
||||
async update({ _id, values }) {
|
||||
async update({ _id, values }, req) {
|
||||
if (portalConnections) return;
|
||||
testConnectionPermission(_id, req);
|
||||
const res = await this.datastore.patch(_id, values);
|
||||
socket.emitChanged('connection-list-changed');
|
||||
return res;
|
||||
},
|
||||
|
||||
updateDatabase_meta: true,
|
||||
async updateDatabase({ conid, database, values }) {
|
||||
async updateDatabase({ conid, database, values }, req) {
|
||||
if (portalConnections) return;
|
||||
testConnectionPermission(conid, req);
|
||||
const conn = await this.datastore.get(conid);
|
||||
let databases = (conn && conn.databases) || [];
|
||||
if (databases.find(x => x.name == database)) {
|
||||
@@ -240,8 +248,9 @@ module.exports = {
|
||||
},
|
||||
|
||||
delete_meta: true,
|
||||
async delete(connection) {
|
||||
async delete(connection, req) {
|
||||
if (portalConnections) return;
|
||||
testConnectionPermission(connection, req);
|
||||
const res = await this.datastore.remove(connection._id);
|
||||
socket.emitChanged('connection-list-changed');
|
||||
return res;
|
||||
@@ -258,7 +267,8 @@ module.exports = {
|
||||
},
|
||||
|
||||
get_meta: true,
|
||||
async get({ conid }) {
|
||||
async get({ conid }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
return this.getCore({ conid, mask: true });
|
||||
},
|
||||
|
||||
|
||||
@@ -26,6 +26,7 @@ const generateDeploySql = require('../shell/generateDeploySql');
|
||||
const { createTwoFilesPatch } = require('diff');
|
||||
const diff2htmlPage = require('../utility/diff2htmlPage');
|
||||
const processArgs = require('../utility/processArgs');
|
||||
const { testConnectionPermission } = require('../utility/hasPermission');
|
||||
|
||||
module.exports = {
|
||||
/** @type {import('dbgate-types').OpenedDatabaseConnection[]} */
|
||||
@@ -130,7 +131,8 @@ module.exports = {
|
||||
},
|
||||
|
||||
queryData_meta: true,
|
||||
async queryData({ conid, database, sql }) {
|
||||
async queryData({ conid, database, sql }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
console.log(`Processing query, conid=${conid}, database=${database}, sql=${sql}`);
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
// if (opened && opened.status && opened.status.name == 'error') {
|
||||
@@ -141,14 +143,16 @@ module.exports = {
|
||||
},
|
||||
|
||||
sqlSelect_meta: true,
|
||||
async sqlSelect({ conid, database, select }) {
|
||||
async sqlSelect({ conid, database, select }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
const res = await this.sendRequest(opened, { msgtype: 'sqlSelect', select });
|
||||
return res;
|
||||
},
|
||||
|
||||
runScript_meta: true,
|
||||
async runScript({ conid, database, sql }) {
|
||||
async runScript({ conid, database, sql }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
console.log(`Processing script, conid=${conid}, database=${database}, sql=${sql}`);
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
const res = await this.sendRequest(opened, { msgtype: 'runScript', sql });
|
||||
@@ -156,13 +160,15 @@ module.exports = {
|
||||
},
|
||||
|
||||
collectionData_meta: true,
|
||||
async collectionData({ conid, database, options }) {
|
||||
async collectionData({ conid, database, options }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
const res = await this.sendRequest(opened, { msgtype: 'collectionData', options });
|
||||
return res.result || null;
|
||||
},
|
||||
|
||||
async loadDataCore(msgtype, { conid, database, ...args }) {
|
||||
async loadDataCore(msgtype, { conid, database, ...args }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
const res = await this.sendRequest(opened, { msgtype, ...args });
|
||||
if (res.errorMessage) {
|
||||
@@ -176,32 +182,38 @@ module.exports = {
|
||||
},
|
||||
|
||||
loadKeys_meta: true,
|
||||
async loadKeys({ conid, database, root, filter }) {
|
||||
async loadKeys({ conid, database, root, filter }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
return this.loadDataCore('loadKeys', { conid, database, root, filter });
|
||||
},
|
||||
|
||||
exportKeys_meta: true,
|
||||
async exportKeys({ conid, database, options }) {
|
||||
async exportKeys({ conid, database, options }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
return this.loadDataCore('exportKeys', { conid, database, options });
|
||||
},
|
||||
|
||||
loadKeyInfo_meta: true,
|
||||
async loadKeyInfo({ conid, database, key }) {
|
||||
async loadKeyInfo({ conid, database, key }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
return this.loadDataCore('loadKeyInfo', { conid, database, key });
|
||||
},
|
||||
|
||||
loadKeyTableRange_meta: true,
|
||||
async loadKeyTableRange({ conid, database, key, cursor, count }) {
|
||||
async loadKeyTableRange({ conid, database, key, cursor, count }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
return this.loadDataCore('loadKeyTableRange', { conid, database, key, cursor, count });
|
||||
},
|
||||
|
||||
loadFieldValues_meta: true,
|
||||
async loadFieldValues({ conid, database, schemaName, pureName, field, search }) {
|
||||
async loadFieldValues({ conid, database, schemaName, pureName, field, search }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
return this.loadDataCore('loadFieldValues', { conid, database, schemaName, pureName, field, search });
|
||||
},
|
||||
|
||||
callMethod_meta: true,
|
||||
async callMethod({ conid, database, method, args }) {
|
||||
async callMethod({ conid, database, method, args }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
return this.loadDataCore('callMethod', { conid, database, method, args });
|
||||
|
||||
// const opened = await this.ensureOpened(conid, database);
|
||||
@@ -213,7 +225,8 @@ module.exports = {
|
||||
},
|
||||
|
||||
updateCollection_meta: true,
|
||||
async updateCollection({ conid, database, changeSet }) {
|
||||
async updateCollection({ conid, database, changeSet }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
const res = await this.sendRequest(opened, { msgtype: 'updateCollection', changeSet });
|
||||
if (res.errorMessage) {
|
||||
@@ -225,7 +238,14 @@ module.exports = {
|
||||
},
|
||||
|
||||
status_meta: true,
|
||||
async status({ conid, database }) {
|
||||
async status({ conid, database }, req) {
|
||||
if (!conid) {
|
||||
return {
|
||||
name: 'error',
|
||||
message: 'No connection',
|
||||
};
|
||||
}
|
||||
testConnectionPermission(conid, req);
|
||||
const existing = this.opened.find(x => x.conid == conid && x.database == database);
|
||||
if (existing) {
|
||||
return {
|
||||
@@ -247,7 +267,8 @@ module.exports = {
|
||||
},
|
||||
|
||||
ping_meta: true,
|
||||
async ping({ conid, database }) {
|
||||
async ping({ conid, database }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
let existing = this.opened.find(x => x.conid == conid && x.database == database);
|
||||
|
||||
if (existing) {
|
||||
@@ -263,7 +284,8 @@ module.exports = {
|
||||
},
|
||||
|
||||
refresh_meta: true,
|
||||
async refresh({ conid, database, keepOpen }) {
|
||||
async refresh({ conid, database, keepOpen }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
if (!keepOpen) this.close(conid, database);
|
||||
|
||||
await this.ensureOpened(conid, database);
|
||||
@@ -271,7 +293,8 @@ module.exports = {
|
||||
},
|
||||
|
||||
syncModel_meta: true,
|
||||
async syncModel({ conid, database, isFullRefresh }) {
|
||||
async syncModel({ conid, database, isFullRefresh }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const conn = await this.ensureOpened(conid, database);
|
||||
conn.subprocess.send({ msgtype: 'syncModel', isFullRefresh });
|
||||
return { status: 'ok' };
|
||||
@@ -301,13 +324,15 @@ module.exports = {
|
||||
},
|
||||
|
||||
disconnect_meta: true,
|
||||
async disconnect({ conid, database }) {
|
||||
async disconnect({ conid, database }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
await this.close(conid, database, true);
|
||||
return { status: 'ok' };
|
||||
},
|
||||
|
||||
structure_meta: true,
|
||||
async structure({ conid, database }) {
|
||||
async structure({ conid, database }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
if (conid == '__model') {
|
||||
const model = await importDbModel(database);
|
||||
return model;
|
||||
@@ -324,14 +349,19 @@ module.exports = {
|
||||
},
|
||||
|
||||
serverVersion_meta: true,
|
||||
async serverVersion({ conid, database }) {
|
||||
async serverVersion({ conid, database }, req) {
|
||||
if (!conid) {
|
||||
return null;
|
||||
}
|
||||
testConnectionPermission(conid, req);
|
||||
if (!conid) return null;
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
return opened.serverVersion || null;
|
||||
},
|
||||
|
||||
sqlPreview_meta: true,
|
||||
async sqlPreview({ conid, database, objects, options }) {
|
||||
async sqlPreview({ conid, database, objects, options }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
// wait for structure
|
||||
await this.structure({ conid, database });
|
||||
|
||||
@@ -341,7 +371,8 @@ module.exports = {
|
||||
},
|
||||
|
||||
exportModel_meta: true,
|
||||
async exportModel({ conid, database }) {
|
||||
async exportModel({ conid, database }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const archiveFolder = await archive.getNewArchiveFolder({ database });
|
||||
await fs.mkdir(path.join(archivedir(), archiveFolder));
|
||||
const model = await this.structure({ conid, database });
|
||||
@@ -351,7 +382,8 @@ module.exports = {
|
||||
},
|
||||
|
||||
generateDeploySql_meta: true,
|
||||
async generateDeploySql({ conid, database, archiveFolder }) {
|
||||
async generateDeploySql({ conid, database, archiveFolder }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
const res = await this.sendRequest(opened, {
|
||||
msgtype: 'generateDeploySql',
|
||||
|
||||
@@ -8,6 +8,7 @@ const socket = require('../utility/socket');
|
||||
const scheduler = require('./scheduler');
|
||||
const getDiagramExport = require('../utility/getDiagramExport');
|
||||
const apps = require('./apps');
|
||||
const getMapExport = require('../utility/getMapExport');
|
||||
|
||||
function serialize(format, data) {
|
||||
if (format == 'text') return data;
|
||||
@@ -187,6 +188,12 @@ module.exports = {
|
||||
return true;
|
||||
},
|
||||
|
||||
exportMap_meta: true,
|
||||
async exportMap({ filePath, geoJson }) {
|
||||
await fs.writeFile(filePath, getMapExport(geoJson));
|
||||
return true;
|
||||
},
|
||||
|
||||
exportDiagram_meta: true,
|
||||
async exportDiagram({ filePath, html, css, themeType, themeClassName }) {
|
||||
await fs.writeFile(filePath, getDiagramExport(html, css, themeType, themeClassName));
|
||||
|
||||
@@ -7,6 +7,7 @@ const { handleProcessCommunication } = require('../utility/processComm');
|
||||
const lock = new AsyncLock();
|
||||
const config = require('./config');
|
||||
const processArgs = require('../utility/processArgs');
|
||||
const { testConnectionPermission } = require('../utility/hasPermission');
|
||||
|
||||
module.exports = {
|
||||
opened: [],
|
||||
@@ -90,19 +91,22 @@ module.exports = {
|
||||
},
|
||||
|
||||
disconnect_meta: true,
|
||||
async disconnect({ conid }) {
|
||||
async disconnect({ conid }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
await this.close(conid, true);
|
||||
return { status: 'ok' };
|
||||
},
|
||||
|
||||
listDatabases_meta: true,
|
||||
async listDatabases({ conid }) {
|
||||
async listDatabases({ conid }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid);
|
||||
return opened.databases;
|
||||
},
|
||||
|
||||
version_meta: true,
|
||||
async version({ conid }) {
|
||||
async version({ conid }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid);
|
||||
return opened.version;
|
||||
},
|
||||
@@ -132,7 +136,8 @@ module.exports = {
|
||||
},
|
||||
|
||||
refresh_meta: true,
|
||||
async refresh({ conid, keepOpen }) {
|
||||
async refresh({ conid, keepOpen }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
if (!keepOpen) this.close(conid);
|
||||
|
||||
await this.ensureOpened(conid);
|
||||
@@ -140,10 +145,20 @@ module.exports = {
|
||||
},
|
||||
|
||||
createDatabase_meta: true,
|
||||
async createDatabase({ conid, name }) {
|
||||
async createDatabase({ conid, name }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid);
|
||||
if (opened.connection.isReadOnly) return false;
|
||||
opened.subprocess.send({ msgtype: 'createDatabase', name });
|
||||
return { status: 'ok' };
|
||||
},
|
||||
|
||||
dropDatabase_meta: true,
|
||||
async dropDatabase({ conid, name }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid);
|
||||
if (opened.connection.isReadOnly) return false;
|
||||
opened.subprocess.send({ msgtype: 'dropDatabase', name });
|
||||
return { status: 'ok' };
|
||||
},
|
||||
};
|
||||
|
||||
@@ -103,6 +103,12 @@ module.exports = {
|
||||
if (handleProcessCommunication(message, subprocess)) return;
|
||||
this[`handle_${msgtype}`](sesid, message);
|
||||
});
|
||||
subprocess.on('exit', () => {
|
||||
this.opened = this.opened.filter(x => x.sesid != sesid);
|
||||
this.dispatchMessage(sesid, 'Query session closed');
|
||||
socket.emit(`session-closed-${sesid}`);
|
||||
});
|
||||
|
||||
subprocess.send({ msgtype: 'connect', ...connection, database });
|
||||
return _.pick(newOpened, ['conid', 'database', 'sesid']);
|
||||
},
|
||||
@@ -165,6 +171,17 @@ module.exports = {
|
||||
return { state: 'ok' };
|
||||
},
|
||||
|
||||
ping_meta: true,
|
||||
async ping({ sesid }) {
|
||||
const session = this.opened.find(x => x.sesid == sesid);
|
||||
if (!session) {
|
||||
throw new Error('Invalid session');
|
||||
}
|
||||
session.subprocess.send({ msgtype: 'ping' });
|
||||
|
||||
return { state: 'ok' };
|
||||
},
|
||||
|
||||
// runCommand_meta: true,
|
||||
// async runCommand({ conid, database, sql }) {
|
||||
// console.log(`Running SQL command , conid=${conid}, database=${database}, sql=${sql}`);
|
||||
|
||||
@@ -8,9 +8,10 @@ if (processArgs.startProcess) {
|
||||
const proc = require('./proc');
|
||||
const module = proc[processArgs.startProcess];
|
||||
module.start();
|
||||
} else if (!processArgs.checkParent && !global['API_PACKAGE']) {
|
||||
const main = require('./main');
|
||||
}
|
||||
|
||||
if (processArgs.listenApi) {
|
||||
const main = require('./main');
|
||||
main.start();
|
||||
}
|
||||
|
||||
|
||||
@@ -25,6 +25,7 @@ const plugins = require('./controllers/plugins');
|
||||
const files = require('./controllers/files');
|
||||
const scheduler = require('./controllers/scheduler');
|
||||
const queryHistory = require('./controllers/queryHistory');
|
||||
const onFinished = require('on-finished');
|
||||
|
||||
const { rundir } = require('./utility/directories');
|
||||
const platformInfo = require('./utility/platformInfo');
|
||||
@@ -63,7 +64,10 @@ function start() {
|
||||
|
||||
// Tell the client to retry every 10 seconds if connectivity is lost
|
||||
res.write('retry: 10000\n\n');
|
||||
socket.setSseResponse(res);
|
||||
socket.addSseResponse(res);
|
||||
onFinished(req, () => {
|
||||
socket.removeSseResponse(res);
|
||||
});
|
||||
});
|
||||
|
||||
app.use(bodyParser.json({ limit: '50mb' }));
|
||||
|
||||
@@ -156,11 +156,11 @@ function resolveAnalysedPromises() {
|
||||
afterAnalyseCallbacks = [];
|
||||
}
|
||||
|
||||
async function handleRunScript({ msgid, sql }) {
|
||||
async function handleRunScript({ msgid, sql }, skipReadonlyCheck = false) {
|
||||
await waitConnected();
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
ensureExecuteCustomScript(driver);
|
||||
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
|
||||
await driver.script(systemConnection, sql);
|
||||
process.send({ msgtype: 'response', msgid });
|
||||
} catch (err) {
|
||||
@@ -168,15 +168,16 @@ async function handleRunScript({ msgid, sql }) {
|
||||
}
|
||||
}
|
||||
|
||||
async function handleQueryData({ msgid, sql }) {
|
||||
async function handleQueryData({ msgid, sql }, skipReadonlyCheck = false) {
|
||||
await waitConnected();
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
ensureExecuteCustomScript(driver);
|
||||
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
|
||||
// console.log(sql);
|
||||
const res = await driver.query(systemConnection, sql);
|
||||
process.send({ msgtype: 'response', msgid, ...res });
|
||||
} catch (err) {
|
||||
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
|
||||
process.send({ msgtype: 'response', msgid, errorMessage: err.message || 'Error executing SQL script' });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -184,7 +185,7 @@ async function handleSqlSelect({ msgid, select }) {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
const dmp = driver.createDumper();
|
||||
dumpSqlSelect(dmp, select);
|
||||
return handleQueryData({ msgid, sql: dmp.s });
|
||||
return handleQueryData({ msgid, sql: dmp.s }, true);
|
||||
}
|
||||
|
||||
async function handleDriverDataCore(msgid, callMethod) {
|
||||
@@ -334,11 +335,11 @@ function start() {
|
||||
|
||||
setInterval(() => {
|
||||
const time = new Date().getTime();
|
||||
if (time - lastPing > 120 * 1000) {
|
||||
if (time - lastPing > 40 * 1000) {
|
||||
console.log('Database connection not alive, exiting');
|
||||
process.exit(0);
|
||||
}
|
||||
}, 60 * 1000);
|
||||
}, 10 * 1000);
|
||||
|
||||
process.on('message', async message => {
|
||||
if (handleProcessCommunication(message)) return;
|
||||
|
||||
@@ -2,7 +2,6 @@ const stableStringify = require('json-stable-stringify');
|
||||
const { extractBoolSettingsValue, extractIntSettingsValue } = require('dbgate-tools');
|
||||
const childProcessChecker = require('../utility/childProcessChecker');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const { decryptConnection } = require('../utility/crypting');
|
||||
const connectUtility = require('../utility/connectUtility');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
|
||||
@@ -81,14 +80,16 @@ function handlePing() {
|
||||
lastPing = new Date().getTime();
|
||||
}
|
||||
|
||||
async function handleCreateDatabase({ name }) {
|
||||
async function handleDatabaseOp(op, { name }) {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
systemConnection = await connectUtility(driver, storedConnection, 'app');
|
||||
console.log(`RUNNING SCRIPT: CREATE DATABASE ${driver.dialect.quoteIdentifier(name)}`);
|
||||
if (driver.createDatabase) {
|
||||
await driver.createDatabase(systemConnection, name);
|
||||
if (driver[op]) {
|
||||
await driver[op](systemConnection, name);
|
||||
} else {
|
||||
await driver.query(systemConnection, `CREATE DATABASE ${driver.dialect.quoteIdentifier(name)}`);
|
||||
const dmp = driver.createDumper();
|
||||
dmp[op](name);
|
||||
console.log(`RUNNING SCRIPT: ${dmp.s}`);
|
||||
await driver.query(systemConnection, dmp.s);
|
||||
}
|
||||
await handleRefresh();
|
||||
}
|
||||
@@ -96,7 +97,8 @@ async function handleCreateDatabase({ name }) {
|
||||
const messageHandlers = {
|
||||
connect: handleConnect,
|
||||
ping: handlePing,
|
||||
createDatabase: handleCreateDatabase,
|
||||
createDatabase: props => handleDatabaseOp('createDatabase', props),
|
||||
dropDatabase: props => handleDatabaseOp('dropDatabase', props),
|
||||
};
|
||||
|
||||
async function handleMessage({ msgtype, ...other }) {
|
||||
@@ -109,11 +111,11 @@ function start() {
|
||||
|
||||
setInterval(() => {
|
||||
const time = new Date().getTime();
|
||||
if (time - lastPing > 120 * 1000) {
|
||||
if (time - lastPing > 40 * 1000) {
|
||||
console.log('Server connection not alive, exiting');
|
||||
process.exit(0);
|
||||
}
|
||||
}, 60 * 1000);
|
||||
}, 10 * 1000);
|
||||
|
||||
process.on('message', async message => {
|
||||
if (handleProcessCommunication(message)) return;
|
||||
|
||||
@@ -15,6 +15,7 @@ let systemConnection;
|
||||
let storedConnection;
|
||||
let afterConnectCallbacks = [];
|
||||
// let currentHandlers = [];
|
||||
let lastPing = null;
|
||||
|
||||
class TableWriter {
|
||||
constructor() {
|
||||
@@ -101,8 +102,9 @@ class TableWriter {
|
||||
}
|
||||
|
||||
class StreamHandler {
|
||||
constructor(resultIndexHolder, resolve) {
|
||||
constructor(resultIndexHolder, resolve, startLine) {
|
||||
this.recordset = this.recordset.bind(this);
|
||||
this.startLine = startLine;
|
||||
this.row = this.row.bind(this);
|
||||
// this.error = this.error.bind(this);
|
||||
this.done = this.done.bind(this);
|
||||
@@ -155,14 +157,21 @@ class StreamHandler {
|
||||
this.resolve();
|
||||
}
|
||||
info(info) {
|
||||
if (info && info.line != null) {
|
||||
info = {
|
||||
...info,
|
||||
line: this.startLine + info.line,
|
||||
};
|
||||
}
|
||||
process.send({ msgtype: 'info', info });
|
||||
}
|
||||
}
|
||||
|
||||
function handleStream(driver, resultIndexHolder, sql) {
|
||||
function handleStream(driver, resultIndexHolder, sqlItem) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const handler = new StreamHandler(resultIndexHolder, resolve);
|
||||
driver.stream(systemConnection, sql, handler);
|
||||
const start = sqlItem.trimStart || sqlItem.start;
|
||||
const handler = new StreamHandler(resultIndexHolder, resolve, start && start.line);
|
||||
driver.stream(systemConnection, sqlItem.text, handler);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -221,7 +230,10 @@ async function handleExecuteQuery({ sql }) {
|
||||
const resultIndexHolder = {
|
||||
value: 0,
|
||||
};
|
||||
for (const sqlItem of splitQuery(sql, driver.getQuerySplitterOptions('stream'))) {
|
||||
for (const sqlItem of splitQuery(sql, {
|
||||
...driver.getQuerySplitterOptions('stream'),
|
||||
returnRichInfo: true,
|
||||
})) {
|
||||
await handleStream(driver, resultIndexHolder, sqlItem);
|
||||
// const handler = new StreamHandler(resultIndex);
|
||||
// const stream = await driver.stream(systemConnection, sqlItem, handler);
|
||||
@@ -260,10 +272,15 @@ async function handleExecuteReader({ jslid, sql, fileName }) {
|
||||
});
|
||||
}
|
||||
|
||||
function handlePing() {
|
||||
lastPing = new Date().getTime();
|
||||
}
|
||||
|
||||
const messageHandlers = {
|
||||
connect: handleConnect,
|
||||
executeQuery: handleExecuteQuery,
|
||||
executeReader: handleExecuteReader,
|
||||
ping: handlePing,
|
||||
// cancel: handleCancel,
|
||||
};
|
||||
|
||||
@@ -274,6 +291,17 @@ async function handleMessage({ msgtype, ...other }) {
|
||||
|
||||
function start() {
|
||||
childProcessChecker();
|
||||
|
||||
lastPing = new Date().getTime();
|
||||
|
||||
setInterval(() => {
|
||||
const time = new Date().getTime();
|
||||
if (time - lastPing > 25 * 1000) {
|
||||
console.log('Session not alive, exiting');
|
||||
process.exit(0);
|
||||
}
|
||||
}, 10 * 1000);
|
||||
|
||||
process.on('message', async message => {
|
||||
if (handleProcessCommunication(message)) return;
|
||||
try {
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
const fs = require('fs-extra');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const childProcessChecker = require('../utility/childProcessChecker');
|
||||
const { SSHConnection } = require('node-ssh-forward');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
const { SSHConnection } = require('../utility/SSHConnection');
|
||||
|
||||
async function getSshConnection(connection) {
|
||||
const sshConfig = {
|
||||
@@ -35,6 +35,8 @@ async function handleStart({ connection, tunnelConfig }) {
|
||||
tunnelConfig,
|
||||
});
|
||||
} catch (err) {
|
||||
console.log('Error creating SSH tunnel connection:', err.message);
|
||||
|
||||
process.send({
|
||||
msgtype: 'error',
|
||||
connection,
|
||||
|
||||
@@ -47,7 +47,7 @@ async function importDatabase({ connection = undefined, systemConnection = undef
|
||||
const downloadedFile = await download(inputFile);
|
||||
|
||||
const fileStream = fs.createReadStream(downloadedFile, 'utf-8');
|
||||
const splittedStream = splitQueryStream(fileStream, driver.getQuerySplitterOptions());
|
||||
const splittedStream = splitQueryStream(fileStream, driver.getQuerySplitterOptions('script'));
|
||||
const importStream = new ImportStream(pool, driver);
|
||||
// @ts-ignore
|
||||
splittedStream.pipe(importStream);
|
||||
|
||||
@@ -20,7 +20,9 @@ async function queryReader({
|
||||
const driver = requireEngineDriver(connection);
|
||||
const pool = await connectUtility(driver, connection, queryType == 'json' ? 'read' : 'script');
|
||||
console.log(`Connected.`);
|
||||
return queryType == 'json' ? await driver.readJsonQuery(pool, query) : await driver.readQuery(pool, query || sql);
|
||||
const reader =
|
||||
queryType == 'json' ? await driver.readJsonQuery(pool, query) : await driver.readQuery(pool, query || sql);
|
||||
return reader;
|
||||
}
|
||||
|
||||
module.exports = queryReader;
|
||||
|
||||
@@ -0,0 +1,251 @@
|
||||
/*
|
||||
* Copyright 2018 Stocard GmbH.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
const { Client } = require('ssh2');
|
||||
const net = require('net');
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const debug = require('debug');
|
||||
|
||||
// interface Options {
|
||||
// username?: string;
|
||||
// password?: string;
|
||||
// privateKey?: string | Buffer;
|
||||
// agentForward?: boolean;
|
||||
// bastionHost?: string;
|
||||
// passphrase?: string;
|
||||
// endPort?: number;
|
||||
// endHost: string;
|
||||
// agentSocket?: string;
|
||||
// skipAutoPrivateKey?: boolean;
|
||||
// noReadline?: boolean;
|
||||
// }
|
||||
|
||||
// interface ForwardingOptions {
|
||||
// fromPort: number;
|
||||
// toPort: number;
|
||||
// toHost?: string;
|
||||
// }
|
||||
|
||||
class SSHConnection {
|
||||
constructor(options) {
|
||||
this.options = options;
|
||||
this.debug = debug('ssh');
|
||||
this.connections = [];
|
||||
this.isWindows = process.platform === 'win32';
|
||||
if (!options.username) {
|
||||
this.options.username = process.env['SSH_USERNAME'] || process.env['USER'];
|
||||
}
|
||||
if (!options.endPort) {
|
||||
this.options.endPort = 22;
|
||||
}
|
||||
if (!options.privateKey && !options.agentForward && !options.skipAutoPrivateKey) {
|
||||
const defaultFilePath = path.join(os.homedir(), '.ssh', 'id_rsa');
|
||||
if (fs.existsSync(defaultFilePath)) {
|
||||
this.options.privateKey = fs.readFileSync(defaultFilePath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async shutdown() {
|
||||
this.debug('Shutdown connections');
|
||||
for (const connection of this.connections) {
|
||||
connection.removeAllListeners();
|
||||
connection.end();
|
||||
}
|
||||
return new Promise(resolve => {
|
||||
if (this.server) {
|
||||
this.server.close(resolve);
|
||||
}
|
||||
return resolve();
|
||||
});
|
||||
}
|
||||
|
||||
async tty() {
|
||||
const connection = await this.establish();
|
||||
this.debug('Opening tty');
|
||||
await this.shell(connection);
|
||||
}
|
||||
|
||||
async executeCommand(command) {
|
||||
const connection = await this.establish();
|
||||
this.debug('Executing command "%s"', command);
|
||||
await this.shell(connection, command);
|
||||
}
|
||||
|
||||
async shell(connection, command) {
|
||||
return new Promise((resolve, reject) => {
|
||||
connection.shell((err, stream) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
stream
|
||||
.on('close', async () => {
|
||||
stream.end();
|
||||
process.stdin.unpipe(stream);
|
||||
process.stdin.destroy();
|
||||
connection.end();
|
||||
await this.shutdown();
|
||||
return resolve();
|
||||
})
|
||||
.stderr.on('data', data => {
|
||||
return reject(data);
|
||||
});
|
||||
stream.pipe(process.stdout);
|
||||
|
||||
if (command) {
|
||||
stream.end(`${command}\nexit\n`);
|
||||
} else {
|
||||
process.stdin.pipe(stream);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async establish() {
|
||||
let connection;
|
||||
if (this.options.bastionHost) {
|
||||
connection = await this.connectViaBastion(this.options.bastionHost);
|
||||
} else {
|
||||
connection = await this.connect(this.options.endHost);
|
||||
}
|
||||
return connection;
|
||||
}
|
||||
|
||||
async connectViaBastion(bastionHost) {
|
||||
this.debug('Connecting to bastion host "%s"', bastionHost);
|
||||
const connectionToBastion = await this.connect(bastionHost);
|
||||
return new Promise((resolve, reject) => {
|
||||
connectionToBastion.forwardOut(
|
||||
'127.0.0.1',
|
||||
22,
|
||||
this.options.endHost,
|
||||
this.options.endPort || 22,
|
||||
async (err, stream) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
const connection = await this.connect(this.options.endHost, stream);
|
||||
return resolve(connection);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
async connect(host, stream) {
|
||||
this.debug('Connecting to "%s"', host);
|
||||
const connection = new Client();
|
||||
return new Promise(async (resolve, reject) => {
|
||||
const options = {
|
||||
host,
|
||||
port: this.options.endPort,
|
||||
username: this.options.username,
|
||||
password: this.options.password,
|
||||
privateKey: this.options.privateKey,
|
||||
};
|
||||
if (this.options.agentForward) {
|
||||
options['agentForward'] = true;
|
||||
|
||||
// see https://github.com/mscdex/ssh2#client for agents on Windows
|
||||
// guaranteed to give the ssh agent sock if the agent is running (posix)
|
||||
let agentDefault = process.env['SSH_AUTH_SOCK'];
|
||||
if (this.isWindows) {
|
||||
// null or undefined
|
||||
if (agentDefault == null) {
|
||||
agentDefault = 'pageant';
|
||||
}
|
||||
}
|
||||
|
||||
const agentSock = this.options.agentSocket ? this.options.agentSocket : agentDefault;
|
||||
if (agentSock == null) {
|
||||
throw new Error('SSH Agent Socket is not provided, or is not set in the SSH_AUTH_SOCK env variable');
|
||||
}
|
||||
options['agent'] = agentSock;
|
||||
}
|
||||
if (stream) {
|
||||
options['sock'] = stream;
|
||||
}
|
||||
// PPK private keys can be encrypted, but won't contain the word 'encrypted'
|
||||
// in fact they always contain a `encryption` header, so we can't do a simple check
|
||||
options['passphrase'] = this.options.passphrase;
|
||||
const looksEncrypted = this.options.privateKey
|
||||
? this.options.privateKey.toString().toLowerCase().includes('encrypted')
|
||||
: false;
|
||||
if (looksEncrypted && !options['passphrase'] && !this.options.noReadline) {
|
||||
// options['passphrase'] = await this.getPassphrase();
|
||||
}
|
||||
connection.on('ready', () => {
|
||||
this.connections.push(connection);
|
||||
return resolve(connection);
|
||||
});
|
||||
|
||||
connection.on('error', error => {
|
||||
reject(error);
|
||||
});
|
||||
try {
|
||||
connection.connect(options);
|
||||
} catch (error) {
|
||||
reject(error);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// private async getPassphrase() {
|
||||
// return new Promise(resolve => {
|
||||
// const rl = readline.createInterface({
|
||||
// input: process.stdin,
|
||||
// output: process.stdout,
|
||||
// });
|
||||
// rl.question('Please type in the passphrase for your private key: ', answer => {
|
||||
// return resolve(answer);
|
||||
// });
|
||||
// });
|
||||
// }
|
||||
|
||||
async forward(options) {
|
||||
const connection = await this.establish();
|
||||
return new Promise((resolve, reject) => {
|
||||
this.server = net
|
||||
.createServer(socket => {
|
||||
this.debug(
|
||||
'Forwarding connection from "localhost:%d" to "%s:%d"',
|
||||
options.fromPort,
|
||||
options.toHost,
|
||||
options.toPort
|
||||
);
|
||||
connection.forwardOut(
|
||||
'localhost',
|
||||
options.fromPort,
|
||||
options.toHost || 'localhost',
|
||||
options.toPort,
|
||||
(error, stream) => {
|
||||
if (error) {
|
||||
return reject(error);
|
||||
}
|
||||
socket.pipe(stream);
|
||||
stream.pipe(socket);
|
||||
}
|
||||
);
|
||||
})
|
||||
.listen(options.fromPort, 'localhost', () => {
|
||||
return resolve();
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { SSHConnection };
|
||||
@@ -1,8 +1,5 @@
|
||||
const { SSHConnection } = require('node-ssh-forward');
|
||||
const portfinder = require('portfinder');
|
||||
const fs = require('fs-extra');
|
||||
const { decryptConnection } = require('./crypting');
|
||||
const { getSshTunnel } = require('./sshTunnel');
|
||||
const { getSshTunnelProxy } = require('./sshTunnelProxy');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const connections = require('../controllers/connections');
|
||||
|
||||
@@ -0,0 +1,77 @@
|
||||
const getMapExport = (geoJson) => {
|
||||
return `<html>
|
||||
<meta charset='utf-8'>
|
||||
|
||||
<head>
|
||||
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.8.0/dist/leaflet.css"
|
||||
integrity="sha512-hoalWLoI8r4UszCkZ5kL8vayOGVae1oxXe/2A4AO6J9+580uKHDO3JdHb7NzwwzK5xr/Fs0W40kiNHxM9vyTtQ=="
|
||||
crossorigin=""/>
|
||||
|
||||
<script src="https://unpkg.com/leaflet@1.8.0/dist/leaflet.js"
|
||||
integrity="sha512-BB3hKbKWOc9Ez/TAwyWxNXeoV9c1v6FIeYiBieIWkpLjauysF18NzgR1MBNBXf8/KABdlkX68nAhlwcDFLGPCQ=="
|
||||
crossorigin=""></script>
|
||||
|
||||
<script>
|
||||
function createMap() {
|
||||
map = leaflet.map('map').setView([50, 15], 13);
|
||||
|
||||
leaflet
|
||||
.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
|
||||
maxZoom: 19,
|
||||
attribution: '<a href="https://dbgate.org" title="Exported from DbGate">DbGate</a> | © OpenStreetMap',
|
||||
})
|
||||
.addTo(map);
|
||||
|
||||
const geoJsonObj = leaflet
|
||||
.geoJSON(${JSON.stringify(geoJson)}, {
|
||||
style: function () {
|
||||
return {
|
||||
weight: 2,
|
||||
fillColor: '#ff7800',
|
||||
color: '#ff7800',
|
||||
opacity: 0.8,
|
||||
fillOpacity: 0.4,
|
||||
};
|
||||
},
|
||||
pointToLayer: (feature, latlng) => {
|
||||
return leaflet.circleMarker(latlng, {
|
||||
radius: 7,
|
||||
weight: 2,
|
||||
fillColor: '#ff0000',
|
||||
color: '#ff0000',
|
||||
opacity: 0.9,
|
||||
fillOpacity: 0.9,
|
||||
});
|
||||
},
|
||||
onEachFeature: (feature, layer) => {
|
||||
// does this feature have a property named popupContent?
|
||||
if (feature.properties && feature.properties.popupContent) {
|
||||
layer.bindPopup(feature.properties.popupContent);
|
||||
layer.bindTooltip(feature.properties.popupContent);
|
||||
}
|
||||
},
|
||||
})
|
||||
.addTo(map);
|
||||
map.fitBounds(geoJsonObj.getBounds());
|
||||
}
|
||||
</script>
|
||||
|
||||
<style>
|
||||
#map {
|
||||
position: fixed;
|
||||
left: 0;
|
||||
top: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body onload='createMap()'>
|
||||
<div id='map'></div>
|
||||
</body>
|
||||
|
||||
</html>`;
|
||||
};
|
||||
|
||||
module.exports = getMapExport;
|
||||
@@ -4,12 +4,21 @@ const _ = require('lodash');
|
||||
const userPermissions = {};
|
||||
|
||||
function hasPermission(tested, req) {
|
||||
if (!req) {
|
||||
// request object not available, allow all
|
||||
return true;
|
||||
}
|
||||
const { user } = (req && req.auth) || {};
|
||||
const key = user || '';
|
||||
const logins = getLogins();
|
||||
if (!userPermissions[key] && logins) {
|
||||
const login = logins.find(x => x.login == user);
|
||||
userPermissions[key] = compilePermissions(login ? login.permissions : null);
|
||||
|
||||
if (!userPermissions[key]) {
|
||||
if (logins) {
|
||||
const login = logins.find(x => x.login == user);
|
||||
userPermissions[key] = compilePermissions(login ? login.permissions : null);
|
||||
} else {
|
||||
userPermissions[key] = compilePermissions(process.env.PERMISSIONS);
|
||||
}
|
||||
}
|
||||
return testPermission(tested, userPermissions[key]);
|
||||
}
|
||||
@@ -50,7 +59,26 @@ function getLogins() {
|
||||
return loginsCache;
|
||||
}
|
||||
|
||||
function connectionHasPermission(connection, req) {
|
||||
if (!connection) {
|
||||
return true;
|
||||
}
|
||||
if (_.isString(connection)) {
|
||||
return hasPermission(`connections/${connection}`, req);
|
||||
} else {
|
||||
return hasPermission(`connections/${connection._id}`, req);
|
||||
}
|
||||
}
|
||||
|
||||
function testConnectionPermission(connection, req) {
|
||||
if (!connectionHasPermission(connection, req)) {
|
||||
throw new Error('Connection permission not granted');
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
hasPermission,
|
||||
getLogins,
|
||||
connectionHasPermission,
|
||||
testConnectionPermission,
|
||||
};
|
||||
|
||||
@@ -39,8 +39,8 @@ const platformInfo = {
|
||||
environment: process.env.NODE_ENV,
|
||||
platform,
|
||||
runningInWebpack: !!process.env.WEBPACK_DEV_SERVER_URL,
|
||||
allowShellConnection: !!process.env.SHELL_CONNECTION || !!isElectron(),
|
||||
allowShellScripting: !!process.env.SHELL_SCRIPTING || !!isElectron(),
|
||||
allowShellConnection: !processArgs.listenApiChild || !!process.env.SHELL_CONNECTION || !!isElectron(),
|
||||
allowShellScripting: !processArgs.listenApiChild || !!process.env.SHELL_SCRIPTING || !!isElectron(),
|
||||
defaultKeyfile: path.join(os.homedir(), '.ssh/id_rsa'),
|
||||
};
|
||||
|
||||
|
||||
@@ -11,6 +11,8 @@ const startProcess = getNamedArg('--start-process');
|
||||
const isForkedApi = process.argv.includes('--is-forked-api');
|
||||
const pluginsDir = getNamedArg('--plugins-dir');
|
||||
const workspaceDir = getNamedArg('--workspace-dir');
|
||||
const listenApi = process.argv.includes('--listen-api');
|
||||
const listenApiChild = process.argv.includes('--listen-api-child') || listenApi;
|
||||
|
||||
function getPassArgs() {
|
||||
const res = [];
|
||||
@@ -20,6 +22,9 @@ function getPassArgs() {
|
||||
if (global['PLUGINS_DIR']) {
|
||||
res.push('--plugins-dir', global['PLUGINS_DIR']);
|
||||
}
|
||||
if (listenApiChild) {
|
||||
res.push('listen-api-child');
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
@@ -30,4 +35,6 @@ module.exports = {
|
||||
getPassArgs,
|
||||
pluginsDir,
|
||||
workspaceDir,
|
||||
listenApi,
|
||||
listenApiChild,
|
||||
};
|
||||
|
||||
@@ -16,7 +16,9 @@ function requireEngineDriver(connection) {
|
||||
if (engine.includes('@')) {
|
||||
const [shortName, packageName] = engine.split('@');
|
||||
const plugin = requirePlugin(packageName);
|
||||
return plugin.drivers.find(x => x.engine == engine);
|
||||
if (plugin.drivers) {
|
||||
return plugin.drivers.find(x => x.engine == engine);
|
||||
}
|
||||
}
|
||||
throw new Error(`Could not find engine driver ${engine}`);
|
||||
}
|
||||
|
||||
@@ -1,36 +1,33 @@
|
||||
let sseResponse = null;
|
||||
const _ = require('lodash');
|
||||
|
||||
const sseResponses = [];
|
||||
let electronSender = null;
|
||||
let init = [];
|
||||
let pingConfigured = false;
|
||||
|
||||
module.exports = {
|
||||
setSseResponse(value) {
|
||||
sseResponse = value;
|
||||
setInterval(() => this.emit('ping'), 29 * 1000);
|
||||
ensurePing() {
|
||||
if (!pingConfigured) {
|
||||
setInterval(() => this.emit('ping'), 29 * 1000);
|
||||
pingConfigured = true;
|
||||
}
|
||||
},
|
||||
addSseResponse(value) {
|
||||
sseResponses.push(value);
|
||||
this.ensurePing();
|
||||
},
|
||||
removeSseResponse(value) {
|
||||
_.remove(sseResponses, x => x == value);
|
||||
},
|
||||
setElectronSender(value) {
|
||||
electronSender = value;
|
||||
this.ensurePing();
|
||||
},
|
||||
emit(message, data) {
|
||||
if (electronSender) {
|
||||
if (init.length > 0) {
|
||||
for (const item of init) {
|
||||
electronSender.send(item.message, item.data == null ? null : item.data);
|
||||
}
|
||||
init = [];
|
||||
}
|
||||
electronSender.send(message, data == null ? null : data);
|
||||
} else if (sseResponse) {
|
||||
if (init.length > 0) {
|
||||
for (const item of init) {
|
||||
sseResponse.write(
|
||||
`event: ${item.message}\ndata: ${JSON.stringify(item.data == null ? null : item.data)}\n\n`
|
||||
);
|
||||
}
|
||||
init = [];
|
||||
}
|
||||
sseResponse.write(`event: ${message}\ndata: ${JSON.stringify(data == null ? null : data)}\n\n`);
|
||||
} else {
|
||||
init.push([{ message, data }]);
|
||||
}
|
||||
for (const res of sseResponses) {
|
||||
res.write(`event: ${message}\ndata: ${JSON.stringify(data == null ? null : data)}\n\n`);
|
||||
}
|
||||
},
|
||||
emitChanged(key) {
|
||||
|
||||
@@ -47,7 +47,6 @@ module.exports = function useController(app, electron, route, controller) {
|
||||
|
||||
let method = 'post';
|
||||
let raw = false;
|
||||
let rawParams = false;
|
||||
|
||||
// if (_.isString(meta)) {
|
||||
// method = meta;
|
||||
@@ -55,7 +54,6 @@ module.exports = function useController(app, electron, route, controller) {
|
||||
if (_.isPlainObject(meta)) {
|
||||
method = meta.method;
|
||||
raw = meta.raw;
|
||||
rawParams = meta.rawParams;
|
||||
}
|
||||
|
||||
if (raw) {
|
||||
@@ -67,9 +65,7 @@ module.exports = function useController(app, electron, route, controller) {
|
||||
// controller._init_called = true;
|
||||
// }
|
||||
try {
|
||||
let params = [{ ...req.body, ...req.query }, req];
|
||||
if (rawParams) params = [req, res];
|
||||
const data = await controller[key](...params);
|
||||
const data = await controller[key]({ ...req.body, ...req.query }, req);
|
||||
res.json(data);
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
module.exports = {
|
||||
preset: 'ts-jest',
|
||||
testEnvironment: 'node',
|
||||
moduleFileExtensions: ['js'],
|
||||
};
|
||||
@@ -5,6 +5,8 @@
|
||||
"typings": "lib/index.d.ts",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"test": "jest",
|
||||
"test:ci": "jest --json --outputFile=result.json --testLocationInResults",
|
||||
"start": "tsc --watch"
|
||||
},
|
||||
"files": [
|
||||
@@ -12,11 +14,14 @@
|
||||
],
|
||||
"dependencies": {
|
||||
"dbgate-sqltree": "^5.0.0-alpha.1",
|
||||
"dbgate-tools": "^5.0.0-alpha.1",
|
||||
"dbgate-filterparser": "^5.0.0-alpha.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"dbgate-types": "^5.0.0-alpha.1",
|
||||
"@types/node": "^13.7.0",
|
||||
"jest": "^28.1.3",
|
||||
"ts-jest": "^28.0.7",
|
||||
"typescript": "^4.4.3"
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,15 @@
|
||||
import _ from 'lodash';
|
||||
import { Command, Insert, Update, Delete, UpdateField, Condition, AllowIdentityInsert } from 'dbgate-sqltree';
|
||||
import { NamedObjectInfo, DatabaseInfo } from 'dbgate-types';
|
||||
import {
|
||||
Command,
|
||||
Insert,
|
||||
Update,
|
||||
Delete,
|
||||
UpdateField,
|
||||
Condition,
|
||||
AllowIdentityInsert,
|
||||
Expression,
|
||||
} from 'dbgate-sqltree';
|
||||
import type { NamedObjectInfo, DatabaseInfo } from 'dbgate-types';
|
||||
|
||||
export interface ChangeSetItem {
|
||||
pureName: string;
|
||||
@@ -262,27 +271,39 @@ function changeSetInsertToSql(
|
||||
}
|
||||
|
||||
export function extractChangeSetCondition(item: ChangeSetItem, alias?: string): Condition {
|
||||
function getColumnCondition(columnName: string): Condition {
|
||||
const value = item.condition[columnName];
|
||||
const expr: Expression = {
|
||||
exprType: 'column',
|
||||
columnName,
|
||||
source: {
|
||||
name: {
|
||||
pureName: item.pureName,
|
||||
schemaName: item.schemaName,
|
||||
},
|
||||
alias,
|
||||
},
|
||||
};
|
||||
if (value == null) {
|
||||
return {
|
||||
conditionType: 'isNull',
|
||||
expr,
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
conditionType: 'binary',
|
||||
operator: '=',
|
||||
left: expr,
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions: _.keys(item.condition).map(columnName => ({
|
||||
conditionType: 'binary',
|
||||
operator: '=',
|
||||
left: {
|
||||
exprType: 'column',
|
||||
columnName,
|
||||
source: {
|
||||
name: {
|
||||
pureName: item.pureName,
|
||||
schemaName: item.schemaName,
|
||||
},
|
||||
alias,
|
||||
},
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value: item.condition[columnName],
|
||||
},
|
||||
})),
|
||||
conditions: _.keys(item.condition).map(columnName => getColumnCondition(columnName)),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import _ from 'lodash';
|
||||
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc, DisplayColumn } from './GridDisplay';
|
||||
import { EngineDriver, ViewInfo, ColumnInfo, CollectionInfo } from 'dbgate-types';
|
||||
import type { EngineDriver, ViewInfo, ColumnInfo, CollectionInfo } from 'dbgate-types';
|
||||
import { GridConfig, GridCache } from './GridConfig';
|
||||
|
||||
function getObjectKeys(obj) {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import _ from 'lodash';
|
||||
import { GridConfig, GridCache, GridConfigColumns, createGridCache, GroupFunc } from './GridConfig';
|
||||
import { TableInfo, EngineDriver, DatabaseInfo, SqlDialect } from 'dbgate-types';
|
||||
import type { TableInfo, EngineDriver, DatabaseInfo, SqlDialect } from 'dbgate-types';
|
||||
import { getFilterValueExpression } from 'dbgate-filterparser';
|
||||
import { ChangeCacheFunc, ChangeConfigFunc, DisplayColumn } from './GridDisplay';
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import _ from 'lodash';
|
||||
import { EngineDriver, ViewInfo, ColumnInfo } from 'dbgate-types';
|
||||
import type { EngineDriver, ViewInfo, ColumnInfo } from 'dbgate-types';
|
||||
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
|
||||
import { GridConfig, GridCache } from './GridConfig';
|
||||
import { FreeTableModel } from './FreeTableModel';
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { TableInfo } from 'dbgate-types';
|
||||
import type { TableInfo } from 'dbgate-types';
|
||||
|
||||
export interface FreeTableModel {
|
||||
structure: TableInfo;
|
||||
|
||||
@@ -1,6 +1,3 @@
|
||||
import { DisplayColumn } from './GridDisplay';
|
||||
import { TableInfo } from 'dbgate-types';
|
||||
|
||||
export interface GridConfigColumns {
|
||||
hiddenColumns: string[];
|
||||
expandedColumns: string[];
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import _ from 'lodash';
|
||||
import { GridConfig, GridCache, GridConfigColumns, createGridCache, GroupFunc } from './GridConfig';
|
||||
import {
|
||||
import { GridConfig, GridCache, GridConfigColumns, createGridCache, GroupFunc, createGridConfig } from './GridConfig';
|
||||
import type {
|
||||
ForeignKeyInfo,
|
||||
TableInfo,
|
||||
ColumnInfo,
|
||||
@@ -24,7 +24,7 @@ export interface DisplayColumn {
|
||||
headerText: string;
|
||||
uniqueName: string;
|
||||
uniquePath: string[];
|
||||
notNull: boolean;
|
||||
notNull?: boolean;
|
||||
autoIncrement?: boolean;
|
||||
isPrimaryKey?: boolean;
|
||||
foreignKey?: ForeignKeyInfo;
|
||||
@@ -194,12 +194,14 @@ export abstract class GridDisplay {
|
||||
if (condition) {
|
||||
conditions.push(
|
||||
_.cloneDeepWith(condition, (expr: Expression) => {
|
||||
if (expr.exprType == 'placeholder')
|
||||
return {
|
||||
exprType: 'column',
|
||||
columnName: column.columnName,
|
||||
source: { alias: column.sourceAlias },
|
||||
};
|
||||
if (expr.exprType == 'placeholder') {
|
||||
return this.createColumnExpression(column, { alias: column.sourceAlias });
|
||||
}
|
||||
// return {
|
||||
// exprType: 'column',
|
||||
// columnName: column.columnName,
|
||||
// source: { alias: column.sourceAlias },
|
||||
// };
|
||||
})
|
||||
);
|
||||
}
|
||||
@@ -372,6 +374,22 @@ export abstract class GridDisplay {
|
||||
this.reload();
|
||||
}
|
||||
|
||||
addToSort(uniqueName, order) {
|
||||
this.setConfig(cfg => ({
|
||||
...cfg,
|
||||
sort: [...(cfg.sort || []), { uniqueName, order }],
|
||||
}));
|
||||
this.reload();
|
||||
}
|
||||
|
||||
clearSort() {
|
||||
this.setConfig(cfg => ({
|
||||
...cfg,
|
||||
sort: [],
|
||||
}));
|
||||
this.reload();
|
||||
}
|
||||
|
||||
setGrouping(uniqueName, groupFunc: GroupFunc) {
|
||||
this.setConfig(cfg => ({
|
||||
...cfg,
|
||||
@@ -408,6 +426,15 @@ export abstract class GridDisplay {
|
||||
return this.config.sort.find(x => x.uniqueName == uniqueName)?.order;
|
||||
}
|
||||
|
||||
getSortOrderIndex(uniqueName) {
|
||||
if (this.config.sort.length <= 1) return -1;
|
||||
return _.findIndex(this.config.sort, x => x.uniqueName == uniqueName);
|
||||
}
|
||||
|
||||
isSortDefined() {
|
||||
return (this.config.sort || []).length > 0;
|
||||
}
|
||||
|
||||
get filterCount() {
|
||||
return _.compact(_.values(this.config.filters)).length;
|
||||
}
|
||||
@@ -420,6 +447,11 @@ export abstract class GridDisplay {
|
||||
this.reload();
|
||||
}
|
||||
|
||||
resetConfig() {
|
||||
this.setConfig(cfg => createGridConfig());
|
||||
this.reload();
|
||||
}
|
||||
|
||||
getChangeSetCondition(row) {
|
||||
if (!this.changeSetKeyFields) return null;
|
||||
return _.pick(row, this.changeSetKeyFields);
|
||||
@@ -458,6 +490,22 @@ export abstract class GridDisplay {
|
||||
|
||||
processReferences(select: Select, displayedColumnInfo: DisplayedColumnInfo, options) {}
|
||||
|
||||
createColumnExpression(col, source, alias?) {
|
||||
let expr = null;
|
||||
if (this.dialect.createColumnViewExpression) {
|
||||
expr = this.dialect.createColumnViewExpression(col.columnName, col.dataType, source, alias);
|
||||
if (expr) {
|
||||
return expr;
|
||||
}
|
||||
}
|
||||
return {
|
||||
exprType: 'column',
|
||||
alias: alias || col.columnName,
|
||||
source,
|
||||
...col,
|
||||
};
|
||||
}
|
||||
|
||||
createSelectBase(name: NamedObjectInfo, columns: ColumnInfo[], options) {
|
||||
if (!columns) return null;
|
||||
const orderColumnName = columns[0].columnName;
|
||||
@@ -467,12 +515,7 @@ export abstract class GridDisplay {
|
||||
name: _.pick(name, ['schemaName', 'pureName']),
|
||||
alias: 'basetbl',
|
||||
},
|
||||
columns: columns.map(col => ({
|
||||
exprType: 'column',
|
||||
alias: col.columnName,
|
||||
source: { alias: 'basetbl' },
|
||||
...col,
|
||||
})),
|
||||
columns: columns.map(col => this.createColumnExpression(col, { alias: 'basetbl' })),
|
||||
orderBy: [
|
||||
{
|
||||
exprType: 'column',
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import _ from 'lodash';
|
||||
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
|
||||
import { QueryResultColumn } from 'dbgate-types';
|
||||
import { GridConfig, GridCache } from './GridConfig';
|
||||
import { analyseCollectionDisplayColumns } from './CollectionGridDisplay';
|
||||
|
||||
|
||||
@@ -0,0 +1,128 @@
|
||||
import { PerspectiveDataLoadProps } from './PerspectiveDataProvider';
|
||||
import _pick from 'lodash/pick';
|
||||
import _zip from 'lodash/zip';
|
||||
import _difference from 'lodash/difference';
|
||||
import debug from 'debug';
|
||||
import stableStringify from 'json-stable-stringify';
|
||||
import { PerspectiveDataPattern } from './PerspectiveDataPattern';
|
||||
|
||||
const dbg = debug('dbgate:PerspectiveCache');
|
||||
|
||||
export class PerspectiveBindingGroup {
|
||||
constructor(public table: PerspectiveCacheTable) {}
|
||||
|
||||
groupSize?: number;
|
||||
loadedAll: boolean;
|
||||
loadedRows: any[] = [];
|
||||
bindingValues: any[];
|
||||
|
||||
matchRow(row) {
|
||||
return this.table.bindingColumns.every((column, index) => row[column] == this.bindingValues[index]);
|
||||
}
|
||||
}
|
||||
|
||||
export class PerspectiveCacheTable {
|
||||
constructor(props: PerspectiveDataLoadProps, public cache: PerspectiveCache) {
|
||||
this.schemaName = props.schemaName;
|
||||
this.pureName = props.pureName;
|
||||
this.bindingColumns = props.bindingColumns;
|
||||
this.dataColumns = props.dataColumns;
|
||||
this.loadedAll = false;
|
||||
}
|
||||
|
||||
schemaName: string;
|
||||
pureName: string;
|
||||
bindingColumns?: string[];
|
||||
dataColumns: string[];
|
||||
allColumns?: boolean;
|
||||
loadedAll: boolean;
|
||||
loadedRows: any[] = [];
|
||||
bindingGroups: { [bindingKey: string]: PerspectiveBindingGroup } = {};
|
||||
allRowCount: number = null;
|
||||
|
||||
get loadedCount() {
|
||||
return this.loadedRows.length;
|
||||
}
|
||||
|
||||
getRowsResult(props: PerspectiveDataLoadProps): { rows: any[]; incomplete: boolean } {
|
||||
return {
|
||||
rows: this.loadedRows.slice(0, props.topCount),
|
||||
incomplete: props.topCount < this.loadedCount || !this.loadedAll,
|
||||
};
|
||||
}
|
||||
|
||||
getBindingGroup(groupValues: any[]) {
|
||||
const key = stableStringify(groupValues);
|
||||
return this.bindingGroups[key];
|
||||
}
|
||||
|
||||
getUncachedBindingGroups(props: PerspectiveDataLoadProps): any[][] {
|
||||
const uncached = [];
|
||||
for (const group of props.bindingValues) {
|
||||
const key = stableStringify(group);
|
||||
const item = this.bindingGroups[key];
|
||||
if (!item) {
|
||||
uncached.push(group);
|
||||
}
|
||||
}
|
||||
return uncached;
|
||||
}
|
||||
|
||||
storeGroupSize(props: PerspectiveDataLoadProps, bindingValues: any[], count: number) {
|
||||
const originalBindingValue = props.bindingValues.find(v => _zip(v, bindingValues).every(([x, y]) => x == y));
|
||||
if (originalBindingValue) {
|
||||
const key = stableStringify(originalBindingValue);
|
||||
// console.log('SET SIZE', originalBindingValue, bindingValues, key, count);
|
||||
const group = new PerspectiveBindingGroup(this);
|
||||
group.bindingValues = bindingValues;
|
||||
group.groupSize = count;
|
||||
this.bindingGroups[key] = group;
|
||||
} else {
|
||||
dbg('Group not found', bindingValues);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class PerspectiveCache {
|
||||
constructor() {}
|
||||
|
||||
tables: { [tableKey: string]: PerspectiveCacheTable } = {};
|
||||
dataPatterns: PerspectiveDataPattern[] = [];
|
||||
|
||||
getTableCache(props: PerspectiveDataLoadProps) {
|
||||
const tableKey = stableStringify(
|
||||
_pick(props, [
|
||||
'schemaName',
|
||||
'pureName',
|
||||
'bindingColumns',
|
||||
'databaseConfig',
|
||||
'orderBy',
|
||||
'sqlCondition',
|
||||
'mongoCondition',
|
||||
])
|
||||
);
|
||||
let res = this.tables[tableKey];
|
||||
|
||||
if (res && _difference(props.dataColumns, res.dataColumns).length > 0 && !res.allColumns) {
|
||||
dbg('Delete cache because incomplete columns', props.pureName, res.dataColumns);
|
||||
|
||||
// we have incomplete cache
|
||||
delete this.tables[tableKey];
|
||||
res = null;
|
||||
}
|
||||
|
||||
if (!res) {
|
||||
res = new PerspectiveCacheTable(props, this);
|
||||
this.tables[tableKey] = res;
|
||||
return res;
|
||||
}
|
||||
|
||||
// cache could be used
|
||||
return res;
|
||||
}
|
||||
|
||||
clear() {
|
||||
this.tables = {};
|
||||
this.dataPatterns = [];
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,185 @@
|
||||
import type { DatabaseInfo, ForeignKeyInfo, NamedObjectInfo, TableInfo } from 'dbgate-types';
|
||||
import uuidv1 from 'uuid/v1';
|
||||
|
||||
// export interface PerspectiveConfigColumns {
|
||||
// expandedColumns: string[];
|
||||
// checkedColumns: string[];
|
||||
// uncheckedColumns: string[];
|
||||
// }
|
||||
|
||||
// Engine family of a data source: relational SQL ('sqldb') or document DB ('docdb').
export type PerspectiveDatabaseEngineType = 'sqldb' | 'docdb';

// Connection id + database name pair identifying where data is loaded from.
export interface PerspectiveDatabaseConfig {
  conid: string;
  database: string;
}
|
||||
|
||||
// Manually-defined join between a base perspective node and a referenced table.
export interface PerspectiveCustomJoinConfig {
  refNodeDesignerId: string;
  referenceDesignerId: string;
  joinName: string;
  baseDesignerId: string;
  // optional connection/database override for the referenced table
  conid?: string;
  database?: string;
  refSchemaName?: string;
  refTableName: string;
  // joined column pairs (base -> referenced)
  columns: {
    baseColumnName: string;
    refColumnName: string;
  }[];
}
|
||||
|
||||
// Metadata needed to render/evaluate a filter editor for one column.
export interface PerspectiveFilterColumnInfo {
  columnName: string;
  filterType: string;
  pureName: string;
  schemaName: string;
  foreignKey: ForeignKeyInfo;
}
|
||||
|
||||
// export interface PerspectiveParentFilterConfig {
|
||||
// uniqueName: string;
|
||||
// }
|
||||
// export interface PerspectiveConfig extends PerspectiveConfigColumns {
|
||||
// rootObject: { schemaName?: string; pureName: string };
|
||||
// filters: { [uniqueName: string]: string };
|
||||
// sort: {
|
||||
// [parentUniqueName: string]: {
|
||||
// uniqueName: string;
|
||||
// order: 'ASC' | 'DESC';
|
||||
// }[];
|
||||
// };
|
||||
// customJoins: PerspectiveCustomJoinConfig[];
|
||||
// parentFilters: PerspectiveParentFilterConfig[];
|
||||
// }
|
||||
|
||||
// Configuration of one node (table/collection) in a perspective designer graph.
export interface PerspectiveNodeConfig {
  designerId: string;
  schemaName?: string;
  pureName: string;
  // set once default column selection has been applied to this node
  defaultColumnsProcessed?: boolean;

  alias?: string;

  // per-node connection/database override; falls back to the perspective default
  conid?: string;
  database?: string;

  isParentFilter?: boolean;

  expandedColumns: string[];
  checkedColumns: string[];
  columnDisplays: {};
  // uncheckedColumns: string[];

  sort: {
    columnName: string;
    order: 'ASC' | 'DESC';
  }[];

  // filter expressions keyed by column unique name
  filters: { [uniqueName: string]: string };
  isAutoGenerated?: true | undefined;
  isNodeChecked?: boolean;

  // designer canvas coordinates
  position?: {
    x: number;
    y: number;
  };
}
|
||||
|
||||
// Join/reference edge between two perspective nodes.
export interface PerspectiveReferenceConfig {
  designerId: string;

  sourceId: string;
  targetId: string;

  // column pairs joined between source node and target node
  columns: {
    source: string;
    target: string;
  }[];

  isAutoGenerated?: true | undefined;
}
|
||||
|
||||
// Whole perspective: the node graph, its references and the designated root node.
export interface PerspectiveConfig {
  rootDesignerId: string;
  // true once node positions have been arranged on the designer canvas
  isArranged: boolean;
  nodes: PerspectiveNodeConfig[];
  references: PerspectiveReferenceConfig[];
}
|
||||
|
||||
export function createPerspectiveNodeConfig(name: { schemaName?: string; pureName: string }) {
|
||||
const node: PerspectiveNodeConfig = {
|
||||
pureName: name.pureName,
|
||||
schemaName: name.schemaName,
|
||||
designerId: uuidv1(),
|
||||
|
||||
expandedColumns: [],
|
||||
checkedColumns: [],
|
||||
columnDisplays: {},
|
||||
|
||||
sort: [],
|
||||
filters: {},
|
||||
|
||||
isNodeChecked: true,
|
||||
};
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
export function createPerspectiveConfig(rootObject?: { schemaName?: string; pureName: string }): PerspectiveConfig {
|
||||
if (!rootObject) {
|
||||
return {
|
||||
nodes: [],
|
||||
references: [],
|
||||
isArranged: true,
|
||||
rootDesignerId: null,
|
||||
};
|
||||
}
|
||||
|
||||
const rootNode = createPerspectiveNodeConfig(rootObject);
|
||||
return {
|
||||
nodes: [rootNode],
|
||||
references: [],
|
||||
rootDesignerId: rootNode.designerId,
|
||||
isArranged: true,
|
||||
};
|
||||
}
|
||||
|
||||
// Updater callback used by the perspective UI: maps the current config to a
// new one; `reload` forces a data re-fetch when true.
export type ChangePerspectiveConfigFunc = (
  changeFunc: (config: PerspectiveConfig) => PerspectiveConfig,
  reload?: boolean
) => void;
|
||||
|
||||
export function extractPerspectiveDatabases(
|
||||
{ conid, database },
|
||||
cfg: PerspectiveConfig
|
||||
): { conid: string; database: string }[] {
|
||||
const res: { conid: string; database: string }[] = [];
|
||||
res.push({ conid, database });
|
||||
|
||||
function add(conid, database) {
|
||||
if (res.find(x => x.conid == conid && x.database == database)) return;
|
||||
res.push({ conid, database });
|
||||
}
|
||||
|
||||
for (const node of cfg.nodes) {
|
||||
add(node.conid || conid, node.database || database);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
// DatabaseInfo structures keyed by connection id, then by database name.
export interface MultipleDatabaseInfo {
  [conid: string]: {
    [database: string]: DatabaseInfo;
  };
}
|
||||
|
||||
export function switchPerspectiveReferenceDirection(ref: PerspectiveReferenceConfig): PerspectiveReferenceConfig {
|
||||
return {
|
||||
designerId: ref.designerId,
|
||||
sourceId: ref.targetId,
|
||||
targetId: ref.sourceId,
|
||||
isAutoGenerated: ref.isAutoGenerated,
|
||||
columns: ref.columns.map(x => ({ source: x.target, target: x.source })),
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,373 @@
|
||||
import { Condition, Expression, Select } from 'dbgate-sqltree';
|
||||
import { PerspectiveDataLoadProps } from './PerspectiveDataProvider';
|
||||
import debug from 'debug';
|
||||
import _zipObject from 'lodash/zipObject';
|
||||
import _mapValues from 'lodash/mapValues';
|
||||
import _isArray from 'lodash/isArray';
|
||||
import { safeJsonParse } from 'dbgate-tools';
|
||||
|
||||
function normalizeLoadedRow(row) {
|
||||
return _mapValues(row, v => safeJsonParse(v) || v);
|
||||
}
|
||||
|
||||
function normalizeResult(result) {
|
||||
if (_isArray(result)) {
|
||||
return result.map(normalizeLoadedRow);
|
||||
}
|
||||
if (result.errorMessage) {
|
||||
return result;
|
||||
}
|
||||
return {
|
||||
...result,
|
||||
errorMessage: 'Unspecified error',
|
||||
};
|
||||
}
|
||||
|
||||
const dbg = debug('dbgate:PerspectiveDataLoader');
|
||||
|
||||
/**
 * Executes perspective queries against the backend through `apiCall`.
 * SQL engines ('sqldb') are queried with a dbgate-sqltree Select via
 * 'database-connections/sql-select'; document engines ('docdb') via
 * 'database-connections/collection-data'.
 */
export class PerspectiveDataLoader {
  // apiCall(route, params) performs the backend request; injected by the caller.
  constructor(public apiCall) {}

  /**
   * WHERE condition for SQL loads: combines props.sqlCondition with an IN
   * filter over the binding column. Returns null when there is nothing to
   * filter on.
   * NOTE(review): a binding filter is only generated when there is exactly
   * one binding column — confirm callers never pass more than one.
   */
  buildSqlCondition(props: PerspectiveDataLoadProps): Condition {
    const { schemaName, pureName, bindingColumns, bindingValues, dataColumns, orderBy, sqlCondition } = props;

    const conditions = [];

    if (sqlCondition) {
      conditions.push(sqlCondition);
    }

    if (bindingColumns?.length == 1) {
      conditions.push({
        conditionType: 'in',
        expr: {
          exprType: 'column',
          columnName: bindingColumns[0],
          source: {
            name: { schemaName, pureName },
          },
        },
        // each binding value is a tuple; single-column binding uses element 0
        values: bindingValues.map(x => x[0]),
      });
    }

    return conditions.length > 0
      ? {
          conditionType: 'and',
          conditions,
        }
      : null;
  }

  /** Mongo-style condition with the same semantics as buildSqlCondition ($in over a single binding column). */
  buildMongoCondition(props: PerspectiveDataLoadProps): {} {
    const { schemaName, pureName, bindingColumns, bindingValues, dataColumns, orderBy, mongoCondition } = props;

    const conditions = [];

    if (mongoCondition) {
      conditions.push(mongoCondition);
    }

    if (bindingColumns?.length == 1) {
      conditions.push({
        [bindingColumns[0]]: { $in: bindingValues.map(x => x[0]) },
      });
    }

    // single condition returned as-is, several get $and-ed, none -> null
    return conditions.length == 1 ? conditions[0] : conditions.length > 0 ? { $and: conditions } : null;
  }

  /**
   * Loads per-binding-group row counts from a SQL table:
   * SELECT COUNT(*) AS _perspective_group_size_, <binding columns> ...
   * GROUP BY <binding columns>.
   * Returns rows with integer group sizes, or the backend error response.
   */
  async loadGroupingSqlDb(props: PerspectiveDataLoadProps) {
    const { schemaName, pureName, bindingColumns } = props;

    const bindingColumnExpressions = bindingColumns.map(
      columnName =>
        ({
          exprType: 'column',
          columnName,
          source: {
            name: { schemaName, pureName },
          },
        } as Expression)
    );

    const select: Select = {
      commandType: 'select',
      from: {
        name: { schemaName, pureName },
      },
      columns: [
        {
          exprType: 'call',
          func: 'COUNT',
          args: [
            {
              exprType: 'raw',
              sql: '*',
            },
          ],
          alias: '_perspective_group_size_',
        },
        ...bindingColumnExpressions,
      ],
      where: this.buildSqlCondition(props),
    };

    select.groupBy = bindingColumnExpressions;

    if (dbg?.enabled) {
      dbg(`LOAD COUNTS, table=${props.pureName}, columns=${bindingColumns?.join(',')}`);
    }

    const response = await this.apiCall('database-connections/sql-select', {
      conid: props.databaseConfig.conid,
      database: props.databaseConfig.database,
      select,
    });

    if (response.errorMessage) return response;
    return response.rows.map(row => ({
      ...row,
      // COUNT(*) may arrive as a string depending on the driver
      _perspective_group_size_: parseInt(row._perspective_group_size_),
    }));
  }

  /**
   * Loads per-binding-group counts from a document collection using an
   * aggregation: $match (condition) + $group by binding columns with $sum: 1.
   */
  async loadGroupingDocDb(props: PerspectiveDataLoadProps) {
    const { schemaName, pureName, bindingColumns } = props;

    const aggregate = [
      { $match: this.buildMongoCondition(props) },
      {
        $group: {
          // group key: { col: '$col', ... } for every binding column
          _id: _zipObject(
            bindingColumns,
            bindingColumns.map(col => '$' + col)
          ),
          count: { $sum: 1 },
        },
      },
    ];

    if (dbg?.enabled) {
      dbg(`LOAD COUNTS, table=${props.pureName}, columns=${bindingColumns?.join(',')}`);
    }

    const response = await this.apiCall('database-connections/collection-data', {
      conid: props.databaseConfig.conid,
      database: props.databaseConfig.database,
      options: {
        pureName,
        aggregate,
      },
    });

    if (response.errorMessage) return response;
    // flatten the group key back into plain row fields + parsed size
    return response.rows.map(row => ({
      ...row._id,
      _perspective_group_size_: parseInt(row.count),
    }));
  }

  /** Dispatches group-size loading by engine type; returns undefined for unknown engines. */
  async loadGrouping(props: PerspectiveDataLoadProps) {
    const { engineType } = props;
    switch (engineType) {
      case 'sqldb':
        return this.loadGroupingSqlDb(props);
      case 'docdb':
        return this.loadGroupingDocDb(props);
    }
  }

  /** Loads a page of rows from a SQL table; returns the row array or the backend error response. */
  async loadDataSqlDb(props: PerspectiveDataLoadProps) {
    const {
      schemaName,
      pureName,
      bindingColumns,
      bindingValues,
      dataColumns,
      orderBy,
      sqlCondition: condition,
      engineType,
    } = props;

    // empty (but defined) column list means there is nothing to select
    if (dataColumns?.length == 0) {
      return [];
    }

    const select: Select = {
      commandType: 'select',
      from: {
        name: { schemaName, pureName },
      },
      columns: dataColumns?.map(columnName => ({
        exprType: 'column',
        columnName,
        source: {
          name: { schemaName, pureName },
        },
      })),
      // undefined dataColumns -> SELECT *
      selectAll: !dataColumns,
      orderBy:
        orderBy?.length > 0
          ? orderBy?.map(({ columnName, order }) => ({
              exprType: 'column',
              columnName,
              direction: order,
              source: {
                name: { schemaName, pureName },
              },
            }))
          : null,
      range: props.range,
      where: this.buildSqlCondition(props),
    };

    if (dbg?.enabled) {
      dbg(
        `LOAD DATA, table=${props.pureName}, columns=${props.dataColumns?.join(',')}, range=${props.range?.offset},${
          props.range?.limit
        }`
      );
    }

    const response = await this.apiCall('database-connections/sql-select', {
      conid: props.databaseConfig.conid,
      database: props.databaseConfig.database,
      select,
    });

    if (response.errorMessage) return response;
    return response.rows;
  }

  /** Builds collection-data options (condition, paging, optional sort) for doc-db requests. */
  getDocDbLoadOptions(props: PerspectiveDataLoadProps, useSort: boolean) {
    const { pureName } = props;
    const res: any = {
      pureName,
      condition: this.buildMongoCondition(props),
      skip: props.range?.offset,
      limit: props.range?.limit,
    };
    if (useSort && props.orderBy?.length > 0) {
      // mongo sort spec: { columnName: 1 | -1 }
      res.sort = _zipObject(
        props.orderBy.map(col => col.columnName),
        props.orderBy.map(col => (col.order == 'DESC' ? -1 : 1))
      );
    }

    return res;
  }

  /** Loads a page of documents from a collection; returns rows or the backend error response. */
  async loadDataDocDb(props: PerspectiveDataLoadProps) {
    const {
      schemaName,
      pureName,
      bindingColumns,
      bindingValues,
      dataColumns,
      orderBy,
      sqlCondition: condition,
      engineType,
    } = props;

    if (dbg?.enabled) {
      dbg(
        `LOAD DATA, collection=${props.pureName}, columns=${props.dataColumns?.join(',')}, range=${
          props.range?.offset
        },${props.range?.limit}`
      );
    }

    const options = this.getDocDbLoadOptions(props, true);

    const response = await this.apiCall('database-connections/collection-data', {
      conid: props.databaseConfig.conid,
      database: props.databaseConfig.database,
      options,
    });

    if (response.errorMessage) return response;
    return response.rows;
  }

  /**
   * Dispatches row loading by engine type; results are normalized so JSON
   * cell values are parsed and non-array results always carry errorMessage.
   */
  async loadData(props: PerspectiveDataLoadProps) {
    const { engineType } = props;
    switch (engineType) {
      case 'sqldb':
        return normalizeResult(await this.loadDataSqlDb(props));
      case 'docdb':
        return normalizeResult(await this.loadDataDocDb(props));
    }
  }

  /** SELECT COUNT(*) for a SQL table; returns the single result row ({ count }) or an error response. */
  async loadRowCountSqlDb(props: PerspectiveDataLoadProps) {
    const {
      schemaName,
      pureName,
      bindingColumns,
      bindingValues,
      dataColumns,
      orderBy,
      sqlCondition: condition,
    } = props;

    const select: Select = {
      commandType: 'select',
      from: {
        name: { schemaName, pureName },
      },
      columns: [
        {
          exprType: 'raw',
          sql: 'COUNT(*)',
          alias: 'count',
        },
      ],
      where: this.buildSqlCondition(props),
    };

    const response = await this.apiCall('database-connections/sql-select', {
      conid: props.databaseConfig.conid,
      database: props.databaseConfig.database,
      select,
    });

    if (response.errorMessage) return response;
    return response.rows[0];
  }

  /** countDocuments request for a collection; returns the raw backend response. */
  async loadRowCountDocDb(props: PerspectiveDataLoadProps) {
    const {
      schemaName,
      pureName,
      bindingColumns,
      bindingValues,
      dataColumns,
      orderBy,
      sqlCondition: condition,
    } = props;

    const options = {
      ...this.getDocDbLoadOptions(props, false),
      countDocuments: true,
    };

    const response = await this.apiCall('database-connections/collection-data', {
      conid: props.databaseConfig.conid,
      database: props.databaseConfig.database,
      options,
    });

    return response;
  }

  /** Dispatches row-count loading by engine type. */
  async loadRowCount(props: PerspectiveDataLoadProps) {
    const { engineType } = props;
    switch (engineType) {
      case 'sqldb':
        return this.loadRowCountSqlDb(props);
      case 'docdb':
        return this.loadRowCountDocDb(props);
    }
  }
}
|
||||
@@ -0,0 +1,95 @@
|
||||
import { PerspectiveDataLoader } from './PerspectiveDataLoader';
|
||||
import { PerspectiveDataLoadProps } from './PerspectiveDataProvider';
|
||||
import _isString from 'lodash/isString';
|
||||
import _isPlainObject from 'lodash/isPlainObject';
|
||||
import _isNumber from 'lodash/isNumber';
|
||||
import _isBoolean from 'lodash/isBoolean';
|
||||
import _isArray from 'lodash/isArray';
|
||||
import { safeJsonParse } from 'dbgate-tools';
|
||||
|
||||
// Value categories observed while scanning document data.
export type PerspectiveDataPatternColumnType = 'null' | 'oid' | 'string' | 'number' | 'boolean' | 'json';

// One column (field) discovered in scanned rows, with every observed value
// type and, for structured values, the recursively discovered sub-columns.
export interface PerspectiveDataPatternColumn {
  name: string;
  types: PerspectiveDataPatternColumnType[];
  columns: PerspectiveDataPatternColumn[];
}

// Structure pattern of one table/collection, derived from sampled rows.
export interface PerspectiveDataPattern {
  conid: string;
  database: string;
  schemaName?: string;
  pureName: string;
  columns: PerspectiveDataPatternColumn[];
}

// Patterns keyed by perspective node designerId.
export type PerspectiveDataPatternDict = { [designerId: string]: PerspectiveDataPattern };
|
||||
|
||||
// Classifies a single value for pattern analysis. Check order matters:
// a Mongo ObjectId ({ $oid: ... }) must win over the generic object test.
function detectValueType(value): PerspectiveDataPatternColumnType {
  if (_isString(value)) return 'string';
  if (_isNumber(value)) return 'number';
  if (_isBoolean(value)) return 'boolean';
  if (value?.$oid) return 'oid';
  if (_isPlainObject(value) || _isArray(value)) return 'json';
  if (value == null) return 'null';
  // NOTE(review): any other value (Date, function, ...) falls through and
  // yields undefined — confirm callers tolerate an undefined type entry.
}
|
||||
|
||||
function addObjectToColumns(columns: PerspectiveDataPatternColumn[], row) {
|
||||
if (_isPlainObject(row)) {
|
||||
for (const key of Object.keys(row)) {
|
||||
let column: PerspectiveDataPatternColumn = columns.find(x => x.name == key);
|
||||
if (!column) {
|
||||
column = {
|
||||
name: key,
|
||||
types: [],
|
||||
columns: [],
|
||||
};
|
||||
columns.push(column);
|
||||
}
|
||||
const value = row[key];
|
||||
const type = detectValueType(value);
|
||||
if (!column.types.includes(type)) {
|
||||
column.types.push(type);
|
||||
}
|
||||
if (_isPlainObject(value)) {
|
||||
addObjectToColumns(column.columns, value);
|
||||
}
|
||||
if (_isArray(value)) {
|
||||
for (const item of value) {
|
||||
addObjectToColumns(column.columns, item);
|
||||
}
|
||||
}
|
||||
if (_isString(value)) {
|
||||
const json = safeJsonParse(value);
|
||||
if (json && (_isPlainObject(json) || _isArray(json))) {
|
||||
if (!column.types.includes('json')) {
|
||||
column.types.push('json');
|
||||
}
|
||||
if (_isPlainObject(json)) {
|
||||
addObjectToColumns(column.columns, json);
|
||||
}
|
||||
if (_isArray(json)) {
|
||||
for (const item of json) {
|
||||
addObjectToColumns(column.columns, item);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function analyseDataPattern(
|
||||
patternBase: Omit<PerspectiveDataPattern, 'columns'>,
|
||||
rows: any[]
|
||||
): PerspectiveDataPattern {
|
||||
const res: PerspectiveDataPattern = {
|
||||
...patternBase,
|
||||
columns: [],
|
||||
};
|
||||
// console.log('ROWS', rows);
|
||||
for (const row of rows) {
|
||||
addObjectToColumns(res.columns, row);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
@@ -0,0 +1,229 @@
|
||||
import debug from 'debug';
|
||||
import { Condition } from 'dbgate-sqltree';
|
||||
import type { RangeDefinition } from 'dbgate-types';
|
||||
import { PerspectiveBindingGroup, PerspectiveCache } from './PerspectiveCache';
|
||||
import { PerspectiveDataLoader } from './PerspectiveDataLoader';
|
||||
import { PerspectiveDataPatternDict } from './PerspectiveDataPattern';
|
||||
import { PerspectiveDatabaseConfig, PerspectiveDatabaseEngineType } from './PerspectiveConfig';
|
||||
|
||||
// Number of rows fetched per backend request when paging perspective data.
export const PERSPECTIVE_PAGE_SIZE = 100;

const dbg = debug('dbgate:PerspectiveDataProvider');
|
||||
|
||||
// Parameters of one perspective data/grouping/count request.
export interface PerspectiveDataLoadProps {
  databaseConfig: PerspectiveDatabaseConfig;
  schemaName?: string;
  pureName: string;
  // columns to fetch; undefined means select all
  dataColumns?: string[];
  allColumns?: boolean;
  orderBy: {
    columnName: string;
    order: 'ASC' | 'DESC';
  }[];
  // parent-join columns for nested loads
  bindingColumns?: string[];
  // value tuples (one element per binding column) selecting the child groups
  bindingValues?: any[][];
  range?: RangeDefinition;
  // maximum number of rows the caller wants in total
  topCount?: number;
  sqlCondition?: Condition;
  mongoCondition?: any;
  engineType: PerspectiveDatabaseEngineType;
}
|
||||
|
||||
export class PerspectiveDataProvider {
|
||||
constructor(
|
||||
public cache: PerspectiveCache,
|
||||
public loader: PerspectiveDataLoader,
|
||||
public dataPatterns: PerspectiveDataPatternDict
|
||||
) {}
|
||||
async loadData(props: PerspectiveDataLoadProps): Promise<{ rows: any[]; incomplete: boolean }> {
|
||||
dbg('load data', props);
|
||||
// console.log('LOAD DATA', props);
|
||||
if (props.bindingColumns) {
|
||||
return this.loadDataNested(props);
|
||||
} else {
|
||||
return this.loadDataFlat(props);
|
||||
}
|
||||
}
|
||||
|
||||
async loadDataNested(props: PerspectiveDataLoadProps): Promise<{ rows: any[]; incomplete: boolean }> {
|
||||
const tableCache = this.cache.getTableCache(props);
|
||||
|
||||
const uncached = tableCache.getUncachedBindingGroups(props);
|
||||
if (uncached.length > 0) {
|
||||
const counts = await this.loader.loadGrouping({
|
||||
...props,
|
||||
bindingValues: uncached,
|
||||
});
|
||||
// console.log('COUNTS', counts);
|
||||
for (const resetItem of uncached) {
|
||||
tableCache.storeGroupSize(props, resetItem, 0);
|
||||
}
|
||||
for (const countItem of counts) {
|
||||
const { _perspective_group_size_, ...fields } = countItem;
|
||||
tableCache.storeGroupSize(
|
||||
props,
|
||||
props.bindingColumns.map(col => fields[col]),
|
||||
_perspective_group_size_
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const rows = [];
|
||||
|
||||
// console.log('CACHE', tableCache.bindingGroups);
|
||||
|
||||
let groupIndex = 0;
|
||||
let loadCalled = false;
|
||||
let shouldReturn = false;
|
||||
for (; groupIndex < props.bindingValues.length; groupIndex++) {
|
||||
const groupValues = props.bindingValues[groupIndex];
|
||||
const group = tableCache.getBindingGroup(groupValues);
|
||||
|
||||
if (!group.loadedAll) {
|
||||
if (loadCalled) {
|
||||
shouldReturn = true;
|
||||
} else {
|
||||
// we need to load next data
|
||||
await this.loadNextGroup(props, groupIndex);
|
||||
loadCalled = true;
|
||||
}
|
||||
}
|
||||
|
||||
// console.log('GRP', groupValues, group);
|
||||
rows.push(...group.loadedRows);
|
||||
if (rows.length >= props.topCount || shouldReturn) {
|
||||
return {
|
||||
rows: rows.slice(0, props.topCount),
|
||||
incomplete: props.topCount < rows.length || !group.loadedAll || groupIndex < props.bindingValues.length - 1,
|
||||
};
|
||||
}
|
||||
}
|
||||
if (groupIndex >= props.bindingValues.length) {
|
||||
// all groups are fully loaded
|
||||
return { rows, incomplete: false };
|
||||
}
|
||||
}
|
||||
|
||||
async loadNextGroup(props: PerspectiveDataLoadProps, groupIndex: number) {
|
||||
const tableCache = this.cache.getTableCache(props);
|
||||
|
||||
const planLoadingGroupIndexes: number[] = [];
|
||||
const planLoadingGroups: PerspectiveBindingGroup[] = [];
|
||||
let planLoadRowCount = 0;
|
||||
|
||||
const loadPlanned = async () => {
|
||||
// console.log(
|
||||
// 'LOAD PLANNED',
|
||||
// planLoadingGroupIndexes,
|
||||
// planLoadingGroupIndexes.map(idx => props.bindingValues[idx])
|
||||
// );
|
||||
const rows = await this.loader.loadData({
|
||||
...props,
|
||||
bindingValues: planLoadingGroupIndexes.map(idx => props.bindingValues[idx]),
|
||||
});
|
||||
// console.log('LOADED PLANNED', rows);
|
||||
// distribute rows into groups
|
||||
for (const row of rows) {
|
||||
const group = planLoadingGroups.find(x => x.matchRow(row));
|
||||
if (group) {
|
||||
group.loadedRows.push(row);
|
||||
}
|
||||
}
|
||||
for (const group of planLoadingGroups) {
|
||||
group.loadedAll = true;
|
||||
}
|
||||
};
|
||||
|
||||
for (; groupIndex < props.bindingValues.length; groupIndex++) {
|
||||
const groupValues = props.bindingValues[groupIndex];
|
||||
const group = tableCache.getBindingGroup(groupValues);
|
||||
if (!group) continue;
|
||||
if (group.loadedAll) continue;
|
||||
if (group.groupSize == 0) {
|
||||
group.loadedAll = true;
|
||||
continue;
|
||||
}
|
||||
if (group.groupSize >= PERSPECTIVE_PAGE_SIZE) {
|
||||
if (planLoadingGroupIndexes.length > 0) {
|
||||
await loadPlanned();
|
||||
return;
|
||||
}
|
||||
const nextRows = await this.loader.loadData({
|
||||
...props,
|
||||
topCount: null,
|
||||
range: {
|
||||
offset: group.loadedRows.length,
|
||||
limit: PERSPECTIVE_PAGE_SIZE,
|
||||
},
|
||||
bindingValues: [group.bindingValues],
|
||||
});
|
||||
group.loadedRows = [...group.loadedRows, ...nextRows];
|
||||
group.loadedAll = nextRows.length < PERSPECTIVE_PAGE_SIZE;
|
||||
return;
|
||||
} else {
|
||||
if (planLoadRowCount + group.groupSize > PERSPECTIVE_PAGE_SIZE) {
|
||||
await loadPlanned();
|
||||
return;
|
||||
}
|
||||
planLoadingGroupIndexes.push(groupIndex);
|
||||
planLoadingGroups.push(group);
|
||||
planLoadRowCount += group.groupSize;
|
||||
}
|
||||
}
|
||||
|
||||
if (planLoadingGroupIndexes.length > 0) {
|
||||
await loadPlanned();
|
||||
}
|
||||
}
|
||||
|
||||
async loadDataFlat(props: PerspectiveDataLoadProps): Promise<{ rows: any[]; incomplete: boolean }> {
|
||||
const tableCache = this.cache.getTableCache(props);
|
||||
|
||||
if (props.topCount <= tableCache.loadedCount) {
|
||||
return tableCache.getRowsResult(props);
|
||||
}
|
||||
|
||||
// load missing rows
|
||||
tableCache.dataColumns = props.dataColumns;
|
||||
tableCache.allColumns = props.allColumns;
|
||||
|
||||
const nextRows = await this.loader.loadData({
|
||||
...props,
|
||||
topCount: null,
|
||||
range: {
|
||||
offset: tableCache.loadedCount,
|
||||
limit: props.topCount - tableCache.loadedCount,
|
||||
},
|
||||
});
|
||||
|
||||
if (nextRows.errorMessage) {
|
||||
throw new Error(nextRows.errorMessage);
|
||||
}
|
||||
|
||||
tableCache.loadedRows = [...tableCache.loadedRows, ...nextRows];
|
||||
tableCache.loadedAll = nextRows.length < props.topCount - tableCache.loadedCount;
|
||||
|
||||
// const rows=tableCache.getRows(props);
|
||||
|
||||
return tableCache.getRowsResult(props);
|
||||
}
|
||||
|
||||
async loadRowCount(props: PerspectiveDataLoadProps): Promise<number> {
|
||||
const tableCache = this.cache.getTableCache(props);
|
||||
|
||||
if (tableCache.allRowCount != null) {
|
||||
return tableCache.allRowCount;
|
||||
}
|
||||
|
||||
const result = await this.loader.loadRowCount({
|
||||
...props,
|
||||
});
|
||||
|
||||
if (result.errorMessage) {
|
||||
throw new Error(result.errorMessage);
|
||||
}
|
||||
|
||||
tableCache.allRowCount = parseInt(result.count);
|
||||
return tableCache.allRowCount;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,300 @@
|
||||
import { getTableChildPerspectiveNodes, PerspectiveTableNode, PerspectiveTreeNode } from './PerspectiveTreeNode';
|
||||
import _max from 'lodash/max';
|
||||
import _range from 'lodash/range';
|
||||
import _fill from 'lodash/fill';
|
||||
import _findIndex from 'lodash/findIndex';
|
||||
import _isPlainObject from 'lodash/isPlainObject';
|
||||
import _isArray from 'lodash/isArray';
|
||||
import debug from 'debug';
|
||||
|
||||
const dbg = debug('dbgate:PerspectiveDisplay');
|
||||
|
||||
let lastJoinId = 0;
|
||||
function getJoinId(): number {
|
||||
lastJoinId += 1;
|
||||
return lastJoinId;
|
||||
}
|
||||
|
||||
export class PerspectiveDisplayColumn {
|
||||
title: string;
|
||||
dataField: string;
|
||||
displayType: string;
|
||||
parentNodes: PerspectiveTreeNode[] = [];
|
||||
colSpanAtLevel = {};
|
||||
columnIndex = 0;
|
||||
dataNode: PerspectiveTreeNode = null;
|
||||
|
||||
constructor(public display: PerspectiveDisplay) {}
|
||||
|
||||
get rowSpan() {
|
||||
return this.display.columnLevelCount - this.parentNodes.length;
|
||||
}
|
||||
|
||||
showParent(level: number) {
|
||||
return !!this.colSpanAtLevel[level];
|
||||
}
|
||||
|
||||
getColSpan(level: number) {
|
||||
return this.colSpanAtLevel[level];
|
||||
}
|
||||
|
||||
isVisible(level: number) {
|
||||
return level == this.columnLevel;
|
||||
}
|
||||
|
||||
get columnLevel() {
|
||||
return this.parentNodes.length;
|
||||
}
|
||||
|
||||
getParentName(level) {
|
||||
return this.parentNodes[level]?.title;
|
||||
}
|
||||
|
||||
getParentNode(level) {
|
||||
return this.parentNodes[level];
|
||||
}
|
||||
|
||||
getParentTableDesignerId(level) {
|
||||
return this.parentNodes[level]?.headerTableAttributes ? this.parentNodes[level]?.designerId : '';
|
||||
}
|
||||
|
||||
// hasParentNode(node: PerspectiveTreeNode) {
|
||||
// return this.parentNodes.includes(node);
|
||||
// }
|
||||
}
|
||||
|
||||
// Nested rows produced for one child (joined) node of a collected row.
interface PerspectiveSubRowCollection {
  rows: CollectedPerspectiveDisplayRow[];
}

// Intermediate row form produced by collectRows before merging: values for
// this node's columns plus recursively collected child rows.
interface CollectedPerspectiveDisplayRow {
  columnIndexes: number[];
  rowData: any[];
  subRowCollections: PerspectiveSubRowCollection[];
  // designerIds of nodes whose data is incomplete at this row
  incompleteRowsIndicator?: string[];
}
|
||||
|
||||
export class PerspectiveDisplayRow {
|
||||
constructor(public display: PerspectiveDisplay) {
|
||||
this.rowData = _fill(Array(display.columns.length), undefined);
|
||||
this.rowSpans = _fill(Array(display.columns.length), 1);
|
||||
this.rowJoinIds = _fill(Array(display.columns.length), 0);
|
||||
this.rowCellSkips = _fill(Array(display.columns.length), false);
|
||||
}
|
||||
|
||||
rowData: any[] = [];
|
||||
rowSpans: number[] = null;
|
||||
rowCellSkips: boolean[] = null;
|
||||
|
||||
rowJoinIds: number[] = [];
|
||||
}
|
||||
|
||||
/**
 * Flattens a tree of perspective nodes plus their nested source rows into a
 * rectangular grid: `columns` describe the (possibly multi-level) header
 * layout, `rows` hold the merged cell matrix with row spans for
 * one-to-many joins.
 */
export class PerspectiveDisplay {
  columns: PerspectiveDisplayColumn[] = [];
  rows: PerspectiveDisplayRow[] = [];
  // number of header levels: deepest column nesting + 1
  readonly columnLevelCount: number;
  // per indicator id: smallest row index at which incomplete data was seen
  loadIndicatorsCounts: { [designerId: string]: number } = {};

  constructor(public root: PerspectiveTreeNode, rows: any[]) {
    // dbg('source rows', rows);
    this.fillColumns(root.childNodes, [root]);
    if (this.columns.length > 0) {
      // the first column's level-0 header spans the whole grid
      this.columns[0].colSpanAtLevel[0] = this.columns.length;
    }
    this.columnLevelCount = _max(this.columns.map(x => x.parentNodes.length)) + 1;
    const collectedRows = this.collectRows(rows, root.childNodes);
    dbg('collected rows', collectedRows);
    // console.log('COLLECTED', JSON.stringify(collectedRows, null, 2));
    // this.mergeRows(collectedRows);
    this.mergeRows(collectedRows);
    // dbg('merged rows', this.rows);

    // console.log(
    //   'MERGED',
    //   this.rows.map(r =>
    //     r.incompleteRowsIndicator
    //       ? `************************************ ${r.incompleteRowsIndicator.join('|')}`
    //       : r.rowData.join('|')
    //   )
    // );
  }

  // Returns the row at rowIndex, growing this.rows with empty rows as needed.
  private getRowAt(rowIndex) {
    while (this.rows.length <= rowIndex) {
      this.rows.push(new PerspectiveDisplayRow(this));
    }
    return this.rows[rowIndex];
  }

  // Walks child nodes and registers a display column for every node that
  // produces either a plain data column or a nested (hierarchic) column group.
  fillColumns(children: PerspectiveTreeNode[], parentNodes: PerspectiveTreeNode[]) {
    for (const child of children) {
      if (child.generatesHiearchicGridColumn || child.generatesDataGridColumn) {
        this.processColumn(child, parentNodes);
      }
    }
  }

  // Adds a data column for `node` and/or recurses into its children for
  // hierarchic columns, recording the parent's col-span over the added range.
  processColumn(node: PerspectiveTreeNode, parentNodes: PerspectiveTreeNode[]) {
    if (node.generatesDataGridColumn) {
      const column = new PerspectiveDisplayColumn(this);
      column.title = node.columnTitle;
      column.dataField = node.dataField;
      column.parentNodes = parentNodes;
      column.display = this;
      column.columnIndex = this.columns.length;
      column.dataNode = node;
      column.displayType = node.parentNodeConfig?.columnDisplays?.[node.columnName];
      this.columns.push(column);
    }

    if (node.generatesHiearchicGridColumn) {
      const countBefore = this.columns.length;
      this.fillColumns(node.childNodes, [...parentNodes, node]);

      if (this.columns.length > countBefore) {
        // parent header cell spans all columns added by the recursion above
        this.columns[countBefore].colSpanAtLevel[parentNodes.length] = this.columns.length - countBefore;
      }
    }
  }

  // Finds the display-column index whose data node matches `node` by column
  // name and parent designer id; returns -1 when not found (lodash findIndex).
  findColumnIndexFromNode(node: PerspectiveTreeNode) {
    return _findIndex(
      this.columns,
      x =>
        x.dataNode.columnName == node.columnName && x.dataNode?.parentNode?.designerId == node?.parentNode?.designerId
    );
  }

  // findColumnIndexFromNode(node: PerspectiveTreeNode) {
  //   return _findIndex(this.columns, x => x.dataNode.designerId == node.designerId);
  // }

  // Normalizes a nested value to an array: arrays pass through, a plain
  // object becomes a one-element array, anything else yields [].
  extractArray(value) {
    if (_isArray(value)) return value;
    if (_isPlainObject(value)) return [value];
    return [];
  }

  // Recursively converts raw source rows into CollectedPerspectiveDisplayRow
  // trees: plain column values go to rowData, hierarchic children recurse
  // into subRowCollections.
  collectRows(sourceRows: any[], nodes: PerspectiveTreeNode[]): CollectedPerspectiveDisplayRow[] {
    // console.log('********** COLLECT ROWS', sourceRows);
    const columnNodes = nodes.filter(x => x.generatesDataGridColumn);
    const treeNodes = nodes.filter(x => x.generatesHiearchicGridColumn);

    // console.log(
    //   'columnNodes',
    //   columnNodes.map(x => x.title)
    // );
    // console.log(
    //   'treeNodes',
    //   treeNodes.map(x => x.title)
    // );

    // console.log(
    //   'nodes',
    //   nodes.map(x => x.title)
    // );

    const columnIndexes = columnNodes.map(node => this.findColumnIndexFromNode(node));

    const res: CollectedPerspectiveDisplayRow[] = [];
    for (const sourceRow of sourceRows) {
      // console.log('PROCESS SOURCE', sourceRow);
      // row.startIndex = startIndex;
      const rowData = columnNodes.map(node => sourceRow[node.columnName]);
      const subRowCollections = [];

      for (const node of treeNodes) {
        // console.log('sourceRow[node.fieldName]', node.fieldName, sourceRow[node.fieldName]);
        if (sourceRow[node.fieldName]) {
          const subrows = {
            rows: this.collectRows(this.extractArray(sourceRow[node.fieldName]), node.childNodes),
          };
          subRowCollections.push(subrows);
        }
      }

      res.push({
        rowData,
        columnIndexes,
        subRowCollections,
        incompleteRowsIndicator: sourceRow.incompleteRowsIndicator,
      });
    }

    return res;
  }

  // For each column, extends the span of the last value-bearing cell over the
  // following rows that have no own value and share its join id, marking the
  // covered cells as skipped so they are not rendered twice.
  fillRowSpans() {
    for (let col = 0; col < this.columns.length; col++) {
      // let lastFilledJoinId = null;
      let lastFilledRow = 0;
      let rowIndex = 0;

      for (const row of this.rows) {
        if (
          row.rowData[col] === undefined &&
          row.rowJoinIds[col] == this.rows[lastFilledRow].rowJoinIds[col] &&
          row.rowJoinIds[col]
        ) {
          row.rowCellSkips[col] = true;
          this.rows[lastFilledRow].rowSpans[col] = rowIndex - lastFilledRow + 1;
        } else {
          lastFilledRow = rowIndex;
        }
        rowIndex++;
      }
    }
  }

  // Flattens all collected rows into this.rows, then computes row spans.
  mergeRows(collectedRows: CollectedPerspectiveDisplayRow[]) {
    let rowIndex = 0;
    for (const collectedRow of collectedRows) {
      const count = this.mergeRow(collectedRow, rowIndex);
      rowIndex += count;
    }
    this.fillRowSpans();
  }

  // Writes one collected row (and, recursively, its sub-rows) into the flat
  // grid starting at rowIndex. Returns the number of physical rows consumed;
  // incomplete-indicator rows consume 0 and only record their first row index
  // into loadIndicatorsCounts.
  mergeRow(collectedRow: CollectedPerspectiveDisplayRow, rowIndex: number): number {
    if (collectedRow.incompleteRowsIndicator?.length > 0) {
      for (const indicator of collectedRow.incompleteRowsIndicator) {
        if (!this.loadIndicatorsCounts[indicator]) {
          this.loadIndicatorsCounts[indicator] = rowIndex;
        }
        // keep the smallest row index seen for this indicator
        if (rowIndex < this.loadIndicatorsCounts[indicator]) {
          this.loadIndicatorsCounts[indicator] = rowIndex;
        }
      }
      return 0;
    }

    const mainRow = this.getRowAt(rowIndex);
    for (let i = 0; i < collectedRow.columnIndexes.length; i++) {
      mainRow.rowData[collectedRow.columnIndexes[i]] = collectedRow.rowData[i];
    }

    // a row occupies at least one physical row, or as many as its largest
    // sub-row collection needs
    let rowCount = 1;
    for (const subrows of collectedRow.subRowCollections) {
      let additionalRowCount = 0;
      let currentRowIndex = rowIndex;
      for (const subrow of subrows.rows) {
        const count = this.mergeRow(subrow, currentRowIndex);
        additionalRowCount += count;
        currentRowIndex += count;
      }
      if (additionalRowCount > rowCount) {
        rowCount = additionalRowCount;
      }
    }

    // stamp all occupied rows with one join id so fillRowSpans can merge them
    const joinId = getJoinId();
    for (let radd = 0; radd < rowCount; radd++) {
      const row = this.getRowAt(rowIndex + radd);
      for (let i = 0; i < collectedRow.columnIndexes.length; i++) {
        row.rowJoinIds[collectedRow.columnIndexes[i]] = joinId;
      }
    }

    return rowCount;
  }
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,7 +1,7 @@
|
||||
import { FormViewDisplay } from './FormViewDisplay';
|
||||
import _ from 'lodash';
|
||||
import { ChangeCacheFunc, DisplayColumn, ChangeConfigFunc } from './GridDisplay';
|
||||
import { EngineDriver, NamedObjectInfo, DatabaseInfo } from 'dbgate-types';
|
||||
import type { EngineDriver, NamedObjectInfo, DatabaseInfo } from 'dbgate-types';
|
||||
import { GridConfig, GridCache } from './GridConfig';
|
||||
import { mergeConditions, Condition, OrderByExpression } from 'dbgate-sqltree';
|
||||
import { TableGridDisplay } from './TableGridDisplay';
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import _ from 'lodash';
|
||||
import { filterName, isTableColumnUnique } from 'dbgate-tools';
|
||||
import { GridDisplay, ChangeCacheFunc, DisplayColumn, DisplayedColumnInfo, ChangeConfigFunc } from './GridDisplay';
|
||||
import {
|
||||
import type {
|
||||
TableInfo,
|
||||
EngineDriver,
|
||||
ViewInfo,
|
||||
@@ -267,12 +267,9 @@ export class TableGridDisplay extends GridDisplay {
|
||||
) {
|
||||
for (const column of columns) {
|
||||
if (this.addAllExpandedColumnsToSelected || this.config.addedColumns.includes(column.uniqueName)) {
|
||||
select.columns.push({
|
||||
exprType: 'column',
|
||||
columnName: column.columnName,
|
||||
alias: column.uniqueName,
|
||||
source: { name: column, alias: parentAlias },
|
||||
});
|
||||
select.columns.push(
|
||||
this.createColumnExpression(column, { name: column, alias: parentAlias }, column.uniqueName)
|
||||
);
|
||||
displayedColumnInfo[column.uniqueName] = {
|
||||
...column,
|
||||
sourceAlias: parentAlias,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import _ from 'lodash';
|
||||
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
|
||||
import { EngineDriver, ViewInfo, ColumnInfo } from 'dbgate-types';
|
||||
import type { EngineDriver, ViewInfo, ColumnInfo } from 'dbgate-types';
|
||||
import { GridConfig, GridCache } from './GridConfig';
|
||||
|
||||
export class ViewGridDisplay extends GridDisplay {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import _ from 'lodash';
|
||||
import { Command, Insert, Update, Delete, UpdateField, Condition, AllowIdentityInsert } from 'dbgate-sqltree';
|
||||
import { NamedObjectInfo, DatabaseInfo, ForeignKeyInfo, TableInfo } from 'dbgate-types';
|
||||
import type { NamedObjectInfo, DatabaseInfo, ForeignKeyInfo, TableInfo } from 'dbgate-types';
|
||||
import { ChangeSet, ChangeSetItem, extractChangeSetCondition } from './ChangeSet';
|
||||
|
||||
export interface ChangeSetDeleteCascade {
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
export * from './GridDisplay';
|
||||
export * from './GridConfig';
|
||||
export * from './PerspectiveConfig';
|
||||
export * from './PerspectiveTreeNode';
|
||||
export * from './TableGridDisplay';
|
||||
export * from './ViewGridDisplay';
|
||||
export * from './JslGridDisplay';
|
||||
@@ -12,3 +14,10 @@ export * from './FormViewDisplay';
|
||||
export * from './TableFormViewDisplay';
|
||||
export * from './CollectionGridDisplay';
|
||||
export * from './deleteCascade';
|
||||
export * from './PerspectiveDisplay';
|
||||
export * from './PerspectiveDataProvider';
|
||||
export * from './PerspectiveCache';
|
||||
export * from './PerspectiveConfig';
|
||||
export * from './processPerspectiveDefaultColunns';
|
||||
export * from './PerspectiveDataPattern';
|
||||
export * from './PerspectiveDataLoader';
|
||||
|
||||
@@ -0,0 +1,255 @@
|
||||
import { findForeignKeyForColumn } from 'dbgate-tools';
|
||||
import type { DatabaseInfo, TableInfo, ViewInfo } from 'dbgate-types';
|
||||
import { createPerspectiveNodeConfig, MultipleDatabaseInfo, PerspectiveConfig } from './PerspectiveConfig';
|
||||
import { PerspectiveDataPattern, PerspectiveDataPatternDict } from './PerspectiveDataPattern';
|
||||
import { PerspectiveTableNode } from './PerspectiveTreeNode';
|
||||
|
||||
const namePredicates = [
|
||||
x => x.toLowerCase() == 'name',
|
||||
x => x.toLowerCase() == 'title',
|
||||
x => x.toLowerCase().includes('name'),
|
||||
x => x.toLowerCase().includes('title'),
|
||||
x => x.toLowerCase().includes('subject'),
|
||||
];
|
||||
|
||||
function getPerspectiveDefaultColumns(
|
||||
table: TableInfo | ViewInfo,
|
||||
db: DatabaseInfo,
|
||||
circularColumns?: string[]
|
||||
): [string[], string[]] {
|
||||
const columns = table.columns.map(x => x.columnName);
|
||||
const predicates = [
|
||||
...namePredicates,
|
||||
x =>
|
||||
table.columns
|
||||
.find(y => y.columnName == x)
|
||||
?.dataType?.toLowerCase()
|
||||
?.includes('char'),
|
||||
];
|
||||
|
||||
for (const predicate of predicates) {
|
||||
const col = columns.find(predicate);
|
||||
if (col) return [[col], null];
|
||||
}
|
||||
|
||||
if (circularColumns) {
|
||||
const keyPredicates = [
|
||||
x => findForeignKeyForColumn(table as TableInfo, x)?.columns?.length == 1 && !circularColumns.includes(x),
|
||||
x => findForeignKeyForColumn(table as TableInfo, x)?.columns?.length == 1,
|
||||
];
|
||||
|
||||
for (const predicate of keyPredicates) {
|
||||
const col = columns.find(predicate);
|
||||
if (col) return [null, [col]];
|
||||
}
|
||||
}
|
||||
|
||||
return [[columns[0]], null];
|
||||
}
|
||||
|
||||
function getPerspectiveDefaultCollectionColumns(pattern: PerspectiveDataPattern): string[] {
|
||||
const columns = pattern.columns.map(x => x.name);
|
||||
const predicates = [...namePredicates, x => pattern.columns.find(y => y.name == x)?.types?.includes('string')];
|
||||
|
||||
for (const predicate of predicates) {
|
||||
const col = columns.find(predicate);
|
||||
if (col) return [col];
|
||||
}
|
||||
}
|
||||
|
||||
export function perspectiveNodesHaveStructure(
|
||||
config: PerspectiveConfig,
|
||||
dbInfos: MultipleDatabaseInfo,
|
||||
dataPatterns: PerspectiveDataPatternDict,
|
||||
conid: string,
|
||||
database: string
|
||||
) {
|
||||
for (const node of config.nodes) {
|
||||
const db = dbInfos?.[node.conid || conid]?.[node.database || database];
|
||||
if (!db) return false;
|
||||
|
||||
const table = db.tables.find(x => x.pureName == node.pureName && x.schemaName == node.schemaName);
|
||||
const view = db.views.find(x => x.pureName == node.pureName && x.schemaName == node.schemaName);
|
||||
const collection = db.collections.find(x => x.pureName == node.pureName && x.schemaName == node.schemaName);
|
||||
|
||||
if (!table && !view && !collection) return false;
|
||||
if (collection && !dataPatterns?.[node.designerId]) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
export function shouldProcessPerspectiveDefaultColunns(
|
||||
config: PerspectiveConfig,
|
||||
dbInfos: MultipleDatabaseInfo,
|
||||
dataPatterns: PerspectiveDataPatternDict,
|
||||
conid: string,
|
||||
database: string
|
||||
) {
|
||||
const nodesNotProcessed = config.nodes.filter(x => !x.defaultColumnsProcessed);
|
||||
if (nodesNotProcessed.length == 0) return false;
|
||||
|
||||
return perspectiveNodesHaveStructure(config, dbInfos, dataPatterns, conid, database);
|
||||
}
|
||||
|
||||
// Performs one processing step: finds the first node without default columns,
// picks its default checked columns (or a FK child to expand) and returns a
// NEW config with that node marked processed. Returns null when no step can
// be made (missing root/structure or no unprocessed resolvable node).
function processPerspectiveDefaultColunnsStep(
  config: PerspectiveConfig,
  dbInfos: MultipleDatabaseInfo,
  dataPatterns: PerspectiveDataPatternDict,
  conid: string,
  database: string
) {
  const rootNode = config.nodes.find(x => x.designerId == config.rootDesignerId);
  if (!rootNode) return null;
  const rootDb = dbInfos?.[rootNode.conid || conid]?.[rootNode.database || database];
  if (!rootDb) return null;
  const rootTable = rootDb.tables.find(x => x.pureName == rootNode.pureName && x.schemaName == rootNode.schemaName);
  const rootView = rootDb.views.find(x => x.pureName == rootNode.pureName && x.schemaName == rootNode.schemaName);

  // tree built from the current config; used to locate nodes by designer id
  const root = new PerspectiveTableNode(
    rootTable || rootView,
    dbInfos,
    config,
    null,
    null,
    { conid, database },
    null,
    config.rootDesignerId
  );

  for (const node of config.nodes) {
    if (node.defaultColumnsProcessed) continue;

    const db = dbInfos?.[node.conid || conid]?.[node.database || database];
    if (!db) continue;

    const table = db.tables.find(x => x.pureName == node.pureName && x.schemaName == node.schemaName);
    const view = db.views.find(x => x.pureName == node.pureName && x.schemaName == node.schemaName);
    const collection = db.collections.find(x => x.pureName == node.pureName && x.schemaName == node.schemaName);

    if (table || view) {
      const treeNode = root.findNodeByDesignerId(node.designerId);

      if (!treeNode) {
        // node not reachable in the tree: assign defaults without circular info
        const [defaultColumns] = getPerspectiveDefaultColumns(table || view, db, null);

        return {
          ...config,
          nodes: config.nodes.map(n =>
            n.designerId == node.designerId
              ? {
                  ...n,
                  defaultColumnsProcessed: true,
                  checkedColumns: defaultColumns,
                }
              : n
          ),
        };
      }

      const circularColumns = treeNode.childNodes.filter(x => x.isCircular).map(x => x.columnName);
      const [defaultColumns, defaultRefs] = getPerspectiveDefaultColumns(table || view, db, circularColumns);

      if (defaultRefs) {
        // no display column: check the FK child node instead
        const childNode = treeNode.childNodes.find(x => x.columnName == defaultRefs[0]);
        if (childNode?.designerId) {
          return {
            ...config,
            nodes: config.nodes.map(n =>
              n.designerId == childNode.designerId
                ? {
                    ...n,
                    isNodeChecked: true,
                  }
                : n.designerId == node.designerId
                ? {
                    ...n,
                    defaultColumnsProcessed: true,
                  }
                : n
            ),
          };
        } else if (childNode) {
          // child has no config entry yet: create one, then check it
          const [newConfig, nodeConfig] = childNode.ensureNodeConfig(config);
          nodeConfig.isNodeChecked = true;

          return {
            ...newConfig,
            nodes: newConfig.nodes.map(n =>
              n.designerId == node.designerId
                ? {
                    ...n,
                    defaultColumnsProcessed: true,
                  }
                : n
            ),
          };
        }
      } else {
        return {
          ...config,
          nodes: config.nodes.map(n =>
            n.designerId == node.designerId
              ? {
                  ...n,
                  defaultColumnsProcessed: true,
                  checkedColumns: defaultColumns,
                }
              : n
          ),
        };
      }
    }

    if (collection) {
      // collections pick their default column from the analysed data pattern
      const defaultColumns = getPerspectiveDefaultCollectionColumns(dataPatterns?.[node.designerId]);
      return {
        ...config,
        nodes: config.nodes.map(n =>
          n.designerId == node.designerId
            ? {
                ...n,
                defaultColumnsProcessed: true,
                checkedColumns: defaultColumns,
              }
            : n
        ),
      };
    }
  }

  return null;
}
|
||||
|
||||
function markAllProcessed(config: PerspectiveConfig): PerspectiveConfig {
|
||||
return {
|
||||
...config,
|
||||
nodes: config.nodes.map(x => ({
|
||||
...x,
|
||||
defaultColumnsProcessed: true,
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
||||
export function processPerspectiveDefaultColunns(
|
||||
config: PerspectiveConfig,
|
||||
dbInfos: MultipleDatabaseInfo,
|
||||
dataPatterns: PerspectiveDataPatternDict,
|
||||
conid: string,
|
||||
database: string
|
||||
): PerspectiveConfig {
|
||||
while (config.nodes.filter(x => !x.defaultColumnsProcessed).length > 0) {
|
||||
const newConfig = processPerspectiveDefaultColunnsStep(config, dbInfos, dataPatterns, conid, database);
|
||||
if (!newConfig) {
|
||||
return markAllProcessed(config);
|
||||
}
|
||||
if (
|
||||
newConfig.nodes.filter(x => x.defaultColumnsProcessed).length <=
|
||||
config.nodes.filter(x => x.defaultColumnsProcessed).length
|
||||
) {
|
||||
return markAllProcessed(config);
|
||||
}
|
||||
config = newConfig;
|
||||
}
|
||||
return markAllProcessed(config);
|
||||
}
|
||||
@@ -0,0 +1,142 @@
|
||||
import { PerspectiveDisplay } from '../PerspectiveDisplay';
|
||||
import { PerspectiveTableNode } from '../PerspectiveTreeNode';
|
||||
import { chinookDbInfo } from './chinookDbInfo';
|
||||
import { createPerspectiveConfig, createPerspectiveNodeConfig } from '../PerspectiveConfig';
|
||||
import artistDataFlat from './artistDataFlat';
|
||||
import artistDataAlbum from './artistDataAlbum';
|
||||
import artistDataAlbumTrack from './artistDataAlbumTrack';
|
||||
import { processPerspectiveDefaultColunns } from '../processPerspectiveDefaultColunns';
|
||||
|
||||
// Single root table, no nesting: one row per source row plus the trailing
// incomplete-rows marker recorded in loadIndicatorsCounts.
test('test flat view', () => {
  const artistTable = chinookDbInfo.tables.find(x => x.pureName == 'Artist');
  const configColumns = processPerspectiveDefaultColunns(
    createPerspectiveConfig({ pureName: 'Artist' }),
    { conid: { db: chinookDbInfo } },
    null,
    'conid',
    'db'
  );
  const root = new PerspectiveTableNode(
    artistTable,
    { conid: { db: chinookDbInfo } },
    configColumns,
    null,
    null,
    { conid: 'conid', database: 'db' },
    null,
    configColumns.rootDesignerId
  );
  const display = new PerspectiveDisplay(root, artistDataFlat);

  expect(display.rows.length).toEqual(4);
  expect(display.rows[0].rowData).toEqual(['AC/DC']);
  expect(display.loadIndicatorsCounts).toEqual({
    Artist: 4,
  });
});

// Artist -> Album reference: artist cells span their albums' rows, covered
// cells are marked as skips.
test('test one level nesting', () => {
  const artistTable = chinookDbInfo.tables.find(x => x.pureName == 'Artist');

  const config = createPerspectiveConfig({ pureName: 'Artist' });
  config.nodes.push(createPerspectiveNodeConfig({ pureName: 'Album' }));
  config.references.push({
    sourceId: config.nodes[0].designerId,
    targetId: config.nodes[1].designerId,
    designerId: '1',
    columns: [{ source: 'ArtistId', target: 'ArtistId' }],
  });

  const configColumns = processPerspectiveDefaultColunns(config, { conid: { db: chinookDbInfo } }, null, 'conid', 'db');

  // const config = createPerspectiveConfig({ pureName: 'Artist' });
  // config.nodes[0].checkedColumns = ['Album'];

  const root = new PerspectiveTableNode(
    artistTable,
    { conid: { db: chinookDbInfo } },
    configColumns,
    null,
    null,
    { conid: 'conid', database: 'db' },
    null,
    configColumns.nodes[0].designerId
  );
  const display = new PerspectiveDisplay(root, artistDataAlbum);

  console.log(display.loadIndicatorsCounts);
  // console.log(display.rows);

  expect(display.rows.length).toEqual(6);

  expect(display.rows[0].rowData).toEqual(['AC/DC', 'For Those About To Rock We Salute You']);
  expect(display.rows[0].rowSpans).toEqual([2, 1]);
  expect(display.rows[0].rowCellSkips).toEqual([false, false]);

  expect(display.rows[1].rowData).toEqual([undefined, 'Let There Be Rock']);
  expect(display.rows[1].rowSpans).toEqual([1, 1]);
  expect(display.rows[1].rowCellSkips).toEqual([true, false]);

  expect(display.rows[2].rowData).toEqual(['Accept', 'Balls to the Wall']);
  expect(display.rows[2].rowSpans).toEqual([2, 1]);
  expect(display.rows[2].rowCellSkips).toEqual([false, false]);

  expect(display.rows[5].rowData).toEqual(['Alanis Morissette', 'Jagged Little Pill']);
  expect(display.rows[5].rowSpans).toEqual([1, 1]);

  expect(display.loadIndicatorsCounts).toEqual({
    Artist: 6,
    'Artist.Album': 6,
  });
});

// Artist -> Album -> Track: spans cascade across two nesting levels.
test('test two level nesting', () => {
  const artistTable = chinookDbInfo.tables.find(x => x.pureName == 'Artist');
  const config = createPerspectiveConfig({ pureName: 'Artist' });
  config.nodes.push(createPerspectiveNodeConfig({ pureName: 'Album' }));
  config.nodes.push(createPerspectiveNodeConfig({ pureName: 'Track' }));
  config.references.push({
    sourceId: config.nodes[0].designerId,
    targetId: config.nodes[1].designerId,
    designerId: '1',
    columns: [{ source: 'ArtistId', target: 'ArtistId' }],
  });
  config.references.push({
    sourceId: config.nodes[1].designerId,
    targetId: config.nodes[2].designerId,
    designerId: '2',
    columns: [{ source: 'AlbumId', target: 'AlbumId' }],
  });
  const configColumns = processPerspectiveDefaultColunns(config, { conid: { db: chinookDbInfo } }, null, 'conid', 'db');

  const root = new PerspectiveTableNode(
    artistTable,
    { conid: { db: chinookDbInfo } },
    configColumns,
    null,
    null,
    { conid: 'conid', database: 'db' },
    null,
    configColumns.nodes[0].designerId
  );
  const display = new PerspectiveDisplay(root, artistDataAlbumTrack);

  console.log(display.rows);
  expect(display.rows.length).toEqual(8);

  expect(display.rows[0].rowData).toEqual([
    'AC/DC',
    'For Those About To Rock We Salute You',
    'For Those About To Rock (We Salute You)',
  ]);
  expect(display.rows[0].rowSpans).toEqual([4, 2, 1]);
  expect(display.rows[0].rowCellSkips).toEqual([false, false, false]);

  expect(display.rows[1].rowData).toEqual([undefined, undefined, 'Put The Finger On You']);
  expect(display.rows[1].rowSpans).toEqual([1, 1, 1]);
  expect(display.rows[1].rowCellSkips).toEqual([true, true, false]);

  expect(display.rows[2].rowData).toEqual([undefined, 'Let There Be Rock', 'Go Down']);
  expect(display.rows[2].rowSpans).toEqual([1, 2, 1]);
  expect(display.rows[2].rowCellSkips).toEqual([true, false, false]);
});
|
||||
@@ -0,0 +1,98 @@
|
||||
import { PerspectiveDisplay } from '../PerspectiveDisplay';
|
||||
import { PerspectiveTableNode } from '../PerspectiveTreeNode';
|
||||
import { createPerspectiveConfig, PerspectiveNodeConfig } from '../PerspectiveConfig';
|
||||
import { processPerspectiveDefaultColunns } from '../processPerspectiveDefaultColunns';
|
||||
import { DatabaseAnalyser } from 'dbgate-tools';
|
||||
import { analyseDataPattern } from '../PerspectiveDataPattern';
|
||||
import { PerspectiveDataProvider } from '../PerspectiveDataProvider';
|
||||
|
||||
// Fixture: NoSQL-style documents with a nested array (follows) and a nested
// object (nested), exercising hierarchic column expansion.
const accountData = [
  {
    name: 'jan',
    email: 'jan@foo.co',
    follows: [{ name: 'lucie' }, { name: 'petr' }],
    nested: { email: 'jan@nest.cz' },
  },
  {
    name: 'romeo',
    email: 'romeo@foo.co',
    follows: [{ name: 'julie' }, { name: 'wiliam' }],
    nested: { email: 'romeo@nest.cz' },
  },
];
|
||||
|
||||
// Builds a PerspectiveDisplay over the Account collection fixture.
// cfgFunc may tweak the root node config (e.g. checkedColumns) after
// default-column processing but before the tree is built.
function createDisplay(cfgFunc?: (cfg: PerspectiveNodeConfig) => void) {
  const collectionInfo = {
    objectTypeField: 'collections',
    pureName: 'Account',
  };
  // empty structure with a single collection — no tables/views involved
  const dbInfo = {
    ...DatabaseAnalyser.createEmptyStructure(),
    collections: [collectionInfo],
  };
  const config = createPerspectiveConfig({ pureName: 'Account' });
  // collections need an analysed data pattern keyed by designer id
  const dataPatterns = {
    [config.rootDesignerId]: analyseDataPattern(
      {
        conid: 'conid',
        database: 'db',
        pureName: 'Account',
      },
      accountData
    ),
  };
  const configColumns = processPerspectiveDefaultColunns(
    config,
    { conid: { db: dbInfo } },
    dataPatterns,
    'conid',
    'db'
  );
  if (cfgFunc) {
    cfgFunc(configColumns.nodes[0]);
  }
  const root = new PerspectiveTableNode(
    collectionInfo,
    { conid: { db: dbInfo } },
    configColumns,
    null,
    new PerspectiveDataProvider(null, null, dataPatterns),
    { conid: 'conid', database: 'db' },
    null,
    configColumns.rootDesignerId
  );

  const display = new PerspectiveDisplay(root, accountData);

  return display;
}
|
||||
|
||||
// Default column for the collection ('name'): one flat row per document.
test('test nosql display', () => {
  const display = createDisplay();

  expect(display.rows.length).toEqual(2);
  expect(display.rows[0].rowData).toEqual(['jan']);
  expect(display.rows[1].rowData).toEqual(['romeo']);
});

// Nested array column ('follows::name') expands each document into one row
// per array element.
test('test nosql nested array display', () => {
  const display = createDisplay(cfg => {
    cfg.checkedColumns = ['name', 'follows::name'];
  });

  expect(display.rows.length).toEqual(4);
  expect(display.rows[0].rowData).toEqual(['jan', 'lucie']);
  expect(display.rows[1].rowData).toEqual([undefined, 'petr']);
  expect(display.rows[2].rowData).toEqual(['romeo', 'julie']);
  expect(display.rows[3].rowData).toEqual([undefined, 'wiliam']);
});

// Nested object column ('nested::email') stays on the same row — no fan-out.
test('test nosql nested object', () => {
  const display = createDisplay(cfg => {
    cfg.checkedColumns = ['name', 'nested::email'];
  });

  expect(display.rows.length).toEqual(2);
  expect(display.rows[0].rowData).toEqual(['jan', 'jan@nest.cz']);
  expect(display.rows[1].rowData).toEqual(['romeo', 'romeo@nest.cz']);
});
|
||||
@@ -0,0 +1,56 @@
|
||||
// Fixture: artists with nested Album arrays; incompleteRowsIndicator entries
// mark where data loading was cut off (both nested and top level).
export default [
  {
    ArtistId: 1,
    Name: 'AC/DC',
    Album: [
      {
        Title: 'For Those About To Rock We Salute You',
        ArtistId: 1,
      },
      {
        Title: 'Let There Be Rock',
        ArtistId: 1,
      },
    ],
  },
  {
    ArtistId: 2,
    Name: 'Accept',
    Album: [
      {
        Title: 'Balls to the Wall',
        ArtistId: 2,
      },
      {
        Title: 'Restless and Wild',
        ArtistId: 2,
      },
    ],
  },
  {
    ArtistId: 3,
    Name: 'Aerosmith',
    Album: [
      {
        Title: 'Big Ones',
        ArtistId: 3,
      },
    ],
  },
  {
    ArtistId: 4,
    Name: 'Alanis Morissette',
    Album: [
      {
        Title: 'Jagged Little Pill',
        ArtistId: 4,
      },
      {
        incompleteRowsIndicator: ['Artist.Album'],
      },
    ],
  },
  {
    incompleteRowsIndicator: ['Artist'],
  },
];
|
||||
@@ -0,0 +1,78 @@
|
||||
// Fixture: two-level nesting Artist -> Album -> Track, with a trailing
// top-level incompleteRowsIndicator marker.
export default [
  {
    ArtistId: 1,
    Name: 'AC/DC',
    Album: [
      {
        Title: 'For Those About To Rock We Salute You',
        AlbumId: 1,
        ArtistId: 1,
        Track: [
          {
            Name: 'For Those About To Rock (We Salute You)',
            AlbumId: 1,
          },
          {
            Name: 'Put The Finger On You',
            AlbumId: 1,
          },
        ],
      },
      {
        Title: 'Let There Be Rock',
        AlbumId: 4,
        ArtistId: 1,
        Track: [
          {
            Name: 'Go Down',
            AlbumId: 4,
          },
          {
            Name: 'Dog Eat Dog',
            AlbumId: 4,
          },
        ],
      },
    ],
  },
  {
    ArtistId: 2,
    Name: 'Accept',
    Album: [
      {
        Title: 'Balls to the Wall',
        AlbumId: 2,
        ArtistId: 2,
        Track: [
          {
            Name: 'Balls to the Wall',
            AlbumId: 2,
          },
        ],
      },
      {
        Title: 'Restless and Wild',
        AlbumId: 3,
        ArtistId: 2,
        Track: [
          {
            Name: 'Fast As a Shark',
            AlbumId: 3,
          },
          {
            Name: 'Restless and Wild',
            AlbumId: 3,
          },
          {
            Name: 'Princess of the Dawn',
            AlbumId: 3,
          },
        ],
      },
    ],
  },

  {
    incompleteRowsIndicator: ['Artist'],
  },
];
|
||||
@@ -0,0 +1,21 @@
|
||||
// Fixture: flat artist rows plus a trailing incomplete-rows marker.
export default [
  {
    ArtistId: 1,
    Name: 'AC/DC',
  },
  {
    ArtistId: 2,
    Name: 'Accept',
  },
  {
    ArtistId: 3,
    Name: 'Aerosmith',
  },
  {
    ArtistId: 4,
    Name: 'Alanis Morissette',
  },
  {
    incompleteRowsIndicator: ['Artist'],
  },
];
|
||||
File diff suppressed because it is too large
Load Diff
@@ -16,8 +16,8 @@
|
||||
"dbgate-types": "^5.0.0-alpha.1",
|
||||
"@types/jest": "^25.1.4",
|
||||
"@types/node": "^13.7.0",
|
||||
"jest": "^24.9.0",
|
||||
"ts-jest": "^25.2.1",
|
||||
"jest": "^28.1.3",
|
||||
"ts-jest": "^28.0.7",
|
||||
"typescript": "^4.4.3"
|
||||
},
|
||||
"dependencies": {
|
||||
|
||||
@@ -0,0 +1,310 @@
|
||||
import P from 'parsimmon';
|
||||
import moment from 'moment';
|
||||
import { FilterType } from './types';
|
||||
import { Condition } from 'dbgate-sqltree';
|
||||
import type { TransformType } from 'dbgate-types';
|
||||
import { interpretEscapes, token, word, whitespace } from './common';
|
||||
|
||||
const compoudCondition = conditionType => conditions => {
|
||||
if (conditions.length == 1) return conditions[0];
|
||||
return {
|
||||
[conditionType]: conditions,
|
||||
};
|
||||
};
|
||||
|
||||
function getTransformCondition(transform: TransformType, value) {
|
||||
return {
|
||||
conditionType: 'binary',
|
||||
operator: '=',
|
||||
left: {
|
||||
exprType: 'transform',
|
||||
transform,
|
||||
expr: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const yearCondition = () => value => {
|
||||
return getTransformCondition('YEAR', value);
|
||||
};
|
||||
|
||||
const yearMonthCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)/);
|
||||
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions: [getTransformCondition('YEAR', m[1]), getTransformCondition('MONTH', m[2])],
|
||||
};
|
||||
};
|
||||
|
||||
const yearMonthDayCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)/);
|
||||
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions: [
|
||||
getTransformCondition('YEAR', m[1]),
|
||||
getTransformCondition('MONTH', m[2]),
|
||||
getTransformCondition('DAY', m[3]),
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
const yearEdge = edgeFunction => value => {
|
||||
return moment(new Date(parseInt(value), 0, 1))
|
||||
[edgeFunction]('year')
|
||||
.format('YYYY-MM-DDTHH:mm:ss.SSS');
|
||||
};
|
||||
|
||||
const yearMonthEdge = edgeFunction => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)/);
|
||||
|
||||
return moment(new Date(parseInt(m[1]), parseInt(m[2]) - 1, 1))
|
||||
[edgeFunction]('month')
|
||||
.format('YYYY-MM-DDTHH:mm:ss.SSS');
|
||||
};
|
||||
|
||||
const yearMonthDayEdge = edgeFunction => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)/);
|
||||
|
||||
return moment(new Date(parseInt(m[1]), parseInt(m[2]) - 1, parseInt(m[3])))
|
||||
[edgeFunction]('day')
|
||||
.format('YYYY-MM-DDTHH:mm:ss.SSS');
|
||||
};
|
||||
|
||||
const yearMonthDayMinuteEdge = edgeFunction => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)\s+(\d\d?):(\d\d?)/);
|
||||
const year = m[1];
|
||||
const month = m[2];
|
||||
const day = m[3];
|
||||
const hour = m[4];
|
||||
const minute = m[5];
|
||||
const dateObject = new Date(year, month - 1, day, hour, minute);
|
||||
|
||||
return moment(dateObject)[edgeFunction]('minute').format('YYYY-MM-DDTHH:mm:ss.SSS');
|
||||
};
|
||||
|
||||
const yearMonthDayMinuteSecondEdge = edgeFunction => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)(T|\s+)(\d\d?):(\d\d?):(\d\d?)/);
|
||||
const year = m[1];
|
||||
const month = m[2];
|
||||
const day = m[3];
|
||||
const hour = m[5];
|
||||
const minute = m[6];
|
||||
const second = m[7];
|
||||
const dateObject = new Date(year, month - 1, day, hour, minute, second);
|
||||
|
||||
return moment(dateObject)[edgeFunction]('second').format('YYYY-MM-DDTHH:mm:ss.SSS');
|
||||
};
|
||||
|
||||
const createIntervalCondition = (start, end) => {
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions: [
|
||||
{
|
||||
conditionType: 'binary',
|
||||
operator: '>=',
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value: start,
|
||||
},
|
||||
},
|
||||
{
|
||||
conditionType: 'binary',
|
||||
operator: '<=',
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value: end,
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
const createDateIntervalCondition = (start, end) => {
|
||||
return createIntervalCondition(start.format('YYYY-MM-DDTHH:mm:ss.SSS'), end.format('YYYY-MM-DDTHH:mm:ss.SSS'));
|
||||
};
|
||||
|
||||
const fixedMomentIntervalCondition = (intervalType, diff) => () => {
|
||||
return createDateIntervalCondition(
|
||||
moment().add(intervalType, diff).startOf(intervalType),
|
||||
moment().add(intervalType, diff).endOf(intervalType)
|
||||
);
|
||||
};
|
||||
|
||||
const yearMonthDayMinuteCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)\s+(\d\d?):(\d\d?)/);
|
||||
const year = m[1];
|
||||
const month = m[2];
|
||||
const day = m[3];
|
||||
const hour = m[4];
|
||||
const minute = m[5];
|
||||
const dateObject = new Date(year, month - 1, day, hour, minute);
|
||||
|
||||
return createDateIntervalCondition(moment(dateObject).startOf('minute'), moment(dateObject).endOf('minute'));
|
||||
};
|
||||
|
||||
const yearMonthDaySecondCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)(T|\s+)(\d\d?):(\d\d?):(\d\d?)/);
|
||||
const year = m[1];
|
||||
const month = m[2];
|
||||
const day = m[3];
|
||||
const hour = m[5];
|
||||
const minute = m[6];
|
||||
const second = m[7];
|
||||
const dateObject = new Date(year, month - 1, day, hour, minute, second);
|
||||
|
||||
return createDateIntervalCondition(moment(dateObject).startOf('second'), moment(dateObject).endOf('second'));
|
||||
};
|
||||
|
||||
const binaryCondition = operator => value => ({
|
||||
conditionType: 'binary',
|
||||
operator,
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value,
|
||||
},
|
||||
});
|
||||
|
||||
const unaryCondition = conditionType => () => {
|
||||
return {
|
||||
conditionType,
|
||||
expr: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
const sqlTemplate = templateSql => {
|
||||
return {
|
||||
conditionType: 'rawTemplate',
|
||||
templateSql,
|
||||
expr: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
const createParser = () => {
|
||||
const langDef = {
|
||||
comma: () => word(','),
|
||||
|
||||
not: () => word('NOT'),
|
||||
notNull: r => r.not.then(r.null).map(unaryCondition('isNotNull')),
|
||||
null: () => word('NULL').map(unaryCondition('isNull')),
|
||||
|
||||
sql: () =>
|
||||
token(P.regexp(/\{(.*?)\}/, 1))
|
||||
.map(sqlTemplate)
|
||||
.desc('sql literal'),
|
||||
|
||||
yearNum: () => P.regexp(/\d\d\d\d/).map(yearCondition()),
|
||||
yearMonthNum: () => P.regexp(/\d\d\d\d-\d\d?/).map(yearMonthCondition()),
|
||||
yearMonthDayNum: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?/).map(yearMonthDayCondition()),
|
||||
yearMonthDayMinute: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?\s+\d\d?:\d\d?/).map(yearMonthDayMinuteCondition()),
|
||||
yearMonthDaySecond: () =>
|
||||
P.regexp(/\d\d\d\d-\d\d?-\d\d?(\s+|T)\d\d?:\d\d?:\d\d?/).map(yearMonthDaySecondCondition()),
|
||||
|
||||
yearNumStart: () => P.regexp(/\d\d\d\d/).map(yearEdge('startOf')),
|
||||
yearNumEnd: () => P.regexp(/\d\d\d\d/).map(yearEdge('endOf')),
|
||||
yearMonthStart: () => P.regexp(/\d\d\d\d-\d\d?/).map(yearMonthEdge('startOf')),
|
||||
yearMonthEnd: () => P.regexp(/\d\d\d\d-\d\d?/).map(yearMonthEdge('endOf')),
|
||||
yearMonthDayStart: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?/).map(yearMonthDayEdge('startOf')),
|
||||
yearMonthDayEnd: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?/).map(yearMonthDayEdge('endOf')),
|
||||
yearMonthDayMinuteStart: () =>
|
||||
P.regexp(/\d\d\d\d-\d\d?-\d\d?\s+\d\d?:\d\d?/).map(yearMonthDayMinuteEdge('startOf')),
|
||||
yearMonthDayMinuteEnd: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?\s+\d\d?:\d\d?/).map(yearMonthDayMinuteEdge('endOf')),
|
||||
yearMonthDayMinuteSecondStart: () =>
|
||||
P.regexp(/\d\d\d\d-\d\d?-\d\d?(\s+|T)\d\d?:\d\d?:\d\d?/).map(yearMonthDayMinuteSecondEdge('startOf')),
|
||||
yearMonthDayMinuteSecondEnd: () =>
|
||||
P.regexp(/\d\d\d\d-\d\d?-\d\d?(\s+|T)\d\d?:\d\d?:\d\d?/).map(yearMonthDayMinuteSecondEdge('endOf')),
|
||||
|
||||
this: () => word('THIS'),
|
||||
last: () => word('LAST'),
|
||||
next: () => word('NEXT'),
|
||||
week: () => word('WEEK'),
|
||||
month: () => word('MONTH'),
|
||||
year: () => word('YEAR'),
|
||||
|
||||
yesterday: () => word('YESTERDAY').map(fixedMomentIntervalCondition('day', -1)),
|
||||
today: () => word('TODAY').map(fixedMomentIntervalCondition('day', 0)),
|
||||
tomorrow: () => word('TOMORROW').map(fixedMomentIntervalCondition('day', 1)),
|
||||
|
||||
lastWeek: r => r.last.then(r.week).map(fixedMomentIntervalCondition('week', -1)),
|
||||
thisWeek: r => r.this.then(r.week).map(fixedMomentIntervalCondition('week', 0)),
|
||||
nextWeek: r => r.next.then(r.week).map(fixedMomentIntervalCondition('week', 1)),
|
||||
|
||||
lastMonth: r => r.last.then(r.month).map(fixedMomentIntervalCondition('month', -1)),
|
||||
thisMonth: r => r.this.then(r.month).map(fixedMomentIntervalCondition('month', 0)),
|
||||
nextMonth: r => r.next.then(r.month).map(fixedMomentIntervalCondition('month', 1)),
|
||||
|
||||
lastYear: r => r.last.then(r.year).map(fixedMomentIntervalCondition('year', -1)),
|
||||
thisYear: r => r.this.then(r.year).map(fixedMomentIntervalCondition('year', 0)),
|
||||
nextYear: r => r.next.then(r.year).map(fixedMomentIntervalCondition('year', 1)),
|
||||
|
||||
valueStart: r =>
|
||||
P.alt(
|
||||
r.yearMonthDayMinuteSecondStart,
|
||||
r.yearMonthDayMinuteStart,
|
||||
r.yearMonthDayStart,
|
||||
r.yearMonthStart,
|
||||
r.yearNumStart
|
||||
),
|
||||
valueEnd: r =>
|
||||
P.alt(r.yearMonthDayMinuteSecondEnd, r.yearMonthDayMinuteEnd, r.yearMonthDayEnd, r.yearMonthEnd, r.yearNumEnd),
|
||||
|
||||
le: r => word('<=').then(r.valueEnd).map(binaryCondition('<=')),
|
||||
ge: r => word('>=').then(r.valueStart).map(binaryCondition('>=')),
|
||||
lt: r => word('<').then(r.valueStart).map(binaryCondition('<')),
|
||||
gt: r => word('>').then(r.valueEnd).map(binaryCondition('>')),
|
||||
|
||||
element: r =>
|
||||
P.alt(
|
||||
r.yearMonthDaySecond,
|
||||
r.yearMonthDayMinute,
|
||||
r.yearMonthDayNum,
|
||||
r.yearMonthNum,
|
||||
r.yearNum,
|
||||
r.yesterday,
|
||||
r.today,
|
||||
r.tomorrow,
|
||||
r.lastWeek,
|
||||
r.thisWeek,
|
||||
r.nextWeek,
|
||||
r.lastMonth,
|
||||
r.thisMonth,
|
||||
r.nextMonth,
|
||||
r.lastYear,
|
||||
r.thisYear,
|
||||
r.nextYear,
|
||||
r.null,
|
||||
r.notNull,
|
||||
r.le,
|
||||
r.lt,
|
||||
r.ge,
|
||||
r.gt,
|
||||
r.sql
|
||||
).trim(whitespace),
|
||||
factor: r => r.element.sepBy(whitespace).map(compoudCondition('$and')),
|
||||
list: r => r.factor.sepBy(r.comma).map(compoudCondition('$or')),
|
||||
};
|
||||
|
||||
return P.createLanguage(langDef);
|
||||
};
|
||||
|
||||
export const datetimeParser = createParser();
|
||||
@@ -3,7 +3,7 @@ import moment from 'moment';
|
||||
|
||||
export type FilterMultipleValuesMode = 'is' | 'is_not' | 'contains' | 'begins' | 'ends';
|
||||
|
||||
export function getFilterValueExpression(value, dataType) {
|
||||
export function getFilterValueExpression(value, dataType?) {
|
||||
if (value == null) return 'NULL';
|
||||
if (isTypeDateTime(dataType)) return moment(value).format('YYYY-MM-DD HH:mm:ss');
|
||||
if (value === true) return 'TRUE';
|
||||
|
||||
@@ -2,9 +2,9 @@ import P from 'parsimmon';
|
||||
import moment from 'moment';
|
||||
import { FilterType } from './types';
|
||||
import { Condition } from 'dbgate-sqltree';
|
||||
import { TransformType } from 'dbgate-types';
|
||||
import { interpretEscapes, token, word, whitespace } from './common';
|
||||
import { mongoParser } from './mongoParser';
|
||||
import { datetimeParser } from './datetimeParser';
|
||||
|
||||
const binaryCondition = operator => value => ({
|
||||
conditionType: 'binary',
|
||||
@@ -67,114 +67,14 @@ const negateCondition = condition => {
|
||||
};
|
||||
};
|
||||
|
||||
function getTransformCondition(transform: TransformType, value) {
|
||||
const sqlTemplate = templateSql => {
|
||||
return {
|
||||
conditionType: 'binary',
|
||||
operator: '=',
|
||||
left: {
|
||||
exprType: 'transform',
|
||||
transform,
|
||||
expr: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value,
|
||||
conditionType: 'rawTemplate',
|
||||
templateSql,
|
||||
expr: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const yearCondition = () => value => {
|
||||
return getTransformCondition('YEAR', value);
|
||||
};
|
||||
|
||||
const yearMonthCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)/);
|
||||
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions: [getTransformCondition('YEAR', m[1]), getTransformCondition('MONTH', m[2])],
|
||||
};
|
||||
};
|
||||
|
||||
const yearMonthDayCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)/);
|
||||
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions: [
|
||||
getTransformCondition('YEAR', m[1]),
|
||||
getTransformCondition('MONTH', m[2]),
|
||||
getTransformCondition('DAY', m[3]),
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
const createIntervalCondition = (start, end) => {
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions: [
|
||||
{
|
||||
conditionType: 'binary',
|
||||
operator: '>=',
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value: start,
|
||||
},
|
||||
},
|
||||
{
|
||||
conditionType: 'binary',
|
||||
operator: '<=',
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value: end,
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
const createDateIntervalCondition = (start, end) => {
|
||||
return createIntervalCondition(start.format('YYYY-MM-DDTHH:mm:ss.SSS'), end.format('YYYY-MM-DDTHH:mm:ss.SSS'));
|
||||
};
|
||||
|
||||
const fixedMomentIntervalCondition = (intervalType, diff) => () => {
|
||||
return createDateIntervalCondition(
|
||||
moment().add(intervalType, diff).startOf(intervalType),
|
||||
moment().add(intervalType, diff).endOf(intervalType)
|
||||
);
|
||||
};
|
||||
|
||||
const yearMonthDayMinuteCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)\s+(\d\d?):(\d\d?)/);
|
||||
const year = m[1];
|
||||
const month = m[2];
|
||||
const day = m[3];
|
||||
const hour = m[4];
|
||||
const minute = m[5];
|
||||
const dateObject = new Date(year, month - 1, day, hour, minute);
|
||||
|
||||
return createDateIntervalCondition(moment(dateObject).startOf('minute'), moment(dateObject).endOf('minute'));
|
||||
};
|
||||
|
||||
const yearMonthDaySecondCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)(T|\s+)(\d\d?):(\d\d?):(\d\d?)/);
|
||||
const year = m[1];
|
||||
const month = m[2];
|
||||
const day = m[3];
|
||||
const hour = m[5];
|
||||
const minute = m[6];
|
||||
const second = m[7];
|
||||
const dateObject = new Date(year, month - 1, day, hour, minute, second);
|
||||
|
||||
return createDateIntervalCondition(moment(dateObject).startOf('second'), moment(dateObject).endOf('second'));
|
||||
};
|
||||
|
||||
const createParser = (filterType: FilterType) => {
|
||||
@@ -206,12 +106,10 @@ const createParser = (filterType: FilterType) => {
|
||||
|
||||
noQuotedString: () => P.regexp(/[^\s^,^'^"]+/).desc('string unquoted'),
|
||||
|
||||
yearNum: () => P.regexp(/\d\d\d\d/).map(yearCondition()),
|
||||
yearMonthNum: () => P.regexp(/\d\d\d\d-\d\d?/).map(yearMonthCondition()),
|
||||
yearMonthDayNum: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?/).map(yearMonthDayCondition()),
|
||||
yearMonthDayMinute: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?\s+\d\d?:\d\d?/).map(yearMonthDayMinuteCondition()),
|
||||
yearMonthDaySecond: () =>
|
||||
P.regexp(/\d\d\d\d-\d\d?-\d\d?(\s+|T)\d\d?:\d\d?:\d\d?/).map(yearMonthDaySecondCondition()),
|
||||
sql: () =>
|
||||
token(P.regexp(/\{(.*?)\}/, 1))
|
||||
.map(sqlTemplate)
|
||||
.desc('sql literal'),
|
||||
|
||||
value: r => P.alt(...allowedValues.map(x => r[x])),
|
||||
valueTestEq: r => r.value.map(binaryCondition('=')),
|
||||
@@ -223,33 +121,10 @@ const createParser = (filterType: FilterType) => {
|
||||
null: () => word('NULL').map(unaryCondition('isNull')),
|
||||
empty: () => word('EMPTY').map(unaryCondition('isEmpty')),
|
||||
notEmpty: r => r.not.then(r.empty).map(unaryCondition('isNotEmpty')),
|
||||
true: () => P.regexp(/true/i).map(binaryFixedValueCondition(1)),
|
||||
false: () => P.regexp(/false/i).map(binaryFixedValueCondition(0)),
|
||||
trueNum: () => word('1').map(binaryFixedValueCondition(1)),
|
||||
falseNum: () => word('0').map(binaryFixedValueCondition(0)),
|
||||
|
||||
this: () => word('THIS'),
|
||||
last: () => word('LAST'),
|
||||
next: () => word('NEXT'),
|
||||
week: () => word('WEEK'),
|
||||
month: () => word('MONTH'),
|
||||
year: () => word('YEAR'),
|
||||
|
||||
yesterday: () => word('YESTERDAY').map(fixedMomentIntervalCondition('day', -1)),
|
||||
today: () => word('TODAY').map(fixedMomentIntervalCondition('day', 0)),
|
||||
tomorrow: () => word('TOMORROW').map(fixedMomentIntervalCondition('day', 1)),
|
||||
|
||||
lastWeek: r => r.last.then(r.week).map(fixedMomentIntervalCondition('week', -1)),
|
||||
thisWeek: r => r.this.then(r.week).map(fixedMomentIntervalCondition('week', 0)),
|
||||
nextWeek: r => r.next.then(r.week).map(fixedMomentIntervalCondition('week', 1)),
|
||||
|
||||
lastMonth: r => r.last.then(r.month).map(fixedMomentIntervalCondition('month', -1)),
|
||||
thisMonth: r => r.this.then(r.month).map(fixedMomentIntervalCondition('month', 0)),
|
||||
nextMonth: r => r.next.then(r.month).map(fixedMomentIntervalCondition('month', 1)),
|
||||
|
||||
lastYear: r => r.last.then(r.year).map(fixedMomentIntervalCondition('year', -1)),
|
||||
thisYear: r => r.this.then(r.year).map(fixedMomentIntervalCondition('year', 0)),
|
||||
nextYear: r => r.next.then(r.year).map(fixedMomentIntervalCondition('year', 1)),
|
||||
true: () => P.regexp(/true/i).map(binaryFixedValueCondition('1')),
|
||||
false: () => P.regexp(/false/i).map(binaryFixedValueCondition('0')),
|
||||
trueNum: () => word('1').map(binaryFixedValueCondition('1')),
|
||||
falseNum: () => word('0').map(binaryFixedValueCondition('0')),
|
||||
|
||||
eq: r => word('=').then(r.value).map(binaryCondition('=')),
|
||||
ne: r => word('!=').then(r.value).map(binaryCondition('<>')),
|
||||
@@ -278,7 +153,7 @@ const createParser = (filterType: FilterType) => {
|
||||
allowedValues.push('string1Num', 'string2Num', 'number');
|
||||
}
|
||||
|
||||
const allowedElements = ['null', 'notNull', 'eq', 'ne', 'ne2'];
|
||||
const allowedElements = ['null', 'notNull', 'eq', 'ne', 'ne2', 'sql'];
|
||||
if (filterType == 'number' || filterType == 'datetime' || filterType == 'eval') {
|
||||
allowedElements.push('le', 'ge', 'lt', 'gt');
|
||||
}
|
||||
@@ -294,27 +169,7 @@ const createParser = (filterType: FilterType) => {
|
||||
if (filterType == 'eval') {
|
||||
allowedElements.push('true', 'false');
|
||||
}
|
||||
if (filterType == 'datetime') {
|
||||
allowedElements.push(
|
||||
'yearMonthDaySecond',
|
||||
'yearMonthDayMinute',
|
||||
'yearMonthDayNum',
|
||||
'yearMonthNum',
|
||||
'yearNum',
|
||||
'yesterday',
|
||||
'today',
|
||||
'tomorrow',
|
||||
'lastWeek',
|
||||
'thisWeek',
|
||||
'nextWeek',
|
||||
'lastMonth',
|
||||
'thisMonth',
|
||||
'nextMonth',
|
||||
'lastYear',
|
||||
'thisYear',
|
||||
'nextYear'
|
||||
);
|
||||
}
|
||||
|
||||
// must be last
|
||||
if (filterType == 'string' || filterType == 'eval') {
|
||||
allowedElements.push('valueTestStr');
|
||||
@@ -328,10 +183,10 @@ const createParser = (filterType: FilterType) => {
|
||||
const parsers = {
|
||||
number: createParser('number'),
|
||||
string: createParser('string'),
|
||||
datetime: createParser('datetime'),
|
||||
logical: createParser('logical'),
|
||||
eval: createParser('eval'),
|
||||
mongo: mongoParser,
|
||||
datetime: datetimeParser,
|
||||
};
|
||||
|
||||
export function parseFilter(value: string, filterType: FilterType): Condition {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { parseFilter } from './parseFilter';
|
||||
const { parseFilter } = require('./parseFilter');
|
||||
|
||||
test('parse string', () => {
|
||||
const ast = parseFilter('"123"', 'string');
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { SqlDumper } from 'dbgate-types';
|
||||
import type { SqlDumper } from 'dbgate-types';
|
||||
import { Command, Select, Update, Delete, Insert } from './types';
|
||||
import { dumpSqlExpression } from './dumpSqlExpression';
|
||||
import { dumpSqlFromDefinition, dumpSqlSourceRef } from './dumpSqlSource';
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { SqlDumper } from 'dbgate-types';
|
||||
import type { SqlDumper } from 'dbgate-types';
|
||||
import { Condition, BinaryCondition } from './types';
|
||||
import { dumpSqlExpression } from './dumpSqlExpression';
|
||||
import { link } from 'fs';
|
||||
import { dumpSqlSelect } from './dumpSqlCommand';
|
||||
|
||||
export function dumpSqlCondition(dmp: SqlDumper, condition: Condition) {
|
||||
@@ -69,5 +68,19 @@ export function dumpSqlCondition(dmp: SqlDumper, condition: Condition) {
|
||||
dmp.put(' ^and ');
|
||||
dumpSqlExpression(dmp, condition.right);
|
||||
break;
|
||||
case 'in':
|
||||
dumpSqlExpression(dmp, condition.expr);
|
||||
dmp.put(' ^in (%,v)', condition.values);
|
||||
break;
|
||||
case 'rawTemplate':
|
||||
let was = false;
|
||||
for (const item of condition.templateSql.split('$$')) {
|
||||
if (was) {
|
||||
dumpSqlExpression(dmp, condition.expr);
|
||||
}
|
||||
dmp.putRaw(item);
|
||||
was = true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import _ from 'lodash';
|
||||
import { SqlDumper } from 'dbgate-types';
|
||||
import type { SqlDumper } from 'dbgate-types';
|
||||
import { Expression, ColumnRefExpression } from './types';
|
||||
import { dumpSqlSourceRef } from './dumpSqlSource';
|
||||
|
||||
@@ -35,17 +35,24 @@ export function dumpSqlExpression(dmp: SqlDumper, expr: Expression) {
|
||||
dmp.put(')');
|
||||
break;
|
||||
|
||||
case 'methodCall':
|
||||
dumpSqlExpression(dmp, expr.thisObject)
|
||||
dmp.put('.%s(', expr.method);
|
||||
dmp.putCollection(',', expr.args, x => dumpSqlExpression(dmp, x));
|
||||
dmp.put(')');
|
||||
break;
|
||||
|
||||
case 'transform':
|
||||
dmp.transform(expr.transform, () => dumpSqlExpression(dmp, expr.expr));
|
||||
break;
|
||||
|
||||
case 'rowNumber':
|
||||
dmp.put(" ^row_number() ^over (^order ^by ");
|
||||
dmp.put(' ^row_number() ^over (^order ^by ');
|
||||
dmp.putCollection(', ', expr.orderBy, x => {
|
||||
dumpSqlExpression(dmp, x);
|
||||
dmp.put(' %k', x.direction);
|
||||
});
|
||||
dmp.put(")");
|
||||
dmp.put(')');
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Source, FromDefinition, Relation } from './types';
|
||||
import { SqlDumper } from 'dbgate-types';
|
||||
import type { SqlDumper } from 'dbgate-types';
|
||||
import { dumpSqlSelect } from './dumpSqlCommand';
|
||||
import { dumpSqlCondition } from './dumpSqlCondition';
|
||||
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
import { SqlDumper } from 'dbgate-types';
|
||||
import _ from 'lodash';
|
||||
import { Condition, BinaryCondition } from './types';
|
||||
import { dumpSqlExpression } from './dumpSqlExpression';
|
||||
import { link } from 'fs';
|
||||
import { evaluateExpression } from './evaluateExpression';
|
||||
import { cond } from 'lodash';
|
||||
|
||||
function isEmpty(value) {
|
||||
if (value == null) return true;
|
||||
|
||||
@@ -20,6 +20,9 @@ export function evaluateExpression(expr: Expression, values) {
|
||||
case 'call':
|
||||
return null;
|
||||
|
||||
case 'methodCall':
|
||||
return null;
|
||||
|
||||
case 'transform':
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { NamedObjectInfo, RangeDefinition, TransformType } from 'dbgate-types';
|
||||
import type { NamedObjectInfo, RangeDefinition, TransformType } from 'dbgate-types';
|
||||
|
||||
// export interface Command {
|
||||
// }
|
||||
@@ -99,6 +99,18 @@ export interface BetweenCondition {
|
||||
right: Expression;
|
||||
}
|
||||
|
||||
export interface InCondition {
|
||||
conditionType: 'in';
|
||||
expr: Expression;
|
||||
values: any[];
|
||||
}
|
||||
|
||||
export interface RawTemplateCondition {
|
||||
conditionType: 'rawTemplate';
|
||||
templateSql: string;
|
||||
expr: Expression;
|
||||
}
|
||||
|
||||
export type Condition =
|
||||
| BinaryCondition
|
||||
| NotCondition
|
||||
@@ -107,7 +119,9 @@ export type Condition =
|
||||
| LikeCondition
|
||||
| ExistsCondition
|
||||
| NotExistsCondition
|
||||
| BetweenCondition;
|
||||
| BetweenCondition
|
||||
| InCondition
|
||||
| RawTemplateCondition;
|
||||
|
||||
export interface Source {
|
||||
name?: NamedObjectInfo;
|
||||
@@ -155,6 +169,13 @@ export interface CallExpression {
|
||||
argsPrefix?: string; // DISTINCT in case of COUNT DISTINCT
|
||||
}
|
||||
|
||||
export interface MethodCallExpression {
|
||||
exprType: 'methodCall';
|
||||
method: string;
|
||||
args: Expression[];
|
||||
thisObject: Expression;
|
||||
}
|
||||
|
||||
export interface TranformExpression {
|
||||
exprType: 'transform';
|
||||
expr: Expression;
|
||||
@@ -172,6 +193,7 @@ export type Expression =
|
||||
| PlaceholderExpression
|
||||
| RawExpression
|
||||
| CallExpression
|
||||
| MethodCallExpression
|
||||
| TranformExpression
|
||||
| RowNumberExpression;
|
||||
export type OrderByExpression = Expression & { direction: 'ASC' | 'DESC' };
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { EngineDriver, SqlDumper } from 'dbgate-types';
|
||||
import type { EngineDriver, SqlDumper } from 'dbgate-types';
|
||||
import { Command, Condition } from './types';
|
||||
import { dumpSqlCommand } from './dumpSqlCommand';
|
||||
|
||||
|
||||
@@ -31,9 +31,11 @@
|
||||
"typescript": "^4.4.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"lodash": "^4.17.21",
|
||||
"dbgate-query-splitter": "^4.9.0",
|
||||
"dbgate-query-splitter": "^4.9.2",
|
||||
"dbgate-sqltree": "^5.0.0-alpha.1",
|
||||
"debug": "^4.3.4",
|
||||
"json-stable-stringify": "^1.0.1",
|
||||
"lodash": "^4.17.21",
|
||||
"uuid": "^3.4.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import _ from 'lodash';
|
||||
import {
|
||||
import type {
|
||||
ColumnInfo,
|
||||
ConstraintInfo,
|
||||
EngineDriver,
|
||||
@@ -32,6 +32,12 @@ export class SqlDumper implements AlterProcessor {
|
||||
dialect: SqlDialect;
|
||||
indentLevel = 0;
|
||||
|
||||
static keywordsCase = 'upperCase';
|
||||
static convertKeywordCase(keyword: any): string {
|
||||
if (this.keywordsCase == 'lowerCase') return keyword?.toString()?.toLowerCase();
|
||||
return keyword?.toString()?.toUpperCase();
|
||||
}
|
||||
|
||||
constructor(driver: EngineDriver) {
|
||||
this.driver = driver;
|
||||
this.dialect = driver.dialect;
|
||||
@@ -60,10 +66,10 @@ export class SqlDumper implements AlterProcessor {
|
||||
this.putRaw("'");
|
||||
}
|
||||
putByteArrayValue(value) {
|
||||
this.putRaw('NULL');
|
||||
this.put('^null');
|
||||
}
|
||||
putValue(value) {
|
||||
if (value === null) this.putRaw('NULL');
|
||||
if (value === null) this.put('^null');
|
||||
else if (value === true) this.putRaw('1');
|
||||
else if (value === false) this.putRaw('0');
|
||||
else if (_isString(value)) this.putStringValue(value);
|
||||
@@ -71,7 +77,7 @@ export class SqlDumper implements AlterProcessor {
|
||||
else if (_isDate(value)) this.putStringValue(new Date(value).toISOString());
|
||||
else if (value?.type == 'Buffer' && _isArray(value?.data)) this.putByteArrayValue(value?.data);
|
||||
else if (_isPlainObject(value) || _isArray(value)) this.putStringValue(JSON.stringify(value));
|
||||
else this.putRaw('NULL');
|
||||
else this.put('^null');
|
||||
}
|
||||
putCmd(format, ...args) {
|
||||
this.put(format, ...args);
|
||||
@@ -92,7 +98,7 @@ export class SqlDumper implements AlterProcessor {
|
||||
case 'k':
|
||||
{
|
||||
if (value) {
|
||||
this.putRaw(value.toUpperCase());
|
||||
this.putRaw(SqlDumper.convertKeywordCase(value));
|
||||
}
|
||||
}
|
||||
break;
|
||||
@@ -128,7 +134,7 @@ export class SqlDumper implements AlterProcessor {
|
||||
switch (c) {
|
||||
case '^':
|
||||
while (i < length && format[i].match(/[a-z0-9_]/i)) {
|
||||
this.putRaw(format[i].toUpperCase());
|
||||
this.putRaw(SqlDumper.convertKeywordCase(format[i]));
|
||||
i++;
|
||||
}
|
||||
break;
|
||||
@@ -181,6 +187,14 @@ export class SqlDumper implements AlterProcessor {
|
||||
this.put(' ^auto_increment');
|
||||
}
|
||||
|
||||
createDatabase(name: string) {
|
||||
this.putCmd('^create ^database %i', name);
|
||||
}
|
||||
|
||||
dropDatabase(name: string) {
|
||||
this.putCmd('^drop ^database %i', name);
|
||||
}
|
||||
|
||||
specialColumnOptions(column) {}
|
||||
|
||||
columnDefinition(column: ColumnInfo, { includeDefault = true, includeNullable = true, includeCollate = true } = {}) {
|
||||
@@ -527,7 +541,7 @@ export class SqlDumper implements AlterProcessor {
|
||||
}
|
||||
|
||||
truncateTable(name: NamedObjectInfo) {
|
||||
this.putCmd('^delete ^from %f', name);
|
||||
this.putCmd('^truncate ^table %f', name);
|
||||
}
|
||||
|
||||
dropConstraints(table: TableInfo, dropReferences = false) {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import {
|
||||
import type {
|
||||
DatabaseInfo,
|
||||
EngineDriver,
|
||||
FunctionInfo,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { DbDiffOptions, testEqualColumns, testEqualTables, testEqualSqlObjects } from './diffTools';
|
||||
import { DatabaseInfo, EngineDriver, SqlObjectInfo, TableInfo } from 'dbgate-types';
|
||||
import type { DatabaseInfo, EngineDriver, SqlObjectInfo, TableInfo } from 'dbgate-types';
|
||||
import _ from 'lodash';
|
||||
|
||||
export function computeDiffRowsCore(sourceList, targetList, testEqual) {
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { EngineDriver } from 'dbgate-types';
|
||||
import _intersection from 'lodash/intersection';
|
||||
import { prepareTableForImport } from './tableTransforms';
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import {
|
||||
import type {
|
||||
ColumnInfo,
|
||||
ConstraintInfo,
|
||||
DatabaseInfo,
|
||||
|
||||
@@ -16,6 +16,7 @@ const dialect = {
|
||||
isSparse: false,
|
||||
isPersisted: false,
|
||||
},
|
||||
defaultSchemaName: null,
|
||||
};
|
||||
|
||||
export const driverBase = {
|
||||
|
||||
@@ -1,4 +1,9 @@
|
||||
import _compact from 'lodash/compact';
|
||||
import _isString from 'lodash/isString';
|
||||
|
||||
export interface FilterNameDefinition {
|
||||
childName: string;
|
||||
}
|
||||
|
||||
// original C# variant
|
||||
// public bool Match(string value)
|
||||
@@ -54,17 +59,29 @@ function fuzzysearch(needle, haystack) {
|
||||
return true;
|
||||
}
|
||||
|
||||
export function filterName(filter: string, ...names: string[]) {
|
||||
export function filterName(filter: string, ...names: (string | FilterNameDefinition)[]) {
|
||||
if (!filter) return true;
|
||||
|
||||
// const camelVariants = [name.replace(/[^A-Z]/g, '')]
|
||||
const tokens = filter.split(' ').map(x => x.trim());
|
||||
|
||||
const namesCompacted = _compact(names);
|
||||
|
||||
// @ts-ignore
|
||||
const namesOwn: string[] = namesCompacted.filter(x => _isString(x));
|
||||
// @ts-ignore
|
||||
const namesChild: string[] = namesCompacted.filter(x => x.childName).map(x => x.childName);
|
||||
|
||||
for (const token of tokens) {
|
||||
const tokenUpper = token.toUpperCase();
|
||||
const found = namesCompacted.find(name => fuzzysearch(tokenUpper, name.toUpperCase()));
|
||||
if (!found) return false;
|
||||
if (tokenUpper.startsWith('#')) {
|
||||
const tokenUpperSub = tokenUpper.substring(1);
|
||||
const found = namesChild.find(name => fuzzysearch(tokenUpperSub, name.toUpperCase()));
|
||||
if (!found) return false;
|
||||
} else {
|
||||
const found = namesOwn.find(name => fuzzysearch(tokenUpper, name.toUpperCase()));
|
||||
if (!found) return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user