Compare commits
315 Commits
v5.2.9-beta
...
v5.3.5-pro
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
891329de29 | ||
|
|
34e11b351e | ||
|
|
2d64d37f58 | ||
|
|
915c6f42ec | ||
|
|
875f1adb3d | ||
|
|
311bf3f706 | ||
|
|
2e9daba3aa | ||
|
|
106a09162b | ||
|
|
21f7623c29 | ||
|
|
31162ef175 | ||
|
|
50583f928a | ||
|
|
b87e53b704 | ||
|
|
2f42319d2b | ||
|
|
ff8a5f1658 | ||
|
|
be0be4d0a0 | ||
|
|
9a39fee663 | ||
|
|
075f92ac31 | ||
|
|
0c4ad146b8 | ||
|
|
3fa688c9cb | ||
|
|
fe9394103f | ||
|
|
b747c750e8 | ||
|
|
967daf3bb6 | ||
|
|
c097e78dd0 | ||
|
|
e982e8cd9b | ||
|
|
5559d51dfb | ||
|
|
791a2e8cd4 | ||
|
|
d243af323e | ||
|
|
73ec42a9c8 | ||
|
|
e71d278b20 | ||
|
|
61c3ff423a | ||
|
|
1afa9000f8 | ||
|
|
75ef8ec801 | ||
|
|
94dc292dc9 | ||
|
|
74adf1dd3f | ||
|
|
db6d5f498b | ||
|
|
b9737533bd | ||
|
|
93f64a6bab | ||
|
|
8367cc4b59 | ||
|
|
e97787113c | ||
|
|
6fb9c4b14f | ||
|
|
4436ff95a8 | ||
|
|
d54b47f713 | ||
|
|
62de736bce | ||
|
|
2232a7bab1 | ||
|
|
32ebd86171 | ||
|
|
8e17516d54 | ||
|
|
3bfa7d54d0 | ||
|
|
60bf682449 | ||
|
|
24c6205d81 | ||
|
|
018b97b197 | ||
|
|
4cbfa7c937 | ||
|
|
db7f3e5619 | ||
|
|
bcafd9a078 | ||
|
|
eaa943a39d | ||
|
|
3b813e93e7 | ||
|
|
23a52dc79e | ||
|
|
88e245da7d | ||
|
|
5ee9e5098c | ||
|
|
4ea55644c4 | ||
|
|
a13ca9f96a | ||
|
|
ba4826559b | ||
|
|
9d4803edc7 | ||
|
|
71850f8497 | ||
|
|
ccb28783a2 | ||
|
|
7ad8edcdae | ||
|
|
77b42e6a04 | ||
|
|
869e837ee5 | ||
|
|
b27f58be9f | ||
|
|
a51bd70e80 | ||
|
|
95f580d51c | ||
|
|
2b9fa9a70f | ||
|
|
1cbeeac7cd | ||
|
|
d131287ca0 | ||
|
|
9f553ef52a | ||
|
|
781d6f1585 | ||
|
|
76c8f8ef62 | ||
|
|
49e338bbbc | ||
|
|
968e69c7f2 | ||
|
|
8a69e94d79 | ||
|
|
80a4d3f238 | ||
|
|
30e3bc6eeb | ||
|
|
9bc654cd38 | ||
|
|
b9ad63c926 | ||
|
|
4bdcf219f2 | ||
|
|
303bd659ad | ||
|
|
9fedfcbb0e | ||
|
|
8cffeaa767 | ||
|
|
9e28f6f3aa | ||
|
|
19377bbeed | ||
|
|
12d60c7ed9 | ||
|
|
64e770f51e | ||
|
|
17cf9d5007 | ||
|
|
c3609e8c7b | ||
|
|
2a48e0c4a0 | ||
|
|
d0fa565704 | ||
|
|
b30286cd11 | ||
|
|
4b5c136589 | ||
|
|
84cd9d53b5 | ||
|
|
2ef4b534e3 | ||
|
|
b7c7e41375 | ||
|
|
c0d664d399 | ||
|
|
a89cb607b4 | ||
|
|
ecde2da2af | ||
|
|
7193a4d26c | ||
|
|
38d8a471b3 | ||
|
|
a9f9085daa | ||
|
|
83ce5710ae | ||
|
|
ddf385caac | ||
|
|
c582902902 | ||
|
|
e9cd1906bc | ||
|
|
2706297142 | ||
|
|
75465bf415 | ||
|
|
42a79b2557 | ||
|
|
838bc34a4f | ||
|
|
63cdb4e507 | ||
|
|
ff3c39ccad | ||
|
|
49597b4b01 | ||
|
|
a3b7490849 | ||
|
|
45a1c58dc5 | ||
|
|
61b9fd9210 | ||
|
|
7c8156fbb9 | ||
|
|
7a0635234a | ||
|
|
7e5364d400 | ||
|
|
cfa08286de | ||
|
|
9132bfb656 | ||
|
|
a9352f2a93 | ||
|
|
47729d8cc3 | ||
|
|
e537b43563 | ||
|
|
5f14da3844 | ||
|
|
e179b0f20b | ||
|
|
35532b718a | ||
|
|
42c71c1204 | ||
|
|
591945dc93 | ||
|
|
ecfaa7198b | ||
|
|
27e714111b | ||
|
|
c086eaa510 | ||
|
|
a7444a1475 | ||
|
|
399298d3bb | ||
|
|
196c0b8a3e | ||
|
|
5d6d827044 | ||
|
|
2440d6b75f | ||
|
|
623456b0a7 | ||
|
|
9bfb37ab94 | ||
|
|
630d909b73 | ||
|
|
33552e30b7 | ||
|
|
a64504ba02 | ||
|
|
04b195f4c6 | ||
|
|
17fd9035ee | ||
|
|
f867cc5a1e | ||
|
|
97aa563fe7 | ||
|
|
fb2e261a08 | ||
|
|
aad4df419c | ||
|
|
de60f1b335 | ||
|
|
c0c06a2099 | ||
|
|
bcfb54b7c7 | ||
|
|
8b56ebfb39 | ||
|
|
1128fe6c8f | ||
|
|
db5f5a9153 | ||
|
|
25fb3b71ca | ||
|
|
a6822dd293 | ||
|
|
112513a569 | ||
|
|
fc448ed578 | ||
|
|
f777530b1c | ||
|
|
7fcebedcdd | ||
|
|
cf39fd59f9 | ||
|
|
6beecd157f | ||
|
|
57b26a2729 | ||
|
|
6ee8ca5f86 | ||
|
|
1adf1da0eb | ||
|
|
d537a75d83 | ||
|
|
2e847eee9b | ||
|
|
07cb4defe6 | ||
|
|
74a597164e | ||
|
|
f7f4a0ed3f | ||
|
|
dc45b1e75f | ||
|
|
5e68ce3218 | ||
|
|
faf6339b41 | ||
|
|
33cd3b0647 | ||
|
|
4c5da50a04 | ||
|
|
2c805b3357 | ||
|
|
f345c80144 | ||
|
|
4346147bfc | ||
|
|
b0405855aa | ||
|
|
53ee6eacb2 | ||
|
|
f39b3dd347 | ||
|
|
385f8ff5fd | ||
|
|
fad8e91c7e | ||
|
|
74b0216714 | ||
|
|
af3529e5e7 | ||
|
|
d3936ae3ec | ||
|
|
0afee6e3fe | ||
|
|
f1920549a8 | ||
|
|
b5661afdcf | ||
|
|
38a80ec695 | ||
|
|
f697ba03f8 | ||
|
|
feaaa35590 | ||
|
|
74c04cf113 | ||
|
|
83e15ede5c | ||
|
|
6a942a5058 | ||
|
|
8864c3489d | ||
|
|
a4cb65b7b1 | ||
|
|
c3fe20b6f9 | ||
|
|
8db941dc06 | ||
|
|
05329951f9 | ||
|
|
dd964273cd | ||
|
|
c3c9ad1aed | ||
|
|
cd8fe5d691 | ||
|
|
15d99f98f8 | ||
|
|
be6e0f3bc8 | ||
|
|
3867b7f5ba | ||
|
|
1b347c7e0b | ||
|
|
10664b16fe | ||
|
|
e10e8ca161 | ||
|
|
c3e05e22ad | ||
|
|
97753e2b11 | ||
|
|
315c0670d0 | ||
|
|
e5fb3414fe | ||
|
|
227d81a01a | ||
|
|
bacb9510d7 | ||
|
|
48209509ae | ||
|
|
c2a01e4822 | ||
|
|
3e44fd823c | ||
|
|
47b98041c9 | ||
|
|
739205c192 | ||
|
|
8f0b44ade9 | ||
|
|
cb0a11fda9 | ||
|
|
befada8b87 | ||
|
|
85a43c7a5b | ||
|
|
50b64cf0c6 | ||
|
|
5c080568d8 | ||
|
|
9d5c7e6df2 | ||
|
|
4864a376c6 | ||
|
|
ef77dbf768 | ||
|
|
7999148f3c | ||
|
|
ed134d787b | ||
|
|
f04a3bdbd5 | ||
|
|
2a56b562eb | ||
|
|
2199a49126 | ||
|
|
14db7b1a98 | ||
|
|
314b72f148 | ||
|
|
ebcad6eded | ||
|
|
edad03d988 | ||
|
|
062c4053ca | ||
|
|
f20a802068 | ||
|
|
a612fc1649 | ||
|
|
bb38778853 | ||
|
|
409b1c84e7 | ||
|
|
db8b8feb3e | ||
|
|
6cdbfd1a89 | ||
|
|
49c90b9be9 | ||
|
|
8043869332 | ||
|
|
9f9c4d82da | ||
|
|
297b321bc8 | ||
|
|
a8999855bf | ||
|
|
0c12dcaf16 | ||
|
|
954df821a5 | ||
|
|
d68d9206b4 | ||
|
|
9d99c01018 | ||
|
|
16ed91b147 | ||
|
|
4d32e57947 | ||
|
|
4d25fef37d | ||
|
|
25835ee19f | ||
|
|
2cd3aac158 | ||
|
|
e6cdc63e61 | ||
|
|
ee671297bf | ||
|
|
ce895f92cd | ||
|
|
4e746a3055 | ||
|
|
5bc0af1fba | ||
|
|
66a9e0d14a | ||
|
|
b536b56348 | ||
|
|
ad6a22d2a6 | ||
|
|
64d9b26d79 | ||
|
|
284606e3d5 | ||
|
|
e8129fd499 | ||
|
|
d454da325f | ||
|
|
5c703c786d | ||
|
|
864c5bb208 | ||
|
|
504d16f189 | ||
|
|
d87af2a820 | ||
|
|
4e6e0a79c4 | ||
|
|
f4fbb28124 | ||
|
|
1c03a8ce9e | ||
|
|
5bf2cf2784 | ||
|
|
e572cd392c | ||
|
|
a812ff510d | ||
|
|
cafe0e68c3 | ||
|
|
728f3621eb | ||
|
|
ca0ae2084c | ||
|
|
0cc7a98391 | ||
|
|
68a40e5da6 | ||
|
|
bbf2e2f7ed | ||
|
|
1f75a818c8 | ||
|
|
ebdcd9ad94 | ||
|
|
2d1ac97191 | ||
|
|
8f5b395935 | ||
|
|
df60d40134 | ||
|
|
2723c41832 | ||
|
|
a2102a51a1 | ||
|
|
958d786dfb | ||
|
|
e2526082b8 | ||
|
|
0d22c675b6 | ||
|
|
ab481121f9 | ||
|
|
e9ee52ac9d | ||
|
|
5eecb45961 | ||
|
|
94e991b059 | ||
|
|
373a35fe65 | ||
|
|
eba5bd9c2b | ||
|
|
ca3507f5d4 | ||
|
|
4f5db15c20 | ||
|
|
22bed04d13 | ||
|
|
d3737b4e08 | ||
|
|
20c1315380 | ||
|
|
e9442bd633 | ||
|
|
4214b4f613 | ||
|
|
cb9921918f |
139
.github/workflows/build-app-pro-beta.yaml
vendored
Normal file
@@ -0,0 +1,139 @@
|
||||
name: Electron app BETA PREMIUM
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-pro.[0-9]+'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-22.04]
|
||||
# os: [windows-2022, ubuntu-22.04]
|
||||
# os: [macos-12, windows-2022, ubuntu-22.04]
|
||||
# os: [macOS-10.15]
|
||||
|
||||
steps:
|
||||
- name: Context
|
||||
env:
|
||||
GITHUB_CONTEXT: ${{ toJson(github) }}
|
||||
run: echo "$GITHUB_CONTEXT"
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
- name: Use Node.js 18.x
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 18.x
|
||||
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
echo "Working directory"
|
||||
pwd
|
||||
cd ..
|
||||
git clone https://github.com/dbgate/dbgate-pro.git
|
||||
mkdir dbgate-merged
|
||||
cd dbgate-pro
|
||||
cd sync
|
||||
yarn
|
||||
node sync.js --nowatch
|
||||
cd ..
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}
|
||||
|
||||
- name: yarn adjustPackageJson
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn adjustPackageJson
|
||||
- name: yarn adjustPackageJsonPremium
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
node adjustPackageJsonPremium
|
||||
- name: yarn set timeout
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn config set network-timeout 100000
|
||||
- name: yarn install
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn install
|
||||
- name: setCurrentVersion
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn setCurrentVersion
|
||||
- name: printSecrets
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
- name: fillNativeModulesElectron
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn fillNativeModulesElectron
|
||||
- name: fillPackagedPlugins
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn fillPackagedPlugins
|
||||
- name: Publish
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn run build:app
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GH_TOKEN }} # token for electron publish
|
||||
|
||||
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
|
||||
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
|
||||
# WIN_CSC_LINK: ${{ secrets.WINCERT_CERTIFICATE }}
|
||||
# WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_PASSWORD }}
|
||||
|
||||
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
|
||||
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
|
||||
|
||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
|
||||
|
||||
- name: Copy artifacts
|
||||
run: |
|
||||
mkdir artifacts
|
||||
|
||||
cp app/dist/*x86*.AppImage artifacts/dbgate-premium-beta.AppImage || true
|
||||
cp app/dist/*win*.exe artifacts/dbgate-premium-beta.exe || true
|
||||
cp app/dist/*-mac_x64.dmg artifacts/dbgate-premium-beta.dmg || true
|
||||
|
||||
mv app/dist/*.exe artifacts/ || true
|
||||
mv app/dist/*.zip artifacts/ || true
|
||||
mv app/dist/*.tar.gz artifacts/ || true
|
||||
mv app/dist/*.AppImage artifacts/ || true
|
||||
mv app/dist/*.deb artifacts/ || true
|
||||
mv app/dist/*.snap artifacts/ || true
|
||||
mv app/dist/*.dmg artifacts/ || true
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v1
|
||||
with:
|
||||
name: ${{ matrix.os }}
|
||||
path: ../dbgate-merged/artifacts
|
||||
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
with:
|
||||
files: '../dbgate-merged/artifacts/**'
|
||||
prerelease: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
5
.github/workflows/build-app.yaml
vendored
@@ -45,6 +45,11 @@ jobs:
|
||||
- name: setCurrentVersion
|
||||
run: |
|
||||
yarn setCurrentVersion
|
||||
- name: printSecrets
|
||||
run: |
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
- name: fillNativeModulesElectron
|
||||
run: |
|
||||
yarn fillNativeModulesElectron
|
||||
|
||||
7
.github/workflows/build-docker.yaml
vendored
@@ -62,9 +62,16 @@ jobs:
|
||||
# yarn --version
|
||||
# yarn config set network-timeout 300000
|
||||
yarn install
|
||||
|
||||
- name: setCurrentVersion
|
||||
run: |
|
||||
yarn setCurrentVersion
|
||||
|
||||
- name: printSecrets
|
||||
run: |
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
- name: Prepare docker image
|
||||
run: |
|
||||
yarn run prepare:docker
|
||||
|
||||
6
.github/workflows/build-npm.yaml
vendored
@@ -49,6 +49,12 @@ jobs:
|
||||
run: |
|
||||
yarn setCurrentVersion
|
||||
|
||||
- name: printSecrets
|
||||
run: |
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
|
||||
- name: Publish types
|
||||
working-directory: packages/types
|
||||
run: |
|
||||
|
||||
33
CHANGELOG.md
@@ -8,6 +8,39 @@ Builds:
|
||||
- linux - application for linux
|
||||
- win - application for Windows
|
||||
|
||||
### 5.3.4
|
||||
- FIXED: On blank system does not start (window does not appear) #862
|
||||
- FIXED: Missing Execute, Export bar #861
|
||||
|
||||
### 5.3.3
|
||||
- FIXED: The application Window is not visible when opening after changing monitor configuration. #856
|
||||
- FIXED: Multi column filter is broken for Postgresql #855
|
||||
- ADDED: Do not display internal timescaledb objects in postgres databases #839
|
||||
- FIXED: When in splitview mode and Clicking "Refresh" button on the right side, will refresh the left side, and not the right side #810
|
||||
- FIXED: Cannot filter by uuid field in psql #538
|
||||
|
||||
### 5.3.1
|
||||
- FIXED: Column sorting on query tab not working #819
|
||||
- FIXED: Postgres Connection stays in "Loading database structure" until reloading the page #826
|
||||
- FIXED: Cannot read properties of undefined (reading 'length') on Tables #824
|
||||
- FIXED: Redshift doesn't show tables when connected #816
|
||||
|
||||
### 5.3.0
|
||||
- CHANGED: New Oracle driver, much better Oracle support. Works now also in docker distribution
|
||||
- FIXED: Connection to oracle with service name #809
|
||||
- ADDED: Connect to redis using a custom username #807
|
||||
- FIXED: Unable to open SQL files #797
|
||||
- FIXED: MongoDB query without columns #811
|
||||
- ADDED: Switch connection for opened file #814
|
||||
|
||||
### 5.2.9
|
||||
- FIXED: PostgreSQL doesn't show tables when connected #793 #805
|
||||
- FIXED: MongoDB write operations fail #798 #802
|
||||
- FIXED: Electron app logging lost most of log messages
|
||||
- FIXED: Connection error with SSH tunnel
|
||||
- ADDED: option to disable autoupgrades (with --disable-auto-upgrade)
|
||||
- ADDED: Send error context to github gist
|
||||
|
||||
### 5.2.8
|
||||
- FIXED: file menu save and save as not working
|
||||
- FIXED: query editor on import/export screen overlaps with selector
|
||||
|
||||
687
LICENSE
@@ -1,21 +1,674 @@
|
||||
MIT License
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (c) 2021 Jan Prochazka
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
Preamble
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<https://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
This project is licensed under the GPLv3 License. See the LICENSE file for full text of the GPLv3 license.
|
||||
|
||||
The original project was licensed under the MIT License, and the following notice applies to the original code:
|
||||
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2021 Jan Prochazka
|
||||
@@ -18,4 +22,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
SOFTWARE.
|
||||
@@ -12,7 +12,7 @@ DbGate is cross-platform database manager.
|
||||
It's designed to be simple to use and effective, when working with more databases simultaneously.
|
||||
But there are also many advanced features like schema compare, visual query designer, chart visualisation or batch export and import.
|
||||
|
||||
DbGate is licensed under MIT license and is completely free.
|
||||
DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
|
||||
|
||||
* Try it online - [demo.dbgate.org](https://demo.dbgate.org) - online demo application
|
||||
* **Download** application for Windows, Linux or Mac from [dbgate.org](https://dbgate.org/download/)
|
||||
@@ -22,7 +22,7 @@ DbGate is licensed under MIT license and is completely free.
|
||||
* MySQL
|
||||
* PostgreSQL
|
||||
* SQL Server
|
||||
* Oracle (experimental)
|
||||
* Oracle
|
||||
* MongoDB
|
||||
* Redis
|
||||
* SQLite
|
||||
|
||||
@@ -5,9 +5,6 @@ function adjustFile(file) {
|
||||
if (process.platform != 'win32') {
|
||||
delete json.optionalDependencies.msnodesqlv8;
|
||||
}
|
||||
if (process.arch == 'arm64') {
|
||||
delete json.optionalDependencies.oracledb;
|
||||
}
|
||||
fs.writeFileSync(file, JSON.stringify(json, null, 2), 'utf-8');
|
||||
}
|
||||
|
||||
|
||||
687
app/LICENSE
@@ -1,21 +1,674 @@
|
||||
MIT License
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (c) 2021 Jan Prochazka
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
Preamble
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<https://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
This project is licensed under the GPLv3 License. See the LICENSE file for full text of the GPLv3 license.
|
||||
|
||||
The original project was licensed under the MIT License, and the following notice applies to the original code:
|
||||
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2021 Jan Prochazka
|
||||
@@ -18,4 +22,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
SOFTWARE.
|
||||
BIN
app/icon.ico
|
Before Width: | Height: | Size: 192 KiB After Width: | Height: | Size: 202 KiB |
BIN
app/icon.png
|
Before Width: | Height: | Size: 57 KiB After Width: | Height: | Size: 68 KiB |
BIN
app/icon32.png
|
Before Width: | Height: | Size: 5.0 KiB After Width: | Height: | Size: 5.5 KiB |
|
Before Width: | Height: | Size: 89 KiB After Width: | Height: | Size: 64 KiB |
BIN
app/icon512.png
|
Before Width: | Height: | Size: 123 KiB After Width: | Height: | Size: 143 KiB |
|
Before Width: | Height: | Size: 27 KiB After Width: | Height: | Size: 32 KiB |
|
Before Width: | Height: | Size: 1.9 KiB After Width: | Height: | Size: 2.0 KiB |
|
Before Width: | Height: | Size: 57 KiB After Width: | Height: | Size: 68 KiB |
|
Before Width: | Height: | Size: 5.0 KiB After Width: | Height: | Size: 5.5 KiB |
|
Before Width: | Height: | Size: 8.4 KiB After Width: | Height: | Size: 9.6 KiB |
|
Before Width: | Height: | Size: 123 KiB After Width: | Height: | Size: 143 KiB |
|
Before Width: | Height: | Size: 12 KiB After Width: | Height: | Size: 14 KiB |
@@ -7,6 +7,7 @@
|
||||
"dependencies": {
|
||||
"electron-log": "^4.4.1",
|
||||
"electron-updater": "^4.6.1",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"lodash.clonedeepwith": "^4.5.0",
|
||||
"patch-package": "^6.4.7"
|
||||
},
|
||||
@@ -91,8 +92,7 @@
|
||||
"icon": "icon.ico",
|
||||
"publish": [
|
||||
"github"
|
||||
],
|
||||
"rfc3161TimeStampServer": "http://sha256timestamp.ws.symantec.com/sha256/timestamp"
|
||||
]
|
||||
},
|
||||
"files": [
|
||||
"packages",
|
||||
@@ -122,6 +122,6 @@
|
||||
"optionalDependencies": {
|
||||
"better-sqlite3": "9.6.0",
|
||||
"msnodesqlv8": "^4.2.1",
|
||||
"oracledb": "^5.5.0"
|
||||
"oracledb": "^6.6.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,12 +16,13 @@ const BrowserWindow = electron.BrowserWindow;
|
||||
const path = require('path');
|
||||
const url = require('url');
|
||||
const mainMenuDefinition = require('./mainMenuDefinition');
|
||||
const { settings } = require('cluster');
|
||||
const { isProApp, checkLicense } = require('./proTools');
|
||||
let disableAutoUpgrade = false;
|
||||
|
||||
// require('@electron/remote/main').initialize();
|
||||
|
||||
const configRootPath = path.join(app.getPath('userData'), 'config-root.json');
|
||||
let saveConfigOnExit = true;
|
||||
let initialConfig = {};
|
||||
let apiLoaded = false;
|
||||
let mainModule;
|
||||
@@ -173,6 +174,21 @@ ipcMain.on('quit-app', async (event, arg) => {
|
||||
mainWindow.close();
|
||||
}
|
||||
});
|
||||
ipcMain.on('reset-settings', async (event, arg) => {
|
||||
try {
|
||||
saveConfigOnExit = false;
|
||||
fs.unlinkSync(configRootPath);
|
||||
console.log('Deleted file:', configRootPath);
|
||||
} catch (err) {
|
||||
console.log('Error deleting config-root:', err.message);
|
||||
}
|
||||
|
||||
if (isMac()) {
|
||||
app.quit();
|
||||
} else {
|
||||
mainWindow.close();
|
||||
}
|
||||
});
|
||||
ipcMain.on('set-title', async (event, arg) => {
|
||||
mainWindow.setTitle(arg);
|
||||
});
|
||||
@@ -276,10 +292,34 @@ function fillMissingSettings(value) {
|
||||
return res;
|
||||
}
|
||||
|
||||
function ensureBoundsVisible(bounds) {
|
||||
const area = electron.screen.getDisplayMatching(bounds).workArea;
|
||||
|
||||
let { x, y, width, height } = bounds;
|
||||
|
||||
const isWithinDisplay =
|
||||
x >= area.x && x + width <= area.x + area.width && y >= area.y && y + height <= area.y + area.height;
|
||||
|
||||
if (!isWithinDisplay) {
|
||||
width = Math.min(width, area.width);
|
||||
height = Math.min(height, area.height);
|
||||
|
||||
if (width < 400) width = 400;
|
||||
if (height < 300) height = 300;
|
||||
|
||||
x = area.x; // + Math.round(area.width - width / 2);
|
||||
y = area.y; // + Math.round(area.height - height / 2);
|
||||
}
|
||||
|
||||
return { x, y, width, height };
|
||||
}
|
||||
|
||||
function createWindow() {
|
||||
const datadir = path.join(os.homedir(), '.dbgate');
|
||||
|
||||
let settingsJson = {};
|
||||
let licenseKey = null;
|
||||
try {
|
||||
const datadir = path.join(os.homedir(), '.dbgate');
|
||||
settingsJson = fillMissingSettings(
|
||||
JSON.parse(fs.readFileSync(path.join(datadir, 'settings.json'), { encoding: 'utf-8' }))
|
||||
);
|
||||
@@ -287,9 +327,22 @@ function createWindow() {
|
||||
console.log('Error loading settings.json:', err.message);
|
||||
settingsJson = fillMissingSettings({});
|
||||
}
|
||||
if (isProApp()) {
|
||||
try {
|
||||
licenseKey = fs.readFileSync(path.join(datadir, 'license.key'), { encoding: 'utf-8' });
|
||||
} catch (err) {
|
||||
console.log('Error loading license.key:', err.message);
|
||||
licenseKey = null;
|
||||
}
|
||||
}
|
||||
|
||||
const bounds = initialConfig['winBounds'];
|
||||
useNativeMenu = settingsJson['app.useNativeMenu'];
|
||||
const licenseOk = !isProApp() || checkLicense(licenseKey) == 'premium';
|
||||
|
||||
let bounds = initialConfig['winBounds'];
|
||||
if (bounds) {
|
||||
bounds = ensureBoundsVisible(bounds);
|
||||
}
|
||||
useNativeMenu = settingsJson['app.useNativeMenu'] || !licenseOk;
|
||||
|
||||
mainWindow = new BrowserWindow({
|
||||
width: 1200,
|
||||
@@ -327,24 +380,28 @@ function createWindow() {
|
||||
});
|
||||
mainWindow.on('close', () => {
|
||||
try {
|
||||
fs.writeFileSync(
|
||||
configRootPath,
|
||||
JSON.stringify({
|
||||
winBounds: mainWindow.getBounds(),
|
||||
winIsMaximized: mainWindow.isMaximized(),
|
||||
disableAutoUpgrade,
|
||||
}),
|
||||
'utf-8'
|
||||
);
|
||||
if (saveConfigOnExit) {
|
||||
fs.writeFileSync(
|
||||
configRootPath,
|
||||
JSON.stringify({
|
||||
winBounds: mainWindow.getBounds(),
|
||||
winIsMaximized: mainWindow.isMaximized(),
|
||||
disableAutoUpgrade,
|
||||
}),
|
||||
'utf-8'
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
console.log('Error saving config-root:', err.message);
|
||||
}
|
||||
});
|
||||
|
||||
// mainWindow.webContents.toggleDevTools();
|
||||
|
||||
mainWindow.loadURL(startUrl);
|
||||
if (os.platform() == 'linux') {
|
||||
mainWindow.setIcon(path.resolve(__dirname, '../icon.png'));
|
||||
}
|
||||
// mainWindow.webContents.toggleDevTools();
|
||||
|
||||
mainWindow.on('maximize', () => {
|
||||
mainWindow.webContents.send('setIsMaximized', true);
|
||||
@@ -353,6 +410,11 @@ function createWindow() {
|
||||
mainWindow.on('unmaximize', () => {
|
||||
mainWindow.webContents.send('setIsMaximized', false);
|
||||
});
|
||||
|
||||
// app.on('browser-window-focus', () => {
|
||||
// const bounds = ensureBoundsVisible(mainWindow.getBounds());
|
||||
// mainWindow.setBounds(bounds);
|
||||
// });
|
||||
}
|
||||
|
||||
if (!apiLoaded) {
|
||||
|
||||
@@ -91,6 +91,7 @@ module.exports = ({ editMenu }) => [
|
||||
{ command: 'folder.showLogs', hideDisabled: true },
|
||||
{ command: 'folder.showData', hideDisabled: true },
|
||||
{ command: 'new.gist', hideDisabled: true },
|
||||
{ command: 'app.resetSettings', hideDisabled: true },
|
||||
],
|
||||
},
|
||||
{
|
||||
|
||||
12
app/src/proTools.js
Normal file
@@ -0,0 +1,12 @@
|
||||
function isProApp() {
|
||||
return false;
|
||||
}
|
||||
|
||||
function checkLicense(license) {
|
||||
return null;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
isProApp,
|
||||
checkLicense,
|
||||
};
|
||||
@@ -476,6 +476,11 @@ buffer-crc32@~0.2.3:
|
||||
resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242"
|
||||
integrity sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==
|
||||
|
||||
buffer-equal-constant-time@1.0.1:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819"
|
||||
integrity sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==
|
||||
|
||||
buffer-equal@1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/buffer-equal/-/buffer-equal-1.0.0.tgz#59616b498304d556abd466966b22eeda3eca5fbe"
|
||||
@@ -927,6 +932,13 @@ duplexer3@^0.1.4:
|
||||
resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.5.tgz#0b5e4d7bad5de8901ea4440624c8e1d20099217e"
|
||||
integrity sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA==
|
||||
|
||||
ecdsa-sig-formatter@1.0.11:
|
||||
version "1.0.11"
|
||||
resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf"
|
||||
integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==
|
||||
dependencies:
|
||||
safe-buffer "^5.0.1"
|
||||
|
||||
ejs@^3.1.7:
|
||||
version "3.1.10"
|
||||
resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.10.tgz#69ab8358b14e896f80cc39e62087b88500c3ac3b"
|
||||
@@ -1663,6 +1675,39 @@ jsonfile@^6.0.1:
|
||||
optionalDependencies:
|
||||
graceful-fs "^4.1.6"
|
||||
|
||||
jsonwebtoken@^9.0.2:
|
||||
version "9.0.2"
|
||||
resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz#65ff91f4abef1784697d40952bb1998c504caaf3"
|
||||
integrity sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==
|
||||
dependencies:
|
||||
jws "^3.2.2"
|
||||
lodash.includes "^4.3.0"
|
||||
lodash.isboolean "^3.0.3"
|
||||
lodash.isinteger "^4.0.4"
|
||||
lodash.isnumber "^3.0.3"
|
||||
lodash.isplainobject "^4.0.6"
|
||||
lodash.isstring "^4.0.1"
|
||||
lodash.once "^4.0.0"
|
||||
ms "^2.1.1"
|
||||
semver "^7.5.4"
|
||||
|
||||
jwa@^1.4.1:
|
||||
version "1.4.1"
|
||||
resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a"
|
||||
integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==
|
||||
dependencies:
|
||||
buffer-equal-constant-time "1.0.1"
|
||||
ecdsa-sig-formatter "1.0.11"
|
||||
safe-buffer "^5.0.1"
|
||||
|
||||
jws@^3.2.2:
|
||||
version "3.2.2"
|
||||
resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304"
|
||||
integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==
|
||||
dependencies:
|
||||
jwa "^1.4.1"
|
||||
safe-buffer "^5.0.1"
|
||||
|
||||
keyv@^3.0.0:
|
||||
version "3.1.0"
|
||||
resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9"
|
||||
@@ -1718,11 +1763,46 @@ lodash.escaperegexp@^4.1.2:
|
||||
resolved "https://registry.yarnpkg.com/lodash.escaperegexp/-/lodash.escaperegexp-4.1.2.tgz#64762c48618082518ac3df4ccf5d5886dae20347"
|
||||
integrity sha512-TM9YBvyC84ZxE3rgfefxUWiQKLilstD6k7PTGt6wfbtXF8ixIJLOL3VYyV/z+ZiPLsVxAsKAFVwWlWeb2Y8Yyw==
|
||||
|
||||
lodash.includes@^4.3.0:
|
||||
version "4.3.0"
|
||||
resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f"
|
||||
integrity sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==
|
||||
|
||||
lodash.isboolean@^3.0.3:
|
||||
version "3.0.3"
|
||||
resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6"
|
||||
integrity sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==
|
||||
|
||||
lodash.isequal@^4.5.0:
|
||||
version "4.5.0"
|
||||
resolved "https://registry.yarnpkg.com/lodash.isequal/-/lodash.isequal-4.5.0.tgz#415c4478f2bcc30120c22ce10ed3226f7d3e18e0"
|
||||
integrity sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==
|
||||
|
||||
lodash.isinteger@^4.0.4:
|
||||
version "4.0.4"
|
||||
resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343"
|
||||
integrity sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==
|
||||
|
||||
lodash.isnumber@^3.0.3:
|
||||
version "3.0.3"
|
||||
resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc"
|
||||
integrity sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==
|
||||
|
||||
lodash.isplainobject@^4.0.6:
|
||||
version "4.0.6"
|
||||
resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb"
|
||||
integrity sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==
|
||||
|
||||
lodash.isstring@^4.0.1:
|
||||
version "4.0.1"
|
||||
resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451"
|
||||
integrity sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==
|
||||
|
||||
lodash.once@^4.0.0:
|
||||
version "4.1.1"
|
||||
resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac"
|
||||
integrity sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==
|
||||
|
||||
lodash@^4.17.15:
|
||||
version "4.17.21"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
|
||||
@@ -1860,6 +1940,11 @@ ms@2.1.2:
|
||||
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
|
||||
integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
|
||||
|
||||
ms@^2.1.1:
|
||||
version "2.1.3"
|
||||
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
|
||||
integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
|
||||
|
||||
msnodesqlv8@^4.2.1:
|
||||
version "4.2.1"
|
||||
resolved "https://registry.yarnpkg.com/msnodesqlv8/-/msnodesqlv8-4.2.1.tgz#59f2930e7f3b9b201d7288425a6ffa923ea1a573"
|
||||
@@ -1944,10 +2029,10 @@ open@^7.4.2:
|
||||
is-docker "^2.0.0"
|
||||
is-wsl "^2.1.1"
|
||||
|
||||
oracledb@^5.5.0:
|
||||
version "5.5.0"
|
||||
resolved "https://registry.yarnpkg.com/oracledb/-/oracledb-5.5.0.tgz#0cf9af5d0c0815f74849ae9ed56aee823514d71b"
|
||||
integrity sha512-i5cPvMENpZP8nnqptB6l0pjiOyySj1IISkbM4Hr3yZEDdANo2eezarwZb9NQ8fTh5pRjmgpZdSyIbnn9N3AENw==
|
||||
oracledb@^6.6.0:
|
||||
version "6.6.0"
|
||||
resolved "https://registry.yarnpkg.com/oracledb/-/oracledb-6.6.0.tgz#bb40adbe81a84a1e544c48af9f120c61f030e936"
|
||||
integrity sha512-T3dx+o3j+tVN53wQyr4yGTmoPHLy+a2V8yb1T2PmWrsj3ZlSt2Yu1BgV2yTDqnmBZYpRi/I3yJXRCOHHD7PiyA==
|
||||
|
||||
os-tmpdir@~1.0.2:
|
||||
version "1.0.2"
|
||||
@@ -2322,6 +2407,11 @@ semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7:
|
||||
resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.1.tgz#60bfe090bf907a25aa8119a72b9f90ef7ca281b2"
|
||||
integrity sha512-f/vbBsu+fOiYt+lmwZV0rVwJScl46HppnOA1ZvIuBWKOTlllpyJ3bfVax76/OrhCH38dyxoDIA8K7uB963IYgA==
|
||||
|
||||
semver@^7.5.4:
|
||||
version "7.6.3"
|
||||
resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143"
|
||||
integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==
|
||||
|
||||
serialize-error@^7.0.1:
|
||||
version "7.0.1"
|
||||
resolved "https://registry.yarnpkg.com/serialize-error/-/serialize-error-7.0.1.tgz#f1360b0447f61ffb483ec4157c737fab7d778e18"
|
||||
|
||||
@@ -3,12 +3,10 @@ const fs = require('fs');
|
||||
let fillContent = '';
|
||||
|
||||
if (process.platform == 'win32') {
|
||||
fillContent += `content.msnodesqlv8 = () => require('msnodesqlv8');`;
|
||||
fillContent += `content.msnodesqlv8 = () => require('msnodesqlv8');\n`;
|
||||
}
|
||||
if (process.arch != 'arm64') {
|
||||
fillContent += `content.oracledb = () => require('oracledb');`;
|
||||
}
|
||||
fillContent += `content['better-sqlite3'] = () => require('better-sqlite3');`;
|
||||
fillContent += `content['better-sqlite3'] = () => require('better-sqlite3');\n`;
|
||||
fillContent += `content['oracledb'] = () => require('oracledb');\n`;
|
||||
|
||||
const getContent = empty => `
|
||||
// this file is generated automatically by script fillNativeModules.js, do not edit it manually
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
},
|
||||
"author": "Jan Prochazka",
|
||||
"license": "MIT",
|
||||
"license": "GPL-3.0",
|
||||
"scripts": {
|
||||
"wait:local": "cross-env DEVMODE=1 LOCALTEST=1 node wait.js",
|
||||
"wait:ci": "cross-env DEVMODE=1 CITEST=1 node wait.js",
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
global.DBGATE_TOOLS = require('dbgate-tools');
|
||||
global.DBGATE_PACKAGES = {
|
||||
'dbgate-tools': require('dbgate-tools'),
|
||||
'dbgate-sqltree': require('dbgate-sqltree'),
|
||||
};
|
||||
|
||||
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
|
||||
const crypto = require('crypto');
|
||||
|
||||
@@ -44,16 +48,18 @@ async function connect(engine, database) {
|
||||
}
|
||||
}
|
||||
|
||||
const testWrapper = body => async (label, ...other) => {
|
||||
const engine = other[other.length - 1];
|
||||
const driver = requireEngineDriver(engine.connection);
|
||||
const conn = await connect(engine, randomDbName());
|
||||
try {
|
||||
await body(conn, driver, ...other);
|
||||
} finally {
|
||||
await driver.close(conn);
|
||||
}
|
||||
};
|
||||
const testWrapper =
|
||||
body =>
|
||||
async (label, ...other) => {
|
||||
const engine = other[other.length - 1];
|
||||
const driver = requireEngineDriver(engine.connection);
|
||||
const conn = await connect(engine, randomDbName());
|
||||
try {
|
||||
await body(conn, driver, ...other);
|
||||
} finally {
|
||||
await driver.close(conn);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
randomDbName,
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
|
||||
const engines = require('./engines');
|
||||
const { extractConnection } = require('./tools');
|
||||
global.DBGATE_TOOLS = require('dbgate-tools');
|
||||
global.DBGATE_PACKAGES = {
|
||||
'dbgate-tools': require('dbgate-tools'),
|
||||
'dbgate-sqltree': require('dbgate-sqltree'),
|
||||
};
|
||||
|
||||
async function connectEngine(engine) {
|
||||
const connection = extractConnection(engine);
|
||||
|
||||
@@ -35,38 +35,40 @@ RIGHT=850
|
||||
|
||||
|
||||
|
||||
magick \
|
||||
\( \
|
||||
-size 1000x1000 -define gradient:direction=east 'gradient:#0050b3-#1890ff' \
|
||||
\( +clone -fill Black -colorize 100 \
|
||||
-fill White -stroke White -draw "arc $LEFT,750 $RIGHT,950 0,360" -draw "rectangle $LEFT,150 $RIGHT,850" \
|
||||
\) \
|
||||
-alpha off \
|
||||
-compose CopyOpacity -composite \
|
||||
\) \
|
||||
\( \
|
||||
-size 1000x1000 -define gradient:direction=east 'gradient:#096dd9-#40a9ff' \
|
||||
\( +clone -fill Black -colorize 100 \
|
||||
-fill White -draw "arc $LEFT,50 $RIGHT,250 0,360" \
|
||||
\) \
|
||||
-alpha off \
|
||||
-compose CopyOpacity -composite \
|
||||
\) \
|
||||
-compose Over -composite \
|
||||
-strokewidth $STROKE_WIDTH -stroke '#0050b3' -fill transparent \
|
||||
-draw "arc $LEFT,225 $RIGHT,425 0,180" \
|
||||
-draw "arc $LEFT,400 $RIGHT,600 0,180" \
|
||||
-draw "arc $LEFT,575 $RIGHT,775 0,180" \
|
||||
-draw "arc $LEFT,750 $RIGHT,950 0,180" \
|
||||
-draw "arc $LEFT,50 $RIGHT,250 0,360" \
|
||||
-draw "line $LEFT,150 $LEFT,850" \
|
||||
-draw "line $RIGHT,150 $RIGHT,850" \
|
||||
-fill '#fafafa' -stroke '#8c8c8c' -strokewidth 3 \
|
||||
-pointsize 800 -font './Mcbungus-Regular.ttf' \
|
||||
-gravity center \
|
||||
-draw 'text 0,100 "G"' \
|
||||
icon.png
|
||||
# magick \
|
||||
# \( \
|
||||
# -size 1000x1000 -define gradient:direction=east 'gradient:#0050b3-#1890ff' \
|
||||
# \( +clone -fill Black -colorize 100 \
|
||||
# -fill White -stroke White -draw "arc $LEFT,750 $RIGHT,950 0,360" -draw "rectangle $LEFT,150 $RIGHT,850" \
|
||||
# \) \
|
||||
# -alpha off \
|
||||
# -compose CopyOpacity -composite \
|
||||
# \) \
|
||||
# \( \
|
||||
# -size 1000x1000 -define gradient:direction=east 'gradient:#096dd9-#40a9ff' \
|
||||
# \( +clone -fill Black -colorize 100 \
|
||||
# -fill White -draw "arc $LEFT,50 $RIGHT,250 0,360" \
|
||||
# \) \
|
||||
# -alpha off \
|
||||
# -compose CopyOpacity -composite \
|
||||
# \) \
|
||||
# -compose Over -composite \
|
||||
# -strokewidth $STROKE_WIDTH -stroke '#0050b3' -fill transparent \
|
||||
# -draw "arc $LEFT,225 $RIGHT,425 0,180" \
|
||||
# -draw "arc $LEFT,400 $RIGHT,600 0,180" \
|
||||
# -draw "arc $LEFT,575 $RIGHT,775 0,180" \
|
||||
# -draw "arc $LEFT,750 $RIGHT,950 0,180" \
|
||||
# -draw "arc $LEFT,50 $RIGHT,250 0,360" \
|
||||
# -draw "line $LEFT,150 $LEFT,850" \
|
||||
# -draw "line $RIGHT,150 $RIGHT,850" \
|
||||
# -fill '#fafafa' -stroke '#8c8c8c' -strokewidth 3 \
|
||||
# -pointsize 800 -font './Mcbungus-Regular.ttf' \
|
||||
# -gravity center \
|
||||
# -draw 'text 0,100 "G"' \
|
||||
# icon.png
|
||||
|
||||
convert icon-input.png -background white -alpha remove -alpha off icon.png
|
||||
convert -size 1000x1000 xc:none -fill white -draw "circle 500,500 500,0" icon.png -compose SrcIn -composite icon.png
|
||||
|
||||
# magick \
|
||||
# \( \
|
||||
@@ -106,4 +108,6 @@ magick icon.png -resize 512x512! ../packages/web/public/logo512.png
|
||||
magick icon.png -define icon:auto-resize="256,128,96,64,48,32,16" ../packages/web/public/favicon.ico
|
||||
|
||||
convert icon.png -resize 800x800 -background transparent -gravity center -extent 1000x1000 iconmac.png
|
||||
magick composite iconmac.png macbg.png -resize 600x600! ../app/icon512-mac.png
|
||||
|
||||
convert macbg.png icon.png -compose SrcIn -composite -resize 600x600! ../app/icon512-mac.png
|
||||
# magick composite iconmac.png macbg.png -resize 600x600! ../app/icon512-mac.png
|
||||
|
||||
BIN
misc/icon-input.png
Normal file
|
After Width: | Height: | Size: 40 KiB |
BIN
misc/icon.png
|
Before Width: | Height: | Size: 134 KiB After Width: | Height: | Size: 111 KiB |
BIN
misc/iconmac.png
|
Before Width: | Height: | Size: 211 KiB After Width: | Height: | Size: 241 KiB |
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"private": true,
|
||||
"version": "5.2.9-beta.3",
|
||||
"version": "5.3.5-pro.15",
|
||||
"name": "dbgate-all",
|
||||
"workspaces": [
|
||||
"packages/*",
|
||||
@@ -20,6 +20,8 @@
|
||||
"start:api:singledb": "yarn workspace dbgate-api start:singledb | pino-pretty",
|
||||
"start:api:auth": "yarn workspace dbgate-api start:auth | pino-pretty",
|
||||
"start:api:dblogin": "yarn workspace dbgate-api start:dblogin | pino-pretty",
|
||||
"start:api:storage": "yarn workspace dbgate-api start:storage | pino-pretty",
|
||||
"sync:pro": "cd sync && yarn start",
|
||||
"start:web": "yarn workspace dbgate-web dev",
|
||||
"start:sqltree": "yarn workspace dbgate-sqltree start",
|
||||
"start:tools": "yarn workspace dbgate-tools start",
|
||||
@@ -35,6 +37,7 @@
|
||||
"build:web:docker": "yarn workspace dbgate-web build",
|
||||
"build:plugins:frontend": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn build:frontend",
|
||||
"build:plugins:frontend:watch": "workspaces-run --parallel --only=\"dbgate-plugin-*\" -- yarn build:frontend:watch",
|
||||
"storage-json": "dbmodel model-to-json storage-db packages/api/src/storageModel.js --commonjs",
|
||||
"plugins:copydist": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn copydist",
|
||||
"build:app:local": "yarn plugins:copydist && cd app && yarn build:local",
|
||||
"start:app:local": "cd app && yarn start:local",
|
||||
@@ -48,8 +51,8 @@
|
||||
"resetPackagedPlugins": "node resetPackagedPlugins",
|
||||
"prettier": "prettier --write packages/api/src && prettier --write packages/datalib/src && prettier --write packages/filterparser/src && prettier --write packages/sqltree/src && prettier --write packages/tools/src && prettier --write packages/types && prettier --write packages/web/src && prettier --write app/src",
|
||||
"copy:docker:build": "copyfiles packages/api/dist/* docker -f && copyfiles packages/web/public/* docker -u 2 && copyfiles \"packages/web/public/**/*\" docker -u 2 && copyfiles \"plugins/dist/**/*\" docker/plugins -u 2",
|
||||
"install:sqlite:docker": "cd docker && yarn init --yes && yarn add better-sqlite3 && cd ..",
|
||||
"prepare:docker": "yarn plugins:copydist && yarn build:web:docker && yarn build:api && yarn copy:docker:build && yarn install:sqlite:docker",
|
||||
"install:drivers:docker": "cd docker && yarn init --yes && yarn add better-sqlite3 && yarn add oracledb && cd ..",
|
||||
"prepare:docker": "yarn plugins:copydist && yarn build:web:docker && yarn build:api && yarn copy:docker:build && yarn install:drivers:docker",
|
||||
"start": "concurrently --kill-others-on-fail \"yarn start:api\" \"yarn start:web\"",
|
||||
"lib": "concurrently --kill-others-on-fail \"yarn start:sqltree\" \"yarn start:filterparser\" \"yarn start:datalib\" \"yarn start:tools\" \"yarn build:plugins:frontend:watch\"",
|
||||
"ts:api": "yarn workspace dbgate-api ts",
|
||||
|
||||
14
packages/api/env/portal/.env
vendored
@@ -1,6 +1,6 @@
|
||||
DEVMODE=1
|
||||
|
||||
CONNECTIONS=mysql,postgres,mongo,mongo2,mysqlssh,sqlite,relational
|
||||
CONNECTIONS=mysql,postgres,postgres1,mongo,mongo2,mysqlssh,sqlite,relational
|
||||
|
||||
LABEL_mysql=MySql localhost
|
||||
SERVER_mysql=localhost
|
||||
@@ -12,10 +12,18 @@ ENGINE_mysql=mysql@dbgate-plugin-mysql
|
||||
LABEL_postgres=Postgres localhost
|
||||
SERVER_postgres=localhost
|
||||
USER_postgres=postgres
|
||||
PASSWORD_postgres=test
|
||||
PORT_postgres=5433
|
||||
PASSWORD_postgres=Pwd2020Db
|
||||
PORT_postgres=5432
|
||||
ENGINE_postgres=postgres@dbgate-plugin-postgres
|
||||
|
||||
LABEL_postgres1=Postgres localhost test DB
|
||||
SERVER_postgres1=localhost
|
||||
USER_postgres1=postgres
|
||||
PASSWORD_postgres1=Pwd2020Db
|
||||
PORT_postgres1=5432
|
||||
ENGINE_postgres1=postgres@dbgate-plugin-postgres
|
||||
DATABASE_postgres1=test
|
||||
|
||||
LABEL_mongo=Mongo URL
|
||||
URL_mongo=mongodb://localhost:27017
|
||||
ENGINE_mongo=mongo@dbgate-plugin-mongo
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
},
|
||||
"author": "Jan Prochazka",
|
||||
"license": "MIT",
|
||||
"license": "GPL-3.0",
|
||||
"keywords": [
|
||||
"sql",
|
||||
"json",
|
||||
@@ -26,10 +26,10 @@
|
||||
"compare-versions": "^3.6.0",
|
||||
"cors": "^2.8.5",
|
||||
"cross-env": "^6.0.3",
|
||||
"dbgate-query-splitter": "^4.9.3",
|
||||
"dbgate-datalib": "^5.0.0-alpha.1",
|
||||
"dbgate-query-splitter": "^4.10.3",
|
||||
"dbgate-sqltree": "^5.0.0-alpha.1",
|
||||
"dbgate-tools": "^5.0.0-alpha.1",
|
||||
"dbgate-datalib": "^5.0.0-alpha.1",
|
||||
"debug": "^4.3.4",
|
||||
"diff": "^5.0.0",
|
||||
"diff2html": "^3.4.13",
|
||||
@@ -52,7 +52,7 @@
|
||||
"ncp": "^2.0.0",
|
||||
"node-cron": "^2.0.3",
|
||||
"on-finished": "^2.4.1",
|
||||
"pinomin": "^1.0.1",
|
||||
"pinomin": "^1.0.4",
|
||||
"portfinder": "^1.0.28",
|
||||
"rimraf": "^3.0.0",
|
||||
"simple-encryptor": "^4.0.0",
|
||||
@@ -60,12 +60,13 @@
|
||||
"tar": "^6.0.5"
|
||||
},
|
||||
"scripts": {
|
||||
"start": "env-cmd -f .env.local node src/index.js --listen-api",
|
||||
"start": "env-cmd -f .env node src/index.js --listen-api",
|
||||
"start:portal": "env-cmd -f env/portal/.env node src/index.js --listen-api",
|
||||
"start:singledb": "env-cmd -f env/singledb/.env node src/index.js --listen-api",
|
||||
"start:auth": "env-cmd -f env/auth/.env node src/index.js --listen-api",
|
||||
"start:dblogin": "env-cmd -f env/dblogin/.env node src/index.js --listen-api",
|
||||
"start:filedb": "env-cmd node src/index.js /home/jena/test/chinook/Chinook.db --listen-api",
|
||||
"start:storage": "env-cmd -f env/storage/.env node src/index.js --listen-api",
|
||||
"start:singleconn": "env-cmd node src/index.js --server localhost --user root --port 3307 --engine mysql@dbgate-plugin-mysql --password test --listen-api",
|
||||
"ts": "tsc",
|
||||
"build": "webpack"
|
||||
@@ -84,6 +85,6 @@
|
||||
"optionalDependencies": {
|
||||
"better-sqlite3": "9.6.0",
|
||||
"msnodesqlv8": "^4.2.1",
|
||||
"oracledb": "^5.5.0"
|
||||
"oracledb": "^6.6.0"
|
||||
}
|
||||
}
|
||||
|
||||
16
packages/api/src/auth/authCommon.js
Normal file
@@ -0,0 +1,16 @@
|
||||
const crypto = require('crypto');
|
||||
|
||||
const tokenSecret = crypto.randomUUID();
|
||||
|
||||
function getTokenLifetime() {
|
||||
return process.env.TOKEN_LIFETIME || '1d';
|
||||
}
|
||||
|
||||
function getTokenSecret() {
|
||||
return tokenSecret;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getTokenLifetime,
|
||||
getTokenSecret,
|
||||
};
|
||||
322
packages/api/src/auth/authProvider.js
Normal file
@@ -0,0 +1,322 @@
|
||||
const { getTokenSecret, getTokenLifetime } = require('./authCommon');
|
||||
const _ = require('lodash');
|
||||
const axios = require('axios');
|
||||
const { getLogger, getPredefinedPermissions } = require('dbgate-tools');
|
||||
|
||||
const AD = require('activedirectory2').promiseWrapper;
|
||||
const jwt = require('jsonwebtoken');
|
||||
|
||||
const logger = getLogger('authProvider');
|
||||
|
||||
class AuthProviderBase {
|
||||
amoid = 'none';
|
||||
|
||||
async login(login, password, options = undefined) {
|
||||
return {
|
||||
accessToken: jwt.sign(
|
||||
{
|
||||
amoid: this.amoid,
|
||||
},
|
||||
getTokenSecret(),
|
||||
{ expiresIn: getTokenLifetime() }
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
oauthToken(params) {
|
||||
return {};
|
||||
}
|
||||
|
||||
getCurrentLogin(req) {
|
||||
const login = req?.user?.login ?? req?.auth?.user ?? null;
|
||||
return login;
|
||||
}
|
||||
|
||||
isUserLoggedIn(req) {
|
||||
return !!req?.user || !!req?.auth;
|
||||
}
|
||||
|
||||
getCurrentPermissions(req) {
|
||||
const login = this.getCurrentLogin(req);
|
||||
const permissions = process.env[`LOGIN_PERMISSIONS_${login}`];
|
||||
return permissions || process.env.PERMISSIONS;
|
||||
}
|
||||
|
||||
getLoginPageConnections() {
|
||||
return null;
|
||||
}
|
||||
|
||||
getSingleConnectionId(req) {
|
||||
return null;
|
||||
}
|
||||
|
||||
toJson() {
|
||||
return {
|
||||
amoid: this.amoid,
|
||||
workflowType: 'anonymous',
|
||||
name: 'Anonymous',
|
||||
};
|
||||
}
|
||||
|
||||
async redirect({ state }) {
|
||||
return {
|
||||
status: 'error',
|
||||
};
|
||||
}
|
||||
|
||||
async getLogoutUrl() {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
class OAuthProvider extends AuthProviderBase {
|
||||
amoid = 'oauth';
|
||||
|
||||
async oauthToken(params) {
|
||||
const { redirectUri, code } = params;
|
||||
|
||||
const scopeParam = process.env.OAUTH_SCOPE ? `&scope=${process.env.OAUTH_SCOPE}` : '';
|
||||
const resp = await axios.default.post(
|
||||
`${process.env.OAUTH_TOKEN}`,
|
||||
`grant_type=authorization_code&code=${encodeURIComponent(code)}&redirect_uri=${encodeURIComponent(
|
||||
redirectUri
|
||||
)}&client_id=${process.env.OAUTH_CLIENT_ID}&client_secret=${process.env.OAUTH_CLIENT_SECRET}${scopeParam}`
|
||||
);
|
||||
|
||||
const { access_token, refresh_token } = resp.data;
|
||||
|
||||
const payload = jwt.decode(access_token);
|
||||
|
||||
logger.info({ payload }, 'User payload returned from OAUTH');
|
||||
|
||||
const login =
|
||||
process.env.OAUTH_LOGIN_FIELD && payload && payload[process.env.OAUTH_LOGIN_FIELD]
|
||||
? payload[process.env.OAUTH_LOGIN_FIELD]
|
||||
: 'oauth';
|
||||
|
||||
if (
|
||||
process.env.OAUTH_ALLOWED_LOGINS &&
|
||||
!process.env.OAUTH_ALLOWED_LOGINS.split(',').find(x => x.toLowerCase().trim() == login.toLowerCase().trim())
|
||||
) {
|
||||
return { error: `Username ${login} not allowed to log in` };
|
||||
}
|
||||
|
||||
const groups =
|
||||
process.env.OAUTH_GROUP_FIELD && payload && payload[process.env.OAUTH_GROUP_FIELD]
|
||||
? payload[process.env.OAUTH_GROUP_FIELD]
|
||||
: [];
|
||||
|
||||
const allowedGroups = process.env.OAUTH_ALLOWED_GROUPS
|
||||
? process.env.OAUTH_ALLOWED_GROUPS.split(',').map(group => group.toLowerCase().trim())
|
||||
: [];
|
||||
|
||||
if (process.env.OAUTH_ALLOWED_GROUPS && !groups.some(group => allowedGroups.includes(group.toLowerCase().trim()))) {
|
||||
return { error: `Username ${login} does not belong to an allowed group` };
|
||||
}
|
||||
|
||||
if (access_token) {
|
||||
return {
|
||||
accessToken: jwt.sign({ login }, getTokenSecret(), { expiresIn: getTokenLifetime() }),
|
||||
};
|
||||
}
|
||||
|
||||
return { error: 'Token not found' };
|
||||
}
|
||||
|
||||
async getLogoutUrl() {
|
||||
return process.env.OAUTH_LOGOUT;
|
||||
}
|
||||
|
||||
toJson() {
|
||||
return {
|
||||
...super.toJson(),
|
||||
workflowType: 'redirect',
|
||||
name: 'OAuth 2.0',
|
||||
};
|
||||
}
|
||||
|
||||
redirect({ state, redirectUri }) {
|
||||
const scopeParam = process.env.OAUTH_SCOPE ? `&scope=${process.env.OAUTH_SCOPE}` : '';
|
||||
return {
|
||||
status: 'ok',
|
||||
uri: `${process.env.OAUTH_AUTH}?client_id=${
|
||||
process.env.OAUTH_CLIENT_ID
|
||||
}&response_type=code&redirect_uri=${encodeURIComponent(redirectUri)}&state=${encodeURIComponent(
|
||||
state
|
||||
)}${scopeParam}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
class ADProvider extends AuthProviderBase {
|
||||
amoid = 'ad';
|
||||
|
||||
async login(login, password, options = undefined) {
|
||||
const adConfig = {
|
||||
url: process.env.AD_URL,
|
||||
baseDN: process.env.AD_BASEDN,
|
||||
username: process.env.AD_USERNAME,
|
||||
password: process.env.AD_PASSWORD,
|
||||
};
|
||||
const ad = new AD(adConfig);
|
||||
try {
|
||||
const res = await ad.authenticate(login, password);
|
||||
if (!res) {
|
||||
return { error: 'Login failed' };
|
||||
}
|
||||
if (
|
||||
process.env.AD_ALLOWED_LOGINS &&
|
||||
!process.env.AD_ALLOWED_LOGINS.split(',').find(x => x.toLowerCase().trim() == login.toLowerCase().trim())
|
||||
) {
|
||||
return { error: `Username ${login} not allowed to log in` };
|
||||
}
|
||||
return {
|
||||
accessToken: jwt.sign(
|
||||
{
|
||||
amoid: this.amoid,
|
||||
login,
|
||||
},
|
||||
getTokenSecret(),
|
||||
{ expiresIn: getTokenLifetime() }
|
||||
),
|
||||
};
|
||||
} catch (e) {
|
||||
return { error: 'Login failed' };
|
||||
}
|
||||
}
|
||||
|
||||
toJson() {
|
||||
return {
|
||||
...super.toJson(),
|
||||
workflowType: 'credentials',
|
||||
name: 'Active Directory',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
class LoginsProvider extends AuthProviderBase {
|
||||
amoid = 'logins';
|
||||
|
||||
async login(login, password, options = undefined) {
|
||||
if (password == process.env[`LOGIN_PASSWORD_${login}`]) {
|
||||
return {
|
||||
accessToken: jwt.sign(
|
||||
{
|
||||
amoid: this.amoid,
|
||||
login,
|
||||
},
|
||||
getTokenSecret(),
|
||||
{ expiresIn: getTokenLifetime() }
|
||||
),
|
||||
};
|
||||
}
|
||||
return { error: 'Invalid credentials' };
|
||||
}
|
||||
|
||||
toJson() {
|
||||
return {
|
||||
...super.toJson(),
|
||||
workflowType: 'credentials',
|
||||
name: 'Login & Password',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
class DenyAllProvider extends AuthProviderBase {
|
||||
amoid = 'deny';
|
||||
|
||||
async login(login, password, options = undefined) {
|
||||
return { error: 'Login not allowed' };
|
||||
}
|
||||
|
||||
toJson() {
|
||||
return {
|
||||
...super.toJson(),
|
||||
workflowType: 'credentials',
|
||||
name: 'Deny all',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function hasEnvLogins() {
|
||||
if (process.env.LOGIN && process.env.PASSWORD) {
|
||||
return true;
|
||||
}
|
||||
for (const key in process.env) {
|
||||
if (key.startsWith('LOGIN_PASSWORD_')) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function detectEnvAuthProvider() {
|
||||
if (process.env.AUTH_PROVIDER) {
|
||||
return process.env.AUTH_PROVIDER;
|
||||
}
|
||||
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
return 'denyall';
|
||||
}
|
||||
if (process.env.OAUTH_AUTH) {
|
||||
return 'oauth';
|
||||
}
|
||||
if (process.env.AD_URL) {
|
||||
return 'ad';
|
||||
}
|
||||
if (hasEnvLogins()) {
|
||||
return 'logins';
|
||||
}
|
||||
return 'none';
|
||||
}
|
||||
|
||||
function createEnvAuthProvider() {
|
||||
const authProvider = detectEnvAuthProvider();
|
||||
switch (authProvider) {
|
||||
case 'oauth':
|
||||
return new OAuthProvider();
|
||||
case 'ad':
|
||||
return new ADProvider();
|
||||
case 'logins':
|
||||
return new LoginsProvider();
|
||||
case 'denyall':
|
||||
return new DenyAllProvider();
|
||||
default:
|
||||
return new AuthProviderBase();
|
||||
}
|
||||
}
|
||||
|
||||
let defaultAuthProvider = createEnvAuthProvider();
|
||||
let authProviders = [defaultAuthProvider];
|
||||
|
||||
function getAuthProviders() {
|
||||
return authProviders;
|
||||
}
|
||||
|
||||
function getAuthProviderById(amoid) {
|
||||
return authProviders.find(x => x.amoid == amoid);
|
||||
}
|
||||
|
||||
function getDefaultAuthProvider() {
|
||||
return defaultAuthProvider;
|
||||
}
|
||||
|
||||
function getAuthProviderFromReq(req) {
|
||||
const authProviderId = req?.auth?.amoid || req?.user?.amoid;
|
||||
return getAuthProviderById(authProviderId) ?? getDefaultAuthProvider();
|
||||
}
|
||||
|
||||
function setAuthProviders(value, defaultProvider = null) {
|
||||
authProviders = value;
|
||||
defaultAuthProvider = defaultProvider || value[0];
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
AuthProviderBase,
|
||||
detectEnvAuthProvider,
|
||||
getAuthProviders,
|
||||
getDefaultAuthProvider,
|
||||
setAuthProviders,
|
||||
getAuthProviderById,
|
||||
getAuthProviderFromReq,
|
||||
};
|
||||
@@ -1,24 +1,20 @@
|
||||
const axios = require('axios');
|
||||
const jwt = require('jsonwebtoken');
|
||||
const getExpressPath = require('../utility/getExpressPath');
|
||||
const { getLogins } = require('../utility/hasPermission');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const AD = require('activedirectory2').promiseWrapper;
|
||||
const crypto = require('crypto');
|
||||
const { getTokenSecret, getTokenLifetime } = require('../auth/authCommon');
|
||||
const {
|
||||
getAuthProviderFromReq,
|
||||
getAuthProviders,
|
||||
getDefaultAuthProvider,
|
||||
getAuthProviderById,
|
||||
} = require('../auth/authProvider');
|
||||
const storage = require('./storage');
|
||||
|
||||
const logger = getLogger('auth');
|
||||
|
||||
const tokenSecret = crypto.randomUUID();
|
||||
|
||||
function shouldAuthorizeApi() {
|
||||
const logins = getLogins();
|
||||
return !!process.env.OAUTH_AUTH || !!process.env.AD_URL || (!!logins && !process.env.BASIC_AUTH);
|
||||
}
|
||||
|
||||
function getTokenLifetime() {
|
||||
return process.env.TOKEN_LIFETIME || '1d';
|
||||
}
|
||||
|
||||
function unauthorizedResponse(req, res, text) {
|
||||
// if (req.path == getExpressPath('/config/get-settings')) {
|
||||
// return res.json({});
|
||||
@@ -30,11 +26,32 @@ function unauthorizedResponse(req, res, text) {
|
||||
}
|
||||
|
||||
function authMiddleware(req, res, next) {
|
||||
const SKIP_AUTH_PATHS = ['/config/get', '/auth/oauth-token', '/auth/login', '/stream'];
|
||||
const SKIP_AUTH_PATHS = [
|
||||
'/config/get',
|
||||
'/config/logout',
|
||||
'/config/get-settings',
|
||||
'/config/save-license-key',
|
||||
'/auth/oauth-token',
|
||||
'/auth/login',
|
||||
'/auth/redirect',
|
||||
'/stream',
|
||||
'storage/get-connections-for-login-page',
|
||||
'auth/get-providers',
|
||||
'/connections/dblogin-web',
|
||||
'/connections/dblogin-app',
|
||||
'/connections/dblogin-auth',
|
||||
'/connections/dblogin-auth-token',
|
||||
];
|
||||
|
||||
if (!shouldAuthorizeApi()) {
|
||||
// console.log('********************* getAuthProvider()', getAuthProvider());
|
||||
|
||||
// const isAdminPage = req.headers['x-is-admin-page'] == 'true';
|
||||
|
||||
if (process.env.BASIC_AUTH) {
|
||||
// API is not authorized for basic auth
|
||||
return next();
|
||||
}
|
||||
|
||||
let skipAuth = !!SKIP_AUTH_PATHS.find(x => req.path == getExpressPath(x));
|
||||
|
||||
const authHeader = req.headers.authorization;
|
||||
@@ -46,7 +63,7 @@ function authMiddleware(req, res, next) {
|
||||
}
|
||||
const token = authHeader.split(' ')[1];
|
||||
try {
|
||||
const decoded = jwt.verify(token, tokenSecret);
|
||||
const decoded = jwt.verify(token, getTokenSecret());
|
||||
req.user = decoded;
|
||||
return next();
|
||||
} catch (err) {
|
||||
@@ -63,106 +80,49 @@ function authMiddleware(req, res, next) {
|
||||
module.exports = {
|
||||
oauthToken_meta: true,
|
||||
async oauthToken(params) {
|
||||
const { redirectUri, code } = params;
|
||||
|
||||
const scopeParam = process.env.OAUTH_SCOPE ? `&scope=${process.env.OAUTH_SCOPE}` : '';
|
||||
const resp = await axios.default.post(
|
||||
`${process.env.OAUTH_TOKEN}`,
|
||||
`grant_type=authorization_code&code=${encodeURIComponent(code)}&redirect_uri=${encodeURIComponent(
|
||||
redirectUri
|
||||
)}&client_id=${process.env.OAUTH_CLIENT_ID}&client_secret=${process.env.OAUTH_CLIENT_SECRET}${scopeParam}`
|
||||
);
|
||||
|
||||
const { access_token, refresh_token } = resp.data;
|
||||
|
||||
const payload = jwt.decode(access_token);
|
||||
|
||||
logger.info({ payload }, 'User payload returned from OAUTH');
|
||||
|
||||
const login =
|
||||
process.env.OAUTH_LOGIN_FIELD && payload && payload[process.env.OAUTH_LOGIN_FIELD]
|
||||
? payload[process.env.OAUTH_LOGIN_FIELD]
|
||||
: 'oauth';
|
||||
|
||||
if (
|
||||
process.env.OAUTH_ALLOWED_LOGINS &&
|
||||
!process.env.OAUTH_ALLOWED_LOGINS.split(',').find(x => x.toLowerCase().trim() == login.toLowerCase().trim())
|
||||
) {
|
||||
return { error: `Username ${login} not allowed to log in` };
|
||||
}
|
||||
|
||||
const groups =
|
||||
process.env.OAUTH_GROUP_FIELD && payload && payload[process.env.OAUTH_GROUP_FIELD]
|
||||
? payload[process.env.OAUTH_GROUP_FIELD]
|
||||
: [];
|
||||
|
||||
const allowedGroups =
|
||||
process.env.OAUTH_ALLOWED_GROUPS
|
||||
? process.env.OAUTH_ALLOWED_GROUPS.split(',').map(group => group.toLowerCase().trim())
|
||||
: [];
|
||||
|
||||
if (
|
||||
process.env.OAUTH_ALLOWED_GROUPS &&
|
||||
!groups.some(group => allowedGroups.includes(group.toLowerCase().trim()))
|
||||
) {
|
||||
return { error: `Username ${login} does not belong to an allowed group` };
|
||||
}
|
||||
|
||||
if (access_token) {
|
||||
return {
|
||||
accessToken: jwt.sign({ login }, tokenSecret, { expiresIn: getTokenLifetime() }),
|
||||
};
|
||||
}
|
||||
|
||||
return { error: 'Token not found' };
|
||||
const { amoid } = params;
|
||||
return getAuthProviderById(amoid).oauthToken(params);
|
||||
},
|
||||
login_meta: true,
|
||||
async login(params) {
|
||||
const { login, password } = params;
|
||||
const { amoid, login, password, isAdminPage } = params;
|
||||
|
||||
if (process.env.AD_URL) {
|
||||
const adConfig = {
|
||||
url: process.env.AD_URL,
|
||||
baseDN: process.env.AD_BASEDN,
|
||||
username: process.env.AD_USERNAME,
|
||||
password: process.env.AD_PASSOWRD,
|
||||
};
|
||||
const ad = new AD(adConfig);
|
||||
try {
|
||||
const res = await ad.authenticate(login, password);
|
||||
if (!res) {
|
||||
return { error: 'Login failed' };
|
||||
}
|
||||
if (
|
||||
process.env.AD_ALLOWED_LOGINS &&
|
||||
!process.env.AD_ALLOWED_LOGINS.split(',').find(x => x.toLowerCase().trim() == login.toLowerCase().trim())
|
||||
) {
|
||||
return { error: `Username ${login} not allowed to log in` };
|
||||
}
|
||||
if (isAdminPage) {
|
||||
if (process.env.ADMIN_PASSWORD && process.env.ADMIN_PASSWORD == password) {
|
||||
return {
|
||||
accessToken: jwt.sign({ login }, tokenSecret, { expiresIn: getTokenLifetime() }),
|
||||
};
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Failed active directory authentization');
|
||||
return {
|
||||
error: err.message,
|
||||
accessToken: jwt.sign(
|
||||
{
|
||||
login: 'superadmin',
|
||||
permissions: await storage.loadSuperadminPermissions(),
|
||||
roleId: -3,
|
||||
},
|
||||
getTokenSecret(),
|
||||
{
|
||||
expiresIn: getTokenLifetime(),
|
||||
}
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
return { error: 'Login failed' };
|
||||
}
|
||||
|
||||
const logins = getLogins();
|
||||
if (!logins) {
|
||||
return { error: 'Logins not configured' };
|
||||
}
|
||||
const foundLogin = logins.find(x => x.login == login);
|
||||
if (foundLogin && foundLogin.password && foundLogin.password == password) {
|
||||
return {
|
||||
accessToken: jwt.sign({ login }, tokenSecret, { expiresIn: getTokenLifetime() }),
|
||||
};
|
||||
}
|
||||
return { error: 'Invalid credentials' };
|
||||
return getAuthProviderById(amoid).login(login, password);
|
||||
},
|
||||
|
||||
getProviders_meta: true,
|
||||
getProviders() {
|
||||
return {
|
||||
providers: getAuthProviders().map(x => x.toJson()),
|
||||
default: getDefaultAuthProvider()?.amoid,
|
||||
};
|
||||
},
|
||||
|
||||
redirect_meta: true,
|
||||
async redirect(params) {
|
||||
const { amoid } = params;
|
||||
return getAuthProviderById(amoid).redirect(params);
|
||||
},
|
||||
|
||||
authMiddleware,
|
||||
shouldAuthorizeApi,
|
||||
};
|
||||
|
||||
@@ -3,7 +3,7 @@ const os = require('os');
|
||||
const path = require('path');
|
||||
const axios = require('axios');
|
||||
const { datadir, getLogsFilePath } = require('../utility/directories');
|
||||
const { hasPermission, getLogins } = require('../utility/hasPermission');
|
||||
const { hasPermission } = require('../utility/hasPermission');
|
||||
const socket = require('../utility/socket');
|
||||
const _ = require('lodash');
|
||||
const AsyncLock = require('async-lock');
|
||||
@@ -11,6 +11,9 @@ const AsyncLock = require('async-lock');
|
||||
const currentVersion = require('../currentVersion');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const connections = require('../controllers/connections');
|
||||
const { getAuthProviderFromReq } = require('../auth/authProvider');
|
||||
const { checkLicense, checkLicenseKey } = require('../utility/checkLicense');
|
||||
const storage = require('./storage');
|
||||
|
||||
const lock = new AsyncLock();
|
||||
|
||||
@@ -27,27 +30,51 @@ module.exports = {
|
||||
|
||||
get_meta: true,
|
||||
async get(_params, req) {
|
||||
const logins = getLogins();
|
||||
const loginName =
|
||||
req && req.user && req.user.login ? req.user.login : req && req.auth && req.auth.user ? req.auth.user : null;
|
||||
const login = logins && loginName ? logins.find(x => x.login == loginName) : null;
|
||||
const permissions = login ? login.permissions : process.env.PERMISSIONS;
|
||||
const authProvider = getAuthProviderFromReq(req);
|
||||
const login = authProvider.getCurrentLogin(req);
|
||||
const permissions = authProvider.getCurrentPermissions(req);
|
||||
const isUserLoggedIn = authProvider.isUserLoggedIn(req);
|
||||
|
||||
const singleConid = authProvider.getSingleConnectionId(req);
|
||||
|
||||
const singleConnection = singleConid
|
||||
? await connections.getCore({ conid: singleConid })
|
||||
: connections.singleConnection;
|
||||
|
||||
let configurationError = null;
|
||||
if (process.env.STORAGE_DATABASE && process.env.BASIC_AUTH) {
|
||||
configurationError =
|
||||
'Basic authentization is not allowed, when using storage. Cannot use both STORAGE_DATABASE and BASIC_AUTH';
|
||||
}
|
||||
|
||||
const checkedLicense = await checkLicense();
|
||||
const isLicenseValid = checkedLicense?.status == 'ok';
|
||||
|
||||
return {
|
||||
runAsPortal: !!connections.portalConnections,
|
||||
singleDbConnection: connections.singleDbConnection,
|
||||
singleConnection: connections.singleConnection,
|
||||
singleConnection: singleConnection,
|
||||
isUserLoggedIn,
|
||||
// hideAppEditor: !!process.env.HIDE_APP_EDITOR,
|
||||
allowShellConnection: platformInfo.allowShellConnection,
|
||||
allowShellScripting: platformInfo.allowShellScripting,
|
||||
isDocker: platformInfo.isDocker,
|
||||
isElectron: platformInfo.isElectron,
|
||||
isLicenseValid,
|
||||
checkedLicense,
|
||||
configurationError,
|
||||
logoutUrl: await authProvider.getLogoutUrl(),
|
||||
permissions,
|
||||
login,
|
||||
oauth: process.env.OAUTH_AUTH,
|
||||
oauthClient: process.env.OAUTH_CLIENT_ID,
|
||||
oauthScope: process.env.OAUTH_SCOPE,
|
||||
oauthLogout: process.env.OAUTH_LOGOUT,
|
||||
isLoginForm: !!process.env.AD_URL || (!!logins && !process.env.BASIC_AUTH),
|
||||
// ...additionalConfigProps,
|
||||
isBasicAuth: !!process.env.BASIC_AUTH,
|
||||
isAdminLoginForm: !!(
|
||||
process.env.STORAGE_DATABASE &&
|
||||
process.env.ADMIN_PASSWORD &&
|
||||
!process.env.BASIC_AUTH &&
|
||||
checkedLicense?.type == 'premium'
|
||||
),
|
||||
storageDatabase: process.env.STORAGE_DATABASE,
|
||||
logsFilePath: getLogsFilePath(),
|
||||
connectionsFilePath: path.join(datadir(), 'connections.jsonl'),
|
||||
...currentVersion,
|
||||
@@ -75,6 +102,12 @@ module.exports = {
|
||||
return res;
|
||||
},
|
||||
|
||||
deleteSettings_meta: true,
|
||||
async deleteSettings() {
|
||||
await fs.unlink(path.join(datadir(), 'settings.json'));
|
||||
return true;
|
||||
},
|
||||
|
||||
fillMissingSettings(value) {
|
||||
const res = {
|
||||
...value,
|
||||
@@ -97,12 +130,39 @@ module.exports = {
|
||||
async loadSettings() {
|
||||
try {
|
||||
const settingsText = await fs.readFile(path.join(datadir(), 'settings.json'), { encoding: 'utf-8' });
|
||||
return this.fillMissingSettings(JSON.parse(settingsText));
|
||||
return {
|
||||
...this.fillMissingSettings(JSON.parse(settingsText)),
|
||||
'other.licenseKey': platformInfo.isElectron ? await this.loadLicenseKey() : undefined,
|
||||
};
|
||||
} catch (err) {
|
||||
return this.fillMissingSettings({});
|
||||
}
|
||||
},
|
||||
|
||||
async loadLicenseKey() {
|
||||
try {
|
||||
const licenseKey = await fs.readFile(path.join(datadir(), 'license.key'), { encoding: 'utf-8' });
|
||||
return licenseKey;
|
||||
} catch (err) {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
|
||||
saveLicenseKey_meta: true,
|
||||
async saveLicenseKey({ licenseKey }) {
|
||||
try {
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
await storage.writeConfig({ group: 'license', config: { licenseKey } });
|
||||
// await storageWriteConfig('license', { licenseKey });
|
||||
} else {
|
||||
await fs.writeFile(path.join(datadir(), 'license.key'), licenseKey);
|
||||
}
|
||||
socket.emitChanged(`config-changed`);
|
||||
} catch (err) {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
|
||||
updateSettings_meta: true,
|
||||
async updateSettings(values, req) {
|
||||
if (!hasPermission(`settings/change`, req)) return false;
|
||||
@@ -112,10 +172,16 @@ module.exports = {
|
||||
try {
|
||||
const updated = {
|
||||
...currentValue,
|
||||
...values,
|
||||
..._.omit(values, ['other.licenseKey']),
|
||||
};
|
||||
await fs.writeFile(path.join(datadir(), 'settings.json'), JSON.stringify(updated, undefined, 2));
|
||||
// this.settingsValue = updated;
|
||||
|
||||
if (currentValue['other.licenseKey'] != values['other.licenseKey']) {
|
||||
await this.saveLicenseKey({ licenseKey: values['other.licenseKey'] });
|
||||
socket.emitChanged(`config-changed`);
|
||||
}
|
||||
|
||||
socket.emitChanged(`settings-changed`);
|
||||
return updated;
|
||||
} catch (err) {
|
||||
@@ -130,4 +196,10 @@ module.exports = {
|
||||
const resp = await axios.default.get('https://raw.githubusercontent.com/dbgate/dbgate/master/CHANGELOG.md');
|
||||
return resp.data;
|
||||
},
|
||||
|
||||
checkLicense_meta: true,
|
||||
async checkLicense({ licenseKey }) {
|
||||
const resp = await checkLicenseKey(licenseKey);
|
||||
return resp;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -16,6 +16,9 @@ const { safeJsonParse, getLogger } = require('dbgate-tools');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const { connectionHasPermission, testConnectionPermission } = require('../utility/hasPermission');
|
||||
const pipeForkLogs = require('../utility/pipeForkLogs');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const { getAuthProviderById } = require('../auth/authProvider');
|
||||
const { startTokenChecking } = require('../utility/authProxy');
|
||||
|
||||
const logger = getLogger('connections');
|
||||
|
||||
@@ -61,6 +64,7 @@ function getPortalCollections() {
|
||||
useDatabaseUrl: !!process.env[`URL_${id}`],
|
||||
databaseFile: process.env[`FILE_${id}`],
|
||||
socketPath: process.env[`SOCKET_PATH_${id}`],
|
||||
serviceName: process.env[`SERVICE_NAME_${id}`],
|
||||
authType: process.env[`AUTH_TYPE_${id}`] || (process.env[`SOCKET_PATH_${id}`] ? 'socket' : undefined),
|
||||
defaultDatabase:
|
||||
process.env[`DATABASE_${id}`] ||
|
||||
@@ -69,6 +73,8 @@ function getPortalCollections() {
|
||||
displayName: process.env[`LABEL_${id}`],
|
||||
isReadOnly: process.env[`READONLY_${id}`],
|
||||
databases: process.env[`DBCONFIG_${id}`] ? safeJsonParse(process.env[`DBCONFIG_${id}`]) : null,
|
||||
allowedDatabases: process.env[`ALLOWED_DATABASES_${id}`]?.replace(/\|/g, '\n'),
|
||||
allowedDatabasesRegex: process.env[`ALLOWED_DATABASES_REGEX_${id}`],
|
||||
parent: process.env[`PARENT_${id}`] || undefined,
|
||||
|
||||
// SSH tunnel
|
||||
@@ -198,6 +204,12 @@ module.exports = {
|
||||
|
||||
list_meta: true,
|
||||
async list(_params, req) {
|
||||
const storage = require('./storage');
|
||||
|
||||
const storageConnections = await storage.connections(req);
|
||||
if (storageConnections) {
|
||||
return storageConnections;
|
||||
}
|
||||
if (portalConnections) {
|
||||
if (platformInfo.allowShellConnection) return portalConnections;
|
||||
return portalConnections.map(maskConnection).filter(x => connectionHasPermission(x, req));
|
||||
@@ -235,14 +247,16 @@ module.exports = {
|
||||
},
|
||||
|
||||
saveVolatile_meta: true,
|
||||
async saveVolatile({ conid, user, password, test }) {
|
||||
async saveVolatile({ conid, user = undefined, password = undefined, accessToken = undefined, test = false }) {
|
||||
const old = await this.getCore({ conid });
|
||||
const res = {
|
||||
...old,
|
||||
_id: crypto.randomUUID(),
|
||||
password,
|
||||
accessToken,
|
||||
passwordMode: undefined,
|
||||
unsaved: true,
|
||||
useRedirectDbLogin: false,
|
||||
};
|
||||
if (old.passwordMode == 'askUser') {
|
||||
res.user = user;
|
||||
@@ -335,6 +349,14 @@ module.exports = {
|
||||
if (volatile) {
|
||||
return volatile;
|
||||
}
|
||||
|
||||
const storage = require('./storage');
|
||||
|
||||
const storageConnection = await storage.getConnection({ conid });
|
||||
if (storageConnection) {
|
||||
return storageConnection;
|
||||
}
|
||||
|
||||
if (portalConnections) {
|
||||
const res = portalConnections.find(x => x._id == conid) || null;
|
||||
return mask && !platformInfo.allowShellConnection ? maskConnection(res) : res;
|
||||
@@ -364,4 +386,95 @@ module.exports = {
|
||||
});
|
||||
return res;
|
||||
},
|
||||
|
||||
dbloginWeb_meta: {
|
||||
raw: true,
|
||||
method: 'get',
|
||||
},
|
||||
async dbloginWeb(req, res) {
|
||||
const { conid, state, redirectUri } = req.query;
|
||||
const connection = await this.getCore({ conid });
|
||||
const driver = requireEngineDriver(connection);
|
||||
const authResp = await driver.getRedirectAuthUrl(connection, {
|
||||
redirectUri,
|
||||
state,
|
||||
client: 'web',
|
||||
});
|
||||
res.redirect(authResp.url);
|
||||
},
|
||||
|
||||
dbloginApp_meta: true,
|
||||
async dbloginApp({ conid, state }) {
|
||||
const connection = await this.getCore({ conid });
|
||||
const driver = requireEngineDriver(connection);
|
||||
const resp = await driver.getRedirectAuthUrl(connection, {
|
||||
state,
|
||||
client: 'app',
|
||||
});
|
||||
startTokenChecking(resp.sid, async token => {
|
||||
const volatile = await this.saveVolatile({ conid, accessToken: token });
|
||||
socket.emit('got-volatile-token', { savedConId: conid, volatileConId: volatile._id });
|
||||
});
|
||||
return resp;
|
||||
},
|
||||
|
||||
dbloginToken_meta: true,
|
||||
async dbloginToken({ code, conid, strmid, redirectUri, sid }) {
|
||||
try {
|
||||
const connection = await this.getCore({ conid });
|
||||
const driver = requireEngineDriver(connection);
|
||||
const accessToken = await driver.getAuthTokenFromCode(connection, { sid, code, redirectUri });
|
||||
const volatile = await this.saveVolatile({ conid, accessToken });
|
||||
// console.log('******************************** WE HAVE ACCESS TOKEN', accessToken);
|
||||
socket.emit('got-volatile-token', { strmid, savedConId: conid, volatileConId: volatile._id });
|
||||
return { success: true };
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error getting DB token');
|
||||
return { error: err.message };
|
||||
}
|
||||
},
|
||||
|
||||
dbloginAuthToken_meta: true,
|
||||
async dbloginAuthToken({ amoid, code, conid, redirectUri, sid }) {
|
||||
try {
|
||||
const connection = await this.getCore({ conid });
|
||||
const driver = requireEngineDriver(connection);
|
||||
const accessToken = await driver.getAuthTokenFromCode(connection, { code, redirectUri, sid });
|
||||
const volatile = await this.saveVolatile({ conid, accessToken });
|
||||
const authProvider = getAuthProviderById(amoid);
|
||||
const resp = await authProvider.login(null, null, { conid: volatile._id });
|
||||
return resp;
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error getting DB token');
|
||||
return { error: err.message };
|
||||
}
|
||||
},
|
||||
|
||||
dbloginAuth_meta: true,
|
||||
async dbloginAuth({ amoid, conid, user, password }) {
|
||||
if (user || password) {
|
||||
const saveResp = await this.saveVolatile({ conid, user, password, test: true });
|
||||
if (saveResp.msgtype == 'connected') {
|
||||
const loginResp = await getAuthProviderById(amoid).login(user, password, { conid: saveResp._id });
|
||||
return loginResp;
|
||||
}
|
||||
return saveResp;
|
||||
}
|
||||
|
||||
// user and password is stored in connection, volatile connection is not needed
|
||||
const loginResp = await getAuthProviderById(amoid).login(null, null, { conid });
|
||||
return loginResp;
|
||||
},
|
||||
|
||||
volatileDbloginFromAuth_meta: true,
|
||||
async volatileDbloginFromAuth({ conid }, req) {
|
||||
const connection = await this.getCore({ conid });
|
||||
const driver = requireEngineDriver(connection);
|
||||
const accessToken = await driver.getAccessTokenFromAuth(connection, req);
|
||||
if (accessToken) {
|
||||
const volatile = await this.saveVolatile({ conid, accessToken });
|
||||
return volatile;
|
||||
}
|
||||
return null;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -89,6 +89,9 @@ module.exports = {
|
||||
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
|
||||
throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
|
||||
}
|
||||
if (connection.useRedirectDbLogin) {
|
||||
throw new MissingCredentialsError({ conid, redirectToDbLogin: true });
|
||||
}
|
||||
const subprocess = fork(
|
||||
global['API_PACKAGE'] || process.argv[1],
|
||||
[
|
||||
@@ -179,6 +182,15 @@ module.exports = {
|
||||
return res;
|
||||
},
|
||||
|
||||
runOperation_meta: true,
|
||||
async runOperation({ conid, database, operation, useTransaction }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
logger.info({ conid, database, operation }, 'Processing operation');
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
const res = await this.sendRequest(opened, { msgtype: 'runOperation', operation, useTransaction });
|
||||
return res;
|
||||
},
|
||||
|
||||
collectionData_meta: true,
|
||||
async collectionData({ conid, database, options }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
const crypto = require('crypto');
|
||||
const connections = require('./connections');
|
||||
const socket = require('../utility/socket');
|
||||
const { fork } = require('child_process');
|
||||
@@ -56,6 +57,9 @@ module.exports = {
|
||||
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
|
||||
throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
|
||||
}
|
||||
if (connection.useRedirectDbLogin) {
|
||||
throw new MissingCredentialsError({ conid, redirectToDbLogin: true });
|
||||
}
|
||||
const subprocess = fork(
|
||||
global['API_PACKAGE'] || process.argv[1],
|
||||
[
|
||||
@@ -181,22 +185,29 @@ module.exports = {
|
||||
return { status: 'ok' };
|
||||
},
|
||||
|
||||
createDatabase_meta: true,
|
||||
async createDatabase({ conid, name }, req) {
|
||||
async sendDatabaseOp({ conid, msgtype, name }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid);
|
||||
if (opened.connection.isReadOnly) return false;
|
||||
opened.subprocess.send({ msgtype: 'createDatabase', name });
|
||||
return { status: 'ok' };
|
||||
const res = await this.sendRequest(opened, { msgtype, name });
|
||||
if (res.errorMessage) {
|
||||
console.error(res.errorMessage);
|
||||
|
||||
return {
|
||||
apiErrorMessage: res.errorMessage,
|
||||
};
|
||||
}
|
||||
return res.result || null;
|
||||
},
|
||||
|
||||
createDatabase_meta: true,
|
||||
async createDatabase({ conid, name }, req) {
|
||||
return this.sendDatabaseOp({ conid, msgtype: 'createDatabase', name }, req);
|
||||
},
|
||||
|
||||
dropDatabase_meta: true,
|
||||
async dropDatabase({ conid, name }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid);
|
||||
if (opened.connection.isReadOnly) return false;
|
||||
opened.subprocess.send({ msgtype: 'dropDatabase', name });
|
||||
return { status: 'ok' };
|
||||
return this.sendDatabaseOp({ conid, msgtype: 'dropDatabase', name }, req);
|
||||
},
|
||||
|
||||
sendRequest(conn, message) {
|
||||
|
||||
@@ -149,7 +149,7 @@ module.exports = {
|
||||
const { sesid } = await this.create({ conid, database });
|
||||
const session = this.opened.find(x => x.sesid == sesid);
|
||||
session.killOnDone = true;
|
||||
const jslid = uuidv1();
|
||||
const jslid = crypto.randomUUID();
|
||||
session.loadingReader_jslid = jslid;
|
||||
const fileName = queryName && appFolder ? path.join(appdir(), appFolder, `${queryName}.query.sql`) : null;
|
||||
|
||||
@@ -169,7 +169,7 @@ module.exports = {
|
||||
|
||||
startProfiler_meta: true,
|
||||
async startProfiler({ sesid }) {
|
||||
const jslid = uuidv1();
|
||||
const jslid = crypto.randomUUID();
|
||||
const session = this.opened.find(x => x.sesid == sesid);
|
||||
if (!session) {
|
||||
throw new Error('Invalid session');
|
||||
|
||||
20
packages/api/src/controllers/storage.js
Normal file
@@ -0,0 +1,20 @@
|
||||
module.exports = {
|
||||
connections_meta: true,
|
||||
async connections(req) {
|
||||
return null;
|
||||
},
|
||||
|
||||
getConnection_meta: true,
|
||||
async getConnection({ conid }) {
|
||||
return null;
|
||||
},
|
||||
|
||||
async loadSuperadminPermissions() {
|
||||
return [];
|
||||
},
|
||||
|
||||
getConnectionsForLoginPage_meta: true,
|
||||
async getConnectionsForLoginPage() {
|
||||
return null;
|
||||
},
|
||||
};
|
||||
@@ -10,7 +10,9 @@ const { read } = require('./queryHistory');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const _ = require('lodash');
|
||||
const serverConnections = require('./serverConnections');
|
||||
const config = require('./config');
|
||||
const gistSecret = require('../gistSecret');
|
||||
const currentVersion = require('../currentVersion');
|
||||
|
||||
module.exports = {
|
||||
upload_meta: {
|
||||
@@ -44,16 +46,21 @@ module.exports = {
|
||||
res.sendFile(path.join(uploadsdir(), req.query.file));
|
||||
},
|
||||
|
||||
async getGistToken() {
|
||||
const settings = await config.getSettings();
|
||||
|
||||
return settings['other.gistCreateToken'] || gistSecret;
|
||||
},
|
||||
|
||||
uploadErrorToGist_meta: true,
|
||||
async uploadErrorToGist() {
|
||||
console.log('&&&SECRET', gistSecret);
|
||||
const logs = await fs.readFile(getLogsFilePath(), { encoding: 'utf-8' });
|
||||
const connections = await serverConnections.getOpenedConnectionReport();
|
||||
try {
|
||||
const response = await axios.default.post(
|
||||
'https://api.github.com/gists',
|
||||
{
|
||||
description: 'DbGate error report',
|
||||
description: `DbGate ${currentVersion.version} error report`,
|
||||
public: false,
|
||||
files: {
|
||||
'logs.jsonl': {
|
||||
@@ -87,11 +94,14 @@ module.exports = {
|
||||
'connections.json': {
|
||||
content: JSON.stringify(connections, null, 2),
|
||||
},
|
||||
'version.json': {
|
||||
content: JSON.stringify(currentVersion, null, 2),
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
Authorization: `token ${gistSecret}`,
|
||||
Authorization: `token ${await this.getGistToken()}`,
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/vnd.github.v3+json',
|
||||
},
|
||||
@@ -113,7 +123,7 @@ module.exports = {
|
||||
async deleteGist({ url }) {
|
||||
const response = await axios.default.delete(url, {
|
||||
headers: {
|
||||
Authorization: `token ${gistSecret}`,
|
||||
Authorization: `token ${await this.getGistToken()}`,
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/vnd.github.v3+json',
|
||||
},
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
const { setLogger, getLogger, setLoggerName } = require('dbgate-tools');
|
||||
const { setLogConfig, getLogger, setLoggerName } = require('dbgate-tools');
|
||||
const processArgs = require('./utility/processArgs');
|
||||
const fs = require('fs');
|
||||
const moment = require('moment');
|
||||
@@ -30,22 +30,27 @@ function configureLogger() {
|
||||
setLogsFilePath(logsFilePath);
|
||||
setLoggerName('main');
|
||||
|
||||
const logger = createLogger({
|
||||
const consoleLogLevel = process.env.CONSOLE_LOG_LEVEL || process.env.LOG_LEVEL || 'info';
|
||||
const fileLogLevel = process.env.FILE_LOG_LEVEL || process.env.LOG_LEVEL || 'debug';
|
||||
|
||||
const logConfig = {
|
||||
base: { pid: process.pid },
|
||||
targets: [
|
||||
{
|
||||
type: 'console',
|
||||
// @ts-ignore
|
||||
level: process.env.CONSOLE_LOG_LEVEL || process.env.LOG_LEVEL || 'info',
|
||||
level: consoleLogLevel,
|
||||
},
|
||||
{
|
||||
type: 'stream',
|
||||
// @ts-ignore
|
||||
level: process.env.FILE_LOG_LEVEL || process.env.LOG_LEVEL || 'info',
|
||||
level: fileLogLevel,
|
||||
stream: fs.createWriteStream(logsFilePath, { flags: 'a' }),
|
||||
},
|
||||
],
|
||||
});
|
||||
};
|
||||
|
||||
// logger.info(`Initialized logging, console log level: ${consoleLogLevel}, file log level: ${fileLogLevel}`);
|
||||
|
||||
// const streams = [];
|
||||
// if (!platformInfo.isElectron) {
|
||||
@@ -83,7 +88,8 @@ function configureLogger() {
|
||||
// },
|
||||
// });
|
||||
|
||||
setLogger(logger);
|
||||
// @ts-ignore
|
||||
setLogConfig(logConfig);
|
||||
}
|
||||
|
||||
if (processArgs.listenApi) {
|
||||
@@ -91,9 +97,12 @@ if (processArgs.listenApi) {
|
||||
}
|
||||
|
||||
const shell = require('./shell/index');
|
||||
const dbgateTools = require('dbgate-tools');
|
||||
const currentVersion = require('./currentVersion');
|
||||
|
||||
global['DBGATE_TOOLS'] = dbgateTools;
|
||||
global.DBGATE_PACKAGES = {
|
||||
'dbgate-tools': require('dbgate-tools'),
|
||||
'dbgate-sqltree': require('dbgate-sqltree'),
|
||||
};
|
||||
|
||||
if (processArgs.startProcess) {
|
||||
const proc = require('./proc');
|
||||
@@ -110,6 +119,7 @@ module.exports = {
|
||||
...shell,
|
||||
getLogger,
|
||||
configureLogger,
|
||||
currentVersion,
|
||||
// loadLogsContent,
|
||||
getMainModule: () => require('./main'),
|
||||
};
|
||||
|
||||
@@ -18,6 +18,7 @@ const sessions = require('./controllers/sessions');
|
||||
const runners = require('./controllers/runners');
|
||||
const jsldata = require('./controllers/jsldata');
|
||||
const config = require('./controllers/config');
|
||||
const storage = require('./controllers/storage');
|
||||
const archive = require('./controllers/archive');
|
||||
const apps = require('./controllers/apps');
|
||||
const auth = require('./controllers/auth');
|
||||
@@ -31,9 +32,9 @@ const onFinished = require('on-finished');
|
||||
const { rundir } = require('./utility/directories');
|
||||
const platformInfo = require('./utility/platformInfo');
|
||||
const getExpressPath = require('./utility/getExpressPath');
|
||||
const { getLogins } = require('./utility/hasPermission');
|
||||
const _ = require('lodash');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const { getDefaultAuthProvider } = require('./auth/authProvider');
|
||||
|
||||
const logger = getLogger('main');
|
||||
|
||||
@@ -44,11 +45,23 @@ function start() {
|
||||
|
||||
const server = http.createServer(app);
|
||||
|
||||
const logins = getLogins();
|
||||
if (logins && process.env.BASIC_AUTH) {
|
||||
if (process.env.BASIC_AUTH && !process.env.STORAGE_DATABASE) {
|
||||
async function authorizer(username, password, cb) {
|
||||
try {
|
||||
const resp = await getDefaultAuthProvider().login(username, password);
|
||||
if (resp.accessToken) {
|
||||
cb(null, true);
|
||||
} else {
|
||||
cb(null, false);
|
||||
}
|
||||
} catch (err) {
|
||||
cb(err, false);
|
||||
}
|
||||
}
|
||||
app.use(
|
||||
basicAuth({
|
||||
users: _.fromPairs(logins.filter(x => x.password).map(x => [x.login, x.password])),
|
||||
authorizer,
|
||||
authorizeAsync: true,
|
||||
challenge: true,
|
||||
realm: 'DbGate Web App',
|
||||
})
|
||||
@@ -72,9 +85,7 @@ function start() {
|
||||
});
|
||||
}
|
||||
|
||||
if (auth.shouldAuthorizeApi()) {
|
||||
app.use(auth.authMiddleware);
|
||||
}
|
||||
app.use(auth.authMiddleware);
|
||||
|
||||
app.get(getExpressPath('/stream'), async function (req, res) {
|
||||
const strmid = req.query.strmid;
|
||||
@@ -162,6 +173,7 @@ function useAllControllers(app, electron) {
|
||||
useController(app, electron, '/runners', runners);
|
||||
useController(app, electron, '/jsldata', jsldata);
|
||||
useController(app, electron, '/config', config);
|
||||
useController(app, electron, '/storage', storage);
|
||||
useController(app, electron, '/archive', archive);
|
||||
useController(app, electron, '/uploads', uploads);
|
||||
useController(app, electron, '/plugins', plugins);
|
||||
|
||||
@@ -170,6 +170,18 @@ async function handleRunScript({ msgid, sql, useTransaction }, skipReadonlyCheck
|
||||
}
|
||||
}
|
||||
|
||||
async function handleRunOperation({ msgid, operation, useTransaction }, skipReadonlyCheck = false) {
|
||||
await waitConnected();
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
|
||||
await driver.operation(systemConnection, operation, { useTransaction });
|
||||
process.send({ msgtype: 'response', msgid });
|
||||
} catch (err) {
|
||||
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
|
||||
}
|
||||
}
|
||||
|
||||
async function handleQueryData({ msgid, sql }, skipReadonlyCheck = false) {
|
||||
await waitConnected();
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
@@ -311,6 +323,7 @@ const messageHandlers = {
|
||||
connect: handleConnect,
|
||||
queryData: handleQueryData,
|
||||
runScript: handleRunScript,
|
||||
runOperation: handleRunOperation,
|
||||
updateCollection: handleUpdateCollection,
|
||||
collectionData: handleCollectionData,
|
||||
loadKeys: handleLoadKeys,
|
||||
|
||||
@@ -16,7 +16,18 @@ let afterConnectCallbacks = [];
|
||||
async function handleRefresh() {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
const databases = await driver.listDatabases(systemConnection);
|
||||
let databases = await driver.listDatabases(systemConnection);
|
||||
if (storedConnection?.allowedDatabases?.trim()) {
|
||||
const allowedDatabaseList = storedConnection.allowedDatabases
|
||||
.split('\n')
|
||||
.map(x => x.trim().toLowerCase())
|
||||
.filter(x => x);
|
||||
databases = databases.filter(x => allowedDatabaseList.includes(x.name.toLocaleLowerCase()));
|
||||
}
|
||||
if (storedConnection?.allowedDatabasesRegex?.trim()) {
|
||||
const regex = new RegExp(storedConnection.allowedDatabasesRegex, 'i');
|
||||
databases = databases.filter(x => regex.test(x.name));
|
||||
}
|
||||
setStatusName('ok');
|
||||
const databasesString = stableStringify(databases);
|
||||
if (lastDatabases != databasesString) {
|
||||
@@ -94,18 +105,24 @@ function handlePing() {
|
||||
lastPing = new Date().getTime();
|
||||
}
|
||||
|
||||
async function handleDatabaseOp(op, { name }) {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
systemConnection = await connectUtility(driver, storedConnection, 'app');
|
||||
if (driver[op]) {
|
||||
await driver[op](systemConnection, name);
|
||||
} else {
|
||||
const dmp = driver.createDumper();
|
||||
dmp[op](name);
|
||||
logger.info({ sql: dmp.s }, 'Running script');
|
||||
await driver.query(systemConnection, dmp.s);
|
||||
async function handleDatabaseOp(op, { msgid, name }) {
|
||||
try {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
systemConnection = await connectUtility(driver, storedConnection, 'app');
|
||||
if (driver[op]) {
|
||||
await driver[op](systemConnection, name);
|
||||
} else {
|
||||
const dmp = driver.createDumper();
|
||||
dmp[op](name);
|
||||
logger.info({ sql: dmp.s }, 'Running script');
|
||||
await driver.query(systemConnection, dmp.s);
|
||||
}
|
||||
await handleRefresh();
|
||||
|
||||
process.send({ msgtype: 'response', msgid, status: 'ok' });
|
||||
} catch (err) {
|
||||
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
|
||||
}
|
||||
await handleRefresh();
|
||||
}
|
||||
|
||||
async function handleDriverDataCore(msgid, callMethod) {
|
||||
|
||||
@@ -15,10 +15,12 @@ async function getSshConnection(connection) {
|
||||
agentForward: connection.sshMode == 'agent',
|
||||
passphrase: connection.sshMode == 'keyFile' ? connection.sshKeyfilePassword : undefined,
|
||||
username: connection.sshLogin,
|
||||
password: connection.sshMode == 'userPassword' ? connection.sshPassword : undefined,
|
||||
password: (connection.sshMode || 'userPassword') == 'userPassword' ? connection.sshPassword : undefined,
|
||||
agentSocket: connection.sshMode == 'agent' ? platformInfo.sshAuthSock : undefined,
|
||||
privateKey:
|
||||
connection.sshMode == 'keyFile' && connection.sshKeyfile ? await fs.readFile(connection.sshKeyfile) : undefined,
|
||||
connection.sshMode == 'keyFile' && (connection.sshKeyfile || platformInfo?.defaultKeyfile)
|
||||
? await fs.readFile(connection.sshKeyfile || platformInfo?.defaultKeyfile)
|
||||
: undefined,
|
||||
skipAutoPrivateKey: true,
|
||||
noReadline: true,
|
||||
};
|
||||
|
||||
16
packages/api/src/shell/dbModelToJson.js
Normal file
@@ -0,0 +1,16 @@
|
||||
const importDbModel = require('../utility/importDbModel');
|
||||
const fs = require('fs');
|
||||
|
||||
async function dbModelToJson({ modelFolder, outputFile, commonjs }) {
|
||||
const dbInfo = await importDbModel(modelFolder);
|
||||
|
||||
const json = JSON.stringify(dbInfo, null, 2);
|
||||
if (commonjs) {
|
||||
fs.writeFileSync(outputFile, `module.exports = ${json};`);
|
||||
return;
|
||||
} else {
|
||||
fs.writeFileSync(outputFile, json);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = dbModelToJson;
|
||||
@@ -27,6 +27,8 @@ const loadDatabase = require('./loadDatabase');
|
||||
const generateModelSql = require('./generateModelSql');
|
||||
const modifyJsonLinesReader = require('./modifyJsonLinesReader');
|
||||
const dataDuplicator = require('./dataDuplicator');
|
||||
const dbModelToJson = require('./dbModelToJson');
|
||||
const jsonToDbModel = require('./jsonToDbModel');
|
||||
|
||||
const dbgateApi = {
|
||||
queryReader,
|
||||
@@ -57,6 +59,8 @@ const dbgateApi = {
|
||||
generateModelSql,
|
||||
modifyJsonLinesReader,
|
||||
dataDuplicator,
|
||||
dbModelToJson,
|
||||
jsonToDbModel,
|
||||
};
|
||||
|
||||
requirePlugin.initializeDbgateApi(dbgateApi);
|
||||
|
||||
9
packages/api/src/shell/jsonToDbModel.js
Normal file
@@ -0,0 +1,9 @@
|
||||
const exportDbModel = require('../utility/exportDbModel');
|
||||
const fs = require('fs');
|
||||
|
||||
async function jsonToDbModel({ modelFile, outputDir }) {
|
||||
const dbInfo = JSON.parse(fs.readFileSync(modelFile, 'utf-8'));
|
||||
await exportDbModel(dbInfo, outputDir);
|
||||
}
|
||||
|
||||
module.exports = jsonToDbModel;
|
||||
@@ -61,10 +61,13 @@ class ParseStream extends stream.Transform {
|
||||
if (update.document) {
|
||||
obj = update.document;
|
||||
} else {
|
||||
obj = {
|
||||
...obj,
|
||||
...update.fields,
|
||||
};
|
||||
obj = _.omitBy(
|
||||
{
|
||||
...obj,
|
||||
...update.fields,
|
||||
},
|
||||
(v, k) => v.$$undefined$$
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@ const fs = require('fs');
|
||||
const { pluginsdir, packagedPluginsDir, getPluginBackendPath } = require('../utility/directories');
|
||||
const nativeModules = require('../nativeModules');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const authProxy = require('../utility/authProxy');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const logger = getLogger('requirePlugin');
|
||||
|
||||
@@ -11,6 +12,8 @@ const loadedPlugins = {};
|
||||
const dbgateEnv = {
|
||||
dbgateApi: null,
|
||||
nativeModules,
|
||||
platformInfo,
|
||||
authProxy,
|
||||
};
|
||||
function requirePlugin(packageName, requiredPlugin = null) {
|
||||
if (!packageName) throw new Error('Missing packageName in plugin');
|
||||
|
||||
@@ -210,7 +210,7 @@ class JsonLinesDatastore {
|
||||
async getRows(offset, limit, filter, sort) {
|
||||
const res = [];
|
||||
if (sort && !this.sortedFiles[stableStringify(sort)]) {
|
||||
const jslid = uuidv1();
|
||||
const jslid = crypto.randomUUID();
|
||||
const sortedFile = path.join(jsldir(), `${jslid}.jsonl`);
|
||||
await JsonLinesDatastore.sortFile(this.file, sortedFile, sort);
|
||||
this.sortedFiles[stableStringify(sort)] = sortedFile;
|
||||
|
||||
20
packages/api/src/utility/authProxy.js
Normal file
@@ -0,0 +1,20 @@
|
||||
function isAuthProxySupported() {
|
||||
return false;
|
||||
}
|
||||
|
||||
async function authProxyGetRedirectUrl(options) {
|
||||
return null;
|
||||
}
|
||||
|
||||
async function authProxyGetTokenFromCode(options) {
|
||||
return null;
|
||||
}
|
||||
|
||||
function startTokenChecking(sid, callback) {}
|
||||
|
||||
module.exports = {
|
||||
isAuthProxySupported,
|
||||
authProxyGetRedirectUrl,
|
||||
authProxyGetTokenFromCode,
|
||||
startTokenChecking,
|
||||
};
|
||||
10
packages/api/src/utility/checkLicense.js
Normal file
@@ -0,0 +1,10 @@
|
||||
function checkLicense() {
|
||||
return {
|
||||
status: 'ok',
|
||||
type: 'community',
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
checkLicense,
|
||||
};
|
||||
@@ -1,70 +1,81 @@
|
||||
const { compilePermissions, testPermission } = require('dbgate-tools');
|
||||
const _ = require('lodash');
|
||||
const { getAuthProviderFromReq } = require('../auth/authProvider');
|
||||
|
||||
const userPermissions = {};
|
||||
const cachedPermissions = {};
|
||||
|
||||
function hasPermission(tested, req) {
|
||||
if (!req) {
|
||||
// request object not available, allow all
|
||||
return true;
|
||||
}
|
||||
const { user } = (req && req.auth) || {};
|
||||
const key = user || '';
|
||||
const logins = getLogins();
|
||||
|
||||
if (!userPermissions[key]) {
|
||||
if (logins) {
|
||||
const login = logins.find(x => x.login == user);
|
||||
userPermissions[key] = compilePermissions(login ? login.permissions : null);
|
||||
} else {
|
||||
userPermissions[key] = compilePermissions(process.env.PERMISSIONS);
|
||||
}
|
||||
const permissions = getAuthProviderFromReq(req).getCurrentPermissions(req);
|
||||
|
||||
if (!cachedPermissions[permissions]) {
|
||||
cachedPermissions[permissions] = compilePermissions(permissions);
|
||||
}
|
||||
return testPermission(tested, userPermissions[key]);
|
||||
|
||||
return testPermission(tested, cachedPermissions[permissions]);
|
||||
|
||||
// const { user } = (req && req.auth) || {};
|
||||
// const { login } = (process.env.OAUTH_PERMISSIONS && req && req.user) || {};
|
||||
// const key = user || login || '';
|
||||
// const logins = getLogins();
|
||||
|
||||
// if (!userPermissions[key]) {
|
||||
// if (logins) {
|
||||
// const login = logins.find(x => x.login == user);
|
||||
// userPermissions[key] = compilePermissions(login ? login.permissions : null);
|
||||
// } else {
|
||||
// userPermissions[key] = compilePermissions(process.env.PERMISSIONS);
|
||||
// }
|
||||
// }
|
||||
// return testPermission(tested, userPermissions[key]);
|
||||
}
|
||||
|
||||
let loginsCache = null;
|
||||
let loginsLoaded = false;
|
||||
// let loginsCache = null;
|
||||
// let loginsLoaded = false;
|
||||
|
||||
function getLogins() {
|
||||
if (loginsLoaded) {
|
||||
return loginsCache;
|
||||
}
|
||||
// function getLogins() {
|
||||
// if (loginsLoaded) {
|
||||
// return loginsCache;
|
||||
// }
|
||||
|
||||
const res = [];
|
||||
if (process.env.LOGIN && process.env.PASSWORD) {
|
||||
res.push({
|
||||
login: process.env.LOGIN,
|
||||
password: process.env.PASSWORD,
|
||||
permissions: process.env.PERMISSIONS,
|
||||
});
|
||||
}
|
||||
if (process.env.LOGINS || process.env.OAUTH_PERMISSIONS) {
|
||||
const logins = _.compact(process.env.LOGINS.split(',').map(x => x.trim()));
|
||||
for (const login of logins) {
|
||||
const password = process.env[`LOGIN_PASSWORD_${login}`];
|
||||
const permissions = process.env[`LOGIN_PERMISSIONS_${login}`];
|
||||
if (password) {
|
||||
res.push({
|
||||
login,
|
||||
password,
|
||||
permissions,
|
||||
});
|
||||
}
|
||||
if (process.env.OAUTH_PERMISSIONS) {
|
||||
res.push({
|
||||
login,
|
||||
password: null,
|
||||
permissions,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
// const res = [];
|
||||
// if (process.env.LOGIN && process.env.PASSWORD) {
|
||||
// res.push({
|
||||
// login: process.env.LOGIN,
|
||||
// password: process.env.PASSWORD,
|
||||
// permissions: process.env.PERMISSIONS,
|
||||
// });
|
||||
// }
|
||||
// if (process.env.LOGINS) {
|
||||
// const logins = _.compact(process.env.LOGINS.split(',').map(x => x.trim()));
|
||||
// for (const login of logins) {
|
||||
// const password = process.env[`LOGIN_PASSWORD_${login}`];
|
||||
// const permissions = process.env[`LOGIN_PERMISSIONS_${login}`];
|
||||
// if (password) {
|
||||
// res.push({
|
||||
// login,
|
||||
// password,
|
||||
// permissions,
|
||||
// });
|
||||
// }
|
||||
// }
|
||||
// } else if (process.env.OAUTH_PERMISSIONS) {
|
||||
// const login_permission_keys = Object.keys(process.env).filter(key => _.startsWith(key, 'LOGIN_PERMISSIONS_'));
|
||||
// for (const permissions_key of login_permission_keys) {
|
||||
// const login = permissions_key.replace('LOGIN_PERMISSIONS_', '');
|
||||
// const permissions = process.env[permissions_key];
|
||||
// userPermissions[login] = compilePermissions(permissions);
|
||||
// }
|
||||
// }
|
||||
|
||||
loginsCache = res.length > 0 ? res : null;
|
||||
loginsLoaded = true;
|
||||
return loginsCache;
|
||||
}
|
||||
// loginsCache = res.length > 0 ? res : null;
|
||||
// loginsLoaded = true;
|
||||
// return loginsCache;
|
||||
// }
|
||||
|
||||
function connectionHasPermission(connection, req) {
|
||||
if (!connection) {
|
||||
@@ -85,7 +96,6 @@ function testConnectionPermission(connection, req) {
|
||||
|
||||
module.exports = {
|
||||
hasPermission,
|
||||
getLogins,
|
||||
connectionHasPermission,
|
||||
testConnectionPermission,
|
||||
};
|
||||
|
||||
@@ -31,6 +31,9 @@ module.exports = {
|
||||
electronSender.send(message, data == null ? null : data);
|
||||
}
|
||||
for (const strmid in sseResponses) {
|
||||
if (data?.strmid && data?.strmid != strmid) {
|
||||
continue;
|
||||
}
|
||||
let skipThisStream = false;
|
||||
if (sseResponses[strmid].filter) {
|
||||
for (const key in sseResponses[strmid].filter) {
|
||||
@@ -47,7 +50,7 @@ module.exports = {
|
||||
}
|
||||
|
||||
sseResponses[strmid].response?.write(
|
||||
`event: ${message}\ndata: ${stableStringify(data == null ? null : data)}\n\n`
|
||||
`event: ${message}\ndata: ${stableStringify(data == null ? null : _.omit(data, ['strmid']))}\n\n`
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
@@ -67,7 +67,7 @@ module.exports = function useController(app, electron, route, controller) {
|
||||
}
|
||||
|
||||
if (raw) {
|
||||
router[method](routeAction, controller[key]);
|
||||
router[method](routeAction, (req, res) => controller[key](req, res));
|
||||
} else {
|
||||
router[method](routeAction, async (req, res) => {
|
||||
// if (controller._init && !controller._init_called) {
|
||||
|
||||
@@ -47,6 +47,8 @@ var config = {
|
||||
],
|
||||
externals: {
|
||||
'better-sqlite3': 'commonjs better-sqlite3',
|
||||
'oracledb': 'commonjs oracledb',
|
||||
'msnodesqlv8': 'commonjs msnodesqlv8',
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@ export interface ChangeSetItem {
|
||||
document?: any;
|
||||
condition?: { [column: string]: string };
|
||||
fields?: { [column: string]: string };
|
||||
insertIfNotExistsFields?: { [column: string]: string };
|
||||
}
|
||||
|
||||
export interface ChangeSetItemFields {
|
||||
@@ -229,13 +230,23 @@ export function batchUpdateChangeSet(
|
||||
return changeSet;
|
||||
}
|
||||
|
||||
function extractFields(item: ChangeSetItem, allowNulls = true): UpdateField[] {
|
||||
return _.keys(item.fields)
|
||||
.filter(targetColumn => allowNulls || item.fields[targetColumn] != null)
|
||||
function extractFields(item: ChangeSetItem, allowNulls = true, allowedDocumentColumns: string[] = []): UpdateField[] {
|
||||
const allFields = {
|
||||
...item.fields,
|
||||
};
|
||||
|
||||
for (const docField in item.document || {}) {
|
||||
if (allowedDocumentColumns.includes(docField)) {
|
||||
allFields[docField] = item.document[docField];
|
||||
}
|
||||
}
|
||||
|
||||
return _.keys(allFields)
|
||||
.filter(targetColumn => allowNulls || allFields[targetColumn] != null)
|
||||
.map(targetColumn => ({
|
||||
targetColumn,
|
||||
exprType: 'value',
|
||||
value: item.fields[targetColumn],
|
||||
value: allFields[targetColumn],
|
||||
}));
|
||||
}
|
||||
|
||||
@@ -243,17 +254,19 @@ function changeSetInsertToSql(
|
||||
item: ChangeSetItem,
|
||||
dbinfo: DatabaseInfo = null
|
||||
): [AllowIdentityInsert, Insert, AllowIdentityInsert] {
|
||||
const fields = extractFields(item, false);
|
||||
const table = dbinfo?.tables?.find(x => x.schemaName == item.schemaName && x.pureName == item.pureName);
|
||||
const fields = extractFields(
|
||||
item,
|
||||
false,
|
||||
table?.columns?.map(x => x.columnName)
|
||||
);
|
||||
if (fields.length == 0) return null;
|
||||
let autoInc = false;
|
||||
if (dbinfo) {
|
||||
const table = dbinfo.tables.find(x => x.schemaName == item.schemaName && x.pureName == item.pureName);
|
||||
if (table) {
|
||||
const autoIncCol = table.columns.find(x => x.autoIncrement);
|
||||
// console.log('autoIncCol', autoIncCol);
|
||||
if (autoIncCol && fields.find(x => x.targetColumn == autoIncCol.columnName)) {
|
||||
autoInc = true;
|
||||
}
|
||||
if (table) {
|
||||
const autoIncCol = table.columns.find(x => x.autoIncrement);
|
||||
// console.log('autoIncCol', autoIncCol);
|
||||
if (autoIncCol && fields.find(x => x.targetColumn == autoIncCol.columnName)) {
|
||||
autoInc = true;
|
||||
}
|
||||
}
|
||||
const targetTable = {
|
||||
@@ -272,6 +285,9 @@ function changeSetInsertToSql(
|
||||
targetTable,
|
||||
commandType: 'insert',
|
||||
fields,
|
||||
insertWhereNotExistsCondition: item.insertIfNotExistsFields
|
||||
? compileSimpleChangeSetCondition(item.insertIfNotExistsFields)
|
||||
: null,
|
||||
},
|
||||
autoInc
|
||||
? {
|
||||
@@ -320,7 +336,41 @@ export function extractChangeSetCondition(item: ChangeSetItem, alias?: string):
|
||||
};
|
||||
}
|
||||
|
||||
function changeSetUpdateToSql(item: ChangeSetItem): Update {
|
||||
function compileSimpleChangeSetCondition(fields: { [column: string]: string }): Condition {
|
||||
function getColumnCondition(columnName: string): Condition {
|
||||
const value = fields[columnName];
|
||||
const expr: Expression = {
|
||||
exprType: 'column',
|
||||
columnName,
|
||||
};
|
||||
if (value == null) {
|
||||
return {
|
||||
conditionType: 'isNull',
|
||||
expr,
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
conditionType: 'binary',
|
||||
operator: '=',
|
||||
left: expr,
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions: _.keys(fields).map(columnName => getColumnCondition(columnName)),
|
||||
};
|
||||
}
|
||||
|
||||
function changeSetUpdateToSql(item: ChangeSetItem, dbinfo: DatabaseInfo = null): Update {
|
||||
const table = dbinfo?.tables?.find(x => x.schemaName == item.schemaName && x.pureName == item.pureName);
|
||||
|
||||
const autoIncCol = table?.columns?.find(x => x.autoIncrement);
|
||||
|
||||
return {
|
||||
from: {
|
||||
name: {
|
||||
@@ -329,7 +379,11 @@ function changeSetUpdateToSql(item: ChangeSetItem): Update {
|
||||
},
|
||||
},
|
||||
commandType: 'update',
|
||||
fields: extractFields(item),
|
||||
fields: extractFields(
|
||||
item,
|
||||
true,
|
||||
table?.columns?.map(x => x.columnName).filter(x => x != autoIncCol?.columnName)
|
||||
),
|
||||
where: extractChangeSetCondition(item),
|
||||
};
|
||||
}
|
||||
@@ -351,7 +405,7 @@ export function changeSetToSql(changeSet: ChangeSet, dbinfo: DatabaseInfo): Comm
|
||||
return _.compact(
|
||||
_.flatten([
|
||||
...(changeSet.inserts.map(item => changeSetInsertToSql(item, dbinfo)) as any),
|
||||
...changeSet.updates.map(changeSetUpdateToSql),
|
||||
...changeSet.updates.map(item => changeSetUpdateToSql(item, dbinfo)),
|
||||
...changeSet.deletes.map(changeSetDeleteToSql),
|
||||
])
|
||||
);
|
||||
@@ -446,7 +500,12 @@ export function changeSetInsertNewRow(changeSet: ChangeSet, name?: NamedObjectIn
|
||||
};
|
||||
}
|
||||
|
||||
export function changeSetInsertDocuments(changeSet: ChangeSet, documents: any[], name?: NamedObjectInfo): ChangeSet {
|
||||
export function changeSetInsertDocuments(
|
||||
changeSet: ChangeSet,
|
||||
documents: any[],
|
||||
name?: NamedObjectInfo,
|
||||
insertIfNotExistsFieldNames?: string[]
|
||||
): ChangeSet {
|
||||
const insertedRows = getChangeSetInsertedRows(changeSet, name);
|
||||
return {
|
||||
...changeSet,
|
||||
@@ -456,6 +515,7 @@ export function changeSetInsertDocuments(changeSet: ChangeSet, documents: any[],
|
||||
...name,
|
||||
insertedRowIndex: insertedRows.length + index,
|
||||
fields: doc,
|
||||
insertIfNotExistsFields: insertIfNotExistsFieldNames ? _.pick(doc, insertIfNotExistsFieldNames) : null,
|
||||
})),
|
||||
],
|
||||
};
|
||||
@@ -472,3 +532,7 @@ export function changeSetContainsChanges(changeSet: ChangeSet) {
|
||||
changeSet.dataUpdateCommands?.length > 0
|
||||
);
|
||||
}
|
||||
|
||||
export function changeSetChangedCount(changeSet: ChangeSet) {
|
||||
return changeSet.deletes.length + changeSet.updates.length + changeSet.inserts.length;
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import _ from 'lodash';
|
||||
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc, DisplayColumn } from './GridDisplay';
|
||||
import type { EngineDriver, ViewInfo, ColumnInfo, CollectionInfo } from 'dbgate-types';
|
||||
import { GridConfig, GridCache } from './GridConfig';
|
||||
import { mongoFilterBehaviour, standardFilterBehaviours } from 'dbgate-tools';
|
||||
|
||||
function getObjectKeys(obj) {
|
||||
if (_.isArray(obj)) {
|
||||
@@ -52,7 +53,7 @@ function getColumnsForObject(basePath, obj, res: any[], display) {
|
||||
}
|
||||
}
|
||||
|
||||
function getDisplayColumn(basePath, columnName, display) {
|
||||
function getDisplayColumn(basePath, columnName, display: CollectionGridDisplay) {
|
||||
const uniquePath = [...basePath, columnName];
|
||||
const uniqueName = uniquePath.join('.');
|
||||
return {
|
||||
@@ -62,9 +63,13 @@ function getDisplayColumn(basePath, columnName, display) {
|
||||
uniquePath,
|
||||
isStructured: true,
|
||||
parentHeaderText: createHeaderText(basePath),
|
||||
filterType: 'mongo',
|
||||
filterBehaviour: display?.driver?.getFilterBehaviour(null, standardFilterBehaviours) ?? mongoFilterBehaviour,
|
||||
pureName: display.collection?.pureName,
|
||||
schemaName: display.collection?.schemaName,
|
||||
|
||||
isPartitionKey: !!display?.collection?.partitionKey?.find(x => x.columnName == uniqueName),
|
||||
isClusterKey: !!display?.collection?.clusterKey?.find(x => x.columnName == uniqueName),
|
||||
isUniqueKey: !!display?.collection?.uniqueKey?.find(x => x.columnName == uniqueName),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -95,7 +100,7 @@ export class CollectionGridDisplay extends GridDisplay {
|
||||
cache: GridCache,
|
||||
setCache: ChangeCacheFunc,
|
||||
loadedRows,
|
||||
changeSet,
|
||||
changeSet,
|
||||
readOnly = false
|
||||
) {
|
||||
super(config, setConfig, cache, setCache, driver);
|
||||
@@ -104,10 +109,10 @@ export class CollectionGridDisplay extends GridDisplay {
|
||||
this.columns = analyseCollectionDisplayColumns([...(loadedRows || []), ...changedDocs, ...insertedDocs], this);
|
||||
this.filterable = true;
|
||||
this.sortable = true;
|
||||
this.editable = !readOnly;
|
||||
this.editable = !readOnly && collection?.uniqueKey?.length > 0;
|
||||
this.supportsReload = true;
|
||||
this.isDynamicStructure = true;
|
||||
this.changeSetKeyFields = ['_id'];
|
||||
this.changeSetKeyFields = collection?.uniqueKey?.map(x => x.columnName);
|
||||
this.baseCollection = collection;
|
||||
}
|
||||
}
|
||||
|
||||
91
packages/datalib/src/CustomGridDisplay.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
import _ from 'lodash';
|
||||
import { filterName, isTableColumnUnique } from 'dbgate-tools';
|
||||
import { GridDisplay, ChangeCacheFunc, DisplayColumn, DisplayedColumnInfo, ChangeConfigFunc } from './GridDisplay';
|
||||
import type {
|
||||
TableInfo,
|
||||
EngineDriver,
|
||||
ViewInfo,
|
||||
ColumnInfo,
|
||||
NamedObjectInfo,
|
||||
DatabaseInfo,
|
||||
ForeignKeyInfo,
|
||||
} from 'dbgate-types';
|
||||
import { GridConfig, GridCache, createGridCache } from './GridConfig';
|
||||
import { Expression, Select, treeToSql, dumpSqlSelect, ColumnRefExpression, Condition } from 'dbgate-sqltree';
|
||||
|
||||
export interface CustomGridColumn {
|
||||
columnName: string;
|
||||
columnLabel: string;
|
||||
isPrimaryKey?: boolean;
|
||||
}
|
||||
|
||||
export class CustomGridDisplay extends GridDisplay {
|
||||
customColumns: CustomGridColumn[];
|
||||
|
||||
constructor(
|
||||
public tableName: NamedObjectInfo,
|
||||
columns: CustomGridColumn[],
|
||||
driver: EngineDriver,
|
||||
config: GridConfig,
|
||||
setConfig: ChangeConfigFunc,
|
||||
cache: GridCache,
|
||||
setCache: ChangeCacheFunc,
|
||||
dbinfo: DatabaseInfo,
|
||||
serverVersion,
|
||||
isReadOnly = false,
|
||||
public additionalcondition: Condition = null
|
||||
) {
|
||||
super(config, setConfig, cache, setCache, driver, dbinfo, serverVersion);
|
||||
|
||||
this.customColumns = columns;
|
||||
|
||||
this.columns = columns.map(col => ({
|
||||
columnName: col.columnName,
|
||||
headerText: col.columnLabel,
|
||||
uniqueName: col.columnName,
|
||||
uniquePath: [col.columnName],
|
||||
isPrimaryKey: col.isPrimaryKey,
|
||||
isForeignKeyUnique: false,
|
||||
schemaName: tableName.schemaName,
|
||||
pureName: tableName.pureName,
|
||||
}));
|
||||
|
||||
this.changeSetKeyFields = columns.filter(x => x.isPrimaryKey).map(x => x.columnName);
|
||||
this.baseTable = {
|
||||
...tableName,
|
||||
columns: this.columns.map(x => ({ ...tableName, columnName: x.columnName, dataType: 'string' })),
|
||||
foreignKeys: [],
|
||||
};
|
||||
|
||||
this.filterable = true;
|
||||
this.sortable = true;
|
||||
this.groupable = false;
|
||||
this.editable = !isReadOnly;
|
||||
this.supportsReload = true;
|
||||
}
|
||||
|
||||
createSelect(options = {}) {
|
||||
const select = this.createSelectBase(
|
||||
this.tableName,
|
||||
[],
|
||||
// @ts-ignore
|
||||
// this.columns.map(col => ({
|
||||
// columnName: col.columnName,
|
||||
// })),
|
||||
options,
|
||||
this.customColumns.find(x => x.isPrimaryKey)?.columnName
|
||||
);
|
||||
select.selectAll = true;
|
||||
if (this.additionalcondition) {
|
||||
if (select.where) {
|
||||
select.where = {
|
||||
conditionType: 'and',
|
||||
conditions: [select.where, this.additionalcondition],
|
||||
};
|
||||
} else {
|
||||
select.where = this.additionalcondition;
|
||||
}
|
||||
}
|
||||
return select;
|
||||
}
|
||||
}
|
||||
@@ -10,12 +10,13 @@ import type {
|
||||
CollectionInfo,
|
||||
SqlDialect,
|
||||
ViewInfo,
|
||||
FilterBehaviour,
|
||||
} from 'dbgate-types';
|
||||
import { parseFilter, getFilterType } from 'dbgate-filterparser';
|
||||
import { parseFilter } from 'dbgate-filterparser';
|
||||
import { filterName } from 'dbgate-tools';
|
||||
import { ChangeSetFieldDefinition, ChangeSetRowDefinition } from './ChangeSet';
|
||||
import { Expression, Select, treeToSql, dumpSqlSelect, Condition, CompoudCondition } from 'dbgate-sqltree';
|
||||
import { isTypeLogical } from 'dbgate-tools';
|
||||
import { isTypeLogical, standardFilterBehaviours, detectSqlFilterBehaviour, stringFilterBehaviour } from 'dbgate-tools';
|
||||
|
||||
export interface DisplayColumn {
|
||||
schemaName: string;
|
||||
@@ -27,13 +28,19 @@ export interface DisplayColumn {
|
||||
notNull?: boolean;
|
||||
autoIncrement?: boolean;
|
||||
isPrimaryKey?: boolean;
|
||||
|
||||
// NoSQL specific
|
||||
isPartitionKey?: boolean;
|
||||
isClusterKey?: boolean;
|
||||
isUniqueKey?: boolean;
|
||||
|
||||
foreignKey?: ForeignKeyInfo;
|
||||
isForeignKeyUnique?: boolean;
|
||||
isExpandable?: boolean;
|
||||
isChecked?: boolean;
|
||||
hintColumnNames?: string[];
|
||||
dataType?: string;
|
||||
filterType?: boolean;
|
||||
filterBehaviour?: FilterBehaviour;
|
||||
isStructured?: boolean;
|
||||
}
|
||||
|
||||
@@ -92,20 +99,26 @@ export abstract class GridDisplay {
|
||||
isLoadedCorrectly = true;
|
||||
supportsReload = false;
|
||||
isDynamicStructure = false;
|
||||
filterTypeOverride = null;
|
||||
filterBehaviourOverride = null;
|
||||
|
||||
setColumnVisibility(uniquePath: string[], isVisible: boolean) {
|
||||
const uniqueName = uniquePath.join('.');
|
||||
if (uniquePath.length == 1) {
|
||||
this.includeInColumnSet('hiddenColumns', uniqueName, !isVisible);
|
||||
this.includeInColumnSet([
|
||||
{ field: 'hiddenColumns', uniqueName, isIncluded: !isVisible },
|
||||
isVisible == false && this.isDynamicStructure && { field: 'addedColumns', uniqueName, isIncluded: false },
|
||||
]);
|
||||
} else {
|
||||
this.includeInColumnSet('addedColumns', uniqueName, isVisible);
|
||||
this.includeInColumnSet([{ field: 'addedColumns', uniqueName, isIncluded: isVisible }]);
|
||||
if (!this.isDynamicStructure) this.reload();
|
||||
}
|
||||
}
|
||||
|
||||
addDynamicColumn(name: string) {
|
||||
this.includeInColumnSet('addedColumns', name, true);
|
||||
this.includeInColumnSet([
|
||||
{ field: 'addedColumns', uniqueName: name, isIncluded: true },
|
||||
{ field: 'hiddenColumns', uniqueName: name, isIncluded: false },
|
||||
]);
|
||||
}
|
||||
|
||||
focusColumns(uniqueNames: string[]) {
|
||||
@@ -143,19 +156,30 @@ export abstract class GridDisplay {
|
||||
this.setCache(reloadDataCacheFunc);
|
||||
}
|
||||
|
||||
includeInColumnSet(field: keyof GridConfigColumns, uniqueName: string, isIncluded: boolean) {
|
||||
// console.log('includeInColumnSet', field, uniqueName, isIncluded);
|
||||
if (isIncluded) {
|
||||
this.setConfig(cfg => ({
|
||||
...cfg,
|
||||
[field]: [...(cfg[field] || []), uniqueName],
|
||||
}));
|
||||
} else {
|
||||
this.setConfig(cfg => ({
|
||||
...cfg,
|
||||
[field]: (cfg[field] || []).filter(x => x != uniqueName),
|
||||
}));
|
||||
}
|
||||
includeInColumnSet(
|
||||
modifications: ({ field: keyof GridConfigColumns; uniqueName: string; isIncluded: boolean } | null)[]
|
||||
) {
|
||||
this.setConfig(cfg => {
|
||||
let res = cfg;
|
||||
for (const modification of modifications) {
|
||||
if (!modification) {
|
||||
continue;
|
||||
}
|
||||
const { field, uniqueName, isIncluded } = modification;
|
||||
if (isIncluded) {
|
||||
res = {
|
||||
...res,
|
||||
[field]: [...(cfg[field] || []), uniqueName],
|
||||
};
|
||||
} else {
|
||||
res = {
|
||||
...res,
|
||||
[field]: (cfg[field] || []).filter(x => x != uniqueName),
|
||||
};
|
||||
}
|
||||
}
|
||||
return res;
|
||||
});
|
||||
}
|
||||
|
||||
showAllColumns() {
|
||||
@@ -192,7 +216,11 @@ export abstract class GridDisplay {
|
||||
const column = displayedColumnInfo[uniqueName];
|
||||
if (!column) continue;
|
||||
try {
|
||||
const condition = parseFilter(filter, getFilterType(column.dataType));
|
||||
const condition = parseFilter(
|
||||
filter,
|
||||
this.driver?.getFilterBehaviour(column.dataType, standardFilterBehaviours) ??
|
||||
detectSqlFilterBehaviour(column.dataType)
|
||||
);
|
||||
if (condition) {
|
||||
conditions.push(
|
||||
_.cloneDeepWith(condition, (expr: Expression) => {
|
||||
@@ -214,14 +242,14 @@ export abstract class GridDisplay {
|
||||
}
|
||||
|
||||
if (this.baseTableOrView && this.config.multiColumnFilter) {
|
||||
try {
|
||||
const condition = parseFilter(this.config.multiColumnFilter, 'string');
|
||||
if (condition) {
|
||||
const orCondition: CompoudCondition = {
|
||||
conditionType: 'or',
|
||||
conditions: [],
|
||||
};
|
||||
for (const column of this.baseTableOrView.columns) {
|
||||
const orCondition: CompoudCondition = {
|
||||
conditionType: 'or',
|
||||
conditions: [],
|
||||
};
|
||||
for (const column of this.baseTableOrView.columns) {
|
||||
try {
|
||||
const condition = parseFilter(this.config.multiColumnFilter, detectSqlFilterBehaviour(column.dataType));
|
||||
if (condition) {
|
||||
orCondition.conditions.push(
|
||||
_.cloneDeepWith(condition, (expr: Expression) => {
|
||||
if (expr.exprType == 'placeholder') {
|
||||
@@ -230,12 +258,13 @@ export abstract class GridDisplay {
|
||||
})
|
||||
);
|
||||
}
|
||||
if (orCondition.conditions.length > 0) {
|
||||
conditions.push(orCondition);
|
||||
}
|
||||
} catch (err) {
|
||||
// skip for this column
|
||||
continue;
|
||||
}
|
||||
} catch (err) {
|
||||
console.warn(err.message);
|
||||
}
|
||||
if (orCondition.conditions.length > 0) {
|
||||
conditions.push(orCondition);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -343,7 +372,9 @@ export abstract class GridDisplay {
|
||||
}
|
||||
|
||||
toggleExpandedColumn(uniqueName: string, value?: boolean) {
|
||||
this.includeInColumnSet('expandedColumns', uniqueName, value == null ? !this.isExpandedColumn(uniqueName) : value);
|
||||
this.includeInColumnSet([
|
||||
{ field: 'expandedColumns', uniqueName, isIncluded: value == null ? !this.isExpandedColumn(uniqueName) : value },
|
||||
]);
|
||||
}
|
||||
|
||||
getFilter(uniqueName: string) {
|
||||
@@ -482,6 +513,7 @@ export abstract class GridDisplay {
|
||||
this.setConfig(cfg => ({
|
||||
...cfg,
|
||||
filters: {},
|
||||
multiColumnFilter: null,
|
||||
}));
|
||||
this.reload();
|
||||
}
|
||||
@@ -554,9 +586,9 @@ export abstract class GridDisplay {
|
||||
};
|
||||
}
|
||||
|
||||
createSelectBase(name: NamedObjectInfo, columns: ColumnInfo[], options) {
|
||||
createSelectBase(name: NamedObjectInfo, columns: ColumnInfo[], options, defaultOrderColumnName?: string) {
|
||||
if (!columns) return null;
|
||||
const orderColumnName = columns[0].columnName;
|
||||
const orderColumnName = defaultOrderColumnName ?? columns[0]?.columnName;
|
||||
const select: Select = {
|
||||
commandType: 'select',
|
||||
from: {
|
||||
@@ -732,6 +764,7 @@ export abstract class GridDisplay {
|
||||
alias: 'count',
|
||||
},
|
||||
];
|
||||
select.selectAll = false;
|
||||
}
|
||||
return select;
|
||||
// const sql = treeToSql(this.driver, select, dumpSqlSelect);
|
||||
@@ -745,10 +778,12 @@ export abstract class GridDisplay {
|
||||
for (const name in filters) {
|
||||
const column = this.isDynamicStructure ? null : this.columns.find(x => x.columnName == name);
|
||||
if (!this.isDynamicStructure && !column) continue;
|
||||
const filterType =
|
||||
this.filterTypeOverride ?? (this.isDynamicStructure ? 'mongo' : getFilterType(column.dataType));
|
||||
const filterBehaviour =
|
||||
this.filterBehaviourOverride ??
|
||||
this.driver?.getFilterBehaviour(column.dataType, standardFilterBehaviours) ??
|
||||
detectSqlFilterBehaviour(column.dataType);
|
||||
try {
|
||||
const condition = parseFilter(filters[name], filterType);
|
||||
const condition = parseFilter(filters[name], filterBehaviour);
|
||||
const replaced = _.cloneDeepWith(condition, (expr: Expression) => {
|
||||
if (expr.exprType == 'placeholder')
|
||||
return {
|
||||
@@ -763,7 +798,7 @@ export abstract class GridDisplay {
|
||||
}
|
||||
|
||||
if (this.config.multiColumnFilter) {
|
||||
const placeholderCondition = parseFilter(this.config.multiColumnFilter, 'string');
|
||||
const placeholderCondition = parseFilter(this.config.multiColumnFilter, stringFilterBehaviour);
|
||||
if (placeholderCondition) {
|
||||
conditions.push({
|
||||
conditionType: 'anyColumnPass',
|
||||
|
||||
@@ -2,6 +2,7 @@ import _ from 'lodash';
|
||||
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
|
||||
import { GridConfig, GridCache } from './GridConfig';
|
||||
import { analyseCollectionDisplayColumns } from './CollectionGridDisplay';
|
||||
import { evalFilterBehaviour } from 'dbgate-tools';
|
||||
|
||||
export class JslGridDisplay extends GridDisplay {
|
||||
constructor(
|
||||
@@ -22,7 +23,7 @@ export class JslGridDisplay extends GridDisplay {
|
||||
this.sortable = true;
|
||||
this.supportsReload = supportsReload;
|
||||
this.isDynamicStructure = isDynamicStructure;
|
||||
this.filterTypeOverride = 'eval';
|
||||
this.filterBehaviourOverride = evalFilterBehaviour;
|
||||
this.editable = editable;
|
||||
this.editableStructure = editable ? structure : null;
|
||||
|
||||
|
||||
@@ -105,7 +105,6 @@ export class PerspectiveCache {
|
||||
'databaseConfig',
|
||||
'orderBy',
|
||||
'sqlCondition',
|
||||
'mongoCondition',
|
||||
])
|
||||
);
|
||||
let res = this.tables[tableKey];
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { DatabaseInfo, ForeignKeyInfo, NamedObjectInfo, TableInfo } from 'dbgate-types';
|
||||
import type { DatabaseInfo, FilterBehaviour, ForeignKeyInfo, NamedObjectInfo, TableInfo } from 'dbgate-types';
|
||||
import uuidv1 from 'uuid/v1';
|
||||
|
||||
// export interface PerspectiveConfigColumns {
|
||||
@@ -31,7 +31,7 @@ export interface PerspectiveCustomJoinConfig {
|
||||
|
||||
export interface PerspectiveFilterColumnInfo {
|
||||
columnName: string;
|
||||
filterType: string;
|
||||
filterBehaviour: FilterBehaviour;
|
||||
pureName: string;
|
||||
schemaName: string;
|
||||
foreignKey: ForeignKeyInfo;
|
||||
|
||||
@@ -5,6 +5,7 @@ import _zipObject from 'lodash/zipObject';
|
||||
import _mapValues from 'lodash/mapValues';
|
||||
import _isArray from 'lodash/isArray';
|
||||
import { safeJsonParse } from 'dbgate-tools';
|
||||
import { CollectionAggregateDefinition } from 'dbgate-types';
|
||||
|
||||
function normalizeLoadedRow(row) {
|
||||
return _mapValues(row, v => safeJsonParse(v) || v);
|
||||
@@ -59,24 +60,6 @@ export class PerspectiveDataLoader {
|
||||
: null;
|
||||
}
|
||||
|
||||
buildMongoCondition(props: PerspectiveDataLoadProps): {} {
|
||||
const { schemaName, pureName, bindingColumns, bindingValues, dataColumns, orderBy, mongoCondition } = props;
|
||||
|
||||
const conditions = [];
|
||||
|
||||
if (mongoCondition) {
|
||||
conditions.push(mongoCondition);
|
||||
}
|
||||
|
||||
if (bindingColumns?.length == 1) {
|
||||
conditions.push({
|
||||
[bindingColumns[0]]: { $in: bindingValues.map(x => x[0]) },
|
||||
});
|
||||
}
|
||||
|
||||
return conditions.length == 1 ? conditions[0] : conditions.length > 0 ? { $and: conditions } : null;
|
||||
}
|
||||
|
||||
async loadGroupingSqlDb(props: PerspectiveDataLoadProps) {
|
||||
const { schemaName, pureName, bindingColumns } = props;
|
||||
|
||||
@@ -135,18 +118,28 @@ export class PerspectiveDataLoader {
|
||||
async loadGroupingDocDb(props: PerspectiveDataLoadProps) {
|
||||
const { schemaName, pureName, bindingColumns } = props;
|
||||
|
||||
const aggregate = [
|
||||
{ $match: this.buildMongoCondition(props) },
|
||||
{
|
||||
$group: {
|
||||
_id: _zipObject(
|
||||
bindingColumns,
|
||||
bindingColumns.map(col => '$' + col)
|
||||
),
|
||||
count: { $sum: 1 },
|
||||
const aggregate: CollectionAggregateDefinition = {
|
||||
condition: this.buildSqlCondition(props),
|
||||
groupByColumns: bindingColumns,
|
||||
aggregateColumns: [
|
||||
{
|
||||
alias: 'pergrpsize',
|
||||
aggregateFunction: 'count',
|
||||
},
|
||||
},
|
||||
];
|
||||
],
|
||||
};
|
||||
// const aggregate = [
|
||||
// { $match: this.buildMongoCondition(props) },
|
||||
// {
|
||||
// $group: {
|
||||
// _id: _zipObject(
|
||||
// bindingColumns,
|
||||
// bindingColumns.map(col => '$' + col)
|
||||
// ),
|
||||
// count: { $sum: 1 },
|
||||
// },
|
||||
// },
|
||||
// ];
|
||||
|
||||
if (dbg?.enabled) {
|
||||
dbg(`LOAD COUNTS, table=${props.pureName}, columns=${bindingColumns?.join(',')}`);
|
||||
@@ -163,8 +156,8 @@ export class PerspectiveDataLoader {
|
||||
|
||||
if (response.errorMessage) return response;
|
||||
return response.rows.map(row => ({
|
||||
...row._id,
|
||||
_perspective_group_size_: parseInt(row.count),
|
||||
...row,
|
||||
_perspective_group_size_: parseInt(row.pergrpsize),
|
||||
}));
|
||||
}
|
||||
|
||||
@@ -244,16 +237,23 @@ export class PerspectiveDataLoader {
|
||||
const { pureName } = props;
|
||||
const res: any = {
|
||||
pureName,
|
||||
condition: this.buildMongoCondition(props),
|
||||
condition: this.buildSqlCondition(props),
|
||||
sort:
|
||||
useSort && props.orderBy?.length > 0
|
||||
? props.orderBy.map(x => ({
|
||||
...x,
|
||||
direction: x.order,
|
||||
}))
|
||||
: undefined,
|
||||
skip: props.range?.offset,
|
||||
limit: props.range?.limit,
|
||||
};
|
||||
if (useSort && props.orderBy?.length > 0) {
|
||||
res.sort = _zipObject(
|
||||
props.orderBy.map(col => col.columnName),
|
||||
props.orderBy.map(col => (col.order == 'DESC' ? -1 : 1))
|
||||
);
|
||||
}
|
||||
// if (useSort && props.orderBy?.length > 0) {
|
||||
// res.sort = _zipObject(
|
||||
// props.orderBy.map(col => col.columnName),
|
||||
// props.orderBy.map(col => (col.order == 'DESC' ? -1 : 1))
|
||||
// );
|
||||
// }
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
@@ -25,7 +25,6 @@ export interface PerspectiveDataLoadProps {
|
||||
range?: RangeDefinition;
|
||||
topCount?: number;
|
||||
sqlCondition?: Condition;
|
||||
mongoCondition?: any;
|
||||
engineType: PerspectiveDatabaseEngineType;
|
||||
}
|
||||
|
||||
|
||||
@@ -2,13 +2,22 @@ import type {
|
||||
CollectionInfo,
|
||||
ColumnInfo,
|
||||
DatabaseInfo,
|
||||
FilterBehaviour,
|
||||
ForeignKeyInfo,
|
||||
NamedObjectInfo,
|
||||
RangeDefinition,
|
||||
TableInfo,
|
||||
ViewInfo,
|
||||
} from 'dbgate-types';
|
||||
import { equalFullName, isCollectionInfo, isTableInfo, isViewInfo } from 'dbgate-tools';
|
||||
import {
|
||||
detectSqlFilterBehaviour,
|
||||
equalFullName,
|
||||
isCollectionInfo,
|
||||
isTableInfo,
|
||||
isViewInfo,
|
||||
mongoFilterBehaviour,
|
||||
stringFilterBehaviour,
|
||||
} from 'dbgate-tools';
|
||||
import {
|
||||
ChangePerspectiveConfigFunc,
|
||||
createPerspectiveNodeConfig,
|
||||
@@ -33,8 +42,7 @@ import _cloneDeepWith from 'lodash/cloneDeepWith';
|
||||
import _findIndex from 'lodash/findIndex';
|
||||
import { PerspectiveDataLoadProps, PerspectiveDataProvider } from './PerspectiveDataProvider';
|
||||
import stableStringify from 'json-stable-stringify';
|
||||
import { getFilterType, parseFilter } from 'dbgate-filterparser';
|
||||
import { FilterType } from 'dbgate-filterparser/lib/types';
|
||||
import { parseFilter } from 'dbgate-filterparser';
|
||||
import { CompoudCondition, Condition, Expression, Select } from 'dbgate-sqltree';
|
||||
// import { getPerspectiveDefaultColumns } from './getPerspectiveDefaultColumns';
|
||||
import uuidv1 from 'uuid/v1';
|
||||
@@ -197,8 +205,8 @@ export abstract class PerspectiveTreeNode {
|
||||
get columnTitle() {
|
||||
return this.title;
|
||||
}
|
||||
get filterType(): FilterType {
|
||||
return 'string';
|
||||
get filterBehaviour(): FilterBehaviour {
|
||||
return stringFilterBehaviour;
|
||||
}
|
||||
get columnName() {
|
||||
return null;
|
||||
@@ -341,12 +349,27 @@ export abstract class PerspectiveTreeNode {
|
||||
);
|
||||
}
|
||||
|
||||
getMutliColumnCondition(source): Condition {
|
||||
if (!this.nodeConfig?.multiColumnFilter) return null;
|
||||
|
||||
const base = this.getBaseTableFromThis() as TableInfo | ViewInfo | CollectionInfo;
|
||||
if (!base) return null;
|
||||
|
||||
const isDocDb = isCollectionInfo(base);
|
||||
if (isDocDb) {
|
||||
return this.getMutliColumnNoSqlCondition();
|
||||
} else {
|
||||
return this.getMutliColumnSqlCondition(source);
|
||||
}
|
||||
}
|
||||
|
||||
getMutliColumnSqlCondition(source): Condition {
|
||||
if (!this.nodeConfig?.multiColumnFilter) return null;
|
||||
|
||||
const base = this.getBaseTableFromThis() as TableInfo | ViewInfo;
|
||||
if (!base) return null;
|
||||
try {
|
||||
const condition = parseFilter(this.nodeConfig?.multiColumnFilter, 'string');
|
||||
const condition = parseFilter(this.nodeConfig?.multiColumnFilter, stringFilterBehaviour);
|
||||
if (condition) {
|
||||
const orCondition: CompoudCondition = {
|
||||
conditionType: 'or',
|
||||
@@ -375,32 +398,40 @@ export abstract class PerspectiveTreeNode {
|
||||
return null;
|
||||
}
|
||||
|
||||
getMutliColumnMongoCondition(): {} {
|
||||
getMutliColumnNoSqlCondition(): Condition {
|
||||
if (!this.nodeConfig?.multiColumnFilter) return null;
|
||||
const pattern = this.dataProvider?.dataPatterns?.[this.designerId];
|
||||
if (!pattern) return null;
|
||||
|
||||
const condition = parseFilter(this.nodeConfig?.multiColumnFilter, 'mongo');
|
||||
const condition = parseFilter(this.nodeConfig?.multiColumnFilter, mongoFilterBehaviour);
|
||||
if (!condition) return null;
|
||||
const res = pattern.columns.map(col => {
|
||||
return _cloneDeepWith(condition, expr => {
|
||||
if (expr.__placeholder__) {
|
||||
return {
|
||||
[col.name]: expr.__placeholder__,
|
||||
};
|
||||
}
|
||||
});
|
||||
});
|
||||
return {
|
||||
$or: res,
|
||||
|
||||
const orCondition: CompoudCondition = {
|
||||
conditionType: 'or',
|
||||
conditions: [],
|
||||
};
|
||||
for (const column of pattern.columns || []) {
|
||||
orCondition.conditions.push(
|
||||
_cloneDeepWith(condition, (expr: Expression) => {
|
||||
if (expr.exprType == 'placeholder') {
|
||||
return {
|
||||
exprType: 'column',
|
||||
columnName: column.name,
|
||||
};
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
if (orCondition.conditions.length > 0) {
|
||||
return orCondition;
|
||||
}
|
||||
}
|
||||
|
||||
getChildrenSqlCondition(source = null): Condition {
|
||||
const conditions = _compact([
|
||||
...this.childNodes.map(x => x.parseFilterCondition(source)),
|
||||
...this.buildParentFilterConditions(),
|
||||
this.getMutliColumnSqlCondition(source),
|
||||
this.getMutliColumnCondition(source),
|
||||
]);
|
||||
if (conditions.length == 0) {
|
||||
return null;
|
||||
@@ -414,20 +445,6 @@ export abstract class PerspectiveTreeNode {
|
||||
};
|
||||
}
|
||||
|
||||
getChildrenMongoCondition(source = null): {} {
|
||||
const conditions = _compact([
|
||||
...this.childNodes.map(x => x.parseFilterCondition(source)),
|
||||
this.getMutliColumnMongoCondition(),
|
||||
]);
|
||||
if (conditions.length == 0) {
|
||||
return null;
|
||||
}
|
||||
if (conditions.length == 1) {
|
||||
return conditions[0];
|
||||
}
|
||||
return { $and: conditions };
|
||||
}
|
||||
|
||||
getOrderBy(table: TableInfo | ViewInfo | CollectionInfo): PerspectiveDataLoadProps['orderBy'] {
|
||||
const res = _compact(
|
||||
this.childNodes.map(node => {
|
||||
@@ -446,6 +463,8 @@ export abstract class PerspectiveTreeNode {
|
||||
order: 'ASC' as 'ASC',
|
||||
}));
|
||||
if (pkColumns) return pkColumns;
|
||||
const uqColumns = (table as CollectionInfo)?.uniqueKey;
|
||||
if (uqColumns?.length >= 1) return uqColumns.map(x => ({ columnName: x.columnName, order: 'ASC' }));
|
||||
const columns = (table as TableInfo | ViewInfo)?.columns;
|
||||
if (columns) return [{ columnName: columns[0].columnName, order: 'ASC' }];
|
||||
return [{ columnName: '_id', order: 'ASC' }];
|
||||
@@ -714,8 +733,8 @@ export class PerspectiveTableColumnNode extends PerspectiveTreeNode {
|
||||
return true;
|
||||
}
|
||||
|
||||
get filterType(): FilterType {
|
||||
return getFilterType(this.column.dataType);
|
||||
get filterBehaviour(): FilterBehaviour {
|
||||
return detectSqlFilterBehaviour(this.column.dataType);
|
||||
}
|
||||
|
||||
get isCircular() {
|
||||
@@ -767,7 +786,7 @@ export class PerspectiveTableColumnNode extends PerspectiveTreeNode {
|
||||
get filterInfo(): PerspectiveFilterColumnInfo {
|
||||
return {
|
||||
columnName: this.columnName,
|
||||
filterType: this.filterType,
|
||||
filterBehaviour: this.filterBehaviour,
|
||||
pureName: this.column.pureName,
|
||||
schemaName: this.column.schemaName,
|
||||
foreignKey: this.foreignKey,
|
||||
@@ -777,7 +796,7 @@ export class PerspectiveTableColumnNode extends PerspectiveTreeNode {
|
||||
parseFilterCondition(source = null): Condition {
|
||||
const filter = this.getFilter();
|
||||
if (!filter) return null;
|
||||
const condition = parseFilter(filter, this.filterType);
|
||||
const condition = parseFilter(filter, this.filterBehaviour);
|
||||
if (!condition) return null;
|
||||
return _cloneDeepWith(condition, (expr: Expression) => {
|
||||
if (expr.exprType == 'placeholder') {
|
||||
@@ -949,9 +968,9 @@ export class PerspectivePatternColumnNode extends PerspectiveTreeNode {
|
||||
return !this.isChildColumn;
|
||||
}
|
||||
|
||||
get filterType(): FilterType {
|
||||
if (this.tableColumn) return getFilterType(this.tableColumn.dataType);
|
||||
return 'mongo';
|
||||
get filterBehaviour(): FilterBehaviour {
|
||||
if (this.tableColumn) return detectSqlFilterBehaviour(this.tableColumn.dataType);
|
||||
return mongoFilterBehaviour;
|
||||
}
|
||||
|
||||
get preloadedLevelData() {
|
||||
@@ -1083,7 +1102,7 @@ export class PerspectivePatternColumnNode extends PerspectiveTreeNode {
|
||||
|
||||
return {
|
||||
columnName: this.columnName,
|
||||
filterType: this.filterType,
|
||||
filterBehaviour: this.filterBehaviour,
|
||||
pureName: this.table.pureName,
|
||||
schemaName: this.table.schemaName,
|
||||
foreignKey: this.foreignKey,
|
||||
@@ -1093,7 +1112,7 @@ export class PerspectivePatternColumnNode extends PerspectiveTreeNode {
|
||||
parseFilterCondition(source = null): {} {
|
||||
const filter = this.getFilter();
|
||||
if (!filter) return null;
|
||||
const condition = parseFilter(filter, 'mongo');
|
||||
const condition = parseFilter(filter, mongoFilterBehaviour);
|
||||
if (!condition) return null;
|
||||
return _cloneDeepWith(condition, expr => {
|
||||
if (expr.__placeholder__) {
|
||||
@@ -1150,17 +1169,16 @@ export class PerspectiveTableNode extends PerspectiveTreeNode {
|
||||
}
|
||||
|
||||
getNodeLoadProps(parentRows: any[]): PerspectiveDataLoadProps {
|
||||
const isMongo = isCollectionInfo(this.table);
|
||||
const isDocDb = isCollectionInfo(this.table);
|
||||
return {
|
||||
schemaName: this.table.schemaName,
|
||||
pureName: this.table.pureName,
|
||||
dataColumns: this.getDataLoadColumns(),
|
||||
allColumns: isMongo,
|
||||
allColumns: isDocDb,
|
||||
databaseConfig: this.databaseConfig,
|
||||
orderBy: this.getOrderBy(this.table),
|
||||
sqlCondition: isMongo ? null : this.getChildrenSqlCondition(),
|
||||
mongoCondition: isMongo ? this.getChildrenMongoCondition() : null,
|
||||
engineType: isMongo ? 'docdb' : 'sqldb',
|
||||
sqlCondition: this.getChildrenSqlCondition(),
|
||||
engineType: isDocDb ? 'docdb' : 'sqldb',
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1364,7 +1382,7 @@ export class PerspectiveCustomJoinTreeNode extends PerspectiveTableNode {
|
||||
// console.log('PARENT ROWS', parentRows);
|
||||
|
||||
// console.log('this.getDataLoadColumns()', this.getDataLoadColumns());
|
||||
const isMongo = isCollectionInfo(this.table);
|
||||
const isDocDb = isCollectionInfo(this.table);
|
||||
|
||||
// const bindingValues = [];
|
||||
|
||||
@@ -1424,12 +1442,12 @@ export class PerspectiveCustomJoinTreeNode extends PerspectiveTableNode {
|
||||
bindingColumns: this.getParentMatchColumns(),
|
||||
bindingValues: _uniqBy(bindingValues, x => JSON.stringify(x)),
|
||||
dataColumns: this.getDataLoadColumns(),
|
||||
allColumns: isMongo,
|
||||
allColumns: isDocDb,
|
||||
databaseConfig: this.databaseConfig,
|
||||
orderBy: this.getOrderBy(this.table),
|
||||
sqlCondition: isMongo ? null : this.getChildrenSqlCondition(),
|
||||
mongoCondition: isMongo ? this.getChildrenMongoCondition() : null,
|
||||
engineType: isMongo ? 'docdb' : 'sqldb',
|
||||
sqlCondition: this.getChildrenSqlCondition(),
|
||||
// mongoCondition: isMongo ? this.getChildrenMongoCondition() : null,
|
||||
engineType: isDocDb ? 'docdb' : 'sqldb',
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -21,3 +21,4 @@ export * from './perspectiveTools';
|
||||
export * from './DataDuplicator';
|
||||
export * from './FreeTableGridDisplay';
|
||||
export * from './FreeTableModel';
|
||||
export * from './CustomGridDisplay';
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
},
|
||||
"description": "Opensource database administration tool - web interface",
|
||||
"author": "Jan Prochazka",
|
||||
"license": "MIT",
|
||||
"license": "GPL-3.0",
|
||||
"keywords": [
|
||||
"sql",
|
||||
"dbgate",
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2019 dbshell
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
@@ -26,6 +26,8 @@ async function runAndExit(promise) {
|
||||
}
|
||||
}
|
||||
|
||||
program.version(dbgateApi.currentVersion.version);
|
||||
|
||||
program
|
||||
.option('-s, --server <server>', 'server host')
|
||||
.option('-u, --user <user>', 'user name')
|
||||
@@ -36,7 +38,8 @@ program
|
||||
'--load-data-condition <condition>',
|
||||
'regex, which table data will be loaded and stored in model (in load command)'
|
||||
)
|
||||
.requiredOption('-e, --engine <engine>', 'engine name, eg. mysql@dbgate-plugin-mysql');
|
||||
.option('-e, --engine <engine>', 'engine name, eg. mysql@dbgate-plugin-mysql')
|
||||
.option('--commonjs', 'Creates CommonJS module');
|
||||
|
||||
program
|
||||
.command('deploy <modelFolder>')
|
||||
@@ -115,4 +118,30 @@ program
|
||||
);
|
||||
});
|
||||
|
||||
program
|
||||
.command('json-to-model <jsonFile> <modelFolder>')
|
||||
.description('Converts JSON file to model')
|
||||
.action((jsonFile, modelFolder) => {
|
||||
runAndExit(
|
||||
dbgateApi.jsonToDbModel({
|
||||
modelFile: jsonFile,
|
||||
outputDir: modelFolder,
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
program
|
||||
.command('model-to-json <modelFolder> <jsonFile>')
|
||||
.description('Converts model to JSON file')
|
||||
.action((modelFolder, jsonFile) => {
|
||||
const { commonjs } = program.opts();
|
||||
runAndExit(
|
||||
dbgateApi.dbModelToJson({
|
||||
modelFolder,
|
||||
outputFile: jsonFile,
|
||||
commonjs,
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
program.parse(process.argv);
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
},
|
||||
"description": "Deploy, load or build script from model of SQL database",
|
||||
"author": "Jan Prochazka",
|
||||
"license": "MIT",
|
||||
"license": "GPL-3.0",
|
||||
"bin": {
|
||||
"dbmodel": "./bin/dbmodel.js"
|
||||
},
|
||||
@@ -41,6 +41,6 @@
|
||||
"dbgate-plugin-oracle": "^5.0.0-alpha.1",
|
||||
"dbgate-web": "^5.0.0-alpha.1",
|
||||
"dotenv": "^16.0.0",
|
||||
"pinomin": "^1.0.1"
|
||||
"pinomin": "^1.0.4"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,310 +0,0 @@
|
||||
import P from 'parsimmon';
|
||||
import moment from 'moment';
|
||||
import { FilterType } from './types';
|
||||
import { Condition } from 'dbgate-sqltree';
|
||||
import type { TransformType } from 'dbgate-types';
|
||||
import { interpretEscapes, token, word, whitespace } from './common';
|
||||
|
||||
const compoudCondition = conditionType => conditions => {
|
||||
if (conditions.length == 1) return conditions[0];
|
||||
return {
|
||||
[conditionType]: conditions,
|
||||
};
|
||||
};
|
||||
|
||||
function getTransformCondition(transform: TransformType, value) {
|
||||
return {
|
||||
conditionType: 'binary',
|
||||
operator: '=',
|
||||
left: {
|
||||
exprType: 'transform',
|
||||
transform,
|
||||
expr: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const yearCondition = () => value => {
|
||||
return getTransformCondition('YEAR', value);
|
||||
};
|
||||
|
||||
const yearMonthCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)/);
|
||||
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions: [getTransformCondition('YEAR', m[1]), getTransformCondition('MONTH', m[2])],
|
||||
};
|
||||
};
|
||||
|
||||
const yearMonthDayCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)/);
|
||||
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions: [
|
||||
getTransformCondition('YEAR', m[1]),
|
||||
getTransformCondition('MONTH', m[2]),
|
||||
getTransformCondition('DAY', m[3]),
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
const yearEdge = edgeFunction => value => {
|
||||
return moment(new Date(parseInt(value), 0, 1))
|
||||
[edgeFunction]('year')
|
||||
.format('YYYY-MM-DDTHH:mm:ss.SSS');
|
||||
};
|
||||
|
||||
const yearMonthEdge = edgeFunction => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)/);
|
||||
|
||||
return moment(new Date(parseInt(m[1]), parseInt(m[2]) - 1, 1))
|
||||
[edgeFunction]('month')
|
||||
.format('YYYY-MM-DDTHH:mm:ss.SSS');
|
||||
};
|
||||
|
||||
const yearMonthDayEdge = edgeFunction => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)/);
|
||||
|
||||
return moment(new Date(parseInt(m[1]), parseInt(m[2]) - 1, parseInt(m[3])))
|
||||
[edgeFunction]('day')
|
||||
.format('YYYY-MM-DDTHH:mm:ss.SSS');
|
||||
};
|
||||
|
||||
const yearMonthDayMinuteEdge = edgeFunction => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)\s+(\d\d?):(\d\d?)/);
|
||||
const year = m[1];
|
||||
const month = m[2];
|
||||
const day = m[3];
|
||||
const hour = m[4];
|
||||
const minute = m[5];
|
||||
const dateObject = new Date(year, month - 1, day, hour, minute);
|
||||
|
||||
return moment(dateObject)[edgeFunction]('minute').format('YYYY-MM-DDTHH:mm:ss.SSS');
|
||||
};
|
||||
|
||||
const yearMonthDayMinuteSecondEdge = edgeFunction => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)(T|\s+)(\d\d?):(\d\d?):(\d\d?)/);
|
||||
const year = m[1];
|
||||
const month = m[2];
|
||||
const day = m[3];
|
||||
const hour = m[5];
|
||||
const minute = m[6];
|
||||
const second = m[7];
|
||||
const dateObject = new Date(year, month - 1, day, hour, minute, second);
|
||||
|
||||
return moment(dateObject)[edgeFunction]('second').format('YYYY-MM-DDTHH:mm:ss.SSS');
|
||||
};
|
||||
|
||||
const createIntervalCondition = (start, end) => {
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions: [
|
||||
{
|
||||
conditionType: 'binary',
|
||||
operator: '>=',
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value: start,
|
||||
},
|
||||
},
|
||||
{
|
||||
conditionType: 'binary',
|
||||
operator: '<=',
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value: end,
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
const createDateIntervalCondition = (start, end) => {
|
||||
return createIntervalCondition(start.format('YYYY-MM-DDTHH:mm:ss.SSS'), end.format('YYYY-MM-DDTHH:mm:ss.SSS'));
|
||||
};
|
||||
|
||||
const fixedMomentIntervalCondition = (intervalType, diff) => () => {
|
||||
return createDateIntervalCondition(
|
||||
moment().add(intervalType, diff).startOf(intervalType),
|
||||
moment().add(intervalType, diff).endOf(intervalType)
|
||||
);
|
||||
};
|
||||
|
||||
const yearMonthDayMinuteCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)\s+(\d\d?):(\d\d?)/);
|
||||
const year = m[1];
|
||||
const month = m[2];
|
||||
const day = m[3];
|
||||
const hour = m[4];
|
||||
const minute = m[5];
|
||||
const dateObject = new Date(year, month - 1, day, hour, minute);
|
||||
|
||||
return createDateIntervalCondition(moment(dateObject).startOf('minute'), moment(dateObject).endOf('minute'));
|
||||
};
|
||||
|
||||
const yearMonthDaySecondCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)(T|\s+)(\d\d?):(\d\d?):(\d\d?)/);
|
||||
const year = m[1];
|
||||
const month = m[2];
|
||||
const day = m[3];
|
||||
const hour = m[5];
|
||||
const minute = m[6];
|
||||
const second = m[7];
|
||||
const dateObject = new Date(year, month - 1, day, hour, minute, second);
|
||||
|
||||
return createDateIntervalCondition(moment(dateObject).startOf('second'), moment(dateObject).endOf('second'));
|
||||
};
|
||||
|
||||
const binaryCondition = operator => value => ({
|
||||
conditionType: 'binary',
|
||||
operator,
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value,
|
||||
},
|
||||
});
|
||||
|
||||
const unaryCondition = conditionType => () => {
|
||||
return {
|
||||
conditionType,
|
||||
expr: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
const sqlTemplate = templateSql => {
|
||||
return {
|
||||
conditionType: 'rawTemplate',
|
||||
templateSql,
|
||||
expr: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
const createParser = () => {
|
||||
const langDef = {
|
||||
comma: () => word(','),
|
||||
|
||||
not: () => word('NOT'),
|
||||
notNull: r => r.not.then(r.null).map(unaryCondition('isNotNull')),
|
||||
null: () => word('NULL').map(unaryCondition('isNull')),
|
||||
|
||||
sql: () =>
|
||||
token(P.regexp(/\{(.*?)\}/, 1))
|
||||
.map(sqlTemplate)
|
||||
.desc('sql literal'),
|
||||
|
||||
yearNum: () => P.regexp(/\d\d\d\d/).map(yearCondition()),
|
||||
yearMonthNum: () => P.regexp(/\d\d\d\d-\d\d?/).map(yearMonthCondition()),
|
||||
yearMonthDayNum: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?/).map(yearMonthDayCondition()),
|
||||
yearMonthDayMinute: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?\s+\d\d?:\d\d?/).map(yearMonthDayMinuteCondition()),
|
||||
yearMonthDaySecond: () =>
|
||||
P.regexp(/\d\d\d\d-\d\d?-\d\d?(\s+|T)\d\d?:\d\d?:\d\d?/).map(yearMonthDaySecondCondition()),
|
||||
|
||||
yearNumStart: () => P.regexp(/\d\d\d\d/).map(yearEdge('startOf')),
|
||||
yearNumEnd: () => P.regexp(/\d\d\d\d/).map(yearEdge('endOf')),
|
||||
yearMonthStart: () => P.regexp(/\d\d\d\d-\d\d?/).map(yearMonthEdge('startOf')),
|
||||
yearMonthEnd: () => P.regexp(/\d\d\d\d-\d\d?/).map(yearMonthEdge('endOf')),
|
||||
yearMonthDayStart: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?/).map(yearMonthDayEdge('startOf')),
|
||||
yearMonthDayEnd: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?/).map(yearMonthDayEdge('endOf')),
|
||||
yearMonthDayMinuteStart: () =>
|
||||
P.regexp(/\d\d\d\d-\d\d?-\d\d?\s+\d\d?:\d\d?/).map(yearMonthDayMinuteEdge('startOf')),
|
||||
yearMonthDayMinuteEnd: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?\s+\d\d?:\d\d?/).map(yearMonthDayMinuteEdge('endOf')),
|
||||
yearMonthDayMinuteSecondStart: () =>
|
||||
P.regexp(/\d\d\d\d-\d\d?-\d\d?(\s+|T)\d\d?:\d\d?:\d\d?/).map(yearMonthDayMinuteSecondEdge('startOf')),
|
||||
yearMonthDayMinuteSecondEnd: () =>
|
||||
P.regexp(/\d\d\d\d-\d\d?-\d\d?(\s+|T)\d\d?:\d\d?:\d\d?/).map(yearMonthDayMinuteSecondEdge('endOf')),
|
||||
|
||||
this: () => word('THIS'),
|
||||
last: () => word('LAST'),
|
||||
next: () => word('NEXT'),
|
||||
week: () => word('WEEK'),
|
||||
month: () => word('MONTH'),
|
||||
year: () => word('YEAR'),
|
||||
|
||||
yesterday: () => word('YESTERDAY').map(fixedMomentIntervalCondition('day', -1)),
|
||||
today: () => word('TODAY').map(fixedMomentIntervalCondition('day', 0)),
|
||||
tomorrow: () => word('TOMORROW').map(fixedMomentIntervalCondition('day', 1)),
|
||||
|
||||
lastWeek: r => r.last.then(r.week).map(fixedMomentIntervalCondition('week', -1)),
|
||||
thisWeek: r => r.this.then(r.week).map(fixedMomentIntervalCondition('week', 0)),
|
||||
nextWeek: r => r.next.then(r.week).map(fixedMomentIntervalCondition('week', 1)),
|
||||
|
||||
lastMonth: r => r.last.then(r.month).map(fixedMomentIntervalCondition('month', -1)),
|
||||
thisMonth: r => r.this.then(r.month).map(fixedMomentIntervalCondition('month', 0)),
|
||||
nextMonth: r => r.next.then(r.month).map(fixedMomentIntervalCondition('month', 1)),
|
||||
|
||||
lastYear: r => r.last.then(r.year).map(fixedMomentIntervalCondition('year', -1)),
|
||||
thisYear: r => r.this.then(r.year).map(fixedMomentIntervalCondition('year', 0)),
|
||||
nextYear: r => r.next.then(r.year).map(fixedMomentIntervalCondition('year', 1)),
|
||||
|
||||
valueStart: r =>
|
||||
P.alt(
|
||||
r.yearMonthDayMinuteSecondStart,
|
||||
r.yearMonthDayMinuteStart,
|
||||
r.yearMonthDayStart,
|
||||
r.yearMonthStart,
|
||||
r.yearNumStart
|
||||
),
|
||||
valueEnd: r =>
|
||||
P.alt(r.yearMonthDayMinuteSecondEnd, r.yearMonthDayMinuteEnd, r.yearMonthDayEnd, r.yearMonthEnd, r.yearNumEnd),
|
||||
|
||||
le: r => word('<=').then(r.valueEnd).map(binaryCondition('<=')),
|
||||
ge: r => word('>=').then(r.valueStart).map(binaryCondition('>=')),
|
||||
lt: r => word('<').then(r.valueStart).map(binaryCondition('<')),
|
||||
gt: r => word('>').then(r.valueEnd).map(binaryCondition('>')),
|
||||
|
||||
element: r =>
|
||||
P.alt(
|
||||
r.yearMonthDaySecond,
|
||||
r.yearMonthDayMinute,
|
||||
r.yearMonthDayNum,
|
||||
r.yearMonthNum,
|
||||
r.yearNum,
|
||||
r.yesterday,
|
||||
r.today,
|
||||
r.tomorrow,
|
||||
r.lastWeek,
|
||||
r.thisWeek,
|
||||
r.nextWeek,
|
||||
r.lastMonth,
|
||||
r.thisMonth,
|
||||
r.nextMonth,
|
||||
r.lastYear,
|
||||
r.thisYear,
|
||||
r.nextYear,
|
||||
r.null,
|
||||
r.notNull,
|
||||
r.le,
|
||||
r.lt,
|
||||
r.ge,
|
||||
r.gt,
|
||||
r.sql
|
||||
).trim(whitespace),
|
||||
factor: r => r.element.sepBy(whitespace).map(compoudCondition('$and')),
|
||||
list: r => r.factor.sepBy(r.comma).map(compoudCondition('$or')),
|
||||
};
|
||||
|
||||
return P.createLanguage(langDef);
|
||||
};
|
||||
|
||||
export const datetimeParser = createParser();
|
||||
@@ -1,11 +0,0 @@
|
||||
import { isTypeNumber, isTypeString, isTypeLogical, isTypeDateTime } from 'dbgate-tools';
|
||||
import { FilterType } from './types';
|
||||
|
||||
export function getFilterType(dataType: string): FilterType {
|
||||
if (!dataType) return 'string';
|
||||
if (isTypeNumber(dataType)) return 'number';
|
||||
if (isTypeString(dataType)) return 'string';
|
||||
if (isTypeLogical(dataType)) return 'logical';
|
||||
if (isTypeDateTime(dataType)) return 'datetime';
|
||||
return 'string';
|
||||
}
|
||||
@@ -1,3 +1,2 @@
|
||||
export * from './parseFilter';
|
||||
export * from './getFilterType';
|
||||
export * from './filterTool';
|
||||
|
||||
@@ -1,151 +0,0 @@
|
||||
import P from 'parsimmon';
|
||||
import { interpretEscapes, token, word, whitespace } from './common';
|
||||
|
||||
const operatorCondition = operator => value => ({
|
||||
__placeholder__: {
|
||||
[operator]: value,
|
||||
},
|
||||
});
|
||||
|
||||
const regexCondition = regexString => value => ({
|
||||
__placeholder__: {
|
||||
$regex: regexString.replace('#VALUE#', value),
|
||||
$options: 'i',
|
||||
},
|
||||
});
|
||||
|
||||
const numberTestCondition = () => value => ({
|
||||
$or: [
|
||||
{
|
||||
__placeholder__: {
|
||||
$regex: `.*${value}.*`,
|
||||
$options: 'i',
|
||||
},
|
||||
},
|
||||
{
|
||||
__placeholder__: value,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const idRegex = /[('"]([0-9a-f]{24})['")]/;
|
||||
|
||||
const objectIdTestCondition = () => value => ({
|
||||
$or: [
|
||||
{
|
||||
__placeholder__: { $oid: value.match(idRegex)[1] },
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const testCondition = (operator, value) => () => ({
|
||||
__placeholder__: {
|
||||
[operator]: value,
|
||||
},
|
||||
});
|
||||
|
||||
const multiTestCondition = condition => () => ({
|
||||
__placeholder__: condition,
|
||||
});
|
||||
|
||||
const compoudCondition = conditionType => conditions => {
|
||||
if (conditions.length == 1) return conditions[0];
|
||||
return {
|
||||
[conditionType]: conditions,
|
||||
};
|
||||
};
|
||||
|
||||
const negateCondition = condition => ({
|
||||
__placeholder__: {
|
||||
$not: condition.__placeholder__,
|
||||
},
|
||||
});
|
||||
|
||||
const createParser = () => {
|
||||
const langDef = {
|
||||
string1: () =>
|
||||
token(P.regexp(/"((?:\\.|.)*?)"/, 1))
|
||||
.map(interpretEscapes)
|
||||
.desc('string quoted'),
|
||||
|
||||
string2: () =>
|
||||
token(P.regexp(/'((?:\\.|.)*?)'/, 1))
|
||||
.map(interpretEscapes)
|
||||
.desc('string quoted'),
|
||||
|
||||
number: () =>
|
||||
token(P.regexp(/-?(0|[1-9][0-9]*)([.][0-9]+)?([eE][+-]?[0-9]+)?/))
|
||||
.map(Number)
|
||||
.desc('number'),
|
||||
|
||||
objectid: () => token(P.regexp(/ObjectId\(['"]?[0-9a-f]{24}['"]?\)/)).desc('ObjectId'),
|
||||
|
||||
noQuotedString: () => P.regexp(/[^\s^,^'^"]+/).desc('string unquoted'),
|
||||
|
||||
value: r => P.alt(r.objectid, r.string1, r.string2, r.number, r.noQuotedString),
|
||||
valueTestObjectId: r => r.objectid.map(objectIdTestCondition()),
|
||||
valueTestNum: r => r.number.map(numberTestCondition()),
|
||||
valueTest: r => r.value.map(regexCondition('.*#VALUE#.*')),
|
||||
|
||||
comma: () => word(','),
|
||||
not: () => word('NOT'),
|
||||
empty: () => word('EMPTY'),
|
||||
array: () => word('ARRAY'),
|
||||
notExists: r => r.not.then(r.exists).map(testCondition('$exists', false)),
|
||||
notEmptyArray: r =>
|
||||
r.not
|
||||
.then(r.empty)
|
||||
.then(r.array)
|
||||
.map(multiTestCondition({ $exists: true, $type: 'array', $ne: [] })),
|
||||
emptyArray: r => r.empty.then(r.array).map(multiTestCondition({ $exists: true, $eq: [] })),
|
||||
exists: () => word('EXISTS').map(testCondition('$exists', true)),
|
||||
true: () => word('TRUE').map(testCondition('$eq', true)),
|
||||
false: () => word('FALSE').map(testCondition('$eq', false)),
|
||||
|
||||
eq: r => word('=').then(r.value).map(operatorCondition('$eq')),
|
||||
ne: r => word('!=').then(r.value).map(operatorCondition('$ne')),
|
||||
ne2: r => word('<>').then(r.value).map(operatorCondition('$ne')),
|
||||
lt: r => word('<').then(r.value).map(operatorCondition('$lt')),
|
||||
gt: r => word('>').then(r.value).map(operatorCondition('$gt')),
|
||||
le: r => word('<=').then(r.value).map(operatorCondition('$lte')),
|
||||
ge: r => word('>=').then(r.value).map(operatorCondition('$gte')),
|
||||
startsWith: r => word('^').then(r.value).map(regexCondition('#VALUE#.*')),
|
||||
endsWith: r => word('$').then(r.value).map(regexCondition('.*#VALUE#')),
|
||||
contains: r => word('+').then(r.value).map(regexCondition('.*#VALUE#.*')),
|
||||
startsWithNot: r => word('!^').then(r.value).map(regexCondition('#VALUE#.*')).map(negateCondition),
|
||||
endsWithNot: r => word('!$').then(r.value).map(regexCondition('.*#VALUE#')).map(negateCondition),
|
||||
containsNot: r => word('~').then(r.value).map(regexCondition('.*#VALUE#.*')).map(negateCondition),
|
||||
|
||||
element: r =>
|
||||
P.alt(
|
||||
r.exists,
|
||||
r.notExists,
|
||||
r.true,
|
||||
r.false,
|
||||
r.eq,
|
||||
r.ne,
|
||||
r.ne2,
|
||||
r.lt,
|
||||
r.gt,
|
||||
r.le,
|
||||
r.ge,
|
||||
r.notEmptyArray,
|
||||
r.emptyArray,
|
||||
r.startsWith,
|
||||
r.endsWith,
|
||||
r.contains,
|
||||
r.startsWithNot,
|
||||
r.endsWithNot,
|
||||
r.containsNot,
|
||||
r.valueTestObjectId,
|
||||
r.valueTestNum,
|
||||
r.valueTest
|
||||
).trim(whitespace),
|
||||
factor: r => r.element.sepBy(whitespace).map(compoudCondition('$and')),
|
||||
list: r => r.factor.sepBy(r.comma).map(compoudCondition('$or')),
|
||||
};
|
||||
|
||||
return P.createLanguage(langDef);
|
||||
};
|
||||
|
||||
export const mongoParser = createParser();
|
||||
@@ -1,23 +1,56 @@
|
||||
import P from 'parsimmon';
|
||||
import moment from 'moment';
|
||||
import { FilterType } from './types';
|
||||
import { Condition } from 'dbgate-sqltree';
|
||||
import { interpretEscapes, token, word, whitespace } from './common';
|
||||
import { mongoParser } from './mongoParser';
|
||||
import { datetimeParser } from './datetimeParser';
|
||||
import { hexStringToArray } from 'dbgate-tools';
|
||||
import { FilterBehaviour, TransformType } from 'dbgate-types';
|
||||
|
||||
const binaryCondition = operator => value => ({
|
||||
conditionType: 'binary',
|
||||
operator,
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value,
|
||||
},
|
||||
});
|
||||
const binaryCondition =
|
||||
(operator, numberDualTesting = false) =>
|
||||
value => {
|
||||
const numValue = parseFloat(value);
|
||||
if (numberDualTesting && !isNaN(numValue)) {
|
||||
return {
|
||||
conditionType: 'or',
|
||||
conditions: [
|
||||
{
|
||||
conditionType: 'binary',
|
||||
operator,
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value,
|
||||
},
|
||||
},
|
||||
{
|
||||
conditionType: 'binary',
|
||||
operator,
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value: numValue,
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
conditionType: 'binary',
|
||||
operator,
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value,
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
const likeCondition = (conditionType, likeString) => value => ({
|
||||
conditionType,
|
||||
@@ -68,6 +101,57 @@ const negateCondition = condition => {
|
||||
};
|
||||
};
|
||||
|
||||
const numberTestCondition = () => value => {
|
||||
return {
|
||||
conditionType: 'or',
|
||||
conditions: [
|
||||
{
|
||||
conditionType: 'like',
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value: `.*${value}.*`,
|
||||
},
|
||||
},
|
||||
{
|
||||
conditionType: 'binary',
|
||||
operator: '=',
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value,
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
const idRegex = /[('"]([0-9a-f]{24})['")]/;
|
||||
|
||||
const objectIdTestCondition = () => value => ({
|
||||
conditionType: 'binary',
|
||||
operator: '=',
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value: { $oid: value.match(idRegex)[1] },
|
||||
},
|
||||
});
|
||||
|
||||
const specificPredicateCondition = predicate => () => ({
|
||||
conditionType: 'specificPredicate',
|
||||
predicate,
|
||||
expr: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
});
|
||||
|
||||
const sqlTemplate = templateSql => {
|
||||
return {
|
||||
conditionType: 'rawTemplate',
|
||||
@@ -78,7 +162,164 @@ const sqlTemplate = templateSql => {
|
||||
};
|
||||
};
|
||||
|
||||
const createParser = (filterType: FilterType) => {
|
||||
function getTransformCondition(transform: TransformType, value) {
|
||||
return {
|
||||
conditionType: 'binary',
|
||||
operator: '=',
|
||||
left: {
|
||||
exprType: 'transform',
|
||||
transform,
|
||||
expr: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const yearCondition = () => value => {
|
||||
return getTransformCondition('YEAR', value);
|
||||
};
|
||||
|
||||
const yearMonthCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)/);
|
||||
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions: [getTransformCondition('YEAR', m[1]), getTransformCondition('MONTH', m[2])],
|
||||
};
|
||||
};
|
||||
|
||||
const yearMonthDayCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)/);
|
||||
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions: [
|
||||
getTransformCondition('YEAR', m[1]),
|
||||
getTransformCondition('MONTH', m[2]),
|
||||
getTransformCondition('DAY', m[3]),
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
const yearEdge = edgeFunction => value => {
|
||||
return moment(new Date(parseInt(value), 0, 1))
|
||||
[edgeFunction]('year')
|
||||
.format('YYYY-MM-DDTHH:mm:ss.SSS');
|
||||
};
|
||||
|
||||
const yearMonthEdge = edgeFunction => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)/);
|
||||
|
||||
return moment(new Date(parseInt(m[1]), parseInt(m[2]) - 1, 1))
|
||||
[edgeFunction]('month')
|
||||
.format('YYYY-MM-DDTHH:mm:ss.SSS');
|
||||
};
|
||||
|
||||
const yearMonthDayEdge = edgeFunction => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)/);
|
||||
|
||||
return moment(new Date(parseInt(m[1]), parseInt(m[2]) - 1, parseInt(m[3])))
|
||||
[edgeFunction]('day')
|
||||
.format('YYYY-MM-DDTHH:mm:ss.SSS');
|
||||
};
|
||||
|
||||
const yearMonthDayMinuteEdge = edgeFunction => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)\s+(\d\d?):(\d\d?)/);
|
||||
const year = m[1];
|
||||
const month = m[2];
|
||||
const day = m[3];
|
||||
const hour = m[4];
|
||||
const minute = m[5];
|
||||
const dateObject = new Date(year, month - 1, day, hour, minute);
|
||||
|
||||
return moment(dateObject)[edgeFunction]('minute').format('YYYY-MM-DDTHH:mm:ss.SSS');
|
||||
};
|
||||
|
||||
const yearMonthDayMinuteSecondEdge = edgeFunction => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)(T|\s+)(\d\d?):(\d\d?):(\d\d?)/);
|
||||
const year = m[1];
|
||||
const month = m[2];
|
||||
const day = m[3];
|
||||
const hour = m[5];
|
||||
const minute = m[6];
|
||||
const second = m[7];
|
||||
const dateObject = new Date(year, month - 1, day, hour, minute, second);
|
||||
|
||||
return moment(dateObject)[edgeFunction]('second').format('YYYY-MM-DDTHH:mm:ss.SSS');
|
||||
};
|
||||
|
||||
const createIntervalCondition = (start, end) => {
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions: [
|
||||
{
|
||||
conditionType: 'binary',
|
||||
operator: '>=',
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value: start,
|
||||
},
|
||||
},
|
||||
{
|
||||
conditionType: 'binary',
|
||||
operator: '<=',
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value: end,
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
const createDateIntervalCondition = (start, end) => {
|
||||
return createIntervalCondition(start.format('YYYY-MM-DDTHH:mm:ss.SSS'), end.format('YYYY-MM-DDTHH:mm:ss.SSS'));
|
||||
};
|
||||
|
||||
const fixedMomentIntervalCondition = (intervalType, diff) => () => {
|
||||
return createDateIntervalCondition(
|
||||
moment().add(intervalType, diff).startOf(intervalType),
|
||||
moment().add(intervalType, diff).endOf(intervalType)
|
||||
);
|
||||
};
|
||||
|
||||
const yearMonthDayMinuteCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)\s+(\d\d?):(\d\d?)/);
|
||||
const year = m[1];
|
||||
const month = m[2];
|
||||
const day = m[3];
|
||||
const hour = m[4];
|
||||
const minute = m[5];
|
||||
const dateObject = new Date(year, month - 1, day, hour, minute);
|
||||
|
||||
return createDateIntervalCondition(moment(dateObject).startOf('minute'), moment(dateObject).endOf('minute'));
|
||||
};
|
||||
|
||||
const yearMonthDaySecondCondition = () => value => {
|
||||
const m = value.match(/(\d\d\d\d)-(\d\d?)-(\d\d?)(T|\s+)(\d\d?):(\d\d?):(\d\d?)/);
|
||||
const year = m[1];
|
||||
const month = m[2];
|
||||
const day = m[3];
|
||||
const hour = m[5];
|
||||
const minute = m[6];
|
||||
const second = m[7];
|
||||
const dateObject = new Date(year, month - 1, day, hour, minute, second);
|
||||
|
||||
return createDateIntervalCondition(moment(dateObject).startOf('second'), moment(dateObject).endOf('second'));
|
||||
};
|
||||
|
||||
const createParser = (filterBehaviour: FilterBehaviour) => {
|
||||
const langDef = {
|
||||
string1: () =>
|
||||
token(P.regexp(/"((?:\\.|.)*?)"/, 1))
|
||||
@@ -105,6 +346,8 @@ const createParser = (filterType: FilterType) => {
|
||||
.map(Number)
|
||||
.desc('number'),
|
||||
|
||||
objectid: () => token(P.regexp(/ObjectId\(['"]?[0-9a-f]{24}['"]?\)/)).desc('ObjectId'),
|
||||
|
||||
hexstring: () =>
|
||||
token(P.regexp(/0x(([0-9a-fA-F][0-9a-fA-F])+)/, 1))
|
||||
.map(x => ({
|
||||
@@ -124,25 +367,94 @@ const createParser = (filterType: FilterType) => {
|
||||
valueTestEq: r => r.value.map(binaryCondition('=')),
|
||||
hexTestEq: r => r.hexstring.map(binaryCondition('=')),
|
||||
valueTestStr: r => r.value.map(likeCondition('like', '%#VALUE#%')),
|
||||
valueTestNum: r => r.number.map(numberTestCondition()),
|
||||
valueTestObjectId: r => r.objectid.map(objectIdTestCondition()),
|
||||
|
||||
notExists: r => r.not.then(r.exists).map(specificPredicateCondition('notExists')),
|
||||
notEmptyArray: r => r.not.then(r.empty).then(r.array).map(specificPredicateCondition('notEmptyArray')),
|
||||
emptyArray: r => r.empty.then(r.array).map(specificPredicateCondition('emptyArray')),
|
||||
exists: () => word('EXISTS').map(specificPredicateCondition('exists')),
|
||||
|
||||
this: () => word('THIS'),
|
||||
last: () => word('LAST'),
|
||||
next: () => word('NEXT'),
|
||||
week: () => word('WEEK'),
|
||||
month: () => word('MONTH'),
|
||||
year: () => word('YEAR'),
|
||||
|
||||
yesterday: () => word('YESTERDAY').map(fixedMomentIntervalCondition('day', -1)),
|
||||
today: () => word('TODAY').map(fixedMomentIntervalCondition('day', 0)),
|
||||
tomorrow: () => word('TOMORROW').map(fixedMomentIntervalCondition('day', 1)),
|
||||
|
||||
lastWeek: r => r.last.then(r.week).map(fixedMomentIntervalCondition('week', -1)),
|
||||
thisWeek: r => r.this.then(r.week).map(fixedMomentIntervalCondition('week', 0)),
|
||||
nextWeek: r => r.next.then(r.week).map(fixedMomentIntervalCondition('week', 1)),
|
||||
|
||||
lastMonth: r => r.last.then(r.month).map(fixedMomentIntervalCondition('month', -1)),
|
||||
thisMonth: r => r.this.then(r.month).map(fixedMomentIntervalCondition('month', 0)),
|
||||
nextMonth: r => r.next.then(r.month).map(fixedMomentIntervalCondition('month', 1)),
|
||||
|
||||
lastYear: r => r.last.then(r.year).map(fixedMomentIntervalCondition('year', -1)),
|
||||
thisYear: r => r.this.then(r.year).map(fixedMomentIntervalCondition('year', 0)),
|
||||
nextYear: r => r.next.then(r.year).map(fixedMomentIntervalCondition('year', 1)),
|
||||
|
||||
dateValueStart: r =>
|
||||
P.alt(
|
||||
r.yearMonthDayMinuteSecondStart,
|
||||
r.yearMonthDayMinuteStart,
|
||||
r.yearMonthDayStart,
|
||||
r.yearMonthStart,
|
||||
r.yearNumStart
|
||||
),
|
||||
dateValueEnd: r =>
|
||||
P.alt(r.yearMonthDayMinuteSecondEnd, r.yearMonthDayMinuteEnd, r.yearMonthDayEnd, r.yearMonthEnd, r.yearNumEnd),
|
||||
|
||||
dateLe: r => word('<=').then(r.dateValueEnd).map(binaryCondition('<=')),
|
||||
dateGe: r => word('>=').then(r.dateValueStart).map(binaryCondition('>=')),
|
||||
dateLt: r => word('<').then(r.dateValueStart).map(binaryCondition('<')),
|
||||
dateGt: r => word('>').then(r.dateValueEnd).map(binaryCondition('>')),
|
||||
|
||||
yearNum: () => P.regexp(/\d\d\d\d/).map(yearCondition()),
|
||||
yearMonthNum: () => P.regexp(/\d\d\d\d-\d\d?/).map(yearMonthCondition()),
|
||||
yearMonthDayNum: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?/).map(yearMonthDayCondition()),
|
||||
yearMonthDayMinute: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?\s+\d\d?:\d\d?/).map(yearMonthDayMinuteCondition()),
|
||||
yearMonthDaySecond: () =>
|
||||
P.regexp(/\d\d\d\d-\d\d?-\d\d?(\s+|T)\d\d?:\d\d?:\d\d?/).map(yearMonthDaySecondCondition()),
|
||||
|
||||
yearNumStart: () => P.regexp(/\d\d\d\d/).map(yearEdge('startOf')),
|
||||
yearNumEnd: () => P.regexp(/\d\d\d\d/).map(yearEdge('endOf')),
|
||||
yearMonthStart: () => P.regexp(/\d\d\d\d-\d\d?/).map(yearMonthEdge('startOf')),
|
||||
yearMonthEnd: () => P.regexp(/\d\d\d\d-\d\d?/).map(yearMonthEdge('endOf')),
|
||||
yearMonthDayStart: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?/).map(yearMonthDayEdge('startOf')),
|
||||
yearMonthDayEnd: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?/).map(yearMonthDayEdge('endOf')),
|
||||
yearMonthDayMinuteStart: () =>
|
||||
P.regexp(/\d\d\d\d-\d\d?-\d\d?\s+\d\d?:\d\d?/).map(yearMonthDayMinuteEdge('startOf')),
|
||||
yearMonthDayMinuteEnd: () => P.regexp(/\d\d\d\d-\d\d?-\d\d?\s+\d\d?:\d\d?/).map(yearMonthDayMinuteEdge('endOf')),
|
||||
yearMonthDayMinuteSecondStart: () =>
|
||||
P.regexp(/\d\d\d\d-\d\d?-\d\d?(\s+|T)\d\d?:\d\d?:\d\d?/).map(yearMonthDayMinuteSecondEdge('startOf')),
|
||||
yearMonthDayMinuteSecondEnd: () =>
|
||||
P.regexp(/\d\d\d\d-\d\d?-\d\d?(\s+|T)\d\d?:\d\d?:\d\d?/).map(yearMonthDayMinuteSecondEdge('endOf')),
|
||||
|
||||
comma: () => word(','),
|
||||
not: () => word('NOT'),
|
||||
empty: () => word('EMPTY'),
|
||||
array: () => word('ARRAY'),
|
||||
notNull: r => r.not.then(r.null).map(unaryCondition('isNotNull')),
|
||||
null: () => word('NULL').map(unaryCondition('isNull')),
|
||||
empty: () => word('EMPTY').map(unaryCondition('isEmpty')),
|
||||
notEmpty: r => r.not.then(r.empty).map(unaryCondition('isNotEmpty')),
|
||||
isEmpty: r => r.empty.map(unaryCondition('isEmpty')),
|
||||
isNotEmpty: r => r.not.then(r.empty).map(unaryCondition('isNotEmpty')),
|
||||
true: () => P.regexp(/true/i).map(binaryFixedValueCondition('1')),
|
||||
false: () => P.regexp(/false/i).map(binaryFixedValueCondition('0')),
|
||||
trueNum: () => word('1').map(binaryFixedValueCondition('1')),
|
||||
falseNum: () => word('0').map(binaryFixedValueCondition('0')),
|
||||
|
||||
eq: r => word('=').then(r.value).map(binaryCondition('=')),
|
||||
ne: r => word('!=').then(r.value).map(binaryCondition('<>')),
|
||||
ne2: r => word('<>').then(r.value).map(binaryCondition('<>')),
|
||||
le: r => word('<=').then(r.value).map(binaryCondition('<=')),
|
||||
ge: r => word('>=').then(r.value).map(binaryCondition('>=')),
|
||||
lt: r => word('<').then(r.value).map(binaryCondition('<')),
|
||||
gt: r => word('>').then(r.value).map(binaryCondition('>')),
|
||||
eq: r => word('=').then(r.value).map(binaryCondition('=', filterBehaviour.allowNumberDualTesting)),
|
||||
ne: r => word('!=').then(r.value).map(binaryCondition('<>', filterBehaviour.allowNumberDualTesting)),
|
||||
ne2: r => word('<>').then(r.value).map(binaryCondition('<>', filterBehaviour.allowNumberDualTesting)),
|
||||
le: r => word('<=').then(r.value).map(binaryCondition('<=', filterBehaviour.allowNumberDualTesting)),
|
||||
ge: r => word('>=').then(r.value).map(binaryCondition('>=', filterBehaviour.allowNumberDualTesting)),
|
||||
lt: r => word('<').then(r.value).map(binaryCondition('<', filterBehaviour.allowNumberDualTesting)),
|
||||
gt: r => word('>').then(r.value).map(binaryCondition('>', filterBehaviour.allowNumberDualTesting)),
|
||||
startsWith: r => word('^').then(r.value).map(likeCondition('like', '#VALUE#%')),
|
||||
endsWith: r => word('$').then(r.value).map(likeCondition('like', '%#VALUE#')),
|
||||
contains: r => word('+').then(r.value).map(likeCondition('like', '%#VALUE#%')),
|
||||
@@ -156,32 +468,93 @@ const createParser = (filterType: FilterType) => {
|
||||
};
|
||||
|
||||
const allowedValues = []; // 'string1', 'string2', 'number', 'noQuotedString'];
|
||||
if (filterType == 'string' || filterType == 'eval') {
|
||||
|
||||
if (filterBehaviour.allowStringToken) {
|
||||
allowedValues.push('string1', 'string2', 'noQuotedString');
|
||||
}
|
||||
if (filterType == 'number') {
|
||||
if (filterBehaviour.allowNumberToken) {
|
||||
allowedValues.push('string1Num', 'string2Num', 'number');
|
||||
}
|
||||
|
||||
const allowedElements = ['null', 'notNull', 'eq', 'ne', 'ne2', 'sql'];
|
||||
if (filterType == 'number' || filterType == 'datetime' || filterType == 'eval') {
|
||||
const allowedElements = [];
|
||||
|
||||
if (filterBehaviour.supportDatetimeComparison) {
|
||||
allowedElements.push('yearMonthDaySecond', 'yearMonthDayMinute', 'yearMonthDayNum', 'yearMonthNum', 'yearNum');
|
||||
}
|
||||
|
||||
if (filterBehaviour.supportDatetimeSymbols) {
|
||||
allowedElements.push(
|
||||
'today',
|
||||
'tomorrow',
|
||||
'lastWeek',
|
||||
'thisWeek',
|
||||
'nextWeek',
|
||||
'lastMonth',
|
||||
'thisMonth',
|
||||
'nextMonth',
|
||||
'lastYear',
|
||||
'thisYear',
|
||||
'nextYear'
|
||||
);
|
||||
}
|
||||
|
||||
if (filterBehaviour.supportDatetimeComparison) {
|
||||
allowedElements.push('dateLe', 'dateGe', 'dateLt', 'dateGt');
|
||||
}
|
||||
|
||||
if (filterBehaviour.supportExistsTesting) {
|
||||
allowedElements.push('exists', 'notExists');
|
||||
}
|
||||
|
||||
if (filterBehaviour.supportArrayTesting) {
|
||||
allowedElements.push('emptyArray', 'notEmptyArray');
|
||||
}
|
||||
|
||||
if (filterBehaviour.supportNullTesting) {
|
||||
allowedElements.push('null', 'notNull');
|
||||
}
|
||||
|
||||
if (filterBehaviour.supportEquals) {
|
||||
allowedElements.push('eq', 'ne', 'ne2');
|
||||
}
|
||||
|
||||
if (filterBehaviour.supportSqlCondition) {
|
||||
allowedElements.push('sql');
|
||||
}
|
||||
|
||||
if (filterBehaviour.supportNumberLikeComparison || filterBehaviour.supportDatetimeComparison) {
|
||||
allowedElements.push('le', 'ge', 'lt', 'gt');
|
||||
}
|
||||
if (filterType == 'string') {
|
||||
allowedElements.push('empty', 'notEmpty', 'hexTestEq');
|
||||
|
||||
if (filterBehaviour.supportEmpty) {
|
||||
allowedElements.push('isEmpty', 'isNotEmpty');
|
||||
}
|
||||
if (filterType == 'eval' || filterType == 'string') {
|
||||
|
||||
if (filterBehaviour.allowHexString) {
|
||||
allowedElements.push('hexTestEq');
|
||||
}
|
||||
|
||||
if (filterBehaviour.supportStringInclusion) {
|
||||
allowedElements.push('startsWith', 'endsWith', 'contains', 'startsWithNot', 'endsWithNot', 'containsNot');
|
||||
}
|
||||
if (filterType == 'logical') {
|
||||
allowedElements.push('true', 'false', 'trueNum', 'falseNum');
|
||||
if (filterBehaviour.supportBooleanValues) {
|
||||
if (filterBehaviour.allowNumberToken || filterBehaviour.allowStringToken) {
|
||||
allowedElements.push('true', 'false');
|
||||
} else {
|
||||
allowedElements.push('true', 'false', 'trueNum', 'falseNum');
|
||||
}
|
||||
}
|
||||
if (filterType == 'eval') {
|
||||
allowedElements.push('true', 'false');
|
||||
|
||||
if (filterBehaviour.allowNumberDualTesting) {
|
||||
allowedElements.push('valueTestNum');
|
||||
}
|
||||
|
||||
if (filterBehaviour.allowObjectIdTesting) {
|
||||
allowedElements.push('valueTestObjectId');
|
||||
}
|
||||
|
||||
// must be last
|
||||
if (filterType == 'string' || filterType == 'eval') {
|
||||
if (filterBehaviour.allowStringToken) {
|
||||
allowedElements.push('valueTestStr');
|
||||
} else {
|
||||
allowedElements.push('valueTestEq');
|
||||
@@ -190,18 +563,21 @@ const createParser = (filterType: FilterType) => {
|
||||
return P.createLanguage(langDef);
|
||||
};
|
||||
|
||||
const parsers = {
|
||||
number: createParser('number'),
|
||||
string: createParser('string'),
|
||||
logical: createParser('logical'),
|
||||
eval: createParser('eval'),
|
||||
mongo: mongoParser,
|
||||
datetime: datetimeParser,
|
||||
};
|
||||
const cachedFilters: { [key: string]: P.Language } = {};
|
||||
|
||||
export function parseFilter(value: string, filterType: FilterType): Condition {
|
||||
// console.log('PARSING', value, 'WITH', filterType);
|
||||
const ast = parsers[filterType].list.tryParse(value);
|
||||
function getParser(filterBehaviour: FilterBehaviour) {
|
||||
const key = JSON.stringify(filterBehaviour);
|
||||
if (!cachedFilters[key]) {
|
||||
cachedFilters[key] = createParser(filterBehaviour);
|
||||
}
|
||||
return cachedFilters[key];
|
||||
}
|
||||
|
||||
export function parseFilter(value: string, filterBehaviour: FilterBehaviour): Condition {
|
||||
const parser = getParser(filterBehaviour);
|
||||
// console.log('value', value);
|
||||
// console.log('filterBehaviour', filterBehaviour);
|
||||
const ast = parser.list.tryParse(value);
|
||||
// console.log('AST', ast);
|
||||
return ast;
|
||||
}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
const { parseFilter } = require('./parseFilter');
|
||||
const { stringFilterBehaviour } = require('dbgate-tools');
|
||||
|
||||
test('parse string', () => {
|
||||
const ast = parseFilter('"123"', 'string');
|
||||
const ast = parseFilter('"123"', stringFilterBehaviour);
|
||||
console.log(JSON.stringify(ast));
|
||||
expect(ast).toEqual({
|
||||
conditionType: 'like',
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
// import types from 'dbgate-types';
|
||||
|
||||
export type FilterType = 'number' | 'string' | 'datetime' | 'logical' | 'eval' | 'mongo';
|
||||
@@ -8,7 +8,7 @@
|
||||
},
|
||||
"description": "Opensource database administration tool - web interface",
|
||||
"author": "Jan Prochazka",
|
||||
"license": "MIT",
|
||||
"license": "GPL-3.0",
|
||||
"bin": {
|
||||
"dbgate-serve": "./bin/dbgate-serve.js"
|
||||
},
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
},
|
||||
"author": "Jan Prochazka",
|
||||
"license": "MIT",
|
||||
"license": "GPL-3.0",
|
||||
"keywords": [
|
||||
"sql",
|
||||
"mssql",
|
||||
|
||||
@@ -15,7 +15,7 @@ export function dumpSqlSelect(dmp: SqlDumper, cmd: Select) {
|
||||
if (cmd.selectAll) {
|
||||
dmp.put('* ');
|
||||
}
|
||||
if (cmd.columns) {
|
||||
if (cmd.columns && cmd.columns.length > 0) {
|
||||
if (cmd.selectAll) dmp.put('&n,');
|
||||
dmp.put('&>&n');
|
||||
dmp.putCollection(',&n', cmd.columns, fld => {
|
||||
@@ -92,13 +92,28 @@ export function dumpSqlDelete(dmp: SqlDumper, cmd: Delete) {
|
||||
}
|
||||
|
||||
export function dumpSqlInsert(dmp: SqlDumper, cmd: Insert) {
|
||||
dmp.put(
|
||||
'^insert ^into %f (%,i) ^values (',
|
||||
cmd.targetTable,
|
||||
cmd.fields.map(x => x.targetColumn)
|
||||
);
|
||||
dmp.putCollection(',', cmd.fields, x => dumpSqlExpression(dmp, x));
|
||||
dmp.put(')');
|
||||
if (cmd.insertWhereNotExistsCondition) {
|
||||
dmp.put(
|
||||
'^insert ^into %f (%,i) ^select ',
|
||||
cmd.targetTable,
|
||||
cmd.fields.map(x => x.targetColumn)
|
||||
);
|
||||
dmp.putCollection(',', cmd.fields, x => dumpSqlExpression(dmp, x));
|
||||
if (dmp.dialect.requireFromDual) {
|
||||
dmp.put(' ^from ^dual ');
|
||||
}
|
||||
dmp.put(' ^where ^not ^exists (^select * ^from %f ^where ', cmd.targetTable);
|
||||
dumpSqlCondition(dmp, cmd.insertWhereNotExistsCondition);
|
||||
dmp.put(')');
|
||||
} else {
|
||||
dmp.put(
|
||||
'^insert ^into %f (%,i) ^values (',
|
||||
cmd.targetTable,
|
||||
cmd.fields.map(x => x.targetColumn)
|
||||
);
|
||||
dmp.putCollection(',', cmd.fields, x => dumpSqlExpression(dmp, x));
|
||||
dmp.put(')');
|
||||
}
|
||||
}
|
||||
|
||||
export function dumpSqlCommand(dmp: SqlDumper, cmd: Command) {
|
||||
|
||||
@@ -68,10 +68,17 @@ export function dumpSqlCondition(dmp: SqlDumper, condition: Condition) {
|
||||
dmp.put(' ^and ');
|
||||
dumpSqlExpression(dmp, condition.right);
|
||||
break;
|
||||
case 'expression':
|
||||
dumpSqlExpression(dmp, condition.expr);
|
||||
break;
|
||||
case 'in':
|
||||
dumpSqlExpression(dmp, condition.expr);
|
||||
dmp.put(' ^in (%,v)', condition.values);
|
||||
break;
|
||||
case 'notIn':
|
||||
dumpSqlExpression(dmp, condition.expr);
|
||||
dmp.put(' ^not ^in (%,v)', condition.values);
|
||||
break;
|
||||
case 'rawTemplate':
|
||||
let was = false;
|
||||
for (const item of condition.templateSql.split('$$')) {
|
||||
|
||||
@@ -28,6 +28,12 @@ export function dumpSqlExpression(dmp: SqlDumper, expr: Expression) {
|
||||
dmp.put('%s', expr.sql);
|
||||
break;
|
||||
|
||||
case 'unaryRaw':
|
||||
if (expr.beforeSql) dmp.putRaw(expr.beforeSql);
|
||||
dumpSqlExpression(dmp, expr.expr);
|
||||
if (expr.afterSql) dmp.putRaw(expr.afterSql);
|
||||
break;
|
||||
|
||||
case 'call':
|
||||
dmp.put('%s(', expr.func);
|
||||
if (expr.argsPrefix) dmp.put('%s ', expr.argsPrefix);
|
||||
@@ -36,7 +42,7 @@ export function dumpSqlExpression(dmp: SqlDumper, expr: Expression) {
|
||||
break;
|
||||
|
||||
case 'methodCall':
|
||||
dumpSqlExpression(dmp, expr.thisObject)
|
||||
dumpSqlExpression(dmp, expr.thisObject);
|
||||
dmp.put('.%s(', expr.method);
|
||||
dmp.putCollection(',', expr.args, x => dumpSqlExpression(dmp, x));
|
||||
dmp.put(')');
|
||||
|
||||
@@ -38,6 +38,7 @@ export interface Insert {
|
||||
commandType: 'insert';
|
||||
fields: UpdateField[];
|
||||
targetTable: NamedObjectInfo;
|
||||
insertWhereNotExistsCondition?: Condition;
|
||||
}
|
||||
|
||||
export interface AllowIdentityInsert {
|
||||
@@ -56,6 +57,11 @@ export interface UnaryCondition {
|
||||
expr: Expression;
|
||||
}
|
||||
|
||||
export interface ExpressionCondition extends UnaryCondition {
|
||||
// not in standard SQL
|
||||
conditionType: 'expression';
|
||||
}
|
||||
|
||||
export interface BinaryCondition {
|
||||
conditionType: 'binary';
|
||||
operator: '=' | '!=' | '<>' | '<' | '>' | '>=' | '<=';
|
||||
@@ -78,6 +84,11 @@ export interface TestCondition extends UnaryCondition {
|
||||
conditionType: 'isNull' | 'isNotNull' | 'isEmpty' | 'isNotEmpty';
|
||||
}
|
||||
|
||||
export interface SpecificPredicateCondition extends UnaryCondition {
|
||||
conditionType: 'specificPredicate';
|
||||
predicate: string;
|
||||
}
|
||||
|
||||
export interface CompoudCondition {
|
||||
conditionType: 'and' | 'or';
|
||||
conditions: Condition[];
|
||||
@@ -105,6 +116,12 @@ export interface InCondition {
|
||||
values: any[];
|
||||
}
|
||||
|
||||
export interface NotInCondition {
|
||||
conditionType: 'notIn';
|
||||
expr: Expression;
|
||||
values: any[];
|
||||
}
|
||||
|
||||
export interface RawTemplateCondition {
|
||||
conditionType: 'rawTemplate';
|
||||
templateSql: string;
|
||||
@@ -126,8 +143,11 @@ export type Condition =
|
||||
| NotExistsCondition
|
||||
| BetweenCondition
|
||||
| InCondition
|
||||
| NotInCondition
|
||||
| RawTemplateCondition
|
||||
| AnyColumnPassEvalOnlyCondition;
|
||||
| AnyColumnPassEvalOnlyCondition
|
||||
| SpecificPredicateCondition
|
||||
| ExpressionCondition;
|
||||
|
||||
export interface Source {
|
||||
name?: NamedObjectInfo;
|
||||
@@ -168,6 +188,13 @@ export interface RawExpression {
|
||||
sql: string;
|
||||
}
|
||||
|
||||
export interface UnaryRawExpression {
|
||||
exprType: 'unaryRaw';
|
||||
expr: Expression;
|
||||
beforeSql?: string;
|
||||
afterSql?: string;
|
||||
}
|
||||
|
||||
export interface CallExpression {
|
||||
exprType: 'call';
|
||||
func: string;
|
||||
@@ -198,6 +225,7 @@ export type Expression =
|
||||
| ValueExpression
|
||||
| PlaceholderExpression
|
||||
| RawExpression
|
||||
| UnaryRawExpression
|
||||
| CallExpression
|
||||
| MethodCallExpression
|
||||
| TranformExpression
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
},
|
||||
"author": "Jan Prochazka",
|
||||
"license": "MIT",
|
||||
"license": "GPL-3.0",
|
||||
"keywords": [
|
||||
"sql",
|
||||
"dbgate"
|
||||
@@ -31,12 +31,12 @@
|
||||
"typescript": "^4.4.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"dbgate-query-splitter": "^4.9.3",
|
||||
"dbgate-query-splitter": "^4.10.3",
|
||||
"dbgate-sqltree": "^5.0.0-alpha.1",
|
||||
"debug": "^4.3.4",
|
||||
"json-stable-stringify": "^1.0.1",
|
||||
"lodash": "^4.17.21",
|
||||
"pinomin": "^1.0.1",
|
||||
"pinomin": "^1.0.4",
|
||||
"uuid": "^3.4.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import _groupBy from 'lodash/groupBy';
|
||||
import _pick from 'lodash/pick';
|
||||
import _compact from 'lodash/compact';
|
||||
import { getLogger } from './getLogger';
|
||||
import { type Logger } from 'pinomin';
|
||||
|
||||
const logger = getLogger('dbAnalyser');
|
||||
|
||||
@@ -37,9 +38,11 @@ export class DatabaseAnalyser {
|
||||
singleObjectFilter: any;
|
||||
singleObjectId: string = null;
|
||||
dialect: SqlDialect;
|
||||
logger: Logger;
|
||||
|
||||
constructor(public pool, public driver: EngineDriver, version) {
|
||||
this.dialect = (driver?.dialectByVersion && driver?.dialectByVersion(version)) || driver?.dialect;
|
||||
this.logger = logger;
|
||||
}
|
||||
|
||||
async _runAnalysis() {
|
||||
@@ -177,8 +180,15 @@ export class DatabaseAnalyser {
|
||||
// return this.createQueryCore('=OBJECT_ID_CONDITION', typeFields) != ' is not null';
|
||||
// }
|
||||
|
||||
createQuery(template, typeFields) {
|
||||
return this.createQueryCore(template, typeFields);
|
||||
createQuery(template, typeFields, replacements = {}) {
|
||||
return this.createQueryCore(this.processQueryReplacements(template, replacements), typeFields);
|
||||
}
|
||||
|
||||
processQueryReplacements(query, replacements) {
|
||||
for (const repl in replacements) {
|
||||
query = query.replaceAll(repl, replacements[repl]);
|
||||
}
|
||||
return query;
|
||||
}
|
||||
|
||||
createQueryCore(template, typeFields) {
|
||||
@@ -299,8 +309,8 @@ export class DatabaseAnalyser {
|
||||
return [..._compact(res), ...this.getDeletedObjects(snapshot)];
|
||||
}
|
||||
|
||||
async analyserQuery(template, typeFields) {
|
||||
const sql = this.createQuery(template, typeFields);
|
||||
async analyserQuery(template, typeFields, replacements = {}) {
|
||||
const sql = this.createQuery(template, typeFields, replacements);
|
||||
|
||||
if (!sql) {
|
||||
return {
|
||||
@@ -308,7 +318,9 @@ export class DatabaseAnalyser {
|
||||
};
|
||||
}
|
||||
try {
|
||||
return await this.driver.query(this.pool, sql);
|
||||
const res = await this.driver.query(this.pool, sql);
|
||||
this.logger.debug({ rows: res.rows.length, template }, `Loaded analyser query`);
|
||||
return res;
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error running analyser query');
|
||||
return {
|
||||
|
||||
@@ -133,11 +133,30 @@ export class SqlDumper implements AlterProcessor {
|
||||
i++;
|
||||
switch (c) {
|
||||
case '^':
|
||||
if (format[i] == '^') {
|
||||
this.putRaw('^');
|
||||
i++;
|
||||
break;
|
||||
}
|
||||
|
||||
while (i < length && format[i].match(/[a-z0-9_]/i)) {
|
||||
this.putRaw(SqlDumper.convertKeywordCase(format[i]));
|
||||
i++;
|
||||
}
|
||||
break;
|
||||
case '~':
|
||||
if (format[i] == '~') {
|
||||
this.putRaw('~');
|
||||
i++;
|
||||
break;
|
||||
}
|
||||
let ident = '';
|
||||
while (i < length && format[i].match(/[a-z0-9_]/i)) {
|
||||
ident += format[i];
|
||||
i++;
|
||||
}
|
||||
this.putRaw(this.dialect.quoteIdentifier(ident));
|
||||
break;
|
||||
case '%':
|
||||
c = format[i];
|
||||
i++;
|
||||
@@ -199,14 +218,8 @@ export class SqlDumper implements AlterProcessor {
|
||||
|
||||
selectScopeIdentity(table: TableInfo) {}
|
||||
|
||||
columnDefinition(column: ColumnInfo, { includeDefault = true, includeNullable = true, includeCollate = true } = {}) {
|
||||
if (column.computedExpression) {
|
||||
this.put('^as %s', column.computedExpression);
|
||||
if (column.isPersisted) this.put(' ^persisted');
|
||||
return;
|
||||
}
|
||||
|
||||
const type = column.dataType || this.dialect.fallbackDataType;
|
||||
columnType(dataType: string) {
|
||||
const type = dataType || this.dialect.fallbackDataType;
|
||||
const typeWithValues = type.match(/([^(]+)(\(.+[^)]\))/);
|
||||
|
||||
if (typeWithValues?.length) {
|
||||
@@ -216,6 +229,16 @@ export class SqlDumper implements AlterProcessor {
|
||||
} else {
|
||||
this.putRaw(SqlDumper.convertKeywordCase(type));
|
||||
}
|
||||
}
|
||||
|
||||
columnDefinition(column: ColumnInfo, { includeDefault = true, includeNullable = true, includeCollate = true } = {}) {
|
||||
if (column.computedExpression) {
|
||||
this.put('^as %s', column.computedExpression);
|
||||
if (column.isPersisted) this.put(' ^persisted');
|
||||
return;
|
||||
}
|
||||
|
||||
this.columnType(column.dataType);
|
||||
|
||||
if (column.autoIncrement) {
|
||||
this.autoIncrement();
|
||||
@@ -648,7 +671,14 @@ export class SqlDumper implements AlterProcessor {
|
||||
this.putCmd('^drop %s %f', this.getSqlObjectSqlName(obj.objectTypeField), obj);
|
||||
}
|
||||
|
||||
fillPreloadedRows(table: NamedObjectInfo, oldRows: any[], newRows: any[], key: string[], insertOnly: string[]) {
|
||||
fillPreloadedRows(
|
||||
table: NamedObjectInfo,
|
||||
oldRows: any[],
|
||||
newRows: any[],
|
||||
key: string[],
|
||||
insertOnly: string[],
|
||||
autoIncrementColumn: string
|
||||
) {
|
||||
let was = false;
|
||||
for (const row of newRows) {
|
||||
const old = oldRows?.find(r => key.every(col => r[col] == row[col]));
|
||||
@@ -671,12 +701,15 @@ export class SqlDumper implements AlterProcessor {
|
||||
} else {
|
||||
if (was) this.put(';\n');
|
||||
was = true;
|
||||
const autoinc = rowKeys.includes(autoIncrementColumn);
|
||||
if (autoinc) this.allowIdentityInsert(table, true);
|
||||
this.put(
|
||||
'^insert ^into %f (%,i) ^values (%,v)',
|
||||
table,
|
||||
rowKeys,
|
||||
rowKeys.map(x => row[x])
|
||||
);
|
||||
if (autoinc) this.allowIdentityInsert(table, false);
|
||||
}
|
||||
}
|
||||
if (was) {
|
||||
|
||||
@@ -94,6 +94,7 @@ interface AlterOperation_FillPreloadedRows {
|
||||
newRows: any[];
|
||||
key: string[];
|
||||
insertOnly: string[];
|
||||
autoIncrementColumn: string;
|
||||
}
|
||||
|
||||
type AlterOperation =
|
||||
@@ -233,7 +234,14 @@ export class AlterPlan {
|
||||
this.recreates.tables += 1;
|
||||
}
|
||||
|
||||
fillPreloadedRows(table: NamedObjectInfo, oldRows: any[], newRows: any[], key: string[], insertOnly: string[]) {
|
||||
fillPreloadedRows(
|
||||
table: NamedObjectInfo,
|
||||
oldRows: any[],
|
||||
newRows: any[],
|
||||
key: string[],
|
||||
insertOnly: string[],
|
||||
autoIncrementColumn: string
|
||||
) {
|
||||
this.operations.push({
|
||||
operationType: 'fillPreloadedRows',
|
||||
table,
|
||||
@@ -241,6 +249,7 @@ export class AlterPlan {
|
||||
newRows,
|
||||
key,
|
||||
insertOnly,
|
||||
autoIncrementColumn,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -567,7 +576,7 @@ export function runAlterOperation(op: AlterOperation, processor: AlterProcessor)
|
||||
processor.dropSqlObject(op.oldObject);
|
||||
break;
|
||||
case 'fillPreloadedRows':
|
||||
processor.fillPreloadedRows(op.table, op.oldRows, op.newRows, op.key, op.insertOnly);
|
||||
processor.fillPreloadedRows(op.table, op.oldRows, op.newRows, op.key, op.insertOnly, op.autoIncrementColumn);
|
||||
break;
|
||||
case 'recreateTable':
|
||||
{
|
||||
|
||||
@@ -56,24 +56,42 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, pool, n
|
||||
const rows = writable.buffer;
|
||||
writable.buffer = [];
|
||||
|
||||
const dmp = driver.createDumper();
|
||||
if (driver.dialect.allowMultipleValuesInsert) {
|
||||
const dmp = driver.createDumper();
|
||||
dmp.putRaw(`INSERT INTO ${fullNameQuoted} (`);
|
||||
dmp.putCollection(',', writable.columnNames, col => dmp.putRaw(driver.dialect.quoteIdentifier(col as string)));
|
||||
dmp.putRaw(')\n VALUES\n');
|
||||
|
||||
dmp.putRaw(`INSERT INTO ${fullNameQuoted} (`);
|
||||
dmp.putCollection(',', writable.columnNames, col => dmp.putRaw(driver.dialect.quoteIdentifier(col as string)));
|
||||
dmp.putRaw(')\n VALUES\n');
|
||||
let wasRow = false;
|
||||
for (const row of rows) {
|
||||
if (wasRow) dmp.putRaw(',\n');
|
||||
dmp.putRaw('(');
|
||||
dmp.putCollection(',', writable.columnNames, col => dmp.putValue(row[col as string]));
|
||||
dmp.putRaw(')');
|
||||
wasRow = true;
|
||||
}
|
||||
dmp.putRaw(';');
|
||||
// require('fs').writeFileSync('/home/jena/test.sql', dmp.s);
|
||||
// console.log(dmp.s);
|
||||
await driver.query(pool, dmp.s, { discardResult: true });
|
||||
} else {
|
||||
for (const row of rows) {
|
||||
const dmp = driver.createDumper();
|
||||
dmp.putRaw(`INSERT INTO ${fullNameQuoted} (`);
|
||||
dmp.putCollection(',', writable.columnNames, col => dmp.putRaw(driver.dialect.quoteIdentifier(col as string)));
|
||||
dmp.putRaw(')\n VALUES\n');
|
||||
|
||||
let wasRow = false;
|
||||
for (const row of rows) {
|
||||
if (wasRow) dmp.putRaw(',\n');
|
||||
dmp.putRaw('(');
|
||||
dmp.putCollection(',', writable.columnNames, col => dmp.putValue(row[col as string]));
|
||||
dmp.putRaw(')');
|
||||
wasRow = true;
|
||||
dmp.putRaw('(');
|
||||
dmp.putCollection(',', writable.columnNames, col => dmp.putValue(row[col as string]));
|
||||
dmp.putRaw(')');
|
||||
await driver.query(pool, dmp.s, { discardResult: true });
|
||||
}
|
||||
}
|
||||
if (options.commitAfterInsert) {
|
||||
const dmp = driver.createDumper();
|
||||
dmp.commitTransaction();
|
||||
await driver.query(pool, dmp.s, { discardResult: true });
|
||||
}
|
||||
dmp.putRaw(';');
|
||||
// require('fs').writeFileSync('/home/jena/test.sql', dmp.s);
|
||||
// console.log(dmp.s);
|
||||
await driver.query(pool, dmp.s, { discardResult: true });
|
||||
};
|
||||
|
||||
writable.sendIfFull = async () => {
|
||||
|
||||
@@ -116,7 +116,14 @@ export class DatabaseInfoAlterProcessor {
|
||||
throw new Error('recreateTable not implemented for DatabaseInfoAlterProcessor');
|
||||
}
|
||||
|
||||
fillPreloadedRows(table: NamedObjectInfo, oldRows: any[], newRows: any[], key: string[], insertOnly: string[]) {
|
||||
fillPreloadedRows(
|
||||
table: NamedObjectInfo,
|
||||
oldRows: any[],
|
||||
newRows: any[],
|
||||
key: string[],
|
||||
insertOnly: string[],
|
||||
autoIncrementColumn: string
|
||||
) {
|
||||
const tableInfo = this.db.tables.find(x => x.pureName == table.pureName && x.schemaName == table.schemaName);
|
||||
tableInfo.preloadedRows = newRows;
|
||||
tableInfo.preloadedRowsKey = key;
|
||||
|
||||