Compare commits

...

573 Commits

Author SHA1 Message Date
Jan Prochazka
d49345de9c v6.6.1-beta.18 2025-08-13 07:22:06 +02:00
Jan Prochazka
1dbfa71bde v6.6.1-beta.16 2025-08-13 07:21:34 +02:00
Jan Prochazka
d46b84f0d6 Merge pull request #1171 from dbgate/feature/1137-mssql-column-desc
feat: add MS_Description to mssql analyzer columns
2025-08-13 06:55:35 +02:00
Pavel
971af1df5f fix: use baseColumns for tables hashes in _runAnalysis 2025-08-12 15:44:22 +02:00
Pavel
21641da0bf feat: safeCommentChanges flag to dialect 2025-08-07 15:32:24 +02:00
Pavel
a8d9c145e6 Merge branch 'master' into feature/1137-mssql-column-desc 2025-08-07 14:21:57 +02:00
Pavel
bfafcb76ba fix: use only cols with comments for obj hash 2025-08-07 14:21:57 +02:00
Pavel
52f74f1204 Merge branch 'master' into feature/1137-mssql-column-desc 2025-08-07 12:53:09 +02:00
SPRINX0\prochazka
00c212ecb2 v6.6.1-premium-beta.15 2025-08-07 09:57:18 +02:00
CI workflows
e77d302a49 chore: auto-update github workflows 2025-08-07 07:37:04 +00:00
CI workflows
5183f3729c Update pro ref 2025-08-07 07:36:45 +00:00
SPRINX0\prochazka
e93102f105 v6.6.1-premium-beta.14 2025-08-07 07:46:26 +02:00
CI workflows
a4652689ec chore: auto-update github workflows 2025-08-06 14:42:42 +00:00
CI workflows
ede1005087 Update pro ref 2025-08-06 14:42:23 +00:00
SPRINX0\prochazka
e97c7ed32e SYNC: fix BE 2025-08-06 14:42:10 +00:00
SPRINX0\prochazka
9230a2ab73 SYNC: Merge branch 'feature/mongosh' 2025-08-06 12:56:16 +00:00
SPRINX0\prochazka
01ee66ec4f SYNC: fixed choosing default database 2025-08-06 12:14:29 +00:00
SPRINX0\prochazka
4c12cbd3cc SYNC: improved log export 2025-08-06 11:37:49 +00:00
SPRINX0\prochazka
d8eeeaaef6 SYNC: export logs 2025-08-06 11:29:24 +00:00
SPRINX0\prochazka
994dae2a7d SYNC: export logs to file 2025-08-06 11:17:05 +00:00
CI workflows
51da6e928d chore: auto-update github workflows 2025-08-06 10:35:50 +00:00
CI workflows
a328ad030e Update pro ref 2025-08-06 10:35:32 +00:00
SPRINX0\prochazka
ed7605eccd SYNC: Merge branch 'feature/dblogs' 2025-08-06 10:35:21 +00:00
Pavel
de43880a1c feat: fetch only base column info for modifications, drop columnComment only if exists 2025-08-06 09:36:16 +02:00
Pavel
32b1a5b22d feat: include comments in contentHash for mssql 2025-08-06 09:36:16 +02:00
Pavel
795992fb42 feat: add add comment to table teest 2025-08-06 09:36:16 +02:00
Pavel
339eab33c8 feat: add add comment to column test 2025-08-06 09:36:16 +02:00
Pavel
489f3aa19d feat: add test for table creation with comments 2025-08-06 09:36:16 +02:00
Pavel
888e284f84 fix:fix: removeu duplicate method, simplify changeColumnComment 2025-08-06 09:36:16 +02:00
Pavel
d151114f08 fix: correctly parse table comment when creating a table 2025-08-06 09:36:16 +02:00
Pavel
4f6a3c23ad feat: ms_description for tables, upd columns 2025-08-06 09:36:16 +02:00
CI workflows
4ed437fd4e chore: auto-update github workflows 2025-08-05 15:15:45 +00:00
CI workflows
fa0b21ba81 Update pro ref 2025-08-05 15:14:18 +00:00
Jan Prochazka
a96f1d0b49 SYNC: Merge pull request #7 from dbgate/feature/applog 2025-08-05 15:14:06 +00:00
SPRINX0\prochazka
ac0aebd751 SYNC: strict delimit logs by date 2025-08-04 10:52:45 +00:00
SPRINX0\prochazka
781cbb4668 SYNC: updated mongo version 2025-08-04 08:34:43 +00:00
SPRINX0\prochazka
56ca1911a1 SYNC: mongosh - use only in stream method 2025-08-01 14:21:45 +00:00
Pavel
c20aec23a2 feat: change ms_description when columnComment changes 2025-07-31 15:01:22 +02:00
Pavel
a9cff01579 fix: show columnComment in structure ui 2025-07-31 15:01:22 +02:00
Pavel
6af56a61b8 feat: add ignoreComments options testEqualColumns 2025-07-31 15:01:22 +02:00
Pavel
252db191a6 feat: add ms_description to mssqldumper table options 2025-07-31 15:01:22 +02:00
Pavel
5e2776f264 feat: add MS_Description to mssql analyzer columns 2025-07-31 15:01:22 +02:00
CI workflows
7ec9fb2c44 chore: auto-update github workflows 2025-07-31 10:52:36 +00:00
SPRINX0\prochazka
34facb6b3b try to fix build 2025-07-31 12:52:17 +02:00
SPRINX0\prochazka
5bb2a1368e v6.6.1-premium-beta.13 2025-07-31 10:49:08 +02:00
CI workflows
85a7bbca66 chore: auto-update github workflows 2025-07-31 08:47:07 +00:00
CI workflows
2b7f27bf8f Update pro ref 2025-07-31 08:46:50 +00:00
SPRINX0\prochazka
dd57945e7d v6.6.1-premium-beta.12 2025-07-31 10:26:58 +02:00
SPRINX0\prochazka
fa02b4fd56 SYNC: tag fix 2025-07-31 08:26:35 +00:00
SPRINX0\prochazka
c8715eead5 v6.6.1-premium-beta.11 2025-07-31 09:56:51 +02:00
SPRINX0\prochazka
37aae8c10e SYNC: GTM support 2025-07-31 07:55:58 +00:00
SPRINX0\prochazka
a97ed02e15 support for script embeding 2025-07-31 09:34:45 +02:00
SPRINX0\prochazka
38ebb2d06a Revert "update all packages"
This reverts commit c765bfc946.
2025-07-30 15:42:23 +02:00
SPRINX0\prochazka
c765bfc946 update all packages 2025-07-30 15:41:18 +02:00
CI workflows
b00ac75f33 chore: auto-update github workflows 2025-07-30 10:40:36 +00:00
SPRINX0\prochazka
36730168c0 SYNC: upgraded mongo for E2E tests 2025-07-30 10:40:19 +00:00
SPRINX0\prochazka
4339ece6f6 v6.6.1-beta.7 2025-07-30 11:17:38 +02:00
SPRINX0\prochazka
041c997e59 v6.6.1-premium-beta.6 2025-07-30 11:17:26 +02:00
SPRINX0\prochazka
098ebb38dc SYNC: fixed BE for premium 2025-07-30 09:05:11 +00:00
CI workflows
b99c38a070 chore: auto-update github workflows 2025-07-30 08:46:03 +00:00
SPRINX0\prochazka
07b42d8e74 Merge branch 'feature/mongosh' 2025-07-30 10:45:02 +02:00
SPRINX0\prochazka
062d168c97 windows arm build for community is back 2025-07-30 10:44:33 +02:00
SPRINX0\prochazka
c9be5fb125 v6.6.1-premium-beta.5 2025-07-30 10:20:20 +02:00
SPRINX0\prochazka
06c6716ee1 v6.6.1-beta.4 2025-07-30 10:20:09 +02:00
SPRINX0\prochazka
e856d8fddf mongosh support only for premium 2025-07-30 10:19:57 +02:00
SPRINX0\prochazka
20339f70c1 text 2025-07-30 08:42:30 +02:00
SPRINX0\prochazka
c2e6cf1eb0 time-limited offer info 2025-07-30 08:19:26 +02:00
SPRINX0\prochazka
8dfdca97cd readme 2025-07-30 07:38:17 +02:00
Pavel
6e8cdc24a3 feat: use getColleciton syntax for mongo 2025-07-29 16:33:39 +02:00
Pavel
461f1e39fa fix: use correct dbName in connection string 2025-07-29 16:12:56 +02:00
Pavel
4892dbce5e fix: use [''] syntax for mongosh col calls 2025-07-29 15:31:13 +02:00
SPRINX0\prochazka
9da32a13de v6.6.1-beta.3 2025-07-29 10:55:18 +02:00
SPRINX0\prochazka
9c6908da77 reverted change 2025-07-29 10:55:07 +02:00
SPRINX0\prochazka
ac081e6c86 v6.6.1-beta.2 2025-07-29 10:50:18 +02:00
SPRINX0\prochazka
cc9744156c removed windows ARM build 2025-07-29 10:50:09 +02:00
SPRINX0\prochazka
72a874c7f4 v6.6.1-beta.1 2025-07-29 09:58:42 +02:00
SPRINX0\prochazka
2809324b35 Merge branch 'master' into feature/mongosh 2025-07-29 09:57:20 +02:00
SPRINX0\prochazka
58e6c45c73 mongosh volatile packages 2025-07-29 09:56:25 +02:00
SPRINX0\prochazka
43aaf192a2 promo widget 2025-07-25 13:04:21 +02:00
SPRINX0\prochazka
c0574bc738 readme 2025-07-25 12:25:02 +02:00
SPRINX0\prochazka
27e5d639ef changelog 2025-07-25 09:27:45 +02:00
SPRINX0\prochazka
aa9fdd4fc9 changelog 2025-07-25 09:24:22 +02:00
SPRINX0\prochazka
7af6d9b2ce test wait 2025-07-25 09:22:55 +02:00
SPRINX0\prochazka
0e06d28335 v6.6.0 2025-07-25 09:21:25 +02:00
SPRINX0\prochazka
e3b86e4d41 v6.5.7-premium-beta.6 2025-07-25 08:39:12 +02:00
SPRINX0\prochazka
5b1bfe7379 v6.5.7-beta.5 2025-07-25 08:39:02 +02:00
CI workflows
76d07b967e chore: auto-update github workflows 2025-07-25 06:30:06 +00:00
CI workflows
4b1932fe52 Update pro ref 2025-07-25 06:29:48 +00:00
CI workflows
a56de91b1e chore: auto-update github workflows 2025-07-25 06:23:26 +00:00
CI workflows
6b4fb616bc Update pro ref 2025-07-25 06:23:11 +00:00
SPRINX0\prochazka
d24670e14e SYNC: chat & chart permission 2025-07-25 06:23:00 +00:00
CI workflows
13b3ae35ed chore: auto-update github workflows 2025-07-25 06:04:12 +00:00
CI workflows
6860e1f085 Update pro ref 2025-07-25 06:03:56 +00:00
SPRINX0\prochazka
74fa1c6628 SYNC: store connection definition in storagedb 2025-07-25 06:03:44 +00:00
SPRINX0\prochazka
85f847a4f3 SYNC: fix 2025-07-25 06:01:37 +00:00
CI workflows
39df72d163 chore: auto-update github workflows 2025-07-25 05:54:02 +00:00
CI workflows
5ca8786802 Update pro ref 2025-07-25 05:53:44 +00:00
SPRINX0\prochazka
ca145967dc SYNC: Merge branch 'feature/firestore' 2025-07-25 05:53:34 +00:00
Pavel
06a3ce7486 feat: update mongo getCollectionExportQueryScript 2025-07-24 20:55:24 +02:00
Pavel
9f85b6154d feat: look into cursor's firstBatch in mongosh res 2025-07-24 18:48:58 +02:00
SPRINX0\prochazka
732763689a v6.5.7-beta.4 2025-07-24 17:28:19 +02:00
SPRINX0\prochazka
0ea75f25f1 process workflows - upgrade node 2025-07-24 17:21:28 +02:00
SPRINX0\prochazka
eab27ce0bb node version 22 2025-07-24 17:20:52 +02:00
SPRINX0\prochazka
29fd381989 Merge branch 'master' into feature/mongosh 2025-07-24 17:14:59 +02:00
Pavel
06a845697a feat: mongosh for script and stream methods 2025-07-24 15:52:59 +02:00
CI workflows
b12587626d chore: auto-update github workflows 2025-07-24 10:11:39 +00:00
SPRINX0\prochazka
b49988032e firestore plugin build 2025-07-24 12:11:21 +02:00
SPRINX0\prochazka
a9b4152553 v6.5.7-premium-beta.3 2025-07-24 12:08:00 +02:00
CI workflows
63720045f1 chore: auto-update github workflows 2025-07-24 10:07:04 +00:00
CI workflows
aa7529192e Update pro ref 2025-07-24 10:06:46 +00:00
SPRINX0\prochazka
a162a15a27 v6.5.7-premium-beta.2 2025-07-24 11:01:28 +02:00
CI workflows
457a73efae chore: auto-update github workflows 2025-07-24 09:00:52 +00:00
CI workflows
91c3dd982b Update pro ref 2025-07-24 09:00:35 +00:00
Jan Prochazka
c171f93c93 SYNC: Merge pull request #5 from dbgate/feature/firestore 2025-07-24 09:00:23 +00:00
SPRINX0\prochazka
0cf9ddb1cd SYNC: try to fix test 2025-07-24 07:13:39 +00:00
SPRINX0\prochazka
2322537350 v6.5.7-premium-beta.1 2025-07-24 08:58:19 +02:00
SPRINX0\prochazka
6ce50109da gw server API 2025-07-24 08:55:21 +02:00
SPRINX0\prochazka
abe7fdf34d SYNC: simplker chat test 2025-07-24 06:52:58 +00:00
CI workflows
ecf2f5ed8c chore: auto-update github workflows 2025-07-24 06:50:30 +00:00
CI workflows
98b4934dd5 Update pro ref 2025-07-24 06:50:16 +00:00
SPRINX0\prochazka
0bc7c544ad SYNC: chat UX 2025-07-24 06:50:05 +00:00
SPRINX0\prochazka
1f7ad9d418 SYNC: fix 2025-07-24 06:31:57 +00:00
CI workflows
4b9d3b3dbc Update pro ref 2025-07-24 06:31:44 +00:00
SPRINX0\prochazka
571e332ed5 SYNC: fixed test 2025-07-24 06:31:34 +00:00
CI workflows
d78d22b188 chore: auto-update github workflows 2025-07-24 06:11:23 +00:00
CI workflows
d37638240a Update pro ref 2025-07-24 06:11:05 +00:00
SPRINX0\prochazka
ae7fd3f87b SYNC: open db chat ctx menu 2025-07-24 06:11:00 +00:00
Jan Prochazka
e82e63b288 SYNC: adidtional test 2025-07-24 06:10:57 +00:00
Jan Prochazka
0149d4e27b SYNC: database chat test 2025-07-24 06:10:54 +00:00
CI workflows
9fc9c71b6f chore: auto-update github workflows 2025-07-23 13:46:29 +00:00
CI workflows
b264f690d1 Update pro ref 2025-07-23 13:46:13 +00:00
Jan Prochazka
c07e19c898 SYNC: Merge pull request #6 from dbgate/feature/ai-assistant 2025-07-23 13:46:01 +00:00
Pavel
b8e50737d2 feat: basic mongosh support 2025-07-23 04:40:52 +02:00
SPRINX0\prochazka
082d0aa02f translations 2025-07-18 10:03:58 +02:00
SPRINX0\prochazka
ca26d0e450 translations 2025-07-18 10:01:24 +02:00
SPRINX0\prochazka
8cbe021ffc v6.5.6 2025-07-17 09:04:06 +02:00
SPRINX0\prochazka
7b39d8025b changelog 2025-07-17 09:03:18 +02:00
SPRINX0\prochazka
47bd35b151 fixed failing test 2025-07-17 08:44:22 +02:00
SPRINX0\prochazka
d7add54a3c v6.5.6-premium-beta.5 2025-07-17 08:19:37 +02:00
SPRINX0\prochazka
d3c937569b SYNC: anonymized cloud instance 2025-07-17 06:18:50 +00:00
SPRINX0\prochazka
94ca613201 v6.5.6-premium-beta.4 2025-07-16 15:52:09 +02:00
SPRINX0\prochazka
30f2f635be v6.5.6-premium-beta.3 2025-07-16 15:51:20 +02:00
SPRINX0\prochazka
57f4d31c21 SYNC: fix 2025-07-16 13:24:47 +00:00
SPRINX0\prochazka
90e4fd7ff5 SYNC: disable splitting queries with blank lines? #1162 2025-07-16 13:16:02 +00:00
SPRINX0\prochazka
17835832f2 v6.5.6-beta.2 2025-07-16 14:51:55 +02:00
SPRINX0\prochazka
949817f597 SYNC: SKIP_ALL_AUTH support 2025-07-16 12:48:38 +00:00
SPRINX0\prochazka
23065f2c4b SYNC: test fix 2025-07-16 12:26:08 +00:00
SPRINX0\prochazka
b623b06cf0 SYNC: bugfix 2025-07-16 12:02:19 +00:00
CI workflows
55c86d8ec7 chore: auto-update github workflows 2025-07-16 11:43:35 +00:00
CI workflows
e955617aa1 Update pro ref 2025-07-16 11:43:18 +00:00
SPRINX0\prochazka
6304610713 SYNC: hard limit for pie chart 2025-07-16 11:43:08 +00:00
SPRINX0\prochazka
47d20928e0 SYNC: try to fix tests 2025-07-16 11:24:06 +00:00
SPRINX0\prochazka
c9a4d02e0d SYNC: new object window screenshot 2025-07-16 11:03:07 +00:00
CI workflows
6513dfb42a chore: auto-update github workflows 2025-07-16 10:52:10 +00:00
CI workflows
3f0412453f Update pro ref 2025-07-16 10:51:29 +00:00
SPRINX0\prochazka
dcba319071 SYNC: disabled messages in new object modal 2025-07-16 10:51:19 +00:00
SPRINX0\prochazka
d19851fc0c SYNC: compare database in new object modal 2025-07-16 10:51:17 +00:00
SPRINX0\prochazka
d6eb06cb72 SYNC: export db window 2025-07-16 10:51:16 +00:00
SPRINX0\prochazka
473080d7ee SYNC: typo 2025-07-16 10:51:15 +00:00
SPRINX0\prochazka
c98a6adb09 SYNC: new object modal testid 2025-07-16 10:51:13 +00:00
SPRINX0\prochazka
2cd56d5041 SYNC: new object button refactor + diagram accesibility 2025-07-16 10:51:12 +00:00
SPRINX0\prochazka
982098672e SYNC: new object modal 2025-07-16 10:51:10 +00:00
SPRINX0\prochazka
445ecea3e6 SYNC: new object modal WIP 2025-07-16 10:51:09 +00:00
SPRINX0\prochazka
db977dfba4 SYNC: next screenshots 2025-07-15 08:32:33 +00:00
CI workflows
a3c12ab9f5 chore: auto-update github workflows 2025-07-15 08:22:58 +00:00
CI workflows
0f7e152650 Update pro ref 2025-07-15 08:22:41 +00:00
SPRINX0\prochazka
b55c7ba9a1 v6.5.6-premium-beta.1 2025-07-15 09:16:08 +02:00
CI workflows
8256c9f7ad chore: auto-update github workflows 2025-07-15 07:12:36 +00:00
CI workflows
59727d7b0b Update pro ref 2025-07-15 07:12:20 +00:00
SPRINX0\prochazka
2dd2210a73 SYNC: separate schemas mode usable for administration 2025-07-15 07:12:08 +00:00
SPRINX0\prochazka
25aafdbebc SYNC: chart screenshots for tutorial 2025-07-15 06:46:01 +00:00
SPRINX0\prochazka
cd5717169c login checker dummy implementation 2025-07-14 15:23:09 +02:00
CI workflows
a38ad5a11e chore: auto-update github workflows 2025-07-14 13:22:34 +00:00
CI workflows
66d9b56976 Update pro ref 2025-07-14 13:22:21 +00:00
SPRINX0\prochazka
ac40bd1e17 SYNC: checking logged users 2025-07-14 13:22:10 +00:00
SPRINX0\prochazka
16d2a9bf99 SYNC: renew license from set license page 2025-07-14 11:41:55 +00:00
SPRINX0\prochazka
b7e6838d26 refresh license fake 2025-07-14 12:28:49 +02:00
CI workflows
21d23b5baa chore: auto-update github workflows 2025-07-14 10:22:13 +00:00
CI workflows
69a2941d57 Update pro ref 2025-07-14 10:21:59 +00:00
SPRINX0\prochazka
3cc2abf8b9 SYNC: better handling of expired license in electron app 2025-07-14 10:21:49 +00:00
Jan Prochazka
6f4173650a v6.5.5 2025-07-04 09:08:49 +02:00
Jan Prochazka
0fcb8bdc0a SYNC: changelog 2025-07-04 07:05:28 +00:00
CI workflows
c0937cf412 chore: auto-update github workflows 2025-07-04 06:44:51 +00:00
CI workflows
d9ab3aab0f Update pro ref 2025-07-04 06:44:36 +00:00
CI workflows
c8652de78b chore: auto-update github workflows 2025-07-04 06:34:19 +00:00
CI workflows
86dc4e2bd5 Update pro ref 2025-07-04 06:34:04 +00:00
Jan Prochazka
1b9c56a9b9 SYNC: fixed data replicator test 2025-07-04 06:33:54 +00:00
Jan Prochazka
08ab504fac SYNC: fix 2025-07-04 06:33:52 +00:00
CI workflows
21c0842fae chore: auto-update github workflows 2025-07-04 06:11:12 +00:00
CI workflows
8d10feaa68 Update pro ref 2025-07-04 06:10:53 +00:00
Jan Prochazka
df2171f253 SYNC: fixed disabling/enabling auth methods for team premium 2025-07-04 06:10:43 +00:00
SPRINX0\prochazka
f5fcd94faf SYNC: fix 2025-07-03 15:36:58 +00:00
CI workflows
15c5dbef00 chore: auto-update github workflows 2025-07-03 15:28:56 +00:00
CI workflows
79df56c096 Update pro ref 2025-07-03 15:28:39 +00:00
SPRINX0\prochazka
d3fffd9530 SYNC: missing audit logs 2025-07-03 15:28:28 +00:00
SPRINX0\prochazka
527c9c8e6e loginchecker placeholder 2025-07-03 16:58:27 +02:00
CI workflows
d285be45cb chore: auto-update github workflows 2025-07-03 14:51:25 +00:00
CI workflows
0dda9c73f6 Update pro ref 2025-07-03 14:51:10 +00:00
SPRINX0\prochazka
d07bf270e7 SYNC: logi checker refactor 2025-07-03 14:50:59 +00:00
CI workflows
eb24dd5d9e chore: auto-update github workflows 2025-07-03 13:25:47 +00:00
CI workflows
ce693c7cd5 Update pro ref 2025-07-03 13:25:32 +00:00
CI workflows
3198890269 chore: auto-update github workflows 2025-07-03 13:11:26 +00:00
CI workflows
eacc93de43 Update pro ref 2025-07-03 13:11:07 +00:00
SPRINX0\prochazka
9795740257 SYNC: check licensed user count 2025-07-03 13:10:55 +00:00
SPRINX0\prochazka
4548f5d8aa fix 2025-07-03 14:09:29 +02:00
SPRINX0\prochazka
8dfd2fb519 markUserAsActive dummy method 2025-07-03 14:07:04 +02:00
CI workflows
83a40f83e1 chore: auto-update github workflows 2025-07-03 11:50:05 +00:00
CI workflows
5b2fcb3c6c Update pro ref 2025-07-03 11:49:50 +00:00
Jan Prochazka
bcd9adb66d Merge pull request #1159 from dbgate/feature/firebird-always-use-text-for-file
feat: add useServerDatabaseFile for firebird
2025-07-03 13:30:21 +02:00
Pavel
5e2dc114ab feat: add useServerDatabaseFile for firebird 2025-07-03 13:27:22 +02:00
SPRINX0\prochazka
1ced4531be auditlog dummy methods 2025-07-03 13:20:07 +02:00
CI workflows
05fe39c0ae chore: auto-update github workflows 2025-07-03 11:18:54 +00:00
CI workflows
3769b2b3ea Update pro ref 2025-07-03 11:18:38 +00:00
Jan Prochazka
f4d5480f6f SYNC: try to fix test 2025-07-02 14:09:25 +00:00
Jan Prochazka
ddf3c0810b SYNC: charts auto detect 2025-07-02 13:49:39 +00:00
Jan Prochazka
6afd6d0aa0 v6.5.5-premium-beta.5 2025-07-02 13:42:41 +02:00
CI workflows
59fe92eb04 chore: auto-update github workflows 2025-07-02 11:41:41 +00:00
CI workflows
0550f32434 Update pro ref 2025-07-02 11:41:25 +00:00
Jan Prochazka
b702cad549 SYNC: fixed chart test 2025-07-02 11:41:15 +00:00
Jan Prochazka
aa5c4d3c5e changelog 2025-07-02 13:29:37 +02:00
CI workflows
6a99445d97 chore: auto-update github workflows 2025-07-02 11:22:53 +00:00
CI workflows
c9880ef47d Update pro ref 2025-07-02 11:22:38 +00:00
CI workflows
c16452dfcb chore: auto-update github workflows 2025-07-02 11:12:38 +00:00
CI workflows
af802c02fc Update pro ref 2025-07-02 11:12:20 +00:00
Jan Prochazka
8028aafeff SYNC: split too different ydefs 2025-07-02 11:12:09 +00:00
Jan Prochazka
b7469062a1 SYNC: charts autodetector test 2025-07-02 08:57:39 +00:00
Jan Prochazka
33b707aa68 SYNC: chart autodetection improved 2025-07-02 08:50:33 +00:00
Jan Prochazka
cd3a1bebff SYNC: autodetect - with grouping field 2025-07-02 08:23:12 +00:00
Jan Prochazka
794dd5a797 SYNC: refactor 2025-07-02 08:23:10 +00:00
CI workflows
a1465432e8 chore: auto-update github workflows 2025-07-02 06:54:14 +00:00
CI workflows
e1f8af0909 Update pro ref 2025-07-02 06:53:57 +00:00
Jan Prochazka
88918be329 SYNC: chart - detect data types 2025-07-02 06:53:47 +00:00
CI workflows
a3fc1dbff0 chore: auto-update github workflows 2025-07-02 06:20:20 +00:00
CI workflows
626c9825cc Update pro ref 2025-07-02 06:20:02 +00:00
Jan Prochazka
c10a84fc79 SYNC: timeline chart type 2025-07-02 06:19:49 +00:00
SPRINX0\prochazka
f14e4fe197 SYNC: month match 2025-07-01 14:53:27 +00:00
CI workflows
6eb218db5e chore: auto-update github workflows 2025-07-01 14:30:39 +00:00
CI workflows
0e77e053b0 Update pro ref 2025-07-01 14:30:23 +00:00
SPRINX0\prochazka
b9a4128a3d SYNC: charts - grouping field support 2025-07-01 14:30:12 +00:00
CI workflows
16f480e1f3 chore: auto-update github workflows 2025-07-01 12:23:46 +00:00
CI workflows
7c42511133 Update pro ref 2025-07-01 12:23:32 +00:00
CI workflows
1b252a84c2 chore: auto-update github workflows 2025-07-01 12:14:57 +00:00
CI workflows
bf833cadff Update pro ref 2025-07-01 12:14:42 +00:00
CI workflows
b6f872882a chore: auto-update github workflows 2025-07-01 12:12:56 +00:00
CI workflows
a18d6fb441 Update pro ref 2025-07-01 12:12:39 +00:00
CI workflows
922e703e81 chore: auto-update github workflows 2025-07-01 10:59:50 +00:00
CI workflows
d7f5817b8b Update pro ref 2025-07-01 10:59:32 +00:00
SPRINX0\prochazka
92a8a4bfa6 SYNC: chart improvements 2025-07-01 10:59:20 +00:00
CI workflows
b480151fc3 chore: auto-update github workflows 2025-07-01 10:35:08 +00:00
CI workflows
37bdbc1bd5 Update pro ref 2025-07-01 10:34:52 +00:00
CI workflows
8eb669139b chore: auto-update github workflows 2025-07-01 10:11:57 +00:00
CI workflows
b485e8cacc Update pro ref 2025-07-01 10:11:39 +00:00
CI workflows
c4bab61c47 chore: auto-update github workflows 2025-07-01 08:45:43 +00:00
CI workflows
72be417ff1 Update pro ref 2025-07-01 08:45:27 +00:00
CI workflows
9be483d7a6 chore: auto-update github workflows 2025-07-01 08:35:12 +00:00
CI workflows
910f2cee2c Update pro ref 2025-07-01 08:34:48 +00:00
SPRINX0\prochazka
1e47ace527 SYNC: fixed diagram zoom GL#57 2025-07-01 07:14:55 +00:00
Jan Prochazka
912b06b145 v6.5.5-premium-beta.4 2025-06-30 14:35:30 +02:00
CI workflows
87d878e287 chore: auto-update github workflows 2025-06-30 12:33:45 +00:00
CI workflows
be886d6bce Update pro ref 2025-06-30 12:33:29 +00:00
Jan Prochazka
0683deb47e v6.5.5-premium-beta.3 2025-06-30 13:53:39 +02:00
CI workflows
114bb22e27 chore: auto-update github workflows 2025-06-30 11:46:31 +00:00
CI workflows
c327ebc3df Update pro ref 2025-06-30 11:46:18 +00:00
Jan Prochazka
92cbd1c69c SYNC: config fixed 2025-06-30 11:46:05 +00:00
CI workflows
7242515e48 chore: auto-update github workflows 2025-06-30 10:32:57 +00:00
CI workflows
401d1a0ac2 Update pro ref 2025-06-30 10:32:41 +00:00
Jan Prochazka
863e042a37 SYNC: fixed exporting chart for electron 2025-06-30 10:32:31 +00:00
Jan Prochazka
39e6c45ec6 v6.5.5-premium-beta.2 2025-06-30 09:25:54 +02:00
CI workflows
0d364d18c7 chore: auto-update github workflows 2025-06-30 06:56:17 +00:00
CI workflows
61444ea390 Update pro ref 2025-06-30 06:56:01 +00:00
CI workflows
106a935efb chore: auto-update github workflows 2025-06-30 06:28:20 +00:00
CI workflows
d175d8a853 Update pro ref 2025-06-30 06:28:04 +00:00
Jan Prochazka
ce6d19a77a SYNC: call adapt db info 2025-06-30 06:27:54 +00:00
CI workflows
0a29273924 chore: auto-update github workflows 2025-06-29 18:32:57 +00:00
CI workflows
5ede64de58 Update pro ref 2025-06-29 18:32:40 +00:00
Jan Prochazka
224c6ad798 SYNC: try to fix oracle test 2025-06-29 18:27:07 +00:00
Jan Prochazka
57b3a0dbe7 v6.5.5-premium-beta.1 2025-06-28 12:00:16 +02:00
CI workflows
f381f708e0 chore: auto-update github workflows 2025-06-27 13:27:16 +00:00
CI workflows
63bf149546 Update pro ref 2025-06-27 13:27:02 +00:00
SPRINX0\prochazka
cb5e671259 SYNC: audit log test 2025-06-27 13:26:51 +00:00
CI workflows
3e38173c4e chore: auto-update github workflows 2025-06-27 13:01:02 +00:00
CI workflows
efacb643fc Update pro ref 2025-06-27 13:00:45 +00:00
SPRINX0\prochazka
1bd153ea0b SYNC: audit log UX 2025-06-27 13:00:32 +00:00
CI workflows
bac3dc5f4c chore: auto-update github workflows 2025-06-27 11:08:55 +00:00
CI workflows
959a853d77 Update pro ref 2025-06-27 11:08:36 +00:00
SPRINX0\prochazka
90bbdd563b SYNC: Merge branch 'feature/audit-logs' 2025-06-27 11:08:23 +00:00
SPRINX0\prochazka
e3c6d05a0a SYNC: try to fix test 2025-06-27 10:32:01 +00:00
SPRINX0\prochazka
930b3d4538 SYNC: cloud test - use test login 2025-06-27 08:51:36 +00:00
SPRINX0\prochazka
74b78141b4 fake method 2025-06-27 10:02:33 +02:00
SPRINX0\prochazka
aa1108cd5b SYNC: try to fix build 2025-06-27 07:42:42 +00:00
SPRINX0\prochazka
f24b1a9db3 audit log fake methods 2025-06-26 16:49:10 +02:00
SPRINX0\prochazka
71b191e740 SYNC: try to fix test 2025-06-25 07:24:42 +00:00
SPRINX0\prochazka
8f6341b903 SYNC: removed baseUrl config 2025-06-25 07:02:07 +00:00
SPRINX0\prochazka
161586db7e SYNC: try to fix test 2025-06-24 15:13:32 +00:00
SPRINX0\prochazka
052262bef9 SYNC: try to fix test 2025-06-24 13:05:38 +00:00
SPRINX0\prochazka
a5a7144707 SYNC: try to fix test 2025-06-24 10:57:54 +00:00
SPRINX0\prochazka
d945e0426d SYNC: try to fix test 2025-06-24 10:29:47 +00:00
SPRINX0\prochazka
926970c4eb SYNC: added missing script 2025-06-24 09:23:53 +00:00
CI workflows
cce36e0f28 chore: auto-update github workflows 2025-06-24 08:33:21 +00:00
CI workflows
48c6dc5be5 Update pro ref 2025-06-24 08:33:05 +00:00
SPRINX0\prochazka
c641830825 SYNC: private cloud test 2025-06-24 08:32:52 +00:00
SPRINX0\prochazka
eba16cc15d SYNC: dbgate cloud redirect workflow 2025-06-24 07:22:43 +00:00
SPRINX0\prochazka
bd88b8411e SYNC: private cloud test 2025-06-23 14:59:05 +00:00
SPRINX0\prochazka
fc121e8750 missing file 2025-06-23 16:51:42 +02:00
CI workflows
d4142fe56a chore: auto-update github workflows 2025-06-23 14:31:30 +00:00
CI workflows
f76a3e72bb Update pro ref 2025-06-23 14:31:11 +00:00
SPRINX0\prochazka
2d400ae7eb SYNC: folder administration modal 2025-06-23 14:30:58 +00:00
SPRINX0\prochazka
edf1632cab SYNC: fixed connection for scripts 2025-06-23 11:29:04 +00:00
SPRINX0\prochazka
a648f1ee67 SYNC: SQL fixed database WIP 2025-06-23 11:10:46 +00:00
SPRINX0\prochazka
d004e6e86c SYNC: new cloud file 2025-06-23 09:05:42 +00:00
SPRINX0\prochazka
fa321d3e8d SYNC: create query on cloud shortcut 2025-06-23 08:52:07 +00:00
SPRINX0\prochazka
e1e53d323f SYNC: dbgate cloud menu refactor 2025-06-23 07:40:46 +00:00
SPRINX0\prochazka
ccb18ca302 v6.5.4 2025-06-20 17:03:39 +02:00
SPRINX0\prochazka
e170f36bc6 v6.5.4-premium-beta.1 2025-06-20 16:46:18 +02:00
SPRINX0\prochazka
4bd9cc51ee SYNC: try to fix e2e test 2025-06-20 14:41:03 +00:00
SPRINX0\prochazka
43ffbda1a4 SYNC: removed vorgotten test.only 2025-06-20 13:32:20 +00:00
SPRINX0\prochazka
8240485fd1 changelog 2025-06-20 15:05:18 +02:00
SPRINX0\prochazka
7f053c0567 v6.5.3 2025-06-20 15:01:47 +02:00
SPRINX0\prochazka
d2922eb0b7 SYNC: cloud connections fix 2025-06-20 12:57:09 +00:00
SPRINX0\prochazka
fec10d453f license detection fix 2025-06-20 14:36:00 +02:00
SPRINX0\prochazka
162040545d SYNC: improved about modal 2025-06-20 12:29:11 +00:00
CI workflows
f14577f8bf chore: auto-update github workflows 2025-06-20 11:57:27 +00:00
CI workflows
e5720bd1be Update pro ref 2025-06-20 11:57:15 +00:00
CI workflows
6d4959bac8 Update pro ref 2025-06-20 11:57:11 +00:00
SPRINX0\prochazka
d668128a34 SYNC: private cloud UX + fixes 2025-06-20 11:46:04 +00:00
SPRINX0\prochazka
f2af38da4c v6.5.3-premium-beta.1 2025-06-19 18:00:13 +02:00
CI workflows
4776d18fd7 chore: auto-update github workflows 2025-06-19 15:57:59 +00:00
CI workflows
cdd0be7b78 Update pro ref 2025-06-19 15:57:43 +00:00
SPRINX0\prochazka
cd505abb22 SYNC: charts fix 2025-06-19 15:57:31 +00:00
SPRINX0\prochazka
28439c010f SYNC: fixed all search column settings for alternative grids #1118 2025-06-19 12:43:15 +00:00
CI workflows
e85f43beb1 chore: auto-update github workflows 2025-06-19 12:08:31 +00:00
CI workflows
a06cbc0840 Update pro ref 2025-06-19 12:08:18 +00:00
SPRINX0\prochazka
adef9728f8 SYNC: charts UX, error handling, bucket count limit 2025-06-19 12:08:06 +00:00
CI workflows
ff1b688b6e chore: auto-update github workflows 2025-06-19 08:56:09 +00:00
CI workflows
3e7574a927 Update pro ref 2025-06-19 08:55:55 +00:00
SPRINX0\prochazka
f852ea90ad changelog 2025-06-18 11:31:53 +02:00
SPRINX0\prochazka
d8f6247c32 v6.5.2 2025-06-18 11:00:46 +02:00
SPRINX0\prochazka
9dc28393a5 links added 2025-06-18 10:50:44 +02:00
SPRINX0\prochazka
c442c98ecf SYNC: fixed test 2025-06-18 08:43:56 +00:00
SPRINX0\prochazka
71e0109927 v6.5.2-premium-beta.1 2025-06-18 10:21:57 +02:00
SPRINX0\prochazka
9c7dd5ed1c SYNC: close chart fix 2025-06-18 08:17:58 +00:00
CI workflows
83620848f2 chore: auto-update github workflows 2025-06-18 08:13:39 +00:00
CI workflows
d548a5b4f3 Update pro ref 2025-06-18 08:13:25 +00:00
CI workflows
b6e5307755 chore: auto-update github workflows 2025-06-18 08:05:11 +00:00
CI workflows
4c5dc5a145 Update pro ref 2025-06-18 08:04:54 +00:00
SPRINX0\prochazka
69ed9172b8 SYNC: chart UX 2025-06-18 08:04:41 +00:00
CI workflows
68551ae176 chore: auto-update github workflows 2025-06-18 07:48:21 +00:00
CI workflows
c97d9d35ba Update pro ref 2025-06-18 07:48:07 +00:00
SPRINX0\prochazka
e86cc97cdf SYNC: changed chart logic 2025-06-18 07:47:56 +00:00
SPRINX0\prochazka
9bff8608c1 SYNC: auto-detect charts is disabled by default #1145 2025-06-18 07:30:16 +00:00
SPRINX0\prochazka
a10fe6994a v6.5.1 2025-06-17 16:08:07 +02:00
SPRINX0\prochazka
67e6a37b59 v6.5.1-beta.1 2025-06-17 15:42:26 +02:00
SPRINX0\prochazka
3075a56735 fixed cloud login 2025-06-17 15:42:16 +02:00
SPRINX0\prochazka
7e4a862cc3 v6.5.0 2025-06-17 10:02:43 +02:00
SPRINX0\prochazka
ed2078ee3b handle license errors 2025-06-17 09:55:59 +02:00
SPRINX0\prochazka
f99c23a622 v6.4.3-premium-beta.10 2025-06-17 09:08:40 +02:00
SPRINX0\prochazka
41e7317764 v6.4.3-beta.9 2025-06-17 09:08:18 +02:00
SPRINX0\prochazka
e0a78c2399 equal behaviour for premium and community 2025-06-17 09:06:08 +02:00
SPRINX0\prochazka
95ad39d2d4 admin premium widget 2025-06-17 09:03:57 +02:00
CI workflows
b831f827b1 chore: auto-update github workflows 2025-06-16 11:25:21 +00:00
SPRINX0\prochazka
c5d8413d9c SYNC: fix 2025-06-16 11:25:08 +00:00
CI workflows
4648ea3424 Update pro ref 2025-06-16 11:25:04 +00:00
SPRINX0\prochazka
e05bd6f231 SYNC: front matter in chart screenshot 2025-06-16 11:24:49 +00:00
CI workflows
caadee7901 chore: auto-update github workflows 2025-06-16 10:58:30 +00:00
CI workflows
18c524117d Update pro ref 2025-06-16 10:58:11 +00:00
SPRINX0\prochazka
ad30fb8b04 SYNC: query result chart screenshot 2025-06-16 10:57:58 +00:00
SPRINX0\prochazka
a8077965a9 icon fix 2025-06-16 10:36:57 +02:00
SPRINX0\prochazka
532ab85ebb public cloud improvements 2025-06-16 10:32:49 +02:00
SPRINX0\prochazka
546227eb37 changelog 2025-06-16 09:55:15 +02:00
CI workflows
7ec3b262d3 chore: auto-update github workflows 2025-06-16 07:24:20 +00:00
CI workflows
c435000d24 Update pro ref 2025-06-16 07:24:03 +00:00
SPRINX0\prochazka
eaa60c281e SYNC: chart screenshot 2025-06-16 07:23:51 +00:00
Jan Prochazka
cd7cf63144 SYNC: copy to cloud folder works for connected DB 2025-06-15 08:22:58 +00:00
CI workflows
6a704aa079 chore: auto-update github workflows 2025-06-15 07:25:21 +00:00
CI workflows
d6b5a1cec8 Update pro ref 2025-06-15 07:25:03 +00:00
Jan Prochazka
9a24ad31cc SYNC: pie chart out labels 2025-06-15 07:24:50 +00:00
CI workflows
9331630b54 chore: auto-update github workflows 2025-06-15 06:58:43 +00:00
CI workflows
904e869d7f Update pro ref 2025-06-15 06:58:29 +00:00
Jan Prochazka
307fa4f5e6 SYNC: trim license 2025-06-15 06:58:15 +00:00
SPRINX0\prochazka
131d16d3ea v6.4.3-beta.8 2025-06-13 16:35:05 +02:00
SPRINX0\prochazka
dbc54c45dd v6.4.3-premium-beta.7 2025-06-13 16:34:50 +02:00
SPRINX0\prochazka
a96a84d509 SYNC: grayed scripts for non active database 2025-06-13 14:31:53 +00:00
SPRINX0\prochazka
3eb8863f67 SYNC: cloud connections in tab names 2025-06-13 13:25:34 +00:00
SPRINX0\prochazka
8737ab077b SYNC: fixed status bar color 2025-06-13 13:14:16 +00:00
SPRINX0\prochazka
50bb6a1d19 SYNC: prod cloud 2025-06-13 13:12:09 +00:00
SPRINX0\prochazka
4181b75af7 SYNC: connection color for cloud connections 2025-06-13 13:08:56 +00:00
CI workflows
620705c87a chore: auto-update github workflows 2025-06-13 12:14:41 +00:00
CI workflows
50b7b93529 Update pro ref 2025-06-13 12:14:22 +00:00
SPRINX0\prochazka
6f18f6bd5c SYNC: debug config 2025-06-13 12:14:07 +00:00
SPRINX0\prochazka
01d256eeee SYNC: don't show private cloud for web app 2025-06-13 11:52:03 +00:00
SPRINX0\prochazka
a1405412a8 debug config 2025-06-13 11:49:19 +02:00
SPRINX0\prochazka
58589b3a15 v6.4.3-premium-beta.6 2025-06-13 09:44:49 +02:00
SPRINX0\prochazka
2983266fdf Merge branch 'master' of https://github.com/dbgate/dbgate 2025-06-12 16:55:58 +02:00
SPRINX0\prochazka
e33df8f12d cloud content refactor 2025-06-12 16:55:55 +02:00
CI workflows
0e0e8e9d18 chore: auto-update github workflows 2025-06-12 13:30:21 +00:00
Jan Prochazka
37f8b54752 Merge pull request #1130 from dbgate/feature/firebird
Feature/firebird
2025-06-12 15:29:58 +02:00
SPRINX0\prochazka
e9a086ad23 SYNC: refresh cloud files improvements 2025-06-12 12:26:35 +00:00
SPRINX0\prochazka
7c06a8ac41 SYNC: fix refresh publis files 2025-06-12 12:13:33 +00:00
SPRINX0\prochazka
70801d958e SYNC: security rename 2025-06-12 11:51:22 +00:00
SPRINX0\prochazka
cf3f95c952 SYNC: security fixes 2025-06-12 11:49:53 +00:00
Pavel
d708616a6a feat: add createFirebirdInsertStream with datetime fields transform 2025-06-12 13:44:24 +02:00
Pavel
17711bc5c9 Revert "feat: transform rows suport for json lines reader"
This reverts commit b74b6b3284.
2025-06-12 13:29:48 +02:00
Pavel
1e2474921b Revert "feat: transform firebird model rows"
This reverts commit 5760ada3b4.
2025-06-12 13:29:46 +02:00
SPRINX0\prochazka
3f37b2b728 security fixes 2025-06-12 10:58:46 +02:00
SPRINX0\prochazka
18b11df672 security: prevent file traversal in uploads 2025-06-12 10:43:27 +02:00
SPRINX0\prochazka
c34f2d4da7 better UX when logging in in electron 2025-06-11 17:25:14 +02:00
SPRINX0\prochazka
d61792581a Merge branch 'master' of https://github.com/dbgate/dbgate 2025-06-11 17:02:40 +02:00
SPRINX0\prochazka
76d9a511b8 sql generate aliases automatically #1122 2025-06-11 17:02:38 +02:00
CI workflows
4248326697 chore: auto-update github workflows 2025-06-11 12:41:59 +00:00
SPRINX0\prochazka
a540b38151 don't generate artifacts for check build 2025-06-11 14:41:34 +02:00
SPRINX0\prochazka
8fb5ef0c1d v6.4.3-premium-beta.5 2025-06-11 14:39:00 +02:00
SPRINX0\prochazka
2da4979e59 UX fix 2025-06-11 14:29:59 +02:00
SPRINX0\prochazka
0146e4a1dd #1111 mssql - handle timestamp and computed columns in clonerows 2025-06-11 11:28:46 +02:00
SPRINX0\prochazka
34bdb72ffd #1118 2025-06-11 11:09:07 +02:00
SPRINX0\prochazka
2ef7c63047 copy column names #1119 2025-06-11 10:34:22 +02:00
SPRINX0\prochazka
95f5417761 fixed grid performance problem - limited length of cell string 2025-06-11 09:30:58 +02:00
SPRINX0\prochazka
4922ec4499 Merge branch 'master' into feature/firebird 2025-06-11 08:09:28 +02:00
SPRINX0\prochazka
20d947a199 readme for firebird 2025-06-10 16:46:16 +02:00
Pavel
c9444c5318 Merge branch 'master' into feature/firebird 2025-06-10 14:57:26 +02:00
SPRINX0\prochazka
871dc90ee4 fixed community build (missing JslChart.svelte) 2025-06-09 16:32:57 +02:00
Jan Prochazka
d1925945b4 skipped failed tests 2025-06-09 14:43:55 +02:00
Jan Prochazka
6625080fde Merge pull request #1139 from dbgate/feature/duckdb-1132
Feature/duckdb 1132
2025-06-09 10:52:58 +02:00
CI workflows
1110609e39 chore: auto-update github workflows 2025-06-09 08:39:51 +00:00
CI workflows
f21d2c7253 Update pro ref 2025-06-09 08:39:32 +00:00
SPRINX0\prochazka
9c1d330945 SYNC: data label formatter 2025-06-09 08:39:21 +00:00
SPRINX0\prochazka
cd7800056c SYNC: commented out charts test 2025-06-09 08:25:54 +00:00
CI workflows
7ff4bec3bc chore: auto-update github workflows 2025-06-09 08:16:27 +00:00
CI workflows
d305cf2167 Update pro ref 2025-06-09 08:16:07 +00:00
SPRINX0\prochazka
e77b83bd92 SYNC: chart labels for pie chart 2025-06-09 08:15:54 +00:00
CI workflows
171d58658a chore: auto-update github workflows 2025-06-09 07:16:15 +00:00
CI workflows
a6f6bc4c0a Update pro ref 2025-06-09 07:15:57 +00:00
Jan Prochazka
f03cffe3f8 SYNC: Merge pull request #4 from dbgate/feature/charts 2025-06-09 07:15:45 +00:00
Pavel
809dca184e chore: add start:api:watch script 2025-06-05 20:26:41 +02:00
Pavel
ecda226949 fix: correctly map DuckDBTimeValue to string 2025-06-05 20:26:33 +02:00
Pavel
ff1b58ebd8 fix: correctly map DuckDBDateValue to string 2025-06-05 20:26:17 +02:00
Pavel
5760ada3b4 feat: transform firebird model rows 2025-06-05 16:31:17 +02:00
Pavel
b74b6b3284 feat: transform rows suport for json lines reader 2025-06-05 16:28:50 +02:00
Pavel
e4cc4b6f58 fix: process blob values, update firebird dialect 2025-06-05 16:27:28 +02:00
Pavel
dd90851477 fix: update collumns object id 2025-06-05 14:24:33 +02:00
Pavel
da9b127468 fix: group indexes for firebird 2025-06-05 12:31:36 +02:00
Pavel
d6b05e44cb fix: skip all table renames for firebird 2025-06-05 11:51:17 +02:00
Pavel
58c1b5b98d fix:divide field length by 4 2025-06-05 11:35:00 +02:00
Pavel
c270cba8d6 fix: apply skipRenameTable filter correctly 2025-06-05 10:37:04 +02:00
Pavel
58f1f749fc fix: respect implicitNullDeclaration in alter database queries 2025-06-03 18:15:07 +02:00
Pavel
38d87a7c8f fix: update column deps for firebird 2025-06-03 17:39:49 +02:00
Pavel
4e13598708 fix: add custom create index to firebird dumper 2025-06-03 17:39:40 +02:00
Pavel
4177448d32 fix: correctly processing script outside of transactions for firebird 2025-06-03 17:19:02 +02:00
Pavel
e4911a6f82 feat: discard result support for firebird query 2025-06-03 17:18:44 +02:00
Pavel
159224700f fix: skip table rename for firebird 2025-06-03 16:52:51 +02:00
Pavel
696d4e7342 feat: skip change column for firebird 2025-06-03 16:36:40 +02:00
Pavel
ffb6cfaa4a feat: add writeTable to firebird 2025-06-03 14:20:54 +02:00
Pavel
b8899fcafa fix: always pass runDeployInTransaction to scipt 2025-06-03 14:11:01 +02:00
Pavel
aba829c991 feat: skip data modifiaciton for firebird 2025-06-03 13:56:24 +02:00
Pavel
8f4c61c259 fix: correct transaction syntax for firebird 2025-06-03 13:56:13 +02:00
Pavel
a19648a6e8 fix: correct runSqlInTransaction 2025-06-03 13:07:46 +02:00
Pavel
d5c0f7045e fix: add arguments to runSqlInTransaction 2025-06-03 12:44:04 +02:00
SPRINX0\prochazka
6f69205818 v6.4.3-premium-beta.4 2025-05-30 13:21:10 +02:00
SPRINX0\prochazka
8166da548c db2 config 2025-05-30 10:56:18 +02:00
SPRINX0\prochazka
d54f7293b7 db2 test container config 2025-05-30 08:16:18 +02:00
Pavel
225520a765 feat: add useTransaction option to deployDb 2025-05-29 22:52:38 +02:00
CI workflows
af1eccde8e chore: auto-update github workflows 2025-05-29 16:11:09 +00:00
CI workflows
5d37280643 Update pro ref 2025-05-29 16:10:50 +00:00
SPRINX0\prochazka
80597039f5 SYNC: charts 2025-05-29 16:10:38 +00:00
Pavel
2766aedc01 fix: correct databaseFile for engines with databseFileLocationOnServer 2025-05-29 15:26:58 +02:00
Pavel
da3e12cb7e feat: add indexes to firebird 2025-05-29 15:20:51 +02:00
Pavel
8baff1b0d2 fix: add object id condtion to firbeird uniques 2025-05-29 15:20:39 +02:00
Pavel
4ff5f9204e fix: correctly skip inc analysis 2025-05-29 15:15:57 +02:00
Pavel
515339bbd8 fix: add changeColumnDependencies to firebird 2025-05-29 14:57:38 +02:00
SPRINX0\prochazka
943634b0e2 Merge branch 'master' of https://github.com/dbgate/dbgate 2025-05-29 14:56:12 +02:00
SPRINX0\prochazka
212b26b960 temporatily disable MognoDB profiler support 2025-05-29 14:56:07 +02:00
CI workflows
c0b41987aa chore: auto-update github workflows 2025-05-29 12:55:11 +00:00
CI workflows
b4ef640052 Update pro ref 2025-05-29 12:54:50 +00:00
SPRINX0\prochazka
db6b7f52eb removed charts & profiler 2025-05-29 14:54:08 +02:00
Pavel
55b4b9e02a feat: add script method to firebird driver 2025-05-29 14:53:54 +02:00
Pavel
4e6ae93b13 fix: add dropColumnDependencies to firebird 2025-05-29 14:53:34 +02:00
Pavel
c9a5fe5676 fix: make firebird singledatabase 2025-05-29 14:53:09 +02:00
Pavel
a5adfb7c7f feat: add uniques to firebird 2025-05-29 14:52:44 +02:00
CI workflows
1794b86041 chore: auto-update github workflows 2025-05-29 12:40:42 +00:00
SPRINX0\prochazka
f405124ce4 fix 2025-05-29 14:40:20 +02:00
CI workflows
25060c1477 chore: auto-update github workflows 2025-05-29 12:31:31 +00:00
SPRINX0\prochazka
6ad218f354 upload artifacts forr check build 2025-05-29 14:31:08 +02:00
CI workflows
d1c52548b0 chore: auto-update github workflows 2025-05-29 12:05:29 +00:00
SPRINX0\prochazka
9dc847b72f fix 2025-05-29 14:05:10 +02:00
CI workflows
5b04adb21f chore: auto-update github workflows 2025-05-29 12:00:15 +00:00
SPRINX0\prochazka
356d25e548 build app check 2025-05-29 13:59:53 +02:00
SPRINX0\prochazka
a9958af818 v6.4.3-beta.3 2025-05-29 13:21:17 +02:00
Jan Prochazka
fb359b7f87 Merge pull request #1120 from ProjectInfinity/command-palette-redesign
feat: redesign CommandPalette
2025-05-29 12:50:18 +02:00
SPRINX0\prochazka
7f087819a6 Merge branch 'feature/cloud' 2025-05-29 12:49:01 +02:00
SPRINX0\prochazka
e836fa3d38 show license - better UX 2025-05-29 12:44:31 +02:00
Pavel
9a69f1108d fix: skipIncrementalAnalysis for firebird 2025-05-29 11:22:16 +02:00
Pavel
2c5c58dc90 fix: map datatype to lowerase variants in firebird 2025-05-29 10:23:26 +02:00
SPRINX0\prochazka
cb50d2838a license limit modal 2025-05-28 17:37:55 +02:00
SPRINX0\prochazka
aff7125914 Revert "tmp change"
This reverts commit 45d82dce04.
2025-05-28 16:44:58 +02:00
SPRINX0\prochazka
45d82dce04 tmp change 2025-05-28 15:55:53 +02:00
SPRINX0\prochazka
7a3b27227a stats fixed 2025-05-28 13:21:52 +02:00
SPRINX0\prochazka
7b50a19b2c cloud file, folder operations 2025-05-28 10:46:35 +02:00
SPRINX0\prochazka
741b942dea cloud files WIP 2025-05-28 08:25:10 +02:00
Pavel
f7ca64a49d fix: remove container_name from workflows 2025-05-27 19:16:21 +02:00
Pavel
2f7b3455e5 feat: add views to firebird 2025-05-27 19:05:02 +02:00
Pavel
1568dfc183 feat: add object ids to firebird queries 2025-05-27 18:49:31 +02:00
SPRINX0\prochazka
d3a5df0007 delete conn on cloud, save file to cloud WIP 2025-05-27 16:46:53 +02:00
Pavel
c20cac621a fix: update firebird workflows 2025-05-27 12:52:46 +02:00
SPRINX0\prochazka
74560c3289 duplicate cloud connection 2025-05-26 17:59:03 +02:00
SPRINX0\prochazka
f94bf3f8ce cloud fixes 2025-05-26 17:24:13 +02:00
SPRINX0\prochazka
d26db7096d refactor - handle cloud listeners 2025-05-26 17:02:09 +02:00
SPRINX0\prochazka
afde0a7423 cloud connection save 2025-05-26 16:46:04 +02:00
SPRINX0\prochazka
cc930a3ff9 cloud connections expansion fix 2025-05-26 15:50:48 +02:00
SPRINX0\prochazka
60ecdadc74 cloud account in statusbar 2025-05-26 15:43:14 +02:00
SPRINX0\prochazka
82fc1850cf API rename 2025-05-26 14:43:10 +02:00
SPRINX0\prochazka
88f937f73e save new connection on cloud 2025-05-26 14:41:41 +02:00
SPRINX0\prochazka
b3497c7306 database content UX 2025-05-26 12:58:20 +02:00
SPRINX0\prochazka
366ab2e0cd refresh public cloud files only on session start 2025-05-26 12:23:38 +02:00
SPRINX0\prochazka
98e4fabd2e cloud fixes 2025-05-26 12:06:33 +02:00
SPRINX0\prochazka
716c3573fd fixed scroll 2025-05-26 09:58:25 +02:00
Pavel
842d8dd780 feat: add triggers to firebird tests 2025-05-23 01:00:18 +02:00
Pavel
c767dfb22e fix: add createSql to firebird triggers 2025-05-23 00:59:34 +02:00
Pavel
f94901c3b2 feat: handle implicit null declation in alter-table 2025-05-23 00:28:27 +02:00
SPRINX0\prochazka
c9638aefe9 show cloud API errors 2025-05-22 17:30:24 +02:00
SPRINX0\prochazka
8bd4721686 delete, rename folders 2025-05-22 17:09:25 +02:00
Pavel
808f7504c3 feat: add changeColumn, renameColumn, dropColumn to firebird dumper 2025-05-22 16:49:29 +02:00
Pavel
8ea7d3d5e8 fix: correctly show default value for firebird 2025-05-22 16:48:11 +02:00
Pavel
d4931890ae fix: use table name as pureName for firebird column 2025-05-22 15:50:40 +02:00
SPRINX0\prochazka
f4a879a452 encrypting cloud content 2025-05-22 15:48:59 +02:00
Pavel
78521ffdb4 fix: remove schemaName from firebird columns query 2025-05-22 15:44:15 +02:00
Pavel
8ea3f80b97 feat: add firebird to github workflows 2025-05-22 15:40:43 +02:00
Pavel
0d8d87857c fix: skip autoIncrement for firedb tests 2025-05-22 15:40:23 +02:00
Pavel
3a3a261d9c fix: correct firedb columns notNull query 2025-05-22 15:40:07 +02:00
Pavel
2e00daf63c fix: correct implicitNullDeclaration usage in dumper 2025-05-22 15:39:46 +02:00
SPRINX0\prochazka
1b8bb0c1fd fixes 2025-05-22 13:20:39 +02:00
SPRINX0\prochazka
5c33579544 cloud content fixes 2025-05-22 13:07:45 +02:00
SPRINX0\prochazka
f8081ff09e cloud database content 2025-05-22 10:43:46 +02:00
SPRINX0\prochazka
01b7eeeecf cloud widgets refactor 2025-05-22 10:29:36 +02:00
SPRINX0\prochazka
d2f4c374a9 smaller widget icon panel 2025-05-22 10:12:48 +02:00
SPRINX0\prochazka
07073eebe9 connecting to cloud database 2025-05-22 09:46:07 +02:00
SPRINX0\prochazka
590a4ae476 show cloud content 2025-05-21 17:09:16 +02:00
SPRINX0\prochazka
b553a81d47 load cloud folders 2025-05-21 14:48:28 +02:00
SPRINX0\prochazka
7d4e53e413 content cloud WIP 2025-05-20 16:43:06 +02:00
SPRINX0\prochazka
839b0f6f5e public cloud search 2025-05-20 13:04:57 +02:00
CI workflows
893c5da4ef chore: auto-update github workflows 2025-05-20 08:52:30 +00:00
CI workflows
f9b893edfa Update pro ref 2025-05-20 08:52:14 +00:00
SPRINX0\prochazka
b4fadb39bf cloud files - opening 2025-05-19 16:59:56 +02:00
SPRINX0\prochazka
310f8bf6f7 public cloud widget 2025-05-19 16:33:04 +02:00
CI workflows
903a26a330 chore: auto-update github workflows 2025-05-19 13:24:23 +00:00
CI workflows
41ebd39810 Update pro ref 2025-05-19 13:24:06 +00:00
SPRINX0\prochazka
281de5196e update cloud files 2025-05-19 10:39:35 +02:00
CI workflows
a9ab864cbb chore: auto-update github workflows 2025-05-19 08:11:23 +00:00
CI workflows
f3ff910821 Update pro ref 2025-05-19 08:11:00 +00:00
SPRINX0\prochazka
ba5179f1e8 Merge branch 'master' into feature/cloud 2025-05-16 13:56:34 +02:00
SPRINX0\prochazka
05e8f6ed78 v6.4.3-alpha.1 2025-05-16 13:55:45 +02:00
SPRINX0\prochazka
23150815a0 use default target schema in dbDeploy 2025-05-16 13:55:14 +02:00
SPRINX0\prochazka
a50f223fe3 cloud icons WIP 2025-05-16 13:54:08 +02:00
SPRINX0\prochazka
9329345d98 basic cloud signin workflow 2025-05-16 12:19:26 +02:00
SPRINX0\prochazka
c71c32b363 readme 2025-05-16 08:10:20 +02:00
SPRINX0\prochazka
5590aa7234 feedback URL 2025-05-16 08:04:44 +02:00
SPRINX0\prochazka
4a3491e0b5 feedback menu link 2025-05-16 08:03:08 +02:00
SPRINX0\prochazka
e8cb87ae3d feedback link 2025-05-16 08:00:33 +02:00
Pavel
2f6427af32 feat: set implicitNullDeclaration to true for firebird 2025-05-15 17:00:02 +02:00
CI workflows
5564047001 chore: auto-update github workflows 2025-05-15 14:08:12 +00:00
CI workflows
22577c5f87 Update pro ref 2025-05-15 14:07:52 +00:00
Jan Prochazka
4dc2627da2 cloud login WIP 2025-05-15 16:01:51 +02:00
Pavel
05aaf0de9f feat: add firebird to test engines 2025-05-15 15:05:44 +02:00
Pavel
951bfa23f3 feat: firebird use attachOrCreate on connect, add dbFileExtension and locaiton on server to tests 2025-05-15 15:04:54 +02:00
Pavel
7ac6cfcf25 feat: offsetFirstSkipRangeSyntax support 2025-05-15 13:32:07 +02:00
Pavel
06055a7c4c fix: remove schema from firebird 2025-05-15 13:22:01 +02:00
ProjectInfinity
b33198d1bf feat: redesign CommandPalette 2025-05-14 19:56:49 +02:00
SPRINX0\prochazka
f826b9eb6e 6.4.2 changelog 2025-05-14 14:59:49 +02:00
SPRINX0\prochazka
2b58121552 v6.4.2 2025-05-14 14:53:20 +02:00
Nybkox
3e0f834796 feat: firebird FKs, PKs, procedures, funcs 2025-05-13 19:40:51 +02:00
Nybkox
85f7011e03 fix: remove empty getFastSnapshot 2025-05-13 17:18:17 +02:00
Nybkox
3fd3de1828 feat: add firebird triggers 2025-05-07 00:29:42 +02:00
Nybkox
3e2840ca15 fix: add schema to tables 2025-05-07 00:13:40 +02:00
Nybkox
839ec9a456 feat: basic firebird analyser 2025-05-07 00:00:34 +02:00
Nybkox
bac8bd0006 feat: add firebird to to tests compose 2025-05-06 15:51:49 +02:00
343 changed files with 15128 additions and 2351 deletions

View File

@@ -92,6 +92,7 @@ jobs:
cp app/dist/*win*.exe artifacts/dbgate-beta.exe || true
cp app/dist/*win_x64.zip artifacts/dbgate-windows-beta.zip || true
cp app/dist/*win_arm64.zip artifacts/dbgate-windows-beta-arm64.zip || true
cp app/dist/*win_arm64.exe artifacts/dbgate-windows-beta-arm64.exe || true
cp app/dist/*-mac_universal.dmg artifacts/dbgate-beta.dmg || true
cp app/dist/*-mac_x64.dmg artifacts/dbgate-beta-x64.dmg || true
cp app/dist/*-mac_arm64.dmg artifacts/dbgate-beta-arm64.dmg || true

112
.github/workflows/build-app-check.yaml vendored Normal file
View File

@@ -0,0 +1,112 @@
# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
name: Electron app check build
'on':
push:
tags:
- check-[0-9]+-[0-9]+-[0-9]+.[0-9]+
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os:
- macos-14
- windows-2022
- ubuntu-22.04
steps:
- name: Install python 3.11 (MacOS)
if: matrix.os == 'macos-14'
run: |
brew install python@3.11
echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
- name: Context
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 22.x
- name: adjustPackageJson
run: |
node adjustPackageJson --community
- name: yarn set timeout
run: |
yarn config set network-timeout 100000
- name: yarn install
run: |
yarn install
- name: setCurrentVersion
run: |
yarn setCurrentVersion
- name: printSecrets
run: |
yarn printSecrets
env:
GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
- name: fillPackagedPlugins
run: |
yarn fillPackagedPlugins
- name: Install Snapcraft
if: matrix.os == 'ubuntu-22.04'
uses: samuelmeuli/action-snapcraft@v1
- name: Publish
run: |
yarn run build:app
env:
GH_TOKEN: ${{ secrets.GH_TOKEN }}
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
- name: Copy artifacts
run: |
mkdir artifacts
cp app/dist/*.deb artifacts/dbgate-check.deb || true
cp app/dist/*x86*.AppImage artifacts/dbgate-check.AppImage || true
cp app/dist/*arm64*.AppImage artifacts/dbgate-check-arm64.AppImage || true
cp app/dist/*armv7l*.AppImage artifacts/dbgate-check-armv7l.AppImage || true
cp app/dist/*win*.exe artifacts/dbgate-check.exe || true
cp app/dist/*win_x64.zip artifacts/dbgate-windows-check.zip || true
cp app/dist/*win_arm64.zip artifacts/dbgate-windows-check-arm64.zip || true
cp app/dist/*win_arm64.exe artifacts/dbgate-windows-check-arm64.exe || true
cp app/dist/*-mac_universal.dmg artifacts/dbgate-check.dmg || true
cp app/dist/*-mac_x64.dmg artifacts/dbgate-check-x64.dmg || true
cp app/dist/*-mac_arm64.dmg artifacts/dbgate-check-arm64.dmg || true
mv app/dist/*.snap artifacts/dbgate-check.snap || true
mv app/dist/*.exe artifacts/ || true
mv app/dist/*.zip artifacts/ || true
mv app/dist/*.tar.gz artifacts/ || true
mv app/dist/*.AppImage artifacts/ || true
mv app/dist/*.deb artifacts/ || true
mv app/dist/*.snap artifacts/ || true
mv app/dist/*.dmg artifacts/ || true
mv app/dist/*.blockmap artifacts/ || true
mv app/dist/*.yml artifacts/ || true
rm artifacts/builder-debug.yml
- name: Print content of notarization-error.log
if: failure() && matrix.os == 'macos-14'
run: |
find . -type f -name "notarization-error.log" -exec echo "=== Start of {} ===" \; -exec cat {} \; -exec echo "=== End of {} ===" \;

View File

@@ -39,7 +39,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 55cf42d58b843c4f1ffd6ab9b808f5f971bc3c8b
ref: 4b28757ade169ac0a1696351519bbaa4bbba5db9
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -123,6 +123,7 @@ jobs:
cp ../dbgate-merged/app/dist/*win*.exe artifacts/dbgate-premium-beta.exe || true
cp ../dbgate-merged/app/dist/*win_x64.zip artifacts/dbgate-windows-premium-beta.zip || true
cp ../dbgate-merged/app/dist/*win_arm64.zip artifacts/dbgate-windows-premium-beta-arm64.zip || true
cp ../dbgate-merged/app/dist/*win_arm64.exe artifacts/dbgate-windows-premium-beta-arm64.exe || true
cp ../dbgate-merged/app/dist/*-mac_universal.dmg artifacts/dbgate-premium-beta.dmg || true
cp ../dbgate-merged/app/dist/*-mac_x64.dmg artifacts/dbgate-premium-beta-x64.dmg || true
cp ../dbgate-merged/app/dist/*-mac_arm64.dmg artifacts/dbgate-premium-beta-arm64.dmg || true

View File

@@ -39,7 +39,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 55cf42d58b843c4f1ffd6ab9b808f5f971bc3c8b
ref: 4b28757ade169ac0a1696351519bbaa4bbba5db9
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -123,6 +123,7 @@ jobs:
cp ../dbgate-merged/app/dist/*win*.exe artifacts/dbgate-premium-latest.exe || true
cp ../dbgate-merged/app/dist/*win_x64.zip artifacts/dbgate-windows-premium-latest.zip || true
cp ../dbgate-merged/app/dist/*win_arm64.zip artifacts/dbgate-windows-premium-latest-arm64.zip || true
cp ../dbgate-merged/app/dist/*win_arm64.exe artifacts/dbgate-windows-premium-latest-arm64.exe || true
cp ../dbgate-merged/app/dist/*-mac_universal.dmg artifacts/dbgate-premium-latest.dmg || true
cp ../dbgate-merged/app/dist/*-mac_x64.dmg artifacts/dbgate-premium-latest-x64.dmg || true
cp ../dbgate-merged/app/dist/*-mac_arm64.dmg artifacts/dbgate-premium-latest-arm64.dmg || true

View File

@@ -91,6 +91,7 @@ jobs:
cp app/dist/*win*.exe artifacts/dbgate-latest.exe || true
cp app/dist/*win_x64.zip artifacts/dbgate-windows-latest.zip || true
cp app/dist/*win_arm64.zip artifacts/dbgate-windows-latest-arm64.zip || true
cp app/dist/*win_arm64.exe artifacts/dbgate-windows-latest-arm64.exe || true
cp app/dist/*-mac_universal.dmg artifacts/dbgate-latest.dmg || true
cp app/dist/*-mac_x64.dmg artifacts/dbgate-latest-x64.dmg || true
cp app/dist/*-mac_arm64.dmg artifacts/dbgate-latest-arm64.dmg || true

View File

@@ -22,10 +22,10 @@ jobs:
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- name: Install jq
run: |
sudo apt-get install jq -y
@@ -39,7 +39,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 55cf42d58b843c4f1ffd6ab9b808f5f971bc3c8b
ref: 4b28757ade169ac0a1696351519bbaa4bbba5db9
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro

View File

@@ -44,7 +44,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 55cf42d58b843c4f1ffd6ab9b808f5f971bc3c8b
ref: 4b28757ade169ac0a1696351519bbaa4bbba5db9
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro

View File

@@ -22,17 +22,17 @@ jobs:
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- name: Checkout dbgate/dbgate-pro
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 55cf42d58b843c4f1ffd6ab9b808f5f971bc3c8b
ref: 4b28757ade169ac0a1696351519bbaa4bbba5db9
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -98,3 +98,8 @@ jobs:
cd ..
cd dbgate-merged/plugins/dbgate-plugin-cosmosdb
npm publish
- name: Publish dbgate-plugin-firestore
run: |
cd ..
cd dbgate-merged/plugins/dbgate-plugin-firestore
npm publish

View File

@@ -22,10 +22,10 @@ jobs:
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- name: Configure NPM token
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

View File

@@ -13,10 +13,10 @@ jobs:
e2e-tests:
runs-on: ubuntu-latest
steps:
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- uses: actions/checkout@v3
with:
fetch-depth: 1
@@ -26,7 +26,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 55cf42d58b843c4f1ffd6ab9b808f5f971bc3c8b
ref: 4b28757ade169ac0a1696351519bbaa4bbba5db9
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -107,7 +107,7 @@ jobs:
ports:
- '16009:5556'
mongo:
image: mongo:4.0.12
image: mongo:4.4.29
env:
MONGO_INITDB_ROOT_USERNAME: root
MONGO_INITDB_ROOT_PASSWORD: Pwd2020Db

View File

@@ -13,10 +13,10 @@ jobs:
all-tests:
runs-on: ubuntu-latest
steps:
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- uses: actions/checkout@v3
with:
fetch-depth: 1
@@ -102,3 +102,14 @@ jobs:
image: ghcr.io/tursodatabase/libsql-server:latest
ports:
- '8080:8080'
firebird:
image: firebirdsql/firebird:latest
env:
FIREBIRD_DATABASE: mydatabase.fdb
FIREBIRD_USER: dbuser
FIREBIRD_PASSWORD: dbpassword
ISC_PASSWORD: masterkey
FIREBIRD_TRACE: false
FIREBIRD_USE_LEGACY_AUTH: true
ports:
- '3050:3050'

2
.nvmrc
View File

@@ -1 +1 @@
v21.7.3
v24.4.1

59
.vscode/launch.json vendored
View File

@@ -1,20 +1,41 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Launch API",
"skipFiles": [
"<node_internals>/**"
],
"program": "${workspaceFolder}/packages/api/src/index.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
]
}
]
}
"version": "0.2.0",
"configurations": [
{
"name": "Debug App",
"type": "node",
"request": "launch",
"program": "${workspaceFolder}/packages/api/src/index.js",
"envFile": "${workspaceFolder}/packages/api/.env",
"args": ["--listen-api"],
"console": "integratedTerminal",
"restart": true,
"runtimeExecutable": "node",
"skipFiles": ["<node_internals>/**"]
},
{
"name": "Debug App (Break on Start)",
"type": "node",
"request": "launch",
"program": "${workspaceFolder}/packages/api/src/index.js",
"args": ["--listen-api"],
"envFile": "${workspaceFolder}/.env",
"console": "integratedTerminal",
"restart": true,
"runtimeExecutable": "node",
"skipFiles": ["<node_internals>/**"],
"stopOnEntry": true
},
{
"name": "Attach to Process",
"type": "node",
"request": "attach",
"port": 9229,
"restart": true,
"localRoot": "${workspaceFolder}",
"remoteRoot": "${workspaceFolder}",
"skipFiles": ["<node_internals>/**"]
}
]
}

View File

@@ -8,6 +8,78 @@ Builds:
- linux - application for linux
- win - application for Windows
## 6.6.0
- ADDED: Database chat - AI powered chatbot, which knows your database (Premium)
- ADDED: Firestore support (Premium)
- REMOVED: Query AI assistant (replaced by Database Chat) (Premium)
- FIXED: Chart permissions were ignored (Premium)
## 6.5.6
- ADDED: New object window - quick access to most common functions
- ADDED: Possibility to disable split query by empty line #1162
- ADDED: Possibility to opt out authentication #1152
- FIXED: Separate schema mode now works in Team Premium edition
- FIXED: Handled situation, when user enters expired license, which is already prolonged
- FIXED: Fixed some minor problems of charts
## 6.5.5
- ADDED: Administer cloud folder window
- CHANGED: Cloud menu redesign
- ADDED: Audit log (for Team Premium edition)
- ADDED: Added new timeline chart type (line chart with time axis)
- ADDED: Chart grouping (more measure determined from data)
- CHANGED: Improved chart autodetection - string X axis (with bar type), COUNT as measure, split different measures
- ADDED: Added chart data type detection
- FIXED: Fixed chart displaying problems
- FIXED: Fixed exporting chart to HTML
- CHANGED: Choose COUNT measure without selecting underlying ID field (use virtual __count)
- FIXED: Problems with authentification administration, especially for Postgres storage
- CHANGED: Anonymous autentification (in Team Premium) is now by default disabled
## 6.5.3
- CHANGED: Improved DbGate Cloud sign-in workflow
- FIXED: Some fixes and error handling in new charts engine
- ADDED: Charts - ability to choose aggregate function
- CHANGED: Improved About window
## 6.5.2
- CHANGED: Autodetecting charts is disabled by default #1145
- CHANGED: Improved chart displaying workflow
- ADDED: Ability to close chart
## 6.5.1
- FIXED: DbGate Cloud e-mail sign-in method for desktop clients
## 6.5.0
- ADDED: DbGate cloud - online storage for connections, SQL scripts and other objects
- ADDED: Public knowledge base - common SQL scripts for specific DB engines (table sizes, index stats etc.)
- ADDED: Query results could be visualised in charts (Premium)
- REMOVED: Chart from selection, active charts - replaced by query result charts
- ADDED: FirebirdSQL support
- ADDED: SQL front matter - properties of SQL script
- ADDED: Auto-execute SQL script on open (saved in SQL front matter)
- CHANGED: Smaller widget icon panel
- CHANGED: Applications and Single-connection mode removed from widget icon panel
- CHANGED: Temporarily disabled MongoDB profiler support
- FIXED: Pie chart distorted if settings change #838
- FIXED: SQL server generated insert statement should exclude computed and timestamp columns #1111
- ADDED: Added option "Show all columns when searching" #1118
- ADDED: Copy cells/rows (e.g. column names) from Structure view #1119
- ADDED: Setting "Show table aliases in code completion" #1122
- FIXED: Vulnerability - check file paths in web version
- FIXED: Very slow render of tables with very log cells
## 6.4.2
- ADDED: Source label to docker container #1105
- FIXED: DbGate restart needed to take effect after trigger is created/deleted on mariadb #1112
- ADDED: View PostgreSQL query console output #1108
- FIXED: Single quote generete MySql error #1107
- ADDED: Ability to limit query result count #1098
- CHANGED: Correct processing of bigint columns #1087 #1055 #583
- CHANGED: Improved and optimalized algorithm of loading redis keys #1062, #1034
- FIXED: Fixed loading Redis keys with :: in key name
## 6.4.0
- ADDED: DuckDB support
- ADDED: Data deployer (Premium)

View File

@@ -20,6 +20,8 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
* Run web version as [NPM package](https://www.npmjs.com/package/dbgate-serve) or as [docker image](https://hub.docker.com/r/dbgate/dbgate)
* Use nodeJs [scripting interface](https://docs.dbgate.io/scripting) ([API documentation](https://docs.dbgate.io/apidoc))
* [Recommend DbGate](https://testimonial.to/dbgate) | [Rate on G2](https://www.g2.com/products/dbgate/reviews)
* [Give us feedback](https://dbgate.org/feedback) - it will help us to decide, how to improve DbGate in future
* We [offer 2-year PREMIUM license](https://dbgate.org/review/) for any honest review on these platforms (time-limited offer)
## Supported databases
* MySQL
@@ -37,6 +39,8 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
* Apache Cassandra
* libSQL/Turso (Premium)
* DuckDB
* Firebird
* Firestore (Premium)
<a href="https://raw.githubusercontent.com/dbgate/dbgate/master/img/screenshot1.png">
@@ -79,6 +83,7 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
* Archives - backup your data in NDJSON files on local filesystem (or on DbGate server, when using web application)
* NDJSON data viewer and editor - browse NDJSON data, edit data and structure directly on NDJSON files. Works also for big NDSON files
* Charts, export chart to HTML page
* AI powered database chat
* Show GEO data on map, export map to HTML page
* For detailed info, how to run DbGate in docker container, visit [docker hub](https://hub.docker.com/r/dbgate/dbgate)
* Extensible plugin architecture
@@ -87,10 +92,12 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
Any contributions are welcome. If you want to contribute without coding, consider following:
* Tell your friends about DbGate or share on social networks - when more people will use DbGate, it will grow to be better
* Write review on [Slant.co](https://www.slant.co/improve/options/41086/~dbgate-review) or [G2](https://www.g2.com/products/dbgate/reviews)
* Purchase a [DbGate Premium](https://dbgate.io/purchase/premium/) liocense
* Write review on [Product Hunt](https://www.producthunt.com/products/dbgate) or [G2](https://www.g2.com/products/dbgate/reviews) - we offer [2-year PREMIUM license](https://dbgate.org/review/) for reviewers (time limited offer)
* Create issue, if you find problem in app, or you have idea to new feature. If issue already exists, you could leave comment on it, to prioritise most wanted issues
* Create some tutorial video on [youtube](https://www.youtube.com/playlist?list=PLCo7KjCVXhr0RfUSjM9wJMsp_ShL1q61A)
* Become a backer on [GitHub sponsors](https://github.com/sponsors/dbgate) or [Open collective](https://opencollective.com/dbgate)
* Add a SQL script to [Public Knowledge Base](https://github.com/dbgate/dbgate-knowledge-base)
* Where a small coding is acceptable for you, you could [create plugin](https://docs.dbgate.io/plugin-development). Plugins for new themes can be created actually without JS coding
Thank you!

View File

@@ -43,6 +43,8 @@ function adjustFile(file, isApp = false) {
if (process.argv.includes('--community')) {
delete json.optionalDependencies['mongodb-client-encryption'];
delete json.dependencies['@mongosh/service-provider-node-driver'];
delete json.dependencies['@mongosh/browser-runtime-electron'];
}
if (isApp && process.argv.includes('--premium')) {

View File

@@ -108,6 +108,7 @@ module.exports = ({ editMenu, isMac }) => [
{ command: 'app.openWeb', hideDisabled: true },
{ command: 'app.openIssue', hideDisabled: true },
{ command: 'app.openSponsoring', hideDisabled: true },
{ command: 'app.giveFeedback', hideDisabled: true },
{ divider: true },
{ command: 'settings.commands', hideDisabled: true },
{ command: 'tabs.changelog', hideDisabled: true },

View File

@@ -0,0 +1,129 @@
#!/usr/bin/env node
// assign-dbgm-codes.mjs
import fs from 'fs/promises';
import path from 'path';
const PLACEHOLDER = 'DBGM-00000';
const CODE_RE = /DBGM-(\d{5})/g;
const JS_TS_RE = /\.(mjs|cjs|js|ts|jsx|tsx)$/i;
const IGNORE_DIRS = new Set([
'node_modules',
'.git',
'.hg',
'.svn',
'dist',
'build',
'out',
'.next',
'.turbo',
'.cache',
]);
const IGNORE_FILES = ['assign-dbgm-codes.mjs', 'package.json', 'README.md'];
// --- CLI ---
const args = process.argv.slice(2);
const dryRun = args.includes('--dry');
const rootArg = args.find(a => a !== '--dry') || process.cwd();
const root = path.resolve(rootArg);
// --- helpers ---
async function* walk(dir) {
const entries = await fs.readdir(dir, { withFileTypes: true });
for (const e of entries) {
if (e.isDirectory()) {
if (IGNORE_DIRS.has(e.name)) continue;
yield* walk(path.join(dir, e.name));
} else if (e.isFile()) {
if (JS_TS_RE.test(e.name) && !IGNORE_FILES.includes(e.name)) yield path.join(dir, e.name);
}
}
}
function formatCode(n) {
return `DBGM-${String(n).padStart(5, '0')}`;
}
// Find the smallest positive integer not in `taken`
function makeNextCodeFn(taken) {
let n = 1;
// advance n to first free
while (taken.has(n)) n++;
return () => {
const code = n;
taken.add(code);
// move n to next free for next call
do {
n++;
} while (taken.has(n));
return formatCode(code);
};
}
// --- main ---
(async () => {
console.log(`Scanning: ${root} ${dryRun ? '(dry run)' : ''}`);
// 1) Collect all taken codes across the repo
const taken = new Set(); // numeric parts only
const files = [];
for await (const file of walk(root)) files.push(file);
await Promise.all(
files.map(async file => {
try {
const text = await fs.readFile(file, 'utf8');
for (const m of text.matchAll(CODE_RE)) {
const num = Number(m[1]);
if (Number.isInteger(num) && num > 0) taken.add(num);
}
} catch (err) {
console.warn(`! Failed to read ${file}: ${err.message}`);
}
})
);
console.log(`Found ${taken.size} occupied code(s).`);
// 2) Replace placeholders with next available unique code
const nextCode = makeNextCodeFn(taken);
let filesChanged = 0;
let placeholdersReplaced = 0;
for (const file of files) {
let text;
try {
text = await fs.readFile(file, 'utf8');
} catch (err) {
console.warn(`! Failed to read ${file}: ${err.message}`);
continue;
}
if (!text.includes(PLACEHOLDER)) continue;
let countInFile = 0;
const updated = text.replaceAll(PLACEHOLDER, () => {
countInFile++;
return nextCode();
});
if (countInFile > 0) {
placeholdersReplaced += countInFile;
filesChanged++;
console.log(`${dryRun ? '[dry]' : '[write]'} ${file}${countInFile} replacement(s)`);
if (!dryRun) {
try {
await fs.writeFile(file, updated, 'utf8');
} catch (err) {
console.warn(`! Failed to write ${file}: ${err.message}`);
}
}
}
}
console.log(`Done. Files changed: ${filesChanged}, placeholders replaced: ${placeholdersReplaced}.`);
})().catch(err => {
console.error(err);
process.exit(1);
});

View File

@@ -22,6 +22,8 @@ const volatilePackages = [
'ssh2',
'wkx',
'@duckdb/node-api',
'@mongosh/browser-runtime-electron',
'@mongosh/service-provider-node-driver',
];
module.exports = volatilePackages;

View File

@@ -7,7 +7,9 @@ const path = require('path');
module.exports = defineConfig({
e2e: {
// baseUrl: 'http://localhost:3000',
// trashAssetsBeforeRuns: false,
chromeWebSecurity: false,
setupNodeEvents(on, config) {
// implement node event listeners here
@@ -40,6 +42,12 @@ module.exports = defineConfig({
case 'multi-sql':
serverProcess = exec('yarn start:multi-sql');
break;
case 'cloud':
serverProcess = exec('yarn start:cloud');
break;
case 'charts':
serverProcess = exec('yarn start:charts');
break;
}
await waitOn({ resources: ['http://localhost:3000'] });

View File

@@ -191,7 +191,8 @@ describe('Data browser data', () => {
it('Query editor - join wizard', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewQuery').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_query').click();
cy.wait(1000);
cy.get('body').realType('select * from Invoice');
cy.get('body').realPress('{enter}');
@@ -248,14 +249,14 @@ describe('Data browser data', () => {
cy.themeshot('database-diagram');
});
it('Charts', () => {
cy.testid('WidgetIconPanel_file').click();
cy.contains('pie-chart').click();
cy.contains('line-chart').click();
cy.testid('TabsPanel_buttonSplit').click();
cy.testid('WidgetIconPanel_file').click();
cy.themeshot('view-split-charts');
});
// it('Charts', () => {
// cy.testid('WidgetIconPanel_file').click();
// cy.contains('pie-chart').click();
// cy.contains('line-chart').click();
// cy.testid('TabsPanel_buttonSplit').click();
// cy.testid('WidgetIconPanel_file').click();
// cy.themeshot('view-split-charts');
// });
it('Keyboard configuration', () => {
cy.testid('WidgetIconPanel_settings').click();
@@ -302,7 +303,8 @@ describe('Data browser data', () => {
});
it('Plugin tab', () => {
cy.testid('WidgetIconPanel_plugins').click();
cy.testid('WidgetIconPanel_settings').click();
cy.contains('Manage plugins').click();
cy.contains('dbgate-plugin-theme-total-white').click();
// text from plugin markdown
cy.contains('Total white theme');
@@ -379,19 +381,25 @@ describe('Data browser data', () => {
cy.themeshot('compare-database-settings');
});
it('Query editor - AI assistant', () => {
it('Database chat', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewQuery').click();
cy.testid('QueryTab_switchAiAssistantButton').click();
cy.testid('QueryAiAssistant_allowSendToAiServiceButton').click();
cy.testid('ConfirmModal_okButton').click();
cy.testid('QueryAiAssistant_promptInput').type('album names');
cy.testid('QueryAiAssistant_queryFromQuestionButton').click();
cy.contains('Use this', { timeout: 10000 }).click();
cy.testid('QueryTab_executeButton').click();
cy.contains('Balls to the Wall');
cy.themeshot('ai-assistant');
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_databaseChat').click();
cy.wait(1000);
cy.get('body').realType('find most popular artist');
cy.get('body').realPress('{enter}');
cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 20000 }).click();
cy.wait(20000);
// cy.contains('Iron Maiden');
cy.themeshot('database-chat');
// cy.testid('DatabaseChatTab_promptInput').click();
// cy.get('body').realType('I need top 10 songs with the biggest income');
// cy.get('body').realPress('{enter}');
// cy.contains('Hot Girl', { timeout: 20000 });
// cy.wait(1000);
// cy.themeshot('database-chat');
});
it('Modify data', () => {

View File

@@ -0,0 +1,112 @@
Cypress.on('uncaught:exception', (err, runnable) => {
// if the error message matches the one about WorkerGlobalScope importScripts
if (err.message.includes("Failed to execute 'importScripts' on 'WorkerGlobalScope'")) {
// return false to let Cypress know we intentionally want to ignore this error
return false;
}
// otherwise let Cypress throw the error
});
beforeEach(() => {
cy.visit('http://localhost:3000');
cy.viewport(1250, 900);
});
describe('Charts', () => {
it('Auto detect chart', () => {
cy.contains('MySql-connection').click();
cy.contains('charts_sample').click();
cy.testid('WidgetIconPanel_file').click();
cy.contains('chart1').click();
cy.contains('department_name');
// cy.testid('QueryTab_executeButton').click();
// cy.testid('QueryTab_openChartButton').click();
cy.testid('chart-canvas').should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
cy.themeshot('choose-detected-chart');
});
it('Two line charts', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_query').click();
cy.wait(1000);
cy.get('body').realType('SELECT InvoiceDate, Total from Invoice');
cy.contains('Execute').click();
cy.contains('Open chart').click();
cy.testid('chart-canvas').should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
cy.themeshot('two-line-charts');
});
it('Invoice naive autodetection', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_query').click();
cy.wait(1000);
cy.get('body').realType('SELECT * from Invoice');
cy.contains('Execute').click();
cy.contains('Open chart').click();
cy.testid('chart-canvas').should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
cy.themeshot('chart-naive-autodetection');
});
it('Invoice by country - grouped chart', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_query').click();
cy.wait(1000);
cy.get('body').realType(
"SELECT InvoiceDate, Total, BillingCountry from Invoice where BillingCountry in ('USA', 'Canada', 'Brazil', 'France', 'Germany')"
);
cy.contains('Execute').click();
cy.contains('Open chart').click();
cy.testid('ChartSelector_chart_1').click();
cy.testid('JslChart_customizeButton').click();
cy.testid('chart-canvas').should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
cy.themeshot('chart-grouped-autodetected');
cy.testid('ChartDefinitionEditor_chartTypeSelect').select('Bar');
cy.testid('ChartDefinitionEditor_xAxisTransformSelect').select('Date (Year)');
cy.testid('chart-canvas').should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
cy.themeshot('chart-grouped-bars');
});
it('Public Knowledge base - show chart', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('WidgetIconPanel_cloud-public').click();
cy.testid('public-cloud-file-tag-mysql/folder-MySQL/tag-premium/top-tables-row-count.sql').click();
cy.testid('chart-canvas').should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
cy.themeshot('public-knowledge-base-tables-sizes');
});
it('Auto detect chart', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.contains('Invoice').rightclick();
cy.contains('SQL template').click();
cy.contains('SELECT').click();
cy.testid('QueryTab_detectChartButton').click();
cy.testid('QueryTab_executeButton').click();
cy.contains('Chart 1').click();
cy.testid('ChartSelector_chart_0').click();
cy.testid('JslChart_customizeButton').click();
cy.testid('ChartDefinitionEditor_chartTypeSelect').select('Bar');
cy.testid('ChartDefinitionEditor_chartTypeSelect').select('Line');
cy.testid('chart-canvas').should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
cy.themeshot('query-result-chart');
});
it('New object window', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.contains('Invoice').click();
cy.testid('WidgetIconPanel_addButton').click();
cy.contains('Compare database');
cy.themeshot('new-object-window');
});
});

View File

@@ -0,0 +1,56 @@
Cypress.on('uncaught:exception', (err, runnable) => {
// if the error message matches the one about WorkerGlobalScope importScripts
if (err.message.includes("Failed to execute 'importScripts' on 'WorkerGlobalScope'")) {
// return false to let Cypress know we intentionally want to ignore this error
return false;
}
// otherwise let Cypress throw the error
});
beforeEach(() => {
cy.visit('http://localhost:3000');
cy.viewport(1250, 900);
});
describe('Cloud tests', () => {
it('Private cloud', () => {
cy.testid('WidgetIconPanel_cloudAccount');
cy.window().then(win => {
win.__loginToCloudTest('dbgate.test@gmail.com');
});
cy.contains('dbgate.test@gmail.com');
// cy.testid('WidgetIconPanel_cloudAccount').click();
// cy.origin('https://identity.dbgate.io', () => {
// cy.contains('Sign in with GitHub').click();
// });
// cy.origin('https://github.com', () => {
// cy.get('#login_field').type('dbgatetest');
// cy.get('#password').type('Pwd2020Db');
// cy.get('input[type="submit"]').click();
// });
// cy.wait(3000);
// cy.location('origin').then(origin => {
// if (origin === 'https://github.com') {
// // Still on github.com → an authorization step is waiting
// cy.origin('https://github.com', () => {
// // Try once, don't wait the full default timeout
// cy.get('button[data-octo-click="oauth_application_authorization"]', { timeout: 500, log: false }).click(); // if the button exists it will be clicked
// // if not, the short timeout elapses and we drop out
// });
// } else {
// // Already back on localhost nothing to authorize
// cy.log('OAuth redirect skipped the Authorize screen');
// }
// });
cy.contains('Testing Connections').rightclick();
cy.contains('Administrate access').click();
cy.contains('User email');
cy.themeshot('administer-shared-folder');
});
});

View File

@@ -59,7 +59,8 @@ describe('Transactions', () => {
cy.contains(connectionName).click();
if (databaseName) cy.contains(databaseName).click();
cy.testid('TabsPanel_buttonNewQuery').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_query').click();
cy.wait(1000);
cy.get('body').type(
formatQueryWithoutParams(driver, "INSERT INTO ~categories (~category_id, ~category_name) VALUES (5, 'test');")

View File

@@ -1,3 +1,12 @@
Cypress.on('uncaught:exception', (err, runnable) => {
// if the error message matches the one about WorkerGlobalScope importScripts
if (err.message.includes("Failed to execute 'importScripts' on 'WorkerGlobalScope'")) {
// return false to let Cypress know we intentionally want to ignore this error
return false;
}
// otherwise let Cypress throw the error
});
beforeEach(() => {
cy.visit('http://localhost:3000');
cy.viewport(1250, 900);
@@ -80,4 +89,34 @@ describe('Team edition tests', () => {
cy.testid('AdminMenuWidget_itemUsers').click();
cy.contains('test@example.com');
});
it('Audit logging', () => {
cy.testid('LoginPage_linkAdmin').click();
cy.testid('LoginPage_password').type('adminpwd');
cy.testid('LoginPage_submitLogin').click();
cy.testid('AdminMenuWidget_itemAuditLog').click();
cy.contains('Audit log is not enabled');
cy.testid('AdminMenuWidget_itemSettings').click();
cy.testid('AdminSettingsTab_auditLogCheckbox').click();
cy.testid('AdminMenuWidget_itemAuditLog').click();
cy.contains('No data for selected date');
cy.testid('AdminMenuWidget_itemConnections').click();
cy.contains('Open table').click();
cy.contains('displayName');
cy.get('.toolstrip').contains('Export').click();
cy.contains('CSV file').click();
cy.testid('AdminMenuWidget_itemUsers').click();
cy.contains('Open table').click();
cy.contains('login');
cy.get('.toolstrip').contains('Export').click();
cy.contains('XML file').click();
cy.testid('AdminMenuWidget_itemAuditLog').click();
cy.testid('AdminAuditLogTab_refreshButton').click();
cy.contains('Exporting query').click();
cy.themeshot('auditlog');
});
});

View File

@@ -42,3 +42,11 @@ beforeEach(() => {
});
});
});
// Cypress.Screenshot.defaults({
// onBeforeScreenshot() {
// if (window.Chart) {
// Object.values(window.Chart.instances).forEach(c => c.resize());
// }
// },
// });

View File

@@ -0,0 +1,6 @@
{"__isStreamHeader":true,"pureName":"departments","schemaName":"dbo","objectId":1205579333,"createDate":"2025-06-12T10:30:34.083Z","modifyDate":"2025-06-12T10:30:34.120Z","contentHash":"2025-06-12T10:30:34.120Z","columns":[{"columnName":"id","dataType":"int","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"name","dataType":"varchar(100)","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false}],"primaryKey":{"constraintName":"PK__departme__3213E83FE8E7043D","schemaName":"dbo","pureName":"departments","constraintType":"primaryKey","columns":[{"columnName":"id"}]},"foreignKeys":[],"indexes":[],"uniques":[],"engine":"mssql@dbgate-plugin-mssql"}
{"id":1,"name":"IT"}
{"id":2,"name":"Marketing"}
{"id":3,"name":"Finance"}
{"id":4,"name":"Human Resources"}
{"id":5,"name":"Research and Development"}

View File

@@ -0,0 +1,12 @@
name: departments
columns:
- name: id
type: int
default: null
notNull: true
- name: name
type: varchar(100)
default: null
notNull: true
primaryKey:
- id

View File

@@ -0,0 +1,39 @@
{"__isStreamHeader":true,"pureName":"employee_project","schemaName":"dbo","objectId":1333579789,"createDate":"2025-06-12T10:30:34.133Z","modifyDate":"2025-06-12T10:30:34.133Z","contentHash":"2025-06-12T10:30:34.133Z","columns":[{"columnName":"employee_id","dataType":"int","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"project_id","dataType":"int","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"role","dataType":"varchar(50)","notNull":false,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false}],"primaryKey":{"constraintName":"PK__employee__2EE9924949ED9668","schemaName":"dbo","pureName":"employee_project","constraintType":"primaryKey","columns":[{"columnName":"employee_id"},{"columnName":"project_id"}]},"foreignKeys":[{"constraintName":"FK__employee___emplo__5165187F","constraintType":"foreignKey","schemaName":"dbo","pureName":"employee_project","refSchemaName":"dbo","refTableName":"employees","updateAction":"NO ACTION","deleteAction":"NO ACTION","columns":[{"columnName":"employee_id","refColumnName":"id"}]},{"constraintName":"FK__employee___proje__52593CB8","constraintType":"foreignKey","schemaName":"dbo","pureName":"employee_project","refSchemaName":"dbo","refTableName":"projects","updateAction":"NO ACTION","deleteAction":"NO ACTION","columns":[{"columnName":"project_id","refColumnName":"id"}]}],"indexes":[],"uniques":[],"engine":"mssql@dbgate-plugin-mssql"}
{"employee_id":1,"project_id":6,"role":"Manager"}
{"employee_id":1,"project_id":8,"role":"Developer"}
{"employee_id":2,"project_id":7,"role":"Tester"}
{"employee_id":2,"project_id":8,"role":"Developer"}
{"employee_id":3,"project_id":4,"role":"Analyst"}
{"employee_id":3,"project_id":6,"role":"Developer"}
{"employee_id":4,"project_id":2,"role":"Manager"}
{"employee_id":4,"project_id":4,"role":"Analyst"}
{"employee_id":4,"project_id":5,"role":"Analyst"}
{"employee_id":5,"project_id":5,"role":"Tester"}
{"employee_id":6,"project_id":1,"role":"Analyst"}
{"employee_id":6,"project_id":6,"role":"Tester"}
{"employee_id":6,"project_id":9,"role":"Manager"}
{"employee_id":7,"project_id":8,"role":"Manager"}
{"employee_id":8,"project_id":10,"role":"Analyst"}
{"employee_id":9,"project_id":2,"role":"Analyst"}
{"employee_id":9,"project_id":6,"role":"Analyst"}
{"employee_id":9,"project_id":7,"role":"Developer"}
{"employee_id":10,"project_id":2,"role":"Manager"}
{"employee_id":10,"project_id":6,"role":"Analyst"}
{"employee_id":11,"project_id":1,"role":"Tester"}
{"employee_id":12,"project_id":4,"role":"Tester"}
{"employee_id":13,"project_id":2,"role":"Developer"}
{"employee_id":13,"project_id":3,"role":"Analyst"}
{"employee_id":13,"project_id":7,"role":"Developer"}
{"employee_id":14,"project_id":3,"role":"Developer"}
{"employee_id":14,"project_id":9,"role":"Manager"}
{"employee_id":15,"project_id":1,"role":"Developer"}
{"employee_id":15,"project_id":5,"role":"Manager"}
{"employee_id":16,"project_id":3,"role":"Tester"}
{"employee_id":16,"project_id":5,"role":"Developer"}
{"employee_id":17,"project_id":6,"role":"Analyst"}
{"employee_id":18,"project_id":1,"role":"Tester"}
{"employee_id":18,"project_id":5,"role":"Tester"}
{"employee_id":18,"project_id":6,"role":"Manager"}
{"employee_id":19,"project_id":6,"role":"Analyst"}
{"employee_id":20,"project_id":2,"role":"Developer"}
{"employee_id":20,"project_id":4,"role":"Developer"}

View File

@@ -0,0 +1,18 @@
name: employee_project
columns:
- name: employee_id
type: int
default: null
notNull: true
references: employees
- name: project_id
type: int
default: null
notNull: true
references: projects
- name: role
type: varchar(50)
default: null
primaryKey:
- employee_id
- project_id

View File

@@ -0,0 +1,21 @@
{"__isStreamHeader":true,"pureName":"employees","schemaName":"dbo","objectId":1237579447,"createDate":"2025-06-12T10:30:34.113Z","modifyDate":"2025-06-12T12:35:22.140Z","contentHash":"2025-06-12T12:35:22.140Z","columns":[{"columnName":"id","dataType":"int","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"name","dataType":"varchar(100)","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"email","dataType":"varchar(100)","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"hire_date","dataType":"date","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"department_id","dataType":"int","notNull":false,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false}],"primaryKey":{"constraintName":"PK__employee__3213E83FE576E55A","schemaName":"dbo","pureName":"employees","constraintType":"primaryKey","columns":[{"columnName":"id"}]},"foreignKeys":[{"constraintName":"FK__employees__depar__4CA06362","constraintType":"foreignKey","schemaName":"dbo","pureName":"employees","refSchemaName":"dbo","refTableName":"departments","updateAction":"NO ACTION","deleteAction":"NO ACTION","columns":[{"columnName":"department_id","refColumnName":"id"}]}],"indexes":[],"uniques":[{"constraintName":"UQ__employee__AB6E6164E18D883F","columns":[{"columnName":"email"}]}],"engine":"mssql@dbgate-plugin-mssql"}
{"id":1,"name":"John Smith","email":"john.smith@example.com","hire_date":"2018-07-09T00:00:00.000Z","department_id":2}
{"id":2,"name":"Jane Garcia","email":"jane.garcia@example.com","hire_date":"2019-10-13T00:00:00.000Z","department_id":5}
{"id":3,"name":"Grace Smith","email":"grace.smith@example.com","hire_date":"2019-03-16T00:00:00.000Z","department_id":1}
{"id":4,"name":"Charlie Williams","email":"charlie.williams@example.com","hire_date":"2020-10-18T00:00:00.000Z","department_id":2}
{"id":5,"name":"Eve Brown","email":"eve.brown@example.com","hire_date":"2018-04-12T00:00:00.000Z","department_id":4}
{"id":6,"name":"Alice Moore","email":"alice.moore@example.com","hire_date":"2019-04-20T00:00:00.000Z","department_id":2}
{"id":7,"name":"Eve Williams","email":"eve.williams@example.com","hire_date":"2020-04-26T00:00:00.000Z","department_id":4}
{"id":8,"name":"Eve Jones","email":"eve.jones@example.com","hire_date":"2022-10-04T00:00:00.000Z","department_id":3}
{"id":9,"name":"Diana Miller","email":"diana.miller@example.com","hire_date":"2021-03-28T00:00:00.000Z","department_id":1}
{"id":10,"name":"Diana Smith","email":"diana.smith@example.com","hire_date":"2018-04-12T00:00:00.000Z","department_id":2}
{"id":11,"name":"Hank Johnson","email":"hank.johnson@example.com","hire_date":"2020-09-16T00:00:00.000Z","department_id":2}
{"id":12,"name":"Frank Miller","email":"frank.miller@example.com","hire_date":"2023-01-12T00:00:00.000Z","department_id":4}
{"id":13,"name":"Jane Brown","email":"jane.brown@example.com","hire_date":"2023-05-07T00:00:00.000Z","department_id":3}
{"id":14,"name":"Grace Davis","email":"grace.davis@example.com","hire_date":"2019-08-22T00:00:00.000Z","department_id":3}
{"id":15,"name":"Jane Black","email":"jane.black@example.com","hire_date":"2019-04-28T00:00:00.000Z","department_id":2}
{"id":16,"name":"Charlie Smith","email":"charlie.smith@example.com","hire_date":"2019-06-12T00:00:00.000Z","department_id":5}
{"id":17,"name":"Eve Johnson","email":"eve.johnson@example.com","hire_date":"2020-11-07T00:00:00.000Z","department_id":5}
{"id":18,"name":"Jane Johnson","email":"jane.johnson@example.com","hire_date":"2020-04-06T00:00:00.000Z","department_id":2}
{"id":19,"name":"Hank Brown","email":"hank.brown@example.com","hire_date":"2023-05-10T00:00:00.000Z","department_id":2}
{"id":20,"name":"Frank Jones","email":"frank.jones@example.com","hire_date":"2020-10-26T00:00:00.000Z","department_id":1}

View File

@@ -0,0 +1,28 @@
name: employees
columns:
- name: id
type: int
default: null
notNull: true
- name: name
type: varchar(100)
default: null
notNull: true
- name: email
type: varchar(100)
default: null
notNull: true
- name: hire_date
type: date
default: null
notNull: true
- name: department_id
type: int
default: null
references: departments
primaryKey:
- id
uniques:
- name: UQ__employee__AB6E6164E18D883F
columns:
- email

View File

@@ -0,0 +1,141 @@
{"__isStreamHeader":true,"pureName":"finance_reports","schemaName":"dbo","objectId":338100245,"createDate":"2025-06-23T12:15:08.727Z","modifyDate":"2025-06-23T12:15:08.750Z","contentHash":"2025-06-23T12:15:08.750Z","columns":[{"columnName":"id","dataType":"int","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"date","dataType":"date","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"profit","dataType":"money","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false}],"foreignKeys":[{"constraintName":"project_id","constraintType":"foreignKey","schemaName":"dbo","pureName":"finance_reports","refSchemaName":"dbo","refTableName":"projects","updateAction":"NO ACTION","deleteAction":"NO ACTION","columns":[{"columnName":"id","refColumnName":"id"}]}],"indexes":[],"uniques":[],"engine":"mssql@dbgate-plugin-mssql"}
{"id":1,"date":"2022-01-01T00:00:00.000Z","profit":73923.4}
{"id":1,"date":"2022-01-31T00:00:00.000Z","profit":21837.75}
{"id":1,"date":"2022-03-02T00:00:00.000Z","profit":67859.8}
{"id":1,"date":"2022-04-01T00:00:00.000Z","profit":77403.3}
{"id":1,"date":"2022-05-01T00:00:00.000Z","profit":84083.19}
{"id":1,"date":"2022-05-31T00:00:00.000Z","profit":30040.55}
{"id":1,"date":"2022-06-30T00:00:00.000Z","profit":50947.14}
{"id":1,"date":"2022-07-30T00:00:00.000Z","profit":63345.62}
{"id":1,"date":"2022-08-29T00:00:00.000Z","profit":23819.45}
{"id":1,"date":"2022-09-28T00:00:00.000Z","profit":-25919.19}
{"id":1,"date":"2022-10-28T00:00:00.000Z","profit":27967.6}
{"id":1,"date":"2022-11-27T00:00:00.000Z","profit":-37402.36}
{"id":1,"date":"2022-12-27T00:00:00.000Z","profit":94528.8}
{"id":1,"date":"2023-01-26T00:00:00.000Z","profit":29491.03}
{"id":1,"date":"2023-02-25T00:00:00.000Z","profit":81541.29}
{"id":2,"date":"2022-01-01T00:00:00.000Z","profit":18070.94}
{"id":2,"date":"2022-01-31T00:00:00.000Z","profit":-40609.87}
{"id":2,"date":"2022-03-02T00:00:00.000Z","profit":42435.51}
{"id":2,"date":"2022-04-01T00:00:00.000Z","profit":-11915.15}
{"id":2,"date":"2022-05-01T00:00:00.000Z","profit":-37417.4}
{"id":2,"date":"2022-05-31T00:00:00.000Z","profit":23028.66}
{"id":2,"date":"2022-06-30T00:00:00.000Z","profit":-6895.49}
{"id":2,"date":"2022-07-30T00:00:00.000Z","profit":63114.54}
{"id":2,"date":"2022-08-29T00:00:00.000Z","profit":94646.99}
{"id":2,"date":"2022-09-28T00:00:00.000Z","profit":99560.77}
{"id":2,"date":"2022-10-28T00:00:00.000Z","profit":62216.22}
{"id":2,"date":"2022-11-27T00:00:00.000Z","profit":85094.32}
{"id":2,"date":"2022-12-27T00:00:00.000Z","profit":-23378.37}
{"id":2,"date":"2023-01-26T00:00:00.000Z","profit":47635.86}
{"id":2,"date":"2023-02-25T00:00:00.000Z","profit":33727.72}
{"id":3,"date":"2022-01-01T00:00:00.000Z","profit":33088.03}
{"id":3,"date":"2022-01-31T00:00:00.000Z","profit":66668.91}
{"id":3,"date":"2022-03-02T00:00:00.000Z","profit":5344.27}
{"id":3,"date":"2022-04-01T00:00:00.000Z","profit":22122.99}
{"id":3,"date":"2022-05-01T00:00:00.000Z","profit":27342.01}
{"id":3,"date":"2022-05-31T00:00:00.000Z","profit":55479.42}
{"id":3,"date":"2022-06-30T00:00:00.000Z","profit":35956.11}
{"id":3,"date":"2022-07-30T00:00:00.000Z","profit":9667.12}
{"id":3,"date":"2022-08-29T00:00:00.000Z","profit":63430.18}
{"id":3,"date":"2022-09-28T00:00:00.000Z","profit":-4883.41}
{"id":3,"date":"2022-10-28T00:00:00.000Z","profit":38902.8}
{"id":3,"date":"2022-11-27T00:00:00.000Z","profit":-25500.13}
{"id":3,"date":"2022-12-27T00:00:00.000Z","profit":65074.21}
{"id":3,"date":"2023-01-26T00:00:00.000Z","profit":12570.27}
{"id":3,"date":"2023-02-25T00:00:00.000Z","profit":35418.36}
{"id":4,"date":"2022-01-01T00:00:00.000Z","profit":68282.98}
{"id":4,"date":"2022-01-31T00:00:00.000Z","profit":77778.99}
{"id":4,"date":"2022-03-02T00:00:00.000Z","profit":95490.49}
{"id":4,"date":"2022-04-01T00:00:00.000Z","profit":-44466.37}
{"id":4,"date":"2022-05-01T00:00:00.000Z","profit":40215.71}
{"id":4,"date":"2022-05-31T00:00:00.000Z","profit":-31228.87}
{"id":4,"date":"2022-06-30T00:00:00.000Z","profit":60667.69}
{"id":4,"date":"2022-07-30T00:00:00.000Z","profit":71439.16}
{"id":4,"date":"2022-08-29T00:00:00.000Z","profit":-25077.4}
{"id":4,"date":"2022-09-28T00:00:00.000Z","profit":-36128.2}
{"id":4,"date":"2022-10-28T00:00:00.000Z","profit":36727.68}
{"id":4,"date":"2022-11-27T00:00:00.000Z","profit":-24207.2}
{"id":4,"date":"2022-12-27T00:00:00.000Z","profit":63846.96}
{"id":5,"date":"2022-01-01T00:00:00.000Z","profit":21648.3}
{"id":5,"date":"2022-01-31T00:00:00.000Z","profit":59263.22}
{"id":5,"date":"2022-03-02T00:00:00.000Z","profit":49154.51}
{"id":5,"date":"2022-04-01T00:00:00.000Z","profit":34787.48}
{"id":5,"date":"2022-05-01T00:00:00.000Z","profit":-24120.19}
{"id":5,"date":"2022-05-31T00:00:00.000Z","profit":98437.86}
{"id":5,"date":"2022-06-30T00:00:00.000Z","profit":18614.77}
{"id":5,"date":"2022-07-30T00:00:00.000Z","profit":17680.34}
{"id":5,"date":"2022-08-29T00:00:00.000Z","profit":74406.86}
{"id":5,"date":"2022-09-28T00:00:00.000Z","profit":61845.3}
{"id":5,"date":"2022-10-28T00:00:00.000Z","profit":-37889.59}
{"id":5,"date":"2022-11-27T00:00:00.000Z","profit":76651.05}
{"id":5,"date":"2022-12-27T00:00:00.000Z","profit":58739.6}
{"id":5,"date":"2023-01-26T00:00:00.000Z","profit":82605.85}
{"id":6,"date":"2022-01-01T00:00:00.000Z","profit":-5206.8}
{"id":6,"date":"2022-01-31T00:00:00.000Z","profit":27498.27}
{"id":6,"date":"2022-03-02T00:00:00.000Z","profit":-2939.84}
{"id":6,"date":"2022-04-01T00:00:00.000Z","profit":-37261.08}
{"id":6,"date":"2022-05-01T00:00:00.000Z","profit":37069.04}
{"id":6,"date":"2022-05-31T00:00:00.000Z","profit":524.88}
{"id":6,"date":"2022-06-30T00:00:00.000Z","profit":-29620.85}
{"id":6,"date":"2022-07-30T00:00:00.000Z","profit":35540.81}
{"id":6,"date":"2022-08-29T00:00:00.000Z","profit":20608.94}
{"id":6,"date":"2022-09-28T00:00:00.000Z","profit":34809.33}
{"id":6,"date":"2022-10-28T00:00:00.000Z","profit":-44949.05}
{"id":6,"date":"2022-11-27T00:00:00.000Z","profit":-22524.26}
{"id":6,"date":"2022-12-27T00:00:00.000Z","profit":37841.58}
{"id":7,"date":"2022-01-01T00:00:00.000Z","profit":6903.17}
{"id":7,"date":"2022-01-31T00:00:00.000Z","profit":58480.84}
{"id":7,"date":"2022-03-02T00:00:00.000Z","profit":48217.34}
{"id":7,"date":"2022-04-01T00:00:00.000Z","profit":73592.44}
{"id":7,"date":"2022-05-01T00:00:00.000Z","profit":-21831.18}
{"id":7,"date":"2022-05-31T00:00:00.000Z","profit":-40926.16}
{"id":7,"date":"2022-06-30T00:00:00.000Z","profit":62299.5}
{"id":7,"date":"2022-07-30T00:00:00.000Z","profit":95376.53}
{"id":7,"date":"2022-08-29T00:00:00.000Z","profit":-13317.36}
{"id":7,"date":"2022-09-28T00:00:00.000Z","profit":81565.05}
{"id":7,"date":"2022-10-28T00:00:00.000Z","profit":77420.52}
{"id":7,"date":"2022-11-27T00:00:00.000Z","profit":-12052.47}
{"id":7,"date":"2022-12-27T00:00:00.000Z","profit":37742.07}
{"id":7,"date":"2023-01-26T00:00:00.000Z","profit":-8057.99}
{"id":8,"date":"2022-01-01T00:00:00.000Z","profit":27213.73}
{"id":8,"date":"2022-01-31T00:00:00.000Z","profit":34271.75}
{"id":8,"date":"2022-03-02T00:00:00.000Z","profit":-44549.47}
{"id":8,"date":"2022-04-01T00:00:00.000Z","profit":15236.34}
{"id":8,"date":"2022-05-01T00:00:00.000Z","profit":-27759.81}
{"id":8,"date":"2022-05-31T00:00:00.000Z","profit":7955.12}
{"id":8,"date":"2022-06-30T00:00:00.000Z","profit":-34484.38}
{"id":8,"date":"2022-07-30T00:00:00.000Z","profit":-49758.7}
{"id":8,"date":"2022-08-29T00:00:00.000Z","profit":-41990.86}
{"id":8,"date":"2022-09-28T00:00:00.000Z","profit":58123.01}
{"id":8,"date":"2022-10-28T00:00:00.000Z","profit":30128.78}
{"id":8,"date":"2022-11-27T00:00:00.000Z","profit":-10151.17}
{"id":8,"date":"2022-12-27T00:00:00.000Z","profit":54048.33}
{"id":8,"date":"2023-01-26T00:00:00.000Z","profit":-43123.17}
{"id":9,"date":"2022-01-01T00:00:00.000Z","profit":61031.83}
{"id":9,"date":"2022-01-31T00:00:00.000Z","profit":68577.58}
{"id":9,"date":"2022-03-02T00:00:00.000Z","profit":88698.97}
{"id":9,"date":"2022-04-01T00:00:00.000Z","profit":8906.03}
{"id":9,"date":"2022-05-01T00:00:00.000Z","profit":28824.73}
{"id":9,"date":"2022-05-31T00:00:00.000Z","profit":88280.34}
{"id":9,"date":"2022-06-30T00:00:00.000Z","profit":35266.09}
{"id":9,"date":"2022-07-30T00:00:00.000Z","profit":-38025.36}
{"id":9,"date":"2022-08-29T00:00:00.000Z","profit":-12118.53}
{"id":9,"date":"2022-09-28T00:00:00.000Z","profit":-27265.86}
{"id":9,"date":"2022-10-28T00:00:00.000Z","profit":56870.57}
{"id":9,"date":"2022-11-27T00:00:00.000Z","profit":88078.95}
{"id":9,"date":"2022-12-27T00:00:00.000Z","profit":-24059.67}
{"id":9,"date":"2023-01-26T00:00:00.000Z","profit":-13301.43}
{"id":10,"date":"2022-01-01T00:00:00.000Z","profit":-22479.23}
{"id":10,"date":"2022-01-31T00:00:00.000Z","profit":8106.27}
{"id":10,"date":"2022-03-02T00:00:00.000Z","profit":69372.19}
{"id":10,"date":"2022-04-01T00:00:00.000Z","profit":-11895.74}
{"id":10,"date":"2022-05-01T00:00:00.000Z","profit":-33206.5}
{"id":10,"date":"2022-05-31T00:00:00.000Z","profit":56073.34}
{"id":10,"date":"2022-06-30T00:00:00.000Z","profit":67488.3}
{"id":10,"date":"2022-07-30T00:00:00.000Z","profit":48529.23}
{"id":10,"date":"2022-08-29T00:00:00.000Z","profit":28680.2}
{"id":10,"date":"2022-09-28T00:00:00.000Z","profit":59311.16}
{"id":10,"date":"2022-10-28T00:00:00.000Z","profit":25315.78}
{"id":10,"date":"2022-11-27T00:00:00.000Z","profit":36116.38}
{"id":10,"date":"2022-12-27T00:00:00.000Z","profit":-42040.4}

View File

@@ -0,0 +1,15 @@
name: finance_reports
columns:
- name: id
type: int
default: null
notNull: true
references: projects
- name: date
type: date
default: null
notNull: true
- name: profit
type: money
default: null
notNull: true

View File

@@ -0,0 +1,11 @@
{"__isStreamHeader":true,"pureName":"projects","schemaName":"dbo","objectId":1301579675,"createDate":"2025-06-12T10:30:34.127Z","modifyDate":"2025-06-23T12:15:08.750Z","contentHash":"2025-06-23T12:15:08.750Z","columns":[{"columnName":"id","dataType":"int","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"name","dataType":"varchar(100)","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"start_date","dataType":"date","notNull":false,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"end_date","dataType":"date","notNull":false,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false}],"primaryKey":{"constraintName":"PK__projects__3213E83F26A7ED11","schemaName":"dbo","pureName":"projects","constraintType":"primaryKey","columns":[{"columnName":"id"}]},"foreignKeys":[],"indexes":[],"uniques":[],"engine":"mssql@dbgate-plugin-mssql"}
{"id":1,"name":"Apollo Upgrade","start_date":"2020-04-27T00:00:00.000Z","end_date":"2020-10-19T00:00:00.000Z"}
{"id":2,"name":"Market Expansion","start_date":"2022-08-04T00:00:00.000Z","end_date":"2023-06-20T00:00:00.000Z"}
{"id":3,"name":"AI Integration","start_date":"2020-05-11T00:00:00.000Z","end_date":"2021-07-10T00:00:00.000Z"}
{"id":4,"name":"Cost Reduction","start_date":"2022-01-08T00:00:00.000Z","end_date":"2022-07-12T00:00:00.000Z"}
{"id":5,"name":"Cloud Migration","start_date":"2021-01-11T00:00:00.000Z","end_date":"2021-05-27T00:00:00.000Z"}
{"id":6,"name":"Customer Portal","start_date":"2021-07-13T00:00:00.000Z","end_date":"2022-09-22T00:00:00.000Z"}
{"id":7,"name":"Data Lake","start_date":"2021-02-25T00:00:00.000Z","end_date":"2021-08-21T00:00:00.000Z"}
{"id":8,"name":"UX Overhaul","start_date":"2021-05-20T00:00:00.000Z","end_date":"2022-09-10T00:00:00.000Z"}
{"id":9,"name":"Security Hardening","start_date":"2021-05-28T00:00:00.000Z","end_date":"2022-07-28T00:00:00.000Z"}
{"id":10,"name":"Mobile App Revamp","start_date":"2021-11-17T00:00:00.000Z","end_date":"2022-06-04T00:00:00.000Z"}

View File

@@ -0,0 +1,18 @@
name: projects
columns:
- name: id
type: int
default: null
notNull: true
- name: name
type: varchar(100)
default: null
notNull: true
- name: start_date
type: date
default: null
- name: end_date
type: date
default: null
primaryKey:
- id

View File

@@ -0,0 +1,23 @@
-- >>>
-- autoExecute: true
-- splitterInitialValue: 20%
-- selected-chart: 1
-- <<<
SELECT
d.name AS department_name,
FORMAT(fr.date, 'yyyy-MM') AS month,
SUM(fr.profit) AS total_monthly_profit
FROM
departments d
JOIN
employees e ON d.id = e.department_id
JOIN
employee_project ep ON e.id = ep.employee_id
JOIN
finance_reports fr ON ep.project_id = fr.id
GROUP BY
d.name, FORMAT(fr.date, 'yyyy-MM')
ORDER BY
d.name, month;

View File

@@ -37,7 +37,7 @@ services:
- "16009:5556"
mongo:
image: mongo:4.0.12
image: mongo:4.4.29
restart: always
environment:
MONGO_INITDB_ROOT_USERNAME: root

8
e2e-tests/env/charts/.env vendored Normal file
View File

@@ -0,0 +1,8 @@
CONNECTIONS=mysql
LABEL_mysql=MySql-connection
SERVER_mysql=localhost
USER_mysql=root
PASSWORD_mysql=Pwd2020Db
PORT_mysql=16004
ENGINE_mysql=mysql@dbgate-plugin-mysql

2
e2e-tests/env/cloud/.env vendored Normal file
View File

@@ -0,0 +1,2 @@
ALLOW_DBGATE_PRIVATE_CLOUD=1
REDIRECT_TO_DBGATE_CLOUD_LOGIN=1

96
e2e-tests/init/charts.js Normal file
View File

@@ -0,0 +1,96 @@
const path = require('path');
const os = require('os');
const fs = require('fs');
// Root of the local dbgate data directory used by the e2e tests.
const baseDir = path.join(os.homedir(), '.dbgate');
const dbgateApi = require('dbgate-api');
dbgateApi.initializeApiEnvironment();
// Register the MySQL plugin so dbgate-api can connect to the test MySQL server.
const dbgatePluginMysql = require('dbgate-plugin-mysql');
dbgateApi.registerPlugins(dbgatePluginMysql);
/**
 * Recursively copy the contents of `source` into `target`, creating target
 * directories as needed. The original implementation copied only top-level
 * entries and would throw from `fs.copyFileSync` if `source` contained a
 * subdirectory; recursing fixes that while staying backward compatible for
 * flat folders. Synchronous fs calls are acceptable here — this runs once
 * during test initialization.
 *
 * @param {string} source - existing directory to copy from
 * @param {string} target - directory to copy into (created if missing)
 */
async function copyFolder(source, target) {
  if (!fs.existsSync(target)) {
    fs.mkdirSync(target, { recursive: true });
  }
  for (const entry of fs.readdirSync(source, { withFileTypes: true })) {
    const sourcePath = path.join(source, entry.name);
    const targetPath = path.join(target, entry.name);
    if (entry.isDirectory()) {
      // Descend into subfolders instead of failing on copyFileSync(directory).
      await copyFolder(sourcePath, targetPath);
    } else {
      fs.copyFileSync(sourcePath, targetPath);
    }
  }
}
/**
 * Drop, recreate and import a MySQL database from a SQL dump file.
 * The original repeated the identical connection object three times; it is
 * hoisted into a single constant so the settings cannot drift apart.
 *
 * @param {string} dbname - database name; interpolated into SQL, so it must
 *   come from trusted test code (both call sites here use literals)
 * @param {string} inputFile - path to the SQL dump to import
 */
async function initMySqlDatabase(dbname, inputFile) {
  // Shared MySQL connection settings taken from the e2e environment (.env).
  const connection = {
    server: process.env.SERVER_mysql,
    user: process.env.USER_mysql,
    password: process.env.PASSWORD_mysql,
    port: process.env.PORT_mysql,
    engine: 'mysql@dbgate-plugin-mysql',
  };
  // Recreate the database from scratch, then load the dump into it.
  await dbgateApi.executeQuery({
    connection,
    sql: `drop database if exists ${dbname}`,
  });
  await dbgateApi.executeQuery({
    connection,
    sql: `create database ${dbname}`,
  });
  await dbgateApi.importDatabase({
    connection: { ...connection, database: dbname },
    inputFile,
  });
}
/**
 * Initialize everything the charts e2e suite needs: the `charts_sample`
 * MySQL database, the prepared SQL script files, and the `MyChinook`
 * sample database.
 */
async function run() {
  // MySQL connection settings from the charts e2e environment (.env).
  const mysqlConnection = {
    server: process.env.SERVER_mysql,
    user: process.env.USER_mysql,
    password: process.env.PASSWORD_mysql,
    port: process.env.PORT_mysql,
    engine: 'mysql@dbgate-plugin-mysql',
  };

  // Drop any leftover sample database from a previous run; a failure here
  // (e.g. on the very first run) is logged but deliberately not fatal.
  try {
    await dbgateApi.executeQuery({
      connection: mysqlConnection,
      sql: 'drop database if exists charts_sample',
    });
  } catch (err) {
    console.error('Failed to drop database', err);
  }

  // Recreate the sample database and populate it from the model folder.
  await dbgateApi.executeQuery({
    connection: mysqlConnection,
    sql: 'create database charts_sample',
  });
  const chartsSampleFolder = path.resolve(path.join(__dirname, '../data/charts-sample'));
  await dbgateApi.importDbFromFolder({
    connection: { ...mysqlConnection, database: 'charts_sample' },
    folder: chartsSampleFolder,
  });

  // Copy the prepared SQL scripts into the dbgate files area used by the tests.
  const sqlScriptsSource = path.resolve(path.join(__dirname, '../data/files/sql'));
  await copyFolder(sqlScriptsSource, path.join(baseDir, 'files-e2etests', 'sql'));

  // Import the Chinook sample database used by the chart tests.
  const chinookDump = path.resolve(path.join(__dirname, '../data/chinook-mysql.sql'));
  await initMySqlDatabase('MyChinook', chinookDump);
}
dbgateApi.runScript(run);

View File

@@ -21,6 +21,8 @@
"cy:run:browse-data": "cypress run --spec cypress/e2e/browse-data.cy.js",
"cy:run:team": "cypress run --spec cypress/e2e/team.cy.js",
"cy:run:multi-sql": "cypress run --spec cypress/e2e/multi-sql.cy.js",
"cy:run:cloud": "cypress run --spec cypress/e2e/cloud.cy.js",
"cy:run:charts": "cypress run --spec cypress/e2e/charts.cy.js",
"start:add-connection": "node clearTestingData && cd .. && node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:portal": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/portal/.env node e2e-tests/init/portal.js && env-cmd -f e2e-tests/env/portal/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
@@ -28,6 +30,8 @@
"start:browse-data": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/browse-data/.env node e2e-tests/init/browse-data.js && env-cmd -f e2e-tests/env/browse-data/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:team": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/team/.env node e2e-tests/init/team.js && env-cmd -f e2e-tests/env/team/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:multi-sql": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/multi-sql/.env node e2e-tests/init/multi-sql.js && env-cmd -f e2e-tests/env/multi-sql/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:cloud": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/cloud/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:charts": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/charts/.env node e2e-tests/init/charts.js && env-cmd -f e2e-tests/env/charts/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"test:add-connection": "start-server-and-test start:add-connection http://localhost:3000 cy:run:add-connection",
"test:portal": "start-server-and-test start:portal http://localhost:3000 cy:run:portal",
@@ -35,8 +39,10 @@
"test:browse-data": "start-server-and-test start:browse-data http://localhost:3000 cy:run:browse-data",
"test:team": "start-server-and-test start:team http://localhost:3000 cy:run:team",
"test:multi-sql": "start-server-and-test start:multi-sql http://localhost:3000 cy:run:multi-sql",
"test:cloud": "start-server-and-test start:cloud http://localhost:3000 cy:run:cloud",
"test:charts": "start-server-and-test start:charts http://localhost:3000 cy:run:charts",
"test": "yarn test:add-connection && yarn test:portal && yarn test:oauth && yarn test:browse-data && yarn test:team && yarn test:multi-sql",
"test": "yarn test:add-connection && yarn test:portal && yarn test:oauth && yarn test:browse-data && yarn test:team && yarn test:multi-sql && yarn test:cloud && yarn test:charts",
"test:ci": "yarn test"
},
"dependencies": {}

View File

@@ -29,7 +29,7 @@ async function testDatabaseDiff(conn, driver, mangle, createObject = null) {
driver,
`create table ~t2 (
~id int not null primary key,
~t1_id int null references ~t1(~id)
~t1_id int ${driver.dialect.implicitNullDeclaration ? '' : 'null'} references ~t1(~id)
)`
);

View File

@@ -60,7 +60,9 @@ async function testTableDiff(engine, conn, driver, mangle) {
if (!engine.skipReferences) {
const query = formatQueryWithoutParams(
driver,
`create table ~t2 (~id int not null primary key, ~fkval int null references ~t1(~col_ref))`
`create table ~t2 (~id int not null primary key, ~fkval int ${
driver.dialect.implicitNullDeclaration ? '' : 'null'
} references ~t1(~col_ref))`
);
await driver.query(conn, transformSqlForEngine(engine, query));
@@ -116,6 +118,31 @@ describe('Alter table', () => {
})
);
test.each(engines.filter(i => i.supportTableComments).map(engine => [engine.label, engine]))(
'Add comment to table - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.objectComment = 'Added table comment';
});
})
);
test.each(engines.filter(i => i.supportColumnComments).map(engine => [engine.label, engine]))(
'Add comment to column - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.push({
columnName: 'added',
columnComment: 'Added column comment',
dataType: 'int',
pairingId: crypto.randomUUID(),
notNull: false,
autoIncrement: false,
});
});
})
);
test.each(
createEnginesColumnsSource(engines.filter(x => !x.skipDropColumn)).filter(
([_label, col, engine]) => !engine.skipPkDrop || !col.endsWith('_pk')

View File

@@ -4,7 +4,7 @@ const { testWrapper } = require('../tools');
const dataReplicator = require('dbgate-api/src/shell/dataReplicator');
const deployDb = require('dbgate-api/src/shell/deployDb');
const storageModel = require('dbgate-api/src/storageModel');
const { runCommandOnDriver, runQueryOnDriver } = require('dbgate-tools');
const { runCommandOnDriver, runQueryOnDriver, adaptDatabaseInfo } = require('dbgate-tools');
describe('Data replicator', () => {
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
@@ -162,7 +162,7 @@ describe('Data replicator', () => {
await deployDb({
systemConnection: conn,
driver,
loadedDbModel: storageModel,
loadedDbModel: adaptDatabaseInfo(storageModel, driver),
targetSchema: engine.defaultSchemaName,
});
@@ -176,11 +176,11 @@ describe('Data replicator', () => {
await queryValue(
`select ~is_disabled as ~val from ~auth_methods where ~amoid='790ca4d2-7f01-4800-955b-d691b890cc50'`
)
).toBeFalsy();
).toBeTruthy();
const DB1 = {
auth_methods: [
{ id: -1, name: 'Anonymous', amoid: '790ca4d2-7f01-4800-955b-d691b890cc50', is_disabled: 1 },
{ id: -1, name: 'Anonymous', amoid: '790ca4d2-7f01-4800-955b-d691b890cc50', is_disabled: 0 },
{ id: 10, name: 'OAuth', amoid: '4269b660-54b6-11ef-a3aa-a9021250bf4b' },
],
auth_methods_config: [{ id: 20, auth_method_id: 10, key: 'oauthClient', value: 'dbgate' }],
@@ -266,7 +266,7 @@ describe('Data replicator', () => {
await queryValue(
`select ~is_disabled as ~val from ~auth_methods where ~amoid='790ca4d2-7f01-4800-955b-d691b890cc50'`
)
).toBeTruthy();
).toEqual('0');
expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('3');
expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('1');

View File

@@ -106,7 +106,9 @@ async function testDatabaseDeploy(engine, conn, driver, dbModelsYaml, options) {
for (const loadedDbModel of dbModelsYaml) {
if (_.isString(loadedDbModel)) {
await driver.script(conn, formatQueryWithoutParams(driver, loadedDbModel));
await driver.script(conn, formatQueryWithoutParams(driver, loadedDbModel), {
useTransaction: engine.runDeployInTransaction,
});
} else {
const { sql, isEmpty } = await generateDeploySql({
systemConnection: conn.isPreparedOnly ? undefined : conn,
@@ -131,6 +133,7 @@ async function testDatabaseDeploy(engine, conn, driver, dbModelsYaml, options) {
driver,
loadedDbModel: convertModelToEngine(loadedDbModel, driver),
dbdiffOptionsExtra,
useTransaction: engine.runDeployInTransaction,
});
}
@@ -606,7 +609,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy && !i.skipRenameTable).map(engine => [engine.label, engine]))(
'Mark table removed - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], [], []], {
@@ -822,7 +825,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy && !i.skipRenameTable).map(engine => [engine.label, engine]))(
'Mark table removed, one remains - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1, T2], [T2], [T2]], {

View File

@@ -94,7 +94,7 @@ describe('Table analyse', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipIncrementalAnalysis).map(engine => [engine.label, engine]))(
'Table add - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
@@ -112,7 +112,7 @@ describe('Table analyse', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipIncrementalAnalysis).map(engine => [engine.label, engine]))(
'Table remove - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));
@@ -130,7 +130,7 @@ describe('Table analyse', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipIncrementalAnalysis).map(engine => [engine.label, engine]))(
'Table change - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));

View File

@@ -64,6 +64,40 @@ describe('Table create', () => {
})
);
test.each(
engines.filter(i => i.supportTableComments || i.supportColumnComments).map(engine => [engine.label, engine])
)(
'Simple table with comment - %s',
testWrapper(async (conn, driver, engine) => {
await testTableCreate(engine, conn, driver, {
...(engine.supportTableComments && {
schemaName: 'dbo',
objectComment: 'table comment',
}),
...(engine.defaultSchemaName && {
schemaName: engine.defaultSchemaName,
}),
columns: [
{
columnName: 'col1',
dataType: 'int',
pureName: 'tested',
...(engine.skipNullability ? {} : { notNull: true }),
...(engine.supportColumnComments && {
columnComment: 'column comment',
}),
...(engine.defaultSchemaName && {
schemaName: engine.defaultSchemaName,
}),
},
],
primaryKey: {
columns: [{ columnName: 'col1' }],
},
});
})
);
test.each(engines.filter(x => !x.skipIndexes).map(engine => [engine.label, engine]))(
'Table with index - %s',
testWrapper(async (conn, driver, engine) => {

View File

@@ -17,6 +17,17 @@ services:
environment:
- MYSQL_ROOT_PASSWORD=Pwd2020Db
db2:
image: icr.io/db2_community/db2:11.5.8.0
privileged: true
ports:
- "15055:50000"
environment:
LICENSE: accept
DB2INST1_PASSWORD: Pwd2020Db
DBNAME: testdb
DB2INSTANCE: db2inst1
# mysql:
# image: mysql:8.0.18
# command: --default-authentication-plugin=mysql_native_password
@@ -89,3 +100,28 @@ services:
# - '5002:5001'
# volumes:
# - ./data/libsql:/var/lib/sqld
firebird:
image: firebirdsql/firebird:latest
container_name: firebird-db
environment:
- FIREBIRD_DATABASE=mydatabase.fdb
- FIREBIRD_USER=dbuser
- FIREBIRD_PASSWORD=dbpassword
- ISC_PASSWORD=masterkey
- FIREBIRD_TRACE=false
- FIREBIRD_USE_LEGACY_AUTH=true
ports:
- '3050:3050'
volumes:
- firebird-data:/firebird/data
- ./firebird.conf:/firebird/firebird.conf # Mount custom config file
healthcheck:
test: ['CMD', 'nc', '-z', 'localhost', '3050']
interval: 30s
timeout: 10s
retries: 3
start_period: 40s
volumes:
firebird-data:

View File

@@ -443,6 +443,8 @@ const sqlServerEngine = {
supportSchemas: true,
supportRenameSqlObject: true,
defaultSchemaName: 'dbo',
supportTableComments: true,
supportColumnComments: true,
// skipSeparateSchemas: true,
triggers: [
{
@@ -680,6 +682,56 @@ const duckdbEngine = {
skipDropReferences: true,
};
/** @type {import('dbgate-types').TestEngineInfo} */
const firebirdEngine = {
label: 'Firebird',
generateDbFile: true,
databaseFileLocationOnServer: '/var/lib/firebird/data/',
defaultSchemaName: 'main',
connection: {
engine: 'firebird@dbgate-plugin-firebird',
server: 'localhost',
port: 3050,
// databaseUrl: '/var/lib/firebird/data/mydatabase.fdb',
// databaseFile: '/var/lib/firebird/data/mydatabase.fdb',
user: 'SYSDBA',
password: 'masterkey',
},
objects: [],
triggers: [
{
testName: 'triggers after each row',
create: `CREATE OR ALTER TRIGGER ~obj1 AFTER INSERT ON ~t1 AS BEGIN END;`,
drop: 'DROP TRIGGER ~obj1;',
objectTypeField: 'triggers',
expected: {
pureName: 'obj1',
tableName: 't1',
eventType: 'INSERT',
triggerTiming: 'AFTER',
},
},
],
skipOnCI: false,
runDeployInTransaction: true,
skipDataModifications: true,
skipChangeColumn: true,
// skipIndexes: true,
// skipStringLength: true,
// skipTriggers: true,
skipDataReplicator: true,
skipAutoIncrement: true,
// skipDropColumn: true,
skipRenameColumn: true,
// skipChangeNullability: true,
// skipDeploy: true,
// supportRenameSqlObject: true,
skipIncrementalAnalysis: true,
skipRenameTable: true,
// skipDefaultValue: true,
skipDropReferences: true,
};
const enginesOnCi = [
// all engines, which would be run on GitHub actions
mysqlEngine,
@@ -694,6 +746,7 @@ const enginesOnCi = [
oracleEngine,
cassandraEngine,
duckdbEngine,
firebirdEngine,
];
const enginesOnLocal = [
@@ -709,7 +762,8 @@ const enginesOnLocal = [
// libsqlFileEngine,
// libsqlWsEngine,
// oracleEngine,
duckdbEngine,
// duckdbEngine,
firebirdEngine,
];
/** @type {import('dbgate-types').TestEngineInfo[] & Record<string, import('dbgate-types').TestEngineInfo>} */
@@ -727,3 +781,4 @@ module.exports.cassandraEngine = cassandraEngine;
module.exports.libsqlFileEngine = libsqlFileEngine;
module.exports.libsqlWsEngine = libsqlWsEngine;
module.exports.duckdbEngine = duckdbEngine;
module.exports.firebirdEngine = firebirdEngine;

View File

@@ -0,0 +1,45 @@
# Custom Firebird Configuration
# Wire encryption settings
# Options: Enabled, Required, Disabled
WireCrypt = Disabled
# Authentication settings
# Add Legacy_Auth to support older clients
AuthServer = Legacy_Auth
# User manager plugin
UserManager = Legacy_UserManager
# Default character set
DefaultCharSet = UTF8
# Buffer settings for better performance
DefaultDbCachePages = 2048
TempCacheLimit = 512M
# Connection settings
ConnectionTimeout = 180
DatabaseGrowthIncrement = 128M
# TCP Protocol settings
TcpRemoteBufferSize = 8192
TcpNoNagle = 1
# Security settings
RemoteServiceName = gds_db
RemoteServicePort = 3050
RemoteAuxPort = 0
RemotePipeName = firebird
# Lock settings
LockMemSize = 1M
LockHashSlots = 8191
LockAcquireSpins = 0
# Log settings
FileSystemCacheThreshold = 65536
FileSystemCacheSize = 0
# Compatibility settings for older clients
CompatiblityDialect = 3

View File

@@ -5,7 +5,12 @@ const crypto = require('crypto');
function randomDbName(dialect) {
const generatedKey = crypto.randomBytes(6);
const newKey = generatedKey.toString('hex');
const res = `db${newKey}`;
let res = `db${newKey}`;
if (dialect.dbFileExtension) {
res += dialect.dbFileExtension;
}
if (dialect.upperCaseAllDbObjectNames) return res.toUpperCase();
return res;
}
@@ -17,7 +22,7 @@ async function connect(engine, database) {
if (engine.generateDbFile) {
const conn = await driver.connect({
...connection,
databaseFile: `dbtemp/${database}`,
databaseFile: (engine.databaseFileLocationOnServer ?? 'dbtemp/') + database,
});
return conn;
} else {
@@ -42,7 +47,7 @@ async function prepareConnection(engine, database) {
if (engine.generateDbFile) {
return {
...connection,
databaseFile: `dbtemp/${database}`,
databaseFile: (engine.databaseFileLocationOnServer ?? 'dbtemp/') + database,
isPreparedOnly: true,
};
} else {

View File

@@ -1,6 +1,6 @@
{
"private": true,
"version": "4.6.2-beta.4",
"version": "6.6.1-beta.18",
"name": "dbgate-all",
"workspaces": [
"packages/*",
@@ -9,6 +9,7 @@
],
"scripts": {
"start:api": "yarn workspace dbgate-api start | pino-pretty",
"start:api:watch": "nodemon --watch 'src/**' --ext 'ts,json,js' --exec yarn start:api",
"start:api:json": "yarn workspace dbgate-api start",
"start:app": "cd app && yarn start | pino-pretty",
"start:app:singledb": "CONNECTIONS=con1 SERVER_con1=localhost ENGINE_con1=mysql@dbgate-plugin-mysql USER_con1=root PASSWORD_con1=Pwd2020Db SINGLE_CONNECTION=con1 SINGLE_DATABASE=Chinook yarn start:app",
@@ -42,7 +43,7 @@
"build:plugins:frontend": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn build:frontend",
"build:plugins:backend": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn build:backend",
"build:plugins:frontend:watch": "workspaces-run --parallel --only=\"dbgate-plugin-*\" -- yarn build:frontend:watch",
"storage-json": "dbmodel model-to-json storage-db packages/api/src/storageModel.js --commonjs",
"storage-json": "node packages/dbmodel/bin/dbmodel.js model-to-json storage-db packages/api/src/storageModel.js --commonjs",
"plugins:copydist": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn copydist",
"build:app:local": "yarn plugins:copydist && cd app && yarn build:local",
"start:app:local": "cd app && yarn start:local",
@@ -71,7 +72,8 @@
"translations:extract": "node common/translations-cli/index.js extract",
"translations:add-missing": "node common/translations-cli/index.js add-missing",
"translations:remove-unused": "node common/translations-cli/index.js remove-unused",
"translations:check": "node common/translations-cli/index.js check"
"translations:check": "node common/translations-cli/index.js check",
"errors": "node common/assign-dbgm-codes.mjs ."
},
"dependencies": {
"concurrently": "^5.1.0",

View File

@@ -1,5 +1,14 @@
DEVMODE=1
SHELL_SCRIPTING=1
ALLOW_DBGATE_PRIVATE_CLOUD=1
DEVWEB=1
# LOCAL_AI_GATEWAY=true
# REDIRECT_TO_DBGATE_CLOUD_LOGIN=1
# PROD_DBGATE_CLOUD=1
# PROD_DBGATE_IDENTITY=1
# LOCAL_DBGATE_CLOUD=1
# LOCAL_DBGATE_IDENTITY=1
# CLOUD_UPGRADE_FILE=c:\test\upg\upgrade.zip
@@ -7,7 +16,6 @@ SHELL_SCRIPTING=1
# DISABLE_SHELL=1
# HIDE_APP_EDITOR=1
# DEVWEB=1
# LOGINS=admin,test

View File

@@ -56,7 +56,7 @@
"ncp": "^2.0.0",
"node-cron": "^2.0.3",
"on-finished": "^2.4.1",
"pinomin": "^1.0.4",
"pinomin": "^1.0.5",
"portfinder": "^1.0.28",
"rimraf": "^3.0.0",
"semver": "^7.6.3",
@@ -68,6 +68,7 @@
},
"scripts": {
"start": "env-cmd -f .env node src/index.js --listen-api",
"start:debug": "env-cmd -f .env node --inspect src/index.js --listen-api",
"start:portal": "env-cmd -f env/portal/.env node src/index.js --listen-api",
"start:singledb": "env-cmd -f env/singledb/.env node src/index.js --listen-api",
"start:auth": "env-cmd -f env/auth/.env node src/index.js --listen-api",

View File

@@ -11,7 +11,7 @@ const logger = getLogger('authProvider');
class AuthProviderBase {
amoid = 'none';
async login(login, password, options = undefined) {
async login(login, password, options = undefined, req = undefined) {
return {
accessToken: jwt.sign(
{
@@ -23,7 +23,7 @@ class AuthProviderBase {
};
}
oauthToken(params) {
oauthToken(params, req) {
return {};
}
@@ -94,7 +94,7 @@ class OAuthProvider extends AuthProviderBase {
payload = jwt.decode(id_token);
}
logger.info({ payload }, 'User payload returned from OAUTH');
logger.info({ payload }, 'DBGM-00002 User payload returned from OAUTH');
const login =
process.env.OAUTH_LOGIN_FIELD && payload && payload[process.env.OAUTH_LOGIN_FIELD]

View File

@@ -102,7 +102,7 @@ module.exports = {
...fileType('.matview.sql', 'matview.sql'),
];
} catch (err) {
logger.error(extractErrorLogData(err), 'Error reading archive files');
logger.error(extractErrorLogData(err), 'DBGM-00001 Error reading archive files');
return [];
}
},

View File

@@ -13,6 +13,21 @@ const {
} = require('../auth/authProvider');
const storage = require('./storage');
const { decryptPasswordString } = require('../utility/crypting');
const {
createDbGateIdentitySession,
startCloudTokenChecking,
readCloudTokenHolder,
readCloudTestTokenHolder,
} = require('../utility/cloudIntf');
const socket = require('../utility/socket');
const { sendToAuditLog } = require('../utility/auditlog');
const {
isLoginLicensed,
LOGIN_LIMIT_ERROR,
markTokenAsLoggedIn,
markUserAsActive,
markLoginAsLoggedOut,
} = require('../utility/loginchecker');
const logger = getLogger('auth');
@@ -52,6 +67,11 @@ function authMiddleware(req, res, next) {
// const isAdminPage = req.headers['x-is-admin-page'] == 'true';
if (process.env.SKIP_ALL_AUTH) {
// API is not authorized for basic auth
return next();
}
if (process.env.BASIC_AUTH) {
// API is not authorized for basic auth
return next();
@@ -70,6 +90,8 @@ function authMiddleware(req, res, next) {
try {
const decoded = jwt.verify(token, getTokenSecret());
req.user = decoded;
markUserAsActive(decoded.licenseUid, token);
return next();
} catch (err) {
if (skipAuth) {
@@ -77,7 +99,7 @@ function authMiddleware(req, res, next) {
return next();
}
logger.error(extractErrorLogData(err), 'Sending invalid token error');
logger.error(extractErrorLogData(err), 'DBGM-00098 Sending invalid token error');
return unauthorizedResponse(req, res, 'invalid token');
}
@@ -85,12 +107,12 @@ function authMiddleware(req, res, next) {
module.exports = {
oauthToken_meta: true,
async oauthToken(params) {
async oauthToken(params, req) {
const { amoid } = params;
return getAuthProviderById(amoid).oauthToken(params);
return getAuthProviderById(amoid).oauthToken(params, req);
},
login_meta: true,
async login(params) {
async login(params, req) {
const { amoid, login, password, isAdminPage } = params;
if (isAdminPage) {
@@ -100,25 +122,52 @@ module.exports = {
adminPassword = decryptPasswordString(adminConfig?.adminPassword);
}
if (adminPassword && adminPassword == password) {
if (!(await isLoginLicensed(req, `superadmin`))) {
return { error: LOGIN_LIMIT_ERROR };
}
sendToAuditLog(req, {
category: 'auth',
component: 'AuthController',
action: 'login',
event: 'login.admin',
severity: 'info',
message: 'Administration login successful',
});
const licenseUid = `superadmin`;
const accessToken = jwt.sign(
{
login: 'superadmin',
permissions: await storage.loadSuperadminPermissions(),
roleId: -3,
licenseUid,
},
getTokenSecret(),
{
expiresIn: getTokenLifetime(),
}
);
markTokenAsLoggedIn(licenseUid, accessToken);
return {
accessToken: jwt.sign(
{
login: 'superadmin',
permissions: await storage.loadSuperadminPermissions(),
roleId: -3,
},
getTokenSecret(),
{
expiresIn: getTokenLifetime(),
}
),
accessToken,
};
}
sendToAuditLog(req, {
category: 'auth',
component: 'AuthController',
action: 'loginFail',
event: 'login.adminFailed',
severity: 'warn',
message: 'Administraton login failed',
});
return { error: 'Login failed' };
}
return getAuthProviderById(amoid).login(login, password);
return getAuthProviderById(amoid).login(login, password, undefined, req);
},
getProviders_meta: true,
@@ -135,5 +184,40 @@ module.exports = {
return getAuthProviderById(amoid).redirect(params);
},
createCloudLoginSession_meta: true,
async createCloudLoginSession({ client, redirectUri }) {
const res = await createDbGateIdentitySession(client, redirectUri);
startCloudTokenChecking(res.sid, tokenHolder => {
socket.emit('got-cloud-token', tokenHolder);
socket.emitChanged('cloud-content-changed');
socket.emit('cloud-content-updated');
});
return res;
},
cloudLoginRedirected_meta: true,
async cloudLoginRedirected({ sid }) {
const tokenHolder = await readCloudTokenHolder(sid);
return tokenHolder;
},
cloudTestLogin_meta: true,
async cloudTestLogin({ email }) {
const tokenHolder = await readCloudTestTokenHolder(email);
return tokenHolder;
},
logoutAdmin_meta: true,
async logoutAdmin() {
await markLoginAsLoggedOut('superadmin');
return true;
},
logoutUser_meta: true,
async logoutUser({}, req) {
await markLoginAsLoggedOut(req?.user?.licenseUid);
return true;
},
authMiddleware,
};

View File

@@ -0,0 +1,293 @@
const {
getPublicCloudFiles,
getPublicFileData,
refreshPublicFiles,
callCloudApiGet,
callCloudApiPost,
getCloudFolderEncryptor,
getCloudContent,
putCloudContent,
removeCloudCachedConnection,
} = require('../utility/cloudIntf');
const connections = require('./connections');
const socket = require('../utility/socket');
const { recryptConnection, getInternalEncryptor, encryptConnection } = require('../utility/crypting');
const { getConnectionLabel, getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('cloud');
const _ = require('lodash');
const fs = require('fs-extra');
const { getAiGatewayServer } = require('../utility/authProxy');
module.exports = {
publicFiles_meta: true,
async publicFiles() {
const res = await getPublicCloudFiles();
return res;
},
publicFileData_meta: true,
async publicFileData({ path }) {
const res = getPublicFileData(path);
return res;
},
refreshPublicFiles_meta: true,
async refreshPublicFiles({ isRefresh }) {
await refreshPublicFiles(isRefresh);
return {
status: 'ok',
};
},
contentList_meta: true,
async contentList() {
try {
const resp = await callCloudApiGet('content-list');
return resp;
} catch (err) {
logger.error(extractErrorLogData(err), 'DBGM-00099 Error getting cloud content list');
return [];
}
},
getContent_meta: true,
async getContent({ folid, cntid }) {
const resp = await getCloudContent(folid, cntid);
return resp;
},
putContent_meta: true,
async putContent({ folid, cntid, content, name, type }) {
const resp = await putCloudContent(folid, cntid, content, name, type, {});
socket.emitChanged('cloud-content-changed');
socket.emit('cloud-content-updated');
return resp;
},
createFolder_meta: true,
async createFolder({ name }) {
const resp = await callCloudApiPost(`folders/create`, { name });
socket.emitChanged('cloud-content-changed');
socket.emit('cloud-content-updated');
return resp;
},
grantFolder_meta: true,
async grantFolder({ inviteLink }) {
const m = inviteLink.match(/^dbgate\:\/\/folder\/v1\/([a-zA-Z0-9]+)\?mode=(read|write|admin)$/);
if (!m) {
throw new Error('Invalid invite link format');
}
const invite = m[1];
const mode = m[2];
const resp = await callCloudApiPost(`folders/grant/${mode}`, { invite });
socket.emitChanged('cloud-content-changed');
socket.emit('cloud-content-updated');
return resp;
},
renameFolder_meta: true,
async renameFolder({ folid, name }) {
const resp = await callCloudApiPost(`folders/rename`, { folid, name });
socket.emitChanged('cloud-content-changed');
socket.emit('cloud-content-updated');
return resp;
},
deleteFolder_meta: true,
async deleteFolder({ folid }) {
const resp = await callCloudApiPost(`folders/delete`, { folid });
socket.emitChanged('cloud-content-changed');
socket.emit('cloud-content-updated');
return resp;
},
getInviteToken_meta: true,
async getInviteToken({ folid, role }) {
const resp = await callCloudApiGet(`invite-token/${folid}/${role}`);
return resp;
},
refreshContent_meta: true,
async refreshContent() {
socket.emitChanged('cloud-content-changed');
socket.emit('cloud-content-updated');
return {
status: 'ok',
};
},
copyConnectionCloud_meta: true,
async copyConnectionCloud({ conid, folid }) {
const conn = await connections.getCore({ conid });
const folderEncryptor = await getCloudFolderEncryptor(folid);
const recryptedConn = recryptConnection(conn, getInternalEncryptor(), folderEncryptor);
const connToSend = _.omit(recryptedConn, ['_id']);
const resp = await putCloudContent(
folid,
undefined,
JSON.stringify(connToSend),
getConnectionLabel(conn),
'connection',
{
connectionColor: conn.connectionColor,
connectionEngine: conn.engine,
}
);
return resp;
},
saveConnection_meta: true,
// Saves a connection into a cloud folder. If connection._id is already a
// cloud ID (cloud://<folid>/<cntid>), the IDs embedded in it take precedence
// over the folid argument and the existing content item is overwritten;
// otherwise folid is required and a new content item is created. The
// connection is encrypted with the folder-specific encryptor before upload.
// Returns the stored (encrypted) connection with its cloud _id, or the raw
// API response when it carries apiErrorMessage.
async saveConnection({ folid, connection }) {
  let cntid = undefined;
  if (connection._id) {
    const m = connection._id.match(/^cloud\:\/\/(.+)\/(.+)$/);
    if (!m) {
      throw new Error('Invalid cloud connection ID format');
    }
    // Existing cloud connection - reuse its original folder/content slot.
    folid = m[1];
    cntid = m[2];
  }
  if (!folid) {
    throw new Error('Missing cloud folder ID');
  }
  const folderEncryptor = await getCloudFolderEncryptor(folid);
  const recryptedConn = encryptConnection(connection, folderEncryptor);
  const resp = await putCloudContent(
    folid,
    cntid,
    JSON.stringify(recryptedConn),
    getConnectionLabel(recryptedConn),
    'connection',
    {
      connectionColor: connection.connectionColor,
      connectionEngine: connection.engine,
    }
  );
  if (resp.apiErrorMessage) {
    // API-level failure: return as-is, without emitting change events.
    return resp;
  }
  // Drop the stale cached copy so the next read fetches the updated content;
  // resp.cntid covers the newly-created case where cntid was undefined.
  removeCloudCachedConnection(folid, resp.cntid);
  cntid = resp.cntid;
  socket.emitChanged('cloud-content-changed');
  socket.emit('cloud-content-updated');
  return {
    ...recryptedConn,
    _id: `cloud://${folid}/${cntid}`,
  };
},
duplicateConnection_meta: true,
async duplicateConnection({ conid }) {
const m = conid.match(/^cloud\:\/\/(.+)\/(.+)$/);
if (!m) {
throw new Error('Invalid cloud connection ID format');
}
const folid = m[1];
const cntid = m[2];
const respGet = await getCloudContent(folid, cntid);
const conn = JSON.parse(respGet.content);
const conn2 = {
...conn,
displayName: getConnectionLabel(conn) + ' - copy',
};
const respPut = await putCloudContent(folid, undefined, JSON.stringify(conn2), conn2.displayName, 'connection', {
connectionColor: conn.connectionColor,
connectionEngine: conn.engine,
});
return respPut;
},
deleteConnection_meta: true,
async deleteConnection({ conid }) {
const m = conid.match(/^cloud\:\/\/(.+)\/(.+)$/);
if (!m) {
throw new Error('Invalid cloud connection ID format');
}
const folid = m[1];
const cntid = m[2];
const resp = await callCloudApiPost(`content/delete/${folid}/${cntid}`);
socket.emitChanged('cloud-content-changed');
socket.emit('cloud-content-updated');
return resp;
},
deleteContent_meta: true,
async deleteContent({ folid, cntid }) {
const resp = await callCloudApiPost(`content/delete/${folid}/${cntid}`);
socket.emitChanged('cloud-content-changed');
socket.emit('cloud-content-updated');
return resp;
},
renameContent_meta: true,
async renameContent({ folid, cntid, name }) {
const resp = await callCloudApiPost(`content/rename/${folid}/${cntid}`, { name });
socket.emitChanged('cloud-content-changed');
socket.emit('cloud-content-updated');
return resp;
},
saveFile_meta: true,
async saveFile({ folid, cntid, fileName, data, contentFolder, format }) {
const resp = await putCloudContent(folid, cntid, data, fileName, 'file', { contentFolder, contentType: format });
socket.emitChanged('cloud-content-changed');
socket.emit('cloud-content-updated');
return resp;
},
copyFile_meta: true,
async copyFile({ folid, cntid, name }) {
const resp = await callCloudApiPost(`content/duplicate/${folid}/${cntid}`, { name });
socket.emitChanged('cloud-content-changed');
socket.emit('cloud-content-updated');
return resp;
},
exportFile_meta: true,
async exportFile({ folid, cntid, filePath }, req) {
const { content } = await getCloudContent(folid, cntid);
if (!content) {
throw new Error('File not found');
}
await fs.writeFile(filePath, content);
return true;
},
folderUsers_meta: true,
async folderUsers({ folid }) {
const resp = await callCloudApiGet(`content-folders/users/${folid}`);
return resp;
},
setFolderUserRole_meta: true,
async setFolderUserRole({ folid, email, role }) {
const resp = await callCloudApiPost(`content-folders/set-user-role/${folid}`, { email, role });
return resp;
},
removeFolderUser_meta: true,
async removeFolderUser({ folid, email }) {
const resp = await callCloudApiPost(`content-folders/remove-user/${folid}`, { email });
return resp;
},
getAiGateway_meta: true,
async getAiGateway() {
return getAiGatewayServer();
},
// chatStream_meta: {
// raw: true,
// method: 'post',
// },
// chatStream(req, res) {
// callChatStream(req.body, res);
// },
};

View File

@@ -16,7 +16,7 @@ const connections = require('../controllers/connections');
const { getAuthProviderFromReq } = require('../auth/authProvider');
const { checkLicense, checkLicenseKey } = require('../utility/checkLicense');
const storage = require('./storage');
const { getAuthProxyUrl } = require('../utility/authProxy');
const { getAuthProxyUrl, tryToGetRefreshedLicense } = require('../utility/authProxy');
const { getPublicHardwareFingerprint } = require('../utility/hardwareFingerprint');
const { extractErrorMessage } = require('dbgate-tools');
const {
@@ -29,6 +29,7 @@ const {
} = require('../utility/crypting');
const lock = new AsyncLock();
let cachedSettingsValue = null;
module.exports = {
// settingsValue: {},
@@ -108,6 +109,7 @@ module.exports = {
),
isAdminPasswordMissing,
isInvalidToken: req?.isInvalidToken,
skipAllAuth: !!process.env.SKIP_ALL_AUTH,
adminPasswordState: adminConfig?.adminPasswordState,
storageDatabase: process.env.STORAGE_DATABASE,
logsFilePath: getLogsFilePath(),
@@ -116,7 +118,9 @@ module.exports = {
processArgs.runE2eTests ? 'connections-e2etests.jsonl' : 'connections.jsonl'
),
supportCloudAutoUpgrade: !!process.env.CLOUD_UPGRADE_FILE,
allowPrivateCloud: platformInfo.isElectron || !!process.env.ALLOW_DBGATE_PRIVATE_CLOUD,
...currentVersion,
redirectToDbGateCloudLogin: !!process.env.REDIRECT_TO_DBGATE_CLOUD_LOGIN,
};
return configResult;
@@ -143,6 +147,13 @@ module.exports = {
return res;
},
async getCachedSettings() {
if (!cachedSettingsValue) {
cachedSettingsValue = await this.loadSettings();
}
return cachedSettingsValue;
},
deleteSettings_meta: true,
async deleteSettings() {
await fs.unlink(path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'));
@@ -181,6 +192,7 @@ module.exports = {
return {
...this.fillMissingSettings(JSON.parse(settingsText)),
'other.licenseKey': platformInfo.isElectron ? await this.loadLicenseKey() : undefined,
// 'other.licenseKey': await this.loadLicenseKey(),
};
}
} catch (err) {
@@ -198,21 +210,34 @@ module.exports = {
},
saveLicenseKey_meta: true,
async saveLicenseKey({ licenseKey }) {
const decoded = jwt.decode(licenseKey);
if (!decoded) {
return {
status: 'error',
errorMessage: 'Invalid license key',
};
}
async saveLicenseKey({ licenseKey, forceSave = false, tryToRenew = false }) {
if (!forceSave) {
const decoded = jwt.decode(licenseKey?.trim());
if (!decoded) {
return {
status: 'error',
errorMessage: 'Invalid license key',
};
}
const { exp } = decoded;
if (exp * 1000 < Date.now()) {
return {
status: 'error',
errorMessage: 'License key is expired',
};
const { exp } = decoded;
if (exp * 1000 < Date.now()) {
let renewed = false;
if (tryToRenew) {
const newLicenseKey = await tryToGetRefreshedLicense(licenseKey);
if (newLicenseKey.status == 'ok') {
licenseKey = newLicenseKey.token;
renewed = true;
}
}
if (!renewed) {
return {
status: 'error',
errorMessage: 'License key is expired',
};
}
}
}
try {
@@ -256,6 +281,7 @@ module.exports = {
updateSettings_meta: true,
async updateSettings(values, req) {
if (!hasPermission(`settings/change`, req)) return false;
cachedSettingsValue = null;
const res = await lock.acquire('settings', async () => {
const currentValue = await this.loadSettings();
@@ -264,7 +290,11 @@ module.exports = {
if (process.env.STORAGE_DATABASE) {
updated = {
...currentValue,
...values,
..._.mapValues(values, v => {
if (v === true) return 'true';
if (v === false) return 'false';
return v;
}),
};
await storage.writeConfig({
group: 'settings',
@@ -282,7 +312,7 @@ module.exports = {
// this.settingsValue = updated;
if (currentValue['other.licenseKey'] != values['other.licenseKey']) {
await this.saveLicenseKey({ licenseKey: values['other.licenseKey'] });
await this.saveLicenseKey({ licenseKey: values['other.licenseKey'], forceSave: true });
socket.emitChanged(`config-changed`);
}
}
@@ -302,7 +332,7 @@ module.exports = {
const resp = await axios.default.get('https://raw.githubusercontent.com/dbgate/dbgate/master/CHANGELOG.md');
return resp.data;
} catch (err) {
return ''
return '';
}
},
@@ -312,6 +342,16 @@ module.exports = {
return resp;
},
getNewLicense_meta: true,
async getNewLicense({ oldLicenseKey }) {
const newLicenseKey = await tryToGetRefreshedLicense(oldLicenseKey);
const res = await checkLicenseKey(newLicenseKey.token);
if (res.status == 'ok') {
res.licenseKey = newLicenseKey.token;
}
return res;
},
recryptDatabaseForExport(db) {
const encryptionKey = generateTransportEncryptionKey();
const transportEncryptor = createTransportEncryptor(encryptionKey);

View File

@@ -116,12 +116,12 @@ function getPortalCollections() {
}
}
logger.info({ connections: connections.map(pickSafeConnectionInfo) }, 'Using connections from ENV variables');
logger.info({ connections: connections.map(pickSafeConnectionInfo) }, 'DBGM-00005 Using connections from ENV variables');
const noengine = connections.filter(x => !x.engine);
if (noengine.length > 0) {
logger.warn(
{ connections: noengine.map(x => x._id) },
'Invalid CONNECTIONS configuration, missing ENGINE for connection ID'
'DBGM-00006 Invalid CONNECTIONS configuration, missing ENGINE for connection ID'
);
}
return connections;
@@ -239,6 +239,19 @@ module.exports = {
return (await this.datastore.find()).filter(x => connectionHasPermission(x, req));
},
async getUsedEngines() {
const storage = require('./storage');
const storageEngines = await storage.getUsedEngines();
if (storageEngines) {
return storageEngines;
}
if (portalConnections) {
return _.uniq(_.compact(portalConnections.map(x => x.engine)));
}
return _.uniq((await this.datastore.find()).map(x => x.engine));
},
test_meta: true,
test({ connection, requestDbList = false }) {
const subprocess = fork(
@@ -410,6 +423,13 @@ module.exports = {
return volatile;
}
const cloudMatch = conid.match(/^cloud\:\/\/(.+)\/(.+)$/);
if (cloudMatch) {
const { loadCachedCloudConnection } = require('../utility/cloudIntf');
const conn = await loadCachedCloudConnection(cloudMatch[1], cloudMatch[2]);
return conn;
}
const storage = require('./storage');
const storageConnection = await storage.getConnection({ conid });
@@ -510,40 +530,40 @@ module.exports = {
socket.emit('got-volatile-token', { strmid, savedConId: conid, volatileConId: volatile._id });
return { success: true };
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB token');
logger.error(extractErrorLogData(err), 'DBGM-00100 Error getting DB token');
return { error: err.message };
}
},
dbloginAuthToken_meta: true,
async dbloginAuthToken({ amoid, code, conid, redirectUri, sid }) {
async dbloginAuthToken({ amoid, code, conid, redirectUri, sid }, req) {
try {
const connection = await this.getCore({ conid });
const driver = requireEngineDriver(connection);
const accessToken = await driver.getAuthTokenFromCode(connection, { code, redirectUri, sid });
const volatile = await this.saveVolatile({ conid, accessToken });
const authProvider = getAuthProviderById(amoid);
const resp = await authProvider.login(null, null, { conid: volatile._id });
const resp = await authProvider.login(null, null, { conid: volatile._id }, req);
return resp;
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB token');
logger.error(extractErrorLogData(err), 'DBGM-00101 Error getting DB token');
return { error: err.message };
}
},
dbloginAuth_meta: true,
async dbloginAuth({ amoid, conid, user, password }) {
async dbloginAuth({ amoid, conid, user, password }, req) {
if (user || password) {
const saveResp = await this.saveVolatile({ conid, user, password, test: true });
if (saveResp.msgtype == 'connected') {
const loginResp = await getAuthProviderById(amoid).login(user, password, { conid: saveResp._id });
const loginResp = await getAuthProviderById(amoid).login(user, password, { conid: saveResp._id }, req);
return loginResp;
}
return saveResp;
}
// user and password is stored in connection, volatile connection is not needed
const loginResp = await getAuthProviderById(amoid).login(null, null, { conid });
const loginResp = await getAuthProviderById(amoid).login(null, null, { conid }, req);
return loginResp;
},

View File

@@ -41,6 +41,7 @@ const { decryptConnection } = require('../utility/crypting');
const { getSshTunnel } = require('../utility/sshTunnel');
const sessions = require('./sessions');
const jsldata = require('./jsldata');
const { sendToAuditLog } = require('../utility/auditlog');
const logger = getLogger('databaseConnections');
@@ -75,7 +76,7 @@ module.exports = {
handle_error(conid, database, props) {
const { error } = props;
logger.error(`Error in database connection ${conid}, database ${database}: ${error}`);
logger.error(`DBGM-00102 Error in database connection ${conid}, database ${database}: ${error}`);
if (props?.msgid) {
const [resolve, reject] = this.requests[props?.msgid];
reject(error);
@@ -83,8 +84,11 @@ module.exports = {
}
},
handle_response(conid, database, { msgid, ...response }) {
const [resolve, reject] = this.requests[msgid];
const [resolve, reject, additionalData] = this.requests[msgid];
resolve(response);
if (additionalData?.auditLogger) {
additionalData?.auditLogger(response);
}
delete this.requests[msgid];
},
handle_status(conid, database, { status }) {
@@ -140,7 +144,7 @@ module.exports = {
handle_copyStreamError(conid, database, { copyStreamError }) {
const { progressName } = copyStreamError;
const { runid } = progressName;
logger.error(`Error in database connection ${conid}, database ${database}: ${copyStreamError}`);
logger.error(`DBGM-00103 Error in database connection ${conid}, database ${database}: ${copyStreamError}`);
socket.emit(`runner-done-${runid}`);
},
@@ -148,6 +152,9 @@ module.exports = {
const existing = this.opened.find(x => x.conid == conid && x.database == database);
if (existing) return existing;
const connection = await connections.getCore({ conid });
if (!connection) {
throw new Error(`databaseConnections: Connection with conid="${conid}" not found`);
}
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
}
@@ -186,7 +193,7 @@ module.exports = {
if (newOpened.disconnected) return;
const funcName = `handle_${msgtype}`;
if (!this[funcName]) {
logger.error(`Unknown message type ${msgtype} from subprocess databaseConnectionProcess`);
logger.error(`DBGM-00104 Unknown message type ${msgtype} from subprocess databaseConnectionProcess`);
return;
}
@@ -197,7 +204,7 @@ module.exports = {
this.close(conid, database, false);
});
subprocess.on('error', err => {
logger.error(extractErrorLogData(err), 'Error in database connection subprocess');
logger.error(extractErrorLogData(err), 'DBGM-00114 Error in database connection subprocess');
if (newOpened.disconnected) return;
this.close(conid, database, false);
});
@@ -212,14 +219,14 @@ module.exports = {
},
/** @param {import('dbgate-types').OpenedDatabaseConnection} conn */
sendRequest(conn, message) {
sendRequest(conn, message, additionalData = {}) {
const msgid = crypto.randomUUID();
const promise = new Promise((resolve, reject) => {
this.requests[msgid] = [resolve, reject];
this.requests[msgid] = [resolve, reject, additionalData];
try {
conn.subprocess.send({ msgid, ...message });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error sending request do process');
logger.error(extractErrorLogData(err), 'DBGM-00115 Error sending request do process');
this.close(conn.conid, conn.database);
}
});
@@ -229,7 +236,7 @@ module.exports = {
queryData_meta: true,
async queryData({ conid, database, sql }, req) {
testConnectionPermission(conid, req);
logger.info({ conid, database, sql }, 'Processing query');
logger.info({ conid, database, sql }, 'DBGM-00007 Processing query');
const opened = await this.ensureOpened(conid, database);
// if (opened && opened.status && opened.status.name == 'error') {
// return opened.status;
@@ -239,18 +246,57 @@ module.exports = {
},
sqlSelect_meta: true,
async sqlSelect({ conid, database, select }, req) {
async sqlSelect({ conid, database, select, auditLogSessionGroup }, req) {
testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid, database);
const res = await this.sendRequest(opened, { msgtype: 'sqlSelect', select });
const res = await this.sendRequest(
opened,
{ msgtype: 'sqlSelect', select },
{
auditLogger:
auditLogSessionGroup && select?.from?.name?.pureName
? response => {
sendToAuditLog(req, {
category: 'dbop',
component: 'DatabaseConnectionsController',
event: 'sql.select',
action: 'select',
severity: 'info',
conid,
database,
schemaName: select?.from?.name?.schemaName,
pureName: select?.from?.name?.pureName,
sumint1: response?.rows?.length,
sessionParam: `${conid}::${database}::${select?.from?.name?.schemaName || '0'}::${
select?.from?.name?.pureName
}`,
sessionGroup: auditLogSessionGroup,
message: `Loaded table data from ${select?.from?.name?.pureName}`,
});
}
: null,
}
);
return res;
},
runScript_meta: true,
async runScript({ conid, database, sql, useTransaction }, req) {
async runScript({ conid, database, sql, useTransaction, logMessage }, req) {
testConnectionPermission(conid, req);
logger.info({ conid, database, sql }, 'Processing script');
logger.info({ conid, database, sql }, 'DBGM-00008 Processing script');
const opened = await this.ensureOpened(conid, database);
sendToAuditLog(req, {
category: 'dbop',
component: 'DatabaseConnectionsController',
event: 'sql.runscript',
action: 'runscript',
severity: 'info',
conid,
database,
detail: sql,
message: logMessage || `Running SQL script`,
});
const res = await this.sendRequest(opened, { msgtype: 'runScript', sql, useTransaction });
return res;
},
@@ -258,17 +304,54 @@ module.exports = {
runOperation_meta: true,
async runOperation({ conid, database, operation, useTransaction }, req) {
testConnectionPermission(conid, req);
logger.info({ conid, database, operation }, 'Processing operation');
logger.info({ conid, database, operation }, 'DBGM-00009 Processing operation');
sendToAuditLog(req, {
category: 'dbop',
component: 'DatabaseConnectionsController',
event: 'sql.runoperation',
action: operation.type,
severity: 'info',
conid,
database,
detail: operation,
message: `Running DB operation: ${operation.type}`,
});
const opened = await this.ensureOpened(conid, database);
const res = await this.sendRequest(opened, { msgtype: 'runOperation', operation, useTransaction });
return res;
},
collectionData_meta: true,
async collectionData({ conid, database, options }, req) {
async collectionData({ conid, database, options, auditLogSessionGroup }, req) {
testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid, database);
const res = await this.sendRequest(opened, { msgtype: 'collectionData', options });
const res = await this.sendRequest(
opened,
{ msgtype: 'collectionData', options },
{
auditLogger:
auditLogSessionGroup && options?.pureName
? response => {
sendToAuditLog(req, {
category: 'dbop',
component: 'DatabaseConnectionsController',
event: 'nosql.collectionData',
action: 'select',
severity: 'info',
conid,
database,
pureName: options?.pureName,
sumint1: response?.result?.rows?.length,
sessionParam: `${conid}::${database}::${options?.pureName}`,
sessionGroup: auditLogSessionGroup,
message: `Loaded collection data ${options?.pureName}`,
});
}
: null,
}
);
return res.result || null;
},
@@ -398,7 +481,7 @@ module.exports = {
try {
existing.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error pinging DB connection');
logger.error(extractErrorLogData(err), 'DBGM-00116 Error pinging DB connection');
this.close(conid, database);
return {
@@ -447,7 +530,7 @@ module.exports = {
try {
existing.subprocess.kill();
} catch (err) {
logger.error(extractErrorLogData(err), 'Error killing subprocess');
logger.error(extractErrorLogData(err), 'DBGM-00117 Error killing subprocess');
}
}
this.opened = this.opened.filter(x => x.conid != conid || x.database != database);
@@ -489,6 +572,20 @@ module.exports = {
}
const opened = await this.ensureOpened(conid, database);
sendToAuditLog(req, {
category: 'dbop',
component: 'DatabaseConnectionsController',
action: 'structure',
event: 'dbStructure.get',
severity: 'info',
conid,
database,
sessionParam: `${conid}::${database}`,
sessionGroup: 'getStructure',
message: `Loaded database structure for ${database}`,
});
return opened.structure;
// const existing = this.opened.find((x) => x.conid == conid && x.database == database);
// if (existing) return existing.status;
@@ -827,7 +924,7 @@ module.exports = {
executeSessionQuery_meta: true,
async executeSessionQuery({ sesid, conid, database, sql }, req) {
testConnectionPermission(conid, req);
logger.info({ sesid, sql }, 'Processing query');
logger.info({ sesid, sql }, 'DBGM-00010 Processing query');
sessions.dispatchMessage(sesid, 'Query execution started');
const opened = await this.ensureOpened(conid, database);

View File

@@ -1,7 +1,7 @@
const fs = require('fs-extra');
const path = require('path');
const crypto = require('crypto');
const { filesdir, archivedir, resolveArchiveFolder, uploadsdir, appdir } = require('../utility/directories');
const { filesdir, archivedir, resolveArchiveFolder, uploadsdir, appdir, jsldir } = require('../utility/directories');
const getChartExport = require('../utility/getChartExport');
const { hasPermission } = require('../utility/hasPermission');
const socket = require('../utility/socket');
@@ -11,6 +11,9 @@ const apps = require('./apps');
const getMapExport = require('../utility/getMapExport');
const dbgateApi = require('../shell');
const { getLogger } = require('dbgate-tools');
const platformInfo = require('../utility/platformInfo');
const { checkSecureFilePathsWithoutDirectory, checkSecureDirectories } = require('../utility/security');
const { copyAppLogsIntoFile, getRecentAppLogRecords } = require('../utility/appLogStore');
const logger = getLogger('files');
function serialize(format, data) {
@@ -51,6 +54,9 @@ module.exports = {
delete_meta: true,
async delete({ folder, file }, req) {
if (!hasPermission(`files/${folder}/write`, req)) return false;
if (!checkSecureFilePathsWithoutDirectory(folder, file)) {
return false;
}
await fs.unlink(path.join(filesdir(), folder, file));
socket.emitChanged(`files-changed`, { folder });
socket.emitChanged(`all-files-changed`);
@@ -60,6 +66,9 @@ module.exports = {
rename_meta: true,
async rename({ folder, file, newFile }, req) {
if (!hasPermission(`files/${folder}/write`, req)) return false;
if (!checkSecureFilePathsWithoutDirectory(folder, file, newFile)) {
return false;
}
await fs.rename(path.join(filesdir(), folder, file), path.join(filesdir(), folder, newFile));
socket.emitChanged(`files-changed`, { folder });
socket.emitChanged(`all-files-changed`);
@@ -77,6 +86,9 @@ module.exports = {
copy_meta: true,
async copy({ folder, file, newFile }, req) {
if (!checkSecureFilePathsWithoutDirectory(folder, file, newFile)) {
return false;
}
if (!hasPermission(`files/${folder}/write`, req)) return false;
await fs.copyFile(path.join(filesdir(), folder, file), path.join(filesdir(), folder, newFile));
socket.emitChanged(`files-changed`, { folder });
@@ -86,6 +98,10 @@ module.exports = {
load_meta: true,
async load({ folder, file, format }, req) {
if (!checkSecureFilePathsWithoutDirectory(folder, file)) {
return false;
}
if (folder.startsWith('archive:')) {
const text = await fs.readFile(path.join(resolveArchiveFolder(folder.substring('archive:'.length)), file), {
encoding: 'utf-8',
@@ -105,12 +121,20 @@ module.exports = {
loadFrom_meta: true,
async loadFrom({ filePath, format }, req) {
if (!platformInfo.isElectron) {
// this is available only in electron app
return false;
}
const text = await fs.readFile(filePath, { encoding: 'utf-8' });
return deserialize(format, text);
},
save_meta: true,
async save({ folder, file, data, format }, req) {
if (!checkSecureFilePathsWithoutDirectory(folder, file)) {
return false;
}
if (folder.startsWith('archive:')) {
if (!hasPermission(`archive/write`, req)) return false;
const dir = resolveArchiveFolder(folder.substring('archive:'.length));
@@ -143,6 +167,11 @@ module.exports = {
saveAs_meta: true,
async saveAs({ filePath, data, format }) {
if (!platformInfo.isElectron) {
// this is available only in electron app
return false;
}
await fs.writeFile(filePath, serialize(format, data));
},
@@ -175,10 +204,10 @@ module.exports = {
},
exportChart_meta: true,
async exportChart({ filePath, title, config, image }) {
async exportChart({ filePath, title, config, image, plugins }) {
const fileName = path.parse(filePath).base;
const imageFile = fileName.replace('.html', '-preview.png');
const html = getChartExport(title, config, imageFile);
const html = getChartExport(title, config, imageFile, plugins);
await fs.writeFile(filePath, html);
if (image) {
const index = image.indexOf('base64,');
@@ -225,7 +254,7 @@ module.exports = {
createZipFromJsons_meta: true,
async createZipFromJsons({ db, filePath }) {
logger.info(`Creating zip file from JSONS ${filePath}`);
logger.info(`DBGM-00011 Creating zip file from JSONS ${filePath}`);
await dbgateApi.zipJsonLinesData(db, filePath);
return true;
},
@@ -251,7 +280,7 @@ module.exports = {
const FOLDERS = ['sql', 'sqlite'];
for (const folder of FOLDERS) {
if (fileName.toLowerCase().endsWith('.' + folder)) {
logger.info(`Saving ${folder} file ${fileName}`);
logger.info(`DBGM-00012 Saving ${folder} file ${fileName}`);
await fs.copyFile(filePath, path.join(filesdir(), folder, fileName));
socket.emitChanged(`files-changed`, { folder: folder });
@@ -263,7 +292,7 @@ module.exports = {
}
}
throw new Error(`${fileName} doesn't have one of supported extensions: ${FOLDERS.join(', ')}`);
throw new Error(`DBGM-00013 ${fileName} doesn't have one of supported extensions: ${FOLDERS.join(', ')}`);
},
exportFile_meta: true,
@@ -275,7 +304,31 @@ module.exports = {
simpleCopy_meta: true,
async simpleCopy({ sourceFilePath, targetFilePath }, req) {
if (!platformInfo.isElectron) {
if (!checkSecureDirectories(sourceFilePath, targetFilePath)) {
return false;
}
}
await fs.copyFile(sourceFilePath, targetFilePath);
return true;
},
fillAppLogs_meta: true,
async fillAppLogs({ dateFrom = 0, dateTo = new Date().getTime(), prepareForExport = false }) {
const jslid = crypto.randomUUID();
const outputFile = path.join(jsldir(), `${jslid}.jsonl`);
await copyAppLogsIntoFile(dateFrom, dateTo, outputFile, prepareForExport);
return {
jslid,
};
},
getRecentAppLog_meta: true,
getRecentAppLog({ limit }) {
const res = getRecentAppLogRecords();
if (limit) {
return res.slice(-limit);
}
return res;
},
};

View File

@@ -10,6 +10,7 @@ const requirePluginFunction = require('../utility/requirePluginFunction');
const socket = require('../utility/socket');
const crypto = require('crypto');
const dbgateApi = require('../shell');
const { ChartProcessor } = require('dbgate-datalib');
function readFirstLine(file) {
return new Promise((resolve, reject) => {
@@ -302,4 +303,19 @@ module.exports = {
await dbgateApi.download(uri, { targetFile: getJslFileName(jslid) });
return { jslid };
},
buildChart_meta: true,
async buildChart({ jslid, definition }) {
const datastore = new JsonLinesDatastore(getJslFileName(jslid));
const processor = new ChartProcessor(definition ? [definition] : undefined);
await datastore.enumRows(row => {
processor.addRow(row);
return true;
});
processor.finalize();
return {
charts: processor.charts,
columns: processor.availableColumns,
};
},
};

View File

@@ -19,6 +19,8 @@ const {
const { handleProcessCommunication } = require('../utility/processComm');
const processArgs = require('../utility/processArgs');
const platformInfo = require('../utility/platformInfo');
const { checkSecureDirectories, checkSecureDirectoriesInScript } = require('../utility/security');
const { sendToAuditLog, logJsonRunnerScript } = require('../utility/auditlog');
const logger = getLogger('runners');
function extractPlugins(script) {
@@ -46,7 +48,7 @@ require=null;
async function run() {
${script}
await dbgateApi.finalizer.run();
logger.info('Finished job script');
logger.info('DBGM-00014 Finished job script');
}
dbgateApi.runScript(run);
`;
@@ -72,7 +74,8 @@ module.exports = {
dispatchMessage(runid, message) {
if (message) {
if (_.isPlainObject(message)) logger.log(message);
if (_.isPlainObject(message))
logger.log({ ...message, msg: message.msg || message.message || '', message: undefined });
else logger.info(message);
const toEmit = _.isPlainObject(message)
@@ -130,7 +133,7 @@ module.exports = {
const pluginNames = extractPlugins(scriptText);
// console.log('********************** SCRIPT TEXT **********************');
// console.log(scriptText);
logger.info({ scriptFile }, 'Running script');
logger.info({ scriptFile }, 'DBGM-00015 Running script');
// const subprocess = fork(scriptFile, ['--checkParent', '--max-old-space-size=8192'], {
const subprocess = fork(
scriptFile,
@@ -169,7 +172,7 @@ module.exports = {
subprocess.on('exit', code => {
// console.log('... EXITED', code);
this.rejectRequest(runid, { message: 'No data returned, maybe input data source is too big' });
logger.info({ code, pid: subprocess.pid }, 'Exited process');
logger.info({ code, pid: subprocess.pid }, 'DBGM-00016 Exited process');
socket.emit(`runner-done-${runid}`, code);
this.opened = this.opened.filter(x => x.runid != runid);
});
@@ -220,7 +223,7 @@ module.exports = {
subprocess.on('exit', code => {
console.log('... EXITED', code);
logger.info({ code, pid: subprocess.pid }, 'Exited process');
logger.info({ code, pid: subprocess.pid }, 'DBGM-00017 Exited process');
this.dispatchMessage(runid, `Finished external process with code ${code}`);
socket.emit(`runner-done-${runid}`, code);
if (onFinished) {
@@ -256,7 +259,7 @@ module.exports = {
severity: 'error',
message: extractErrorMessage(err),
});
logger.error(extractErrorLogData(err), 'Caught error on stdin');
logger.error(extractErrorLogData(err), 'DBGM-00118 Caught error on stdin');
});
}
@@ -269,18 +272,46 @@ module.exports = {
},
start_meta: true,
async start({ script }) {
async start({ script }, req) {
const runid = crypto.randomUUID();
if (script.type == 'json') {
if (!platformInfo.isElectron) {
if (!checkSecureDirectoriesInScript(script)) {
return { errorMessage: 'Unallowed directories in script' };
}
}
logJsonRunnerScript(req, script);
const js = await jsonScriptToJavascript(script);
return this.startCore(runid, scriptTemplate(js, false));
}
if (!platformInfo.allowShellScripting) {
sendToAuditLog(req, {
category: 'shell',
component: 'RunnersController',
event: 'script.runFailed',
action: 'script',
severity: 'warn',
detail: script,
message: 'Scripts are not allowed',
});
return { errorMessage: 'Shell scripting is not allowed' };
}
sendToAuditLog(req, {
category: 'shell',
component: 'RunnersController',
event: 'script.run.shell',
action: 'script',
severity: 'info',
detail: script,
message: 'Running JS script',
});
return this.startCore(runid, scriptTemplate(script, false));
},
@@ -317,6 +348,11 @@ module.exports = {
loadReader_meta: true,
async loadReader({ functionName, props }) {
if (!platformInfo.isElectron) {
if (props?.fileName && !checkSecureDirectories(props.fileName)) {
return { errorMessage: 'Unallowed file' };
}
}
const prefix = extractShellApiPlugins(functionName)
.map(packageName => `// @require ${packageName}\n`)
.join('');

View File

@@ -24,7 +24,7 @@ module.exports = {
if (!match) return;
const pattern = match[1];
if (!cron.validate(pattern)) return;
logger.info(`Schedule script ${file} with pattern ${pattern}`);
logger.info(`DBGM-00018 Schedule script ${file} with pattern ${pattern}`);
const task = cron.schedule(pattern, () => runners.start({ script: text }));
this.tasks.push(task);
},

View File

@@ -12,6 +12,7 @@ const { testConnectionPermission } = require('../utility/hasPermission');
const { MissingCredentialsError } = require('../utility/exceptions');
const pipeForkLogs = require('../utility/pipeForkLogs');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const { sendToAuditLog } = require('../utility/auditlog');
const logger = getLogger('serverConnection');
@@ -52,7 +53,7 @@ module.exports = {
if (existing) return existing;
const connection = await connections.getCore({ conid });
if (!connection) {
throw new Error(`Connection with conid="${conid}" not found`);
throw new Error(`serverConnections: Connection with conid="${conid}" not found`);
}
if (connection.singleDatabase) {
return null;
@@ -102,7 +103,7 @@ module.exports = {
this.close(conid, false);
});
subprocess.on('error', err => {
logger.error(extractErrorLogData(err), 'Error in server connection subprocess');
logger.error(extractErrorLogData(err), 'DBGM-00119 Error in server connection subprocess');
if (newOpened.disconnected) return;
this.close(conid, false);
});
@@ -120,7 +121,7 @@ module.exports = {
try {
existing.subprocess.kill();
} catch (err) {
logger.error(extractErrorLogData(err), 'Error killing subprocess');
logger.error(extractErrorLogData(err), 'DBGM-00120 Error killing subprocess');
}
}
this.opened = this.opened.filter(x => x.conid != conid);
@@ -145,6 +146,17 @@ module.exports = {
if (conid == '__model') return [];
testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid);
sendToAuditLog(req, {
category: 'serverop',
component: 'ServerConnectionsController',
action: 'listDatabases',
event: 'databases.list',
severity: 'info',
conid,
sessionParam: `${conid}`,
sessionGroup: 'listDatabases',
message: `Loaded databases for connection`,
});
return opened?.databases ?? [];
},
@@ -179,7 +191,7 @@ module.exports = {
try {
opened.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error pinging server connection');
logger.error(extractErrorLogData(err), 'DBGM-00121 Error pinging server connection');
this.close(conid);
}
})
@@ -232,7 +244,7 @@ module.exports = {
try {
conn.subprocess.send({ msgid, ...message });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error sending request');
logger.error(extractErrorLogData(err), 'DBGM-00122 Error sending request');
this.close(conn.conid);
}
});

View File

@@ -11,6 +11,7 @@ const { appdir } = require('../utility/directories');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const pipeForkLogs = require('../utility/pipeForkLogs');
const config = require('./config');
const { sendToAuditLog } = require('../utility/auditlog');
const logger = getLogger('sessions');
@@ -83,6 +84,11 @@ module.exports = {
jsldata.notifyChangedStats(stats);
},
handle_charts(sesid, props) {
const { jslid, charts, resultIndex } = props;
socket.emit(`session-charts-${sesid}`, { jslid, resultIndex, charts });
},
handle_initializeFile(sesid, props) {
const { jslid } = props;
socket.emit(`session-initialize-file-${jslid}`);
@@ -141,15 +147,34 @@ module.exports = {
},
executeQuery_meta: true,
async executeQuery({ sesid, sql, autoCommit, limitRows }) {
async executeQuery({ sesid, sql, autoCommit, autoDetectCharts, limitRows, frontMatter }, req) {
const session = this.opened.find(x => x.sesid == sesid);
if (!session) {
throw new Error('Invalid session');
}
logger.info({ sesid, sql }, 'Processing query');
sendToAuditLog(req, {
category: 'dbop',
component: 'SessionController',
action: 'executeQuery',
event: 'query.execute',
severity: 'info',
detail: sql,
conid: session.conid,
database: session.database,
message: 'Executing query',
});
logger.info({ sesid, sql }, 'DBGM-00019 Processing query');
this.dispatchMessage(sesid, 'Query execution started');
session.subprocess.send({ msgtype: 'executeQuery', sql, autoCommit, limitRows });
session.subprocess.send({
msgtype: 'executeQuery',
sql,
autoCommit,
autoDetectCharts: autoDetectCharts || !!frontMatter?.['selected-chart'],
limitRows,
frontMatter,
});
return { state: 'ok' };
},
@@ -161,7 +186,7 @@ module.exports = {
throw new Error('Invalid session');
}
logger.info({ sesid, command }, 'Processing control command');
logger.info({ sesid, command }, 'DBGM-00020 Processing control command');
this.dispatchMessage(sesid, `${_.startCase(command)} started`);
session.subprocess.send({ msgtype: 'executeControlCommand', command });
@@ -199,7 +224,7 @@ module.exports = {
throw new Error('Invalid session');
}
logger.info({ sesid }, 'Starting profiler');
logger.info({ sesid }, 'DBGM-00021 Starting profiler');
session.loadingReader_jslid = jslid;
session.subprocess.send({ msgtype: 'startProfiler', jslid });
@@ -246,7 +271,7 @@ module.exports = {
try {
session.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error pinging session');
logger.error(extractErrorLogData(err), 'DBGM-00145 Error pinging session');
return {
status: 'error',

View File

@@ -31,5 +31,14 @@ module.exports = {
return {};
},
// Marks sendAuditLog as an exposed controller API method.
sendAuditLog_meta: true,
// Audit-log sink stub: accepts the event object and discards it.
// NOTE(review): presumably replaced by a real implementation in the premium
// build — confirm against the pro sources before relying on this contract.
async sendAuditLog({}) {
return null;
},
// License refresh stub: intentionally a no-op in this build.
startRefreshLicense() {},
// Engine-usage reporting stub: no data collected, always resolves to null.
async getUsedEngines() {
return null;
},
};

View File

@@ -28,7 +28,7 @@ module.exports = {
}
const uploadName = crypto.randomUUID();
const filePath = path.join(uploadsdir(), uploadName);
logger.info(`Uploading file ${data.name}, size=${data.size}`);
logger.info(`DBGM-00025 Uploading file ${data.name}, size=${data.size}`);
data.mv(filePath, () => {
res.json({
@@ -44,6 +44,10 @@ module.exports = {
raw: true,
},
get(req, res) {
if (req.query.file.includes('..') || req.query.file.includes('/') || req.query.file.includes('\\')) {
res.status(400).send('Invalid file path');
return;
}
res.sendFile(path.join(uploadsdir(), req.query.file));
},
@@ -111,7 +115,7 @@ module.exports = {
return response.data;
} catch (err) {
logger.error(extractErrorLogData(err), 'Error uploading gist');
logger.error(extractErrorLogData(err), 'DBGM-00148 Error uploading gist');
return {
apiErrorMessage: err.message,

View File

@@ -9,7 +9,7 @@ const currentVersion = require('./currentVersion');
const logger = getLogger('apiIndex');
process.on('uncaughtException', err => {
logger.fatal(extractErrorLogData(err), 'Uncaught exception, exiting process');
logger.fatal(extractErrorLogData(err), 'DBGM-00259 Uncaught exception, exiting process');
process.exit(1);
});
@@ -33,6 +33,9 @@ if (processArgs.processDisplayName) {
// }
function configureLogger() {
const { initializeRecentLogProvider, pushToRecentLogs } = require('./utility/appLogStore');
initializeRecentLogProvider();
const logsFilePath = path.join(logsdir(), `${moment().format('YYYY-MM-DD-HH-mm')}-${process.pid}.ndjson`);
setLogsFilePath(logsFilePath);
setLoggerName('main');
@@ -40,6 +43,8 @@ function configureLogger() {
const consoleLogLevel = process.env.CONSOLE_LOG_LEVEL || process.env.LOG_LEVEL || 'info';
const fileLogLevel = process.env.FILE_LOG_LEVEL || process.env.LOG_LEVEL || 'debug';
const streamsByDatePart = {};
const logConfig = {
base: { pid: process.pid },
targets: [
@@ -49,10 +54,35 @@ function configureLogger() {
level: consoleLogLevel,
},
{
type: 'stream',
type: 'objstream',
// @ts-ignore
level: fileLogLevel,
stream: fs.createWriteStream(logsFilePath, { flags: 'a' }),
objstream: {
send(msg) {
const datePart = moment(msg.time).format('YYYY-MM-DD');
if (!streamsByDatePart[datePart]) {
streamsByDatePart[datePart] = fs.createWriteStream(
path.join(logsdir(), `${moment().format('YYYY-MM-DD-HH-mm')}-${process.pid}.ndjson`),
{ flags: 'a' }
);
}
const additionals = {};
const finalMsg =
msg.msg && msg.msg.match(/^DBGM-\d\d\d\d\d/)
? {
...msg,
msg: msg.msg.substring(10).trimStart(),
msgcode: msg.msg.substring(0, 10),
...additionals,
}
: {
...msg,
...additionals,
};
streamsByDatePart[datePart].write(`${JSON.stringify(finalMsg)}\n`);
pushToRecentLogs(finalMsg);
},
},
},
],
};
@@ -101,10 +131,10 @@ function configureLogger() {
if (processArgs.listenApi) {
configureLogger();
logger.info(`Starting API process version ${currentVersion.version}`);
logger.info(`DBGM-00026 Starting API process version ${currentVersion.version}`);
if (process.env.DEBUG_PRINT_ENV_VARIABLES) {
logger.info('Debug print environment variables:');
logger.info('DBGM-00027 Debug print environment variables:');
for (const key of Object.keys(process.env)) {
logger.info(` ${key}: ${JSON.stringify(process.env[key])}`);
}

View File

@@ -6,6 +6,7 @@ const http = require('http');
const cors = require('cors');
const getPort = require('get-port');
const path = require('path');
const fs = require('fs/promises');
const useController = require('./utility/useController');
const socket = require('./utility/socket');
@@ -27,6 +28,7 @@ const plugins = require('./controllers/plugins');
const files = require('./controllers/files');
const scheduler = require('./controllers/scheduler');
const queryHistory = require('./controllers/queryHistory');
const cloud = require('./controllers/cloud');
const onFinished = require('on-finished');
const processArgs = require('./utility/processArgs');
@@ -39,9 +41,52 @@ const { getDefaultAuthProvider } = require('./auth/authProvider');
const startCloudUpgradeTimer = require('./utility/cloudUpgrade');
const { isProApp } = require('./utility/checkLicense');
const { getHealthStatus, getHealthStatusSprinx } = require('./utility/healthStatus');
const { startCloudFiles } = require('./utility/cloudIntf');
const logger = getLogger('main');
// Registers serving of the web UI from publicDir.
// HTML entry points ('/' and any '*.html') are read from disk per request so
// the <!--HEAD_SCRIPT--> / <!--BODY_SCRIPT--> placeholder comments can be
// substituted: when DBGATE_GTM_ID is set, Google Tag Manager snippets are
// injected; otherwise PAGE_HEAD_SCRIPT / PAGE_BODY_SCRIPT env vars (or empty
// strings) replace the placeholders. All other assets are served statically.
function registerExpressStatic(app, publicDir) {
app.get([getExpressPath('/'), getExpressPath('/*.html')], async (req, res, next) => {
try {
// '/' maps to index.html; any other matched path is used verbatim.
// NOTE(review): req.path is joined into publicDir without extra
// normalization — the '/*.html' route pattern looks restrictive, but
// confirm Express cannot be coaxed into traversal sequences here.
const relPath = req.path === getExpressPath('/') ? '/index.html' : req.path;
const filePath = path.join(publicDir, relPath);
let html = await fs.readFile(filePath, 'utf8');
if (process.env.DBGATE_GTM_ID) {
// Inject the GTM loader script into the head placeholder.
html = html.replace(
/<!--HEAD_SCRIPT-->/g,
`<!-- Google Tag Manager -->
<script>(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start':
new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0],
j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.async=true;j.src=
'https://www.googletagmanager.com/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f);
})(window,document,'script','dataLayer','${process.env.DBGATE_GTM_ID}');</script>
<!-- End Google Tag Manager -->`
);
// Body placeholder: an explicit PAGE_BODY_SCRIPT wins over the GTM
// noscript fallback (?? only falls back when the env var is unset).
html = html.replace(
/<!--BODY_SCRIPT-->/g,
process.env.PAGE_BODY_SCRIPT ??
`<!-- Google Tag Manager (noscript) -->
<noscript><iframe src="https://www.googletagmanager.com/ns.html?id=${process.env.DBGATE_GTM_ID}" height="0" width="0" style="display:none;visibility:hidden"></iframe></noscript>
<!-- End Google Tag Manager (noscript) -->`
);
} else {
// No GTM configured: substitute custom scripts, or strip the placeholders.
html = html.replace(/<!--HEAD_SCRIPT-->/g, process.env.PAGE_HEAD_SCRIPT ?? '');
html = html.replace(/<!--BODY_SCRIPT-->/g, process.env.PAGE_BODY_SCRIPT ?? '');
}
res.type('html').send(html);
} catch (err) {
// Missing file: fall through to the static middleware / downstream 404.
if (err.code === 'ENOENT') return next();
next(err);
}
});
// Static assets for everything else (css/js/images/etc.)
app.use(getExpressPath('/'), express.static(publicDir));
}
function start() {
// console.log('process.argv', process.argv);
@@ -76,22 +121,18 @@ function start() {
if (platformInfo.isDocker) {
// server static files inside docker container
app.use(getExpressPath('/'), express.static('/home/dbgate-docker/public'));
registerExpressStatic(app, '/home/dbgate-docker/public');
} else if (platformInfo.isAwsUbuntuLayout) {
app.use(getExpressPath('/'), express.static('/home/ubuntu/build/public'));
registerExpressStatic(app, '/home/dbgate-docker/public');
registerExpressStatic(app, '/home/ubuntu/build/public');
} else if (platformInfo.isAzureUbuntuLayout) {
app.use(getExpressPath('/'), express.static('/home/azureuser/build/public'));
registerExpressStatic(app, '/home/azureuser/build/public');
} else if (processArgs.runE2eTests) {
app.use(getExpressPath('/'), express.static(path.resolve('packer/build/public')));
registerExpressStatic(app, path.resolve('packer/build/public'));
} else if (platformInfo.isNpmDist) {
app.use(
getExpressPath('/'),
express.static(path.join(__dirname, isProApp() ? '../../dbgate-web-premium/public' : '../../dbgate-web/public'))
);
registerExpressStatic(app, path.join(__dirname, isProApp() ? '../../dbgate-web-premium/public' : '../../dbgate-web/public'));
} else if (process.env.DEVWEB) {
// console.log('__dirname', __dirname);
// console.log(path.join(__dirname, '../../web/public/build'));
app.use(getExpressPath('/'), express.static(path.join(__dirname, '../../web/public')));
registerExpressStatic(app, path.join(__dirname, '../../web/public'));
} else {
app.get(getExpressPath('/'), (req, res) => {
res.send('DbGate API');
@@ -150,15 +191,15 @@ function start() {
if (platformInfo.isDocker) {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (docker build)`);
logger.info(`DBGM-00028 DbGate API listening on port ${port} (docker build)`);
server.listen(port);
} else if (platformInfo.isAwsUbuntuLayout) {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (AWS AMI build)`);
logger.info(`DBGM-00029 DbGate API listening on port ${port} (AWS AMI build)`);
server.listen(port);
} else if (platformInfo.isAzureUbuntuLayout) {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (Azure VM build)`);
logger.info(`DBGM-00030 DbGate API listening on port ${port} (Azure VM build)`);
server.listen(port);
} else if (platformInfo.isNpmDist) {
getPort({
@@ -168,27 +209,27 @@ function start() {
),
}).then(port => {
server.listen(port, () => {
logger.info(`DbGate API listening on port ${port} (NPM build)`);
logger.info(`DBGM-00031 DbGate API listening on port ${port} (NPM build)`);
});
});
} else if (process.env.DEVWEB) {
const port = process.env.PORT || 3000;
logger.info(`DbGate API & web listening on port ${port} (dev web build)`);
logger.info(`DBGM-00032 DbGate API & web listening on port ${port} (dev web build)`);
server.listen(port);
} else {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (dev API build)`);
logger.info(`DBGM-00033 DbGate API listening on port ${port} (dev API build)`);
server.listen(port);
}
function shutdown() {
logger.info('\nShutting down DbGate API server');
logger.info('DBGM-00034 Shutting down DbGate API server');
server.close(() => {
logger.info('Server shut down, terminating');
logger.info('DBGM-00035 Server shut down, terminating');
process.exit(0);
});
setTimeout(() => {
logger.info('Server close timeout, terminating');
logger.info('DBGM-00036 Server close timeout, terminating');
process.exit(0);
}, 1000);
}
@@ -200,6 +241,8 @@ function start() {
if (process.env.CLOUD_UPGRADE_FILE) {
startCloudUpgradeTimer();
}
startCloudFiles();
}
function useAllControllers(app, electron) {
@@ -220,6 +263,7 @@ function useAllControllers(app, electron) {
useController(app, electron, '/query-history', queryHistory);
useController(app, electron, '/apps', apps);
useController(app, electron, '/auth', auth);
useController(app, electron, '/cloud', cloud);
}
function setElectronSender(electronSender) {

View File

@@ -28,14 +28,7 @@ function start() {
let version = {
version: 'Unknown',
};
try {
version = await driver.getVersion(dbhan);
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB server version');
version = {
version: 'Unknown',
};
}
version = await driver.getVersion(dbhan);
let databases = undefined;
if (requestDbList) {
databases = await driver.listDatabases(dbhan);

View File

@@ -6,7 +6,6 @@ const {
extractIntSettingsValue,
getLogger,
isCompositeDbName,
dbNameLogCategory,
extractErrorMessage,
extractErrorLogData,
ScriptWriterEval,
@@ -45,6 +44,14 @@ function getStatusCounter() {
return statusCounter;
}
// Builds the logging context attached to entries emitted by this DB
// connection process: the database name, connection id and engine.
// Each field stays undefined until the corresponding module-level state
// (dbhan / storedConnection) has been initialized.
function getLogInfo() {
  const database = dbhan ? dbhan.database : undefined;
  const conid = dbhan ? dbhan.conid : undefined;
  const engine = storedConnection ? storedConnection.engine : undefined;
  return { database, conid, engine };
}
async function checkedAsyncCall(promise) {
try {
const res = await promise;
@@ -131,10 +138,10 @@ async function readVersion() {
const driver = requireEngineDriver(storedConnection);
try {
const version = await driver.getVersion(dbhan);
logger.debug(`Got server version: ${version.version}`);
logger.debug(getLogInfo(), `DBGM-00037 Got server version: ${version.version}`);
serverVersion = version;
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB server version');
logger.error(extractErrorLogData(err, getLogInfo()), 'DBGM-00149 Error getting DB server version');
serverVersion = { version: 'Unknown' };
}
process.send({ msgtype: 'version', version: serverVersion });
@@ -148,9 +155,8 @@ async function handleConnect({ connection, structure, globalSettings }) {
const driver = requireEngineDriver(storedConnection);
dbhan = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app'));
logger.debug(
`Connected to database, driver: ${storedConnection.engine}, separate schemas: ${
storedConnection.useSeparateSchemas ? 'YES' : 'NO'
}, 'DB: ${dbNameLogCategory(dbhan.database)} }`
getLogInfo(),
`DBGM-00038 Connected to database, separate schemas: ${storedConnection.useSeparateSchemas ? 'YES' : 'NO'}`
);
dbhan.feedback = feedback => setStatus({ feedback });
await checkedAsyncCall(readVersion());
@@ -257,13 +263,16 @@ async function handleDriverDataCore(msgid, callMethod, { logName }) {
const result = await callMethod(driver);
process.send({ msgtype: 'response', msgid, result: serializeJsTypesForJsonStringify(result) });
} catch (err) {
logger.error(extractErrorLogData(err, { logName }), `Error when handling message ${logName}`);
logger.error(
extractErrorLogData(err, { logName, ...getLogInfo() }),
`DBGM-00150 Error when handling message ${logName}`
);
process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error executing DB data') });
}
}
async function handleSchemaList({ msgid }) {
logger.debug('Loading schema list');
logger.debug(getLogInfo(), 'DBGM-00039 Loading schema list');
return handleDriverDataCore(msgid, driver => driver.listSchemas(dbhan), { logName: 'listSchemas' });
}
@@ -351,7 +360,7 @@ async function handleSqlPreview({ msgid, objects, options }) {
process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated });
if (generator.isUnhandledException) {
setTimeout(async () => {
logger.error('Exiting because of unhandled exception');
logger.error(getLogInfo(), 'DBGM-00151 Exiting because of unhandled exception');
await driver.close(dbhan);
process.exit(0);
}, 500);
@@ -485,7 +494,7 @@ function start() {
setInterval(async () => {
const time = new Date().getTime();
if (time - lastPing > 40 * 1000) {
logger.info('Database connection not alive, exiting');
logger.info(getLogInfo(), 'DBGM-00040 Database connection not alive, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);
@@ -497,10 +506,10 @@ function start() {
try {
await handleMessage(message);
} catch (err) {
logger.error(extractErrorLogData(err), 'Error in DB connection');
logger.error(extractErrorLogData(err, getLogInfo()), 'DBGM-00041 Error in DB connection');
process.send({
msgtype: 'error',
error: extractErrorMessage(err, 'Error processing message'),
error: extractErrorMessage(err, 'DBGM-00042 Error processing message'),
msgid: message?.msgid,
});
}

View File

@@ -39,7 +39,7 @@ async function handleRefresh() {
name: 'error',
message: err.message,
});
logger.error(extractErrorLogData(err), 'Error refreshing server databases');
logger.error(extractErrorLogData(err), 'DBGM-00152 Error refreshing server databases');
setTimeout(() => process.exit(1), 1000);
}
}
@@ -50,7 +50,7 @@ async function readVersion() {
try {
version = await driver.getVersion(dbhan);
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB server version');
logger.error(extractErrorLogData(err), 'DBGM-00153 Error getting DB server version');
version = { version: 'Unknown' };
}
process.send({ msgtype: 'version', version });
@@ -90,7 +90,7 @@ async function handleConnect(connection) {
name: 'error',
message: err.message,
});
logger.error(extractErrorLogData(err), 'Error connecting to server');
logger.error(extractErrorLogData(err), 'DBGM-00154 Error connecting to server');
setTimeout(() => process.exit(1), 1000);
}
@@ -120,7 +120,7 @@ async function handleDatabaseOp(op, { msgid, name }) {
} else {
const dmp = driver.createDumper();
dmp[op](name);
logger.info({ sql: dmp.s }, 'Running script');
logger.info({ sql: dmp.s }, 'DBGM-00043 Running script');
await driver.query(dbhan, dmp.s, { discardResult: true });
}
await handleRefresh();
@@ -170,7 +170,7 @@ function start() {
setInterval(async () => {
const time = new Date().getTime();
if (time - lastPing > 40 * 1000) {
logger.info('Server connection not alive, exiting');
logger.info('DBGM-00044 Server connection not alive, exiting');
const driver = requireEngineDriver(storedConnection);
if (dbhan) {
await driver.close(dbhan);
@@ -188,7 +188,7 @@ function start() {
name: 'error',
message: err.message,
});
logger.error(extractErrorLogData(err), `Error processing message ${message?.['msgtype']}`);
logger.error(extractErrorLogData(err), `DBGM-00155 Error processing message ${message?.['msgtype']}`);
}
});
}

View File

@@ -117,7 +117,7 @@ async function handleExecuteControlCommand({ command }) {
}
}
async function handleExecuteQuery({ sql, autoCommit, limitRows }) {
async function handleExecuteQuery({ sql, autoCommit, autoDetectCharts, limitRows, frontMatter }) {
lastActivity = new Date().getTime();
await waitConnected();
@@ -146,7 +146,16 @@ async function handleExecuteQuery({ sql, autoCommit, limitRows }) {
...driver.getQuerySplitterOptions('stream'),
returnRichInfo: true,
})) {
await handleQueryStream(dbhan, driver, queryStreamInfoHolder, sqlItem, undefined, limitRows);
await handleQueryStream(
dbhan,
driver,
queryStreamInfoHolder,
sqlItem,
undefined,
limitRows,
frontMatter,
autoDetectCharts
);
// const handler = new StreamHandler(resultIndex);
// const stream = await driver.stream(systemConnection, sqlItem, handler);
// handler.stream = stream;
@@ -221,7 +230,7 @@ function start() {
setInterval(async () => {
const time = new Date().getTime();
if (time - lastPing > 25 * 1000) {
logger.info('Session not alive, exiting');
logger.info('DBGM-00045 Session not alive, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);
@@ -241,7 +250,7 @@ function start() {
!currentProfiler &&
executingScripts == 0
) {
logger.info('Session not active, exiting');
logger.info('DBGM-00046 Session not active, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);

View File

@@ -41,7 +41,7 @@ async function handleStart({ connection, tunnelConfig }) {
tunnelConfig,
});
} catch (err) {
logger.error(extractErrorLogData(err), 'Error creating SSH tunnel connection:');
logger.error(extractErrorLogData(err), 'DBGM-00156 Error creating SSH tunnel connection:');
process.send({
msgtype: 'error',

View File

@@ -10,7 +10,7 @@ const logger = getLogger();
function archiveWriter({ folderName, fileName }) {
const dir = resolveArchiveFolder(folderName);
if (!fs.existsSync(dir)) {
logger.info(`Creating directory ${dir}`);
logger.info(`DBGM-00047 Creating directory ${dir}`);
fs.mkdirSync(dir);
}
const jsonlFile = path.join(dir, `${fileName}.jsonl`);

View File

@@ -83,7 +83,7 @@ async function copyStream(input, output, options) {
});
}
logger.error(extractErrorLogData(err, { progressName }), 'Import/export job failed');
logger.error(extractErrorLogData(err, { progressName }), 'DBGM-00157 Import/export job failed');
// throw err;
}
}

View File

@@ -14,12 +14,13 @@ const crypto = require('crypto');
* @param {object} options.driver - driver object. If not provided, it will be loaded from connection
* @param {object} options.analysedStructure - analysed structure of the database. If not provided, it will be loaded
* @param {string} options.modelFolder - folder with model files (YAML files for tables, SQL files for views, procedures, ...)
* @param {import('dbgate-tools').DatabaseModelFile[]} options.loadedDbModel - loaded database model - collection of yaml and SQL files loaded into array
* @param {import('dbgate-tools').DatabaseModelFile[] | import('dbgate-types').DatabaseInfo} options.loadedDbModel - loaded database model - collection of yaml and SQL files loaded into array
* @param {function[]} options.modelTransforms - array of functions for transforming model
* @param {object} options.dbdiffOptionsExtra - extra options for dbdiff
* @param {string} options.ignoreNameRegex - regex for ignoring objects by name
* @param {string} options.targetSchema - target schema for deployment
* @param {number} options.maxMissingTablesRatio - maximum ratio of missing tables in database. Safety check: if the missing ratio is higher, deploy is stopped (preventing accidental drop of all tables)
* @param {boolean} options.useTransaction - run deploy in transaction. If not provided, it will be set to true if driver supports transactions
*/
async function deployDb({
connection,
@@ -33,6 +34,7 @@ async function deployDb({
ignoreNameRegex = '',
targetSchema = null,
maxMissingTablesRatio = undefined,
useTransaction,
}) {
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
@@ -60,7 +62,14 @@ async function deployDb({
maxMissingTablesRatio,
});
// console.log('RUNNING DEPLOY SCRIPT:', sql);
await executeQuery({ connection, systemConnection: dbhan, driver, sql, logScriptItems: true });
await executeQuery({
connection,
systemConnection: dbhan,
driver,
sql,
logScriptItems: true,
useTransaction,
});
await scriptDeployer.runPost();
} finally {

View File

@@ -14,6 +14,7 @@ const logger = getLogger('execQuery');
* @param {string} [options.sql] - SQL query
* @param {string} [options.sqlFile] - SQL file
* @param {boolean} [options.logScriptItems] - whether to log script items instead of whole script
* @param {boolean} [options.useTransaction] - run query in transaction
* @param {boolean} [options.skipLogging] - whether to skip logging
*/
async function executeQuery({
@@ -24,25 +25,26 @@ async function executeQuery({
sqlFile = undefined,
logScriptItems = false,
skipLogging = false,
useTransaction,
}) {
if (!logScriptItems && !skipLogging) {
logger.info({ sql: getLimitedQuery(sql) }, `Execute query`);
logger.info({ sql: getLimitedQuery(sql) }, `DBGM-00048 Execute query`);
}
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'script'));
if (sqlFile) {
logger.debug(`Loading SQL file ${sqlFile}`);
logger.debug(`DBGM-00049 Loading SQL file ${sqlFile}`);
sql = await fs.readFile(sqlFile, { encoding: 'utf-8' });
}
try {
if (!skipLogging) {
logger.debug(`Running SQL query, length: ${sql.length}`);
logger.debug(`DBGM-00050 Running SQL query, length: ${sql.length}`);
}
await driver.script(dbhan, sql, { logScriptItems });
await driver.script(dbhan, sql, { logScriptItems, useTransaction });
} finally {
if (!systemConnection) {
await driver.close(dbhan);

View File

@@ -23,7 +23,7 @@ const { connectUtility } = require('../utility/connectUtility');
* @param {object} options.driver - driver object. If not provided, it will be loaded from connection
* @param {object} options.analysedStructure - analysed structure of the database. If not provided, it will be loaded
* @param {string} options.modelFolder - folder with model files (YAML files for tables, SQL files for views, procedures, ...)
* @param {import('dbgate-tools').DatabaseModelFile[]} options.loadedDbModel - loaded database model - collection of yaml and SQL files loaded into array
* @param {import('dbgate-tools').DatabaseModelFile[] | import('dbgate-types').DatabaseInfo} options.loadedDbModel - loaded database model - collection of yaml and SQL files loaded into array
* @param {function[]} options.modelTransforms - array of functions for transforming model
* @param {object} options.dbdiffOptionsExtra - extra options for dbdiff
* @param {string} options.ignoreNameRegex - regex for ignoring objects by name
@@ -52,7 +52,10 @@ async function generateDeploySql({
dbdiffOptionsExtra?.['schemaMode'] !== 'ignore' &&
dbdiffOptionsExtra?.['schemaMode'] !== 'ignoreImplicit'
) {
throw new Error('targetSchema is required for databases with multiple schemas');
if (!driver?.dialect?.defaultSchemaName) {
throw new Error('targetSchema is required for databases with multiple schemas');
}
targetSchema = driver.dialect.defaultSchemaName;
}
try {

View File

@@ -45,14 +45,14 @@ class ImportStream extends stream.Transform {
}
async function importDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, inputFile }) {
logger.info(`Importing database`);
logger.info(`DBGM-00051 Importing database`);
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
try {
logger.info(`Input file: ${inputFile}`);
logger.info(`DBGM-00052 Input file: ${inputFile}`);
const downloadedFile = await download(inputFile);
logger.info(`Downloaded file: ${downloadedFile}`);
logger.info(`DBGM-00053 Downloaded file: ${downloadedFile}`);
const fileStream = fs.createReadStream(downloadedFile, 'utf-8');
const splittedStream = splitQueryStream(fileStream, {

View File

@@ -3,7 +3,7 @@ const fs = require('fs-extra');
const executeQuery = require('./executeQuery');
const { connectUtility } = require('../utility/connectUtility');
const requireEngineDriver = require('../utility/requireEngineDriver');
const { getAlterDatabaseScript, DatabaseAnalyser, runCommandOnDriver } = require('dbgate-tools');
const { getAlterDatabaseScript, DatabaseAnalyser, runCommandOnDriver, adaptDatabaseInfo } = require('dbgate-tools');
const importDbModel = require('../utility/importDbModel');
const jsonLinesReader = require('./jsonLinesReader');
const tableWriter = require('./tableWriter');
@@ -26,10 +26,7 @@ async function importDbFromFolder({ connection, systemConnection, driver, folder
if (driver?.databaseEngineTypes?.includes('sql')) {
const model = await importDbModel(folder);
let modelAdapted = {
...model,
tables: model.tables.map(table => driver.adaptTableInfo(table)),
};
let modelAdapted = adaptDatabaseInfo(model, driver);
for (const transform of modelTransforms || []) {
modelAdapted = transform(modelAdapted);
}

View File

@@ -42,7 +42,7 @@ class ParseStream extends stream.Transform {
* @returns {Promise<readerType>} - reader object
*/
async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) {
logger.info(`Reading file ${fileName}`);
logger.info(`DBGM-00054 Reading file ${fileName}`);
const downloadedFile = await download(fileName);

View File

@@ -33,7 +33,7 @@ class StringifyStream extends stream.Transform {
* @returns {Promise<writerType>} - writer object
*/
async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true }) {
logger.info(`Writing file ${fileName}`);
logger.info(`DBGM-00055 Writing file ${fileName}`);
const stringify = new StringifyStream({ header });
const fileStream = fs.createWriteStream(fileName, encoding);
return [stringify, fileStream];

View File

@@ -63,7 +63,7 @@ async function jsonReader({
encoding = 'utf-8',
limitRows = undefined,
}) {
logger.info(`Reading file ${fileName}`);
logger.info(`DBGM-00056 Reading file ${fileName}`);
const downloadedFile = await download(fileName);
const fileStream = fs.createReadStream(

View File

@@ -96,7 +96,7 @@ class StringifyStream extends stream.Transform {
* @returns {Promise<writerType>} - writer object
*/
async function jsonWriter({ fileName, jsonStyle, keyField = '_key', rootField, encoding = 'utf-8' }) {
logger.info(`Writing file ${fileName}`);
logger.info(`DBGM-00057 Writing file ${fileName}`);
const stringify = new StringifyStream({ jsonStyle, keyField, rootField });
const fileStream = fs.createWriteStream(fileName, encoding);
return [stringify, fileStream];

View File

@@ -6,13 +6,13 @@ const exportDbModel = require('../utility/exportDbModel');
const logger = getLogger('analyseDb');
async function loadDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, outputDir }) {
logger.debug(`Analysing database`);
logger.debug(`DBGM-00058 Analysing database`);
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
try {
const dbInfo = await driver.analyseFull(dbhan);
logger.debug(`Analyse finished`);
logger.debug(`DBGM-00059 Analyse finished`);
await exportDbModel(dbInfo, outputDir);
} finally {

View File

@@ -132,7 +132,7 @@ async function modifyJsonLinesReader({
mergeKey = null,
mergeMode = 'merge',
}) {
logger.info(`Reading file ${fileName} with change set`);
logger.info(`DBGM-00060 Reading file ${fileName} with change set`);
const fileStream = fs.createReadStream(
fileName,

View File

@@ -29,7 +29,7 @@ async function queryReader({
// if (!sql && !json) {
// throw new Error('One of sql or json must be set');
// }
logger.info({ sql: query || sql }, `Reading query`);
logger.info({ sql: query || sql }, `DBGM-00061 Reading query`);
// else console.log(`Reading query ${JSON.stringify(json)}`);
if (!driver) {

View File

@@ -4,6 +4,7 @@ const { pluginsdir, packagedPluginsDir, getPluginBackendPath } = require('../uti
const platformInfo = require('../utility/platformInfo');
const authProxy = require('../utility/authProxy');
const { getLogger } = require('dbgate-tools');
//
const logger = getLogger('requirePlugin');
const loadedPlugins = {};
@@ -12,6 +13,10 @@ const dbgateEnv = {
dbgateApi: null,
platformInfo,
authProxy,
isProApp: () =>{
const { isProApp } = require('../utility/checkLicense');
return isProApp();
}
};
function requirePlugin(packageName, requiredPlugin = null) {
if (!packageName) throw new Error('Missing packageName in plugin');
@@ -20,7 +25,7 @@ function requirePlugin(packageName, requiredPlugin = null) {
if (requiredPlugin == null) {
let module;
const modulePath = getPluginBackendPath(packageName);
logger.info(`Loading module ${packageName} from ${modulePath}`);
logger.info(`DBGM-00062 Loading module ${packageName} from ${modulePath}`);
try {
// @ts-ignore
module = __non_webpack_require__(modulePath);

View File

@@ -11,7 +11,7 @@ async function runScript(func) {
await func();
process.exit(0);
} catch (err) {
logger.error(extractErrorLogData(err), `Error running script`);
logger.error(extractErrorLogData(err), `DBGM-00158 Error running script`);
process.exit(1);
}
}

View File

@@ -41,7 +41,7 @@ class SqlizeStream extends stream.Transform {
}
async function sqlDataWriter({ fileName, dataName, driver, encoding = 'utf-8' }) {
logger.info(`Writing file ${fileName}`);
logger.info(`DBGM-00063 Writing file ${fileName}`);
const stringify = new SqlizeStream({ fileName, dataName });
const fileStream = fs.createWriteStream(fileName, encoding);
return [stringify, fileStream];

View File

@@ -23,7 +23,7 @@ async function tableReader({ connection, systemConnection, pureName, schemaName,
if (driver.databaseEngineTypes.includes('document')) {
// @ts-ignore
logger.info(`Reading collection ${fullNameToString(fullName)}`);
logger.info(`DBGM-00064 Reading collection ${fullNameToString(fullName)}`);
// @ts-ignore
return await driver.readQuery(dbhan, JSON.stringify(fullName));
}
@@ -32,14 +32,14 @@ async function tableReader({ connection, systemConnection, pureName, schemaName,
const query = `select * from ${quoteFullName(driver.dialect, fullName)}`;
if (table) {
// @ts-ignore
logger.info(`Reading table ${fullNameToString(table)}`);
logger.info(`DBGM-00065 Reading table ${fullNameToString(table)}`);
// @ts-ignore
return await driver.readQuery(dbhan, query, table);
}
const view = await driver.analyseSingleObject(dbhan, fullName, 'views');
if (view) {
// @ts-ignore
logger.info(`Reading view ${fullNameToString(view)}`);
logger.info(`DBGM-00066 Reading view ${fullNameToString(view)}`);
// @ts-ignore
return await driver.readQuery(dbhan, query, view);
}

View File

@@ -20,7 +20,7 @@ const logger = getLogger('tableWriter');
* @returns {Promise<writerType>} - writer object
*/
async function tableWriter({ connection, schemaName, pureName, driver, systemConnection, ...options }) {
logger.info(`Writing table ${fullNameToString({ schemaName, pureName })}`);
logger.info(`DBGM-00067 Writing table ${fullNameToString({ schemaName, pureName })}`);
if (!driver) {
driver = requireEngineDriver(connection);

View File

@@ -52,14 +52,14 @@ function unzipDirectory(zipPath, outputDirectory) {
readStream.on('end', () => zipFile.readEntry());
writeStream.on('finish', () => {
logger.info(`Extracted "${entry.fileName}" → "${destPath}".`);
logger.info(`DBGM-00068 Extracted "${entry.fileName}" → "${destPath}".`);
res();
});
writeStream.on('error', writeErr => {
logger.error(
extractErrorLogData(writeErr),
`Error extracting "${entry.fileName}" from "${zipPath}".`
`DBGM-00069 Error extracting "${entry.fileName}" from "${zipPath}".`
);
rej(writeErr);
});
@@ -74,14 +74,14 @@ function unzipDirectory(zipPath, outputDirectory) {
zipFile.on('end', () => {
Promise.all(pending)
.then(() => {
logger.info(`Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
logger.info(`DBGM-00070 Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
resolve(true);
})
.catch(reject);
});
zipFile.on('error', err => {
logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
logger.error(extractErrorLogData(err), `DBGM-00071 ZIP file error in ${zipPath}.`);
reject(err);
});
});

View File

@@ -16,16 +16,16 @@ function zipDirectory(inputDirectory, outputFile) {
// Listen for all archive data to be written
output.on('close', () => {
logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
logger.info(`DBGM-00072 ZIP file created (${archive.pointer()} total bytes)`);
resolve();
});
archive.on('warning', err => {
logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
logger.warn(extractErrorLogData(err), `DBGM-00073 Warning while creating ZIP: ${err.message}`);
});
archive.on('error', err => {
logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
logger.error(extractErrorLogData(err), `DBGM-00074 Error while creating ZIP: ${err.message}`);
reject(err);
});

View File

@@ -17,16 +17,16 @@ function zipDirectory(jsonDb, outputFile) {
// Listen for all archive data to be written
output.on('close', () => {
logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
logger.info(`DBGM-00075 ZIP file created (${archive.pointer()} total bytes)`);
resolve();
});
archive.on('warning', err => {
logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
logger.warn(extractErrorLogData(err), `DBGM-00076 Warning while creating ZIP: ${err.message}`);
});
archive.on('error', err => {
logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
logger.error(extractErrorLogData(err), `DBGM-00077 Error while creating ZIP: ${err.message}`);
reject(err);
});

View File

@@ -1,5 +1,192 @@
module.exports = {
"tables": [
{
"pureName": "audit_log",
"columns": [
{
"pureName": "audit_log",
"columnName": "id",
"dataType": "int",
"autoIncrement": true,
"notNull": true
},
{
"pureName": "audit_log",
"columnName": "created",
"dataType": "bigint",
"notNull": true
},
{
"pureName": "audit_log",
"columnName": "modified",
"dataType": "bigint",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "user_id",
"dataType": "int",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "user_login",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "category",
"dataType": "varchar(50)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "component",
"dataType": "varchar(50)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "action",
"dataType": "varchar(50)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "severity",
"dataType": "varchar(50)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "event",
"dataType": "varchar(100)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "message",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "detail",
"dataType": "varchar(1000)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "detail_full_length",
"dataType": "int",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "session_id",
"dataType": "varchar(200)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "session_group",
"dataType": "varchar(50)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "session_param",
"dataType": "varchar(200)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "conid",
"dataType": "varchar(100)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "connection_data",
"dataType": "varchar(1000)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "database",
"dataType": "varchar(200)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "schema_name",
"dataType": "varchar(100)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "pure_name",
"dataType": "varchar(100)",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "sumint_1",
"dataType": "int",
"notNull": false
},
{
"pureName": "audit_log",
"columnName": "sumint_2",
"dataType": "int",
"notNull": false
}
],
"foreignKeys": [
{
"constraintType": "foreignKey",
"constraintName": "FK_audit_log_user_id",
"pureName": "audit_log",
"refTableName": "users",
"deleteAction": "SET NULL",
"columns": [
{
"columnName": "user_id",
"refColumnName": "id"
}
]
}
],
"indexes": [
{
"constraintName": "idx_audit_log_session",
"pureName": "audit_log",
"constraintType": "index",
"columns": [
{
"columnName": "session_group"
},
{
"columnName": "session_id"
},
{
"columnName": "session_param"
}
]
}
],
"primaryKey": {
"pureName": "audit_log",
"constraintType": "primaryKey",
"constraintName": "PK_audit_log",
"columns": [
{
"columnName": "id"
}
]
}
},
{
"pureName": "auth_methods",
"columns": [
@@ -50,6 +237,7 @@ module.exports = {
"primaryKey": {
"pureName": "auth_methods",
"constraintType": "primaryKey",
"constraintName": "PK_auth_methods",
"columns": [
{
"columnName": "id"
@@ -61,7 +249,8 @@ module.exports = {
"id": -1,
"amoid": "790ca4d2-7f01-4800-955b-d691b890cc50",
"name": "Anonymous",
"type": "none"
"type": "none",
"is_disabled": 1
},
{
"id": -2,
@@ -69,6 +258,9 @@ module.exports = {
"name": "Local",
"type": "local"
}
],
"preloadedRowsInsertOnly": [
"is_disabled"
]
},
{
@@ -103,6 +295,7 @@ module.exports = {
"foreignKeys": [
{
"constraintType": "foreignKey",
"constraintName": "FK_auth_methods_config_auth_method_id",
"pureName": "auth_methods_config",
"refTableName": "auth_methods",
"deleteAction": "CASCADE",
@@ -114,9 +307,25 @@ module.exports = {
]
}
],
"uniques": [
{
"constraintName": "UQ_auth_methods_config_auth_method_id_key",
"pureName": "auth_methods_config",
"constraintType": "unique",
"columns": [
{
"columnName": "auth_method_id"
},
{
"columnName": "key"
}
]
}
],
"primaryKey": {
"pureName": "auth_methods_config",
"constraintType": "primaryKey",
"constraintName": "PK_auth_methods_config",
"columns": [
{
"columnName": "id"
@@ -154,9 +363,25 @@ module.exports = {
}
],
"foreignKeys": [],
"uniques": [
{
"constraintName": "UQ_config_group_key",
"pureName": "config",
"constraintType": "unique",
"columns": [
{
"columnName": "group"
},
{
"columnName": "key"
}
]
}
],
"primaryKey": {
"pureName": "config",
"constraintType": "primaryKey",
"constraintName": "PK_config",
"columns": [
{
"columnName": "id"
@@ -294,6 +519,12 @@ module.exports = {
"dataType": "int",
"notNull": false
},
{
"pureName": "connections",
"columnName": "useSeparateSchemas",
"dataType": "int",
"notNull": false
},
{
"pureName": "connections",
"columnName": "defaultDatabase",
@@ -443,12 +674,19 @@ module.exports = {
"columnName": "awsRegion",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "connectionDefinition",
"dataType": "text",
"notNull": false
}
],
"foreignKeys": [],
"primaryKey": {
"pureName": "connections",
"constraintType": "primaryKey",
"constraintName": "PK_connections",
"columns": [
{
"columnName": "id"
@@ -477,6 +715,7 @@ module.exports = {
"primaryKey": {
"pureName": "roles",
"constraintType": "primaryKey",
"constraintName": "PK_roles",
"columns": [
{
"columnName": "id"
@@ -524,6 +763,7 @@ module.exports = {
"foreignKeys": [
{
"constraintType": "foreignKey",
"constraintName": "FK_role_connections_role_id",
"pureName": "role_connections",
"refTableName": "roles",
"deleteAction": "CASCADE",
@@ -536,6 +776,7 @@ module.exports = {
},
{
"constraintType": "foreignKey",
"constraintName": "FK_role_connections_connection_id",
"pureName": "role_connections",
"refTableName": "connections",
"deleteAction": "CASCADE",
@@ -550,6 +791,7 @@ module.exports = {
"primaryKey": {
"pureName": "role_connections",
"constraintType": "primaryKey",
"constraintName": "PK_role_connections",
"columns": [
{
"columnName": "id"
@@ -583,6 +825,7 @@ module.exports = {
"foreignKeys": [
{
"constraintType": "foreignKey",
"constraintName": "FK_role_permissions_role_id",
"pureName": "role_permissions",
"refTableName": "roles",
"deleteAction": "CASCADE",
@@ -597,6 +840,7 @@ module.exports = {
"primaryKey": {
"pureName": "role_permissions",
"constraintType": "primaryKey",
"constraintName": "PK_role_permissions",
"columns": [
{
"columnName": "id"
@@ -637,6 +881,7 @@ module.exports = {
"primaryKey": {
"pureName": "users",
"constraintType": "primaryKey",
"constraintName": "PK_users",
"columns": [
{
"columnName": "id"
@@ -670,6 +915,7 @@ module.exports = {
"foreignKeys": [
{
"constraintType": "foreignKey",
"constraintName": "FK_user_connections_user_id",
"pureName": "user_connections",
"refTableName": "users",
"deleteAction": "CASCADE",
@@ -682,6 +928,7 @@ module.exports = {
},
{
"constraintType": "foreignKey",
"constraintName": "FK_user_connections_connection_id",
"pureName": "user_connections",
"refTableName": "connections",
"deleteAction": "CASCADE",
@@ -696,6 +943,7 @@ module.exports = {
"primaryKey": {
"pureName": "user_connections",
"constraintType": "primaryKey",
"constraintName": "PK_user_connections",
"columns": [
{
"columnName": "id"
@@ -729,6 +977,7 @@ module.exports = {
"foreignKeys": [
{
"constraintType": "foreignKey",
"constraintName": "FK_user_permissions_user_id",
"pureName": "user_permissions",
"refTableName": "users",
"deleteAction": "CASCADE",
@@ -743,6 +992,7 @@ module.exports = {
"primaryKey": {
"pureName": "user_permissions",
"constraintType": "primaryKey",
"constraintName": "PK_user_permissions",
"columns": [
{
"columnName": "id"
@@ -776,6 +1026,7 @@ module.exports = {
"foreignKeys": [
{
"constraintType": "foreignKey",
"constraintName": "FK_user_roles_user_id",
"pureName": "user_roles",
"refTableName": "users",
"deleteAction": "CASCADE",
@@ -788,6 +1039,7 @@ module.exports = {
},
{
"constraintType": "foreignKey",
"constraintName": "FK_user_roles_role_id",
"pureName": "user_roles",
"refTableName": "roles",
"deleteAction": "CASCADE",
@@ -802,6 +1054,7 @@ module.exports = {
"primaryKey": {
"pureName": "user_roles",
"constraintType": "primaryKey",
"constraintName": "PK_user_roles",
"columns": [
{
"columnName": "id"
@@ -815,5 +1068,6 @@ module.exports = {
"matviews": [],
"functions": [],
"procedures": [],
"triggers": []
"triggers": [],
"schedulerEvents": []
};

Some files were not shown because too many files have changed in this diff Show More