Compare commits

...

368 Commits

Author SHA1 Message Date
Jan Prochazka
d27b0a7be3 v5.5.7-packer-beta.2 2024-10-24 12:00:55 +02:00
Jan Prochazka
076b20ef6d fix 2024-10-24 12:00:42 +02:00
Jan Prochazka
d93d107039 v5.5.7-packer-beta.1 2024-10-24 11:59:26 +02:00
Jan Prochazka
708dcfd088 packer pipeline 2024-10-24 11:58:44 +02:00
Jan Prochazka
14501a70b9 reporting SSH tunnel errors 2024-10-23 16:35:56 +02:00
Jan Prochazka
3b82679c2d port changed 2024-10-23 12:14:28 +02:00
SPRINX0\prochazka
48d4fb4fec UX 2024-10-23 09:49:10 +02:00
SPRINX0\prochazka
a03ca73d93 admin page workflow 2024-10-23 09:41:39 +02:00
SPRINX0\prochazka
a46e592cfb workflow changes 2024-10-22 17:06:56 +02:00
SPRINX0\prochazka
634bea1bda missing logging 2024-10-22 16:33:47 +02:00
SPRINX0\prochazka
3dfa23a30c sortable object list controls #922 2024-10-22 16:09:48 +02:00
SPRINX0\prochazka
24408dd7c2 empty value testing 2024-10-22 15:23:32 +02:00
Jan Prochazka
32c7919885 special pages workflow changed 2024-10-21 17:36:46 +02:00
Jan Prochazka
967615b6e5 special page refactor 2024-10-21 13:18:16 +02:00
Jan Prochazka
aee3a28465 refactor 2024-10-21 13:07:29 +02:00
Jan Prochazka
3fdf27f820 special page refactor 2024-10-21 13:02:50 +02:00
Jan Prochazka
e9302c7d6f performance fix 2024-10-18 15:08:59 +02:00
Jan Prochazka
c38fe83e48 aws ubuntu layout 2024-10-18 13:57:55 +02:00
Jan Prochazka
adfc427d25 aws AMI layout 2024-10-18 13:32:17 +02:00
Jan Prochazka
3912a58127 build ami script 2024-10-18 11:35:33 +02:00
SPRINX0\prochazka
720d25e838 changelog 2024-10-17 13:33:14 +02:00
SPRINX0\prochazka
bc1d77a6f8 v5.5.6 2024-10-17 13:25:01 +02:00
SPRINX0\prochazka
00a8d472ff v5.5.6-beta.11 2024-10-17 12:53:56 +02:00
SPRINX0\prochazka
5076ee0463 v5.5.6 2024-10-17 12:52:26 +02:00
SPRINX0\prochazka
4a5ddd65f4 v5.5.6-premium-beta.10 2024-10-17 11:58:06 +02:00
SPRINX0\prochazka
ec3c224f44 show storage connection error 2024-10-17 11:56:24 +02:00
SPRINX0\prochazka
39b99ecf8f v5.5.6-premium-beta.9 2024-10-15 16:51:11 +02:00
SPRINX0\prochazka
50232f9b90 v5.5.6-premium-beta.8 2024-10-15 16:24:02 +02:00
SPRINX0\prochazka
66e8cc6e1d v5.5.6-premium-beta.7 2024-10-15 15:10:16 +02:00
SPRINX0\prochazka
f6b4c94d00 fix 2024-10-15 15:10:05 +02:00
SPRINX0\prochazka
f3ce240bcd v5.5.6-premium-beta.6 2024-10-15 14:38:18 +02:00
SPRINX0\prochazka
7be9bf1bab logging 2024-10-15 14:37:16 +02:00
SPRINX0\prochazka
d39871be70 v5.5.6-premium-beta.5 2024-10-15 13:55:19 +02:00
SPRINX0\prochazka
3324eec011 support for DEBUG_PRINT_ENV_VARIABLES 2024-10-15 13:55:00 +02:00
SPRINX0\prochazka
dd9790fca5 removed deprecated mongodb useUnifiedTopology options 2024-10-15 12:30:04 +02:00
SPRINX0\prochazka
41a3769c5f fixed headers sent error 2024-10-15 12:28:47 +02:00
SPRINX0\prochazka
42601ff960 fixed mongo update for mongo4 #916 2024-10-15 10:38:44 +02:00
SPRINX0\prochazka
e54cffbaf3 v5.5.6-beta.4 2024-10-15 10:21:03 +02:00
Jan Prochazka
f8dbad362c Merge branch 'master' of github.com:dbgate/dbgate 2024-10-15 10:20:08 +02:00
SPRINX0\prochazka
925db50418 log 2024-10-15 10:08:15 +02:00
SPRINX0\prochazka
f40db68579 process should exit on unhandled exception 2024-10-15 10:05:18 +02:00
Jan Prochazka
3afde5f1fa v5.5.6-beta.3 2024-10-15 09:44:22 +02:00
Jan Prochazka
b631519009 exit forked process after PIPE is closed #917 #915 2024-10-15 09:41:02 +02:00
SPRINX0\prochazka
f05c60c628 v5.5.6-premium-beta.2 2024-10-14 17:53:51 +02:00
SPRINX0\prochazka
e235de6d56 versionText field added 2024-10-14 17:48:04 +02:00
SPRINX0\prochazka
da47c437e0 v5.5.6-beta.1 2024-10-14 14:29:14 +02:00
SPRINX0\prochazka
f0048bc6cf correctly close connections #920 2024-10-14 14:28:26 +02:00
SPRINX0\prochazka
f80ae284fa correctly close idle connections #920 2024-10-14 14:03:07 +02:00
Jan Prochazka
06753ff312 v5.5.5 2024-10-11 10:22:00 +02:00
Jan Prochazka
1e07614306 changelog 2024-10-11 10:20:37 +02:00
Jan Prochazka
2d716ba43a v5.5.5-premium-beta.5 2024-10-11 09:59:28 +02:00
Jan Prochazka
c39dc6295d css fix 2024-10-11 09:58:54 +02:00
Jan Prochazka
7557666135 v5.5.5-premium-beta.4 2024-10-11 09:40:30 +02:00
SPRINX0\prochazka
5ee65124cb v5.5.5-beta.3 2024-10-10 16:20:01 +02:00
SPRINX0\prochazka
ab79617377 v5.5.5-premium-beta.2 2024-10-10 16:19:41 +02:00
SPRINX0\prochazka
3b2a47a4ef login admin/user switch 2024-10-10 15:39:56 +02:00
SPRINX0\prochazka
7b4d9d8717 quick login buttons 2024-10-10 15:17:54 +02:00
SPRINX0\prochazka
4c9734ac7f fixed hiding columns #887 #911 2024-10-10 13:59:41 +02:00
SPRINX0\prochazka
152e8a80ab title 2024-10-10 13:48:42 +02:00
SPRINX0\prochazka
98a348b091 CSS 2024-10-10 13:37:08 +02:00
SPRINX0\prochazka
e448f63ec3 store query parameters 2024-10-10 13:21:48 +02:00
SPRINX0\prochazka
0b13850eca query parameters #913 2024-10-10 13:09:34 +02:00
SPRINX0\prochazka
de97404602 redis load keys fix 2024-10-10 07:53:41 +02:00
SPRINX0\prochazka
cbd6ce7872 v5.5.5-premium-beta.1 2024-10-09 16:08:30 +02:00
SPRINX0\prochazka
3c479eb33c changeset function 2024-10-09 16:07:18 +02:00
SPRINX0\prochazka
beaff158cc query result - use editor behaviour from driver 2024-10-08 15:12:12 +02:00
SPRINX0\prochazka
6806620d90 fixed datetime filtering #912 2024-10-08 14:38:46 +02:00
SPRINX0\prochazka
4459347169 fix 2024-10-08 13:48:50 +02:00
SPRINX0\prochazka
98e01497e9 checkout source parameter 2024-10-08 13:10:48 +02:00
SPRINX0\prochazka
4ed4c8c32c fix 2024-10-08 12:49:15 +02:00
SPRINX0\prochazka
620acecdff trial days left warning 2024-10-08 12:43:01 +02:00
SPRINX0\prochazka
2d214cfdb3 fix 2024-10-08 10:02:03 +02:00
SPRINX0\prochazka
cd36259739 AWS IAM auth for PostgreSQL 2024-10-08 09:55:51 +02:00
SPRINX0\prochazka
d049d8c571 AWS IAM connection for MySQL 2024-10-08 09:30:51 +02:00
SPRINX0\prochazka
7c51fcad96 AWS IAM WIP 2024-10-07 16:05:12 +02:00
SPRINX0\prochazka
1948c8ef89 fix export dialog for useSeparateSchemas=true 2024-10-07 08:22:35 +02:00
SPRINX0\prochazka
f2b2ac6fd0 v5.5.4 2024-10-04 15:29:08 +02:00
SPRINX0\prochazka
4098f63ce2 changelog 2024-10-04 15:19:31 +02:00
SPRINX0\prochazka
4d903abd85 changelog 2024-10-04 15:17:42 +02:00
SPRINX0\prochazka
f00eb2d3ef v5.5.4-beta.10 2024-10-04 14:54:44 +02:00
SPRINX0\prochazka
6752dcfd39 fixed crash #908 2024-10-04 14:54:29 +02:00
SPRINX0\prochazka
c3022eb80a v5.5.4-premium-beta.9 2024-10-04 14:42:45 +02:00
SPRINX0\prochazka
2f6cbf25df v5.5.5 2024-10-04 14:42:03 +02:00
SPRINX0\prochazka
8dfc2e7bcd handle expired license 2024-10-04 14:32:46 +02:00
Jan Prochazka
fa0ad477cc fixed crash #908 2024-10-04 07:58:32 +02:00
SPRINX0\prochazka
ac6a68c38d v5.5.4-alpha.8 2024-10-03 09:58:24 +02:00
SPRINX0\prochazka
25223471e7 fix deployer 2024-10-03 09:58:03 +02:00
SPRINX0\prochazka
56535b1e6f v5.5.4-alpha.7 2024-10-02 16:04:00 +02:00
SPRINX0\prochazka
131c51f7c4 v5.4.4-alpha.7 2024-10-02 16:02:02 +02:00
SPRINX0\prochazka
a27a2077ed comment 2024-10-02 10:45:06 +02:00
SPRINX0\prochazka
7758fabc89 code style 2024-10-02 10:44:35 +02:00
SPRINX0\prochazka
c9da9bdd23 Merge branch 'master' of https://github.com/dbgate/dbgate 2024-10-02 10:43:32 +02:00
Jan Prochazka
9520b053af Merge pull request #907 from yoadey/master
Fix 727: access_token not a jwt
2024-10-02 10:43:27 +02:00
SPRINX0\prochazka
8a1e717c1b v5.5.4-premium-beta.6 2024-10-02 10:26:43 +02:00
SPRINX0\prochazka
21127f661a better error reporting 2024-10-02 10:25:11 +02:00
yoadey
7d614a2395 Fix 727: access_token not a jwt 2024-10-02 10:24:19 +02:00
SPRINX0\prochazka
669ae024f9 v5.5.4-beta.5 2024-10-01 16:43:09 +02:00
SPRINX0\prochazka
9d8dd558e2 fixed load postgres schema on Azure #906 2024-10-01 16:36:26 +02:00
SPRINX0\prochazka
67f58a8dfe fix 2024-10-01 12:42:56 +02:00
SPRINX0\prochazka
52d230b9e2 v5.5.4-alpha.4 2024-10-01 12:21:57 +02:00
SPRINX0\prochazka
fd2a35fb4a Merge branch 'master' of https://github.com/dbgate/dbgate 2024-10-01 12:17:36 +02:00
Jan Prochazka
d61b5e135f safer env vars in dbmodel connection 2024-10-01 12:17:25 +02:00
SPRINX0\prochazka
ef23b786ac better error reporting 2024-10-01 12:15:22 +02:00
SPRINX0\prochazka
29a66bfcb0 v5.5.4-alpha.3 2024-10-01 11:07:52 +02:00
Jan Prochazka
ef5e30df3d dbmodel - allow connection from env variables 2024-10-01 11:07:16 +02:00
SPRINX0\prochazka
ab28a06bef close dbhandles after shell script (missing tableReader) 2024-10-01 10:56:52 +02:00
SPRINX0\prochazka
9910c54aa6 v5.5.4-alpha.2 2024-10-01 10:39:32 +02:00
Jan Prochazka
6ec431f471 dbmodel fix 2024-10-01 10:38:35 +02:00
Jan Prochazka
3ec7f651c1 not connected deploy test 2024-10-01 10:37:05 +02:00
SPRINX0\prochazka
87aa60bc3e v5.5.4-alpha.1 2024-10-01 09:19:47 +02:00
SPRINX0\prochazka
73874aa5a1 added missing dependency 2024-10-01 09:19:32 +02:00
Jan Prochazka
976438f860 fixed LOGIN & PASSWORD scenario #903 2024-09-27 10:46:33 +02:00
Jan Prochazka
eb095b7c44 changelog 2024-09-27 10:12:07 +02:00
Jan Prochazka
3ca745e74b v5.5.3 2024-09-27 10:11:58 +02:00
Jan Prochazka
040de84d93 changelog 2024-09-27 10:10:31 +02:00
Jan Prochazka
4f1d63440e v5.5.3-beta.4 2024-09-27 08:39:17 +02:00
Jan Prochazka
7c3cf1bb67 Merge branch 'feature/copy-stdin-import' 2024-09-27 08:34:03 +02:00
Jan Prochazka
cbf1b0a3cc import SQL dump tests 2024-09-27 08:33:16 +02:00
Jan Prochazka
5287a86397 import from postgres dump 2024-09-27 08:08:13 +02:00
SPRINX0\prochazka
ae599ac6f6 copy from stdin WIP 2024-09-26 16:06:54 +02:00
SPRINX0\prochazka
a08a8ef208 upgraded dbgate-query-splitter 2024-09-26 15:50:08 +02:00
SPRINX0\prochazka
19a4d97765 postgres copystream support 2024-09-26 15:48:29 +02:00
SPRINX0\prochazka
6f1f5f84c6 fix 2024-09-26 14:52:50 +02:00
SPRINX0\prochazka
e2f352149d fix + ability to choose imported table 2024-09-26 14:49:58 +02:00
SPRINX0\prochazka
1fa39b20d2 fixedTargetName fix 2024-09-26 13:57:10 +02:00
SPRINX0\prochazka
53dc2e6f03 fixed import/export for separate schemas 2024-09-26 13:22:36 +02:00
SPRINX0\prochazka
555f30c0b3 v5.5.3-beta.3 2024-09-26 12:39:45 +02:00
SPRINX0\prochazka
7549d37a04 better column mapping 2024-09-26 12:39:29 +02:00
SPRINX0\prochazka
29072eb71b v5.5.3-beta.2 2024-09-26 12:38:05 +02:00
SPRINX0\prochazka
48e9e77be5 column drop down in column map modal 2024-09-26 12:29:24 +02:00
SPRINX0\prochazka
4dd3f15ba3 better column chooser 2024-09-26 12:14:10 +02:00
SPRINX0\prochazka
3603501ae2 show single schema only if it is default schema 2024-09-26 10:01:02 +02:00
Jan Prochazka
338180a21a fix 2024-09-26 09:44:37 +02:00
Jan Prochazka
28193ed6f3 new_table - id should be not null 2024-09-26 09:41:32 +02:00
Jan Prochazka
0509710602 handle DB errors 2024-09-26 09:38:49 +02:00
Jan Prochazka
a4872b4159 fixed Syntax error when trying to sort by UUID column #895 2024-09-26 09:06:14 +02:00
Jan Prochazka
888f5c6260 v5.5.3-beta.1 2024-09-26 08:20:37 +02:00
Jan Prochazka
7a5abb5f47 Fixed separate schema mode, more logging #894 2024-09-26 08:12:51 +02:00
Jan Prochazka
61a9f02899 fix WIP 2024-09-25 17:01:14 +02:00
Jan Prochazka
354c4201f7 changelog 2024-09-25 12:07:35 +02:00
Jan Prochazka
d8340087c5 v5.5.2 2024-09-25 10:50:18 +02:00
Jan Prochazka
e3249c6d79 fixed readonly connection for MySQL 2024-09-25 10:49:37 +02:00
Jan Prochazka
58e65608e4 fixed postgres connections for readonly connection #900 2024-09-25 10:46:07 +02:00
Jan Prochazka
2b6fdf5a6a readme 2024-09-25 10:32:55 +02:00
Jan Prochazka
967d7849ee azure SQL in changelog 2024-09-25 10:26:38 +02:00
Jan Prochazka
7c476ab2f0 changelog 2024-09-25 09:31:40 +02:00
Jan Prochazka
caaf35e45a v5.5.1 2024-09-25 09:30:23 +02:00
Jan Prochazka
6ddc9ee6c5 fix 2024-09-25 09:25:00 +02:00
Jan Prochazka
39aa250223 changelog 2024-09-25 09:23:04 +02:00
Jan Prochazka
031a92db8e v5.5.0 2024-09-25 09:22:36 +02:00
Jan Prochazka
0c2579897f fixed multiple shortcuts handling #898 2024-09-25 09:10:54 +02:00
Jan Prochazka
b63479bf45 changelog 2024-09-25 08:50:44 +02:00
Jan Prochazka
4c89552265 v5.4.5-beta.15 2024-09-25 08:23:04 +02:00
SPRINX0\prochazka
517002e079 fixed importing mysql dump #702 2024-09-24 15:54:54 +02:00
SPRINX0\prochazka
85bfb1986d fixed redirect_uri parameter #891 2024-09-24 14:45:00 +02:00
SPRINX0\prochazka
632421eb73 copy connection error to clipboard 2024-09-24 14:23:06 +02:00
SPRINX0\prochazka
21365be411 v5.4.5-beta.14 2024-09-24 09:00:23 +02:00
SPRINX0\prochazka
eaa54022fc css fix 2024-09-24 09:00:05 +02:00
SPRINX0\prochazka
55f7f39efd postgres - user current_schema instead of search_path 2024-09-24 08:59:58 +02:00
SPRINX0\prochazka
71e709b346 v5.4.5-beta.13 2024-09-23 09:40:49 +02:00
SPRINX0\prochazka
1d2d295a45 log driver errors, even when they are sent to client 2024-09-23 09:40:29 +02:00
SPRINX0\prochazka
5ed23beff0 v5.4.5-premium-beta.12 2024-09-20 16:48:01 +02:00
SPRINX0\prochazka
43a8db55a2 v5.4.5-beta.11 2024-09-20 16:00:53 +02:00
SPRINX0\prochazka
0a9cba7bf7 quick export from table result #892 2024-09-20 16:00:03 +02:00
SPRINX0\prochazka
02af761bf7 postgre fix 2024-09-20 15:50:16 +02:00
SPRINX0\prochazka
6882a146e7 fixed build error 2024-09-20 15:00:11 +02:00
SPRINX0\prochazka
c9834f9792 Merge branch 'feature/separate-schemas-2' 2024-09-20 14:54:07 +02:00
SPRINX0\prochazka
21b4baf700 v5.4.5-beta.10 2024-09-20 14:52:47 +02:00
SPRINX0\prochazka
d3a24627dd postgres - show system databases when using separate schemas 2024-09-20 14:52:29 +02:00
SPRINX0\prochazka
8aac9cf59d loading schemas indicator + error reporting 2024-09-20 14:40:14 +02:00
SPRINX0\prochazka
ce70b2e71a support separate schemas for mssql 2024-09-20 13:30:39 +02:00
SPRINX0\prochazka
62a5ef60f6 v5.4.5-beta.9 2024-09-20 12:56:32 +02:00
SPRINX0\prochazka
95430e9c11 useSeparateSchemas option for docker 2024-09-20 12:35:02 +02:00
SPRINX0\prochazka
1cee36cc9b force exit after tests 2024-09-20 12:29:29 +02:00
SPRINX0\prochazka
aa475f81a0 uncommented test + fix 2024-09-20 12:19:03 +02:00
SPRINX0\prochazka
1173d5db1d sqlserver fix 2024-09-20 10:49:53 +02:00
SPRINX0\prochazka
f34d0cbb90 fixed SQLite 2024-09-20 10:47:51 +02:00
SPRINX0\prochazka
780d187911 skip SQLite on CI 2024-09-20 10:41:31 +02:00
SPRINX0\prochazka
48d4374346 introduced dbhandle instead of overwriting 3rd party client's fields 2024-09-20 10:27:03 +02:00
Jan Prochazka
6b8b511d0d try to comment problematic test 2024-09-19 19:01:19 +02:00
Jan Prochazka
c6be115634 fixed test 2024-09-19 18:54:29 +02:00
Jan Prochazka
dadde225f1 db handle 2024-09-19 18:53:12 +02:00
Jan Prochazka
31dfc1dc28 try to fix test 2024-09-19 18:42:08 +02:00
Jan Prochazka
1de163af44 unique db name prop 2024-09-19 18:38:30 +02:00
Jan Prochazka
2181eada53 fix 2024-09-19 18:35:36 +02:00
Jan Prochazka
75e63d2710 JEST --detectOpenHandles flag 2024-09-19 18:31:52 +02:00
Jan Prochazka
fbfcdcbc40 Revert "test"
This reverts commit 0238e6a7f1.
2024-09-19 18:30:52 +02:00
Jan Prochazka
0238e6a7f1 test 2024-09-19 18:29:35 +02:00
Jan Prochazka
be17301c91 try to fix test 2024-09-19 18:23:25 +02:00
Jan Prochazka
b1118c7f43 try to fix test 2024-09-19 18:17:30 +02:00
Jan Prochazka
24bf5e5b0c fix 2024-09-19 18:10:27 +02:00
Jan Prochazka
122471f81f neutral schema cond 2024-09-19 18:03:20 +02:00
Jan Prochazka
a6136cee25 skipSeparateSchemas flag 2024-09-19 17:07:34 +02:00
Jan Prochazka
83357ba2cc fix 2024-09-19 16:05:54 +02:00
Jan Prochazka
4fe10b26b0 postgre analyser fix 2024-09-19 16:00:54 +02:00
Jan Prochazka
485f6c9759 v5.4.5-beta.8 2024-09-19 15:22:00 +02:00
Jan Prochazka
732c5b763b fix 2024-09-19 15:21:41 +02:00
Jan Prochazka
4431d08a88 separate schema selector in frontend 2024-09-19 15:19:16 +02:00
Jan Prochazka
cb7224ac94 fix 2024-09-19 14:23:34 +02:00
Jan Prochazka
66b39c1f80 fixes 2024-09-19 14:17:11 +02:00
Jan Prochazka
b30f139b5d postgre analyser supports compisite db names 2024-09-19 14:15:22 +02:00
Jan Prochazka
f39ec26c29 UI fix 2024-09-19 13:43:55 +02:00
Jan Prochazka
8c3c32aeba default schema refactor 2024-09-19 13:41:49 +02:00
Jan Prochazka
9eb27f5e92 refresh schema list 2024-09-19 11:15:44 +02:00
Jan Prochazka
3e5b45de8f schemaList moved from dbinfo to separate request 2024-09-19 10:59:09 +02:00
Jan Prochazka
e7b4a6ffcc Merge branch 'feature/db-schema' 2024-09-19 09:53:55 +02:00
Jan Prochazka
e7ec75138d fix 2024-09-19 09:52:54 +02:00
Jan Prochazka
6c4679d83b fixed scenario after save table 2024-09-19 09:32:49 +02:00
Jan Prochazka
5f23b29c4e create table in multi-schema 2024-09-19 09:24:08 +02:00
Jan Prochazka
55db98fe1b removed unused imports 2024-09-19 09:02:20 +02:00
Jan Prochazka
f7c5ffa0ce create table - changed workflow 2024-09-19 09:00:13 +02:00
Jan Prochazka
d1e98e5640 fixed incremental analysis when changed schema+test 2024-09-18 16:30:45 +02:00
Jan Prochazka
e785fdf9b7 test fix for clickhouse 2024-09-18 16:16:46 +02:00
Jan Prochazka
fc0db925c5 schema analyser test 2024-09-18 16:01:02 +02:00
SPRINX0\prochazka
5ab686b721 schema update in database analyser 2024-09-18 15:37:34 +02:00
SPRINX0\prochazka
327d43096f schema selector is cached by conid and database 2024-09-18 14:07:33 +02:00
SPRINX0\prochazka
1f7b632553 fix - show schema selector, when no schema is available 2024-09-18 14:01:58 +02:00
SPRINX0\prochazka
592d7987ab show objects by schemas 2024-09-18 13:50:02 +02:00
SPRINX0\prochazka
c429424fda v5.4.5-beta.7 2024-09-17 17:17:32 +02:00
SPRINX0\prochazka
0b4709d383 import/export tab title 2024-09-17 17:14:57 +02:00
SPRINX0\prochazka
336929ff3f export menu changed 2024-09-17 16:56:41 +02:00
SPRINX0\prochazka
677f83cc4b fixed filtering in json columns for postgres #889 2024-09-17 16:42:07 +02:00
SPRINX0\prochazka
5c58c35a64 Merge branch 'feature/import-export' 2024-09-17 16:17:46 +02:00
SPRINX0\prochazka
b346a458a6 fix 2024-09-17 16:00:59 +02:00
SPRINX0\prochazka
226512a4ca removed open wizard from shell function 2024-09-17 15:50:01 +02:00
SPRINX0\prochazka
a0527d78e9 save import/export jobs 2024-09-17 15:47:40 +02:00
SPRINX0\prochazka
3357295d98 removed ImportExportModal 2024-09-17 15:19:45 +02:00
SPRINX0\prochazka
fc6a43b4fe download fileat first in imports 2024-09-17 15:06:54 +02:00
SPRINX0\prochazka
260b2e4b12 JSON export - support for object style, key field, root field 2024-09-17 14:28:31 +02:00
Jan Prochazka
f080b18d3f refactor 2024-09-17 13:47:28 +02:00
Jan Prochazka
56f015ffd5 JSON import rootField support 2024-09-17 13:23:51 +02:00
Jan Prochazka
fd8a28831e JSON object import 2024-09-17 12:52:53 +02:00
Jan Prochazka
503e09ddd1 import test small refactor 2024-09-17 12:40:41 +02:00
Jan Prochazka
880912806a JSON import 2024-09-17 12:32:03 +02:00
SPRINX0\prochazka
665ce22741 JSON import 2024-09-17 12:16:59 +02:00
SPRINX0\prochazka
e5c9ec7681 Merge branch 'feature/import-export' of https://github.com/dbgate/dbgate into feature/import-export 2024-09-17 10:45:43 +02:00
SPRINX0\prochazka
74fceeec78 fix 2024-09-17 10:45:41 +02:00
Jan Prochazka
77d60ccfa5 auto-detect CSV delimiter in test 2024-09-17 10:29:49 +02:00
SPRINX0\prochazka
0c2b25f79a auto-detect CSV delimiter 2024-09-17 10:28:58 +02:00
Jan Prochazka
4065e05013 CSV import fixed 2024-09-17 09:59:47 +02:00
Jan Prochazka
319a7fd003 csv import test (failing) 2024-09-16 18:50:14 +02:00
Jan Prochazka
26c01f43f9 drag & drop file to export/import tab 2024-09-16 17:28:30 +02:00
Jan Prochazka
88d7e07bea fixed upload file 2024-09-16 17:16:54 +02:00
SPRINX0\prochazka
a9a5a3491e showModal(ImportExportModal => openImportExportTab 2024-09-16 13:03:49 +02:00
SPRINX0\prochazka
d255273368 open import/export tab function 2024-09-16 12:47:13 +02:00
SPRINX0\prochazka
a7846b4adf import export tab working 2024-09-16 12:15:43 +02:00
SPRINX0\prochazka
ce431e6e21 fix 2024-09-16 10:25:52 +02:00
SPRINX0\prochazka
f8e39a2a5d runtests on feature branch 2024-09-16 10:25:30 +02:00
SPRINX0\prochazka
e5135b1a9d run tests on feature branch 2024-09-16 10:23:31 +02:00
SPRINX0\prochazka
c45a6f1299 v5.4.5-beta.6 2024-09-16 10:11:21 +02:00
SPRINX0\prochazka
873e60c26a Merge branch 'develop' 2024-09-16 10:10:54 +02:00
Jan Prochazka
b0134b221b added clickhouse server 2024-09-16 09:56:14 +02:00
Jan Prochazka
f4bb13f617 clickhouse tests - run on CI 2024-09-16 09:51:01 +02:00
Jan Prochazka
c32955a7c9 skipped some tests for clickhouse 2024-09-16 09:50:36 +02:00
Jan Prochazka
f8fe444f29 clickhouse table analyser test 2024-09-16 09:47:44 +02:00
Jan Prochazka
08dd2ae38f table analyse test WIP 2024-09-13 16:30:48 +02:00
Jan Prochazka
a88a64710b analysis test refactor 2024-09-13 16:11:38 +02:00
Jan Prochazka
c410a7bb07 clickhouse tests 2024-09-13 15:55:33 +02:00
Jan Prochazka
0ba7b5fb39 skip preload tests for clickhouse - not supported 2024-09-13 15:51:10 +02:00
Jan Prochazka
334440f691 fixed import for clickhouse 2024-09-13 15:47:12 +02:00
Jan Prochazka
89c9d5e792 skip data duplicator 2024-09-13 15:24:29 +02:00
Jan Prochazka
0d1b6702a7 alter table tests 2024-09-13 15:11:45 +02:00
Jan Prochazka
b366a7d451 alter table fixes WIP 2024-09-13 14:23:37 +02:00
Jan Prochazka
c1106c1b01 v5.4.5-alpha.5 2024-09-13 13:36:55 +02:00
Jan Prochazka
9c48608588 clickhouse plugin version 2024-09-13 13:36:37 +02:00
Jan Prochazka
b32a6daeab v5.4.5-alpha.4 2024-09-13 13:23:59 +02:00
Jan Prochazka
b1f018905b publish clickhouse plugin 2024-09-13 13:23:46 +02:00
Jan Prochazka
17537e592f v5.4.5-alpha.3 2024-09-13 13:11:22 +02:00
Jan Prochazka
0211cf59af clickhouse tests WIP 2024-09-13 13:09:33 +02:00
Jan Prochazka
2728d60422 updated upload-artifacts action 2024-09-12 16:13:52 +02:00
Jan Prochazka
e5079f6dbf v5.4.5-beta.2 2024-09-12 16:00:04 +02:00
Jan Prochazka
487ac94034 updated upload artifact action 2024-09-12 15:59:50 +02:00
Jan Prochazka
fda350c05b v5.4.5-beta.1 2024-09-12 15:57:46 +02:00
Jan Prochazka
6f32e27eec fix 2024-09-12 15:53:56 +02:00
Jan Prochazka
3c4fad108b clickhouse fix 2024-09-12 15:42:16 +02:00
Jan Prochazka
b232263708 clickhouse import 2024-09-12 15:39:48 +02:00
Jan Prochazka
086bc0d9f3 clickhouse export 2024-09-12 14:40:29 +02:00
Jan Prochazka
e21c6d4872 clickhouse - view support, incremental structure update 2024-09-12 13:49:10 +02:00
Jan Prochazka
d2e49967e4 hide indexes from clickhouse 2024-09-12 13:20:15 +02:00
Jan Prochazka
2f1cbbd75e sorting key support, clickhouse recreate table support 2024-09-12 11:59:03 +02:00
Jan Prochazka
670cfb9dc0 don't show primary key name when anonymousePrimaryKey=true 2024-09-12 09:04:52 +02:00
Jan Prochazka
e54bd1da3f logs 2024-09-12 08:43:34 +02:00
Jan Prochazka
fb2b47615f option allowEmptyValue flag 2024-09-12 08:37:09 +02:00
Jan Prochazka
00a6c19f09 test log messages 2024-09-12 08:24:50 +02:00
Jan Prochazka
51c8169232 Revert "try to fix build"
This reverts commit 8ab814cb8b.
2024-09-12 08:14:46 +02:00
Jan Prochazka
8ab814cb8b try to fix build 2024-09-12 07:54:45 +02:00
Jan Prochazka
577517e043 fix connection label 2024-09-12 07:01:14 +02:00
Jan Prochazka
d17a667cf4 postgres and oracle don't have anonymouse PKs 2024-09-11 17:06:52 +02:00
Jan Prochazka
575f8f23a7 clickhouse: sorting key editor support 2024-09-11 16:53:11 +02:00
Jan Prochazka
33eed816aa clickhouse: rename & change column 2024-09-11 16:28:35 +02:00
Jan Prochazka
08fce96691 specificNullabilityImplementation 2024-09-11 15:43:14 +02:00
Jan Prochazka
f74533b42f clickhouse: added specifcNotNull dialect option 2024-09-11 15:41:26 +02:00
Jan Prochazka
fb39cd1302 clickhouse + mysql: modify table option 2024-09-11 15:09:16 +02:00
Jan Prochazka
7ad1950777 getTableFormOptions moved to dialect 2024-09-11 14:01:11 +02:00
Jan Prochazka
b0165c14e9 tabl eoptions for mysql - comment, engine 2024-09-11 13:41:04 +02:00
Jan Prochazka
4f429c27c0 fix 2024-09-11 13:05:42 +02:00
Jan Prochazka
ff33ec668b clickhouse: edit table options 2024-09-11 12:51:09 +02:00
Jan Prochazka
f6e0b634f0 collapsible table editor parts 2024-09-11 09:53:16 +02:00
Jan Prochazka
36a65ea13a mysql - engine 2024-09-11 09:34:26 +02:00
Jan Prochazka
ae9ffe1aef editing table works 2024-09-11 09:16:08 +02:00
Jan Prochazka
15c400747e table engine shown in object tree 2024-09-11 08:48:15 +02:00
Jan Prochazka
448c15c308 supportsTransactions driver parameter 2024-09-11 08:16:54 +02:00
SPRINX0\prochazka
293ef047d0 add column WIP 2024-09-10 16:32:02 +02:00
SPRINX0\prochazka
5c50faa0a2 clickhouse: show sorting key 2024-09-10 16:14:25 +02:00
SPRINX0\prochazka
18e6200c3b wip 2024-09-10 15:42:22 +02:00
SPRINX0\prochazka
8d865ab3b3 clickhouse: nullable types 2024-09-10 15:18:57 +02:00
SPRINX0\prochazka
ceb51a2597 basic driver works 2024-09-10 14:38:33 +02:00
SPRINX0\prochazka
f2d29f97dc clickhouse plugin - initial import 2024-09-10 14:29:51 +02:00
SPRINX0\prochazka
d75f533b76 v5.4.4 2024-09-10 09:48:33 +02:00
SPRINX0\prochazka
7e74ce8366 Revert "temp build beta app - only windows"
This reverts commit 52e774f2cc.
2024-09-10 09:47:50 +02:00
SPRINX0\prochazka
c2f41e51da changelog 2024-09-10 09:42:00 +02:00
SPRINX0\prochazka
9158b69b1e v5.4.4-beta.12 2024-09-10 09:29:51 +02:00
SPRINX0\prochazka
f9ce6ed8f4 changelog 2024-09-10 09:23:17 +02:00
SPRINX0\prochazka
2f90106e32 v5.4.4-beta.11 2024-09-10 09:11:16 +02:00
SPRINX0\prochazka
a74f6db1e0 messages 2024-09-10 09:11:04 +02:00
SPRINX0\prochazka
f1ad4e190a shorter update messages 2024-09-10 09:08:30 +02:00
SPRINX0\prochazka
52e774f2cc temp build beta app - only windows 2024-09-10 09:07:05 +02:00
SPRINX0\prochazka
14331501ba fixed electron build 2024-09-10 09:06:47 +02:00
SPRINX0\prochazka
49e00a8a0f Merge branch 'autoupgrade' 2024-09-10 08:49:58 +02:00
SPRINX0\prochazka
69bc9d6111 v5.4.4-beta.10 2024-09-10 08:41:15 +02:00
SPRINX0\prochazka
64ab1bb111 autoupgrade status 2024-09-10 08:41:03 +02:00
SPRINX0\prochazka
818f4eaa10 update status with icon 2024-09-10 08:34:14 +02:00
SPRINX0\prochazka
6e6699f60a copy blockmap 2024-09-09 17:00:21 +02:00
SPRINX0\prochazka
ba665931dd v5.4.4-beta.9 2024-09-09 16:58:47 +02:00
SPRINX0\prochazka
5b010bcc53 fix 2024-09-09 16:58:11 +02:00
SPRINX0\prochazka
fdb5fdfadd fix 2024-09-09 16:54:11 +02:00
SPRINX0\prochazka
628d8eb5dc fix 2024-09-09 16:33:47 +02:00
SPRINX0\prochazka
a78c375b90 v5.4.4-beta.8 2024-09-09 16:22:48 +02:00
SPRINX0\prochazka
f5f653965f copy blockmap 2024-09-09 16:21:50 +02:00
SPRINX0\prochazka
0ea84fe034 checking for updates moved into app-ready 2024-09-09 16:16:47 +02:00
SPRINX0\prochazka
11e8cff77e v5.4.4-beta.7 2024-09-09 16:04:04 +02:00
SPRINX0\prochazka
2db3f14509 configurable auto-update mode 2024-09-09 16:03:49 +02:00
SPRINX0\prochazka
db1d4aa555 v5.4.4-beta.6 2024-09-09 15:52:08 +02:00
SPRINX0\prochazka
1fcaf08644 auto close snackbar 2024-09-09 15:51:41 +02:00
SPRINX0\prochazka
703a4bdb57 checking for update 2024-09-09 15:44:34 +02:00
SPRINX0\prochazka
3303fd1ee9 removed obsolete code 2024-09-09 14:51:43 +02:00
SPRINX0\prochazka
5b796a4d88 v5.4.4-premium-beta.5 2024-09-09 13:12:47 +02:00
SPRINX0\prochazka
e5ab354d15 v5.4.4-premim-beta.4 2024-09-09 13:11:54 +02:00
SPRINX0\prochazka
8fc8bc19d4 build 2024-09-09 13:11:32 +02:00
SPRINX0\prochazka
590bd166fd v5.4.4-beta.3 2024-09-09 13:09:30 +02:00
SPRINX0\prochazka
4f360eec96 premium beta build 2024-09-09 13:08:21 +02:00
SPRINX0\prochazka
d9c16e6d01 v5.4.4-beta.2 2024-09-09 11:59:26 +02:00
SPRINX0\prochazka
2a94e5da27 v5.4.4-beta.1 2024-09-09 11:53:51 +02:00
SPRINX0\prochazka
cb32d2152e Merge branch 'master' of https://github.com/dbgate/dbgate 2024-09-09 11:48:51 +02:00
SPRINX0\prochazka
a5d482ad18 updated electron updater 2024-09-09 11:48:48 +02:00
Jan Prochazka
017366f3aa v5.4.3 2024-09-06 14:19:47 +02:00
Jan Prochazka
582c982a9c build fix 2024-09-06 14:19:26 +02:00
Jan Prochazka
ad6a93bfb5 v5.4.3-beta.1 2024-09-06 14:02:04 +02:00
Jan Prochazka
bc92a63111 fixed SSL with MongoDB #885 2024-09-06 13:59:17 +02:00
Jan Prochazka
5ff1009c22 fix 2024-09-06 13:52:22 +02:00
Jan Prochazka
c1e6a01b63 v5.4.2 2024-09-06 13:50:32 +02:00
SPRINX0\prochazka
5daf64360c v5.4.2-beta.7 2024-09-05 15:26:02 +02:00
SPRINX0\prochazka
3fd887d6cf oracle fixes 2024-09-05 15:09:09 +02:00
SPRINX0\prochazka
486d7a946d build all platforms 2024-09-05 14:20:25 +02:00
SPRINX0\prochazka
22a81ed2ee refactor 2024-09-05 13:58:31 +02:00
SPRINX0\prochazka
77b6bddd87 oracle views fixed 2024-09-05 13:43:40 +02:00
SPRINX0\prochazka
0085505b7d v5.4.2-pro.6 2024-09-05 13:17:50 +02:00
SPRINX0\prochazka
880b07a328 support range select for oracle <12 2024-09-05 13:13:58 +02:00
SPRINX0\prochazka
f0f9be3051 v5.4.2-pro.5 2024-09-05 12:59:10 +02:00
SPRINX0\prochazka
176f28a178 v5.3.2-pro.4 2024-09-05 12:36:34 +02:00
SPRINX0\prochazka
e31c377d4e build only windows temporarily 2024-09-05 12:35:44 +02:00
SPRINX0\prochazka
0f247450c7 electron updater allow prerelease 2024-09-05 12:34:38 +02:00
SPRINX0\prochazka
2e67769491 v5.4.2-pro.3 2024-09-05 12:04:30 +02:00
SPRINX0\prochazka
b80c428224 changed manifest build 2024-09-05 11:52:53 +02:00
SPRINX0\prochazka
6940bb4556 fix 2024-09-05 11:40:40 +02:00
SPRINX0\prochazka
44142e8b25 added info to trial payload 2024-09-05 11:05:15 +02:00
SPRINX0\prochazka
ccb22be8bf v5.4.2-pro.2 2024-09-05 10:23:14 +02:00
SPRINX0\prochazka
64ff5d61a4 fix 2024-09-05 10:23:02 +02:00
299 changed files with 39391 additions and 2381 deletions

View File

@@ -30,6 +30,9 @@ jobs:
- name: yarn adjustPackageJson
run: |
yarn adjustPackageJson
- name: setUpdaterChannel beta
run: |
node setUpdaterChannel beta
- name: yarn set timeout
run: |
yarn config set network-timeout 100000
@@ -99,9 +102,13 @@ jobs:
mv app/dist/*.deb artifacts/ || true
mv app/dist/*.snap artifacts/ || true
mv app/dist/*.dmg artifacts/ || true
mv app/dist/*.blockmap artifacts/ || true
mv app/dist/*.yml artifacts/ || true
rm artifacts/builder-debug.yml
- name: Upload artifacts
uses: actions/upload-artifact@v1
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}
path: artifacts

View File

@@ -1,10 +1,9 @@
name: Electron app BETA PREMIUM
name: Electron app PREMIUM BETA
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-pro.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
jobs:
build:
@@ -13,6 +12,7 @@ jobs:
strategy:
fail-fast: false
matrix:
# os: [windows-2022]
# os: [ubuntu-22.04]
# os: [windows-2022, ubuntu-22.04]
os: [macos-12, windows-2022, ubuntu-22.04]
@@ -55,11 +55,12 @@ jobs:
cd ..
cd dbgate-merged
yarn adjustPackageJson
- name: yarn adjustPackageJsonPremium
- name: adjustPackageJsonPremium
run: |
cd ..
cd dbgate-merged
node adjustPackageJsonPremium
- name: setUpdaterChannel premium-beta
run: |
cd ..
cd dbgate-merged
@@ -131,11 +132,13 @@ jobs:
mv ../dbgate-merged/app/dist/*.deb artifacts/ || true
mv ../dbgate-merged/app/dist/*.snap artifacts/ || true
mv ../dbgate-merged/app/dist/*.dmg artifacts/ || true
mv ../dbgate-merged/app/dist/*.blockmap artifacts/ || true
mv ../dbgate-merged/app/dist/*.yml artifacts/ || true
rm artifacts/builder-debug.yml
- name: Upload artifacts
uses: actions/upload-artifact@v1
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}
path: artifacts

View File

@@ -61,6 +61,11 @@ jobs:
cd ..
cd dbgate-merged
node adjustPackageJsonPremium
- name: setUpdaterChannel premium
run: |
cd ..
cd dbgate-merged
node setUpdaterChannel premium
- name: yarn set timeout
run: |
cd ..
@@ -130,24 +135,13 @@ jobs:
mv ../dbgate-merged/app/dist/*.AppImage artifacts/ || true
mv ../dbgate-merged/app/dist/*.deb artifacts/ || true
mv ../dbgate-merged/app/dist/*.dmg artifacts/ || true
mv ../dbgate-merged/app/dist/*.blockmap artifacts/ || true
- name: Copy latest.yml (windows)
if: matrix.os == 'windows-2022'
run: |
mv ../dbgate-merged/app/dist/latest.yml artifacts/latest-premium.yml || true
- name: Copy latest-linux.yml
if: matrix.os == 'ubuntu-22.04'
run: |
mv ../dbgate-merged/app/dist/latest-linux.yml artifacts/latest-premium-linux.yml || true
- name: Copy latest-mac.yml
if: matrix.os == 'macos-12'
run: |
mv ../dbgate-merged/app/dist/latest-mac.yml artifacts/latest-premium-mac.yml || true
mv ../dbgate-merged/app/dist/*.yml artifacts/ || true
rm artifacts/builder-debug.yml
- name: Upload artifacts
uses: actions/upload-artifact@v1
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}
path: artifacts

View File

@@ -109,6 +109,10 @@ jobs:
mv app/dist/*.deb artifacts/ || true
mv app/dist/*.dmg artifacts/ || true
mv app/dist/*.snap artifacts/dbgate-latest.snap || true
mv app/dist/*.blockmap artifacts/ || true
mv app/dist/*.yml artifacts/ || true
rm artifacts/builder-debug.yml
# - name: Copy artifacts Linux, MacOs
# if: matrix.os != 'windows-2016'
@@ -134,24 +138,13 @@ jobs:
# mv app/dist/latest.yml artifacts/latest.yml || true
- name: Copy latest.yml (windows)
- name: Copy PAD file
if: matrix.os == 'windows-2022'
run: |
mv app/dist/latest.yml artifacts/latest.yml || true
mv app/dist/dbgate-pad.xml artifacts/ || true
- name: Copy latest-linux.yml
if: matrix.os == 'ubuntu-22.04'
run: |
mv app/dist/latest-linux.yml artifacts/latest-linux.yml || true
- name: Copy latest-mac.yml
if: matrix.os == 'macos-12'
run: |
mv app/dist/latest-mac.yml artifacts/latest-mac.yml || true
- name: Upload artifacts
uses: actions/upload-artifact@v1
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}
path: artifacts

View File

@@ -0,0 +1,112 @@
name: AWS image PREMIUM
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-packer-beta.[0-9]+'
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-22.04]
steps:
- name: Context
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
uses: actions/setup-node@v1
with:
node-version: 18.x
- name: Setup `packer`
uses: hashicorp/setup-packer@main
with:
version: latest
- name: Checkout dbgate/dbgate-pro
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
mv dbgate-pro/* ../dbgate-pro/
cd ..
mkdir dbgate-merged
cd dbgate-pro
cd sync
yarn
node sync.js --nowatch
cd ..
- name: yarn install
run: |
cd ..
cd dbgate-merged
yarn install
- name: setCurrentVersion
run: |
cd ..
cd dbgate-merged
yarn setCurrentVersion
- name: printSecrets
run: |
cd ..
cd dbgate-merged
yarn printSecrets
env:
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
- name: Prepare packer build
run: |
cd ..
cd dbgate-merged
yarn run prepare:packer
zip -r cloud-build.zip packer/build
- name: Copy artifacts
run: |
mkdir artifacts
cp ../dbgate-merged/cloud-build.zip artifacts/cloud-build.zip || true
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
with:
files: 'artifacts/**'
prerelease: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Run `packer init`
run: |
cd packer
packer init ./aws-ubuntu.pkr.hcl
- name: Run `packer build`
run: |
cd packer
packer build ./aws-ubuntu.pkr.hcl

View File

@@ -4,8 +4,7 @@ on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-docker.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
jobs:
build:

View File

@@ -5,7 +5,6 @@ on:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-docker.[0-9]+'
jobs:
build:

View File

@@ -154,3 +154,8 @@ jobs:
working-directory: plugins/dbgate-plugin-oracle
run: |
npm publish
- name: Publish dbgate-plugin-clickhouse
working-directory: plugins/dbgate-plugin-clickhouse
run: |
npm publish

View File

@@ -4,6 +4,7 @@ on:
branches:
- master
- develop
- 'feature/**'
jobs:
test-runner:
@@ -77,6 +78,11 @@ jobs:
ACCEPT_EULA: Y
SA_PASSWORD: Pwd2020Db
MSSQL_PID: Express
clickhouse:
image: bitnami/clickhouse:24.8.4
env:
CLICKHOUSE_ADMIN_PASSWORD: Pwd2020Db
# cockroachdb:
# image: cockroachdb/cockroach

View File

@@ -8,6 +8,85 @@ Builds:
- linux - application for linux
- win - application for Windows
### 5.5.6
- FIXED: DbGate process consumes 100% after UI closed - Mac, Linux (#917, #915)
- FIXED: Correctly closing connection behind SSH tunnel (#920)
- FIXED: Updating MongoDB documents on MongoDB 4 (#916)
- FIXED: (Premium) DbGate container correctly waits for underlying storage database, if database container is started after dbgate container is started
- FIXED: (Premium) Better handling of connection storage errors
### 5.5.5
- ADDED: AWS IAM authentication for MySQL, MariaDB, PostgreSQL (Premium)
- FIXED: Datetime filtering #912
- FIXED: Load redis keys
- ADDED: Query parameters #913
- FIXED: Data grid with hidden columns #911
- ADDED: Added buttons for one-click authentication methods (Anonymous, OAuth) (Team Premium)
- ADDED: Link for switching Admin/user login (Team Premium)
- FIXED: Save connection params in administration for MS SQL and Postgres storages (Team Premium)
### 5.5.4
- FIXED: correct handling when using LOGIN and PASSWORD env variables #903
- FIXED: fixed problems in dbmodel commandline tool
- ADDED: dbmodel - allow connection defined in environment variables
- FIXED: Load postgres schema on Azure #906
- FIXED: Oauth2 in combination with Google doesn't log payload #727
- CHANGED: Improved error reporting for unhandled errors
- CHANGED: Don't restart docker container in case of unhandled error
- FIXED: Crash when displaying specific data values from MongoDB #908
- ADDED: (Premium) Show purchase button after trial license is expired
### 5.5.3
- FIXED: Separate schema mode #894 - for databases with many schemas
- FIXED: Sort by UUID column in PostgreSQL #895
- ADDED: Load pg_dump outputs #893
- ADDED: Improved column mapping in import/export #330
- FIXED: Fixed some errors in create-table workflow
- CHANGED: Show single schema by default only if all objects are from default schema
- FIXED: MS Entra authentication for Azure SQL
### 5.5.2
- FIXED: MySQL, PostgreSQL readonly connections #900
### 5.5.1
- ADDED: Clickhouse support (#532)
- ADDED: MySQL - specify table engine, show table engine in table list
- FIXED: Hidden primary key name in PK editor for DB engines with anonymous PK (MySQL)
- CHANGED: Import/export dialog is now a tab instead of a modal
- ADDED: Saving import/export job
- REMOVED: Ability to reopen export/import wizard from generated script. This was a bit hack, now you could save import/export job instead
- ADDED: Autodetect CSV delimiter
- FIXED: Import CSV files with spaces around quotes
- ADDED: JSON file import
- ADDED: JSON export can export objects with ID field used as object key
- ADDED: JSON and JSON lines imports supports importing from web URL
- FIXED: Editing imported URL in job editor
- ADDED: Quick export from table data grid (#892)
- CHANGED: Create table workflow is reworked, you can specify schema and table name in table editor
- FIXED: After saving new table, table editor is reset to empty state
- ADDED: (PostgreSQL, SQL Server) - ability to filter objects by schema
- ADDED: (PostgreSQL, SQL Server) - Use separate schemas option - for databases with lot of schemas, only selected schema is loaded
- FIXED: Internal refactor of drivers, client objects are no longer messed up with auxiliary fields
- ADDED: Copy connection error to clipboard after clicking on error icon
- FIXED: (MySQL) Fixed importing SQL dump exported from mysqldump (#702)
- FIXED: (PostgreSQL) Fixed filtering JSONB fields (#889)
- FIXED: OIDC authentication not working anymore (#891)
- ADDED: Added tests for import from CSV and JSON
- FIXED: multiple shortcuts handling #898
- ADDED: (Premium) MS Entra authentication for Azure SQL databases
### 5.4.4
- CHANGED: Improved autoupdate, notification is now in app
- CHANGED: Default behaviour of autoupdate, new version is downloaded after click of "Download" button
- ADDED: Ability to configure autoupdate (check only, check+download, don't check)
- ADDED: Option to run check for new version manually
- FIXED: Fixed autoupgrade channel for premium edition
- FIXED: Fixes following issues: #886, #865, #782, #375
### 5.4.2
- FIXED: DbGate now works correctly with Oracle 10g
- FIXED: Fixed update channel for premium edition
### 5.4.1
- FIXED: Broken older plugins #881
- ADDED: Premium edition - "Start trial" button

View File

@@ -30,6 +30,7 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
* CockroachDB
* MariaDB
* CosmosDB (Premium)
* ClickHouse
<!-- Learn more about DbGate features at the [DbGate website](https://dbgate.org/), or try our online [demo application](https://demo.dbgate.org) -->
@@ -69,8 +70,8 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
* Redis tree view, generate script from keys, run Redis script
* Runs as application for Windows, Linux and Mac. Or in Docker container on server and in web Browser on client.
* Import, export from/to CSV, Excel, JSON, NDJSON, XML
* Free table editor - quick table data editing (cleanup data after import/before export, prototype tables etc.)
* Archives - backup your data in NDJSON files on local filesystem (or on DbGate server, when using web application)
* NDJSON data viewer and editor - browse NDJSON data, edit data and structure directly on NDJSON files. Works also for big NDJSON files
* Charts, export chart to HTML page
* For detailed info, how to run DbGate in docker container, visit [docker hub](https://hub.docker.com/r/dbgate/dbgate)
* Extensible plugin architecture

View File

@@ -6,7 +6,7 @@
"description": "Opensource database administration tool",
"dependencies": {
"electron-log": "^4.4.1",
"electron-updater": "^4.6.1",
"electron-updater": "^6.3.4",
"jsonwebtoken": "^9.0.2",
"lodash.clonedeepwith": "^4.5.0",
"patch-package": "^6.4.7"

View File

@@ -18,7 +18,6 @@ const url = require('url');
const mainMenuDefinition = require('./mainMenuDefinition');
const { isProApp } = require('./proTools');
const updaterChannel = require('./updaterChannel');
let disableAutoUpgrade = false;
// require('@electron/remote/main').initialize();
@@ -29,6 +28,8 @@ let apiLoaded = false;
let mainModule;
// let getLogger;
// let loadLogsContent;
let appUpdateStatus = '';
let settingsJson = {};
process.on('uncaughtException', function (error) {
console.error('uncaughtException', error);
@@ -52,21 +53,11 @@ const isMac = () => os.platform() == 'darwin';
try {
initialConfig = JSON.parse(fs.readFileSync(configRootPath, { encoding: 'utf-8' }));
disableAutoUpgrade = initialConfig['disableAutoUpgrade'] || false;
} catch (err) {
console.log('Error loading config-root:', err.message);
initialConfig = {};
}
if (process.argv.includes('--disable-auto-upgrade')) {
console.log('Disabling auto-upgrade');
disableAutoUpgrade = true;
}
if (process.argv.includes('--enable-auto-upgrade')) {
console.log('Enabling auto-upgrade');
disableAutoUpgrade = false;
}
// Keep a global reference of the window object, if you don't, the window will
// be closed automatically when the JavaScript object is garbage collected.
let mainWindow;
@@ -77,6 +68,7 @@ log.transports.file.level = 'debug';
autoUpdater.logger = log;
if (updaterChannel) {
autoUpdater.channel = updaterChannel;
autoUpdater.allowPrerelease = updaterChannel.includes('beta');
}
// TODO - create settings for this
// appUpdater.channel = 'beta';
@@ -211,6 +203,15 @@ ipcMain.on('app-started', async (event, arg) => {
if (initialConfig['winIsMaximized']) {
mainWindow.webContents.send('setIsMaximized', true);
}
if (autoUpdater.isUpdaterActive()) {
mainWindow.webContents.send('setAppUpdaterActive');
}
if (!process.env.DEVMODE) {
if (settingsJson['app.autoUpdateMode'] != 'skip') {
autoUpdater.autoDownload = settingsJson['app.autoUpdateMode'] == 'download';
autoUpdater.checkForUpdates();
}
}
});
ipcMain.on('window-action', async (event, arg) => {
if (!mainWindow) {
@@ -284,6 +285,20 @@ ipcMain.handle('showItemInFolder', async (event, path) => {
ipcMain.handle('openExternal', async (event, url) => {
electron.shell.openExternal(url);
});
ipcMain.on('downloadUpdate', async (event, url) => {
autoUpdater.downloadUpdate();
changeAppUpdateStatus({
icon: 'icon loading',
message: `Downloading update...`,
});
});
ipcMain.on('applyUpdate', async (event, url) => {
autoUpdater.quitAndInstall(false, true);
});
ipcMain.on('check-for-updates', async (event, url) => {
autoUpdater.autoDownload = false;
autoUpdater.checkForUpdates();
});
function fillMissingSettings(value) {
const res = {
@@ -321,8 +336,6 @@ function ensureBoundsVisible(bounds) {
function createWindow() {
const datadir = path.join(os.homedir(), '.dbgate');
let settingsJson = {};
let licenseKey = null;
try {
settingsJson = fillMissingSettings(
JSON.parse(fs.readFileSync(path.join(datadir, 'settings.json'), { encoding: 'utf-8' }))
@@ -331,14 +344,6 @@ function createWindow() {
console.log('Error loading settings.json:', err.message);
settingsJson = fillMissingSettings({});
}
if (isProApp()) {
try {
licenseKey = fs.readFileSync(path.join(datadir, 'license.key'), { encoding: 'utf-8' });
} catch (err) {
console.log('Error loading license.key:', err.message);
licenseKey = null;
}
}
let bounds = initialConfig['winBounds'];
if (bounds) {
@@ -354,7 +359,7 @@ function createWindow() {
titleBarStyle: useNativeMenu ? undefined : 'hidden',
...bounds,
icon: os.platform() == 'win32' ? 'icon.ico' : path.resolve(__dirname, '../icon.png'),
partition: 'persist:dbgate',
partition: isProApp() ? 'persist:dbgate-premium' : 'persist:dbgate',
webPreferences: {
nodeIntegration: true,
contextIsolation: false,
@@ -388,7 +393,6 @@ function createWindow() {
JSON.stringify({
winBounds: mainWindow.getBounds(),
winIsMaximized: mainWindow.isMaximized(),
disableAutoUpgrade,
}),
'utf-8'
);
@@ -458,13 +462,61 @@ function createWindow() {
});
}
function changeAppUpdateStatus(status) {
appUpdateStatus = status;
mainWindow.webContents.send('app-update-status', appUpdateStatus);
}
autoUpdater.on('checking-for-update', () => {
console.log('Checking for updates');
changeAppUpdateStatus({
icon: 'icon loading',
message: 'Checking for updates...',
});
});
autoUpdater.on('update-available', info => {
console.log('Update available', info);
if (autoUpdater.autoDownload) {
changeAppUpdateStatus({
icon: 'icon loading',
message: `Downloading update...`,
});
} else {
mainWindow.webContents.send('update-available', info.version);
changeAppUpdateStatus({
icon: 'icon download',
message: `Update available`,
});
}
});
autoUpdater.on('update-not-available', info => {
console.log('Update not available', info);
changeAppUpdateStatus({
icon: 'icon check',
message: `No new updates`,
});
});
autoUpdater.on('update-downloaded', info => {
console.log('Update downloaded from', info);
changeAppUpdateStatus({
icon: 'icon download',
message: `Downloaded ${info.version}`,
});
mainWindow.webContents.send('downloaded-new-version', info.version);
});
autoUpdater.on('error', error => {
changeAppUpdateStatus({
icon: 'icon error',
message: `Autoupdate error`,
});
console.error('Update error', error);
});
function onAppReady() {
if (disableAutoUpgrade) {
console.log('Auto-upgrade is disabled, run dbgate --enable-auto-upgrade to enable');
}
if (!process.env.DEVMODE && !disableAutoUpgrade) {
autoUpdater.checkForUpdatesAndNotify();
}
createWindow();
}

View File

@@ -105,6 +105,8 @@ module.exports = ({ editMenu }) => [
{ command: 'settings.commands', hideDisabled: true },
{ command: 'tabs.changelog', hideDisabled: true },
{ command: 'about.show', hideDisabled: true },
{ divider: true },
{ command: 'file.checkForUpdates', hideDisabled: true },
],
},
];

View File

@@ -193,11 +193,6 @@
dependencies:
"@types/node" "*"
"@types/semver@^7.3.6":
version "7.5.8"
resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.8.tgz#8268a8c57a3e4abd25c165ecd36237db7948a55e"
integrity sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==
"@types/verror@^1.10.3":
version "1.10.10"
resolved "https://registry.yarnpkg.com/@types/verror/-/verror-1.10.10.tgz#d5a4b56abac169bfbc8b23d291363a682e6fa087"
@@ -504,14 +499,6 @@ buffer@^5.1.0, buffer@^5.5.0:
base64-js "^1.3.1"
ieee754 "^1.1.13"
builder-util-runtime@8.9.2:
version "8.9.2"
resolved "https://registry.yarnpkg.com/builder-util-runtime/-/builder-util-runtime-8.9.2.tgz#a9669ae5b5dcabfe411ded26678e7ae997246c28"
integrity sha512-rhuKm5vh7E0aAmT6i8aoSfEjxzdYEFX7zDApK+eNgOhjofnWb74d9SRJv0H/8nsgOkos0TZ4zxW0P8J4N7xQ2A==
dependencies:
debug "^4.3.2"
sax "^1.2.4"
builder-util-runtime@9.0.2:
version "9.0.2"
resolved "https://registry.yarnpkg.com/builder-util-runtime/-/builder-util-runtime-9.0.2.tgz#dc54f8581bbcf1e0428da4483fa46d09524be857"
@@ -520,6 +507,14 @@ builder-util-runtime@9.0.2:
debug "^4.3.4"
sax "^1.2.4"
builder-util-runtime@9.2.5:
version "9.2.5"
resolved "https://registry.yarnpkg.com/builder-util-runtime/-/builder-util-runtime-9.2.5.tgz#0afdffa0adb5c84c14926c7dd2cf3c6e96e9be83"
integrity sha512-HjIDfhvqx/8B3TDN4GbABQcgpewTU4LMRTQPkVpKYV3lsuxEJoIfvg09GyWTNmfVNSUAYf+fbTN//JX4TH20pg==
dependencies:
debug "^4.3.4"
sax "^1.2.4"
builder-util@23.0.9:
version "23.0.9"
resolved "https://registry.yarnpkg.com/builder-util/-/builder-util-23.0.9.tgz#8b1aeeeee679060e39ad2bd0f50f5b3f3cb53a59"
@@ -791,7 +786,7 @@ crypto-random-string@^2.0.0:
resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5"
integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==
debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4:
debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.4:
version "4.3.4"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
@@ -1012,19 +1007,19 @@ electron-publish@23.0.9:
lazy-val "^1.0.5"
mime "^2.5.2"
electron-updater@^4.6.1:
version "4.6.5"
resolved "https://registry.yarnpkg.com/electron-updater/-/electron-updater-4.6.5.tgz#e9a75458bbfd6bb41a58a829839e150ad2eb2d3d"
integrity sha512-kdTly8O9mSZfm9fslc1mnCY+mYOeaYRy7ERa2Fed240u01BKll3aiupzkd07qKw69KvhBSzuHroIW3mF0D8DWA==
electron-updater@^6.3.4:
version "6.3.4"
resolved "https://registry.yarnpkg.com/electron-updater/-/electron-updater-6.3.4.tgz#3934bc89875bb524c2cbbd11041114e97c0c2496"
integrity sha512-uZUo7p1Y53G4tl6Cgw07X1yF8Jlz6zhaL7CQJDZ1fVVkOaBfE2cWtx80avwDVi8jHp+I/FWawrMgTAeCCNIfAg==
dependencies:
"@types/semver" "^7.3.6"
builder-util-runtime "8.9.2"
fs-extra "^10.0.0"
builder-util-runtime "9.2.5"
fs-extra "^10.1.0"
js-yaml "^4.1.0"
lazy-val "^1.0.5"
lodash.escaperegexp "^4.1.2"
lodash.isequal "^4.5.0"
semver "^7.3.5"
semver "^7.6.3"
tiny-typed-emitter "^2.1.0"
electron@30.0.2:
version "30.0.2"
@@ -2407,7 +2402,7 @@ semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7:
resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.1.tgz#60bfe090bf907a25aa8119a72b9f90ef7ca281b2"
integrity sha512-f/vbBsu+fOiYt+lmwZV0rVwJScl46HppnOA1ZvIuBWKOTlllpyJ3bfVax76/OrhCH38dyxoDIA8K7uB963IYgA==
semver@^7.5.4:
semver@^7.5.4, semver@^7.6.3:
version "7.6.3"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143"
integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==
@@ -2650,6 +2645,11 @@ through2@^2.0.1:
readable-stream "~2.3.6"
xtend "~4.0.1"
tiny-typed-emitter@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/tiny-typed-emitter/-/tiny-typed-emitter-2.1.0.tgz#b3b027fdd389ff81a152c8e847ee2f5be9fad7b5"
integrity sha512-qVtvMxeXbVej0cQWKqVSSAHmKZEHAvxdF8HEUBFWts8h+xEo5m/lEiPakuyZ3BnCBjOD8i24kzNOiOLLgsSxhA==
tmp-promise@^3.0.2:
version "3.0.3"
resolved "https://registry.yarnpkg.com/tmp-promise/-/tmp-promise-3.0.3.tgz#60a1a1cc98c988674fcbfd23b6e3367bdeac4ce7"

View File

@@ -7,7 +7,9 @@ const { getAlterDatabaseScript, extendDatabaseInfo, generateDbPairingId } = requ
function flatSource() {
return _.flatten(
engines.map(engine => (engine.objects || []).map(object => [engine.label, object.type, object, engine]))
engines
.filter(x => !x.skipReferences)
.map(engine => (engine.objects || []).map(object => [engine.label, object.type, object, engine]))
);
}
@@ -41,7 +43,7 @@ async function testDatabaseDiff(conn, driver, mangle, createObject = null) {
}
describe('Alter database', () => {
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
'Drop referenced table - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDiff(conn, driver, db => {
@@ -65,3 +67,4 @@ describe('Alter database', () => {
})
);
});

View File

@@ -6,39 +6,44 @@ const engines = require('../engines');
const crypto = require('crypto');
const { getAlterTableScript, extendDatabaseInfo, generateDbPairingId } = require('dbgate-tools');
function pickImportantTableInfo(table) {
function pickImportantTableInfo(engine, table) {
const props = ['columnName'];
if (!engine.skipNullability) props.push('notNull');
if (!engine.skipAutoIncrement) props.push('autoIncrement');
return {
pureName: table.pureName,
columns: table.columns
.filter(x => x.columnName != 'rowid')
.map(fp.pick(['columnName', 'notNull', 'autoIncrement'])),
columns: table.columns.filter(x => x.columnName != 'rowid').map(fp.pick(props)),
};
}
function checkTableStructure(t1, t2) {
function checkTableStructure(engine, t1, t2) {
// expect(t1.pureName).toEqual(t2.pureName)
expect(pickImportantTableInfo(t1)).toEqual(pickImportantTableInfo(t2));
expect(pickImportantTableInfo(engine, t1)).toEqual(pickImportantTableInfo(engine, t2));
}
async function testTableDiff(conn, driver, mangle) {
async function testTableDiff(engine, conn, driver, mangle) {
await driver.query(conn, `create table t0 (id int not null primary key)`);
await driver.query(
conn,
`create table t1 (
col_pk int not null primary key,
col_std int null,
col_def int null default 12,
col_fk int null references t0(id),
col_idx int null,
col_uq int null unique,
col_ref int null unique
col_std int,
col_def int default 12,
${engine.skipReferences ? '' : 'col_fk int references t0(id),'}
col_idx int,
col_uq int ${engine.skipUnique ? '' : 'unique'} ,
col_ref int ${engine.skipUnique ? '' : 'unique'}
)`
);
await driver.query(conn, `create index idx1 on t1(col_idx)`);
if (!engine.skipIndexes) {
await driver.query(conn, `create index idx1 on t1(col_idx)`);
}
await driver.query(conn, `create table t2 (id int not null primary key, fkval int null references t1(col_ref))`);
if (!engine.skipReferences) {
await driver.query(conn, `create table t2 (id int not null primary key, fkval int null references t1(col_ref))`);
}
const tget = x => x.tables.find(y => y.pureName == 't1');
const structure1 = generateDbPairingId(extendDatabaseInfo(await driver.analyseFull(conn)));
@@ -53,7 +58,7 @@ async function testTableDiff(conn, driver, mangle) {
const structure2Real = extendDatabaseInfo(await driver.analyseFull(conn));
checkTableStructure(tget(structure2Real), tget(structure2));
checkTableStructure(engine, tget(structure2Real), tget(structure2));
// expect(stableStringify(structure2)).toEqual(stableStringify(structure2Real));
}
@@ -65,14 +70,22 @@ const TESTED_COLUMNS = ['col_pk', 'col_std', 'col_def', 'col_fk', 'col_ref', 'co
// const TESTED_COLUMNS = ['col_ref'];
function engines_columns_source() {
return _.flatten(engines.map(engine => TESTED_COLUMNS.map(column => [engine.label, column, engine])));
return _.flatten(
engines.map(engine =>
TESTED_COLUMNS.filter(col => !col.endsWith('_pk') || !engine.skipPkColumnTesting).map(column => [
engine.label,
column,
engine,
])
)
);
}
describe('Alter table', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Add column - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(conn, driver, tbl => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.push({
columnName: 'added',
dataType: 'int',
@@ -87,7 +100,7 @@ describe('Alter table', () => {
test.each(engines_columns_source())(
'Drop column - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(conn, driver, tbl => (tbl.columns = tbl.columns.filter(x => x.columnName != column)));
await testTableDiff(engine, conn, driver, tbl => (tbl.columns = tbl.columns.filter(x => x.columnName != column)));
})
);
@@ -95,6 +108,7 @@ describe('Alter table', () => {
'Change nullability - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(
engine,
conn,
driver,
tbl => (tbl.columns = tbl.columns.map(x => (x.columnName == column ? { ...x, notNull: true } : x)))
@@ -106,6 +120,7 @@ describe('Alter table', () => {
'Rename column - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(
engine,
conn,
driver,
tbl => (tbl.columns = tbl.columns.map(x => (x.columnName == column ? { ...x, columnName: 'col_renamed' } : x)))
@@ -116,7 +131,7 @@ describe('Alter table', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Drop index - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(conn, driver, tbl => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.indexes = [];
});
})

View File

@@ -5,7 +5,7 @@ const dataDuplicator = require('dbgate-api/src/shell/dataDuplicator');
const { runCommandOnDriver } = require('dbgate-tools');
describe('Data duplicator', () => {
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
'Insert simple data - %s',
testWrapper(async (conn, driver, engine) => {
runCommandOnDriver(conn, driver, dmp =>

View File

@@ -3,6 +3,7 @@ const stream = require('stream');
const { testWrapper } = require('../tools');
const tableWriter = require('dbgate-api/src/shell/tableWriter');
const copyStream = require('dbgate-api/src/shell/copyStream');
const importDatabase = require('dbgate-api/src/shell/importDatabase');
const fakeObjectReader = require('dbgate-api/src/shell/fakeObjectReader');
function createImportStream() {
@@ -72,4 +73,29 @@ describe('DB Import', () => {
})
);
test.each(engines.filter(x => x.dumpFile).map(engine => [engine.label, engine]))(
'Import SQL dump - %s',
testWrapper(async (conn, driver, engine) => {
// const reader = await fakeObjectReader({ delay: 10 });
// const reader = await fakeObjectReader();
await importDatabase({
systemConnection: conn,
driver,
inputFile: engine.dumpFile,
});
const structure = await driver.analyseFull(conn);
for (const check of engine.dumpChecks || []) {
const res = await driver.query(conn, check.sql);
expect(res.rows[0].res.toString()).toEqual(check.res);
}
// const res1 = await driver.query(conn, `select count(*) as cnt from t1`);
// expect(res1.rows[0].cnt.toString()).toEqual('6');
// const res2 = await driver.query(conn, `select count(*) as cnt from t2`);
// expect(res2.rows[0].cnt.toString()).toEqual('6');
})
);
});

View File

@@ -1,11 +1,12 @@
/// TODO
const { testWrapper } = require('../tools');
const { testWrapper, testWrapperPrepareOnly } = require('../tools');
const _ = require('lodash');
const engines = require('../engines');
const deployDb = require('dbgate-api/src/shell/deployDb');
const { databaseInfoFromYamlModel } = require('dbgate-tools');
const generateDeploySql = require('dbgate-api/src/shell/generateDeploySql');
const connectUtility = require('dbgate-api/src/utility/connectUtility');
function checkStructure(structure, model) {
const expected = databaseInfoFromYamlModel(model);
@@ -23,7 +24,8 @@ async function testDatabaseDeploy(conn, driver, dbModelsYaml, testEmptyLastScrip
let index = 0;
for (const loadedDbModel of dbModelsYaml) {
const { sql, isEmpty } = await generateDeploySql({
systemConnection: conn,
systemConnection: conn.isPreparedOnly ? undefined : conn,
connection: conn.isPreparedOnly ? conn : undefined,
driver,
loadedDbModel,
});
@@ -36,7 +38,8 @@ async function testDatabaseDeploy(conn, driver, dbModelsYaml, testEmptyLastScrip
}
await deployDb({
systemConnection: conn,
systemConnection: conn.isPreparedOnly ? undefined : conn,
connection: conn.isPreparedOnly ? conn : undefined,
driver,
loadedDbModel,
});
@@ -44,7 +47,9 @@ async function testDatabaseDeploy(conn, driver, dbModelsYaml, testEmptyLastScrip
index++;
}
const structure = await driver.analyseFull(conn);
const dbhan = conn.isPreparedOnly ? await connectUtility(driver, conn, 'read') : conn;
const structure = await driver.analyseFull(dbhan);
if (conn.isPreparedOnly) await driver.close(dbhan);
checkStructure(structure, dbModelsYaml[dbModelsYaml.length - 1]);
}
@@ -67,6 +72,24 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Deploy database simple - %s - not connected',
testWrapperPrepareOnly(async (conn, driver, engine) => {
await testDatabaseDeploy(conn, driver, [
[
{
name: 't1.table.yaml',
json: {
name: 't1',
columns: [{ name: 'id', type: 'int' }],
primaryKey: ['id'],
},
},
],
]);
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Deploy database simple twice - %s',
testWrapper(async (conn, driver, engine) => {
@@ -167,7 +190,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
'Foreign keys - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
@@ -222,7 +245,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
'Deploy preloaded data - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(conn, driver, [
@@ -251,7 +274,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
'Deploy preloaded data - update - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(conn, driver, [

View File

@@ -0,0 +1,151 @@
const dbgateApi = require('dbgate-api/src/shell');
// const jsonLinesWriter = require('dbgate-api/src/shell/jsonLinesWriter');
const tmp = require('tmp');
// const dbgatePluginCsv = require('dbgate-plugin-csv/src/backend');
const fs = require('fs');
const requirePlugin = require('dbgate-api/src/shell/requirePlugin');
const CSV_DATA = `Issue Number; Title; Github URL; Labels; State; Created At; Updated At; Reporter; Assignee
801; "Does it 'burst' the database on startup or first lUI load ? "; https://github.com/dbgate/dbgate/issues/801; ""; open; 05/23/2024; 05/23/2024; rgarrigue;
799; "BUG: latest AppImage crashes on opening in Fedora 39"; https://github.com/dbgate/dbgate/issues/799; ""; open; 05/21/2024; 05/24/2024; BenGraham-Git;
798; "MongoDB write operations fail"; https://github.com/dbgate/dbgate/issues/798; "bug,solved"; open; 05/21/2024; 05/24/2024; mahmed0715;
797; "BUG: Unable to open SQL files"; https://github.com/dbgate/dbgate/issues/797; "bug"; open; 05/20/2024; 05/21/2024; cesarValdivia;
795; "BUG: MS SQL Server connection error (KEY_USAGE_BIT_INCORRECT)"; https://github.com/dbgate/dbgate/issues/795; ""; open; 05/20/2024; 05/20/2024; keskinonur;
794; "GLIBC_2.29' not found and i have 2.31"; https://github.com/dbgate/dbgate/issues/794; ""; closed; 05/20/2024; 05/21/2024; MFdanGM;
793; "BUG: PostgresSQL doesn't show tables when connected"; https://github.com/dbgate/dbgate/issues/793; ""; open; 05/20/2024; 05/22/2024; stomper013;
792; "FEAT: Wayland support"; https://github.com/dbgate/dbgate/issues/792; ""; closed; 05/19/2024; 05/21/2024; VosaXalo;
`;
async function getReaderRows(reader) {
const jsonLinesFileName = tmp.tmpNameSync();
const writer = await dbgateApi.jsonLinesWriter({
fileName: jsonLinesFileName,
});
await dbgateApi.copyStream(reader, writer);
const jsonData = fs.readFileSync(jsonLinesFileName, 'utf-8');
const rows = jsonData
.split('\n')
.filter(x => x.trim() !== '')
.map(x => JSON.parse(x));
return rows;
}
// Feeds CSV_DATA through the csv plugin reader and verifies both the leading
// structure row (column list) and the first data row.
test('csv import test', async () => {
  const csvPlugin = requirePlugin('dbgate-plugin-csv');
  const csvFile = tmp.tmpNameSync();
  fs.writeFileSync(csvFile, CSV_DATA);

  const reader = await csvPlugin.shellApi.reader({ fileName: csvFile });
  const rows = await getReaderRows(reader);

  // rows[0] is the structure row emitted by the reader, not data.
  const expectedColumns = [
    'Issue Number',
    'Title',
    'Github URL',
    'Labels',
    'State',
    'Created At',
    'Updated At',
    'Reporter',
    'Assignee',
  ].map(columnName => ({ columnName }));
  expect(rows[0].columns).toEqual(expectedColumns);

  // 1 structure row + 8 data rows
  expect(rows.length).toEqual(9);

  expect(rows[1]).toEqual({
    'Issue Number': '801',
    Title: "Does it 'burst' the database on startup or first lUI load ? ",
    'Github URL': 'https://github.com/dbgate/dbgate/issues/801',
    Labels: '',
    State: 'open',
    'Created At': '05/23/2024',
    'Updated At': '05/23/2024',
    Reporter: 'rgarrigue',
    Assignee: '',
  });
});
// Imports a plain JSON array file; rows should round-trip unchanged.
test('JSON array import test', async () => {
  const sourceRows = [
    { id: 1, val: 'v1' },
    { id: 2, val: 'v2' },
  ];
  const jsonFile = tmp.tmpNameSync();
  fs.writeFileSync(jsonFile, JSON.stringify(sourceRows));

  const reader = await dbgateApi.jsonReader({ fileName: jsonFile });
  const rows = await getReaderRows(reader);

  expect(rows.length).toEqual(2);
  expect(rows).toEqual(sourceRows);
});
// Imports a keyed JSON object (jsonStyle: 'object'); each entry becomes a row
// with the object key exposed under the configured keyField ('mykey').
test('JSON object import test', async () => {
  const sourceObject = {
    k1: { id: 1, val: 'v1' },
    k2: { id: 2, val: 'v2' },
  };
  const jsonFile = tmp.tmpNameSync();
  fs.writeFileSync(jsonFile, JSON.stringify(sourceObject));

  const reader = await dbgateApi.jsonReader({
    fileName: jsonFile,
    jsonStyle: 'object',
    keyField: 'mykey',
  });
  const rows = await getReaderRows(reader);

  expect(rows.length).toEqual(2);
  // Expected rows are the source entries with the key merged in as 'mykey'.
  expect(rows).toEqual(Object.entries(sourceObject).map(([mykey, value]) => ({ mykey, ...value })));
});
// Same as the keyed-object import, but the object of interest is nested under
// a wrapper key and selected via rootField ('filtered').
test('JSON filtered object import test', async () => {
  const sourceObject = {
    k1: { id: 1, val: 'v1' },
    k2: { id: 2, val: 'v2' },
  };
  const jsonFile = tmp.tmpNameSync();
  fs.writeFileSync(jsonFile, JSON.stringify({ filtered: sourceObject }));

  const reader = await dbgateApi.jsonReader({
    fileName: jsonFile,
    jsonStyle: 'object',
    keyField: 'mykey',
    rootField: 'filtered',
  });
  const rows = await getReaderRows(reader);

  expect(rows.length).toEqual(2);
  // Expected rows are the nested entries with the key merged in as 'mykey'.
  expect(rows).toEqual(Object.entries(sourceObject).map(([mykey, value]) => ({ mykey, ...value })));
});

View File

@@ -2,7 +2,7 @@ const { testWrapper } = require('../tools');
const engines = require('../engines');
const _ = require('lodash');
const initSql = ['CREATE TABLE t1 (id int)', 'CREATE TABLE t2 (id int)'];
const initSql = ['CREATE TABLE t1 (id int primary key)', 'CREATE TABLE t2 (id int primary key)'];
function flatSource() {
return _.flatten(
@@ -26,9 +26,9 @@ describe('Object analyse', () => {
test.each(flatSource())(
'Full analysis - %s - %s',
testWrapper(async (conn, driver, type, object, engine) => {
for (const sql of initSql) await driver.query(conn, sql);
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
await driver.query(conn, object.create1);
await driver.query(conn, object.create1, { discardResult: true });
const structure = await driver.analyseFull(conn);
expect(structure[type].length).toEqual(1);
@@ -39,11 +39,11 @@ describe('Object analyse', () => {
test.each(flatSource())(
'Incremental analysis - add - %s - %s',
testWrapper(async (conn, driver, type, object, engine) => {
for (const sql of initSql) await driver.query(conn, sql);
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
await driver.query(conn, object.create2);
await driver.query(conn, object.create2, { discardResult: true });
const structure1 = await driver.analyseFull(conn);
await driver.query(conn, object.create1);
await driver.query(conn, object.create1, { discardResult: true });
const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2[type].length).toEqual(2);
@@ -54,12 +54,12 @@ describe('Object analyse', () => {
test.each(flatSource())(
'Incremental analysis - drop - %s - %s',
testWrapper(async (conn, driver, type, object, engine) => {
for (const sql of initSql) await driver.query(conn, sql);
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
await driver.query(conn, object.create1);
await driver.query(conn, object.create2);
await driver.query(conn, object.create1, { discardResult: true });
await driver.query(conn, object.create2, { discardResult: true });
const structure1 = await driver.analyseFull(conn);
await driver.query(conn, object.drop2);
await driver.query(conn, object.drop2, { discardResult: true });
const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2[type].length).toEqual(1);
@@ -70,15 +70,15 @@ describe('Object analyse', () => {
test.each(flatSource())(
'Create SQL - add - %s - %s',
testWrapper(async (conn, driver, type, object, engine) => {
for (const sql of initSql) await driver.query(conn, sql);
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
await driver.query(conn, object.create1);
await driver.query(conn, object.create1, { discardResult: true });
const structure1 = await driver.analyseFull(conn);
await driver.query(conn, object.drop1);
await driver.query(conn, object.drop1, { discardResult: true });
const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2[type].length).toEqual(0);
await driver.query(conn, structure1[type][0].createSql);
await driver.query(conn, structure1[type][0].createSql, { discardResult: true });
const structure3 = await driver.analyseIncremental(conn, structure2);

View File

@@ -2,7 +2,11 @@ const engines = require('../engines');
const { splitQuery } = require('dbgate-query-splitter');
const { testWrapper } = require('../tools');
const initSql = ['CREATE TABLE t1 (id int)', 'INSERT INTO t1 (id) VALUES (1)', 'INSERT INTO t1 (id) VALUES (2)'];
const initSql = [
'CREATE TABLE t1 (id int primary key)',
'INSERT INTO t1 (id) VALUES (1)',
'INSERT INTO t1 (id) VALUES (2)',
];
expect.extend({
dataRow(row, expected) {
@@ -64,7 +68,7 @@ describe('Query', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Simple query - %s',
testWrapper(async (conn, driver, engine) => {
for (const sql of initSql) await driver.query(conn, sql);
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
const res = await driver.query(conn, 'SELECT id FROM t1 ORDER BY id');
expect(res.columns).toEqual([
@@ -87,7 +91,7 @@ describe('Query', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Simple stream query - %s',
testWrapper(async (conn, driver, engine) => {
for (const sql of initSql) await driver.query(conn, sql);
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
const results = await executeStream(driver, conn, 'SELECT id FROM t1 ORDER BY id');
expect(results.length).toEqual(1);
const res = results[0];
@@ -100,7 +104,7 @@ describe('Query', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'More queries - %s',
testWrapper(async (conn, driver, engine) => {
for (const sql of initSql) await driver.query(conn, sql);
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
const results = await executeStream(
driver,
conn,
@@ -124,7 +128,7 @@ describe('Query', () => {
const results = await executeStream(
driver,
conn,
'CREATE TABLE t1 (id int); INSERT INTO t1 (id) VALUES (1); INSERT INTO t1 (id) VALUES (2); SELECT id FROM t1 ORDER BY id; '
'CREATE TABLE t1 (id int primary key); INSERT INTO t1 (id) VALUES (1); INSERT INTO t1 (id) VALUES (2); SELECT id FROM t1 ORDER BY id; '
);
expect(results.length).toEqual(1);
@@ -146,14 +150,15 @@ describe('Query', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
'Save data query - %s',
testWrapper(async (conn, driver, engine) => {
for (const sql of initSql) await driver.query(conn, sql);
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
await driver.script(
conn,
'INSERT INTO t1 (id) VALUES (3);INSERT INTO t1 (id) VALUES (4);UPDATE t1 SET id=10 WHERE id=1;DELETE FROM t1 WHERE id=2;'
'INSERT INTO t1 (id) VALUES (3);INSERT INTO t1 (id) VALUES (4);UPDATE t1 SET id=10 WHERE id=1;DELETE FROM t1 WHERE id=2;',
{ discardResult: true }
);
const res = await driver.query(conn, 'SELECT COUNT(*) AS cnt FROM t1');
// console.log(res);

View File

@@ -0,0 +1,90 @@
const stableStringify = require('json-stable-stringify');
const _ = require('lodash');
const fp = require('lodash/fp');
const { testWrapper, extractConnection } = require('../tools');
const engines = require('../engines');
const { runCommandOnDriver } = require('dbgate-tools');
async function baseStructure(conn, driver) {
await driver.query(conn, `create table t1 (id int not null primary key)`);
await driver.query(
conn,
`create table t2 (
id int not null primary key,
t1_id int
)`
);
}
describe('Schema tests', () => {
test.each(engines.filter(x => x.supportSchemas).map(engine => [engine.label, engine]))(
'Create schema - %s',
testWrapper(async (conn, driver, engine) => {
await baseStructure(conn, driver);
const structure1 = await driver.analyseFull(conn);
const schemas1 = await driver.listSchemas(conn);
expect(schemas1.find(x => x.schemaName == 'myschema')).toBeFalsy();
const count = schemas1.length;
expect(structure1.tables.length).toEqual(2);
await runCommandOnDriver(conn, driver, dmp => dmp.createSchema('myschema'));
const structure2 = await driver.analyseIncremental(conn, structure1);
const schemas2 = await driver.listSchemas(conn);
expect(schemas2.find(x => x.schemaName == 'myschema')).toBeTruthy();
expect(schemas2.length).toEqual(count + 1);
expect(schemas2.find(x => x.isDefault).schemaName).toEqual(engine.defaultSchemaName);
expect(structure2).toBeNull();
})
);
test.each(engines.filter(x => x.supportSchemas).map(engine => [engine.label, engine]))(
'Drop schema - %s',
testWrapper(async (conn, driver, engine) => {
await baseStructure(conn, driver);
await runCommandOnDriver(conn, driver, dmp => dmp.createSchema('myschema'));
const structure1 = await driver.analyseFull(conn);
const schemas1 = await driver.listSchemas(conn);
expect(schemas1.find(x => x.schemaName == 'myschema')).toBeTruthy();
expect(structure1.tables.length).toEqual(2);
await runCommandOnDriver(conn, driver, dmp => dmp.dropSchema('myschema'));
const structure2 = await driver.analyseIncremental(conn, structure1);
const schemas2 = await driver.listSchemas(conn);
expect(schemas2.find(x => x.schemaName == 'myschema')).toBeFalsy();
expect(structure2).toBeNull();
})
);
test.each(engines.filter(x => x.supportSchemas && !x.skipSeparateSchemas).map(engine => [engine.label, engine]))(
'Table inside schema - %s',
testWrapper(async (handle, driver, engine) => {
await baseStructure(handle, driver);
await runCommandOnDriver(handle, driver, dmp => dmp.createSchema('myschema'));
const schemaConnDef = {
...extractConnection(engine),
database: `${handle.database}::myschema`,
};
const schemaConn = await driver.connect(schemaConnDef);
await driver.query(schemaConn, `create table myschema.myt1 (id int not null primary key)`);
const structure1 = await driver.analyseFull(schemaConn);
expect(structure1.tables.length).toEqual(1);
expect(structure1.tables[0].pureName).toEqual('myt1');
})
);
});
describe('Base analyser test', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Structure without change - %s',
testWrapper(async (conn, driver, engine) => {
await baseStructure(conn, driver);
const structure1 = await driver.analyseFull(conn);
expect(structure1.tables.length).toEqual(2);
const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2).toBeNull();
})
);
});

View File

@@ -1,32 +1,37 @@
const engines = require('../engines');
const { testWrapper } = require('../tools');
const t1Sql = 'CREATE TABLE t1 (id int not null primary key, val1 varchar(50) null)';
const t1Sql = 'CREATE TABLE t1 (id int not null primary key, val1 varchar(50))';
const ix1Sql = 'CREATE index ix1 ON t1(val1, id)';
const t2Sql = 'CREATE TABLE t2 (id int not null primary key, val2 varchar(50) null unique)';
const t2Sql = engine =>
`CREATE TABLE t2 (id int not null primary key, val2 varchar(50) ${engine.skipUnique ? '' : 'unique'})`;
const t3Sql = 'CREATE TABLE t3 (id int not null primary key, valfk int, foreign key (valfk) references t2(id))';
// const fkSql = 'ALTER TABLE t3 ADD FOREIGN KEY (valfk) REFERENCES t2(id)'
const txMatch = (tname, vcolname, nextcol) =>
const txMatch = (engine, tname, vcolname, nextcol) =>
expect.objectContaining({
pureName: tname,
columns: [
expect.objectContaining({
columnName: 'id',
notNull: true,
dataType: expect.stringMatching(/int/i),
dataType: expect.stringMatching(/int.*/i),
...(engine.skipNullability ? {} : { notNull: true }),
}),
expect.objectContaining({
columnName: vcolname,
notNull: false,
dataType: expect.stringMatching(/.*char.*\(50\)/),
...(engine.skipNullability ? {} : { notNull: false }),
dataType: engine.skipStringLength
? expect.stringMatching(/.*string|char.*/i)
: expect.stringMatching(/.*char.*\(50\)/i),
}),
...(nextcol
? [
expect.objectContaining({
columnName: 'nextcol',
notNull: false,
dataType: expect.stringMatching(/.*char.*\(50\)/),
...(engine.skipNullability ? {} : { notNull: false }),
dataType: engine.skipStringLength
? expect.stringMatching(/.*string.*|char.*/i)
: expect.stringMatching(/.*char.*\(50\).*/i),
}),
]
: []),
@@ -40,9 +45,9 @@ const txMatch = (tname, vcolname, nextcol) =>
}),
});
const t1Match = txMatch('t1', 'val1');
const t2Match = txMatch('t2', 'val2');
const t2NextColMatch = txMatch('t2', 'val2', true);
const t1Match = engine => txMatch(engine, 't1', 'val1');
const t2Match = engine => txMatch(engine, 't2', 'val2');
const t2NextColMatch = engine => txMatch(engine, 't2', 'val2', true);
describe('Table analyse', () => {
test.each(engines.map(engine => [engine.label, engine]))(
@@ -53,25 +58,25 @@ describe('Table analyse', () => {
const structure = await driver.analyseFull(conn);
expect(structure.tables.length).toEqual(1);
expect(structure.tables[0]).toEqual(t1Match);
expect(structure.tables[0]).toEqual(t1Match(engine));
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Table add - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await driver.query(conn, t2Sql);
await driver.query(conn, t2Sql(engine));
const structure1 = await driver.analyseFull(conn);
expect(structure1.tables.length).toEqual(1);
expect(structure1.tables[0]).toEqual(t2Match);
expect(structure1.tables[0]).toEqual(t2Match(engine));
await driver.query(conn, t1Sql);
const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2.tables.length).toEqual(2);
expect(structure2.tables.find(x => x.pureName == 't1')).toEqual(t1Match);
expect(structure2.tables.find(x => x.pureName == 't2')).toEqual(t2Match);
expect(structure2.tables.find(x => x.pureName == 't1')).toEqual(t1Match(engine));
expect(structure2.tables.find(x => x.pureName == 't2')).toEqual(t2Match(engine));
})
);
@@ -79,17 +84,17 @@ describe('Table analyse', () => {
'Table remove - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await driver.query(conn, t1Sql);
await driver.query(conn, t2Sql);
await driver.query(conn, t2Sql(engine));
const structure1 = await driver.analyseFull(conn);
expect(structure1.tables.length).toEqual(2);
expect(structure1.tables.find(x => x.pureName == 't1')).toEqual(t1Match);
expect(structure1.tables.find(x => x.pureName == 't2')).toEqual(t2Match);
expect(structure1.tables.find(x => x.pureName == 't1')).toEqual(t1Match(engine));
expect(structure1.tables.find(x => x.pureName == 't2')).toEqual(t2Match(engine));
await driver.query(conn, 'DROP TABLE t2');
const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2.tables.length).toEqual(1);
expect(structure2.tables[0]).toEqual(t1Match);
expect(structure2.tables[0]).toEqual(t1Match(engine));
})
);
@@ -97,23 +102,26 @@ describe('Table analyse', () => {
'Table change - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await driver.query(conn, t1Sql);
await driver.query(conn, t2Sql);
await driver.query(conn, t2Sql(engine));
const structure1 = await driver.analyseFull(conn);
if (engine.dbSnapshotBySeconds) await new Promise(resolve => setTimeout(resolve, 1100));
await driver.query(conn, 'ALTER TABLE t2 ADD nextcol varchar(50)');
await driver.query(
conn,
`ALTER TABLE t2 ADD ${engine.alterTableAddColumnSyntax ? 'COLUMN' : ''} nextcol varchar(50)`
);
const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2).toBeTruthy(); // if falsy, no modification is detected
expect(structure2.tables.length).toEqual(2);
expect(structure2.tables.find(x => x.pureName == 't1')).toEqual(t1Match);
expect(structure2.tables.find(x => x.pureName == 't2')).toEqual(t2NextColMatch);
expect(structure2.tables.find(x => x.pureName == 't1')).toEqual(t1Match(engine));
expect(structure2.tables.find(x => x.pureName == 't2')).toEqual(t2NextColMatch(engine));
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipIndexes).map(engine => [engine.label, engine]))(
'Index - full analysis - %s',
testWrapper(async (conn, driver, engine) => {
await driver.query(conn, t1Sql);
@@ -128,10 +136,10 @@ describe('Table analyse', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipUnique).map(engine => [engine.label, engine]))(
'Unique - full analysis - %s',
testWrapper(async (conn, driver, engine) => {
await driver.query(conn, t2Sql);
await driver.query(conn, t2Sql(engine));
const structure = await driver.analyseFull(conn);
const t2 = structure.tables.find(x => x.pureName == 't2');
@@ -142,10 +150,10 @@ describe('Table analyse', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
'Foreign key - full analysis - %s',
testWrapper(async (conn, driver, engine) => {
await driver.query(conn, t2Sql);
await driver.query(conn, t2Sql(engine));
await driver.query(conn, t3Sql);
// await driver.query(conn, fkSql);

View File

@@ -62,7 +62,7 @@ describe('Table create', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipIndexes).map(engine => [engine.label, engine]))(
'Table with index - %s',
testWrapper(async (conn, driver, engine) => {
await testTableCreate(conn, driver, {
@@ -92,7 +92,7 @@ describe('Table create', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
'Table with foreign key - %s',
testWrapper(async (conn, driver, engine) => {
await testTableCreate(conn, driver, {
@@ -122,7 +122,7 @@ describe('Table create', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipUnique).map(engine => [engine.label, engine]))(
'Table with unique - %s',
testWrapper(async (conn, driver, engine) => {
await testTableCreate(conn, driver, {

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,21 +1,21 @@
version: '3'
services:
# postgres:
# image: postgres
# restart: always
# environment:
# POSTGRES_PASSWORD: Pwd2020Db
# ports:
# - 15000:5432
postgres:
image: postgres
restart: always
environment:
POSTGRES_PASSWORD: Pwd2020Db
ports:
- 15000:5432
# mariadb:
# image: mariadb
# command: --default-authentication-plugin=mysql_native_password
# restart: always
# ports:
# - 15004:3306
# environment:
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
mariadb:
image: mariadb
command: --default-authentication-plugin=mysql_native_password
restart: always
ports:
- 15004:3306
environment:
- MYSQL_ROOT_PASSWORD=Pwd2020Db
# mysql:
# image: mysql:8.0.18
@@ -26,15 +26,23 @@ services:
# environment:
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
mssql:
image: mcr.microsoft.com/mssql/server
restart: always
ports:
- 15002:1433
environment:
- ACCEPT_EULA=Y
- SA_PASSWORD=Pwd2020Db
- MSSQL_PID=Express
# clickhouse:
# image: bitnami/clickhouse:24.8.4
# restart: always
# ports:
# - 15005:8123
# environment:
# - CLICKHOUSE_ADMIN_PASSWORD=Pwd2020Db
# mssql:
# image: mcr.microsoft.com/mssql/server
# restart: always
# ports:
# - 15002:1433
# environment:
# - ACCEPT_EULA=Y
# - SA_PASSWORD=Pwd2020Db
# - MSSQL_PID=Express
# cockroachdb:
# image: cockroachdb/cockroach

View File

@@ -30,6 +30,13 @@ const engines = [
// skipOnCI: true,
objects: [views],
dbSnapshotBySeconds: true,
dumpFile: 'data/chinook-mysql.sql',
dumpChecks: [
{
sql: 'select count(*) as res from genre',
res: '25',
},
],
},
{
label: 'MariaDB',
@@ -47,6 +54,13 @@ const engines = [
skipOnCI: true,
objects: [views],
dbSnapshotBySeconds: true,
dumpFile: 'data/chinook-mysql.sql',
dumpChecks: [
{
sql: 'select count(*) as res from genre',
res: '25',
},
],
},
{
label: 'PostgreSQL',
@@ -81,6 +95,15 @@ const engines = [
drop2: 'DROP FUNCTION obj2',
},
],
supportSchemas: true,
defaultSchemaName: 'public',
dumpFile: 'data/chinook-postgre.sql',
dumpChecks: [
{
sql: 'select count(*) as res from "public"."Genre"',
res: '25',
},
],
},
{
label: 'SQL Server',
@@ -105,6 +128,9 @@ const engines = [
drop2: 'DROP PROCEDURE obj2',
},
],
supportSchemas: true,
defaultSchemaName: 'dbo',
// skipSeparateSchemas: true,
},
{
label: 'SQLite',
@@ -113,6 +139,7 @@ const engines = [
engine: 'sqlite@dbgate-plugin-sqlite',
},
objects: [views],
skipOnCI: false,
},
{
label: 'CockroachDB',
@@ -129,16 +156,41 @@ const engines = [
skipOnCI: true,
objects: [views, matviews],
},
{
label: 'ClickHouse',
connection: {
engine: 'clickhouse@dbgate-plugin-clickhouse',
databaseUrl: 'http://clickhouse:8123',
password: 'Pwd2020Db',
},
local: {
databaseUrl: 'http://localhost:15005',
},
skipOnCI: false,
objects: [views],
skipDataModifications: true,
skipReferences: true,
skipIndexes: true,
skipNullability: true,
skipUnique: true,
skipAutoIncrement: true,
skipPkColumnTesting: true,
skipDataDuplicator: true,
skipStringLength: true,
alterTableAddColumnSyntax: true,
dbSnapshotBySeconds: true,
},
];
const filterLocal = [
// filter local testing
'-MySQL',
'-MariaDB',
'-PostgreSQL',
'PostgreSQL',
'-SQL Server',
'SQLite',
'-SQLite',
'-CockroachDB',
'-ClickHouse',
];
const enginesPostgre = engines.filter(x => x.label == 'PostgreSQL');

View File

@@ -0,0 +1,3 @@
module.exports = {
setupFilesAfterEnv: ['<rootDir>/setupTests.js'],
};

View File

@@ -11,12 +11,9 @@
"scripts": {
"wait:local": "cross-env DEVMODE=1 LOCALTEST=1 node wait.js",
"wait:ci": "cross-env DEVMODE=1 CITEST=1 node wait.js",
"test:local": "cross-env DEVMODE=1 LOCALTEST=1 jest",
"test:local:path": "cross-env DEVMODE=1 LOCALTEST=1 jest --runTestsByPath __tests__/data-duplicator.spec.js",
"test:ci": "cross-env DEVMODE=1 CITEST=1 jest --runInBand --json --outputFile=result.json --testLocationInResults",
"test:ci": "cross-env DEVMODE=1 CITEST=1 jest --runInBand --json --outputFile=result.json --testLocationInResults --detectOpenHandles --forceExit",
"run:local": "docker-compose down && docker-compose up -d && yarn wait:local && yarn test:local"
},
"jest": {
@@ -24,7 +21,8 @@
},
"devDependencies": {
"cross-env": "^7.0.3",
"jest": "^27.0.1"
},
"dependencies": {}
"jest": "^27.0.1",
"pino-pretty": "^11.2.2",
"tmp": "^0.2.3"
}
}

View File

@@ -0,0 +1,30 @@
global.DBGATE_PACKAGES = {
'dbgate-tools': require('dbgate-tools'),
'dbgate-sqltree': require('dbgate-sqltree'),
};
const { prettyFactory } = require('pino-pretty');
const tmp = require('tmp');
const pretty = prettyFactory({
colorize: true,
translateTime: 'SYS:standard',
ignore: 'pid,hostname',
});
global.console = {
...console,
log: (...messages) => {
try {
const parsedMessage = JSON.parse(messages[0]);
process.stdout.write(pretty(parsedMessage));
} catch (error) {
process.stdout.write(messages.join(' ') + '\n');
}
},
debug: (...messages) => {
process.stdout.write(messages.join(' ') + '\n');
},
};
tmp.setGracefulCleanup();

View File

@@ -1,8 +1,3 @@
global.DBGATE_PACKAGES = {
'dbgate-tools': require('dbgate-tools'),
'dbgate-sqltree': require('dbgate-sqltree'),
};
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
const crypto = require('crypto');
@@ -48,6 +43,29 @@ async function connect(engine, database) {
}
}
async function prepareConnection(engine, database) {
const connection = extractConnection(engine);
const driver = requireEngineDriver(connection);
if (engine.generateDbFile) {
return {
...connection,
databaseFile: `dbtemp/${database}`,
isPreparedOnly: true,
};
} else {
const conn = await driver.connect(connection);
await driver.query(conn, `CREATE DATABASE ${database}`);
await driver.close(conn);
return {
...connection,
database,
isPreparedOnly: true,
};
}
}
const testWrapper =
body =>
async (label, ...other) => {
@@ -61,9 +79,19 @@ const testWrapper =
}
};
const testWrapperPrepareOnly =
body =>
async (label, ...other) => {
const engine = other[other.length - 1];
const driver = requireEngineDriver(engine.connection);
const conn = await prepareConnection(engine, randomDbName());
await body(conn, driver, ...other);
};
module.exports = {
randomDbName,
connect,
extractConnection,
testWrapper,
testWrapperPrepareOnly,
};

View File

@@ -1,6 +1,6 @@
{
"private": true,
"version": "5.4.2-pro.1",
"version": "5.5.7-packer-beta.2",
"name": "dbgate-all",
"workspaces": [
"packages/*",
@@ -21,6 +21,7 @@
"start:api:auth": "yarn workspace dbgate-api start:auth | pino-pretty",
"start:api:dblogin": "yarn workspace dbgate-api start:dblogin | pino-pretty",
"start:api:storage": "yarn workspace dbgate-api start:storage | pino-pretty",
"start:api:storage:built": "yarn workspace dbgate-api start:storage:built | pino-pretty",
"sync:pro": "cd sync && yarn start",
"start:web": "yarn workspace dbgate-web dev",
"start:sqltree": "yarn workspace dbgate-sqltree start",
@@ -34,8 +35,9 @@
"build:lib": "yarn build:sqltree && yarn build:tools && yarn build:filterparser && yarn build:datalib",
"build:app": "yarn plugins:copydist && cd app && yarn install && yarn build",
"build:api": "yarn workspace dbgate-api build",
"build:web:docker": "yarn workspace dbgate-web build",
"build:web": "yarn workspace dbgate-web build",
"build:plugins:frontend": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn build:frontend",
"build:plugins:backend": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn build:backend",
"build:plugins:frontend:watch": "workspaces-run --parallel --only=\"dbgate-plugin-*\" -- yarn build:frontend:watch",
"storage-json": "dbmodel model-to-json storage-db packages/api/src/storageModel.js --commonjs",
"plugins:copydist": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn copydist",
@@ -51,8 +53,10 @@
"resetPackagedPlugins": "node resetPackagedPlugins",
"prettier": "prettier --write packages/api/src && prettier --write packages/datalib/src && prettier --write packages/filterparser/src && prettier --write packages/sqltree/src && prettier --write packages/tools/src && prettier --write packages/types && prettier --write packages/web/src && prettier --write app/src",
"copy:docker:build": "copyfiles packages/api/dist/* docker -f && copyfiles packages/web/public/* docker -u 2 && copyfiles \"packages/web/public/**/*\" docker -u 2 && copyfiles \"plugins/dist/**/*\" docker/plugins -u 2",
"copy:packer:build": "copyfiles packages/api/dist/* packer/build -f && copyfiles packages/web/public/* packer/build -u 2 && copyfiles \"packages/web/public/**/*\" packer/build -u 2 && copyfiles \"plugins/dist/**/*\" packer/build/plugins -u 2",
"install:drivers:docker": "cd docker && yarn init --yes && yarn add better-sqlite3 && yarn add oracledb && cd ..",
"prepare:docker": "yarn plugins:copydist && yarn build:web:docker && yarn build:api && yarn copy:docker:build && yarn install:drivers:docker",
"prepare:docker": "yarn plugins:copydist && yarn build:web && yarn build:api && yarn copy:docker:build && yarn install:drivers:docker",
"prepare:packer": "yarn plugins:copydist && yarn build:web && yarn build:api && yarn copy:packer:build",
"start": "concurrently --kill-others-on-fail \"yarn start:api\" \"yarn start:web\"",
"lib": "concurrently --kill-others-on-fail \"yarn start:sqltree\" \"yarn start:filterparser\" \"yarn start:datalib\" \"yarn start:tools\" \"yarn build:plugins:frontend:watch\"",
"ts:api": "yarn workspace dbgate-api ts",

View File

@@ -17,6 +17,7 @@
"dbgate"
],
"dependencies": {
"@aws-sdk/rds-signer": "^3.665.0",
"activedirectory2": "^2.1.0",
"async-lock": "^1.2.4",
"axios": "^0.21.1",
@@ -27,7 +28,7 @@
"cors": "^2.8.5",
"cross-env": "^6.0.3",
"dbgate-datalib": "^5.0.0-alpha.1",
"dbgate-query-splitter": "^4.10.3",
"dbgate-query-splitter": "^4.11.2",
"dbgate-sqltree": "^5.0.0-alpha.1",
"dbgate-tools": "^5.0.0-alpha.1",
"debug": "^4.3.4",
@@ -57,6 +58,7 @@
"rimraf": "^3.0.0",
"simple-encryptor": "^4.0.0",
"ssh2": "^1.11.0",
"stream-json": "^1.8.0",
"tar": "^6.0.5"
},
"scripts": {
@@ -67,6 +69,7 @@
"start:dblogin": "env-cmd -f env/dblogin/.env node src/index.js --listen-api",
"start:filedb": "env-cmd node src/index.js /home/jena/test/chinook/Chinook.db --listen-api",
"start:storage": "env-cmd -f env/storage/.env node src/index.js --listen-api",
"start:storage:built": "env-cmd -f env/storage/.env cross-env DEVMODE= BUILTWEBMODE=1 node dist/bundle.js --listen-api",
"start:singleconn": "env-cmd node src/index.js --server localhost --user root --port 3307 --engine mysql@dbgate-plugin-mysql --password test --listen-api",
"ts": "tsc",
"build": "webpack"

View File

@@ -83,9 +83,16 @@ class OAuthProvider extends AuthProviderBase {
)}&client_id=${process.env.OAUTH_CLIENT_ID}&client_secret=${process.env.OAUTH_CLIENT_SECRET}${scopeParam}`
);
const { access_token, refresh_token } = resp.data;
const { access_token, refresh_token, id_token } = resp.data;
const payload = jwt.decode(access_token);
let payload = jwt.decode(access_token);
// Fallback to id_token in case the access_token is not a JWT
// https://www.oauth.com/oauth2-servers/access-tokens/
// https://github.com/dbgate/dbgate/issues/727
if (!payload && id_token) {
payload = jwt.decode(id_token);
}
logger.info({ payload }, 'User payload returned from OAUTH');
@@ -198,6 +205,19 @@ class LoginsProvider extends AuthProviderBase {
amoid = 'logins';
async login(login, password, options = undefined) {
if (login && password && process.env['LOGIN'] == login && process.env['PASSWORD'] == password) {
return {
accessToken: jwt.sign(
{
amoid: this.amoid,
login,
},
getTokenSecret(),
{ expiresIn: getTokenLifetime() }
),
};
}
if (password == process.env[`LOGIN_PASSWORD_${login}`]) {
return {
accessToken: jwt.sign(
@@ -210,6 +230,7 @@ class LoginsProvider extends AuthProviderBase {
),
};
}
return { error: 'Invalid credentials' };
}

View File

@@ -6,7 +6,7 @@ const { archivedir, clearArchiveLinksCache, resolveArchiveFolder } = require('..
const socket = require('../utility/socket');
const loadFilesRecursive = require('../utility/loadFilesRecursive');
const getJslFileName = require('../utility/getJslFileName');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const dbgateApi = require('../shell');
const jsldata = require('./jsldata');
const platformInfo = require('../utility/platformInfo');
@@ -74,7 +74,7 @@ module.exports = {
...fileType('.matview.sql', 'matview.sql'),
];
} catch (err) {
logger.error({ err }, 'Error reading archive files');
logger.error(extractErrorLogData(err), 'Error reading archive files');
return [];
}
},

View File

@@ -1,7 +1,7 @@
const axios = require('axios');
const jwt = require('jsonwebtoken');
const getExpressPath = require('../utility/getExpressPath');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const AD = require('activedirectory2').promiseWrapper;
const crypto = require('crypto');
const { getTokenSecret, getTokenLifetime } = require('../auth/authCommon');
@@ -22,7 +22,8 @@ function unauthorizedResponse(req, res, text) {
// if (req.path == getExpressPath('/connections/list')) {
// return res.json([]);
// }
return res.sendStatus(401).send(text);
return res.status(401).send(text);
}
function authMiddleware(req, res, next) {
@@ -35,8 +36,9 @@ function authMiddleware(req, res, next) {
'/auth/login',
'/auth/redirect',
'/stream',
'storage/get-connections-for-login-page',
'auth/get-providers',
'/storage/get-connections-for-login-page',
'/storage/set-admin-password',
'/auth/get-providers',
'/connections/dblogin-web',
'/connections/dblogin-app',
'/connections/dblogin-auth',
@@ -68,10 +70,11 @@ function authMiddleware(req, res, next) {
return next();
} catch (err) {
if (skipAuth) {
req.isInvalidToken = true;
return next();
}
logger.error({ err }, 'Sending invalid token error');
logger.error(extractErrorLogData(err), 'Sending invalid token error');
return unauthorizedResponse(req, res, 'invalid token');
}
@@ -88,7 +91,12 @@ module.exports = {
const { amoid, login, password, isAdminPage } = params;
if (isAdminPage) {
if (process.env.ADMIN_PASSWORD && process.env.ADMIN_PASSWORD == password) {
let adminPassword = process.env.ADMIN_PASSWORD;
if (!adminPassword) {
const adminConfig = await storage.readConfig({ group: 'admin' });
adminPassword = adminConfig?.adminPassword;
}
if (adminPassword && adminPassword == password) {
return {
accessToken: jwt.sign(
{

View File

@@ -17,6 +17,7 @@ const { checkLicense, checkLicenseKey } = require('../utility/checkLicense');
const storage = require('./storage');
const { getAuthProxyUrl } = require('../utility/authProxy');
const { getPublicHardwareFingerprint } = require('../utility/hardwareFingerprint');
const { extractErrorMessage } = require('dbgate-tools');
const lock = new AsyncLock();
@@ -39,10 +40,12 @@ module.exports = {
const isUserLoggedIn = authProvider.isUserLoggedIn(req);
const singleConid = authProvider.getSingleConnectionId(req);
const storageConnectionError = storage.getStorageConnectionError();
const singleConnection = singleConid
? await connections.getCore({ conid: singleConid })
: connections.singleConnection;
const singleConnection =
singleConid && !storageConnectionError
? await connections.getCore({ conid: singleConid })
: connections.singleConnection;
let configurationError = null;
if (process.env.STORAGE_DATABASE && process.env.BASIC_AUTH) {
@@ -50,8 +53,21 @@ module.exports = {
'Basic authentization is not allowed, when using storage. Cannot use both STORAGE_DATABASE and BASIC_AUTH';
}
const checkedLicense = await checkLicense();
if (storageConnectionError && !configurationError) {
configurationError = extractErrorMessage(storageConnectionError);
}
const checkedLicense = storageConnectionError ? null : await checkLicense();
const isLicenseValid = checkedLicense?.status == 'ok';
const logoutUrl = storageConnectionError ? null : await authProvider.getLogoutUrl();
const adminConfig = storageConnectionError ? null : await storage.readConfig({ group: 'admin' });
const isAdminPasswordMissing = !!(
process.env.STORAGE_DATABASE &&
!process.env.ADMIN_PASSWORD &&
!process.env.BASIC_AUTH &&
!adminConfig?.adminPasswordState
);
return {
runAsPortal: !!connections.portalConnections,
@@ -64,19 +80,23 @@ module.exports = {
isDocker: platformInfo.isDocker,
isElectron: platformInfo.isElectron,
isLicenseValid,
isLicenseExpired: checkedLicense?.isExpired,
trialDaysLeft: checkedLicense?.isGeneratedTrial && !checkedLicense?.isExpired ? checkedLicense?.daysLeft : null,
checkedLicense,
configurationError,
logoutUrl: await authProvider.getLogoutUrl(),
logoutUrl,
permissions,
login,
// ...additionalConfigProps,
isBasicAuth: !!process.env.BASIC_AUTH,
isAdminLoginForm: !!(
process.env.STORAGE_DATABASE &&
process.env.ADMIN_PASSWORD &&
!process.env.BASIC_AUTH &&
checkedLicense?.type == 'premium'
(process.env.ADMIN_PASSWORD || adminConfig?.adminPasswordState == 'set') &&
!process.env.BASIC_AUTH
),
isAdminPasswordMissing,
isInvalidToken: req.isInvalidToken,
adminPasswordState: adminConfig?.adminPasswordState,
storageDatabase: process.env.STORAGE_DATABASE,
logsFilePath: getLogsFilePath(),
connectionsFilePath: path.join(datadir(), 'connections.jsonl'),

View File

@@ -12,7 +12,7 @@ const { pickSafeConnectionInfo } = require('../utility/crypting');
const JsonLinesDatabase = require('../utility/JsonLinesDatabase');
const processArgs = require('../utility/processArgs');
const { safeJsonParse, getLogger } = require('dbgate-tools');
const { safeJsonParse, getLogger, extractErrorLogData } = require('dbgate-tools');
const platformInfo = require('../utility/platformInfo');
const { connectionHasPermission, testConnectionPermission } = require('../utility/hasPermission');
const pipeForkLogs = require('../utility/pipeForkLogs');
@@ -76,6 +76,7 @@ function getPortalCollections() {
allowedDatabases: process.env[`ALLOWED_DATABASES_${id}`]?.replace(/\|/g, '\n'),
allowedDatabasesRegex: process.env[`ALLOWED_DATABASES_REGEX_${id}`],
parent: process.env[`PARENT_${id}`] || undefined,
useSeparateSchemas: !!process.env[`USE_SEPARATE_SCHEMAS_${id}`],
// SSH tunnel
useSshTunnel: process.env[`USE_SSH_${id}`],
@@ -429,7 +430,7 @@ module.exports = {
socket.emit('got-volatile-token', { strmid, savedConId: conid, volatileConId: volatile._id });
return { success: true };
} catch (err) {
logger.error({ err }, 'Error getting DB token');
logger.error(extractErrorLogData(err), 'Error getting DB token');
return { error: err.message };
}
},
@@ -445,7 +446,7 @@ module.exports = {
const resp = await authProvider.login(null, null, { conid: volatile._id });
return resp;
} catch (err) {
logger.error({ err }, 'Error getting DB token');
logger.error(extractErrorLogData(err), 'Error getting DB token');
return { error: err.message };
}
},

View File

@@ -12,6 +12,7 @@ const {
extendDatabaseInfo,
modelCompareDbDiffOptions,
getLogger,
extractErrorLogData,
} = require('dbgate-tools');
const { html, parse } = require('diff2html');
const { handleProcessCommunication } = require('../utility/processComm');
@@ -146,7 +147,7 @@ module.exports = {
try {
conn.subprocess.send({ msgid, ...message });
} catch (err) {
logger.error({ err }, 'Error sending request do process');
logger.error(extractErrorLogData(err), 'Error sending request do process');
this.close(conn.conid, conn.database);
}
});
@@ -213,6 +214,17 @@ module.exports = {
return res.result || null;
},
schemaList_meta: true,
async schemaList({ conid, database }, req) {
testConnectionPermission(conid, req);
return this.loadDataCore('schemaList', { conid, database });
},
dispatchDatabaseChangedEvent_meta: true,
dispatchDatabaseChangedEvent({ event, conid, database }) {
socket.emitChanged(event, { conid, database });
},
loadKeys_meta: true,
async loadKeys({ conid, database, root, filter }, req) {
testConnectionPermission(conid, req);
@@ -307,7 +319,7 @@ module.exports = {
try {
existing.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error({ err }, 'Error pinging DB connection');
logger.error(extractErrorLogData(err), 'Error pinging DB connection');
this.close(conid, database);
return {
@@ -351,7 +363,7 @@ module.exports = {
try {
existing.subprocess.kill();
} catch (err) {
logger.error({ err }, 'Error killing subprocess');
logger.error(extractErrorLogData(err), 'Error killing subprocess');
}
}
this.opened = this.opened.filter(x => x.conid != conid || x.database != database);

View File

@@ -18,11 +18,14 @@ function readFirstLine(file) {
}
if (reader.hasNextLine()) {
reader.nextLine((err, line) => {
if (err) reject(err);
resolve(line);
if (err) {
reader.close(() => reject(err)); // Ensure reader is closed on error
return;
}
reader.close(() => resolve(line)); // Ensure reader is closed after reading
});
} else {
resolve(null);
reader.close(() => resolve(null)); // Properly close if no lines are present
}
});
});

View File

@@ -11,7 +11,7 @@ const processArgs = require('../utility/processArgs');
const { testConnectionPermission } = require('../utility/hasPermission');
const { MissingCredentialsError } = require('../utility/exceptions');
const pipeForkLogs = require('../utility/pipeForkLogs');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('serverConnection');
@@ -112,7 +112,7 @@ module.exports = {
try {
existing.subprocess.kill();
} catch (err) {
logger.error({ err }, 'Error killing subprocess');
logger.error(extractErrorLogData(err), 'Error killing subprocess');
}
}
this.opened = this.opened.filter(x => x.conid != conid);
@@ -167,7 +167,7 @@ module.exports = {
try {
opened.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error({ err }, 'Error pinging server connection');
logger.error(extractErrorLogData(err), 'Error pinging server connection');
this.close(conid);
}
})
@@ -217,7 +217,7 @@ module.exports = {
try {
conn.subprocess.send({ msgid, ...message });
} catch (err) {
logger.error({ err }, 'Error sending request');
logger.error(extractErrorLogData(err), 'Error sending request');
this.close(conn.conid);
}
});

View File

@@ -8,7 +8,7 @@ const path = require('path');
const { handleProcessCommunication } = require('../utility/processComm');
const processArgs = require('../utility/processArgs');
const { appdir } = require('../utility/directories');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const pipeForkLogs = require('../utility/pipeForkLogs');
const config = require('./config');
@@ -222,7 +222,7 @@ module.exports = {
try {
session.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error({ err }, 'Error pinging session');
logger.error(extractErrorLogData(err), 'Error pinging session');
return {
status: 'error',

View File

@@ -17,4 +17,13 @@ module.exports = {
async getConnectionsForLoginPage() {
return null;
},
getStorageConnectionError() {
return null;
},
readConfig_meta: true,
async readConfig({ group }) {
return {};
},
};

View File

@@ -1,7 +1,7 @@
const crypto = require('crypto');
const path = require('path');
const { uploadsdir, getLogsFilePath } = require('../utility/directories');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('uploads');
const axios = require('axios');
const os = require('os');
@@ -110,7 +110,7 @@ module.exports = {
return response.data;
} catch (err) {
logger.error({ err }, 'Error uploading gist');
logger.error(extractErrorLogData(err), 'Error uploading gist');
return {
apiErrorMessage: err.message,

View File

@@ -1,10 +1,17 @@
const { setLogConfig, getLogger, setLoggerName } = require('dbgate-tools');
const { setLogConfig, getLogger, setLoggerName, extractErrorLogData } = require('dbgate-tools');
const processArgs = require('./utility/processArgs');
const fs = require('fs');
const moment = require('moment');
const path = require('path');
const { logsdir, setLogsFilePath, getLogsFilePath } = require('./utility/directories');
const { createLogger } = require('pinomin');
const currentVersion = require('./currentVersion');
const logger = getLogger('apiIndex');
process.on('uncaughtException', err => {
logger.fatal(extractErrorLogData(err), 'Uncaught exception, exiting process');
process.exit(1);
});
if (processArgs.startProcess) {
setLoggerName(processArgs.startProcess.replace(/Process$/, ''));
@@ -94,10 +101,17 @@ function configureLogger() {
if (processArgs.listenApi) {
configureLogger();
logger.info(`Starting API process version ${currentVersion.version}`);
if (process.env.DEBUG_PRINT_ENV_VARIABLES) {
logger.info('Debug print environment variables:');
for (const key of Object.keys(process.env)) {
logger.info(` ${key}: ${JSON.stringify(process.env[key])}`);
}
}
}
const shell = require('./shell/index');
const currentVersion = require('./currentVersion');
global.DBGATE_PACKAGES = {
'dbgate-tools': require('dbgate-tools'),

View File

@@ -73,6 +73,8 @@ function start() {
if (platformInfo.isDocker) {
// server static files inside docker container
app.use(getExpressPath('/'), express.static('/home/dbgate-docker/public'));
} else if (platformInfo.isAwsUbuntuLayout) {
app.use(getExpressPath('/'), express.static('/home/ubuntu/build/public'));
} else if (platformInfo.isNpmDist) {
app.use(getExpressPath('/'), express.static(path.join(__dirname, '../../dbgate-web/public')));
} else if (process.env.DEVWEB) {
@@ -126,6 +128,10 @@ function start() {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (docker build)`);
server.listen(port);
} else if (platformInfo.isAwsUbuntuLayout) {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (AWS AMI build)`);
server.listen(port);
} else if (platformInfo.isNpmDist) {
getPort({
port: parseInt(

View File

@@ -20,9 +20,10 @@ function start() {
if (handleProcessCommunication(connection)) return;
try {
const driver = requireEngineDriver(connection);
const conn = await connectUtility(driver, connection, 'app');
const res = await driver.getVersion(conn);
const dbhan = await connectUtility(driver, connection, 'app');
const res = await driver.getVersion(dbhan);
process.send({ msgtype: 'connected', ...res });
await driver.close(dbhan);
} catch (e) {
console.error(e);
process.send({

View File

@@ -1,7 +1,15 @@
const stableStringify = require('json-stable-stringify');
const { splitQuery } = require('dbgate-query-splitter');
const childProcessChecker = require('../utility/childProcessChecker');
const { extractBoolSettingsValue, extractIntSettingsValue, getLogger } = require('dbgate-tools');
const {
extractBoolSettingsValue,
extractIntSettingsValue,
getLogger,
isCompositeDbName,
dbNameLogCategory,
extractErrorMessage,
extractErrorLogData,
} = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const { handleProcessCommunication } = require('../utility/processComm');
@@ -11,7 +19,7 @@ const { dumpSqlSelect } = require('dbgate-sqltree');
const logger = getLogger('dbconnProcess');
let systemConnection;
let dbhan;
let storedConnection;
let afterConnectCallbacks = [];
let afterAnalyseCallbacks = [];
@@ -35,7 +43,7 @@ async function checkedAsyncCall(promise) {
} catch (err) {
setStatus({
name: 'error',
message: err.message,
message: extractErrorMessage(err, 'Checked call error'),
});
// console.error(err);
setTimeout(() => process.exit(1), 1000);
@@ -46,10 +54,16 @@ async function checkedAsyncCall(promise) {
let loadingModel = false;
async function handleFullRefresh() {
if (storedConnection.useSeparateSchemas && !isCompositeDbName(dbhan?.database)) {
resolveAnalysedPromises();
// skip loading DB structure
return;
}
loadingModel = true;
const driver = requireEngineDriver(storedConnection);
setStatusName('loadStructure');
analysedStructure = await checkedAsyncCall(driver.analyseFull(systemConnection, serverVersion));
analysedStructure = await checkedAsyncCall(driver.analyseFull(dbhan, serverVersion));
analysedTime = new Date().getTime();
process.send({ msgtype: 'structure', structure: analysedStructure });
process.send({ msgtype: 'structureTime', analysedTime });
@@ -60,12 +74,15 @@ async function handleFullRefresh() {
}
async function handleIncrementalRefresh(forceSend) {
if (storedConnection.useSeparateSchemas && !isCompositeDbName(dbhan?.database)) {
resolveAnalysedPromises();
// skip loading DB structure
return;
}
loadingModel = true;
const driver = requireEngineDriver(storedConnection);
setStatusName('checkStructure');
const newStructure = await checkedAsyncCall(
driver.analyseIncremental(systemConnection, analysedStructure, serverVersion)
);
const newStructure = await checkedAsyncCall(driver.analyseIncremental(dbhan, analysedStructure, serverVersion));
analysedTime = new Date().getTime();
if (newStructure != null) {
analysedStructure = newStructure;
@@ -103,7 +120,8 @@ function setStatusName(name) {
async function readVersion() {
const driver = requireEngineDriver(storedConnection);
const version = await driver.getVersion(systemConnection);
const version = await driver.getVersion(dbhan);
logger.debug(`Got server version: ${version.version}`);
process.send({ msgtype: 'version', version });
serverVersion = version;
}
@@ -114,8 +132,13 @@ async function handleConnect({ connection, structure, globalSettings }) {
if (!structure) setStatusName('pending');
const driver = requireEngineDriver(storedConnection);
systemConnection = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app'));
systemConnection.feedback = feedback => setStatus({ feedback });
dbhan = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app'));
logger.debug(
`Connected to database, driver: ${storedConnection.engine}, separate schemas: ${
storedConnection.useSeparateSchemas ? 'YES' : 'NO'
}, 'DB: ${dbNameLogCategory(dbhan.database)} }`
);
dbhan.feedback = feedback => setStatus({ feedback });
await checkedAsyncCall(readVersion());
if (structure) {
analysedStructure = structure;
@@ -138,7 +161,7 @@ async function handleConnect({ connection, structure, globalSettings }) {
}
function waitConnected() {
if (systemConnection) return Promise.resolve();
if (dbhan) return Promise.resolve();
return new Promise((resolve, reject) => {
afterConnectCallbacks.push([resolve, reject]);
});
@@ -163,10 +186,14 @@ async function handleRunScript({ msgid, sql, useTransaction }, skipReadonlyCheck
const driver = requireEngineDriver(storedConnection);
try {
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
await driver.script(systemConnection, sql, { useTransaction });
await driver.script(dbhan, sql, { useTransaction });
process.send({ msgtype: 'response', msgid });
} catch (err) {
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
process.send({
msgtype: 'response',
msgid,
errorMessage: extractErrorMessage(err, 'Error executing SQL script'),
});
}
}
@@ -175,10 +202,14 @@ async function handleRunOperation({ msgid, operation, useTransaction }, skipRead
const driver = requireEngineDriver(storedConnection);
try {
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
await driver.operation(systemConnection, operation, { useTransaction });
await driver.operation(dbhan, operation, { useTransaction });
process.send({ msgtype: 'response', msgid });
} catch (err) {
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
process.send({
msgtype: 'response',
msgid,
errorMessage: extractErrorMessage(err, 'Error executing DB operation'),
});
}
}
@@ -188,10 +219,14 @@ async function handleQueryData({ msgid, sql }, skipReadonlyCheck = false) {
try {
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
// console.log(sql);
const res = await driver.query(systemConnection, sql);
const res = await driver.query(dbhan, sql);
process.send({ msgtype: 'response', msgid, ...res });
} catch (err) {
process.send({ msgtype: 'response', msgid, errorMessage: err.message || 'Error executing SQL script' });
process.send({
msgtype: 'response',
msgid,
errorMessage: extractErrorMessage(err, 'Error executing SQL script'),
});
}
}
@@ -202,52 +237,64 @@ async function handleSqlSelect({ msgid, select }) {
return handleQueryData({ msgid, sql: dmp.s }, true);
}
async function handleDriverDataCore(msgid, callMethod) {
async function handleDriverDataCore(msgid, callMethod, { logName }) {
await waitConnected();
const driver = requireEngineDriver(storedConnection);
try {
const result = await callMethod(driver);
process.send({ msgtype: 'response', msgid, result });
} catch (err) {
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
logger.error(extractErrorLogData(err, { logName }), `Error when handling message ${logName}`);
process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error executing DB data') });
}
}
async function handleSchemaList({ msgid }) {
logger.debug('Loading schema list');
return handleDriverDataCore(msgid, driver => driver.listSchemas(dbhan), { logName: 'listSchemas' });
}
async function handleCollectionData({ msgid, options }) {
return handleDriverDataCore(msgid, driver => driver.readCollection(systemConnection, options));
return handleDriverDataCore(msgid, driver => driver.readCollection(dbhan, options), { logName: 'readCollection' });
}
async function handleLoadKeys({ msgid, root, filter }) {
return handleDriverDataCore(msgid, driver => driver.loadKeys(systemConnection, root, filter));
return handleDriverDataCore(msgid, driver => driver.loadKeys(dbhan, root, filter), { logName: 'loadKeys' });
}
async function handleExportKeys({ msgid, options }) {
return handleDriverDataCore(msgid, driver => driver.exportKeys(systemConnection, options));
return handleDriverDataCore(msgid, driver => driver.exportKeys(dbhan, options), { logName: 'exportKeys' });
}
async function handleLoadKeyInfo({ msgid, key }) {
return handleDriverDataCore(msgid, driver => driver.loadKeyInfo(systemConnection, key));
return handleDriverDataCore(msgid, driver => driver.loadKeyInfo(dbhan, key), { logName: 'loadKeyInfo' });
}
async function handleCallMethod({ msgid, method, args }) {
return handleDriverDataCore(msgid, driver => {
if (storedConnection.isReadOnly) {
throw new Error('Connection is read only, cannot call custom methods');
}
return handleDriverDataCore(
msgid,
driver => {
if (storedConnection.isReadOnly) {
throw new Error('Connection is read only, cannot call custom methods');
}
ensureExecuteCustomScript(driver);
return driver.callMethod(systemConnection, method, args);
});
ensureExecuteCustomScript(driver);
return driver.callMethod(dbhan, method, args);
},
{ logName: `callMethod:${method}` }
);
}
async function handleLoadKeyTableRange({ msgid, key, cursor, count }) {
return handleDriverDataCore(msgid, driver => driver.loadKeyTableRange(systemConnection, key, cursor, count));
return handleDriverDataCore(msgid, driver => driver.loadKeyTableRange(dbhan, key, cursor, count), {
logName: 'loadKeyTableRange',
});
}
async function handleLoadFieldValues({ msgid, schemaName, pureName, field, search }) {
return handleDriverDataCore(msgid, driver =>
driver.loadFieldValues(systemConnection, { schemaName, pureName }, field, search)
);
return handleDriverDataCore(msgid, driver => driver.loadFieldValues(dbhan, { schemaName, pureName }, field, search), {
logName: 'loadFieldValues',
});
}
function ensureExecuteCustomScript(driver) {
@@ -264,10 +311,10 @@ async function handleUpdateCollection({ msgid, changeSet }) {
const driver = requireEngineDriver(storedConnection);
try {
ensureExecuteCustomScript(driver);
const result = await driver.updateCollection(systemConnection, changeSet);
const result = await driver.updateCollection(dbhan, changeSet);
process.send({ msgtype: 'response', msgid, result });
} catch (err) {
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error updating collection') });
}
}
@@ -277,18 +324,24 @@ async function handleSqlPreview({ msgid, objects, options }) {
try {
const dmp = driver.createDumper();
const generator = new SqlGenerator(analysedStructure, options, objects, dmp, driver, systemConnection);
const generator = new SqlGenerator(analysedStructure, options, objects, dmp, driver, dbhan);
await generator.dump();
process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated });
if (generator.isUnhandledException) {
setTimeout(() => {
setTimeout(async () => {
logger.error('Exiting because of unhandled exception');
await driver.close(dbhan);
process.exit(0);
}, 500);
}
} catch (err) {
process.send({ msgtype: 'response', msgid, isError: true, errorMessage: err.message });
process.send({
msgtype: 'response',
msgid,
isError: true,
errorMessage: extractErrorMessage(err, 'Error generating SQL preview'),
});
}
}
@@ -297,14 +350,19 @@ async function handleGenerateDeploySql({ msgid, modelFolder }) {
try {
const res = await generateDeploySql({
systemConnection,
systemConnection: dbhan,
connection: storedConnection,
analysedStructure,
modelFolder,
});
process.send({ ...res, msgtype: 'response', msgid });
} catch (err) {
process.send({ msgtype: 'response', msgid, isError: true, errorMessage: err.message });
process.send({
msgtype: 'response',
msgid,
isError: true,
errorMessage: extractErrorMessage(err, 'Error generating deploy SQL'),
});
}
}
@@ -337,6 +395,7 @@ const messageHandlers = {
loadFieldValues: handleLoadFieldValues,
sqlSelect: handleSqlSelect,
exportKeys: handleExportKeys,
schemaList: handleSchemaList,
// runCommand: handleRunCommand,
};
@@ -348,10 +407,12 @@ async function handleMessage({ msgtype, ...other }) {
function start() {
childProcessChecker();
setInterval(() => {
setInterval(async () => {
const time = new Date().getTime();
if (time - lastPing > 40 * 1000) {
logger.info('Database connection not alive, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);
}
}, 10 * 1000);
@@ -361,8 +422,8 @@ function start() {
try {
await handleMessage(message);
} catch (err) {
logger.error({ err }, 'Error in DB connection');
process.send({ msgtype: 'error', error: err.message });
logger.error(extractErrorLogData(err), 'Error in DB connection');
process.send({ msgtype: 'error', error: extractErrorMessage(err, 'Error processing message') });
}
});
}

View File

@@ -1,12 +1,12 @@
const stableStringify = require('json-stable-stringify');
const { extractBoolSettingsValue, extractIntSettingsValue, getLogger } = require('dbgate-tools');
const { extractBoolSettingsValue, extractIntSettingsValue, getLogger, extractErrorLogData } = require('dbgate-tools');
const childProcessChecker = require('../utility/childProcessChecker');
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const { handleProcessCommunication } = require('../utility/processComm');
const logger = getLogger('srvconnProcess');
let systemConnection;
let dbhan;
let storedConnection;
let lastDatabases = null;
let lastStatus = null;
@@ -16,7 +16,7 @@ let afterConnectCallbacks = [];
async function handleRefresh() {
const driver = requireEngineDriver(storedConnection);
try {
let databases = await driver.listDatabases(systemConnection);
let databases = await driver.listDatabases(dbhan);
if (storedConnection?.allowedDatabases?.trim()) {
const allowedDatabaseList = storedConnection.allowedDatabases
.split('\n')
@@ -39,14 +39,14 @@ async function handleRefresh() {
name: 'error',
message: err.message,
});
// console.error(err);
logger.error(extractErrorLogData(err), 'Error refreshing server databases');
setTimeout(() => process.exit(1), 1000);
}
}
async function readVersion() {
const driver = requireEngineDriver(storedConnection);
const version = await driver.getVersion(systemConnection);
const version = await driver.getVersion(dbhan);
process.send({ msgtype: 'version', version });
}
@@ -70,7 +70,7 @@ async function handleConnect(connection) {
const driver = requireEngineDriver(storedConnection);
try {
systemConnection = await connectUtility(driver, storedConnection, 'app');
dbhan = await connectUtility(driver, storedConnection, 'app');
readVersion();
handleRefresh();
if (extractBoolSettingsValue(globalSettings, 'connection.autoRefresh', false)) {
@@ -84,7 +84,7 @@ async function handleConnect(connection) {
name: 'error',
message: err.message,
});
// console.error(err);
logger.error(extractErrorLogData(err), 'Error connecting to server');
setTimeout(() => process.exit(1), 1000);
}
@@ -95,7 +95,7 @@ async function handleConnect(connection) {
}
function waitConnected() {
if (systemConnection) return Promise.resolve();
if (dbhan) return Promise.resolve();
return new Promise((resolve, reject) => {
afterConnectCallbacks.push([resolve, reject]);
});
@@ -108,14 +108,14 @@ function handlePing() {
async function handleDatabaseOp(op, { msgid, name }) {
try {
const driver = requireEngineDriver(storedConnection);
systemConnection = await connectUtility(driver, storedConnection, 'app');
dbhan = await connectUtility(driver, storedConnection, 'app');
if (driver[op]) {
await driver[op](systemConnection, name);
await driver[op](dbhan, name);
} else {
const dmp = driver.createDumper();
dmp[op](name);
logger.info({ sql: dmp.s }, 'Running script');
await driver.query(systemConnection, dmp.s);
await driver.query(dbhan, dmp.s, { discardResult: true });
}
await handleRefresh();
@@ -137,11 +137,11 @@ async function handleDriverDataCore(msgid, callMethod) {
}
async function handleServerSummary({ msgid }) {
return handleDriverDataCore(msgid, driver => driver.serverSummary(systemConnection));
return handleDriverDataCore(msgid, driver => driver.serverSummary(dbhan));
}
async function handleSummaryCommand({ msgid, command, row }) {
return handleDriverDataCore(msgid, driver => driver.summaryCommand(systemConnection, command, row));
return handleDriverDataCore(msgid, driver => driver.summaryCommand(dbhan, command, row));
}
const messageHandlers = {
@@ -161,10 +161,12 @@ async function handleMessage({ msgtype, ...other }) {
function start() {
childProcessChecker();
setInterval(() => {
setInterval(async () => {
const time = new Date().getTime();
if (time - lastPing > 40 * 1000) {
logger.info('Server connection not alive, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);
}
}, 10 * 1000);
@@ -178,6 +180,7 @@ function start() {
name: 'error',
message: err.message,
});
logger.error(extractErrorLogData(err), `Error processing message ${message?.['msgtype']}`);
}
});
}

View File

@@ -14,7 +14,7 @@ const { getLogger, extractIntSettingsValue, extractBoolSettingsValue } = require
const logger = getLogger('sessionProcess');
let systemConnection;
let dbhan;
let storedConnection;
let afterConnectCallbacks = [];
// let currentHandlers = [];
@@ -177,7 +177,7 @@ function handleStream(driver, resultIndexHolder, sqlItem) {
return new Promise((resolve, reject) => {
const start = sqlItem.trimStart || sqlItem.start;
const handler = new StreamHandler(resultIndexHolder, resolve, start && start.line);
driver.stream(systemConnection, sqlItem.text, handler);
driver.stream(dbhan, sqlItem.text, handler);
});
}
@@ -196,7 +196,7 @@ async function handleConnect(connection) {
storedConnection = connection;
const driver = requireEngineDriver(storedConnection);
systemConnection = await connectUtility(driver, storedConnection, 'app');
dbhan = await connectUtility(driver, storedConnection, 'app');
for (const [resolve] of afterConnectCallbacks) {
resolve();
}
@@ -210,7 +210,7 @@ async function handleConnect(connection) {
// }
function waitConnected() {
if (systemConnection) return Promise.resolve();
if (dbhan) return Promise.resolve();
return new Promise((resolve, reject) => {
afterConnectCallbacks.push([resolve, reject]);
});
@@ -230,7 +230,7 @@ async function handleStartProfiler({ jslid }) {
const writer = new TableWriter();
writer.initializeFromReader(jslid);
currentProfiler = await driver.startProfiler(systemConnection, {
currentProfiler = await driver.startProfiler(dbhan, {
row: data => writer.rowFromReader(data),
});
currentProfiler.writer = writer;
@@ -241,7 +241,7 @@ async function handleStopProfiler({ jslid }) {
const driver = requireEngineDriver(storedConnection);
currentProfiler.writer.close();
driver.stopProfiler(systemConnection, currentProfiler);
driver.stopProfiler(dbhan, currentProfiler);
currentProfiler = null;
}
@@ -304,7 +304,7 @@ async function handleExecuteReader({ jslid, sql, fileName }) {
const writer = new TableWriter();
writer.initializeFromReader(jslid);
const reader = await driver.readQuery(systemConnection, sql);
const reader = await driver.readQuery(dbhan, sql);
reader.on('data', data => {
writer.rowFromReader(data);
@@ -340,10 +340,12 @@ function start() {
lastPing = new Date().getTime();
setInterval(() => {
setInterval(async () => {
const time = new Date().getTime();
if (time - lastPing > 25 * 1000) {
logger.info('Session not alive, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);
}
@@ -362,6 +364,8 @@ function start() {
executingScripts == 0
) {
logger.info('Session not active, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);
}
}, 10 * 1000);

View File

@@ -3,7 +3,7 @@ const platformInfo = require('../utility/platformInfo');
const childProcessChecker = require('../utility/childProcessChecker');
const { handleProcessCommunication } = require('../utility/processComm');
const { SSHConnection } = require('../utility/SSHConnection');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData, extractErrorMessage } = require('dbgate-tools');
const logger = getLogger('sshProcess');
@@ -40,13 +40,13 @@ async function handleStart({ connection, tunnelConfig }) {
tunnelConfig,
});
} catch (err) {
logger.error({ err }, 'Error creating SSH tunnel connection:');
logger.error(extractErrorLogData(err), 'Error creating SSH tunnel connection:');
process.send({
msgtype: 'error',
connection,
tunnelConfig,
errorMessage: err.message,
errorMessage: extractErrorMessage(err.message),
});
}
}

View File

@@ -19,32 +19,39 @@ async function dataDuplicator({
systemConnection,
}) {
if (!driver) driver = requireEngineDriver(connection);
const pool = systemConnection || (await connectUtility(driver, connection, 'write'));
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
logger.info(`Connected.`);
try {
logger.info(`Connected.`);
if (!analysedStructure) {
analysedStructure = await driver.analyseFull(pool);
if (!analysedStructure) {
analysedStructure = await driver.analyseFull(dbhan);
}
const dupl = new DataDuplicator(
dbhan,
driver,
analysedStructure,
items.map(item => ({
name: item.name,
operation: item.operation,
matchColumns: item.matchColumns,
openStream:
item.openStream ||
(() => jsonLinesReader({ fileName: path.join(resolveArchiveFolder(archive), `${item.name}.jsonl`) })),
})),
stream,
copyStream,
options
);
await dupl.run();
} finally {
if (!systemConnection) {
await driver.close(dbhan);
}
}
const dupl = new DataDuplicator(
pool,
driver,
analysedStructure,
items.map(item => ({
name: item.name,
operation: item.operation,
matchColumns: item.matchColumns,
openStream:
item.openStream ||
(() => jsonLinesReader({ fileName: path.join(resolveArchiveFolder(archive), `${item.name}.jsonl`) })),
})),
stream,
copyStream,
options
);
await dupl.run();
}
module.exports = dataDuplicator;

View File

@@ -27,15 +27,23 @@ async function dumpDatabase({
logger.info(`Dumping database`);
if (!driver) driver = requireEngineDriver(connection);
const pool = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
logger.info(`Connected.`);
const dumper = await driver.createBackupDumper(pool, {
outputFile,
databaseName,
schemaName,
});
await doDump(dumper);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
try {
logger.info(`Connected.`);
const dumper = await driver.createBackupDumper(dbhan, {
outputFile,
databaseName,
schemaName,
});
await doDump(dumper);
} finally {
if (!systemConnection) {
await driver.close(dbhan);
}
}
}
module.exports = dumpDatabase;

View File

@@ -8,10 +8,17 @@ async function executeQuery({ connection = undefined, systemConnection = undefin
logger.info({ sql }, `Execute query`);
if (!driver) driver = requireEngineDriver(connection);
const pool = systemConnection || (await connectUtility(driver, connection, 'script'));
logger.info(`Connected.`);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'script'));
await driver.script(pool, sql);
try {
logger.info(`Connected.`);
await driver.script(dbhan, sql);
} finally {
if (!systemConnection) {
await driver.close(dbhan);
}
}
}
module.exports = executeQuery;

View File

@@ -21,41 +21,49 @@ async function generateDeploySql({
}) {
if (!driver) driver = requireEngineDriver(connection);
const pool = systemConnection || (await connectUtility(driver, connection, 'read'));
if (!analysedStructure) {
analysedStructure = await driver.analyseFull(pool);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
try {
if (!analysedStructure) {
analysedStructure = await driver.analyseFull(dbhan);
}
const deployedModel = generateDbPairingId(
extendDatabaseInfo(loadedDbModel ? databaseInfoFromYamlModel(loadedDbModel) : await importDbModel(modelFolder))
);
const currentModel = generateDbPairingId(extendDatabaseInfo(analysedStructure));
const opts = {
...modelCompareDbDiffOptions,
noDropTable: true,
noDropColumn: true,
noDropConstraint: true,
noDropSqlObject: true,
noRenameTable: true,
noRenameColumn: true,
};
const currentModelPaired = matchPairedObjects(deployedModel, currentModel, opts);
const currentModelPairedPreloaded = await enrichWithPreloadedRows(deployedModel, currentModelPaired, dbhan, driver);
// console.log('currentModelPairedPreloaded', currentModelPairedPreloaded.tables[0]);
// console.log('deployedModel', deployedModel.tables[0]);
// console.log('currentModel', currentModel.tables[0]);
// console.log('currentModelPaired', currentModelPaired.tables[0]);
const res = getAlterDatabaseScript(
currentModelPairedPreloaded,
deployedModel,
opts,
currentModelPairedPreloaded,
deployedModel,
driver
);
return res;
} finally {
if (!systemConnection) {
await driver.close(dbhan);
}
}
const deployedModel = generateDbPairingId(
extendDatabaseInfo(loadedDbModel ? databaseInfoFromYamlModel(loadedDbModel) : await importDbModel(modelFolder))
);
const currentModel = generateDbPairingId(extendDatabaseInfo(analysedStructure));
const opts = {
...modelCompareDbDiffOptions,
noDropTable: true,
noDropColumn: true,
noDropConstraint: true,
noDropSqlObject: true,
noRenameTable: true,
noRenameColumn: true,
};
const currentModelPaired = matchPairedObjects(deployedModel, currentModel, opts);
const currentModelPairedPreloaded = await enrichWithPreloadedRows(deployedModel, currentModelPaired, pool, driver);
// console.log('currentModelPairedPreloaded', currentModelPairedPreloaded.tables[0]);
// console.log('deployedModel', deployedModel.tables[0]);
// console.log('currentModel', currentModel.tables[0]);
// console.log('currentModelPaired', currentModelPaired.tables[0]);
const res = getAlterDatabaseScript(
currentModelPairedPreloaded,
deployedModel,
opts,
currentModelPairedPreloaded,
deployedModel,
driver
);
return res;
}
module.exports = generateDeploySql;

View File

@@ -9,14 +9,28 @@ const { getLogger } = require('dbgate-tools');
const logger = getLogger('importDb');
class ImportStream extends stream.Transform {
constructor(pool, driver) {
constructor(dbhan, driver) {
super({ objectMode: true });
this.pool = pool;
this.dbhan = dbhan;
this.driver = driver;
this.writeQueryStream = null;
}
async _transform(chunk, encoding, cb) {
try {
await this.driver.script(this.pool, chunk);
if (chunk.specialMarker == 'copy_stdin_start') {
this.writeQueryStream = await this.driver.writeQueryFromStream(this.dbhan, chunk.text);
} else if (chunk.specialMarker == 'copy_stdin_line') {
this.writeQueryStream.write(chunk.text);
} else if (chunk.specialMarker == 'copy_stdin_end') {
this.writeQueryStream.end();
await new Promise((resolve, reject) => {
this.writeQueryStream.on('finish', resolve);
this.writeQueryStream.on('error', reject);
});
this.writeQueryStream = null;
} else {
await this.driver.script(this.dbhan, chunk.text, { queryOptions: { importSqlDump: true } });
}
} catch (err) {
this.emit('error', err.message);
}
@@ -44,17 +58,28 @@ async function importDatabase({ connection = undefined, systemConnection = undef
logger.info(`Importing database`);
if (!driver) driver = requireEngineDriver(connection);
const pool = systemConnection || (await connectUtility(driver, connection, 'write'));
logger.info(`Connected.`);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
try {
logger.info(`Connected.`);
const downloadedFile = await download(inputFile);
logger.info(`Input file: ${inputFile}`);
const downloadedFile = await download(inputFile);
logger.info(`Downloaded file: ${downloadedFile}`);
const fileStream = fs.createReadStream(downloadedFile, 'utf-8');
const splittedStream = splitQueryStream(fileStream, driver.getQuerySplitterOptions('script'));
const importStream = new ImportStream(pool, driver);
// @ts-ignore
splittedStream.pipe(importStream);
await awaitStreamEnd(importStream);
const fileStream = fs.createReadStream(downloadedFile, 'utf-8');
const splittedStream = splitQueryStream(fileStream, {
...driver.getQuerySplitterOptions('import'),
returnRichInfo: true,
});
const importStream = new ImportStream(dbhan, driver);
// @ts-ignore
splittedStream.pipe(importStream);
await awaitStreamEnd(importStream);
} finally {
if (!systemConnection) {
await driver.close(dbhan);
}
}
}
module.exports = importDatabase;

View File

@@ -6,7 +6,7 @@ const copyStream = require('./copyStream');
const fakeObjectReader = require('./fakeObjectReader');
const consoleObjectWriter = require('./consoleObjectWriter');
const jsonLinesWriter = require('./jsonLinesWriter');
const jsonArrayWriter = require('./jsonArrayWriter');
const jsonWriter = require('./jsonWriter');
const jsonLinesReader = require('./jsonLinesReader');
const sqlDataWriter = require('./sqlDataWriter');
const jslDataReader = require('./jslDataReader');
@@ -29,6 +29,7 @@ const modifyJsonLinesReader = require('./modifyJsonLinesReader');
const dataDuplicator = require('./dataDuplicator');
const dbModelToJson = require('./dbModelToJson');
const jsonToDbModel = require('./jsonToDbModel');
const jsonReader = require('./jsonReader');
const dbgateApi = {
queryReader,
@@ -37,8 +38,9 @@ const dbgateApi = {
tableReader,
copyStream,
jsonLinesWriter,
jsonArrayWriter,
jsonLinesReader,
jsonReader,
jsonWriter,
sqlDataWriter,
fakeObjectReader,
consoleObjectWriter,

View File

@@ -1,52 +0,0 @@
const { getLogger } = require('dbgate-tools');
const fs = require('fs');
const stream = require('stream');
const logger = getLogger('jsonArrayWriter');
class StringifyStream extends stream.Transform {
constructor() {
super({ objectMode: true });
this.wasHeader = false;
this.wasRecord = false;
}
_transform(chunk, encoding, done) {
let skip = false;
if (!this.wasHeader) {
skip = chunk.__isStreamHeader;
this.wasHeader = true;
}
if (!skip) {
if (!this.wasRecord) {
this.push('[\n');
} else {
this.push(',\n');
}
this.wasRecord = true;
this.push(JSON.stringify(chunk));
}
done();
}
_flush(done) {
if (!this.wasRecord) {
this.push('[]\n');
} else {
this.push('\n]\n');
}
done();
}
}
/**
 * Creates a writable object stream that stores incoming rows as a JSON array
 * in the given file.
 *
 * @param {object} options
 * @param {string} options.fileName - target file path
 * @param {string} [options.encoding='utf-8'] - output file encoding
 * @returns {Promise<import('stream').Transform>} object stream accepting row objects
 */
async function jsonArrayWriter({ fileName, encoding = 'utf-8' }) {
  logger.info(`Writing file ${fileName}`);
  const output = fs.createWriteStream(fileName, encoding);
  const serializer = new StringifyStream();
  serializer.pipe(output);
  // Consumers wait for the underlying file stream to finish via the
  // 'finisher' slot (project convention for writer factories).
  serializer['finisher'] = output;
  return serializer;
}
module.exports = jsonArrayWriter;

View File

@@ -2,6 +2,7 @@ const fs = require('fs');
const stream = require('stream');
const byline = require('byline');
const { getLogger } = require('dbgate-tools');
const download = require('./download');
const logger = getLogger('jsonLinesReader');
class ParseStream extends stream.Transform {
@@ -35,8 +36,10 @@ class ParseStream extends stream.Transform {
async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) {
logger.info(`Reading file ${fileName}`);
const downloadedFile = await download(fileName);
const fileStream = fs.createReadStream(
fileName,
downloadedFile,
// @ts-ignore
encoding
);

View File

@@ -0,0 +1,84 @@
const fs = require('fs');
const stream = require('stream');
const byline = require('byline');
const { getLogger } = require('dbgate-tools');
const { parser } = require('stream-json');
const { pick } = require('stream-json/filters/Pick');
const { streamArray } = require('stream-json/streamers/StreamArray');
const { streamObject } = require('stream-json/streamers/StreamObject');
const download = require('./download');
const logger = getLogger('jsonReader');
class ParseStream extends stream.Transform {
constructor({ limitRows, jsonStyle, keyField }) {
super({ objectMode: true });
this.wasHeader = false;
this.limitRows = limitRows;
this.jsonStyle = jsonStyle;
this.keyField = keyField || '_key';
this.rowsWritten = 0;
}
_transform(chunk, encoding, done) {
if (!this.wasHeader) {
this.push({
__isStreamHeader: true,
__isDynamicStructure: true,
});
this.wasHeader = true;
}
if (!this.limitRows || this.rowsWritten < this.limitRows) {
if (this.jsonStyle === 'object') {
this.push({
...chunk.value,
[this.keyField]: chunk.key,
});
} else {
this.push(chunk.value);
}
this.rowsWritten += 1;
}
done();
}
}
/**
 * Creates an object-mode readable stream of rows parsed from a JSON file.
 *
 * @param {object} options
 * @param {string} options.fileName - local path or downloadable URL of the JSON file
 * @param {'object'|undefined} options.jsonStyle - 'object' streams key/value pairs
 *   of a JSON object; any other value streams elements of a JSON array
 * @param {string} [options.keyField='_key'] - row property receiving the object key
 * @param {string} [options.rootField] - when set, only the subtree under this
 *   top-level field is parsed
 * @param {string} [options.encoding='utf-8'] - input file encoding
 * @param {number} [options.limitRows] - maximum number of rows to emit
 * @returns {Promise<import('stream').Transform>} stream of row objects
 */
async function jsonReader({
  fileName,
  jsonStyle,
  keyField = '_key',
  rootField = null,
  encoding = 'utf-8',
  limitRows = undefined,
}) {
  logger.info(`Reading file ${fileName}`);
  const downloadedFile = await download(fileName);
  const fileStream = fs.createReadStream(
    downloadedFile,
    // @ts-ignore
    encoding
  );

  // Tokenize the raw JSON text, then stream either object entries or array items.
  const jsonTokenizer = parser();
  fileStream.pipe(jsonTokenizer);
  const valueStreamer = jsonStyle === 'object' ? streamObject() : streamArray();
  if (rootField) {
    // Restrict parsing to the configured root field's subtree.
    const rootFilter = pick({ filter: rootField });
    jsonTokenizer.pipe(rootFilter);
    rootFilter.pipe(valueStreamer);
  } else {
    jsonTokenizer.pipe(valueStreamer);
  }

  const parseStream = new ParseStream({ limitRows, jsonStyle, keyField });
  valueStreamer.pipe(parseStream);
  return parseStream;
}
module.exports = jsonReader;

View File

@@ -0,0 +1,97 @@
const { getLogger } = require('dbgate-tools');
const fs = require('fs');
const stream = require('stream');
const _ = require('lodash');
const logger = getLogger('jsonArrayWriter');
class StringifyStream extends stream.Transform {
constructor({ jsonStyle, keyField, rootField }) {
super({ objectMode: true });
this.wasHeader = false;
this.wasRecord = false;
this.jsonStyle = jsonStyle;
this.keyField = keyField || '_key';
this.rootField = rootField;
}
_transform(chunk, encoding, done) {
let skip = false;
if (!this.wasHeader) {
skip = chunk.__isStreamHeader;
this.wasHeader = true;
}
if (!skip) {
if (!this.wasRecord) {
if (this.rootField) {
if (this.jsonStyle === 'object') {
this.push(`{"${this.rootField}": {\n`);
} else {
this.push(`{"${this.rootField}": [\n`);
}
} else {
if (this.jsonStyle === 'object') {
this.push('{\n');
} else {
this.push('[\n');
}
}
} else {
this.push(',\n');
}
this.wasRecord = true;
if (this.jsonStyle === 'object') {
const key = chunk[this.keyField] ?? chunk[Object.keys(chunk)[0]];
this.push(`"${key}": ${JSON.stringify(_.omit(chunk, [this.keyField]))}`);
} else {
this.push(JSON.stringify(chunk));
}
}
done();
}
_flush(done) {
if (!this.wasRecord) {
if (this.rootField) {
if (this.jsonStyle === 'object') {
this.push(`{"${this.rootField}": {}}\n`);
} else {
this.push(`{"${this.rootField}": []}\n`);
}
} else {
if (this.jsonStyle === 'object') {
this.push('{}\n');
} else {
this.push('[]\n');
}
}
} else {
if (this.rootField) {
if (this.jsonStyle === 'object') {
this.push('\n}}\n');
} else {
this.push('\n]}\n');
}
} else {
if (this.jsonStyle === 'object') {
this.push('\n}\n');
} else {
this.push('\n]\n');
}
}
}
done();
}
}
/**
 * Creates a writable object stream that stores incoming rows as a JSON
 * document (array or keyed object, optionally wrapped in a root field) in the
 * given file.
 *
 * @param {object} options
 * @param {string} options.fileName - target file path
 * @param {'object'|undefined} options.jsonStyle - output layout selector
 * @param {string} [options.keyField='_key'] - row property used as object key
 * @param {string} [options.rootField] - optional wrapping field name
 * @param {string} [options.encoding='utf-8'] - output file encoding
 * @returns {Promise<import('stream').Transform>} object stream accepting row objects
 */
async function jsonWriter({ fileName, jsonStyle, keyField = '_key', rootField, encoding = 'utf-8' }) {
  logger.info(`Writing file ${fileName}`);
  const output = fs.createWriteStream(fileName, encoding);
  const serializer = new StringifyStream({ jsonStyle, keyField, rootField });
  serializer.pipe(output);
  // Consumers wait for the underlying file stream to finish via the
  // 'finisher' slot (project convention for writer factories).
  serializer['finisher'] = output;
  return serializer;
}
module.exports = jsonWriter;

View File

@@ -9,13 +9,19 @@ async function loadDatabase({ connection = undefined, systemConnection = undefin
logger.info(`Analysing database`);
if (!driver) driver = requireEngineDriver(connection);
const pool = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
logger.info(`Connected.`);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
try {
logger.info(`Connected.`);
const dbInfo = await driver.analyseFull(pool);
logger.info(`Analyse finished`);
const dbInfo = await driver.analyseFull(dbhan);
logger.info(`Analyse finished`);
await exportDbModel(dbInfo, outputDir);
await exportDbModel(dbInfo, outputDir);
} finally {
if (!systemConnection) {
await driver.close(dbhan);
}
}
}
module.exports = loadDatabase;

View File

@@ -66,7 +66,7 @@ class ParseStream extends stream.Transform {
...obj,
...update.fields,
},
(v, k) => v.$$undefined$$
(v, k) => v?.$$undefined$$
);
}
}

View File

@@ -1,4 +1,4 @@
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const childProcessChecker = require('../utility/childProcessChecker');
const processArgs = require('../utility/processArgs');
const logger = getLogger();
@@ -11,7 +11,7 @@ async function runScript(func) {
await func();
process.exit(0);
} catch (err) {
logger.error({ err }, `Error running script: ${err.message}`);
logger.error(extractErrorLogData(err), `Error running script`);
process.exit(1);
}
}

View File

@@ -3,9 +3,9 @@ const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const logger = getLogger('tableReader');
async function tableReader({ connection, pureName, schemaName }) {
async function tableReader({ connection, systemConnection, pureName, schemaName }) {
const driver = requireEngineDriver(connection);
const pool = await connectUtility(driver, connection, 'read');
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
logger.info(`Connected.`);
const fullName = { pureName, schemaName };
@@ -14,26 +14,26 @@ async function tableReader({ connection, pureName, schemaName }) {
// @ts-ignore
logger.info(`Reading collection ${fullNameToString(fullName)}`);
// @ts-ignore
return await driver.readQuery(pool, JSON.stringify(fullName));
return await driver.readQuery(dbhan, JSON.stringify(fullName));
}
const table = await driver.analyseSingleObject(pool, fullName, 'tables');
const table = await driver.analyseSingleObject(dbhan, fullName, 'tables');
const query = `select * from ${quoteFullName(driver.dialect, fullName)}`;
if (table) {
// @ts-ignore
logger.info(`Reading table ${fullNameToString(table)}`);
// @ts-ignore
return await driver.readQuery(pool, query, table);
return await driver.readQuery(dbhan, query, table);
}
const view = await driver.analyseSingleObject(pool, fullName, 'views');
const view = await driver.analyseSingleObject(dbhan, fullName, 'views');
if (view) {
// @ts-ignore
logger.info(`Reading view ${fullNameToString(view)}`);
// @ts-ignore
return await driver.readQuery(pool, query, view);
return await driver.readQuery(dbhan, query, view);
}
return await driver.readQuery(pool, query);
return await driver.readQuery(dbhan, query);
}
module.exports = tableReader;

View File

@@ -9,10 +9,10 @@ async function tableWriter({ connection, schemaName, pureName, driver, systemCon
if (!driver) {
driver = requireEngineDriver(connection);
}
const pool = systemConnection || (await connectUtility(driver, connection, 'write'));
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
logger.info(`Connected.`);
return await driver.writeTable(pool, { schemaName, pureName }, options);
return await driver.writeTable(dbhan, { schemaName, pureName }, options);
}
module.exports = tableWriter;

View File

@@ -3,7 +3,7 @@ const { fork } = require('child_process');
const { handleProcessCommunication } = require('./processComm');
const processArgs = require('../utility/processArgs');
const pipeForkLogs = require('./pipeForkLogs');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('DatastoreProxy');
class DatastoreProxy {
@@ -73,7 +73,7 @@ class DatastoreProxy {
try {
this.subprocess.send({ msgtype: 'read', msgid, offset, limit });
} catch (err) {
logger.error({ err }, 'Error getting rows');
logger.error(extractErrorLogData(err), 'Error getting rows');
this.subprocess = null;
}
});
@@ -87,7 +87,7 @@ class DatastoreProxy {
try {
this.subprocess.send({ msgtype: 'notify', msgid });
} catch (err) {
logger.error({ err }, 'Error notifying subprocess');
logger.error(extractErrorLogData(err), 'Error notifying subprocess');
this.subprocess = null;
}
});

View File

@@ -16,10 +16,20 @@ function getAuthProxyUrl() {
return 'https://auth.dbgate.eu';
}
function supportsAwsIam() {
return false;
}
async function getAwsIamToken(params) {
return null;
}
module.exports = {
isAuthProxySupported,
authProxyGetRedirectUrl,
authProxyGetTokenFromCode,
startTokenChecking,
getAuthProxyUrl,
supportsAwsIam,
getAwsIamToken,
};

View File

@@ -5,6 +5,14 @@ function checkLicense() {
};
}
function checkLicenseKey(key) {
return {
status: 'ok',
type: 'community',
};
}
module.exports = {
checkLicense,
checkLicenseKey,
};

View File

@@ -1,4 +1,4 @@
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('childProcessChecked');
@@ -12,7 +12,7 @@ function childProcessChecker() {
// This will come once parent dies.
// One way can be to check for error code ERR_IPC_CHANNEL_CLOSED
// and call process.exit()
logger.error({ err }, 'parent died');
logger.error(extractErrorLogData(err), 'parent died');
process.exit(1);
}
}, 1000);

View File

@@ -3,9 +3,10 @@ const { decryptConnection } = require('./crypting');
const { getSshTunnelProxy } = require('./sshTunnelProxy');
const platformInfo = require('../utility/platformInfo');
const connections = require('../controllers/connections');
const _ = require('lodash');
async function loadConnection(driver, storedConnection, connectionMode) {
const { allowShellConnection } = platformInfo;
const { allowShellConnection, allowConnectionFromEnvVariables } = platformInfo;
if (connectionMode == 'app') {
return storedConnection;
@@ -33,6 +34,16 @@ async function loadConnection(driver, storedConnection, connectionMode) {
}
return loadedWithDb;
}
if (allowConnectionFromEnvVariables) {
return _.mapValues(storedConnection, (value, key) => {
if (_.isString(value) && value.startsWith('${') && value.endsWith('}')) {
return process.env[value.slice(2, -1)];
}
return value;
});
}
return storedConnection;
}

View File

@@ -71,9 +71,15 @@ function packagedPluginsDir() {
if (platformInfo.isDevMode) {
return path.resolve(__dirname, '../../../../plugins');
}
if (platformInfo.isBuiltWebMode) {
return path.resolve(__dirname, '../../plugins');
}
if (platformInfo.isDocker) {
return '/home/dbgate-docker/plugins';
}
if (platformInfo.isAwsUbuntuLayout) {
return '/home/ubuntu/build/plugins';
}
if (platformInfo.isNpmDist) {
// node_modules
return global['PLUGINS_DIR'];

View File

@@ -1,13 +1,17 @@
const axios = require('axios');
const os = require('os');
const crypto = require('crypto');
const platformInfo = require('./platformInfo');
async function getPublicIp() {
async function getPublicIpInfo() {
try {
const resp = await axios.default.get('https://api.ipify.org?format=json');
return resp.data.ip || 'unknown-ip';
const resp = await axios.default.get('https://ipinfo.io/json');
if (!resp.data?.ip) {
return { ip: 'unknown-ip' };
}
return resp.data;
} catch (err) {
return 'unknown-ip';
return { ip: 'unknown-ip' };
}
}
@@ -28,7 +32,7 @@ function getMacAddress() {
}
async function getHardwareFingerprint() {
const publicIp = await getPublicIp();
const publicIpInfo = await getPublicIpInfo();
const macAddress = getMacAddress();
const platform = os.platform();
const release = os.release();
@@ -36,7 +40,10 @@ async function getHardwareFingerprint() {
const totalMemory = os.totalmem();
return {
publicIp,
publicIp: publicIpInfo.ip,
country: publicIpInfo.country,
region: publicIpInfo.region,
city: publicIpInfo.city,
macAddress,
platform,
release,
@@ -61,6 +68,12 @@ async function getPublicHardwareFingerprint() {
hash,
payload: {
platform: fingerprint.platform,
city: fingerprint.city,
country: fingerprint.country,
region: fingerprint.region,
isDocker: platformInfo.isDocker,
isAwsUbuntuLayout: platformInfo.isAwsUbuntuLayout,
isElectron: platformInfo.isElectron,
},
};
}

View File

@@ -10,8 +10,11 @@ const isMac = platform === 'darwin';
const isLinux = platform === 'linux';
const isDocker = fs.existsSync('/home/dbgate-docker/public');
const isDevMode = process.env.DEVMODE == '1';
const isBuiltWebMode = process.env.BUILTWEBMODE == '1';
const isNpmDist = !!global['IS_NPM_DIST'];
const isDbModel = !!global['IS_DB_MODEL'];
const isForkedApi = processArgs.isForkedApi;
const isAwsUbuntuLayout = fs.existsSync('/home/ubuntu/build/public');
// function moduleAvailable(name) {
// try {
@@ -39,9 +42,13 @@ const platformInfo = {
environment: process.env.NODE_ENV,
platform,
runningInWebpack: !!process.env.WEBPACK_DEV_SERVER_URL,
allowShellConnection: (!processArgs.listenApiChild && !isNpmDist) || !!process.env.SHELL_CONNECTION || !!isElectron(),
allowShellScripting: (!processArgs.listenApiChild && !isNpmDist) || !!process.env.SHELL_SCRIPTING || !!isElectron(),
allowShellConnection:
(!processArgs.listenApiChild && !isNpmDist) || !!process.env.SHELL_CONNECTION || !!isElectron() || !!isDbModel,
allowShellScripting:
(!processArgs.listenApiChild && !isNpmDist) || !!process.env.SHELL_SCRIPTING || !!isElectron() || !!isDbModel,
allowConnectionFromEnvVariables: !!isDbModel,
defaultKeyfile: path.join(os.homedir(), '.ssh/id_rsa'),
isAwsUbuntuLayout,
};
module.exports = platformInfo;

View File

@@ -5,7 +5,7 @@ const AsyncLock = require('async-lock');
const lock = new AsyncLock();
const { fork } = require('child_process');
const processArgs = require('../utility/processArgs');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const pipeForkLogs = require('./pipeForkLogs');
const logger = getLogger('sshTunnel');
@@ -40,7 +40,7 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
tunnelConfig,
});
} catch (err) {
logger.error({ err }, 'Error connecting SSH');
logger.error(extractErrorLogData(err), 'Error connecting SSH');
}
return new Promise((resolve, reject) => {
subprocess.on('message', resp => {
@@ -50,7 +50,7 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
resolve(subprocess);
}
if (msgtype == 'error') {
reject(errorMessage);
reject(new Error(errorMessage));
}
});
subprocess.on('exit', code => {
@@ -91,6 +91,7 @@ async function getSshTunnel(connection) {
};
return sshTunnelCache[tunnelCacheKey];
} catch (err) {
logger.error(extractErrorLogData(err), 'Error creating SSH tunnel:');
// error is not cached
return {
state: 'error',

View File

@@ -1,5 +1,5 @@
const crypto = require('crypto');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const { getSshTunnel } = require('./sshTunnel');
const logger = getLogger('sshTunnelProxy');
@@ -10,7 +10,7 @@ async function handleGetSshTunnelRequest({ msgid, connection }, subprocess) {
try {
subprocess.send({ msgtype: 'getsshtunnel-response', msgid, response });
} catch (err) {
logger.error({ err }, 'Error sending to SSH tunnel');
logger.error(extractErrorLogData(err), 'Error sending to SSH tunnel');
}
}

View File

@@ -2,7 +2,7 @@ const _ = require('lodash');
const express = require('express');
const getExpressPath = require('./getExpressPath');
const { MissingCredentialsError } = require('./exceptions');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('useController');
/**
@@ -16,7 +16,7 @@ module.exports = function useController(app, electron, route, controller) {
try {
controller._init();
} catch (err) {
logger.error({ err }, `Error initializing controller, exiting application`);
logger.error(extractErrorLogData(err), `Error initializing controller, exiting application`);
process.exit(1);
}
}
@@ -78,7 +78,7 @@ module.exports = function useController(app, electron, route, controller) {
const data = await controller[key]({ ...req.body, ...req.query }, req);
res.json(data);
} catch (err) {
logger.error({ err }, `Error when processing route ${route}/${key}`);
logger.error(extractErrorLogData(err), `Error when processing route ${route}/${key}`);
if (err instanceof MissingCredentialsError) {
res.json({
missingCredentials: true,

View File

@@ -15,7 +15,8 @@
"dependencies": {
"dbgate-sqltree": "^5.0.0-alpha.1",
"dbgate-tools": "^5.0.0-alpha.1",
"dbgate-filterparser": "^5.0.0-alpha.1"
"dbgate-filterparser": "^5.0.0-alpha.1",
"uuid": "^3.4.0"
},
"devDependencies": {
"dbgate-types": "^5.0.0-alpha.1",

View File

@@ -536,3 +536,12 @@ export function changeSetContainsChanges(changeSet: ChangeSet) {
export function changeSetChangedCount(changeSet: ChangeSet) {
return changeSet.deletes.length + changeSet.updates.length + changeSet.inserts.length;
}
export function removeSchemaFromChangeSet(changeSet: ChangeSet) {
return {
...changeSet,
inserts: changeSet.inserts.map(x => ({ ...x, schemaName: undefined })),
updates: changeSet.updates.map(x => ({ ...x, schemaName: undefined })),
deletes: changeSet.deletes.map(x => ({ ...x, schemaName: undefined })),
};
}

View File

@@ -1,4 +1,10 @@
import { createAsyncWriteStream, getLogger, runCommandOnDriver, runQueryOnDriver } from 'dbgate-tools';
import {
createAsyncWriteStream,
extractErrorLogData,
getLogger,
runCommandOnDriver,
runQueryOnDriver,
} from 'dbgate-tools';
import { DatabaseInfo, EngineDriver, ForeignKeyInfo, TableInfo } from 'dbgate-types';
import _pick from 'lodash/pick';
import _omit from 'lodash/omit';
@@ -255,7 +261,7 @@ export class DataDuplicator {
);
}
} catch (err) {
logger.error({ err }, `Failed duplicator job, rollbacking. ${err.message}`);
logger.error(extractErrorLogData(err), `Failed duplicator job, rollbacking. ${err.message}`);
await runCommandOnDriver(this.pool, this.driver, dmp => dmp.rollbackTransaction());
return;
}

View File

@@ -169,12 +169,12 @@ export abstract class GridDisplay {
if (isIncluded) {
res = {
...res,
[field]: [...(cfg[field] || []), uniqueName],
[field]: _.uniq([...(cfg[field] || []), uniqueName]),
};
} else {
res = {
...res,
[field]: (cfg[field] || []).filter(x => x != uniqueName),
[field]: _.uniq((cfg[field] || []).filter(x => x != uniqueName)),
};
}
}
@@ -225,7 +225,7 @@ export abstract class GridDisplay {
conditions.push(
_.cloneDeepWith(condition, (expr: Expression) => {
if (expr.exprType == 'placeholder') {
return this.createColumnExpression(column, { alias: column.sourceAlias });
return this.createColumnExpression(column, { alias: column.sourceAlias }, undefined, 'filter');
}
// return {
// exprType: 'column',
@@ -253,7 +253,7 @@ export abstract class GridDisplay {
orCondition.conditions.push(
_.cloneDeepWith(condition, (expr: Expression) => {
if (expr.exprType == 'placeholder') {
return this.createColumnExpression(column, { alias: 'basetbl' });
return this.createColumnExpression(column, { alias: 'basetbl' }, undefined, 'filter');
}
})
);
@@ -278,7 +278,7 @@ export abstract class GridDisplay {
applySortOnSelect(select: Select, displayedColumnInfo: DisplayedColumnInfo) {
if (this.config.sort?.length > 0) {
select.orderBy = this.config.sort
const orderByColumns = this.config.sort
.map(col => ({ ...col, dispInfo: displayedColumnInfo[col.uniqueName] }))
.map(col => ({ ...col, expr: select.columns.find(x => x.alias == col.uniqueName) }))
.filter(col => col.dispInfo && col.expr)
@@ -286,6 +286,10 @@ export abstract class GridDisplay {
...col.expr,
direction: col.order,
}));
if (orderByColumns.length > 0) {
select.orderBy = orderByColumns;
}
}
}
@@ -570,10 +574,10 @@ export abstract class GridDisplay {
processReferences(select: Select, displayedColumnInfo: DisplayedColumnInfo, options) {}
createColumnExpression(col, source, alias?) {
createColumnExpression(col, source, alias?, purpose: 'view' | 'filter' = 'view') {
let expr = null;
if (this.dialect.createColumnViewExpression) {
expr = this.dialect.createColumnViewExpression(col.columnName, col.dataType, source, alias);
expr = this.dialect.createColumnViewExpression(col.columnName, col.dataType, source, alias, purpose);
if (expr) {
return expr;
}
@@ -595,7 +599,7 @@ export abstract class GridDisplay {
name: _.pick(name, ['schemaName', 'pureName']),
alias: 'basetbl',
},
columns: columns.map(col => this.createColumnExpression(col, { alias: 'basetbl' })),
columns: columns.map(col => this.createColumnExpression(col, { alias: 'basetbl' }, undefined, 'view')),
orderBy: this.driver?.requiresDefaultSortCriteria
? [
{
@@ -625,7 +629,7 @@ export abstract class GridDisplay {
columns: [
...select.columns,
{
alias: '_rowNumber',
alias: '_RowNumber',
exprType: 'rowNumber',
orderBy: select.orderBy
? select.orderBy.map(x =>
@@ -683,7 +687,7 @@ export abstract class GridDisplay {
let select = this.createSelect();
if (!select) return null;
if (this.dialect.rangeSelect) select.range = { offset: offset, limit: count };
else if (this.dialect.rowNumberOverPaging && offset > 0)
else if (this.dialect.rowNumberOverPaging && (offset > 0 || !this.dialect.topRecords))
select = this.getRowNumberOverSelect(select, offset, count);
else if (this.dialect.limitSelect) select.topRecords = count;
return select;

View File

@@ -3,6 +3,7 @@ import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
import { GridConfig, GridCache } from './GridConfig';
import { analyseCollectionDisplayColumns } from './CollectionGridDisplay';
import { evalFilterBehaviour } from 'dbgate-tools';
import { EngineDriver } from 'dbgate-types';
export class JslGridDisplay extends GridDisplay {
constructor(
@@ -15,9 +16,10 @@ export class JslGridDisplay extends GridDisplay {
rows: any,
isDynamicStructure: boolean,
supportsReload: boolean,
editable: boolean = false
editable: boolean = false,
driver: EngineDriver = null
) {
super(config, setConfig, cache, setCache, null);
super(config, setConfig, cache, setCache, driver);
this.filterable = true;
this.sortable = true;

View File

@@ -63,7 +63,7 @@ export class TableGridDisplay extends GridDisplay {
? this.table.primaryKey.columns.map(x => x.columnName)
: this.table.columns.map(x => x.columnName);
}
if (this.config.isFormView) {
this.addAllExpandedColumnsToSelected = true;
this.hintBaseColumns = this.formColumns;
@@ -287,7 +287,7 @@ export class TableGridDisplay extends GridDisplay {
for (const column of columns) {
if (this.addAllExpandedColumnsToSelected || this.config.addedColumns.includes(column.uniqueName)) {
select.columns.push(
this.createColumnExpression(column, { name: column, alias: parentAlias }, column.uniqueName)
this.createColumnExpression(column, { name: column, alias: parentAlias }, column.uniqueName, 'view')
);
displayedColumnInfo[column.uniqueName] = {
...column,

View File

@@ -1,6 +1,6 @@
import _ from 'lodash';
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
import type { EngineDriver, ViewInfo, ColumnInfo } from 'dbgate-types';
import type { EngineDriver, ViewInfo, ColumnInfo, DatabaseInfo } from 'dbgate-types';
import { GridConfig, GridCache } from './GridConfig';
export class ViewGridDisplay extends GridDisplay {
@@ -11,9 +11,10 @@ export class ViewGridDisplay extends GridDisplay {
setConfig: ChangeConfigFunc,
cache: GridCache,
setCache: ChangeCacheFunc,
dbinfo: DatabaseInfo,
serverVersion
) {
super(config, setConfig, cache, setCache, driver, serverVersion);
super(config, setConfig, cache, setCache, driver, dbinfo, serverVersion);
this.columns = this.getDisplayColumns(view);
this.formColumns = this.columns;
this.filterable = true;

View File

@@ -8,10 +8,12 @@ global.PLUGINS_DIR = process.env.DEVMODE
? path.join(path.dirname(path.dirname(global.API_PACKAGE)), 'plugins')
: path.dirname(global.API_PACKAGE);
global.IS_NPM_DIST = true;
global.IS_DB_MODEL = true;
const program = require('commander');
const dbgateApi = require('dbgate-api');
const { createLogger } = require('pinomin');
const { extractErrorLogData } = require('dbgate-tools');
const logger = createLogger('dbmodel');
@@ -21,7 +23,7 @@ async function runAndExit(promise) {
logger.info('Success');
process.exit();
} catch (err) {
logger.error({ err }, 'Processing failed');
logger.error(extractErrorLogData(err), 'Processing failed');
process.exit(1);
}
}

View File

@@ -24,6 +24,7 @@
"@types/parsimmon": "^1.10.1",
"dbgate-tools": "^5.0.0-alpha.1",
"lodash": "^4.17.21",
"date-fns": "^4.1.0",
"moment": "^2.24.0",
"parsimmon": "^1.13.0"
}

View File

@@ -1,11 +1,19 @@
import { arrayToHexString, isTypeDateTime } from 'dbgate-tools';
import moment from 'moment';
import { format, toDate } from 'date-fns';
import _isString from 'lodash/isString';
export type FilterMultipleValuesMode = 'is' | 'is_not' | 'contains' | 'begins' | 'ends';
function getDateStringWithoutTimeZone(dateString) {
if (_isString(dateString)) {
return dateString.replace(/Z|([+-]\d{2}:\d{2})$/, '');
}
return dateString;
}
export function getFilterValueExpression(value, dataType?) {
if (value == null) return 'NULL';
if (isTypeDateTime(dataType)) return moment(value).format('YYYY-MM-DD HH:mm:ss');
if (isTypeDateTime(dataType)) return format(toDate(getDateStringWithoutTimeZone(value)), 'yyyy-MM-dd HH:mm:ss');
if (value === true) return 'TRUE';
if (value === false) return 'FALSE';
if (value.$oid) return `ObjectId("${value.$oid}")`;

View File

@@ -62,10 +62,13 @@ export function dumpSqlSelect(dmp: SqlDumper, cmd: Select) {
}
export function dumpSqlUpdate(dmp: SqlDumper, cmd: Update) {
dmp.put('^update ');
dumpSqlSourceRef(dmp, cmd.from);
dmp.put('&n^set ');
if (cmd.alterTableUpdateSyntax) {
dmp.put('^alter ^table %f &n^update ', cmd.from?.name);
} else {
dmp.put('^update ');
dumpSqlSourceRef(dmp, cmd.from);
dmp.put('&n^set ');
}
dmp.put('&>');
dmp.putCollection(', ', cmd.fields, col => {
dmp.put('%i=', col.targetColumn);
@@ -81,8 +84,14 @@ export function dumpSqlUpdate(dmp: SqlDumper, cmd: Update) {
}
export function dumpSqlDelete(dmp: SqlDumper, cmd: Delete) {
dmp.put('^delete ^from ');
dumpSqlSourceRef(dmp, cmd.from);
if (cmd.alterTableDeleteSyntax) {
dmp.put('^alter ^table ');
dumpSqlSourceRef(dmp, cmd.from);
dmp.put(' ^delete ');
} else {
dmp.put('^delete ^from ');
dumpSqlSourceRef(dmp, cmd.from);
}
if (cmd.where) {
dmp.put('&n^where ');

View File

@@ -26,12 +26,17 @@ export interface Update {
fields: UpdateField[];
from: FromDefinition;
where?: Condition;
// ALTER TABLE xxx UPDATE col1=val1 - syntax for ClickHouse
alterTableUpdateSyntax?: boolean;
}
export interface Delete {
commandType: 'delete';
from: FromDefinition;
where?: Condition;
// ALTER TABLE xxx DELETE - syntax for ClickHouse
alterTableDeleteSyntax?: boolean;
}
export interface Insert {

View File

@@ -31,7 +31,7 @@
"typescript": "^4.4.3"
},
"dependencies": {
"dbgate-query-splitter": "^4.10.3",
"dbgate-query-splitter": "^4.11.2",
"dbgate-sqltree": "^5.0.0-alpha.1",
"debug": "^4.3.4",
"json-stable-stringify": "^1.0.1",

View File

@@ -1,10 +1,12 @@
import { DatabaseInfo, DatabaseModification, EngineDriver, SqlDialect } from 'dbgate-types';
import { DatabaseHandle, DatabaseInfo, DatabaseModification, EngineDriver, SqlDialect } from 'dbgate-types';
import _sortBy from 'lodash/sortBy';
import _groupBy from 'lodash/groupBy';
import _pick from 'lodash/pick';
import _compact from 'lodash/compact';
import { getLogger } from './getLogger';
import { type Logger } from 'pinomin';
import { dbNameLogCategory, isCompositeDbName, splitCompositeDbName } from './schemaInfoTools';
import { extractErrorLogData } from './stringTools';
const logger = getLogger('dbAnalyser');
@@ -40,7 +42,7 @@ export class DatabaseAnalyser {
dialect: SqlDialect;
logger: Logger;
constructor(public pool, public driver: EngineDriver, version) {
constructor(public dbhan: DatabaseHandle, public driver: EngineDriver, version) {
this.dialect = (driver?.dialectByVersion && driver?.dialectByVersion(version)) || driver?.dialect;
this.logger = logger;
}
@@ -68,6 +70,7 @@ export class DatabaseAnalyser {
}
async fullAnalysis() {
logger.info(`Performing full analysis, DB=${dbNameLogCategory(this.dbhan.database)}, engine=${this.driver.engine}`);
const res = this.addEngineField(await this._runAnalysis());
// console.log('FULL ANALYSIS', res);
return res;
@@ -88,6 +91,9 @@ export class DatabaseAnalyser {
}
async incrementalAnalysis(structure) {
logger.info(
`Performing incremental analysis, DB=${dbNameLogCategory(this.dbhan.database)}, engine=${this.driver.engine}`
);
this.structure = structure;
const modifications = await this.getModifications();
@@ -180,8 +186,19 @@ export class DatabaseAnalyser {
// return this.createQueryCore('=OBJECT_ID_CONDITION', typeFields) != ' is not null';
// }
getDefaultSchemaNameCondition() {
return 'is not null';
}
createQuery(template, typeFields, replacements = {}) {
return this.createQueryCore(this.processQueryReplacements(template, replacements), typeFields);
let query = this.createQueryCore(this.processQueryReplacements(template, replacements), typeFields);
const dbname = this.dbhan.database;
const schemaCondition = isCompositeDbName(dbname)
? `= '${splitCompositeDbName(dbname).schema}' `
: ` ${this.getDefaultSchemaNameCondition()} `;
return query?.replace(/=SCHEMA_NAME_CONDITION/g, schemaCondition);
}
processQueryReplacements(query, replacements) {
@@ -242,8 +259,8 @@ export class DatabaseAnalyser {
}
feedback(obj) {
if (this.pool.feedback) {
this.pool.feedback(obj);
if (this.dbhan.feedback) {
this.dbhan.feedback(obj);
}
if (obj && obj.analysingMessage) {
logger.debug(obj.analysingMessage);
@@ -318,11 +335,11 @@ export class DatabaseAnalyser {
};
}
try {
const res = await this.driver.query(this.pool, sql);
const res = await this.driver.query(this.dbhan, sql);
this.logger.debug({ rows: res.rows.length, template }, `Loaded analyser query`);
return res;
} catch (err) {
logger.error({ err }, 'Error running analyser query');
logger.error(extractErrorLogData(err, { template }), 'Error running analyser query');
return {
rows: [],
};
@@ -338,7 +355,6 @@ export class DatabaseAnalyser {
functions: [],
procedures: [],
triggers: [],
schemas: [],
};
}

View File

@@ -1,4 +1,3 @@
import _ from 'lodash';
import type {
ColumnInfo,
ConstraintInfo,
@@ -24,6 +23,7 @@ import _isNumber from 'lodash/isNumber';
import _isDate from 'lodash/isDate';
import _isArray from 'lodash/isArray';
import _isPlainObject from 'lodash/isPlainObject';
import _keys from 'lodash/keys';
import uuidv1 from 'uuid/v1';
export class SqlDumper implements AlterProcessor {
@@ -214,6 +214,14 @@ export class SqlDumper implements AlterProcessor {
this.putCmd('^drop ^database %i', name);
}
createSchema(name: string) {
this.putCmd('^create ^schema %i', name);
}
dropSchema(name: string) {
this.putCmd('^drop ^schema %i', name);
}
specialColumnOptions(column) {}
selectScopeIdentity(table: TableInfo) {}
@@ -246,10 +254,10 @@ export class SqlDumper implements AlterProcessor {
this.putRaw(' ');
this.specialColumnOptions(column);
if (includeNullable) {
if (includeNullable && !this.dialect?.specificNullabilityImplementation) {
this.put(column.notNull ? '^not ^null' : '^null');
}
if (includeDefault && column.defaultValue?.trim()) {
if (includeDefault && column.defaultValue?.toString()?.trim()) {
this.columnDefault(column);
}
}
@@ -294,12 +302,25 @@ export class SqlDumper implements AlterProcessor {
});
this.put('&<&n)');
this.tableOptions(table);
this.endCommand();
(table.indexes || []).forEach(ix => {
this.createIndex(ix);
});
}
tableOptions(table: TableInfo) {
const options = this.driver?.dialect?.getTableFormOptions?.('sqlCreateTable') || [];
for (const option of options) {
if (table[option.name]) {
this.put('&n');
this.put(option.sqlFormatString, table[option.name]);
}
}
}
createTablePrimaryKeyCore(table: TableInfo) {
if (table.primaryKey) {
this.put(',&n');
@@ -531,7 +552,9 @@ export class SqlDumper implements AlterProcessor {
renameConstraint(constraint: ConstraintInfo, newName: string) {}
createColumn(column: ColumnInfo, constraints: ConstraintInfo[]) {
this.put('^alter ^table %f ^add %i ', column, column.columnName);
this.put('^alter ^table %f ^add ', column);
if (this.dialect.createColumnWithColumnKeyword) this.put('^column ');
this.put(' %i ', column.columnName);
this.columnDefinition(column);
this.inlineConstraints(constraints);
this.endCommand();
@@ -607,10 +630,8 @@ export class SqlDumper implements AlterProcessor {
if (!oldTable.pairingId || !newTable.pairingId || oldTable.pairingId != newTable.pairingId) {
throw new Error('Recreate is not possible: oldTable.paringId != newTable.paringId');
}
const tmpTable = `temp_${uuidv1()}`;
// console.log('oldTable', oldTable);
// console.log('newTable', newTable);
const tmpTable = `temp_${uuidv1()}`;
const columnPairs = oldTable.columns
.map(oldcol => ({
@@ -619,33 +640,49 @@ export class SqlDumper implements AlterProcessor {
}))
.filter(x => x.newcol);
this.dropConstraints(oldTable, true);
this.renameTable(oldTable, tmpTable);
if (this.driver.supportsTransactions) {
this.dropConstraints(oldTable, true);
this.renameTable(oldTable, tmpTable);
this.createTable(newTable);
this.createTable(newTable);
const autoinc = newTable.columns.find(x => x.autoIncrement);
if (autoinc) {
this.allowIdentityInsert(newTable, true);
const autoinc = newTable.columns.find(x => x.autoIncrement);
if (autoinc) {
this.allowIdentityInsert(newTable, true);
}
this.putCmd(
'^insert ^into %f (%,i) select %,s ^from %f',
newTable,
columnPairs.map(x => x.newcol.columnName),
columnPairs.map(x => x.oldcol.columnName),
{ ...oldTable, pureName: tmpTable }
);
if (autoinc) {
this.allowIdentityInsert(newTable, false);
}
if (this.dialect.dropForeignKey) {
newTable.dependencies.forEach(cnt => this.createConstraint(cnt));
}
this.dropTable({ ...oldTable, pureName: tmpTable });
} else {
// we have to preserve old table as long as possible
this.createTable({ ...newTable, pureName: tmpTable });
this.putCmd(
'^insert ^into %f (%,i) select %,s ^from %f',
{ ...newTable, pureName: tmpTable },
columnPairs.map(x => x.newcol.columnName),
columnPairs.map(x => x.oldcol.columnName),
oldTable
);
this.dropTable(oldTable);
this.renameTable({ ...newTable, pureName: tmpTable }, newTable.pureName);
}
this.putCmd(
'^insert ^into %f (%,i) select %,s ^from %f',
newTable,
columnPairs.map(x => x.newcol.columnName),
columnPairs.map(x => x.oldcol.columnName),
{ ...oldTable, pureName: tmpTable }
);
if (autoinc) {
this.allowIdentityInsert(newTable, false);
}
if (this.dialect.dropForeignKey) {
newTable.dependencies.forEach(cnt => this.createConstraint(cnt));
}
this.dropTable({ ...oldTable, pureName: tmpTable });
}
createSqlObject(obj: SqlObjectInfo) {
@@ -671,6 +708,23 @@ export class SqlDumper implements AlterProcessor {
this.putCmd('^drop %s %f', this.getSqlObjectSqlName(obj.objectTypeField), obj);
}
setTableOption(table: TableInfo, optionName: string, optionValue: string) {
const options = this?.dialect?.getTableFormOptions?.('sqlAlterTable');
const option = options?.find(x => x.name == optionName && !x.disabled);
if (!option) {
return;
}
this.setTableOptionCore(table, optionName, optionValue, option.sqlFormatString);
this.endCommand();
}
setTableOptionCore(table: TableInfo, optionName: string, optionValue: string, formatString: string) {
this.put('^alter ^table %f ', table);
this.put(formatString, optionValue);
}
fillPreloadedRows(
table: NamedObjectInfo,
oldRows: any[],
@@ -682,7 +736,7 @@ export class SqlDumper implements AlterProcessor {
let was = false;
for (const row of newRows) {
const old = oldRows?.find(r => key.every(col => r[col] == row[col]));
const rowKeys = _.keys(row);
const rowKeys = _keys(row);
if (old) {
const updated = [];
for (const col of rowKeys) {

View File

@@ -12,6 +12,7 @@ import _uniqBy from 'lodash/uniqBy';
import { getLogger } from './getLogger';
import { SqlDumper } from './SqlDumper';
import { extendDatabaseInfo } from './structureTools';
import { extractErrorLogData } from './stringTools';
const logger = getLogger('sqlGenerator');
@@ -85,7 +86,7 @@ export class SqlGenerator {
}
private handleException = error => {
logger.error({ error }, 'Unhandled error');
logger.error(extractErrorLogData(error), 'Unhandled error');
this.isUnhandledException = true;
};

View File

@@ -97,6 +97,13 @@ interface AlterOperation_FillPreloadedRows {
autoIncrementColumn: string;
}
interface AlterOperation_SetTableOption {
operationType: 'setTableOption';
table: TableInfo;
optionName: string;
optionValue: string;
}
type AlterOperation =
| AlterOperation_CreateColumn
| AlterOperation_ChangeColumn
@@ -112,7 +119,8 @@ type AlterOperation =
| AlterOperation_CreateSqlObject
| AlterOperation_DropSqlObject
| AlterOperation_RecreateTable
| AlterOperation_FillPreloadedRows;
| AlterOperation_FillPreloadedRows
| AlterOperation_SetTableOption;
export class AlterPlan {
recreates = {
@@ -253,6 +261,15 @@ export class AlterPlan {
});
}
setTableOption(table: TableInfo, optionName: string, optionValue: string) {
this.operations.push({
operationType: 'setTableOption',
table,
optionName,
optionValue,
});
}
run(processor: AlterProcessor) {
for (const op of this.operations) {
runAlterOperation(op, processor);
@@ -267,6 +284,7 @@ export class AlterPlan {
: [];
const constraints = _.compact([
dependencyDefinition?.includes('primaryKey') ? table.primaryKey : null,
dependencyDefinition?.includes('sortingKey') ? table.sortingKey : null,
...(dependencyDefinition?.includes('foreignKeys') ? table.foreignKeys : []),
...(dependencyDefinition?.includes('indexes') ? table.indexes : []),
...(dependencyDefinition?.includes('uniques') ? table.uniques : []),
@@ -297,35 +315,40 @@ export class AlterPlan {
return res;
}
if (op.operationType == 'changeColumn') {
const constraints = this._getDependendColumnConstraints(op.oldObject, this.dialect.changeColumnDependencies);
for (const [testedOperationType, testedDependencies, testedObject] of [
['changeColumn', this.dialect.changeColumnDependencies, (op as AlterOperation_ChangeColumn).oldObject],
['renameColumn', this.dialect.renameColumnDependencies, (op as AlterOperation_RenameColumn).object],
]) {
if (op.operationType == testedOperationType) {
const constraints = this._getDependendColumnConstraints(testedObject as ColumnInfo, testedDependencies);
if (constraints.length > 0 && this.opts.noDropConstraint) {
return [];
if (constraints.length > 0 && this.opts.noDropConstraint) {
return [];
}
const res: AlterOperation[] = [
...constraints.map(oldObject => {
const opRes: AlterOperation = {
operationType: 'dropConstraint',
oldObject,
};
return opRes;
}),
op,
..._.reverse([...constraints]).map(newObject => {
const opRes: AlterOperation = {
operationType: 'createConstraint',
newObject,
};
return opRes;
}),
];
if (constraints.length > 0) {
this.recreates.constraints += 1;
}
return res;
}
const res: AlterOperation[] = [
...constraints.map(oldObject => {
const opRes: AlterOperation = {
operationType: 'dropConstraint',
oldObject,
};
return opRes;
}),
op,
..._.reverse([...constraints]).map(newObject => {
const opRes: AlterOperation = {
operationType: 'createConstraint',
newObject,
};
return opRes;
}),
];
if (constraints.length > 0) {
this.recreates.constraints += 1;
}
return res;
}
if (op.operationType == 'dropTable') {
@@ -374,7 +397,8 @@ export class AlterPlan {
this._testTableRecreate(op, 'dropColumn', this.dialect.dropColumn, 'oldObject') ||
this._testTableRecreate(op, 'createConstraint', obj => this._canCreateConstraint(obj), 'newObject') ||
this._testTableRecreate(op, 'dropConstraint', obj => this._canDropConstraint(obj), 'oldObject') ||
this._testTableRecreate(op, 'changeColumn', this.dialect.changeColumn, 'newObject') || [op]
this._testTableRecreate(op, 'changeColumn', this.dialect.changeColumn, 'newObject') ||
this._testTableRecreate(op, 'renameColumn', true, 'object') || [op]
);
});
@@ -383,6 +407,7 @@ export class AlterPlan {
_canCreateConstraint(cnt: ConstraintInfo) {
if (cnt.constraintType == 'primaryKey') return this.dialect.createPrimaryKey;
if (cnt.constraintType == 'sortingKey') return this.dialect.createPrimaryKey;
if (cnt.constraintType == 'foreignKey') return this.dialect.createForeignKey;
if (cnt.constraintType == 'index') return this.dialect.createIndex;
if (cnt.constraintType == 'unique') return this.dialect.createUnique;
@@ -392,6 +417,7 @@ export class AlterPlan {
_canDropConstraint(cnt: ConstraintInfo) {
if (cnt.constraintType == 'primaryKey') return this.dialect.dropPrimaryKey;
if (cnt.constraintType == 'sortingKey') return this.dialect.dropPrimaryKey;
if (cnt.constraintType == 'foreignKey') return this.dialect.dropForeignKey;
if (cnt.constraintType == 'index') return this.dialect.dropIndex;
if (cnt.constraintType == 'unique') return this.dialect.dropUnique;
@@ -453,7 +479,7 @@ export class AlterPlan {
}
} else {
// @ts-ignore
const oldObject: TableInfo = op.oldObject;
const oldObject: TableInfo = op.oldObject || op.object;
if (oldObject) {
const recreated = recreates[`${oldObject.schemaName}||${oldObject.pureName}`];
if (recreated) {
@@ -575,6 +601,9 @@ export function runAlterOperation(op: AlterOperation, processor: AlterProcessor)
case 'dropSqlObject':
processor.dropSqlObject(op.oldObject);
break;
case 'setTableOption':
processor.setTableOption(op.table, op.optionName, op.optionValue);
break;
case 'fillPreloadedRows':
processor.fillPreloadedRows(op.table, op.oldRows, op.newRows, op.key, op.insertOnly, op.autoIncrementColumn);
break;

View File

@@ -5,7 +5,7 @@ import { prepareTableForImport } from './tableTransforms';
const logger = getLogger('bulkStreamBase');
export function createBulkInsertStreamBase(driver: EngineDriver, stream, pool, name, options: WriteTableOptions): any {
export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan, name, options: WriteTableOptions): any {
const fullNameQuoted = name.schemaName
? `${driver.dialect.quoteIdentifier(name.schemaName)}.${driver.dialect.quoteIdentifier(name.pureName)}`
: driver.dialect.quoteIdentifier(name.pureName);
@@ -29,21 +29,22 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, pool, n
};
writable.checkStructure = async () => {
let structure = await driver.analyseSingleTable(pool, name);
let structure = await driver.analyseSingleTable(dbhan, name);
// console.log('ANALYSING', name, structure);
if (structure && options.dropIfExists) {
logger.info(`Dropping table ${fullNameQuoted}`);
await driver.script(pool, `DROP TABLE ${fullNameQuoted}`);
await driver.script(dbhan, `DROP TABLE ${fullNameQuoted}`);
}
if (options.createIfNotExists && (!structure || options.dropIfExists)) {
const dmp = driver.createDumper();
dmp.createTable(prepareTableForImport({ ...writable.structure, ...name }));
const createdTableInfo = driver.adaptTableInfo(prepareTableForImport({ ...writable.structure, ...name }));
dmp.createTable(createdTableInfo);
logger.info({ sql: dmp.s }, `Creating table ${fullNameQuoted}`);
await driver.script(pool, dmp.s);
structure = await driver.analyseSingleTable(pool, name);
await driver.script(dbhan, dmp.s);
structure = await driver.analyseSingleTable(dbhan, name);
}
if (options.truncate) {
await driver.script(pool, `TRUNCATE TABLE ${fullNameQuoted}`);
await driver.script(dbhan, `TRUNCATE TABLE ${fullNameQuoted}`);
}
writable.columnNames = _intersection(
@@ -73,7 +74,7 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, pool, n
dmp.putRaw(';');
// require('fs').writeFileSync('/home/jena/test.sql', dmp.s);
// console.log(dmp.s);
await driver.query(pool, dmp.s, { discardResult: true });
await driver.query(dbhan, dmp.s, { discardResult: true });
} else {
for (const row of rows) {
const dmp = driver.createDumper();
@@ -84,13 +85,13 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, pool, n
dmp.putRaw('(');
dmp.putCollection(',', writable.columnNames, col => dmp.putValue(row[col as string]));
dmp.putRaw(')');
await driver.query(pool, dmp.s, { discardResult: true });
await driver.query(dbhan, dmp.s, { discardResult: true });
}
}
if (options.commitAfterInsert) {
const dmp = driver.createDumper();
dmp.commitTransaction();
await driver.query(pool, dmp.s, { discardResult: true });
await driver.query(dbhan, dmp.s, { discardResult: true });
}
};

View File

@@ -11,6 +11,7 @@ import {
UniqueInfo,
SqlObjectInfo,
NamedObjectInfo,
ColumnsConstraintInfo,
} from '../../types';
export class DatabaseInfoAlterProcessor {
@@ -59,6 +60,9 @@ export class DatabaseInfoAlterProcessor {
case 'primaryKey':
table.primaryKey = constraint as PrimaryKeyInfo;
break;
case 'sortingKey':
table.sortingKey = constraint as ColumnsConstraintInfo;
break;
case 'foreignKey':
table.foreignKeys.push(constraint as ForeignKeyInfo);
break;
@@ -86,6 +90,9 @@ export class DatabaseInfoAlterProcessor {
case 'primaryKey':
table.primaryKey = null;
break;
case 'sortingKey':
table.sortingKey = null;
break;
case 'foreignKey':
table.foreignKeys = table.foreignKeys.filter(x => x.constraintName != constraint.constraintName);
break;
@@ -129,4 +136,9 @@ export class DatabaseInfoAlterProcessor {
tableInfo.preloadedRowsKey = key;
tableInfo.preloadedRowsInsertOnly = insertOnly;
}
setTableOption(table: TableInfo, optionName: string, optionValue: string) {
const tableInfo = this.db.tables.find(x => x.pureName == table.pureName && x.schemaName == table.schemaName);
tableInfo[optionName] = optionValue;
}
}

View File

@@ -46,6 +46,14 @@ export function generateTablePairingId(table: TableInfo): TableInfo {
if (!table.pairingId) {
return {
...table,
primaryKey: table.primaryKey && {
...table.primaryKey,
pairingId: table.primaryKey.pairingId || uuidv1(),
},
sortingKey: table.sortingKey && {
...table.sortingKey,
pairingId: table.sortingKey.pairingId || uuidv1(),
},
columns: table.columns?.map(col => ({
...col,
pairingId: col.pairingId || uuidv1(),
@@ -335,6 +343,7 @@ export function testEqualTypes(a: ColumnInfo, b: ColumnInfo, opts: DbDiffOptions
function getTableConstraints(table: TableInfo) {
const res = [];
if (table.primaryKey) res.push(table.primaryKey);
if (table.sortingKey) res.push(table.sortingKey);
if (table.foreignKeys) res.push(...table.foreignKeys);
if (table.indexes) res.push(...table.indexes);
if (table.uniques) res.push(...table.uniques);
@@ -345,7 +354,9 @@ function getTableConstraints(table: TableInfo) {
function createPairs(oldList, newList, additionalCondition = null) {
const res = [];
for (const a of oldList) {
const b = newList.find(x => x.pairingId == a.pairingId || (additionalCondition && additionalCondition(a, x)));
const b = newList.find(
x => (a.pairingId && x.pairingId == a.pairingId) || (additionalCondition && additionalCondition(a, x))
);
if (b) {
res.push([a, b]);
} else {
@@ -381,9 +392,14 @@ function planAlterTable(plan: AlterPlan, oldTable: TableInfo, newTable: TableInf
const constraintPairs = createPairs(
getTableConstraints(oldTable),
getTableConstraints(newTable),
(a, b) => a.constraintType == 'primaryKey' && b.constraintType == 'primaryKey'
(a, b) =>
(a.constraintType == 'primaryKey' && b.constraintType == 'primaryKey') ||
(a.constraintType == 'sortingKey' && b.constraintType == 'sortingKey')
);
// console.log('constraintPairs SOURCE', getTableConstraints(oldTable), getTableConstraints(newTable));
// console.log('constraintPairs OLD TABLE', oldTable);
// console.log('constraintPairs NEW TABLE', newTable);
// console.log('constraintPairs SOURCE OLD', getTableConstraints(oldTable));
// console.log('constraintPairs SOURCE NEW', getTableConstraints(newTable));
// console.log('constraintPairs', constraintPairs);
if (!opts.noDropConstraint) {
@@ -407,7 +423,7 @@ function planAlterTable(plan: AlterPlan, oldTable: TableInfo, newTable: TableInf
// console.log('PLAN RENAME COLUMN')
plan.renameColumn(x[0], x[1].columnName);
} else {
// console.log('PLAN CHANGE COLUMN')
// console.log('PLAN CHANGE COLUMN', x[0], x[1]);
plan.changeColumn(x[0], x[1]);
}
}
@@ -425,6 +441,28 @@ function planAlterTable(plan: AlterPlan, oldTable: TableInfo, newTable: TableInf
constraintPairs.filter(x => x[0] == null).forEach(x => plan.createConstraint(x[1]));
planTablePreload(plan, oldTable, newTable);
planChangeTableOptions(plan, oldTable, newTable, opts);
// console.log('oldTable', oldTable);
// console.log('newTable', newTable);
// console.log('plan.operations', plan.operations);
}
function planChangeTableOptions(plan: AlterPlan, oldTable: TableInfo, newTable: TableInfo, opts: DbDiffOptions) {
for (const option of plan.dialect?.getTableFormOptions?.('sqlAlterTable') || []) {
if (option.disabled) {
continue;
}
const name = option.name;
if (
oldTable[name] != newTable[name] &&
(oldTable[name] || newTable[name]) &&
(newTable[name] || option.allowEmptyValue)
) {
plan.setTableOption(newTable, name, newTable[name]);
}
}
}
export function testEqualTables(

Some files were not shown because too many files have changed in this diff Show More