Compare commits

...

720 Commits

Author SHA1 Message Date
SPRINX0\prochazka
08410ef209 v6.6.9 2025-10-31 16:32:37 +01:00
SPRINX0\prochazka
6cd3242454 v6.6.9-premium-beta.1 2025-10-31 16:05:43 +01:00
CI workflows
986c1a7bc8 chore: auto-update github workflows 2025-10-31 15:05:12 +00:00
SPRINX0\prochazka
a7703ec996 fix build 2025-10-31 16:04:46 +01:00
SPRINX0\prochazka
52d2eb3f59 v6.6.8 2025-10-31 15:22:13 +01:00
SPRINX0\prochazka
b19fed41ef changelog 2025-10-31 15:20:51 +01:00
SPRINX0\prochazka
1d52cde3b3 v6.6.8-beta.19 2025-10-31 14:38:07 +01:00
SPRINX0\prochazka
aca4cc0ace fix 2025-10-31 14:37:57 +01:00
SPRINX0\prochazka
8f9a78feb9 v6.6.8-beta.18 2025-10-31 14:01:57 +01:00
SPRINX0\prochazka
45484a43f1 v6.6.8-beta.17 2025-10-31 14:01:32 +01:00
CI workflows
63665e6e9c chore: auto-update github workflows 2025-10-31 13:00:53 +00:00
SPRINX0\prochazka
151665c880 pragmatic hack - fix yaml hashes for windows 2025-10-31 14:00:23 +01:00
SPRINX0\prochazka
15e475873e v6.8.8-beta.14 2025-10-31 10:49:37 +01:00
SPRINX0\prochazka
f846af75e8 Merge branch 'moveprem' 2025-10-31 10:47:34 +01:00
SPRINX0\prochazka
5c7ab3793f v6.6.8-beta.13 2025-10-31 08:21:42 +01:00
CI workflows
a501f0cdef chore: auto-update github workflows 2025-10-31 07:19:48 +00:00
SPRINX0\prochazka
4b4c2606cd skipped publish 2025-10-31 08:19:15 +01:00
SPRINX0\prochazka
901e7581fe v6.6.8-alpha.10 2025-10-30 14:32:16 +01:00
SPRINX0\prochazka
55fb233ce4 v6.6.8-alpha.9 2025-10-30 14:20:14 +01:00
SPRINX0\prochazka
dd77ee4a8e added missing repo info 2025-10-30 14:18:35 +01:00
CI workflows
a7ae6d7b47 chore: auto-update github workflows 2025-10-30 13:18:30 +00:00
CI workflows
34dfac8bb2 Update pro ref 2025-10-30 13:18:11 +00:00
SPRINX0\prochazka
599f783c04 v6.6.8-alpha.8 2025-10-30 14:10:07 +01:00
SPRINX0\prochazka
c7f82d3d46 v6.6.8-alpha.7 2025-10-30 14:09:37 +01:00
CI workflows
8b346c6b44 chore: auto-update github workflows 2025-10-30 13:09:06 +00:00
SPRINX0\prochazka
e386764151 npm publish fix 2025-10-30 14:08:48 +01:00
SPRINX0\prochazka
c0acecc6e9 git repo config 2025-10-30 13:48:44 +01:00
SPRINX0\prochazka
a1724134ec v6.6.8-alpha.6 2025-10-30 13:41:53 +01:00
SPRINX0\prochazka
530b830586 fixed repo 2025-10-30 13:41:24 +01:00
SPRINX0\prochazka
363a72c3ad v6.6.8-alpha.5 2025-10-30 13:31:42 +01:00
CI workflows
c1a6daf9d2 chore: auto-update github workflows 2025-10-30 12:31:14 +00:00
SPRINX0\prochazka
8a7a73678c added npm publish --tag 2025-10-30 13:30:56 +01:00
SPRINX0\prochazka
6cc8c7cb9d v6.6.8-alpha.4 2025-10-30 13:20:32 +01:00
CI workflows
e2d1771a7a chore: auto-update github workflows 2025-10-30 12:18:26 +00:00
SPRINX0\prochazka
23f7dd6ee3 use OIDC NPM sign 2025-10-30 13:18:05 +01:00
SPRINX0\prochazka
f24caad997 v6.6.8-beta.1 2025-10-30 08:49:48 +01:00
CI workflows
32729350f6 chore: auto-update github workflows 2025-10-30 07:48:22 +00:00
SPRINX0\prochazka
4929d190a5 azure code signing 2025-10-30 08:47:55 +01:00
Stela Augustinova
0e211dc91b translation-connections,sqlObject,column,filter 2025-10-29 16:55:27 +01:00
Stela Augustinova
8663ab2d28 translation-settings,common 2025-10-29 16:55:27 +01:00
SPRINX0\prochazka
ba0ecaf70f fix 2025-10-29 16:47:52 +01:00
SPRINX0\prochazka
acb4f5924e upgrade button 2025-10-29 16:43:28 +01:00
SPRINX0\prochazka
46553a80ad v6.6.7 2025-10-29 15:20:51 +01:00
SPRINX0\prochazka
e254657813 removed links to dbgate.org 2025-10-29 15:06:30 +01:00
SPRINX0\prochazka
b087df8d97 v6.6.7-beta.2 2025-10-29 14:59:28 +01:00
SPRINX0\prochazka
47eb74d5ba fix 2025-10-29 14:59:14 +01:00
SPRINX0\prochazka
f0ac047978 v6.6.7-beta.1 2025-10-29 14:26:05 +01:00
SPRINX0\prochazka
e57e246991 SYNC: fixed web info 2025-10-29 13:00:17 +00:00
SPRINX0\prochazka
6934cdd122 json UI improvements 2025-10-29 13:08:37 +01:00
SPRINX0\prochazka
0e2a77ced7 changelog 2025-10-27 17:19:16 +01:00
SPRINX0\prochazka
2d135d708e v6.6.6 2025-10-27 17:18:48 +01:00
SPRINX0\prochazka
26d34f896b changelog 2025-10-27 16:20:27 +01:00
SPRINX0\prochazka
87c58cae83 SYNC: fixed test 2025-10-27 14:46:01 +00:00
SPRINX0\prochazka
4d6957a6fa v6.6.6-premium-beta.18 2025-10-27 13:19:40 +01:00
SPRINX0\prochazka
70c53248ae v6.6.6-beta.17 2025-10-27 13:19:20 +01:00
SPRINX0\prochazka
6e645cb054 v6.6.6-beta.7 2025-10-27 13:19:11 +01:00
SPRINX0\prochazka
28fecc6834 v6.6.6-beta.14 2025-10-27 13:18:55 +01:00
SPRINX0\prochazka
64c2faf538 JSON UI 2025-10-27 13:10:45 +01:00
Jan Prochazka
391d04b45c MPR archive 2025-10-26 18:28:35 +01:00
Jan Prochazka
f974c00a63 MPR references 2025-10-26 18:23:07 +01:00
Jan Prochazka
9f0107c002 MPR refs, macros 2025-10-26 18:06:45 +01:00
Jan Prochazka
6ce82e915e MPR grouping 2025-10-26 15:04:13 +01:00
Jan Prochazka
864797fc99 MPR advanced exports 2025-10-26 09:33:48 +01:00
SPRINX0\prochazka
c741434e3c v6.6.6-beta.13 2025-10-23 07:44:45 +02:00
Jan Prochazka
60fcb1a862 Merge pull request #1230 from dbgate/feature/disable-filter
Feature/disable filter
2025-10-22 14:42:51 +02:00
Stela Augustinova
18dc6a3ff5 Allow disable/re-enable multicolumn filter #1174 2025-10-22 14:20:23 +02:00
Stela Augustinova
da52dd006b Allow disable/re-enable filter #1174 2025-10-22 14:06:31 +02:00
SPRINX0\prochazka
ed1655ed8f better colored icon 2025-10-22 13:56:46 +02:00
SPRINX0\prochazka
516c4e32be v6.6.6-premium-beta.9 2025-10-22 12:40:58 +02:00
SPRINX0\prochazka
1e222d806e SYNC: fixed export from team files 2025-10-22 10:40:37 +00:00
SPRINX0\prochazka
bfb35b198d v6.6.6-premium-beta.8 2025-10-22 12:28:03 +02:00
SPRINX0\prochazka
3a5f36155f promo widget colors 2025-10-22 10:47:59 +02:00
SPRINX0\prochazka
82092fab76 premium icon 2025-10-22 10:15:27 +02:00
SPRINX0\prochazka
a432cb886d #1211 - fixed incorrect behaviour in server connection ping 2025-10-21 16:34:40 +02:00
Jan Prochazka
5fff8c6ba2 Merge pull request #1227 from dbgate/feature/execute-current-line
Execute Current, Execute Current Line Not Current Query #1209
2025-10-21 15:27:09 +02:00
Stela Augustinova
e1de4f5c5f Execute Current, Execute Current Line Not Current Query #1209 2025-10-21 14:18:22 +02:00
Jan Prochazka
b6145d6f1e Merge pull request #1226 from dbgate/feature/close-tabs
message: close right side tabs #1219
2025-10-21 14:15:25 +02:00
Stela Augustinova
3804a87cef message: close right side tabs #1219 2025-10-21 12:43:44 +02:00
SPRINX0\prochazka
a7a6c664c8 SYNC: #1220 fixed typo 2025-10-21 10:42:26 +02:00
SPRINX0\prochazka
f075515607 v6.6.6-premium-beta.2 2025-10-21 09:30:08 +02:00
SPRINX0\prochazka
84c15bbc69 v6.6.6-beta.1 2025-10-21 09:29:58 +02:00
SPRINX0\prochazka
54e70d490d SYNC: fixed circular dependency 2025-10-21 06:10:16 +00:00
SPRINX0\prochazka
67c3de1f5d Merge branch 'feature/dynamic-promo-widget' 2025-10-20 15:28:29 +02:00
SPRINX0\prochazka
7281b5b1d7 highlight promo 2025-10-20 14:24:35 +02:00
SPRINX0\prochazka
966eb01f1c dynamic promo widget data 2025-10-20 14:00:59 +02:00
SPRINX0\prochazka
5bdf072cdf promo widget validity 2025-10-20 12:54:24 +02:00
SPRINX0\prochazka
59c3381962 promo widget WIP 2025-10-17 17:30:27 +02:00
SPRINX0\prochazka
1fa4216b18 dynamic promo widget 2025-10-17 14:04:08 +02:00
SPRINX0\prochazka
a98c953876 fixed link 2025-10-17 13:05:25 +02:00
SPRINX0\prochazka
bf995e5861 don't send country to cloud update - is not really needed 2025-10-17 12:51:30 +02:00
SPRINX0\prochazka
1b8470df38 use DBG info 2025-10-17 11:59:52 +02:00
SPRINX0\prochazka
98ded8ea30 added placeholder file for premium 2025-10-16 10:55:53 +02:00
CI workflows
b72a50eb7e chore: auto-update github workflows 2025-10-16 08:52:21 +00:00
CI workflows
9c3f4fbb9d Update pro ref 2025-10-16 08:52:04 +00:00
Jan Prochazka
28c68308a9 SYNC: Merge pull request #15 from dbgate/feature/redis-cluster 2025-10-16 08:51:49 +00:00
CI workflows
ea3b0c15ac chore: auto-update github workflows 2025-10-15 10:20:08 +00:00
CI workflows
38ce46adb0 Update pro ref 2025-10-15 10:19:48 +00:00
SPRINX0\prochazka
0a9a0103dd changelog 2025-10-15 11:05:31 +02:00
SPRINX0\prochazka
b7b370ff62 v6.6.5 2025-10-15 10:50:39 +02:00
SPRINX0\prochazka
7b0c98ad2c v6.6.5-premium-beta.2 2025-10-14 15:12:07 +02:00
CI workflows
bfe25e70d6 chore: auto-update github workflows 2025-10-14 13:07:25 +00:00
CI workflows
b2409df369 Update pro ref 2025-10-14 13:07:03 +00:00
SPRINX0\prochazka
e1d8549730 SYNC: explain query error 2025-10-14 13:06:53 +00:00
SPRINX0\prochazka
865cc081ce SYNC: database chat test moved 2025-10-14 12:48:28 +00:00
CI workflows
867d5a9eb5 chore: auto-update github workflows 2025-10-14 11:00:02 +00:00
CI workflows
ac84b7604b Update pro ref 2025-10-14 10:59:42 +00:00
SPRINX0\prochazka
8f860ad93e SYNC: chart test 2025-10-14 10:59:30 +00:00
SPRINX0\prochazka
f3b65700d7 fixed build of Community edition 2025-10-14 11:20:31 +02:00
CI workflows
8ec1856206 chore: auto-update github workflows 2025-10-14 09:02:33 +00:00
CI workflows
811d2162fc Update pro ref 2025-10-14 09:02:16 +00:00
Jan Prochazka
5e2cdca103 SYNC: Merge pull request #14 from dbgate/ai-sql 2025-10-14 09:02:04 +00:00
SPRINX0\prochazka
23fb5852ba v6.6.5-premium-beta.1 2025-10-10 16:27:47 +02:00
CI workflows
75cbc0d29a chore: auto-update github workflows 2025-10-10 14:08:48 +00:00
CI workflows
d08cd684c5 Update pro ref 2025-10-10 14:08:31 +00:00
CI workflows
529b297ba6 chore: auto-update github workflows 2025-10-10 13:54:02 +00:00
CI workflows
32193eef49 Update pro ref 2025-10-10 13:53:47 +00:00
Jan Prochazka
1f97b90b2d Merge pull request #1205 from dbgate/feature/pin-unpin-single-database-connections
feat: allow pinning and unpinning single database connections
2025-10-10 12:57:14 +02:00
SPRINX0\prochazka
0dd36260e9 SYNC: fixed Cannot open up large JSON file #1215 2025-10-10 10:49:25 +00:00
CI workflows
3571d49987 chore: auto-update github workflows 2025-10-09 15:58:36 +00:00
CI workflows
ad3489c491 Update pro ref 2025-10-09 15:58:17 +00:00
CI workflows
2461fa2e25 chore: auto-update github workflows 2025-10-09 15:55:45 +00:00
CI workflows
60e49ba343 Update pro ref 2025-10-09 15:47:43 +00:00
SPRINX0\prochazka
a709381980 SYNC: try to fix TE editing problem 2025-10-09 15:33:58 +00:00
SPRINX0\prochazka
c2805c8c1c use link www.dbgate.io 2025-10-08 11:19:15 +02:00
SPRINX0\prochazka
0346cbe911 SYNC: try to fix screenshot 2025-10-03 08:06:23 +00:00
CI workflows
74a4d4455b chore: auto-update github workflows 2025-10-03 07:38:01 +00:00
CI workflows
3659e1c91f Update pro ref 2025-10-03 07:37:43 +00:00
SPRINX0\prochazka
09da5c6968 SYNC: test fix 2025-10-03 07:37:32 +00:00
SPRINX0\prochazka
2575efd28d SYNC: fix 2025-10-03 06:36:41 +00:00
CI workflows
78c1c8d2b1 chore: auto-update github workflows 2025-10-03 06:24:14 +00:00
CI workflows
d5147f3dbb Update pro ref 2025-10-03 06:23:56 +00:00
CI workflows
593580fbc1 chore: auto-update github workflows 2025-10-02 15:42:25 +00:00
CI workflows
67ca1cb638 Update pro ref 2025-10-02 15:42:02 +00:00
SPRINX0\prochazka
bcf5b64545 SYNC: LLM provider config test 2025-10-02 15:41:51 +00:00
SPRINX0\prochazka
e37ad663b3 longer timeout 2025-10-02 16:38:41 +02:00
SPRINX0\prochazka
4ce7582a46 changelog 2025-10-02 16:38:33 +02:00
SPRINX0\prochazka
1c371bb7bf v6.6.4 2025-10-02 16:23:57 +02:00
CI workflows
17fdeb0734 chore: auto-update github workflows 2025-10-02 12:32:59 +00:00
SPRINX0\prochazka
5c6f0c32b3 fix 2025-10-02 14:32:36 +02:00
SPRINX0\prochazka
e630280673 v6.6.4-beta.11 2025-10-02 13:13:47 +02:00
SPRINX0\prochazka
7c87961adf v6.6.4-premium-beta.10 2025-10-02 13:13:37 +02:00
SPRINX0\prochazka
0df5ceb7d2 shared cloud folders also for community (readonly) 2025-10-02 13:11:22 +02:00
CI workflows
54342f2592 chore: auto-update github workflows 2025-10-02 10:14:37 +00:00
CI workflows
fbad558c37 Update pro ref 2025-10-02 10:14:21 +00:00
CI workflows
b27dfb290c chore: auto-update github workflows 2025-10-02 08:56:55 +00:00
CI workflows
063c930349 Update pro ref 2025-10-02 08:56:40 +00:00
SPRINX0\prochazka
c1f1e489a7 team files controller 2025-09-29 11:18:53 +02:00
CI workflows
62960ed8de chore: auto-update github workflows 2025-09-29 09:11:45 +00:00
CI workflows
03305e04a7 Update pro ref 2025-09-29 09:11:29 +00:00
SPRINX0\prochazka
4b294b1125 v6.6.4-premium-beta.9 2025-09-26 17:06:20 +02:00
CI workflows
7122a21591 chore: auto-update github workflows 2025-09-26 15:02:10 +00:00
CI workflows
9682e571a2 Update pro ref 2025-09-26 15:01:52 +00:00
SPRINX0\prochazka
2cefbfb8aa SYNC: fixes 2025-09-26 15:01:40 +00:00
CI workflows
084062488c chore: auto-update github workflows 2025-09-26 14:52:55 +00:00
CI workflows
4dbe2b5297 Update pro ref 2025-09-26 14:52:27 +00:00
CI workflows
0391e5bc3d Update pro ref 2025-09-26 14:51:57 +00:00
SPRINX0\prochazka
bcbd96c608 SYNC: chart - support group by week 2025-09-26 14:51:45 +00:00
SPRINX0\prochazka
6a6633e151 SYNC: fix 2025-09-26 13:22:26 +00:00
SPRINX0\prochazka
5c4546a54c v6.6.4-premium-beta.8 2025-09-26 14:19:42 +02:00
CI workflows
255e328340 chore: auto-update github workflows 2025-09-26 12:14:15 +00:00
CI workflows
aaf9b085d7 Update pro ref 2025-09-26 12:13:58 +00:00
SPRINX0\prochazka
c7b14c9fab SYNC: Merge branch 'feature/team-files' 2025-09-26 12:13:45 +00:00
CI workflows
0436ba78e2 chore: auto-update github workflows 2025-09-26 10:45:03 +00:00
CI workflows
1085a1c221 Update pro ref 2025-09-26 10:44:50 +00:00
Jan Prochazka
494b33bd7a SYNC: Merge pull request #12 from dbgate/feature/team-files 2025-09-26 10:44:39 +00:00
SPRINX0\prochazka
925e3a67da fixed for shorten names 2025-09-25 10:48:43 +02:00
SPRINX0\prochazka
78026f7fa5 Shorten identifiers 2025-09-25 10:38:14 +02:00
SPRINX0\prochazka
9d77cac4bb filter only tables with rows 2025-09-25 09:23:52 +02:00
SPRINX0\prochazka
6747280964 table row count in firebird 2025-09-25 09:11:56 +02:00
SPRINX0\prochazka
d7dbd79f7c fixed loading structure for firebird 2025-09-25 08:59:12 +02:00
SPRINX0\prochazka
aec692c402 simplified blob loading for firebird 2025-09-25 08:33:28 +02:00
CI workflows
113bbead4a chore: auto-update github workflows 2025-09-24 15:25:42 +00:00
CI workflows
1361c196da Update pro ref 2025-09-24 15:25:23 +00:00
SPRINX0\prochazka
987995ad68 SYNC: upgraded query splitter 2025-09-24 15:25:12 +00:00
SPRINX0\prochazka
d24db7c053 using firebird splitter options 2025-09-24 17:23:09 +02:00
CI workflows
25a9d52d86 chore: auto-update github workflows 2025-09-24 15:10:12 +00:00
CI workflows
946c632920 Update pro ref 2025-09-24 15:09:54 +00:00
SPRINX0\prochazka
94bcbb80fd SYNC: upgraded query splitter 2025-09-24 15:09:42 +00:00
SPRINX0\prochazka
ba58965770 firebird small refactor 2025-09-24 13:25:12 +02:00
SPRINX0\prochazka
e50ddbf348 v6.6.4-premium-beta.7 2025-09-24 10:56:14 +02:00
SPRINX0\prochazka
e95f21fa9c SYNC: fixed app log tab 2025-09-24 08:48:30 +00:00
SPRINX0\prochazka
7026b765bd v6.6.4-premium-beta.6 2025-09-24 10:31:17 +02:00
SPRINX0\prochazka
53eedd2701 SYNC: better DB analysis logging 2025-09-24 08:30:13 +00:00
SPRINX0\prochazka
9886c58681 SYNC: improved logging of DB analyser 2025-09-24 08:12:39 +00:00
Pavel
953f6da7d7 feat: allow pinning and unpinning single database connections 2025-09-13 22:01:13 +02:00
Jan Prochazka
da1efe880d v6.6.4-beta.5 2025-09-12 11:42:09 +02:00
Jan Prochazka
bd0b6dd4d2 remove sourcemaps from build 2025-09-12 11:41:59 +02:00
SPRINX0\prochazka
1c049fe1fb v6.6.4-premium-beta.4 2025-09-11 16:04:25 +02:00
SPRINX0\prochazka
10b1b87d55 flexColContainer fixed 2025-09-11 16:02:28 +02:00
SPRINX0\prochazka
3ec6a3b3f2 v6.6.4-premium-beta.3 2025-09-11 15:27:58 +02:00
SPRINX0\prochazka
8da919d4cd missing file 2025-09-11 15:26:13 +02:00
SPRINX0\prochazka
5cd59b795b SYNC: fix 2025-09-11 13:20:09 +00:00
CI workflows
e4dc30d1fb chore: auto-update github workflows 2025-09-11 12:53:50 +00:00
CI workflows
2013cee298 Update pro ref 2025-09-11 12:53:33 +00:00
Jan Prochazka
1f89a6304b SYNC: Merge pull request #10 from dbgate/feat-chat-compl-api 2025-09-11 12:53:21 +00:00
SPRINX0\prochazka
580e0f9df7 SYNC: fixed load apps where there is no apps dir 2025-09-11 11:47:36 +00:00
CI workflows
2221c4548e chore: auto-update github workflows 2025-09-11 11:11:37 +00:00
CI workflows
e06c226e84 Update pro ref 2025-09-11 11:11:19 +00:00
Jan Prochazka
11a4f0ef32 SYNC: Merge pull request #9 from dbgate/feature/apps 2025-09-11 11:11:08 +00:00
SPRINX0\prochazka
ef15f299d2 v6.6.4-beta.2 2025-09-05 13:10:27 +02:00
SPRINX0\prochazka
1d333b9322 Fixed: does no longer work with Cockroach DB #1202 2025-09-05 13:08:21 +02:00
CI workflows
5302ed8653 chore: auto-update github workflows 2025-09-04 08:15:02 +00:00
CI workflows
2cf26a10c4 Update pro ref 2025-09-04 08:14:44 +00:00
SPRINX0\prochazka
deda1e4251 SYNC: fixed authorization select 2025-09-04 08:14:33 +00:00
SPRINX0\prochazka
c042bf2d15 SYNC: commented out feedback menu 2025-09-04 07:20:22 +00:00
SPRINX0\prochazka
8ced6aa205 SYNC: settings permissions 2025-09-04 07:16:21 +00:00
SPRINX0\prochazka
3ca514c85b fixed error 2025-09-03 16:29:43 +02:00
CI workflows
27b8e7d5ec chore: auto-update github workflows 2025-09-03 14:22:22 +00:00
SPRINX0\prochazka
a2d77a3917 removed upload error to Gist feature 2025-09-03 16:22:06 +02:00
CI workflows
c0549fe422 chore: auto-update github workflows 2025-09-03 14:15:46 +00:00
CI workflows
0dc8d6fd68 Update pro ref 2025-09-03 14:15:32 +00:00
SPRINX0\prochazka
cd97647818 SYNC: next permissions 2025-09-03 14:15:22 +00:00
CI workflows
fcb5811f37 chore: auto-update github workflows 2025-09-03 13:30:37 +00:00
CI workflows
a239ba2211 Update pro ref 2025-09-03 13:30:20 +00:00
SPRINX0\prochazka
e8dc96bcda v6.6.4-beta.1 2025-09-03 10:40:05 +02:00
SPRINX0\prochazka
096ad97a73 SYNC: Fixed DbGate Web UI Connections do not display 'Databases' #1199 2025-09-03 08:37:50 +00:00
SPRINX0\prochazka
a5a5517555 changelog 2025-09-01 15:46:09 +02:00
SPRINX0\prochazka
d9b88a5d8d v6.6.3 2025-09-01 15:22:28 +02:00
SPRINX0\prochazka
8493ea22eb v6.6.3-premium-beta.1 2025-09-01 09:51:40 +02:00
SPRINX0\prochazka
aebb87aa20 Fixed - error listing databases from Azure SQL SERVER #1197 2025-09-01 09:09:04 +02:00
SPRINX0\prochazka
14e97cb24f fixed mongoDB rename collection #1198 2025-09-01 08:57:30 +02:00
SPRINX0\prochazka
26cc15b4a2 save zoom level 2025-09-01 08:40:20 +02:00
SPRINX0\prochazka
50ce606e12 v6.6.2 2025-08-29 09:24:47 +02:00
SPRINX0\prochazka
b052320f98 changelog 2025-08-29 09:23:30 +02:00
SPRINX0\prochazka
ecb3cebc9f v6.6.2-beta.7 2025-08-29 08:59:02 +02:00
SPRINX0\prochazka
4a32dfc71b fixed: DBGate cannot execute any queries and the process is killed #1195 2025-08-29 08:39:19 +02:00
SPRINX0\prochazka
c913929ff9 changelog 2025-08-29 08:31:14 +02:00
SPRINX0\prochazka
d7d5b29b07 change log 2025-08-28 17:55:22 +02:00
SPRINX0\prochazka
111a7f72f8 v6.6.2-premium-beta.6 2025-08-28 15:07:14 +02:00
SPRINX0\prochazka
149abdef9b v6.6.2-premium-beta.3 2025-08-28 15:07:00 +02:00
Jan Prochazka
980848f35a Merge pull request #1194 from dbgate/feature/mssql-better-query-for-desc
feat: add ep.class = 1 filter
2025-08-28 15:05:22 +02:00
Pavel
d1e0c86a71 feat: add ep.class = 1 filter 2025-08-26 18:05:42 +02:00
SPRINX0\prochazka
d3872ca8a3 Redis "Scan all" button 2025-08-26 10:48:19 +02:00
SPRINX0\prochazka
003dec269a Allow max page size 50000 #1185 2025-08-26 10:30:30 +02:00
SPRINX0\prochazka
e88092cde7 v6.6.2-premium-beta.5 2025-08-26 10:11:16 +02:00
SPRINX0\prochazka
98422bd355 changelog 2025-08-26 10:10:58 +02:00
CI workflows
af83b89812 chore: auto-update github workflows 2025-08-26 07:45:32 +00:00
CI workflows
cffb1b8713 Update pro ref 2025-08-26 07:45:06 +00:00
CI workflows
0b4895addf chore: auto-update github workflows 2025-08-26 07:27:54 +00:00
CI workflows
08646ea12a Update pro ref 2025-08-26 07:27:36 +00:00
CI workflows
40beb7ceeb chore: auto-update github workflows 2025-08-25 14:37:58 +00:00
CI workflows
e7963aa324 Update pro ref 2025-08-25 14:37:41 +00:00
CI workflows
d5894b9fb7 chore: auto-update github workflows 2025-08-25 14:25:01 +00:00
CI workflows
75c47a1113 Update pro ref 2025-08-25 14:24:46 +00:00
SPRINX0\prochazka
97923b19bf SYNC: roles UX 2025-08-25 14:24:33 +00:00
SPRINX0\prochazka
879c89a285 v6.6.2-beta.4 2025-08-25 10:06:03 +02:00
SPRINX0\prochazka
718727462b v6.6.2-premium-beta.3 2025-08-25 10:05:53 +02:00
SPRINX0\prochazka
63f2fd864a SYNC: fixed 2025-08-22 12:50:45 +00:00
SPRINX0\prochazka
556dda5790 SYNC: about window screenshot 2025-08-22 12:24:33 +00:00
SPRINX0\prochazka
aeaa8549e3 SYNC: renamed screenshot 2025-08-22 12:09:06 +00:00
CI workflows
1a8a757912 chore: auto-update github workflows 2025-08-22 11:41:07 +00:00
CI workflows
c69bb8acc9 Update pro ref 2025-08-22 11:40:52 +00:00
SPRINX0\prochazka
4989d67b92 v6.6.2-premium-beta.2 2025-08-22 12:32:41 +02:00
SPRINX0\prochazka
b272d342b0 Merge branch 'feature/mongo-server-summary' 2025-08-22 12:32:31 +02:00
SPRINX0\prochazka
251b2853e0 v6.6.2-premium-beta.1 2025-08-22 12:00:23 +02:00
CI workflows
d381c9505f chore: auto-update github workflows 2025-08-22 08:34:07 +00:00
CI workflows
eeb3b8f939 Update pro ref 2025-08-22 08:33:50 +00:00
SPRINX0\prochazka
ee40f32b0c SYNC: fixed permission check, new permission test 2025-08-22 08:33:39 +00:00
CI workflows
02a69ea6d9 chore: auto-update github workflows 2025-08-22 07:46:24 +00:00
CI workflows
d47bb5ecd4 Update pro ref 2025-08-22 07:46:05 +00:00
Jan Prochazka
d2d6e2f554 SYNC: Merge pull request #8 from dbgate/feature/db-table-permissions 2025-08-22 07:45:53 +00:00
Jan Prochazka
f48b4a6c62 Merge pull request #1186 from dbgate/feature/mongo-server-summary
Feature/mongo server summary
2025-08-21 15:46:34 +02:00
Pavel
07f7b7df1b fix: send empty string instead of null when process has no operation 2025-08-21 14:45:03 +02:00
Pavel
26486f9d63 style: full width + scroll for child1 wrapper in SummaryProcesses splitter 2025-08-21 14:44:31 +02:00
Pavel
428aa970b9 Merge branch 'master' into feature/mongo-server-summary 2025-08-21 14:27:27 +02:00
SPRINX0\prochazka
2a8c532786 fixes 2025-08-21 13:00:33 +02:00
Pavel
4381829d16 fix: use full process list and info for operation in mysql server summary 2025-08-19 18:43:08 +02:00
Pavel
176d75768f feat: vertical split for process operation 2025-08-19 18:40:08 +02:00
Pavel
e28e363bd0 feat: keep tabs and table headers sticky, scroll table bodies 2025-08-19 18:05:07 +02:00
Pavel
114ce1ea3a feat: make summary table sortable, filterable, with sticky header 2025-08-19 17:13:19 +02:00
Pavel
78215552bf chore: add logging for server summary 2025-08-19 17:08:15 +02:00
SPRINX0\prochazka
be4fe6ab77 removed obsolete method 2025-08-19 12:26:31 +02:00
Pavel
ab924f6b48 feat: confirm kill process 2025-08-14 22:55:36 +02:00
Pavel
e4bf2b4c9b feat: mssql server summary 2025-08-14 22:48:51 +02:00
Pavel
c49b1a46f8 feat: mysql server summary 2025-08-14 22:48:42 +02:00
Pavel
6a56726734 fix: refresh processes only if summary tab is opened 2025-08-14 22:48:35 +02:00
Pavel
8f6783792f feat: add error handler for server summary tab 2025-08-14 21:48:56 +02:00
Pavel
b5ab1d6b33 fix: always convert seconds to fixed 3 when printing running time 2025-08-14 21:36:34 +02:00
Pavel
25fe1d03a7 feat: server summary for postgres 2025-08-14 21:33:33 +02:00
Pavel
90546ad4a7 feat: toasts when killing db proc 2025-08-14 21:16:53 +02:00
Pavel
939bbc3f2c chore: update summary typing 2025-08-14 20:09:47 +02:00
Pavel
02ee327595 feat: format fileSize cols in summary databases 2025-08-14 19:21:04 +02:00
Pavel
d8081277ee feat: parse col to row formatter 2025-08-14 19:18:42 +02:00
Pavel
164a112e0c feat: refresh processes only if processes tab is open 2025-08-14 19:18:33 +02:00
Pavel
697bde7b53 feat: pass tabVisible to tabs 2025-08-14 19:18:14 +02:00
Pavel
2fd5244f85 feat: use _t translations in server summary 2025-08-14 18:46:55 +02:00
SPRINX0\prochazka
354d925f94 v6.6.1 2025-08-14 17:30:52 +02:00
SPRINX0\prochazka
88c74f020c changelog 2025-08-14 17:29:59 +02:00
SPRINX0\prochazka
9b7021b1cd v6.6.1-beta.19 2025-08-14 17:06:20 +02:00
SPRINX0\prochazka
30dbb23330 fixed potential error 2025-08-14 17:05:56 +02:00
Pavel
b1696ed1cd feat: autorefresh processes, refresh processes w/o displaying loader 2025-08-14 16:40:20 +02:00
Pavel
61f1c99791 feat: send cols from list databases 2025-08-14 16:39:54 +02:00
Jan Prochazka
bb9a559b80 Merge pull request #1184 from daujyungx/master
fix: #340, SQL Server tedious driver connect to named instance
2025-08-14 12:17:12 +02:00
Jan Prochazka
5e20ea4975 Merge pull request #1183 from dbgate/feature/1181-oracle-lob-handler
feat: normalize oracle LOB values
2025-08-14 12:13:14 +02:00
daujyungx
c5d7e30bed fix: #340, SQL Server tedious driver connect to named instance 2025-08-14 15:50:46 +08:00
Pavel
de5f3a31ed fix: fetch oracle blobs as buffer 2025-08-13 14:47:59 +02:00
Jan Prochazka
7913c4135f v6.6.1-premium-beta.18 2025-08-13 07:22:17 +02:00
Jan Prochazka
d49345de9c v6.6.1-beta.18 2025-08-13 07:22:06 +02:00
Jan Prochazka
1dbfa71bde v6.6.1-beta.16 2025-08-13 07:21:34 +02:00
Jan Prochazka
d46b84f0d6 Merge pull request #1171 from dbgate/feature/1137-mssql-column-desc
feat: add MS_Description to mssql analyzer columns
2025-08-13 06:55:35 +02:00
Pavel
9d456992cf feat: kill db process 2025-08-13 04:54:15 +02:00
Pavel
5dd62ad2aa feat: new server summary tab 2025-08-13 04:53:25 +02:00
Pavel
a293eeb398 feat: new mongo server summary 2025-08-13 04:53:24 +02:00
Pavel
a6b6b5eb70 feat: add typing for listVariables, listProcess, killProcess, update server summary typings 2025-08-13 04:53:24 +02:00
Pavel
971af1df5f fix: use baseColumns for tables hashes in _runAnalysis 2025-08-12 15:44:22 +02:00
Pavel
21641da0bf feat: safeCommentChanges flag to dialect 2025-08-07 15:32:24 +02:00
Pavel
a8d9c145e6 Merge branch 'master' into feature/1137-mssql-column-desc 2025-08-07 14:21:57 +02:00
Pavel
bfafcb76ba fix: use only cols with comments for obj hash 2025-08-07 14:21:57 +02:00
Pavel
52f74f1204 Merge branch 'master' into feature/1137-mssql-column-desc 2025-08-07 12:53:09 +02:00
SPRINX0\prochazka
00c212ecb2 v6.6.1-premium-beta.15 2025-08-07 09:57:18 +02:00
CI workflows
e77d302a49 chore: auto-update github workflows 2025-08-07 07:37:04 +00:00
CI workflows
5183f3729c Update pro ref 2025-08-07 07:36:45 +00:00
SPRINX0\prochazka
e93102f105 v6.6.1-premium-beta.14 2025-08-07 07:46:26 +02:00
CI workflows
a4652689ec chore: auto-update github workflows 2025-08-06 14:42:42 +00:00
CI workflows
ede1005087 Update pro ref 2025-08-06 14:42:23 +00:00
SPRINX0\prochazka
e97c7ed32e SYNC: fix BE 2025-08-06 14:42:10 +00:00
SPRINX0\prochazka
9230a2ab73 SYNC: Merge branch 'feature/mongosh' 2025-08-06 12:56:16 +00:00
SPRINX0\prochazka
01ee66ec4f SYNC: fixed choosing default database 2025-08-06 12:14:29 +00:00
SPRINX0\prochazka
4c12cbd3cc SYNC: improved log export 2025-08-06 11:37:49 +00:00
SPRINX0\prochazka
d8eeeaaef6 SYNC: export logs 2025-08-06 11:29:24 +00:00
SPRINX0\prochazka
994dae2a7d SYNC: export logs to file 2025-08-06 11:17:05 +00:00
CI workflows
51da6e928d chore: auto-update github workflows 2025-08-06 10:35:50 +00:00
CI workflows
a328ad030e Update pro ref 2025-08-06 10:35:32 +00:00
SPRINX0\prochazka
ed7605eccd SYNC: Merge branch 'feature/dblogs' 2025-08-06 10:35:21 +00:00
Pavel
de43880a1c feat: fetch only base column info for modifications, drop columnComment only if exists 2025-08-06 09:36:16 +02:00
Pavel
32b1a5b22d feat: include comments in contentHash for mssql 2025-08-06 09:36:16 +02:00
Pavel
795992fb42 feat: add add comment to table test 2025-08-06 09:36:16 +02:00
Pavel
339eab33c8 feat: add add comment to column test 2025-08-06 09:36:16 +02:00
Pavel
489f3aa19d feat: add test for table creation with comments 2025-08-06 09:36:16 +02:00
Pavel
888e284f84 fix: removed duplicate method, simplify changeColumnComment 2025-08-06 09:36:16 +02:00
Pavel
d151114f08 fix: correctly parse table comment when creating a table 2025-08-06 09:36:16 +02:00
Pavel
4f6a3c23ad feat: ms_description for tables, upd columns 2025-08-06 09:36:16 +02:00
CI workflows
4ed437fd4e chore: auto-update github workflows 2025-08-05 15:15:45 +00:00
CI workflows
fa0b21ba81 Update pro ref 2025-08-05 15:14:18 +00:00
Jan Prochazka
a96f1d0b49 SYNC: Merge pull request #7 from dbgate/feature/applog 2025-08-05 15:14:06 +00:00
SPRINX0\prochazka
ac0aebd751 SYNC: strict delimit logs by date 2025-08-04 10:52:45 +00:00
SPRINX0\prochazka
781cbb4668 SYNC: updated mongo version 2025-08-04 08:34:43 +00:00
SPRINX0\prochazka
56ca1911a1 SYNC: mongosh - use only in stream method 2025-08-01 14:21:45 +00:00
Pavel
c20aec23a2 feat: change ms_description when columnComment changes 2025-07-31 15:01:22 +02:00
Pavel
a9cff01579 fix: show columnComment in structure ui 2025-07-31 15:01:22 +02:00
Pavel
6af56a61b8 feat: add ignoreComments options testEqualColumns 2025-07-31 15:01:22 +02:00
Pavel
252db191a6 feat: add ms_description to mssqldumper table options 2025-07-31 15:01:22 +02:00
Pavel
5e2776f264 feat: add MS_Description to mssql analyzer columns 2025-07-31 15:01:22 +02:00
CI workflows
7ec9fb2c44 chore: auto-update github workflows 2025-07-31 10:52:36 +00:00
SPRINX0\prochazka
34facb6b3b try to fix build 2025-07-31 12:52:17 +02:00
SPRINX0\prochazka
5bb2a1368e v6.6.1-premium-beta.13 2025-07-31 10:49:08 +02:00
CI workflows
85a7bbca66 chore: auto-update github workflows 2025-07-31 08:47:07 +00:00
CI workflows
2b7f27bf8f Update pro ref 2025-07-31 08:46:50 +00:00
SPRINX0\prochazka
dd57945e7d v6.6.1-premium-beta.12 2025-07-31 10:26:58 +02:00
SPRINX0\prochazka
fa02b4fd56 SYNC: tag fix 2025-07-31 08:26:35 +00:00
SPRINX0\prochazka
c8715eead5 v6.6.1-premium-beta.11 2025-07-31 09:56:51 +02:00
SPRINX0\prochazka
37aae8c10e SYNC: GTM support 2025-07-31 07:55:58 +00:00
SPRINX0\prochazka
a97ed02e15 support for script embedding 2025-07-31 09:34:45 +02:00
SPRINX0\prochazka
38ebb2d06a Revert "update all packages"
This reverts commit c765bfc946.
2025-07-30 15:42:23 +02:00
SPRINX0\prochazka
c765bfc946 update all packages 2025-07-30 15:41:18 +02:00
CI workflows
b00ac75f33 chore: auto-update github workflows 2025-07-30 10:40:36 +00:00
SPRINX0\prochazka
36730168c0 SYNC: upgraded mongo for E2E tests 2025-07-30 10:40:19 +00:00
SPRINX0\prochazka
4339ece6f6 v6.6.1-beta.7 2025-07-30 11:17:38 +02:00
SPRINX0\prochazka
041c997e59 v6.6.1-premium-beta.6 2025-07-30 11:17:26 +02:00
SPRINX0\prochazka
098ebb38dc SYNC: fixed BE for premium 2025-07-30 09:05:11 +00:00
CI workflows
b99c38a070 chore: auto-update github workflows 2025-07-30 08:46:03 +00:00
SPRINX0\prochazka
07b42d8e74 Merge branch 'feature/mongosh' 2025-07-30 10:45:02 +02:00
SPRINX0\prochazka
062d168c97 windows arm build for community is back 2025-07-30 10:44:33 +02:00
SPRINX0\prochazka
c9be5fb125 v6.6.1-premium-beta.5 2025-07-30 10:20:20 +02:00
SPRINX0\prochazka
06c6716ee1 v6.6.1-beta.4 2025-07-30 10:20:09 +02:00
SPRINX0\prochazka
e856d8fddf mongosh support only for premium 2025-07-30 10:19:57 +02:00
SPRINX0\prochazka
20339f70c1 text 2025-07-30 08:42:30 +02:00
SPRINX0\prochazka
c2e6cf1eb0 time-limited offer info 2025-07-30 08:19:26 +02:00
SPRINX0\prochazka
8dfdca97cd readme 2025-07-30 07:38:17 +02:00
Pavel
6e8cdc24a3 feat: use getCollection syntax for mongo 2025-07-29 16:33:39 +02:00
Pavel
461f1e39fa fix: use correct dbName in connection string 2025-07-29 16:12:56 +02:00
Pavel
4892dbce5e fix: use [''] syntax for mongosh col calls 2025-07-29 15:31:13 +02:00
SPRINX0\prochazka
9da32a13de v6.6.1-beta.3 2025-07-29 10:55:18 +02:00
SPRINX0\prochazka
9c6908da77 reverted change 2025-07-29 10:55:07 +02:00
SPRINX0\prochazka
ac081e6c86 v6.6.1-beta.2 2025-07-29 10:50:18 +02:00
SPRINX0\prochazka
cc9744156c removed windows ARM build 2025-07-29 10:50:09 +02:00
SPRINX0\prochazka
72a874c7f4 v6.6.1-beta.1 2025-07-29 09:58:42 +02:00
SPRINX0\prochazka
2809324b35 Merge branch 'master' into feature/mongosh 2025-07-29 09:57:20 +02:00
SPRINX0\prochazka
58e6c45c73 mongosh volatile packages 2025-07-29 09:56:25 +02:00
SPRINX0\prochazka
43aaf192a2 promo widget 2025-07-25 13:04:21 +02:00
SPRINX0\prochazka
c0574bc738 readme 2025-07-25 12:25:02 +02:00
SPRINX0\prochazka
27e5d639ef changelog 2025-07-25 09:27:45 +02:00
SPRINX0\prochazka
aa9fdd4fc9 changelog 2025-07-25 09:24:22 +02:00
SPRINX0\prochazka
7af6d9b2ce test wait 2025-07-25 09:22:55 +02:00
SPRINX0\prochazka
0e06d28335 v6.6.0 2025-07-25 09:21:25 +02:00
SPRINX0\prochazka
e3b86e4d41 v6.5.7-premium-beta.6 2025-07-25 08:39:12 +02:00
SPRINX0\prochazka
5b1bfe7379 v6.5.7-beta.5 2025-07-25 08:39:02 +02:00
CI workflows
76d07b967e chore: auto-update github workflows 2025-07-25 06:30:06 +00:00
CI workflows
4b1932fe52 Update pro ref 2025-07-25 06:29:48 +00:00
CI workflows
a56de91b1e chore: auto-update github workflows 2025-07-25 06:23:26 +00:00
CI workflows
6b4fb616bc Update pro ref 2025-07-25 06:23:11 +00:00
SPRINX0\prochazka
d24670e14e SYNC: chat & chart permission 2025-07-25 06:23:00 +00:00
CI workflows
13b3ae35ed chore: auto-update github workflows 2025-07-25 06:04:12 +00:00
CI workflows
6860e1f085 Update pro ref 2025-07-25 06:03:56 +00:00
SPRINX0\prochazka
74fa1c6628 SYNC: store connection definition in storagedb 2025-07-25 06:03:44 +00:00
SPRINX0\prochazka
85f847a4f3 SYNC: fix 2025-07-25 06:01:37 +00:00
CI workflows
39df72d163 chore: auto-update github workflows 2025-07-25 05:54:02 +00:00
CI workflows
5ca8786802 Update pro ref 2025-07-25 05:53:44 +00:00
SPRINX0\prochazka
ca145967dc SYNC: Merge branch 'feature/firestore' 2025-07-25 05:53:34 +00:00
Pavel
06a3ce7486 feat: update mongo getCollectionExportQueryScript 2025-07-24 20:55:24 +02:00
Pavel
9f85b6154d feat: look into cursor's firstBatch in mongosh res 2025-07-24 18:48:58 +02:00
SPRINX0\prochazka
732763689a v6.5.7-beta.4 2025-07-24 17:28:19 +02:00
SPRINX0\prochazka
0ea75f25f1 process workflows - upgrade node 2025-07-24 17:21:28 +02:00
SPRINX0\prochazka
eab27ce0bb node version 22 2025-07-24 17:20:52 +02:00
SPRINX0\prochazka
29fd381989 Merge branch 'master' into feature/mongosh 2025-07-24 17:14:59 +02:00
Pavel
06a845697a feat: mongosh for script and stream methods 2025-07-24 15:52:59 +02:00
CI workflows
b12587626d chore: auto-update github workflows 2025-07-24 10:11:39 +00:00
SPRINX0\prochazka
b49988032e firestore plugin build 2025-07-24 12:11:21 +02:00
SPRINX0\prochazka
a9b4152553 v6.5.7-premium-beta.3 2025-07-24 12:08:00 +02:00
CI workflows
63720045f1 chore: auto-update github workflows 2025-07-24 10:07:04 +00:00
CI workflows
aa7529192e Update pro ref 2025-07-24 10:06:46 +00:00
SPRINX0\prochazka
a162a15a27 v6.5.7-premium-beta.2 2025-07-24 11:01:28 +02:00
CI workflows
457a73efae chore: auto-update github workflows 2025-07-24 09:00:52 +00:00
CI workflows
91c3dd982b Update pro ref 2025-07-24 09:00:35 +00:00
Jan Prochazka
c171f93c93 SYNC: Merge pull request #5 from dbgate/feature/firestore 2025-07-24 09:00:23 +00:00
SPRINX0\prochazka
0cf9ddb1cd SYNC: try to fix test 2025-07-24 07:13:39 +00:00
SPRINX0\prochazka
2322537350 v6.5.7-premium-beta.1 2025-07-24 08:58:19 +02:00
SPRINX0\prochazka
6ce50109da gw server API 2025-07-24 08:55:21 +02:00
SPRINX0\prochazka
abe7fdf34d SYNC: simplker chat test 2025-07-24 06:52:58 +00:00
CI workflows
ecf2f5ed8c chore: auto-update github workflows 2025-07-24 06:50:30 +00:00
CI workflows
98b4934dd5 Update pro ref 2025-07-24 06:50:16 +00:00
SPRINX0\prochazka
0bc7c544ad SYNC: chat UX 2025-07-24 06:50:05 +00:00
SPRINX0\prochazka
1f7ad9d418 SYNC: fix 2025-07-24 06:31:57 +00:00
CI workflows
4b9d3b3dbc Update pro ref 2025-07-24 06:31:44 +00:00
SPRINX0\prochazka
571e332ed5 SYNC: fixed test 2025-07-24 06:31:34 +00:00
CI workflows
d78d22b188 chore: auto-update github workflows 2025-07-24 06:11:23 +00:00
CI workflows
d37638240a Update pro ref 2025-07-24 06:11:05 +00:00
SPRINX0\prochazka
ae7fd3f87b SYNC: open db chat ctx menu 2025-07-24 06:11:00 +00:00
Jan Prochazka
e82e63b288 SYNC: adidtional test 2025-07-24 06:10:57 +00:00
Jan Prochazka
0149d4e27b SYNC: database chat test 2025-07-24 06:10:54 +00:00
CI workflows
9fc9c71b6f chore: auto-update github workflows 2025-07-23 13:46:29 +00:00
CI workflows
b264f690d1 Update pro ref 2025-07-23 13:46:13 +00:00
Jan Prochazka
c07e19c898 SYNC: Merge pull request #6 from dbgate/feature/ai-assistant 2025-07-23 13:46:01 +00:00
Pavel
b8e50737d2 feat: basic mongosh support 2025-07-23 04:40:52 +02:00
SPRINX0\prochazka
082d0aa02f translations 2025-07-18 10:03:58 +02:00
SPRINX0\prochazka
ca26d0e450 translations 2025-07-18 10:01:24 +02:00
SPRINX0\prochazka
8cbe021ffc v6.5.6 2025-07-17 09:04:06 +02:00
SPRINX0\prochazka
7b39d8025b changelog 2025-07-17 09:03:18 +02:00
SPRINX0\prochazka
47bd35b151 fixed failing test 2025-07-17 08:44:22 +02:00
SPRINX0\prochazka
d7add54a3c v6.5.6-premium-beta.5 2025-07-17 08:19:37 +02:00
SPRINX0\prochazka
d3c937569b SYNC: anonymized cloud instance 2025-07-17 06:18:50 +00:00
SPRINX0\prochazka
94ca613201 v6.5.6-premium-beta.4 2025-07-16 15:52:09 +02:00
SPRINX0\prochazka
30f2f635be v6.5.6-premium-beta.3 2025-07-16 15:51:20 +02:00
SPRINX0\prochazka
57f4d31c21 SYNC: fix 2025-07-16 13:24:47 +00:00
SPRINX0\prochazka
90e4fd7ff5 SYNC: disable splitting queries with blank lines? #1162 2025-07-16 13:16:02 +00:00
SPRINX0\prochazka
17835832f2 v6.5.6-beta.2 2025-07-16 14:51:55 +02:00
SPRINX0\prochazka
949817f597 SYNC: SKIP_ALL_AUTH support 2025-07-16 12:48:38 +00:00
SPRINX0\prochazka
23065f2c4b SYNC: test fix 2025-07-16 12:26:08 +00:00
SPRINX0\prochazka
b623b06cf0 SYNC: bugfix 2025-07-16 12:02:19 +00:00
CI workflows
55c86d8ec7 chore: auto-update github workflows 2025-07-16 11:43:35 +00:00
CI workflows
e955617aa1 Update pro ref 2025-07-16 11:43:18 +00:00
SPRINX0\prochazka
6304610713 SYNC: hard limit for pie chart 2025-07-16 11:43:08 +00:00
SPRINX0\prochazka
47d20928e0 SYNC: try to fix tests 2025-07-16 11:24:06 +00:00
SPRINX0\prochazka
c9a4d02e0d SYNC: new object window screenshot 2025-07-16 11:03:07 +00:00
CI workflows
6513dfb42a chore: auto-update github workflows 2025-07-16 10:52:10 +00:00
CI workflows
3f0412453f Update pro ref 2025-07-16 10:51:29 +00:00
SPRINX0\prochazka
dcba319071 SYNC: disabled messages in new object modal 2025-07-16 10:51:19 +00:00
SPRINX0\prochazka
d19851fc0c SYNC: compare database in new object modal 2025-07-16 10:51:17 +00:00
SPRINX0\prochazka
d6eb06cb72 SYNC: export db window 2025-07-16 10:51:16 +00:00
SPRINX0\prochazka
473080d7ee SYNC: typo 2025-07-16 10:51:15 +00:00
SPRINX0\prochazka
c98a6adb09 SYNC: new object modal testid 2025-07-16 10:51:13 +00:00
SPRINX0\prochazka
2cd56d5041 SYNC: new object button refactor + diagram accesibility 2025-07-16 10:51:12 +00:00
SPRINX0\prochazka
982098672e SYNC: new object modal 2025-07-16 10:51:10 +00:00
SPRINX0\prochazka
445ecea3e6 SYNC: new object modal WIP 2025-07-16 10:51:09 +00:00
SPRINX0\prochazka
db977dfba4 SYNC: next screenshots 2025-07-15 08:32:33 +00:00
CI workflows
a3c12ab9f5 chore: auto-update github workflows 2025-07-15 08:22:58 +00:00
CI workflows
0f7e152650 Update pro ref 2025-07-15 08:22:41 +00:00
SPRINX0\prochazka
b55c7ba9a1 v6.5.6-premium-beta.1 2025-07-15 09:16:08 +02:00
CI workflows
8256c9f7ad chore: auto-update github workflows 2025-07-15 07:12:36 +00:00
CI workflows
59727d7b0b Update pro ref 2025-07-15 07:12:20 +00:00
SPRINX0\prochazka
2dd2210a73 SYNC: separate schemas mode usable for administration 2025-07-15 07:12:08 +00:00
SPRINX0\prochazka
25aafdbebc SYNC: chart screenshots for tutorial 2025-07-15 06:46:01 +00:00
SPRINX0\prochazka
cd5717169c login checker dummy implementation 2025-07-14 15:23:09 +02:00
CI workflows
a38ad5a11e chore: auto-update github workflows 2025-07-14 13:22:34 +00:00
CI workflows
66d9b56976 Update pro ref 2025-07-14 13:22:21 +00:00
SPRINX0\prochazka
ac40bd1e17 SYNC: checking logged users 2025-07-14 13:22:10 +00:00
SPRINX0\prochazka
16d2a9bf99 SYNC: renew license from set license page 2025-07-14 11:41:55 +00:00
SPRINX0\prochazka
b7e6838d26 refresh license fake 2025-07-14 12:28:49 +02:00
CI workflows
21d23b5baa chore: auto-update github workflows 2025-07-14 10:22:13 +00:00
CI workflows
69a2941d57 Update pro ref 2025-07-14 10:21:59 +00:00
SPRINX0\prochazka
3cc2abf8b9 SYNC: better handling of expired license in electron app 2025-07-14 10:21:49 +00:00
Jan Prochazka
6f4173650a v6.5.5 2025-07-04 09:08:49 +02:00
Jan Prochazka
0fcb8bdc0a SYNC: changelog 2025-07-04 07:05:28 +00:00
CI workflows
c0937cf412 chore: auto-update github workflows 2025-07-04 06:44:51 +00:00
CI workflows
d9ab3aab0f Update pro ref 2025-07-04 06:44:36 +00:00
CI workflows
c8652de78b chore: auto-update github workflows 2025-07-04 06:34:19 +00:00
CI workflows
86dc4e2bd5 Update pro ref 2025-07-04 06:34:04 +00:00
Jan Prochazka
1b9c56a9b9 SYNC: fixed data replicator test 2025-07-04 06:33:54 +00:00
Jan Prochazka
08ab504fac SYNC: fix 2025-07-04 06:33:52 +00:00
CI workflows
21c0842fae chore: auto-update github workflows 2025-07-04 06:11:12 +00:00
CI workflows
8d10feaa68 Update pro ref 2025-07-04 06:10:53 +00:00
Jan Prochazka
df2171f253 SYNC: fixed disabling/enabling auth methods for team premium 2025-07-04 06:10:43 +00:00
SPRINX0\prochazka
f5fcd94faf SYNC: fix 2025-07-03 15:36:58 +00:00
CI workflows
15c5dbef00 chore: auto-update github workflows 2025-07-03 15:28:56 +00:00
CI workflows
79df56c096 Update pro ref 2025-07-03 15:28:39 +00:00
SPRINX0\prochazka
d3fffd9530 SYNC: missing audit logs 2025-07-03 15:28:28 +00:00
SPRINX0\prochazka
527c9c8e6e loginchecker placeholder 2025-07-03 16:58:27 +02:00
CI workflows
d285be45cb chore: auto-update github workflows 2025-07-03 14:51:25 +00:00
CI workflows
0dda9c73f6 Update pro ref 2025-07-03 14:51:10 +00:00
SPRINX0\prochazka
d07bf270e7 SYNC: logi checker refactor 2025-07-03 14:50:59 +00:00
CI workflows
eb24dd5d9e chore: auto-update github workflows 2025-07-03 13:25:47 +00:00
CI workflows
ce693c7cd5 Update pro ref 2025-07-03 13:25:32 +00:00
CI workflows
3198890269 chore: auto-update github workflows 2025-07-03 13:11:26 +00:00
CI workflows
eacc93de43 Update pro ref 2025-07-03 13:11:07 +00:00
SPRINX0\prochazka
9795740257 SYNC: check licensed user count 2025-07-03 13:10:55 +00:00
SPRINX0\prochazka
4548f5d8aa fix 2025-07-03 14:09:29 +02:00
SPRINX0\prochazka
8dfd2fb519 markUserAsActive dummy method 2025-07-03 14:07:04 +02:00
CI workflows
83a40f83e1 chore: auto-update github workflows 2025-07-03 11:50:05 +00:00
CI workflows
5b2fcb3c6c Update pro ref 2025-07-03 11:49:50 +00:00
Jan Prochazka
bcd9adb66d Merge pull request #1159 from dbgate/feature/firebird-always-use-text-for-file
feat: add useServerDatabaseFile for firebird
2025-07-03 13:30:21 +02:00
Pavel
5e2dc114ab feat: add useServerDatabaseFile for firebird 2025-07-03 13:27:22 +02:00
SPRINX0\prochazka
1ced4531be auditlog dummy methods 2025-07-03 13:20:07 +02:00
CI workflows
05fe39c0ae chore: auto-update github workflows 2025-07-03 11:18:54 +00:00
CI workflows
3769b2b3ea Update pro ref 2025-07-03 11:18:38 +00:00
Jan Prochazka
f4d5480f6f SYNC: try to fix test 2025-07-02 14:09:25 +00:00
Jan Prochazka
ddf3c0810b SYNC: charts auto detect 2025-07-02 13:49:39 +00:00
Jan Prochazka
6afd6d0aa0 v6.5.5-premium-beta.5 2025-07-02 13:42:41 +02:00
CI workflows
59fe92eb04 chore: auto-update github workflows 2025-07-02 11:41:41 +00:00
CI workflows
0550f32434 Update pro ref 2025-07-02 11:41:25 +00:00
Jan Prochazka
b702cad549 SYNC: fixed chart test 2025-07-02 11:41:15 +00:00
Jan Prochazka
aa5c4d3c5e changelog 2025-07-02 13:29:37 +02:00
CI workflows
6a99445d97 chore: auto-update github workflows 2025-07-02 11:22:53 +00:00
CI workflows
c9880ef47d Update pro ref 2025-07-02 11:22:38 +00:00
CI workflows
c16452dfcb chore: auto-update github workflows 2025-07-02 11:12:38 +00:00
CI workflows
af802c02fc Update pro ref 2025-07-02 11:12:20 +00:00
Jan Prochazka
8028aafeff SYNC: split too different ydefs 2025-07-02 11:12:09 +00:00
Jan Prochazka
b7469062a1 SYNC: charts autodetector test 2025-07-02 08:57:39 +00:00
Jan Prochazka
33b707aa68 SYNC: chart autodetection improved 2025-07-02 08:50:33 +00:00
Jan Prochazka
cd3a1bebff SYNC: autodetect - with grouping field 2025-07-02 08:23:12 +00:00
Jan Prochazka
794dd5a797 SYNC: refactor 2025-07-02 08:23:10 +00:00
CI workflows
a1465432e8 chore: auto-update github workflows 2025-07-02 06:54:14 +00:00
CI workflows
e1f8af0909 Update pro ref 2025-07-02 06:53:57 +00:00
Jan Prochazka
88918be329 SYNC: chart - detect data types 2025-07-02 06:53:47 +00:00
CI workflows
a3fc1dbff0 chore: auto-update github workflows 2025-07-02 06:20:20 +00:00
CI workflows
626c9825cc Update pro ref 2025-07-02 06:20:02 +00:00
Jan Prochazka
c10a84fc79 SYNC: timeline chart type 2025-07-02 06:19:49 +00:00
SPRINX0\prochazka
f14e4fe197 SYNC: month match 2025-07-01 14:53:27 +00:00
CI workflows
6eb218db5e chore: auto-update github workflows 2025-07-01 14:30:39 +00:00
CI workflows
0e77e053b0 Update pro ref 2025-07-01 14:30:23 +00:00
SPRINX0\prochazka
b9a4128a3d SYNC: charts - grouping field support 2025-07-01 14:30:12 +00:00
CI workflows
16f480e1f3 chore: auto-update github workflows 2025-07-01 12:23:46 +00:00
CI workflows
7c42511133 Update pro ref 2025-07-01 12:23:32 +00:00
CI workflows
1b252a84c2 chore: auto-update github workflows 2025-07-01 12:14:57 +00:00
CI workflows
bf833cadff Update pro ref 2025-07-01 12:14:42 +00:00
CI workflows
b6f872882a chore: auto-update github workflows 2025-07-01 12:12:56 +00:00
CI workflows
a18d6fb441 Update pro ref 2025-07-01 12:12:39 +00:00
CI workflows
922e703e81 chore: auto-update github workflows 2025-07-01 10:59:50 +00:00
CI workflows
d7f5817b8b Update pro ref 2025-07-01 10:59:32 +00:00
SPRINX0\prochazka
92a8a4bfa6 SYNC: chart improvements 2025-07-01 10:59:20 +00:00
CI workflows
b480151fc3 chore: auto-update github workflows 2025-07-01 10:35:08 +00:00
CI workflows
37bdbc1bd5 Update pro ref 2025-07-01 10:34:52 +00:00
CI workflows
8eb669139b chore: auto-update github workflows 2025-07-01 10:11:57 +00:00
CI workflows
b485e8cacc Update pro ref 2025-07-01 10:11:39 +00:00
CI workflows
c4bab61c47 chore: auto-update github workflows 2025-07-01 08:45:43 +00:00
CI workflows
72be417ff1 Update pro ref 2025-07-01 08:45:27 +00:00
CI workflows
9be483d7a6 chore: auto-update github workflows 2025-07-01 08:35:12 +00:00
CI workflows
910f2cee2c Update pro ref 2025-07-01 08:34:48 +00:00
SPRINX0\prochazka
1e47ace527 SYNC: fixed diagram zoom GL#57 2025-07-01 07:14:55 +00:00
Jan Prochazka
912b06b145 v6.5.5-premium-beta.4 2025-06-30 14:35:30 +02:00
CI workflows
87d878e287 chore: auto-update github workflows 2025-06-30 12:33:45 +00:00
CI workflows
be886d6bce Update pro ref 2025-06-30 12:33:29 +00:00
Jan Prochazka
0683deb47e v6.5.5-premium-beta.3 2025-06-30 13:53:39 +02:00
CI workflows
114bb22e27 chore: auto-update github workflows 2025-06-30 11:46:31 +00:00
CI workflows
c327ebc3df Update pro ref 2025-06-30 11:46:18 +00:00
Jan Prochazka
92cbd1c69c SYNC: config fixed 2025-06-30 11:46:05 +00:00
CI workflows
7242515e48 chore: auto-update github workflows 2025-06-30 10:32:57 +00:00
CI workflows
401d1a0ac2 Update pro ref 2025-06-30 10:32:41 +00:00
Jan Prochazka
863e042a37 SYNC: fixed exporting chart for electron 2025-06-30 10:32:31 +00:00
Jan Prochazka
39e6c45ec6 v6.5.5-premium-beta.2 2025-06-30 09:25:54 +02:00
CI workflows
0d364d18c7 chore: auto-update github workflows 2025-06-30 06:56:17 +00:00
CI workflows
61444ea390 Update pro ref 2025-06-30 06:56:01 +00:00
CI workflows
106a935efb chore: auto-update github workflows 2025-06-30 06:28:20 +00:00
CI workflows
d175d8a853 Update pro ref 2025-06-30 06:28:04 +00:00
Jan Prochazka
ce6d19a77a SYNC: call adapt db info 2025-06-30 06:27:54 +00:00
CI workflows
0a29273924 chore: auto-update github workflows 2025-06-29 18:32:57 +00:00
CI workflows
5ede64de58 Update pro ref 2025-06-29 18:32:40 +00:00
Jan Prochazka
224c6ad798 SYNC: try to fix oracle test 2025-06-29 18:27:07 +00:00
Jan Prochazka
57b3a0dbe7 v6.5.5-premium-beta.1 2025-06-28 12:00:16 +02:00
CI workflows
f381f708e0 chore: auto-update github workflows 2025-06-27 13:27:16 +00:00
CI workflows
63bf149546 Update pro ref 2025-06-27 13:27:02 +00:00
SPRINX0\prochazka
cb5e671259 SYNC: audit log test 2025-06-27 13:26:51 +00:00
CI workflows
3e38173c4e chore: auto-update github workflows 2025-06-27 13:01:02 +00:00
CI workflows
efacb643fc Update pro ref 2025-06-27 13:00:45 +00:00
SPRINX0\prochazka
1bd153ea0b SYNC: audit log UX 2025-06-27 13:00:32 +00:00
CI workflows
bac3dc5f4c chore: auto-update github workflows 2025-06-27 11:08:55 +00:00
CI workflows
959a853d77 Update pro ref 2025-06-27 11:08:36 +00:00
SPRINX0\prochazka
90bbdd563b SYNC: Merge branch 'feature/audit-logs' 2025-06-27 11:08:23 +00:00
SPRINX0\prochazka
e3c6d05a0a SYNC: try to fix test 2025-06-27 10:32:01 +00:00
SPRINX0\prochazka
930b3d4538 SYNC: cloud test - use test login 2025-06-27 08:51:36 +00:00
SPRINX0\prochazka
74b78141b4 fake method 2025-06-27 10:02:33 +02:00
SPRINX0\prochazka
aa1108cd5b SYNC: try to fix build 2025-06-27 07:42:42 +00:00
SPRINX0\prochazka
f24b1a9db3 audit log fake methods 2025-06-26 16:49:10 +02:00
SPRINX0\prochazka
71b191e740 SYNC: try to fix test 2025-06-25 07:24:42 +00:00
SPRINX0\prochazka
8f6341b903 SYNC: removed baseUrl config 2025-06-25 07:02:07 +00:00
SPRINX0\prochazka
161586db7e SYNC: try to fix test 2025-06-24 15:13:32 +00:00
SPRINX0\prochazka
052262bef9 SYNC: try to fix test 2025-06-24 13:05:38 +00:00
SPRINX0\prochazka
a5a7144707 SYNC: try to fix test 2025-06-24 10:57:54 +00:00
SPRINX0\prochazka
d945e0426d SYNC: try to fix test 2025-06-24 10:29:47 +00:00
SPRINX0\prochazka
926970c4eb SYNC: added missing script 2025-06-24 09:23:53 +00:00
CI workflows
cce36e0f28 chore: auto-update github workflows 2025-06-24 08:33:21 +00:00
CI workflows
48c6dc5be5 Update pro ref 2025-06-24 08:33:05 +00:00
SPRINX0\prochazka
c641830825 SYNC: private cloud test 2025-06-24 08:32:52 +00:00
SPRINX0\prochazka
eba16cc15d SYNC: dbgate cloud redirect workflow 2025-06-24 07:22:43 +00:00
SPRINX0\prochazka
bd88b8411e SYNC: private cloud test 2025-06-23 14:59:05 +00:00
SPRINX0\prochazka
fc121e8750 missing file 2025-06-23 16:51:42 +02:00
CI workflows
d4142fe56a chore: auto-update github workflows 2025-06-23 14:31:30 +00:00
CI workflows
f76a3e72bb Update pro ref 2025-06-23 14:31:11 +00:00
SPRINX0\prochazka
2d400ae7eb SYNC: folder administration modal 2025-06-23 14:30:58 +00:00
SPRINX0\prochazka
edf1632cab SYNC: fixed connection for scripts 2025-06-23 11:29:04 +00:00
SPRINX0\prochazka
a648f1ee67 SYNC: SQL fixed database WIP 2025-06-23 11:10:46 +00:00
SPRINX0\prochazka
d004e6e86c SYNC: new cloud file 2025-06-23 09:05:42 +00:00
SPRINX0\prochazka
fa321d3e8d SYNC: create query on cloud shortcut 2025-06-23 08:52:07 +00:00
SPRINX0\prochazka
e1e53d323f SYNC: dbgate cloud menu refactor 2025-06-23 07:40:46 +00:00
SPRINX0\prochazka
ccb18ca302 v6.5.4 2025-06-20 17:03:39 +02:00
SPRINX0\prochazka
e170f36bc6 v6.5.4-premium-beta.1 2025-06-20 16:46:18 +02:00
SPRINX0\prochazka
4bd9cc51ee SYNC: try to fix e2e test 2025-06-20 14:41:03 +00:00
SPRINX0\prochazka
43ffbda1a4 SYNC: removed vorgotten test.only 2025-06-20 13:32:20 +00:00
SPRINX0\prochazka
8240485fd1 changelog 2025-06-20 15:05:18 +02:00
SPRINX0\prochazka
7f053c0567 v6.5.3 2025-06-20 15:01:47 +02:00
SPRINX0\prochazka
d2922eb0b7 SYNC: cloud connections fix 2025-06-20 12:57:09 +00:00
SPRINX0\prochazka
fec10d453f license detection fix 2025-06-20 14:36:00 +02:00
SPRINX0\prochazka
162040545d SYNC: improved about modal 2025-06-20 12:29:11 +00:00
CI workflows
f14577f8bf chore: auto-update github workflows 2025-06-20 11:57:27 +00:00
CI workflows
e5720bd1be Update pro ref 2025-06-20 11:57:15 +00:00
CI workflows
6d4959bac8 Update pro ref 2025-06-20 11:57:11 +00:00
SPRINX0\prochazka
d668128a34 SYNC: private cloud UX + fixes 2025-06-20 11:46:04 +00:00
SPRINX0\prochazka
f2af38da4c v6.5.3-premium-beta.1 2025-06-19 18:00:13 +02:00
CI workflows
4776d18fd7 chore: auto-update github workflows 2025-06-19 15:57:59 +00:00
CI workflows
cdd0be7b78 Update pro ref 2025-06-19 15:57:43 +00:00
SPRINX0\prochazka
cd505abb22 SYNC: charts fix 2025-06-19 15:57:31 +00:00
SPRINX0\prochazka
28439c010f SYNC: fixed all search column settings for alternative grids #1118 2025-06-19 12:43:15 +00:00
CI workflows
e85f43beb1 chore: auto-update github workflows 2025-06-19 12:08:31 +00:00
CI workflows
a06cbc0840 Update pro ref 2025-06-19 12:08:18 +00:00
SPRINX0\prochazka
adef9728f8 SYNC: charts UX, error handling, bucket count limit 2025-06-19 12:08:06 +00:00
CI workflows
ff1b688b6e chore: auto-update github workflows 2025-06-19 08:56:09 +00:00
CI workflows
3e7574a927 Update pro ref 2025-06-19 08:55:55 +00:00
SPRINX0\prochazka
f852ea90ad changelog 2025-06-18 11:31:53 +02:00
SPRINX0\prochazka
d8f6247c32 v6.5.2 2025-06-18 11:00:46 +02:00
SPRINX0\prochazka
9dc28393a5 links added 2025-06-18 10:50:44 +02:00
SPRINX0\prochazka
c442c98ecf SYNC: fixed test 2025-06-18 08:43:56 +00:00
SPRINX0\prochazka
71e0109927 v6.5.2-premium-beta.1 2025-06-18 10:21:57 +02:00
SPRINX0\prochazka
9c7dd5ed1c SYNC: close chart fix 2025-06-18 08:17:58 +00:00
CI workflows
83620848f2 chore: auto-update github workflows 2025-06-18 08:13:39 +00:00
CI workflows
d548a5b4f3 Update pro ref 2025-06-18 08:13:25 +00:00
CI workflows
b6e5307755 chore: auto-update github workflows 2025-06-18 08:05:11 +00:00
CI workflows
4c5dc5a145 Update pro ref 2025-06-18 08:04:54 +00:00
SPRINX0\prochazka
69ed9172b8 SYNC: chart UX 2025-06-18 08:04:41 +00:00
CI workflows
68551ae176 chore: auto-update github workflows 2025-06-18 07:48:21 +00:00
CI workflows
c97d9d35ba Update pro ref 2025-06-18 07:48:07 +00:00
SPRINX0\prochazka
e86cc97cdf SYNC: changed chart logic 2025-06-18 07:47:56 +00:00
SPRINX0\prochazka
9bff8608c1 SYNC: auto-detect charts is disabled by default #1145 2025-06-18 07:30:16 +00:00
SPRINX0\prochazka
a10fe6994a v6.5.1 2025-06-17 16:08:07 +02:00
SPRINX0\prochazka
67e6a37b59 v6.5.1-beta.1 2025-06-17 15:42:26 +02:00
SPRINX0\prochazka
3075a56735 fixed cloud login 2025-06-17 15:42:16 +02:00
SPRINX0\prochazka
7e4a862cc3 v6.5.0 2025-06-17 10:02:43 +02:00
SPRINX0\prochazka
ed2078ee3b handle license errors 2025-06-17 09:55:59 +02:00
SPRINX0\prochazka
f99c23a622 v6.4.3-premium-beta.10 2025-06-17 09:08:40 +02:00
SPRINX0\prochazka
41e7317764 v6.4.3-beta.9 2025-06-17 09:08:18 +02:00
SPRINX0\prochazka
e0a78c2399 equal behaviour for premium and community 2025-06-17 09:06:08 +02:00
SPRINX0\prochazka
95ad39d2d4 admin premium widget 2025-06-17 09:03:57 +02:00
CI workflows
b831f827b1 chore: auto-update github workflows 2025-06-16 11:25:21 +00:00
SPRINX0\prochazka
c5d8413d9c SYNC: fix 2025-06-16 11:25:08 +00:00
CI workflows
4648ea3424 Update pro ref 2025-06-16 11:25:04 +00:00
SPRINX0\prochazka
e05bd6f231 SYNC: front matter in chart screenshot 2025-06-16 11:24:49 +00:00
CI workflows
caadee7901 chore: auto-update github workflows 2025-06-16 10:58:30 +00:00
CI workflows
18c524117d Update pro ref 2025-06-16 10:58:11 +00:00
SPRINX0\prochazka
ad30fb8b04 SYNC: query result chart screenshot 2025-06-16 10:57:58 +00:00
SPRINX0\prochazka
a8077965a9 icon fix 2025-06-16 10:36:57 +02:00
SPRINX0\prochazka
532ab85ebb public cloud improvements 2025-06-16 10:32:49 +02:00
SPRINX0\prochazka
546227eb37 changelog 2025-06-16 09:55:15 +02:00
CI workflows
7ec3b262d3 chore: auto-update github workflows 2025-06-16 07:24:20 +00:00
CI workflows
c435000d24 Update pro ref 2025-06-16 07:24:03 +00:00
SPRINX0\prochazka
eaa60c281e SYNC: chart screenshot 2025-06-16 07:23:51 +00:00
Jan Prochazka
cd7cf63144 SYNC: copy to cloud folder works for connected DB 2025-06-15 08:22:58 +00:00
CI workflows
6a704aa079 chore: auto-update github workflows 2025-06-15 07:25:21 +00:00
CI workflows
d6b5a1cec8 Update pro ref 2025-06-15 07:25:03 +00:00
Jan Prochazka
9a24ad31cc SYNC: pie chart out labels 2025-06-15 07:24:50 +00:00
CI workflows
9331630b54 chore: auto-update github workflows 2025-06-15 06:58:43 +00:00
CI workflows
904e869d7f Update pro ref 2025-06-15 06:58:29 +00:00
Jan Prochazka
307fa4f5e6 SYNC: trim license 2025-06-15 06:58:15 +00:00
SPRINX0\prochazka
131d16d3ea v6.4.3-beta.8 2025-06-13 16:35:05 +02:00
SPRINX0\prochazka
dbc54c45dd v6.4.3-premium-beta.7 2025-06-13 16:34:50 +02:00
SPRINX0\prochazka
a96a84d509 SYNC: grayed scripts for non active database 2025-06-13 14:31:53 +00:00
SPRINX0\prochazka
3eb8863f67 SYNC: cloud connections in tab names 2025-06-13 13:25:34 +00:00
SPRINX0\prochazka
8737ab077b SYNC: fixed status bar color 2025-06-13 13:14:16 +00:00
SPRINX0\prochazka
50bb6a1d19 SYNC: prod cloud 2025-06-13 13:12:09 +00:00
SPRINX0\prochazka
4181b75af7 SYNC: connection color for cloud connections 2025-06-13 13:08:56 +00:00
CI workflows
620705c87a chore: auto-update github workflows 2025-06-13 12:14:41 +00:00
CI workflows
50b7b93529 Update pro ref 2025-06-13 12:14:22 +00:00
SPRINX0\prochazka
6f18f6bd5c SYNC: debug config 2025-06-13 12:14:07 +00:00
SPRINX0\prochazka
01d256eeee SYNC: don't show private cloud for web app 2025-06-13 11:52:03 +00:00
SPRINX0\prochazka
a1405412a8 debug config 2025-06-13 11:49:19 +02:00
SPRINX0\prochazka
58589b3a15 v6.4.3-premium-beta.6 2025-06-13 09:44:49 +02:00
SPRINX0\prochazka
2983266fdf Merge branch 'master' of https://github.com/dbgate/dbgate 2025-06-12 16:55:58 +02:00
SPRINX0\prochazka
e33df8f12d cloud content refactor 2025-06-12 16:55:55 +02:00
CI workflows
0e0e8e9d18 chore: auto-update github workflows 2025-06-12 13:30:21 +00:00
Jan Prochazka
37f8b54752 Merge pull request #1130 from dbgate/feature/firebird
Feature/firebird
2025-06-12 15:29:58 +02:00
SPRINX0\prochazka
e9a086ad23 SYNC: refresh cloud files improvements 2025-06-12 12:26:35 +00:00
SPRINX0\prochazka
7c06a8ac41 SYNC: fix refresh publis files 2025-06-12 12:13:33 +00:00
SPRINX0\prochazka
70801d958e SYNC: security rename 2025-06-12 11:51:22 +00:00
SPRINX0\prochazka
cf3f95c952 SYNC: security fixes 2025-06-12 11:49:53 +00:00
Pavel
d708616a6a feat: add createFirebirdInsertStream with datetime fields transform 2025-06-12 13:44:24 +02:00
Pavel
17711bc5c9 Revert "feat: transform rows suport for json lines reader"
This reverts commit b74b6b3284.
2025-06-12 13:29:48 +02:00
Pavel
1e2474921b Revert "feat: transform firebird model rows"
This reverts commit 5760ada3b4.
2025-06-12 13:29:46 +02:00
SPRINX0\prochazka
3f37b2b728 security fixes 2025-06-12 10:58:46 +02:00
SPRINX0\prochazka
18b11df672 security: prevent file traversal in uploads 2025-06-12 10:43:27 +02:00
SPRINX0\prochazka
c34f2d4da7 better UX when logging in in electron 2025-06-11 17:25:14 +02:00
SPRINX0\prochazka
d61792581a Merge branch 'master' of https://github.com/dbgate/dbgate 2025-06-11 17:02:40 +02:00
SPRINX0\prochazka
76d9a511b8 sql generate aliases automatically #1122 2025-06-11 17:02:38 +02:00
CI workflows
4248326697 chore: auto-update github workflows 2025-06-11 12:41:59 +00:00
SPRINX0\prochazka
a540b38151 don't generate artifacts for check build 2025-06-11 14:41:34 +02:00
SPRINX0\prochazka
8fb5ef0c1d v6.4.3-premium-beta.5 2025-06-11 14:39:00 +02:00
SPRINX0\prochazka
2da4979e59 UX fix 2025-06-11 14:29:59 +02:00
SPRINX0\prochazka
0146e4a1dd #1111 mssql - handle timestamp and computed columns in clonerows 2025-06-11 11:28:46 +02:00
SPRINX0\prochazka
34bdb72ffd #1118 2025-06-11 11:09:07 +02:00
SPRINX0\prochazka
2ef7c63047 copy column names #1119 2025-06-11 10:34:22 +02:00
SPRINX0\prochazka
95f5417761 fixed grid performance problem - limited length of cell string 2025-06-11 09:30:58 +02:00
SPRINX0\prochazka
4922ec4499 Merge branch 'master' into feature/firebird 2025-06-11 08:09:28 +02:00
SPRINX0\prochazka
20d947a199 readme for firebird 2025-06-10 16:46:16 +02:00
SPRINX0\prochazka
871dc90ee4 fixed community build (missing JslChart.svelte) 2025-06-09 16:32:57 +02:00
416 changed files with 17962 additions and 3333 deletions

View File

@@ -5,10 +5,14 @@ name: Electron app BETA
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+
permissions:
id-token: write
contents: write
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
environment: dbgate-app
strategy:
fail-fast: false
matrix:
@@ -24,7 +28,7 @@ jobs:
echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -53,12 +57,6 @@ jobs:
run: |
yarn setCurrentVersion
- name: printSecrets
run: |
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
- name: fillPackagedPlugins
run: |
@@ -66,21 +64,53 @@ jobs:
- name: Install Snapcraft
if: matrix.os == 'ubuntu-22.04'
uses: samuelmeuli/action-snapcraft@v1
- name: Publish
- name: Publish Windows
if: matrix.os == 'windows-2022'
run: |
yarn run build:app
- name: Publish MacOS
if: matrix.os == 'macos-14'
run: |
yarn run build:app
env:
GH_TOKEN: '${{ secrets.GH_TOKEN }}'
WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
APPLE_ID: '${{ secrets.APPLE_ID }}'
APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
- name: Publish Linux
if: matrix.os == 'ubuntu-22.04'
run: |
yarn run build:app
env:
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
- name: Azure login (OIDC)
uses: azure/login@v2
if: matrix.os == 'windows-2022'
with:
client-id: ${{ secrets.AZURE_TC_CLIENT_ID }}
tenant-id: ${{ secrets.AZURE_TC_TENANT_ID }}
allow-no-subscriptions: true
- name: Sign Windows artifacts with Azure Trusted Signing
uses: azure/trusted-signing-action@v0
if: matrix.os == 'windows-2022'
with:
endpoint: https://wus3.codesigning.azure.net/
trusted-signing-account-name: DbGate
certificate-profile-name: DbGate-Release
files-folder: app/dist
files-folder-filter: exe
timestamp-rfc3161: http://timestamp.acs.microsoft.com
timestamp-digest: SHA256
- name: Fix YML hashes
if: matrix.os == 'windows-2022'
run: |
yarn run fixYmlHashes
- name: Copy artifacts
run: |
mkdir artifacts
@@ -92,6 +122,7 @@ jobs:
cp app/dist/*win*.exe artifacts/dbgate-beta.exe || true
cp app/dist/*win_x64.zip artifacts/dbgate-windows-beta.zip || true
cp app/dist/*win_arm64.zip artifacts/dbgate-windows-beta-arm64.zip || true
cp app/dist/*win_arm64.exe artifacts/dbgate-windows-beta-arm64.exe || true
cp app/dist/*-mac_universal.dmg artifacts/dbgate-beta.dmg || true
cp app/dist/*-mac_x64.dmg artifacts/dbgate-beta-x64.dmg || true
cp app/dist/*-mac_arm64.dmg artifacts/dbgate-beta-arm64.dmg || true
@@ -111,16 +142,16 @@ jobs:
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: '${{ matrix.os }}'
name: ${{ matrix.os }}
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: 'startsWith(github.ref, ''refs/tags/'')'
if: startsWith(github.ref, 'refs/tags/')
with:
files: artifacts/**
prerelease: true
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Print content of notarization-error.log
if: failure() && matrix.os == 'macos-14'
run: |

View File

@@ -6,9 +6,13 @@ name: Electron app check build
push:
tags:
- check-[0-9]+-[0-9]+-[0-9]+.[0-9]+
permissions:
id-token: write
contents: write
jobs:
build:
runs-on: ${{ matrix.os }}
environment: dbgate-app
strategy:
fail-fast: false
matrix:
@@ -49,12 +53,6 @@ jobs:
run: |
yarn setCurrentVersion
- name: printSecrets
run: |
yarn printSecrets
env:
GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
- name: fillPackagedPlugins
run: |
@@ -62,21 +60,53 @@ jobs:
- name: Install Snapcraft
if: matrix.os == 'ubuntu-22.04'
uses: samuelmeuli/action-snapcraft@v1
- name: Publish
- name: Publish Windows
if: matrix.os == 'windows-2022'
run: |
yarn run build:app
- name: Publish MacOS
if: matrix.os == 'macos-14'
run: |
yarn run build:app
env:
GH_TOKEN: ${{ secrets.GH_TOKEN }}
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
- name: Publish Linux
if: matrix.os == 'ubuntu-22.04'
run: |
yarn run build:app
env:
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
- name: Azure login (OIDC)
uses: azure/login@v2
if: matrix.os == 'windows-2022'
with:
client-id: ${{ secrets.AZURE_TC_CLIENT_ID }}
tenant-id: ${{ secrets.AZURE_TC_TENANT_ID }}
allow-no-subscriptions: true
- name: Sign Windows artifacts with Azure Trusted Signing
uses: azure/trusted-signing-action@v0
if: matrix.os == 'windows-2022'
with:
endpoint: https://wus3.codesigning.azure.net/
trusted-signing-account-name: DbGate
certificate-profile-name: DbGate-Release
files-folder: app/dist
files-folder-filter: exe
timestamp-rfc3161: http://timestamp.acs.microsoft.com
timestamp-digest: SHA256
- name: Fix YML hashes
if: matrix.os == 'windows-2022'
run: |
yarn run fixYmlHashes
- name: Copy artifacts
run: |
mkdir artifacts
@@ -88,6 +118,7 @@ jobs:
cp app/dist/*win*.exe artifacts/dbgate-check.exe || true
cp app/dist/*win_x64.zip artifacts/dbgate-windows-check.zip || true
cp app/dist/*win_arm64.zip artifacts/dbgate-windows-check-arm64.zip || true
cp app/dist/*win_arm64.exe artifacts/dbgate-windows-check-arm64.exe || true
cp app/dist/*-mac_universal.dmg artifacts/dbgate-check.dmg || true
cp app/dist/*-mac_x64.dmg artifacts/dbgate-check-x64.dmg || true
cp app/dist/*-mac_arm64.dmg artifacts/dbgate-check-arm64.dmg || true
@@ -104,11 +135,6 @@ jobs:
mv app/dist/*.yml artifacts/ || true
rm artifacts/builder-debug.yml
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}
path: artifacts
- name: Print content of notarization-error.log
if: failure() && matrix.os == 'macos-14'
run: |

View File

@@ -5,10 +5,14 @@ name: Electron app PREMIUM BETA
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+
permissions:
id-token: write
contents: write
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
environment: dbgate-app
strategy:
fail-fast: false
matrix:
@@ -24,7 +28,7 @@ jobs:
echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -37,9 +41,9 @@ jobs:
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: '${{ secrets.GH_TOKEN }}'
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: ecea1eef17c69c56b0633317e24a68c5220a4810
ref: 6195e103e1d45e4f59bade60df5dd1784f4e6c77
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -81,37 +85,67 @@ jobs:
cd dbgate-merged
yarn setCurrentVersion
- name: printSecrets
run: |
cd ..
cd dbgate-merged
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
- name: fillPackagedPlugins
run: |
cd ..
cd dbgate-merged
yarn fillPackagedPlugins
- name: Publish
- name: Publish Windows
if: matrix.os == 'windows-2022'
run: |
cd ..
cd dbgate-merged
yarn run build:app
- name: Publish MacOS
if: matrix.os == 'macos-14'
run: |
cd ..
cd dbgate-merged
yarn run build:app
env:
GH_TOKEN: '${{ secrets.GH_TOKEN }}'
WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
APPLE_ID: '${{ secrets.APPLE_ID }}'
APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
- name: Publish Linux
if: matrix.os == 'ubuntu-22.04'
run: |
cd ..
cd dbgate-merged
yarn run build:app
env:
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
- name: Azure login (OIDC)
uses: azure/login@v2
if: matrix.os == 'windows-2022'
with:
client-id: ${{ secrets.AZURE_TC_CLIENT_ID }}
tenant-id: ${{ secrets.AZURE_TC_TENANT_ID }}
allow-no-subscriptions: true
- name: Sign Windows artifacts with Azure Trusted Signing
uses: azure/trusted-signing-action@v0
if: matrix.os == 'windows-2022'
with:
endpoint: https://wus3.codesigning.azure.net/
trusted-signing-account-name: DbGate
certificate-profile-name: DbGate-Release
files-folder: ../dbgate-merged/app/dist
files-folder-filter: exe
timestamp-rfc3161: http://timestamp.acs.microsoft.com
timestamp-digest: SHA256
- name: Fix YML hashes
if: matrix.os == 'windows-2022'
run: |
cd ..
cd dbgate-merged
yarn run fixYmlHashes
- name: Copy artifacts
run: |
mkdir artifacts
@@ -123,6 +157,7 @@ jobs:
cp ../dbgate-merged/app/dist/*win*.exe artifacts/dbgate-premium-beta.exe || true
cp ../dbgate-merged/app/dist/*win_x64.zip artifacts/dbgate-windows-premium-beta.zip || true
cp ../dbgate-merged/app/dist/*win_arm64.zip artifacts/dbgate-windows-premium-beta-arm64.zip || true
cp ../dbgate-merged/app/dist/*win_arm64.exe artifacts/dbgate-windows-premium-beta-arm64.exe || true
cp ../dbgate-merged/app/dist/*-mac_universal.dmg artifacts/dbgate-premium-beta.dmg || true
cp ../dbgate-merged/app/dist/*-mac_x64.dmg artifacts/dbgate-premium-beta-x64.dmg || true
cp ../dbgate-merged/app/dist/*-mac_arm64.dmg artifacts/dbgate-premium-beta-arm64.dmg || true
@@ -142,16 +177,16 @@ jobs:
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: '${{ matrix.os }}'
name: ${{ matrix.os }}
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: 'startsWith(github.ref, ''refs/tags/'')'
if: startsWith(github.ref, 'refs/tags/')
with:
files: artifacts/**
prerelease: true
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Print content of notarization-error.log
if: failure() && matrix.os == 'macos-14'
run: |

View File

@@ -5,10 +5,14 @@ name: Electron app PREMIUM
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+
permissions:
id-token: write
contents: write
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
environment: dbgate-app
strategy:
fail-fast: false
matrix:
@@ -24,7 +28,7 @@ jobs:
echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -37,9 +41,9 @@ jobs:
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: '${{ secrets.GH_TOKEN }}'
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: ecea1eef17c69c56b0633317e24a68c5220a4810
ref: 6195e103e1d45e4f59bade60df5dd1784f4e6c77
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -81,37 +85,67 @@ jobs:
cd dbgate-merged
yarn setCurrentVersion
- name: printSecrets
run: |
cd ..
cd dbgate-merged
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
- name: fillPackagedPlugins
run: |
cd ..
cd dbgate-merged
yarn fillPackagedPlugins
- name: Publish
- name: Publish Windows
if: matrix.os == 'windows-2022'
run: |
cd ..
cd dbgate-merged
yarn run build:app
- name: Publish MacOS
if: matrix.os == 'macos-14'
run: |
cd ..
cd dbgate-merged
yarn run build:app
env:
GH_TOKEN: '${{ secrets.GH_TOKEN }}'
WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
APPLE_ID: '${{ secrets.APPLE_ID }}'
APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
- name: Publish Linux
if: matrix.os == 'ubuntu-22.04'
run: |
cd ..
cd dbgate-merged
yarn run build:app
env:
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
- name: Azure login (OIDC)
uses: azure/login@v2
if: matrix.os == 'windows-2022'
with:
client-id: ${{ secrets.AZURE_TC_CLIENT_ID }}
tenant-id: ${{ secrets.AZURE_TC_TENANT_ID }}
allow-no-subscriptions: true
- name: Sign Windows artifacts with Azure Trusted Signing
uses: azure/trusted-signing-action@v0
if: matrix.os == 'windows-2022'
with:
endpoint: https://wus3.codesigning.azure.net/
trusted-signing-account-name: DbGate
certificate-profile-name: DbGate-Release
files-folder: ../dbgate-merged/app/dist
files-folder-filter: exe
timestamp-rfc3161: http://timestamp.acs.microsoft.com
timestamp-digest: SHA256
- name: Fix YML hashes
if: matrix.os == 'windows-2022'
run: |
cd ..
cd dbgate-merged
yarn run fixYmlHashes
- name: Copy artifacts
run: |
mkdir artifacts
@@ -123,6 +157,7 @@ jobs:
cp ../dbgate-merged/app/dist/*win*.exe artifacts/dbgate-premium-latest.exe || true
cp ../dbgate-merged/app/dist/*win_x64.zip artifacts/dbgate-windows-premium-latest.zip || true
cp ../dbgate-merged/app/dist/*win_arm64.zip artifacts/dbgate-windows-premium-latest-arm64.zip || true
cp ../dbgate-merged/app/dist/*win_arm64.exe artifacts/dbgate-windows-premium-latest-arm64.exe || true
cp ../dbgate-merged/app/dist/*-mac_universal.dmg artifacts/dbgate-premium-latest.dmg || true
cp ../dbgate-merged/app/dist/*-mac_x64.dmg artifacts/dbgate-premium-latest-x64.dmg || true
cp ../dbgate-merged/app/dist/*-mac_arm64.dmg artifacts/dbgate-premium-latest-arm64.dmg || true
@@ -142,16 +177,16 @@ jobs:
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: '${{ matrix.os }}'
name: ${{ matrix.os }}
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: 'startsWith(github.ref, ''refs/tags/'')'
if: startsWith(github.ref, 'refs/tags/')
with:
files: artifacts/**
prerelease: false
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Print content of notarization-error.log
if: failure() && matrix.os == 'macos-14'
run: |

View File

@@ -5,10 +5,14 @@ name: Electron app
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+
permissions:
id-token: write
contents: write
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
environment: dbgate-app
strategy:
fail-fast: false
matrix:
@@ -24,7 +28,7 @@ jobs:
echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -49,12 +53,6 @@ jobs:
run: |
yarn setCurrentVersion
- name: printSecrets
run: |
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
- name: fillPackagedPlugins
run: |
@@ -62,24 +60,56 @@ jobs:
- name: Install Snapcraft
if: matrix.os == 'ubuntu-22.04'
uses: samuelmeuli/action-snapcraft@v1
- name: Publish
- name: Publish Windows
if: matrix.os == 'windows-2022'
run: |
yarn run build:app
- name: Publish MacOS
if: matrix.os == 'macos-14'
run: |
yarn run build:app
env:
GH_TOKEN: '${{ secrets.GH_TOKEN }}'
WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
APPLE_ID: '${{ secrets.APPLE_ID }}'
APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
- name: Publish Linux
if: matrix.os == 'ubuntu-22.04'
run: |
yarn run build:app
env:
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
- name: generatePadFile
run: |
yarn generatePadFile
- name: Azure login (OIDC)
uses: azure/login@v2
if: matrix.os == 'windows-2022'
with:
client-id: ${{ secrets.AZURE_TC_CLIENT_ID }}
tenant-id: ${{ secrets.AZURE_TC_TENANT_ID }}
allow-no-subscriptions: true
- name: Sign Windows artifacts with Azure Trusted Signing
uses: azure/trusted-signing-action@v0
if: matrix.os == 'windows-2022'
with:
endpoint: https://wus3.codesigning.azure.net/
trusted-signing-account-name: DbGate
certificate-profile-name: DbGate-Release
files-folder: app/dist
files-folder-filter: exe
timestamp-rfc3161: http://timestamp.acs.microsoft.com
timestamp-digest: SHA256
- name: Fix YML hashes
if: matrix.os == 'windows-2022'
run: |
yarn run fixYmlHashes
- name: Copy artifacts
run: |
mkdir artifacts
@@ -91,6 +121,7 @@ jobs:
cp app/dist/*win*.exe artifacts/dbgate-latest.exe || true
cp app/dist/*win_x64.zip artifacts/dbgate-windows-latest.zip || true
cp app/dist/*win_arm64.zip artifacts/dbgate-windows-latest-arm64.zip || true
cp app/dist/*win_arm64.exe artifacts/dbgate-windows-latest-arm64.exe || true
cp app/dist/*-mac_universal.dmg artifacts/dbgate-latest.dmg || true
cp app/dist/*-mac_x64.dmg artifacts/dbgate-latest-x64.dmg || true
cp app/dist/*-mac_arm64.dmg artifacts/dbgate-latest-arm64.dmg || true
@@ -114,16 +145,16 @@ jobs:
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: '${{ matrix.os }}'
name: ${{ matrix.os }}
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: 'startsWith(github.ref, ''refs/tags/'')'
if: startsWith(github.ref, 'refs/tags/')
with:
files: artifacts/**
prerelease: false
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Print content of notarization-error.log
if: failure() && matrix.os == 'macos-14'
run: |

View File

@@ -5,11 +5,11 @@ name: Cloud images PREMIUM
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-packer-beta.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+
- v[0-9]+.[0-9]+.[0-9]+-packer-beta.[0-9]+
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
strategy:
matrix:
os:
@@ -17,15 +17,15 @@ jobs:
steps:
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- name: Install jq
run: |
sudo apt-get install jq -y
@@ -37,9 +37,9 @@ jobs:
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: '${{ secrets.GH_TOKEN }}'
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: ecea1eef17c69c56b0633317e24a68c5220a4810
ref: 6195e103e1d45e4f59bade60df5dd1784f4e6c77
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -66,13 +66,6 @@ jobs:
cd ..
cd dbgate-merged
yarn setCurrentVersion
- name: printSecrets
run: |
cd ..
cd dbgate-merged
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
- name: Prepare packer build
run: |
cd ..
@@ -87,16 +80,16 @@ jobs:
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: '${{ matrix.os }}'
name: ${{ matrix.os }}
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: 'startsWith(github.ref, ''refs/tags/'')'
if: startsWith(github.ref, 'refs/tags/')
with:
files: artifacts/**
prerelease: true
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Run `packer init` for Azure
run: |
cd ../dbgate-merged/packer
@@ -110,33 +103,33 @@ jobs:
cd ../dbgate-merged/packer
packer init ./aws-ubuntu.pkr.hcl
env:
AWS_ACCESS_KEY_ID: '${{secrets.AWS_ACCESS_KEY_ID}}'
AWS_SECRET_ACCESS_KEY: '${{secrets.AWS_SECRET_ACCESS_KEY}}'
AWS_DEFAULT_REGION: '${{secrets.AWS_DEFAULT_REGION}}'
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}}
- name: Run `packer build` for AWS
run: |
cd ../dbgate-merged/packer
packer build ./aws-ubuntu.pkr.hcl
env:
AWS_ACCESS_KEY_ID: '${{secrets.AWS_ACCESS_KEY_ID}}'
AWS_SECRET_ACCESS_KEY: '${{secrets.AWS_SECRET_ACCESS_KEY}}'
AWS_DEFAULT_REGION: '${{secrets.AWS_DEFAULT_REGION}}'
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}}
- name: Delete old Azure VMs
run: |
cd ../dbgate-merged/packer
chmod +x delete-old-azure-images.sh
./delete-old-azure-images.sh
env:
AZURE_CLIENT_ID: '${{secrets.AZURE_CLIENT_ID}}'
AZURE_CLIENT_SECRET: '${{secrets.AZURE_CLIENT_SECRET}}'
AZURE_TENANT_ID: '${{secrets.AZURE_TENANT_ID}}'
AZURE_SUBSCRIPTION_ID: '${{secrets.AZURE_SUBSCRIPTION_ID}}'
AZURE_CLIENT_ID: ${{secrets.AZURE_CLIENT_ID}}
AZURE_CLIENT_SECRET: ${{secrets.AZURE_CLIENT_SECRET}}
AZURE_TENANT_ID: ${{secrets.AZURE_TENANT_ID}}
AZURE_SUBSCRIPTION_ID: ${{secrets.AZURE_SUBSCRIPTION_ID}}
- name: Delete old AMIs (AWS)
run: |
cd ../dbgate-merged/packer
chmod +x delete-old-amis.sh
./delete-old-amis.sh
env:
AWS_ACCESS_KEY_ID: '${{secrets.AWS_ACCESS_KEY_ID}}'
AWS_SECRET_ACCESS_KEY: '${{secrets.AWS_SECRET_ACCESS_KEY}}'
AWS_DEFAULT_REGION: '${{secrets.AWS_DEFAULT_REGION}}'
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}}

View File

@@ -5,11 +5,11 @@ name: Docker image PREMIUM
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+
- v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
strategy:
matrix:
os:
@@ -17,7 +17,7 @@ jobs:
steps:
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -42,9 +42,9 @@ jobs:
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: '${{ secrets.GH_TOKEN }}'
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: ecea1eef17c69c56b0633317e24a68c5220a4810
ref: 6195e103e1d45e4f59bade60df5dd1784f4e6c77
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -76,14 +76,6 @@ jobs:
cd dbgate-merged
yarn setCurrentVersion
- name: printSecrets
run: |
cd ..
cd dbgate-merged
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
- name: Prepare docker image
run: |
cd ..
@@ -97,12 +89,12 @@ jobs:
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: '${{ secrets.DOCKER_USERNAME }}'
password: '${{ secrets.DOCKER_PASSWORD }}'
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push
uses: docker/build-push-action@v3
with:
push: true
context: ../dbgate-merged/docker
tags: '${{ steps.meta.outputs.tags }}'
platforms: 'linux/amd64,linux/arm64'
tags: ${{ steps.meta.outputs.tags }}
platforms: linux/amd64,linux/arm64

View File

@@ -5,11 +5,11 @@ name: Docker image Community
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+
- v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
strategy:
matrix:
os:
@@ -17,7 +17,7 @@ jobs:
steps:
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -65,12 +65,6 @@ jobs:
run: |
yarn setCurrentVersion
- name: printSecrets
run: |
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
- name: Prepare docker image
run: |
@@ -82,20 +76,20 @@ jobs:
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: '${{ secrets.DOCKER_USERNAME }}'
password: '${{ secrets.DOCKER_PASSWORD }}'
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push
uses: docker/build-push-action@v3
with:
push: true
context: ./docker
tags: '${{ steps.meta.outputs.tags }}'
platforms: 'linux/amd64,linux/arm64,linux/arm/v7'
tags: ${{ steps.meta.outputs.tags }}
platforms: linux/amd64,linux/arm64,linux/arm/v7
- name: Build and push alpine
uses: docker/build-push-action@v3
with:
push: true
context: ./docker
file: ./docker/Dockerfile-alpine
tags: '${{ steps.alpmeta.outputs.tags }}'
platforms: 'linux/amd64,linux/arm64,linux/arm/v7'
tags: ${{ steps.alpmeta.outputs.tags }}
platforms: linux/amd64,linux/arm64,linux/arm/v7

View File

@@ -5,11 +5,14 @@ name: NPM packages PREMIUM
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+
- v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+
permissions:
id-token: write
contents: write
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
strategy:
matrix:
os:
@@ -17,22 +20,22 @@ jobs:
steps:
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- name: Checkout dbgate/dbgate-pro
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: '${{ secrets.GH_TOKEN }}'
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: ecea1eef17c69c56b0633317e24a68c5220a4810
ref: 6195e103e1d45e4f59bade60df5dd1784f4e6c77
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -49,13 +52,8 @@ jobs:
cd ..
cd dbgate-merged
node adjustNpmPackageJsonPremium
- name: Configure NPM token
env:
NPM_TOKEN: '${{ secrets.NPM_TOKEN }}'
run: |
cd ..
cd dbgate-merged
npm config set '//registry.npmjs.org/:_authToken' "${NPM_TOKEN}"
- name: Update npm
run: npm install -g npm@latest
- name: Remove dbmodel - should be not published
run: |
cd ..
@@ -71,30 +69,35 @@ jobs:
cd ..
cd dbgate-merged
yarn setCurrentVersion
- name: printSecrets
- name: Compute npm dist-tag
run: |
cd ..
cd dbgate-merged
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
if [[ "${GITHUB_REF_NAME}" =~ -alpha\. ]]; then
echo "NPM_TAG=alpha" >> $GITHUB_ENV
else
echo "NPM_TAG=latest" >> $GITHUB_ENV
fi
- name: Publish dbgate-api-premium
run: |
cd ..
cd dbgate-merged/packages/api
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-web-premium
run: |
cd ..
cd dbgate-merged/packages/web
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-serve-premium
run: |
cd ..
cd dbgate-merged/packages/serve
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-plugin-cosmosdb
run: |
cd ..
cd dbgate-merged/plugins/dbgate-plugin-cosmosdb
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-plugin-firestore
run: |
cd ..
cd dbgate-merged/plugins/dbgate-plugin-firestore
npm publish --tag "$NPM_TAG"

View File

@@ -5,11 +5,14 @@ name: NPM packages
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+
- v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+
permissions:
id-token: write
contents: write
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
strategy:
matrix:
os:
@@ -17,117 +20,116 @@ jobs:
steps:
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
- name: Configure NPM token
env:
NPM_TOKEN: '${{ secrets.NPM_TOKEN }}'
run: |
npm config set '//registry.npmjs.org/:_authToken' "${NPM_TOKEN}"
node-version: 22.x
- name: Update npm
run: npm install -g npm@latest
- name: yarn install
run: |
yarn install
- name: setCurrentVersion
run: |
yarn setCurrentVersion
- name: printSecrets
- name: Compute npm dist-tag
run: |
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
if [[ "${GITHUB_REF_NAME}" =~ -alpha\. ]]; then
echo "NPM_TAG=alpha" >> $GITHUB_ENV
else
echo "NPM_TAG=latest" >> $GITHUB_ENV
fi
- name: Publish types
working-directory: packages/types
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish tools
working-directory: packages/tools
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish sqltree
working-directory: packages/sqltree
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish api
working-directory: packages/api
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish datalib
working-directory: packages/datalib
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish filterparser
working-directory: packages/filterparser
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish web
working-directory: packages/web
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-serve
working-directory: packages/serve
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbmodel
working-directory: packages/dbmodel
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-plugin-csv
working-directory: plugins/dbgate-plugin-csv
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-plugin-xml
working-directory: plugins/dbgate-plugin-xml
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-plugin-excel
working-directory: plugins/dbgate-plugin-excel
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-plugin-mssql
working-directory: plugins/dbgate-plugin-mssql
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-plugin-mysql
working-directory: plugins/dbgate-plugin-mysql
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-plugin-mongo
working-directory: plugins/dbgate-plugin-mongo
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-plugin-postgres
working-directory: plugins/dbgate-plugin-postgres
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-plugin-sqlite
working-directory: plugins/dbgate-plugin-sqlite
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-plugin-redis
working-directory: plugins/dbgate-plugin-redis
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-plugin-oracle
working-directory: plugins/dbgate-plugin-oracle
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-plugin-clickhouse
working-directory: plugins/dbgate-plugin-clickhouse
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-plugin-dbf
working-directory: plugins/dbgate-plugin-dbf
run: |
npm publish
npm publish --tag "$NPM_TAG"
- name: Publish dbgate-plugin-cassandra
working-directory: plugins/dbgate-plugin-cassandra
run: |
npm publish
npm publish --tag "$NPM_TAG"

View File

@@ -30,8 +30,8 @@ jobs:
uses: docker/login-action@v2
with:
registry: ghcr.io
username: '${{ github.actor }}'
password: '${{ secrets.GITHUB_TOKEN }}'
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Push mysql-ssh-login to GHCR
run: |
docker tag dbgate/mysql-ssh-login:latest ghcr.io/dbgate/mysql-ssh-login:latest

View File

@@ -33,4 +33,4 @@ jobs:
cd diflow
node dist/diflow.js sync -r https://DIFLOW_GIT_SECRET@github.com/dbgate/dbgate-diflow-config.git -b master
env:
DIFLOW_GIT_SECRET: '${{ secrets.DIFLOW_GIT_SECRET }}'
DIFLOW_GIT_SECRET: ${{ secrets.DIFLOW_GIT_SECRET }}

View File

@@ -13,10 +13,10 @@ jobs:
e2e-tests:
runs-on: ubuntu-latest
steps:
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- uses: actions/checkout@v3
with:
fetch-depth: 1
@@ -24,9 +24,9 @@ jobs:
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: '${{ secrets.GH_TOKEN }}'
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: ecea1eef17c69c56b0633317e24a68c5220a4810
ref: 6195e103e1d45e4f59bade60df5dd1784f4e6c77
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -70,7 +70,7 @@ jobs:
name: screenshots
path: screenshots
- name: Push E2E screenshots
if: '${{ github.ref_name == ''master'' }}'
if: ${{ github.ref_name == 'master' }}
run: |
git config --global user.email "info@dbgate.info"
git config --global user.name "GitHub Actions"
@@ -89,25 +89,25 @@ jobs:
ports:
- '16000:5432'
mysql-cypress:
image: 'mysql:8.0.18'
image: mysql:8.0.18
ports:
- '16004:3306'
env:
MYSQL_ROOT_PASSWORD: Pwd2020Db
mysql-ssh-login:
image: 'ghcr.io/dbgate/mysql-ssh-login:latest'
image: ghcr.io/dbgate/mysql-ssh-login:latest
ports:
- '16012:22'
mysql-ssh-keyfile:
image: 'ghcr.io/dbgate/mysql-ssh-keyfile:latest'
image: ghcr.io/dbgate/mysql-ssh-keyfile:latest
ports:
- '16008:22'
dex:
image: 'ghcr.io/dbgate/dex:latest'
image: ghcr.io/dbgate/dex:latest
ports:
- '16009:5556'
mongo:
image: 'mongo:4.0.12'
image: mongo:4.4.29
env:
MONGO_INITDB_ROOT_USERNAME: root
MONGO_INITDB_ROOT_PASSWORD: Pwd2020Db
@@ -126,7 +126,7 @@ jobs:
SA_PASSWORD: Pwd2020Db
MSSQL_PID: Express
oracle:
image: 'gvenzl/oracle-xe:21-slim'
image: gvenzl/oracle-xe:21-slim
env:
ORACLE_PASSWORD: Pwd2020Db
ports:

View File

@@ -15,7 +15,7 @@ jobs:
- name: Check out repository
uses: actions/checkout@v3
with:
token: '${{ secrets.WORKFLOW_CHANGE_ACCESS_TOKEN }}'
token: ${{ secrets.WORKFLOW_CHANGE_ACCESS_TOKEN }}
- name: git pull
run: |
git pull
@@ -47,5 +47,5 @@ jobs:
- name: Push changes
uses: ad-m/github-push-action@v0.6.0
with:
github_token: '${{ secrets.WORKFLOW_CHANGE_ACCESS_TOKEN }}'
github_token: ${{ secrets.WORKFLOW_CHANGE_ACCESS_TOKEN }}
branch: master

View File

@@ -13,10 +13,10 @@ jobs:
all-tests:
runs-on: ubuntu-latest
steps:
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- uses: actions/checkout@v3
with:
fetch-depth: 1
@@ -45,19 +45,19 @@ jobs:
- uses: tanmen/jest-reporter@v1
if: always()
with:
github-token: '${{ secrets.GITHUB_TOKEN }}'
github-token: ${{ secrets.GITHUB_TOKEN }}
result-file: integration-tests/result.json
action-name: Integration tests
- uses: tanmen/jest-reporter@v1
if: always()
with:
github-token: '${{ secrets.GITHUB_TOKEN }}'
github-token: ${{ secrets.GITHUB_TOKEN }}
result-file: packages/filterparser/result.json
action-name: Filter parser test results
- uses: tanmen/jest-reporter@v1
if: always()
with:
github-token: '${{ secrets.GITHUB_TOKEN }}'
github-token: ${{ secrets.GITHUB_TOKEN }}
result-file: packages/datalib/result.json
action-name: Datalib (perspectives) test results
services:
@@ -69,7 +69,7 @@ jobs:
ports:
- '15000:5432'
mysql-integr:
image: 'mysql:8.0.18'
image: mysql:8.0.18
env:
MYSQL_ROOT_PASSWORD: Pwd2020Db
ports:
@@ -83,27 +83,27 @@ jobs:
ports:
- '15002:1433'
clickhouse-integr:
image: 'bitnami/clickhouse:24.8.4'
image: bitnamilegacy/clickhouse:24.8.4
env:
CLICKHOUSE_ADMIN_PASSWORD: Pwd2020Db
ports:
- '15005:8123'
oracle-integr:
image: 'gvenzl/oracle-xe:21-slim'
image: gvenzl/oracle-xe:21-slim
env:
ORACLE_PASSWORD: Pwd2020Db
ports:
- '15006:1521'
cassandradb:
image: 'cassandra:5.0.2'
image: cassandra:5.0.2
ports:
- '15942:9042'
libsql:
image: 'ghcr.io/tursodatabase/libsql-server:latest'
image: ghcr.io/tursodatabase/libsql-server:latest
ports:
- '8080:8080'
firebird:
image: 'firebirdsql/firebird:latest'
image: firebirdsql/firebird:latest
env:
FIREBIRD_DATABASE: mydatabase.fdb
FIREBIRD_USER: dbuser

2
.nvmrc
View File

@@ -1 +1 @@
v21.7.3
v24.4.1

59
.vscode/launch.json vendored
View File

@@ -1,20 +1,41 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Launch API",
"skipFiles": [
"<node_internals>/**"
],
"program": "${workspaceFolder}/packages/api/src/index.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
]
}
]
}
"version": "0.2.0",
"configurations": [
{
"name": "Debug App",
"type": "node",
"request": "launch",
"program": "${workspaceFolder}/packages/api/src/index.js",
"envFile": "${workspaceFolder}/packages/api/.env",
"args": ["--listen-api"],
"console": "integratedTerminal",
"restart": true,
"runtimeExecutable": "node",
"skipFiles": ["<node_internals>/**"]
},
{
"name": "Debug App (Break on Start)",
"type": "node",
"request": "launch",
"program": "${workspaceFolder}/packages/api/src/index.js",
"args": ["--listen-api"],
"envFile": "${workspaceFolder}/.env",
"console": "integratedTerminal",
"restart": true,
"runtimeExecutable": "node",
"skipFiles": ["<node_internals>/**"],
"stopOnEntry": true
},
{
"name": "Attach to Process",
"type": "node",
"request": "attach",
"port": 9229,
"restart": true,
"localRoot": "${workspaceFolder}",
"remoteRoot": "${workspaceFolder}",
"skipFiles": ["<node_internals>/**"]
}
]
}

View File

@@ -8,6 +8,122 @@ Builds:
- linux - application for linux
- win - application for Windows
## 6.6.8
- CHANGED: Windows executable now uses Azure trusted signing certificate
- CHANGED: NPM packages now use GitHub OIDC provenance signing for better security
- CHANGED: Some features moved to Premium edition (master/detail views, FK lookups, column expansion, split view, advanced export/import, data archives, grouping, macros)
## 6.6.6
- ADDED: Allow disable/re-enable filter #1174
- ADDED: Close right side tabs #1219
- ADDED: Ability to disable executing the current line in the query editor #1209
- ADDED: Support for Redis Cluster #1204 (Premium)
## 6.6.5
- ADDED: SQL AI assistant - powered by database chat, could help you to write SQL queries (Premium)
- ADDED: Explain SQL error (powered by AI) (Premium)
- ADDED: Database chat (and SQL AI Assistant) now supports showing charts (Premium)
- FIXED: Fixed editing new files and roles (Team Premium)
- FIXED: Connection to standalone database could be now pinned
- FIXED: Cannot open up large JSON file #1215
## 6.6.4
- ADDED: AI Database chat now supports much more LLM models. (Premium)
- ADDED: Possibility to use your own API key with OpenAI-compatible providers (OpenRouter, Anthropic...)
- ADDED: Possibility to use self-hosted own LLM (eg. Llama)
- ADDED: Team files - save SQL files and define shared charts, assign roles and users to these objects (Team Premium)
- FIXED: BUG: does no longer work with Cockroach DB #1202
- FIXED: DbGate Web UI Connections do not display 'Databases' #1199
- CHANGED: Redesign of applications. Applications are now stored in a single JSON file
- ADDED: Application editor (Premium)
- ADDED: Possibility to filter only tables with rows
- FIXED: Fixed several issues with large Firebird databases
- CHANGED: Community edition now supports shared folders in read-only mode
## 6.6.3
- FIXED: Error “db.getCollection(…).renameCollection is not a function” when renaming collection in dbGate #1198
- FIXED: Can't list databases from Azure SQL SERVER #1197
- ADDED: Save zoom level in electron apps
## 6.6.2
- ADDED: List of processes, ability to kill process (Server summary) #1178
- ADDED: Database and table permissions (Team Premium edition)
- ADDED: Redis search box - Scan all #1191
- FIXED: Optimized loading SQL server with descriptions #1187
- CHANGED: Allow a much greater page size #1185
- FIXED: Optimized loading SQL server with descriptions #1187
- FIXED: Executing queries for SQLite crash #1195
## 6.6.1
- ADDED: Support for Mongo shell (Premium) - #1114
- FIXED: Support for BLOB in Oracle #1181
- ADDED: Connect to named SQL Server instance #340
- ADDED: Support for SQL Server descriptions #1137
- ADDED: Application log viewer
- FIXED: Selecting default database in connection dialog
- CHANGED: Improved logging system, added related database and connection to logs metadata
## 6.6.0
- ADDED: Database chat - AI powered chatbot, which knows your database (Premium)
- ADDED: Firestore support (Premium)
- REMOVED: Query AI assistant (replaced by Database Chat) (Premium)
- FIXED: Chart permissions were ignored (Premium)
## 6.5.6
- ADDED: New object window - quick access to most common functions
- ADDED: Possibility to disable split query by empty line #1162
- ADDED: Possibility to opt out authentication #1152
- FIXED: Separate schema mode now works in Team Premium edition
- FIXED: Handled situation, when user enters expired license, which is already prolonged
- FIXED: Fixed some minor problems of charts
## 6.5.5
- ADDED: Administer cloud folder window
- CHANGED: Cloud menu redesign
- ADDED: Audit log (for Team Premium edition)
- ADDED: Added new timeline chart type (line chart with time axis)
- ADDED: Chart grouping (more measure determined from data)
- CHANGED: Improved chart autodetection - string X axis (with bar type), COUNT as measure, split different measures
- ADDED: Added chart data type detection
- FIXED: Fixed chart displaying problems
- FIXED: Fixed exporting chart to HTML
- CHANGED: Choose COUNT measure without selecting underlying ID field (use virtual __count)
- FIXED: Problems with authentication administration, especially for Postgres storage
- CHANGED: Anonymous authentication (in Team Premium) is now disabled by default
## 6.5.3
- CHANGED: Improved DbGate Cloud sign-in workflow
- FIXED: Some fixes and error handling in new charts engine
- ADDED: Charts - ability to choose aggregate function
- CHANGED: Improved About window
## 6.5.2
- CHANGED: Autodetecting charts is disabled by default #1145
- CHANGED: Improved chart displaying workflow
- ADDED: Ability to close chart
## 6.5.1
- FIXED: DbGate Cloud e-mail sign-in method for desktop clients
## 6.5.0
- ADDED: DbGate cloud - online storage for connections, SQL scripts and other objects
- ADDED: Public knowledge base - common SQL scripts for specific DB engines (table sizes, index stats etc.)
- ADDED: Query results could be visualised in charts (Premium)
- REMOVED: Chart from selection, active charts - replaced by query result charts
- ADDED: FirebirdSQL support
- ADDED: SQL front matter - properties of SQL script
- ADDED: Auto-execute SQL script on open (saved in SQL front matter)
- CHANGED: Smaller widget icon panel
- CHANGED: Applications and Single-connection mode removed from widget icon panel
- CHANGED: Temporarily disabled MongoDB profiler support
- FIXED: Pie chart distorted if settings change #838
- FIXED: SQL server generated insert statement should exclude computed and timestamp columns #1111
- ADDED: Added option "Show all columns when searching" #1118
- ADDED: Copy cells/rows (e.g. column names) from Structure view #1119
- ADDED: Setting "Show table aliases in code completion" #1122
- FIXED: Vulnerability - check file paths in web version
- FIXED: Very slow render of tables with very long cells
## 6.4.2
- ADDED: Source label to docker container #1105

View File

@@ -15,12 +15,10 @@ But there are also many advanced features like schema compare, visual query desi
DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
* Try it online - [demo.dbgate.org](https://demo.dbgate.org) - online demo application
* **Download** application for Windows, Linux or Mac from [dbgate.io](https://dbgate.io/download/)
* Looking for DbGate Community? **Download** from [dbgate.org](https://dbgate.org/download/)
* **Download** application for Windows, Linux or Mac from [dbgate.io](https://www.dbgate.io/download/)
* Looking for DbGate Community? **Download** from [dbgate.io](https://www.dbgate.io/download-community/)
* Run web version as [NPM package](https://www.npmjs.com/package/dbgate-serve) or as [docker image](https://hub.docker.com/r/dbgate/dbgate)
* Use nodeJs [scripting interface](https://docs.dbgate.io/scripting) ([API documentation](https://docs.dbgate.io/apidoc))
* [Recommend DbGate](https://testimonial.to/dbgate) | [Rate on G2](https://www.g2.com/products/dbgate/reviews)
* [Give us feedback](https://dbgate.org/feedback) - it will help us to decide, how to improve DbGate in future
## Supported databases
* MySQL
@@ -38,6 +36,8 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
* Apache Cassandra
* libSQL/Turso (Premium)
* DuckDB
* Firebird
* Firestore (Premium)
<a href="https://raw.githubusercontent.com/dbgate/dbgate/master/img/screenshot1.png">
@@ -80,6 +80,7 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
* Archives - backup your data in NDJSON files on local filesystem (or on DbGate server, when using web application)
* NDJSON data viewer and editor - browse NDJSON data, edit data and structure directly on NDJSON files. Works also for big NDJSON files
* Charts, export chart to HTML page
* AI powered database chat
* Show GEO data on map, export map to HTML page
* For detailed info, how to run DbGate in docker container, visit [docker hub](https://hub.docker.com/r/dbgate/dbgate)
* Extensible plugin architecture
@@ -88,10 +89,11 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
Any contributions are welcome. If you want to contribute without coding, consider following:
* Tell your friends about DbGate or share on social networks - when more people will use DbGate, it will grow to be better
* Write review on [Slant.co](https://www.slant.co/improve/options/41086/~dbgate-review) or [G2](https://www.g2.com/products/dbgate/reviews)
* Purchase a [DbGate Premium](https://www.dbgate.io/purchase/premium/) license
* Create issue, if you find problem in app, or you have idea to new feature. If issue already exists, you could leave comment on it, to prioritise most wanted issues
* Create some tutorial video on [youtube](https://www.youtube.com/playlist?list=PLCo7KjCVXhr0RfUSjM9wJMsp_ShL1q61A)
* Become a backer on [GitHub sponsors](https://github.com/sponsors/dbgate) or [Open collective](https://opencollective.com/dbgate)
* Add a SQL script to [Public Knowledge Base](https://github.com/dbgate/dbgate-knowledge-base)
* Where a small coding is acceptable for you, you could [create plugin](https://docs.dbgate.io/plugin-development). Plugins for new themes can be created actually without JS coding
Thank you!

View File

@@ -43,6 +43,8 @@ function adjustFile(file, isApp = false) {
if (process.argv.includes('--community')) {
delete json.optionalDependencies['mongodb-client-encryption'];
delete json.dependencies['@mongosh/service-provider-node-driver'];
delete json.dependencies['@mongosh/browser-runtime-electron'];
}
if (isApp && process.argv.includes('--premium')) {

View File

@@ -117,7 +117,7 @@
"scripts": {
"start": "cross-env ELECTRON_START_URL=http://localhost:5001 DEVMODE=1 electron .",
"start:local": "cross-env electron .",
"dist": "electron-builder",
"dist": "electron-builder --publish never",
"build": "cd ../packages/api && yarn build && cd ../web && yarn build && cd ../../app && yarn dist",
"build:local": "cd ../packages/api && yarn build && cd ../web && yarn build && cd ../../app && yarn predist",
"postinstall": "yarn rebuild && patch-package",

View File

@@ -212,6 +212,10 @@ ipcMain.on('app-started', async (event, arg) => {
autoUpdater.checkForUpdates();
}
}
if (initialConfig['winZoomLevel'] != null) {
mainWindow.webContents.zoomLevel = initialConfig['winZoomLevel'];
}
});
ipcMain.on('window-action', async (event, arg) => {
if (!mainWindow) {
@@ -394,6 +398,7 @@ function createWindow() {
JSON.stringify({
winBounds: mainWindow.getBounds(),
winIsMaximized: mainWindow.isMaximized(),
winZoomLevel: mainWindow.webContents.zoomLevel,
}),
'utf-8'
);

View File

@@ -10,6 +10,7 @@ module.exports = ({ editMenu, isMac }) => [
{ command: 'new.queryDesign', hideDisabled: true },
{ command: 'new.diagram', hideDisabled: true },
{ command: 'new.perspective', hideDisabled: true },
{ command: 'new.application', hideDisabled: true },
{ command: 'new.shell', hideDisabled: true },
{ command: 'new.jsonl', hideDisabled: true },
{ command: 'new.modelTransform', hideDisabled: true },
@@ -86,7 +87,6 @@ module.exports = ({ editMenu, isMac }) => [
{ divider: true },
{ command: 'folder.showLogs', hideDisabled: true },
{ command: 'folder.showData', hideDisabled: true },
{ command: 'new.gist', hideDisabled: true },
{ command: 'app.resetSettings', hideDisabled: true },
{ divider: true },
{ command: 'app.exportConnections', hideDisabled: true },
@@ -108,7 +108,7 @@ module.exports = ({ editMenu, isMac }) => [
{ command: 'app.openWeb', hideDisabled: true },
{ command: 'app.openIssue', hideDisabled: true },
{ command: 'app.openSponsoring', hideDisabled: true },
{ command: 'app.giveFeedback', hideDisabled: true },
// { command: 'app.giveFeedback', hideDisabled: true },
{ divider: true },
{ command: 'settings.commands', hideDisabled: true },
{ command: 'tabs.changelog', hideDisabled: true },

View File

@@ -0,0 +1,129 @@
#!/usr/bin/env node
// assign-dbgm-codes.mjs
import fs from 'fs/promises';
import path from 'path';
// Placeholder developers write in source; each occurrence is replaced by a unique code.
const PLACEHOLDER = 'DBGM-00000';
// Matches already-assigned codes; capture group 1 is the 5-digit numeric part.
const CODE_RE = /DBGM-(\d{5})/g;
// File extensions that are scanned and rewritten.
const JS_TS_RE = /\.(mjs|cjs|js|ts|jsx|tsx)$/i;
// Directory names never descended into (VCS metadata, dependencies, build output, caches).
const IGNORE_DIRS = new Set([
  'node_modules',
  '.git',
  '.hg',
  '.svn',
  'dist',
  'build',
  'out',
  '.next',
  '.turbo',
  '.cache',
]);
// File names excluded from scanning (this script itself included, so the
// PLACEHOLDER constant above is never rewritten).
const IGNORE_FILES = ['assign-dbgm-codes.mjs', 'package.json', 'README.md'];
// --- CLI ---
// Usage: assign-dbgm-codes.mjs [--dry] [root]; root defaults to the current directory.
const args = process.argv.slice(2);
const dryRun = args.includes('--dry');
const rootArg = args.find(a => a !== '--dry') || process.cwd();
const root = path.resolve(rootArg);
// --- helpers ---
// Recursively yield absolute paths of JS/TS source files under `dir`,
// skipping ignored directories and explicitly excluded file names.
async function* walk(dir) {
  for (const entry of await fs.readdir(dir, { withFileTypes: true })) {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      if (!IGNORE_DIRS.has(entry.name)) {
        yield* walk(fullPath);
      }
    } else if (entry.isFile() && JS_TS_RE.test(entry.name) && !IGNORE_FILES.includes(entry.name)) {
      yield fullPath;
    }
  }
}
// Render a numeric id as a zero-padded DBGM code, e.g. 7 -> "DBGM-00007".
function formatCode(n) {
  const padded = `${n}`.padStart(5, '0');
  return `DBGM-${padded}`;
}
// Build a generator function that hands out the smallest unused positive
// code numbers. Each call marks the issued number as taken in `taken`
// (numeric parts only) and returns the formatted code string.
function makeNextCodeFn(taken) {
  let candidate = 1;
  // advance to the first free number
  while (taken.has(candidate)) candidate += 1;
  return () => {
    const issued = candidate;
    taken.add(issued);
    // advance to the next free number for the following call
    do {
      candidate += 1;
    } while (taken.has(candidate));
    return formatCode(issued);
  };
}
// --- main ---
// Two-phase run: (1) collect every already-assigned DBGM code in the tree,
// (2) rewrite each PLACEHOLDER occurrence with the next free unique code.
(async () => {
  console.log(`Scanning: ${root} ${dryRun ? '(dry run)' : ''}`);
  // 1) Collect all taken codes across the repo
  const taken = new Set(); // numeric parts only
  const files = [];
  for await (const file of walk(root)) files.push(file);
  await Promise.all(
    files.map(async file => {
      try {
        const text = await fs.readFile(file, 'utf8');
        for (const m of text.matchAll(CODE_RE)) {
          const num = Number(m[1]);
          if (Number.isInteger(num) && num > 0) taken.add(num);
        }
      } catch (err) {
        console.warn(`! Failed to read ${file}: ${err.message}`);
      }
    })
  );
  console.log(`Found ${taken.size} occupied code(s).`);
  // 2) Replace placeholders with next available unique code
  const nextCode = makeNextCodeFn(taken);
  let filesChanged = 0;
  let placeholdersReplaced = 0;
  for (const file of files) {
    let text;
    try {
      text = await fs.readFile(file, 'utf8');
    } catch (err) {
      console.warn(`! Failed to read ${file}: ${err.message}`);
      continue;
    }
    if (!text.includes(PLACEHOLDER)) continue;
    let countInFile = 0;
    const updated = text.replaceAll(PLACEHOLDER, () => {
      countInFile++;
      return nextCode();
    });
    if (countInFile > 0) {
      placeholdersReplaced += countInFile;
      filesChanged++;
      // FIX: file path and replacement count were concatenated with no
      // separator, producing e.g. "/x/a.js3 replacement(s)"; add ": ".
      console.log(`${dryRun ? '[dry]' : '[write]'} ${file}: ${countInFile} replacement(s)`);
      if (!dryRun) {
        try {
          await fs.writeFile(file, updated, 'utf8');
        } catch (err) {
          console.warn(`! Failed to write ${file}: ${err.message}`);
        }
      }
    }
  }
  console.log(`Done. Files changed: ${filesChanged}, placeholders replaced: ${placeholdersReplaced}.`);
})().catch(err => {
  console.error(err);
  process.exit(1);
});

110
common/fixYmlHashes.js Normal file
View File

@@ -0,0 +1,110 @@
import fs from 'node:fs/promises';
import { createHash } from 'node:crypto';
import path from 'node:path';
import process from 'node:process';
import { fileURLToPath } from 'node:url';
import YAML from 'yaml';
// ESM has no built-in __dirname; reconstruct it from import.meta.url so the
// default dist directory can be resolved relative to this script.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Compute the SHA-512 digest of a file's contents, base64-encoded
// (the format electron-builder records in its update .yml manifests).
async function sha512Base64(filePath) {
  const data = await fs.readFile(filePath);
  return createHash('sha512').update(data).digest('base64');
}
// Return the size of a file in bytes.
async function fileSize(filePath) {
  const { size } = await fs.stat(filePath);
  return size;
}
// Refresh the sha512/size fields inside one electron-builder update manifest
// (.yml) so they match the actual artifact files found in `distDir`.
// Writes the manifest back only when something actually changed.
async function fixOneYaml(ymlPath, distDir) {
  const ymlName = path.basename(ymlPath);
  let rawText;
  try {
    rawText = await fs.readFile(ymlPath, 'utf8');
  } catch (e) {
    console.error(`✗ Cannot read ${ymlPath}:`, e.message);
    return;
  }
  let manifest;
  try {
    manifest = YAML.parse(rawText);
  } catch (e) {
    console.error(`✗ Cannot parse YAML ${ymlPath}:`, e.message);
    return;
  }
  if (!manifest || !Array.isArray(manifest.files)) {
    console.warn(`! ${ymlName} has no 'files' array — skipped.`);
    return;
  }
  let dirty = false;
  // Update each files[i].sha512 and files[i].size based on files[i].url
  for (const fileEntry of manifest.files) {
    if (!fileEntry?.url) continue;
    const artifact = path.resolve(distDir, fileEntry.url);
    try {
      const [hash, size] = await Promise.all([sha512Base64(artifact), fileSize(artifact)]);
      if (fileEntry.sha512 !== hash || fileEntry.size !== size) {
        console.log(`${ymlName}: refresh ${fileEntry.url}`);
        fileEntry.sha512 = hash;
        fileEntry.size = size;
        dirty = true;
      }
    } catch (e) {
      console.warn(
        `! Missing or unreadable file for ${fileEntry.url} (referenced by ${ymlName}): ${e.message}`
      );
    }
  }
  // Update top-level sha512 for the primary "path" file if present
  if (manifest.path) {
    const primary = path.resolve(distDir, manifest.path);
    try {
      const hash = await sha512Base64(primary);
      if (manifest.sha512 !== hash) {
        console.log(`${ymlName}: refresh top-level sha512 for path=${manifest.path}`);
        manifest.sha512 = hash;
        dirty = true;
      }
    } catch (e) {
      console.warn(`! Primary 'path' file not found for ${ymlName}: ${manifest.path} (${e.message})`);
    }
  }
  if (dirty) {
    await fs.writeFile(ymlPath, YAML.stringify(manifest), 'utf8');
    console.log(`✓ Updated ${ymlName}`);
  } else {
    console.log(`= No changes for ${ymlName}`);
  }
}
// Entry point: scan the dist directory (first CLI argument, or ../app/dist
// relative to this script) and refresh every .yml manifest found there.
async function main() {
  const distDir = path.resolve(process.argv[2] ?? path.join(__dirname, '..', 'app', 'dist'));
  const listing = await fs.readdir(distDir);
  const manifestNames = listing.filter(name => name.toLowerCase().endsWith('.yml'));
  if (manifestNames.length === 0) {
    console.warn(`No .yml files found in ${distDir}`);
    return;
  }
  console.log(`Scanning ${distDir}`);
  // Process manifests sequentially so log output stays grouped per file.
  for (const name of manifestNames) {
    await fixOneYaml(path.join(distDir, name), distDir);
  }
}
main().catch(err => {
  console.error(err);
  process.exit(1);
});

View File

@@ -22,6 +22,8 @@ const volatilePackages = [
'ssh2',
'wkx',
'@duckdb/node-api',
'@mongosh/browser-runtime-electron',
'@mongosh/service-provider-node-driver',
];
module.exports = volatilePackages;

View File

@@ -7,7 +7,9 @@ const path = require('path');
module.exports = defineConfig({
e2e: {
// baseUrl: 'http://localhost:3000',
// trashAssetsBeforeRuns: false,
chromeWebSecurity: false,
setupNodeEvents(on, config) {
// implement node event listeners here
@@ -40,6 +42,12 @@ module.exports = defineConfig({
case 'multi-sql':
serverProcess = exec('yarn start:multi-sql');
break;
case 'cloud':
serverProcess = exec('yarn start:cloud');
break;
case 'charts':
serverProcess = exec('yarn start:charts');
break;
}
await waitOn({ resources: ['http://localhost:3000'] });

View File

@@ -119,4 +119,17 @@ describe('Add connection', () => {
cy.contains('Export connections').click();
cy.themeshot('export-connections');
});
it('configure LLM provider', () => {
cy.testid('WidgetIconPanel_settings').click();
cy.contains('Settings').click();
cy.contains('AI').click();
cy.testid('AiSupportedProvidersInfo_add_OpenRouter').click();
cy.testid('AiProviderCard_apikey_OpenRouter').clear().type('xxx');
cy.testid('AiProviderCard_testButton_OpenRouter').click();
cy.testid('AiProviderCard_statusValid_OpenRouter').should('exist');
cy.testid('AiProviderCard_editButton_OpenRouter').click();
cy.wait(1000);
cy.themeshot('llm-providers-settings');
});
});

View File

@@ -191,7 +191,8 @@ describe('Data browser data', () => {
it('Query editor - join wizard', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewQuery').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_query').click();
cy.wait(1000);
cy.get('body').realType('select * from Invoice');
cy.get('body').realPress('{enter}');
@@ -276,6 +277,14 @@ describe('Data browser data', () => {
cy.testid('CommandPalette_main').themeshot('command-palette', { padding: 50 });
});
it('About window', () => {
cy.contains('Connections');
cy.testid('WidgetIconPanel_menu').click();
cy.contains('Help').click();
cy.contains('About').click();
cy.testid('ModalBase_window').themeshot('about-window', { padding: 50 });
});
it('Show map', () => {
cy.contains('Postgres-connection').click();
cy.contains('PgGeoData').click();
@@ -302,7 +311,8 @@ describe('Data browser data', () => {
});
it('Plugin tab', () => {
cy.testid('WidgetIconPanel_plugins').click();
cy.testid('WidgetIconPanel_settings').click();
cy.contains('Manage plugins').click();
cy.contains('dbgate-plugin-theme-total-white').click();
// text from plugin markdown
cy.contains('Total white theme');
@@ -379,21 +389,6 @@ describe('Data browser data', () => {
cy.themeshot('compare-database-settings');
});
it('Query editor - AI assistant', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewQuery').click();
cy.testid('QueryTab_switchAiAssistantButton').click();
cy.testid('QueryAiAssistant_allowSendToAiServiceButton').click();
cy.testid('ConfirmModal_okButton').click();
cy.testid('QueryAiAssistant_promptInput').type('album names');
cy.testid('QueryAiAssistant_queryFromQuestionButton').click();
cy.contains('Use this', { timeout: 10000 }).click();
cy.testid('QueryTab_executeButton').click();
cy.contains('Balls to the Wall');
cy.themeshot('ai-assistant');
});
it('Modify data', () => {
// TODO FIX: delete references cascade not working
cy.contains('MySql-connection').click();

View File

@@ -0,0 +1,169 @@
// Ignore one known benign exception from a web-worker bootstrap; every other
// uncaught exception still fails the test (the handler returns undefined).
Cypress.on('uncaught:exception', (err, runnable) => {
  // if the error message matches the one about WorkerGlobalScope importScripts
  if (err.message.includes("Failed to execute 'importScripts' on 'WorkerGlobalScope'")) {
    // return false to let Cypress know we intentionally want to ignore this error
    return false;
  }
  // otherwise let Cypress throw the error
});
// Each test starts from a freshly loaded app at a fixed viewport size.
beforeEach(() => {
  cy.visit('http://localhost:3000');
  cy.viewport(1250, 900);
});
// E2E coverage for the charts features: auto-detection, query-result charts,
// grouped charts, knowledge-base charts and AI database chat charts.
describe('Charts', () => {
  it('Auto detect chart', () => {
    cy.contains('MySql-connection').click();
    cy.contains('charts_sample').click();
    cy.testid('WidgetIconPanel_file').click();
    cy.contains('chart1').click();
    cy.contains('department_name');
    // cy.testid('QueryTab_executeButton').click();
    // cy.testid('QueryTab_openChartButton').click();
    cy.testid('chart-canvas').should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
    cy.themeshot('choose-detected-chart');
  });
  it('Two line charts', () => {
    cy.contains('MySql-connection').click();
    cy.contains('MyChinook').click();
    cy.testid('TabsPanel_buttonNewObject').click();
    cy.testid('NewObjectModal_query').click();
    cy.wait(1000);
    cy.get('body').realType('SELECT InvoiceDate, Total from Invoice');
    cy.contains('Execute').click();
    cy.contains('Open chart').click();
    cy.testid('chart-canvas').should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
    cy.themeshot('two-line-charts');
  });
  it('Invoice naive autodetection', () => {
    cy.contains('MySql-connection').click();
    cy.contains('MyChinook').click();
    cy.testid('TabsPanel_buttonNewObject').click();
    cy.testid('NewObjectModal_query').click();
    cy.wait(1000);
    cy.get('body').realType('SELECT * from Invoice');
    cy.contains('Execute').click();
    cy.contains('Open chart').click();
    cy.testid('chart-canvas').should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
    cy.themeshot('chart-naive-autodetection');
  });
  it('Invoice by country - grouped chart', () => {
    cy.contains('MySql-connection').click();
    cy.contains('MyChinook').click();
    cy.testid('TabsPanel_buttonNewObject').click();
    cy.testid('NewObjectModal_query').click();
    cy.wait(1000);
    cy.get('body').realType(
      "SELECT InvoiceDate, Total, BillingCountry from Invoice where BillingCountry in ('USA', 'Canada', 'Brazil', 'France', 'Germany')"
    );
    cy.contains('Execute').click();
    cy.contains('Open chart').click();
    cy.testid('ChartSelector_chart_1').click();
    cy.testid('JslChart_customizeButton').click();
    cy.testid('chart-canvas').should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
    cy.themeshot('chart-grouped-autodetected');
    cy.testid('ChartDefinitionEditor_chartTypeSelect').select('Bar');
    cy.testid('ChartDefinitionEditor_xAxisTransformSelect').select('Date (Year)');
    cy.testid('chart-canvas').should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
    cy.themeshot('chart-grouped-bars');
  });
  it('Public Knowledge base - show chart', () => {
    cy.contains('MySql-connection').click();
    cy.contains('MyChinook').click();
    cy.testid('WidgetIconPanel_cloud-public').click();
    cy.testid('public-cloud-file-tag-mysql/folder-MySQL/tag-premium/top-tables-row-count.sql').click();
    cy.testid('chart-canvas').should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
    cy.themeshot('public-knowledge-base-tables-sizes');
  });
  // NOTE(review): duplicate test title — an it named 'Auto detect chart'
  // already exists above; consider renaming (e.g. 'Detect chart from SQL
  // template') so reports and .only filters stay unambiguous.
  it('Auto detect chart', () => {
    cy.contains('MySql-connection').click();
    cy.contains('MyChinook').click();
    cy.contains('Invoice').rightclick();
    cy.contains('SQL template').click();
    cy.contains('SELECT').click();
    cy.testid('QueryTab_detectChartButton').click();
    cy.testid('QueryTab_executeButton').click();
    cy.contains('Chart 1').click();
    cy.testid('ChartSelector_chart_0').click();
    cy.testid('JslChart_customizeButton').click();
    cy.testid('ChartDefinitionEditor_chartTypeSelect').select('Bar');
    cy.testid('ChartDefinitionEditor_chartTypeSelect').select('Line');
    cy.testid('chart-canvas').should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
    cy.themeshot('query-result-chart');
  });
  it('New object window', () => {
    cy.contains('MySql-connection').click();
    cy.contains('MyChinook').click();
    cy.contains('Invoice').click();
    cy.testid('WidgetIconPanel_addButton').click();
    cy.contains('Compare database');
    cy.themeshot('new-object-window');
  });
  // FIX: was `it.only(...)` — a committed `.only` makes the runner silently
  // skip every other test in this spec. Never commit `.only`.
  it('Database chat - charts', () => {
    cy.contains('MySql-connection').click();
    cy.contains('MyChinook').click();
    cy.testid('TabsPanel_buttonNewObject').click();
    cy.testid('NewObjectModal_databaseChat').click();
    cy.wait(1000);
    cy.get('body').realType('show me chart of most popular genres');
    cy.get('body').realPress('{enter}');
    cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 30000 }).click();
    cy.testid('chart-canvas', { timeout: 30000 }).should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
    cy.themeshot('database-chat-chart');
  });
  it('Database chat', () => {
    cy.contains('MySql-connection').click();
    cy.contains('MyChinook').click();
    cy.testid('TabsPanel_buttonNewObject').click();
    cy.testid('NewObjectModal_databaseChat').click();
    cy.wait(1000);
    cy.get('body').realType('find most popular artist');
    cy.get('body').realPress('{enter}');
    cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 30000 }).click();
    cy.wait(30000);
    // cy.contains('Iron Maiden');
    cy.themeshot('database-chat');
    // cy.testid('DatabaseChatTab_promptInput').click();
    // cy.get('body').realType('I need top 10 songs with the biggest income');
    // cy.get('body').realPress('{enter}');
    // cy.contains('Hot Girl', { timeout: 20000 });
    // cy.wait(1000);
    // cy.themeshot('database-chat');
  });
  it('Explain query error', () => {
    cy.contains('MySql-connection').click();
    cy.contains('MyChinook').click();
    cy.testid('TabsPanel_buttonNewObject').click();
    cy.testid('NewObjectModal_query').click();
    cy.wait(1000);
    cy.get('body').realType('select * from Invoice2');
    cy.contains('Execute').click();
    cy.testid('MessageViewRow-explainErrorButton-1').click();
    cy.testid('ChatCodeRenderer_useSqlButton', { timeout: 30000 });
    cy.themeshot('explain-query-error');
    // cy.testid('TabsPanel_buttonNewObject').click();
    // cy.testid('NewObjectModal_databaseChat').click();
    // cy.wait(1000);
    // cy.get('body').realType('show me chart of most popular genres');
    // cy.get('body').realPress('{enter}');
    // cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 30000 }).click();
    // cy.wait(5000);
    // cy.testid('chart-canvas').should($c => expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/));
    // cy.themeshot('database-chat-chart');
  });
});

View File

@@ -0,0 +1,56 @@
// Ignore one known benign exception from a web-worker bootstrap; every other
// uncaught exception still fails the test (the handler returns undefined).
Cypress.on('uncaught:exception', (err, runnable) => {
  // if the error message matches the one about WorkerGlobalScope importScripts
  if (err.message.includes("Failed to execute 'importScripts' on 'WorkerGlobalScope'")) {
    // return false to let Cypress know we intentionally want to ignore this error
    return false;
  }
  // otherwise let Cypress throw the error
});
// Each test starts from a freshly loaded app at a fixed viewport size.
beforeEach(() => {
  cy.visit('http://localhost:3000');
  cy.viewport(1250, 900);
});
// DbGate Cloud account flow. Signs in via a test hook exposed on window
// (__loginToCloudTest) instead of the real OAuth redirect; the commented
// block below is the interactive GitHub OAuth variant kept for reference.
describe('Cloud tests', () => {
  it('Private cloud', () => {
    cy.testid('WidgetIconPanel_cloudAccount');
    // Test-only login hook; bypasses identity.dbgate.io / GitHub OAuth.
    cy.window().then(win => {
      win.__loginToCloudTest('dbgate.test@gmail.com');
    });
    cy.contains('dbgate.test@gmail.com');
    // cy.testid('WidgetIconPanel_cloudAccount').click();
    // cy.origin('https://identity.dbgate.io', () => {
    //   cy.contains('Sign in with GitHub').click();
    // });
    // cy.origin('https://github.com', () => {
    //   cy.get('#login_field').type('dbgatetest');
    //   cy.get('#password').type('Pwd2020Db');
    //   cy.get('input[type="submit"]').click();
    // });
    // cy.wait(3000);
    // cy.location('origin').then(origin => {
    //   if (origin === 'https://github.com') {
    //     // Still on github.com → an authorization step is waiting
    //     cy.origin('https://github.com', () => {
    //       // Try once, don't wait the full default timeout
    //       cy.get('button[data-octo-click="oauth_application_authorization"]', { timeout: 500, log: false }).click(); // if the button exists it will be clicked
    //       // if not, the short timeout elapses and we drop out
    //     });
    //   } else {
    //     // Already back on localhost nothing to authorize
    //     cy.log('OAuth redirect skipped the Authorize screen');
    //   }
    // });
    cy.contains('Testing Connections').rightclick();
    cy.contains('Administrate access').click();
    cy.contains('User email');
    cy.themeshot('administer-shared-folder');
  });
});

View File

@@ -59,7 +59,8 @@ describe('Transactions', () => {
cy.contains(connectionName).click();
if (databaseName) cy.contains(databaseName).click();
cy.testid('TabsPanel_buttonNewQuery').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_query').click();
cy.wait(1000);
cy.get('body').type(
formatQueryWithoutParams(driver, "INSERT INTO ~categories (~category_id, ~category_name) VALUES (5, 'test');")

View File

@@ -1,3 +1,12 @@
Cypress.on('uncaught:exception', (err, runnable) => {
// if the error message matches the one about WorkerGlobalScope importScripts
if (err.message.includes("Failed to execute 'importScripts' on 'WorkerGlobalScope'")) {
// return false to let Cypress know we intentionally want to ignore this error
return false;
}
// otherwise let Cypress throw the error
});
beforeEach(() => {
cy.visit('http://localhost:3000');
cy.viewport(1250, 900);
@@ -12,14 +21,17 @@ describe('Team edition tests', () => {
cy.testid('AdminMenuWidget_itemConnections').click();
cy.contains('New connection').click();
cy.testid('ConnectionDriverFields_connectionType').select('PostgreSQL');
cy.contains('not granted').should('not.exist');
cy.themeshot('connection-administration');
cy.testid('AdminMenuWidget_itemRoles').click();
cy.contains('logged-user').click();
cy.contains('not granted').should('not.exist');
cy.themeshot('role-administration');
cy.testid('AdminMenuWidget_itemUsers').click();
cy.contains('New user').click();
cy.contains('not granted').should('not.exist');
cy.themeshot('user-administration');
cy.testid('AdminMenuWidget_itemAuthentication').click();
@@ -27,6 +39,7 @@ describe('Team edition tests', () => {
cy.contains('Use database login').click();
cy.contains('Add authentication').click();
cy.contains('OAuth 2.0').click();
cy.contains('not granted').should('not.exist');
cy.themeshot('authentication-administration');
});
@@ -80,4 +93,59 @@ describe('Team edition tests', () => {
cy.testid('AdminMenuWidget_itemUsers').click();
cy.contains('test@example.com');
});
// Verifies the admin audit-log workflow end to end: the log starts disabled,
// is enabled via admin settings, and then records exporting activity.
it('Audit logging', () => {
// Sign in with the built-in admin account.
cy.testid('LoginPage_linkAdmin').click();
cy.testid('LoginPage_password').type('adminpwd');
cy.testid('LoginPage_submitLogin').click();
// Audit logging is off by default; the tab should say so.
cy.testid('AdminMenuWidget_itemAuditLog').click();
cy.contains('Audit log is not enabled');
// Turn audit logging on in admin settings.
cy.testid('AdminMenuWidget_itemSettings').click();
cy.testid('AdminSettingsTab_auditLogCheckbox').click();
// Freshly enabled log has no entries for the selected date yet.
cy.testid('AdminMenuWidget_itemAuditLog').click();
cy.contains('No data for selected date');
// Produce auditable activity: export the connections table to CSV...
cy.testid('AdminMenuWidget_itemConnections').click();
cy.contains('Open table').click();
cy.contains('displayName');
cy.get('.toolstrip').contains('Export').click();
cy.contains('CSV file').click();
// ...and the users table to XML.
cy.testid('AdminMenuWidget_itemUsers').click();
cy.contains('Open table').click();
cy.contains('login');
cy.get('.toolstrip').contains('Export').click();
cy.contains('XML file').click();
// The exports must now show up in the refreshed audit log.
cy.testid('AdminMenuWidget_itemAuditLog').click();
cy.testid('AdminAuditLogTab_refreshButton').click();
cy.contains('Exporting query').click();
// Capture a themed screenshot of the populated audit log for the docs.
cy.themeshot('auditlog');
});
// Verifies editing per-role database permissions: three permission rows are
// added (plain, regex-matched, list-based) and each is assigned a role.
it('Edit database permissions', () => {
// Sign in with the built-in admin account.
cy.testid('LoginPage_linkAdmin').click();
cy.testid('LoginPage_password').type('adminpwd');
cy.testid('LoginPage_submitLogin').click();
// Open the superadmin role and switch to its database permissions.
cy.testid('AdminMenuWidget_itemRoles').click();
cy.testid('AdminRolesTab_table').contains('superadmin').click();
cy.testid('AdminRolesTab_databases').click();
// Add three empty permission rows (indices 0, 1, 2 below).
cy.testid('AdminDatabasesPermissionsGrid_addButton').click();
cy.testid('AdminDatabasesPermissionsGrid_addButton').click();
cy.testid('AdminDatabasesPermissionsGrid_addButton').click();
// Row 1: match databases by regex (Chinook with optional numeric suffix).
cy.testid('AdminListOrRegexEditor_1_regexInput').type('^Chinook[\\d]*$');
// Row 2: switch to list mode and enter database names one per line.
// NOTE(review): 'Nortwind' looks like a misspelling of 'Northwind' —
// confirm it matches the intended fixture data before changing it.
cy.testid('AdminListOrRegexEditor_2_listSwitch').click();
cy.testid('AdminListOrRegexEditor_2_listInput').type('Nortwind\nSales');
// Assign a role to each row; values are role ids exposed by the select.
cy.testid('AdminDatabasesPermissionsGrid_roleSelect_0').select('-2');
cy.testid('AdminDatabasesPermissionsGrid_roleSelect_1').select('-3');
cy.testid('AdminDatabasesPermissionsGrid_roleSelect_2').select('-4');
// Premium features must all be available (no 'not granted' badges).
cy.contains('not granted').should('not.exist');
// Capture a themed screenshot of the permission grid for the docs.
cy.themeshot('database-permissions');
});
});

View File

@@ -42,3 +42,11 @@ beforeEach(() => {
});
});
});
// Cypress.Screenshot.defaults({
// onBeforeScreenshot() {
// if (window.Chart) {
// Object.values(window.Chart.instances).forEach(c => c.resize());
// }
// },
// });

View File

@@ -0,0 +1,6 @@
{"__isStreamHeader":true,"pureName":"departments","schemaName":"dbo","objectId":1205579333,"createDate":"2025-06-12T10:30:34.083Z","modifyDate":"2025-06-12T10:30:34.120Z","contentHash":"2025-06-12T10:30:34.120Z","columns":[{"columnName":"id","dataType":"int","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"name","dataType":"varchar(100)","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false}],"primaryKey":{"constraintName":"PK__departme__3213E83FE8E7043D","schemaName":"dbo","pureName":"departments","constraintType":"primaryKey","columns":[{"columnName":"id"}]},"foreignKeys":[],"indexes":[],"uniques":[],"engine":"mssql@dbgate-plugin-mssql"}
{"id":1,"name":"IT"}
{"id":2,"name":"Marketing"}
{"id":3,"name":"Finance"}
{"id":4,"name":"Human Resources"}
{"id":5,"name":"Research and Development"}

View File

@@ -0,0 +1,12 @@
name: departments
columns:
- name: id
type: int
default: null
notNull: true
- name: name
type: varchar(100)
default: null
notNull: true
primaryKey:
- id

View File

@@ -0,0 +1,39 @@
{"__isStreamHeader":true,"pureName":"employee_project","schemaName":"dbo","objectId":1333579789,"createDate":"2025-06-12T10:30:34.133Z","modifyDate":"2025-06-12T10:30:34.133Z","contentHash":"2025-06-12T10:30:34.133Z","columns":[{"columnName":"employee_id","dataType":"int","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"project_id","dataType":"int","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"role","dataType":"varchar(50)","notNull":false,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false}],"primaryKey":{"constraintName":"PK__employee__2EE9924949ED9668","schemaName":"dbo","pureName":"employee_project","constraintType":"primaryKey","columns":[{"columnName":"employee_id"},{"columnName":"project_id"}]},"foreignKeys":[{"constraintName":"FK__employee___emplo__5165187F","constraintType":"foreignKey","schemaName":"dbo","pureName":"employee_project","refSchemaName":"dbo","refTableName":"employees","updateAction":"NO ACTION","deleteAction":"NO ACTION","columns":[{"columnName":"employee_id","refColumnName":"id"}]},{"constraintName":"FK__employee___proje__52593CB8","constraintType":"foreignKey","schemaName":"dbo","pureName":"employee_project","refSchemaName":"dbo","refTableName":"projects","updateAction":"NO ACTION","deleteAction":"NO ACTION","columns":[{"columnName":"project_id","refColumnName":"id"}]}],"indexes":[],"uniques":[],"engine":"mssql@dbgate-plugin-mssql"}
{"employee_id":1,"project_id":6,"role":"Manager"}
{"employee_id":1,"project_id":8,"role":"Developer"}
{"employee_id":2,"project_id":7,"role":"Tester"}
{"employee_id":2,"project_id":8,"role":"Developer"}
{"employee_id":3,"project_id":4,"role":"Analyst"}
{"employee_id":3,"project_id":6,"role":"Developer"}
{"employee_id":4,"project_id":2,"role":"Manager"}
{"employee_id":4,"project_id":4,"role":"Analyst"}
{"employee_id":4,"project_id":5,"role":"Analyst"}
{"employee_id":5,"project_id":5,"role":"Tester"}
{"employee_id":6,"project_id":1,"role":"Analyst"}
{"employee_id":6,"project_id":6,"role":"Tester"}
{"employee_id":6,"project_id":9,"role":"Manager"}
{"employee_id":7,"project_id":8,"role":"Manager"}
{"employee_id":8,"project_id":10,"role":"Analyst"}
{"employee_id":9,"project_id":2,"role":"Analyst"}
{"employee_id":9,"project_id":6,"role":"Analyst"}
{"employee_id":9,"project_id":7,"role":"Developer"}
{"employee_id":10,"project_id":2,"role":"Manager"}
{"employee_id":10,"project_id":6,"role":"Analyst"}
{"employee_id":11,"project_id":1,"role":"Tester"}
{"employee_id":12,"project_id":4,"role":"Tester"}
{"employee_id":13,"project_id":2,"role":"Developer"}
{"employee_id":13,"project_id":3,"role":"Analyst"}
{"employee_id":13,"project_id":7,"role":"Developer"}
{"employee_id":14,"project_id":3,"role":"Developer"}
{"employee_id":14,"project_id":9,"role":"Manager"}
{"employee_id":15,"project_id":1,"role":"Developer"}
{"employee_id":15,"project_id":5,"role":"Manager"}
{"employee_id":16,"project_id":3,"role":"Tester"}
{"employee_id":16,"project_id":5,"role":"Developer"}
{"employee_id":17,"project_id":6,"role":"Analyst"}
{"employee_id":18,"project_id":1,"role":"Tester"}
{"employee_id":18,"project_id":5,"role":"Tester"}
{"employee_id":18,"project_id":6,"role":"Manager"}
{"employee_id":19,"project_id":6,"role":"Analyst"}
{"employee_id":20,"project_id":2,"role":"Developer"}
{"employee_id":20,"project_id":4,"role":"Developer"}

View File

@@ -0,0 +1,18 @@
name: employee_project
columns:
- name: employee_id
type: int
default: null
notNull: true
references: employees
- name: project_id
type: int
default: null
notNull: true
references: projects
- name: role
type: varchar(50)
default: null
primaryKey:
- employee_id
- project_id

View File

@@ -0,0 +1,21 @@
{"__isStreamHeader":true,"pureName":"employees","schemaName":"dbo","objectId":1237579447,"createDate":"2025-06-12T10:30:34.113Z","modifyDate":"2025-06-12T12:35:22.140Z","contentHash":"2025-06-12T12:35:22.140Z","columns":[{"columnName":"id","dataType":"int","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"name","dataType":"varchar(100)","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"email","dataType":"varchar(100)","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"hire_date","dataType":"date","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"department_id","dataType":"int","notNull":false,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false}],"primaryKey":{"constraintName":"PK__employee__3213E83FE576E55A","schemaName":"dbo","pureName":"employees","constraintType":"primaryKey","columns":[{"columnName":"id"}]},"foreignKeys":[{"constraintName":"FK__employees__depar__4CA06362","constraintType":"foreignKey","schemaName":"dbo","pureName":"employees","refSchemaName":"dbo","refTableName":"departments","updateAction":"NO ACTION","deleteAction":"NO ACTION","columns":[{"columnName":"department_id","refColumnName":"id"}]}],"indexes":[],"uniques":[{"constraintName":"UQ__employee__AB6E6164E18D883F","columns":[{"columnName":"email"}]}],"engine":"mssql@dbgate-plugin-mssql"}
{"id":1,"name":"John Smith","email":"john.smith@example.com","hire_date":"2018-07-09T00:00:00.000Z","department_id":2}
{"id":2,"name":"Jane Garcia","email":"jane.garcia@example.com","hire_date":"2019-10-13T00:00:00.000Z","department_id":5}
{"id":3,"name":"Grace Smith","email":"grace.smith@example.com","hire_date":"2019-03-16T00:00:00.000Z","department_id":1}
{"id":4,"name":"Charlie Williams","email":"charlie.williams@example.com","hire_date":"2020-10-18T00:00:00.000Z","department_id":2}
{"id":5,"name":"Eve Brown","email":"eve.brown@example.com","hire_date":"2018-04-12T00:00:00.000Z","department_id":4}
{"id":6,"name":"Alice Moore","email":"alice.moore@example.com","hire_date":"2019-04-20T00:00:00.000Z","department_id":2}
{"id":7,"name":"Eve Williams","email":"eve.williams@example.com","hire_date":"2020-04-26T00:00:00.000Z","department_id":4}
{"id":8,"name":"Eve Jones","email":"eve.jones@example.com","hire_date":"2022-10-04T00:00:00.000Z","department_id":3}
{"id":9,"name":"Diana Miller","email":"diana.miller@example.com","hire_date":"2021-03-28T00:00:00.000Z","department_id":1}
{"id":10,"name":"Diana Smith","email":"diana.smith@example.com","hire_date":"2018-04-12T00:00:00.000Z","department_id":2}
{"id":11,"name":"Hank Johnson","email":"hank.johnson@example.com","hire_date":"2020-09-16T00:00:00.000Z","department_id":2}
{"id":12,"name":"Frank Miller","email":"frank.miller@example.com","hire_date":"2023-01-12T00:00:00.000Z","department_id":4}
{"id":13,"name":"Jane Brown","email":"jane.brown@example.com","hire_date":"2023-05-07T00:00:00.000Z","department_id":3}
{"id":14,"name":"Grace Davis","email":"grace.davis@example.com","hire_date":"2019-08-22T00:00:00.000Z","department_id":3}
{"id":15,"name":"Jane Black","email":"jane.black@example.com","hire_date":"2019-04-28T00:00:00.000Z","department_id":2}
{"id":16,"name":"Charlie Smith","email":"charlie.smith@example.com","hire_date":"2019-06-12T00:00:00.000Z","department_id":5}
{"id":17,"name":"Eve Johnson","email":"eve.johnson@example.com","hire_date":"2020-11-07T00:00:00.000Z","department_id":5}
{"id":18,"name":"Jane Johnson","email":"jane.johnson@example.com","hire_date":"2020-04-06T00:00:00.000Z","department_id":2}
{"id":19,"name":"Hank Brown","email":"hank.brown@example.com","hire_date":"2023-05-10T00:00:00.000Z","department_id":2}
{"id":20,"name":"Frank Jones","email":"frank.jones@example.com","hire_date":"2020-10-26T00:00:00.000Z","department_id":1}

View File

@@ -0,0 +1,28 @@
name: employees
columns:
- name: id
type: int
default: null
notNull: true
- name: name
type: varchar(100)
default: null
notNull: true
- name: email
type: varchar(100)
default: null
notNull: true
- name: hire_date
type: date
default: null
notNull: true
- name: department_id
type: int
default: null
references: departments
primaryKey:
- id
uniques:
- name: UQ__employee__AB6E6164E18D883F
columns:
- email

View File

@@ -0,0 +1,141 @@
{"__isStreamHeader":true,"pureName":"finance_reports","schemaName":"dbo","objectId":338100245,"createDate":"2025-06-23T12:15:08.727Z","modifyDate":"2025-06-23T12:15:08.750Z","contentHash":"2025-06-23T12:15:08.750Z","columns":[{"columnName":"id","dataType":"int","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"date","dataType":"date","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"profit","dataType":"money","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false}],"foreignKeys":[{"constraintName":"project_id","constraintType":"foreignKey","schemaName":"dbo","pureName":"finance_reports","refSchemaName":"dbo","refTableName":"projects","updateAction":"NO ACTION","deleteAction":"NO ACTION","columns":[{"columnName":"id","refColumnName":"id"}]}],"indexes":[],"uniques":[],"engine":"mssql@dbgate-plugin-mssql"}
{"id":1,"date":"2022-01-01T00:00:00.000Z","profit":73923.4}
{"id":1,"date":"2022-01-31T00:00:00.000Z","profit":21837.75}
{"id":1,"date":"2022-03-02T00:00:00.000Z","profit":67859.8}
{"id":1,"date":"2022-04-01T00:00:00.000Z","profit":77403.3}
{"id":1,"date":"2022-05-01T00:00:00.000Z","profit":84083.19}
{"id":1,"date":"2022-05-31T00:00:00.000Z","profit":30040.55}
{"id":1,"date":"2022-06-30T00:00:00.000Z","profit":50947.14}
{"id":1,"date":"2022-07-30T00:00:00.000Z","profit":63345.62}
{"id":1,"date":"2022-08-29T00:00:00.000Z","profit":23819.45}
{"id":1,"date":"2022-09-28T00:00:00.000Z","profit":-25919.19}
{"id":1,"date":"2022-10-28T00:00:00.000Z","profit":27967.6}
{"id":1,"date":"2022-11-27T00:00:00.000Z","profit":-37402.36}
{"id":1,"date":"2022-12-27T00:00:00.000Z","profit":94528.8}
{"id":1,"date":"2023-01-26T00:00:00.000Z","profit":29491.03}
{"id":1,"date":"2023-02-25T00:00:00.000Z","profit":81541.29}
{"id":2,"date":"2022-01-01T00:00:00.000Z","profit":18070.94}
{"id":2,"date":"2022-01-31T00:00:00.000Z","profit":-40609.87}
{"id":2,"date":"2022-03-02T00:00:00.000Z","profit":42435.51}
{"id":2,"date":"2022-04-01T00:00:00.000Z","profit":-11915.15}
{"id":2,"date":"2022-05-01T00:00:00.000Z","profit":-37417.4}
{"id":2,"date":"2022-05-31T00:00:00.000Z","profit":23028.66}
{"id":2,"date":"2022-06-30T00:00:00.000Z","profit":-6895.49}
{"id":2,"date":"2022-07-30T00:00:00.000Z","profit":63114.54}
{"id":2,"date":"2022-08-29T00:00:00.000Z","profit":94646.99}
{"id":2,"date":"2022-09-28T00:00:00.000Z","profit":99560.77}
{"id":2,"date":"2022-10-28T00:00:00.000Z","profit":62216.22}
{"id":2,"date":"2022-11-27T00:00:00.000Z","profit":85094.32}
{"id":2,"date":"2022-12-27T00:00:00.000Z","profit":-23378.37}
{"id":2,"date":"2023-01-26T00:00:00.000Z","profit":47635.86}
{"id":2,"date":"2023-02-25T00:00:00.000Z","profit":33727.72}
{"id":3,"date":"2022-01-01T00:00:00.000Z","profit":33088.03}
{"id":3,"date":"2022-01-31T00:00:00.000Z","profit":66668.91}
{"id":3,"date":"2022-03-02T00:00:00.000Z","profit":5344.27}
{"id":3,"date":"2022-04-01T00:00:00.000Z","profit":22122.99}
{"id":3,"date":"2022-05-01T00:00:00.000Z","profit":27342.01}
{"id":3,"date":"2022-05-31T00:00:00.000Z","profit":55479.42}
{"id":3,"date":"2022-06-30T00:00:00.000Z","profit":35956.11}
{"id":3,"date":"2022-07-30T00:00:00.000Z","profit":9667.12}
{"id":3,"date":"2022-08-29T00:00:00.000Z","profit":63430.18}
{"id":3,"date":"2022-09-28T00:00:00.000Z","profit":-4883.41}
{"id":3,"date":"2022-10-28T00:00:00.000Z","profit":38902.8}
{"id":3,"date":"2022-11-27T00:00:00.000Z","profit":-25500.13}
{"id":3,"date":"2022-12-27T00:00:00.000Z","profit":65074.21}
{"id":3,"date":"2023-01-26T00:00:00.000Z","profit":12570.27}
{"id":3,"date":"2023-02-25T00:00:00.000Z","profit":35418.36}
{"id":4,"date":"2022-01-01T00:00:00.000Z","profit":68282.98}
{"id":4,"date":"2022-01-31T00:00:00.000Z","profit":77778.99}
{"id":4,"date":"2022-03-02T00:00:00.000Z","profit":95490.49}
{"id":4,"date":"2022-04-01T00:00:00.000Z","profit":-44466.37}
{"id":4,"date":"2022-05-01T00:00:00.000Z","profit":40215.71}
{"id":4,"date":"2022-05-31T00:00:00.000Z","profit":-31228.87}
{"id":4,"date":"2022-06-30T00:00:00.000Z","profit":60667.69}
{"id":4,"date":"2022-07-30T00:00:00.000Z","profit":71439.16}
{"id":4,"date":"2022-08-29T00:00:00.000Z","profit":-25077.4}
{"id":4,"date":"2022-09-28T00:00:00.000Z","profit":-36128.2}
{"id":4,"date":"2022-10-28T00:00:00.000Z","profit":36727.68}
{"id":4,"date":"2022-11-27T00:00:00.000Z","profit":-24207.2}
{"id":4,"date":"2022-12-27T00:00:00.000Z","profit":63846.96}
{"id":5,"date":"2022-01-01T00:00:00.000Z","profit":21648.3}
{"id":5,"date":"2022-01-31T00:00:00.000Z","profit":59263.22}
{"id":5,"date":"2022-03-02T00:00:00.000Z","profit":49154.51}
{"id":5,"date":"2022-04-01T00:00:00.000Z","profit":34787.48}
{"id":5,"date":"2022-05-01T00:00:00.000Z","profit":-24120.19}
{"id":5,"date":"2022-05-31T00:00:00.000Z","profit":98437.86}
{"id":5,"date":"2022-06-30T00:00:00.000Z","profit":18614.77}
{"id":5,"date":"2022-07-30T00:00:00.000Z","profit":17680.34}
{"id":5,"date":"2022-08-29T00:00:00.000Z","profit":74406.86}
{"id":5,"date":"2022-09-28T00:00:00.000Z","profit":61845.3}
{"id":5,"date":"2022-10-28T00:00:00.000Z","profit":-37889.59}
{"id":5,"date":"2022-11-27T00:00:00.000Z","profit":76651.05}
{"id":5,"date":"2022-12-27T00:00:00.000Z","profit":58739.6}
{"id":5,"date":"2023-01-26T00:00:00.000Z","profit":82605.85}
{"id":6,"date":"2022-01-01T00:00:00.000Z","profit":-5206.8}
{"id":6,"date":"2022-01-31T00:00:00.000Z","profit":27498.27}
{"id":6,"date":"2022-03-02T00:00:00.000Z","profit":-2939.84}
{"id":6,"date":"2022-04-01T00:00:00.000Z","profit":-37261.08}
{"id":6,"date":"2022-05-01T00:00:00.000Z","profit":37069.04}
{"id":6,"date":"2022-05-31T00:00:00.000Z","profit":524.88}
{"id":6,"date":"2022-06-30T00:00:00.000Z","profit":-29620.85}
{"id":6,"date":"2022-07-30T00:00:00.000Z","profit":35540.81}
{"id":6,"date":"2022-08-29T00:00:00.000Z","profit":20608.94}
{"id":6,"date":"2022-09-28T00:00:00.000Z","profit":34809.33}
{"id":6,"date":"2022-10-28T00:00:00.000Z","profit":-44949.05}
{"id":6,"date":"2022-11-27T00:00:00.000Z","profit":-22524.26}
{"id":6,"date":"2022-12-27T00:00:00.000Z","profit":37841.58}
{"id":7,"date":"2022-01-01T00:00:00.000Z","profit":6903.17}
{"id":7,"date":"2022-01-31T00:00:00.000Z","profit":58480.84}
{"id":7,"date":"2022-03-02T00:00:00.000Z","profit":48217.34}
{"id":7,"date":"2022-04-01T00:00:00.000Z","profit":73592.44}
{"id":7,"date":"2022-05-01T00:00:00.000Z","profit":-21831.18}
{"id":7,"date":"2022-05-31T00:00:00.000Z","profit":-40926.16}
{"id":7,"date":"2022-06-30T00:00:00.000Z","profit":62299.5}
{"id":7,"date":"2022-07-30T00:00:00.000Z","profit":95376.53}
{"id":7,"date":"2022-08-29T00:00:00.000Z","profit":-13317.36}
{"id":7,"date":"2022-09-28T00:00:00.000Z","profit":81565.05}
{"id":7,"date":"2022-10-28T00:00:00.000Z","profit":77420.52}
{"id":7,"date":"2022-11-27T00:00:00.000Z","profit":-12052.47}
{"id":7,"date":"2022-12-27T00:00:00.000Z","profit":37742.07}
{"id":7,"date":"2023-01-26T00:00:00.000Z","profit":-8057.99}
{"id":8,"date":"2022-01-01T00:00:00.000Z","profit":27213.73}
{"id":8,"date":"2022-01-31T00:00:00.000Z","profit":34271.75}
{"id":8,"date":"2022-03-02T00:00:00.000Z","profit":-44549.47}
{"id":8,"date":"2022-04-01T00:00:00.000Z","profit":15236.34}
{"id":8,"date":"2022-05-01T00:00:00.000Z","profit":-27759.81}
{"id":8,"date":"2022-05-31T00:00:00.000Z","profit":7955.12}
{"id":8,"date":"2022-06-30T00:00:00.000Z","profit":-34484.38}
{"id":8,"date":"2022-07-30T00:00:00.000Z","profit":-49758.7}
{"id":8,"date":"2022-08-29T00:00:00.000Z","profit":-41990.86}
{"id":8,"date":"2022-09-28T00:00:00.000Z","profit":58123.01}
{"id":8,"date":"2022-10-28T00:00:00.000Z","profit":30128.78}
{"id":8,"date":"2022-11-27T00:00:00.000Z","profit":-10151.17}
{"id":8,"date":"2022-12-27T00:00:00.000Z","profit":54048.33}
{"id":8,"date":"2023-01-26T00:00:00.000Z","profit":-43123.17}
{"id":9,"date":"2022-01-01T00:00:00.000Z","profit":61031.83}
{"id":9,"date":"2022-01-31T00:00:00.000Z","profit":68577.58}
{"id":9,"date":"2022-03-02T00:00:00.000Z","profit":88698.97}
{"id":9,"date":"2022-04-01T00:00:00.000Z","profit":8906.03}
{"id":9,"date":"2022-05-01T00:00:00.000Z","profit":28824.73}
{"id":9,"date":"2022-05-31T00:00:00.000Z","profit":88280.34}
{"id":9,"date":"2022-06-30T00:00:00.000Z","profit":35266.09}
{"id":9,"date":"2022-07-30T00:00:00.000Z","profit":-38025.36}
{"id":9,"date":"2022-08-29T00:00:00.000Z","profit":-12118.53}
{"id":9,"date":"2022-09-28T00:00:00.000Z","profit":-27265.86}
{"id":9,"date":"2022-10-28T00:00:00.000Z","profit":56870.57}
{"id":9,"date":"2022-11-27T00:00:00.000Z","profit":88078.95}
{"id":9,"date":"2022-12-27T00:00:00.000Z","profit":-24059.67}
{"id":9,"date":"2023-01-26T00:00:00.000Z","profit":-13301.43}
{"id":10,"date":"2022-01-01T00:00:00.000Z","profit":-22479.23}
{"id":10,"date":"2022-01-31T00:00:00.000Z","profit":8106.27}
{"id":10,"date":"2022-03-02T00:00:00.000Z","profit":69372.19}
{"id":10,"date":"2022-04-01T00:00:00.000Z","profit":-11895.74}
{"id":10,"date":"2022-05-01T00:00:00.000Z","profit":-33206.5}
{"id":10,"date":"2022-05-31T00:00:00.000Z","profit":56073.34}
{"id":10,"date":"2022-06-30T00:00:00.000Z","profit":67488.3}
{"id":10,"date":"2022-07-30T00:00:00.000Z","profit":48529.23}
{"id":10,"date":"2022-08-29T00:00:00.000Z","profit":28680.2}
{"id":10,"date":"2022-09-28T00:00:00.000Z","profit":59311.16}
{"id":10,"date":"2022-10-28T00:00:00.000Z","profit":25315.78}
{"id":10,"date":"2022-11-27T00:00:00.000Z","profit":36116.38}
{"id":10,"date":"2022-12-27T00:00:00.000Z","profit":-42040.4}

View File

@@ -0,0 +1,15 @@
name: finance_reports
columns:
- name: id
type: int
default: null
notNull: true
references: projects
- name: date
type: date
default: null
notNull: true
- name: profit
type: money
default: null
notNull: true

View File

@@ -0,0 +1,11 @@
{"__isStreamHeader":true,"pureName":"projects","schemaName":"dbo","objectId":1301579675,"createDate":"2025-06-12T10:30:34.127Z","modifyDate":"2025-06-23T12:15:08.750Z","contentHash":"2025-06-23T12:15:08.750Z","columns":[{"columnName":"id","dataType":"int","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"name","dataType":"varchar(100)","notNull":true,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"start_date","dataType":"date","notNull":false,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false},{"columnName":"end_date","dataType":"date","notNull":false,"autoIncrement":false,"defaultValue":null,"defaultConstraint":null,"computedExpression":null,"hasAutoValue":false}],"primaryKey":{"constraintName":"PK__projects__3213E83F26A7ED11","schemaName":"dbo","pureName":"projects","constraintType":"primaryKey","columns":[{"columnName":"id"}]},"foreignKeys":[],"indexes":[],"uniques":[],"engine":"mssql@dbgate-plugin-mssql"}
{"id":1,"name":"Apollo Upgrade","start_date":"2020-04-27T00:00:00.000Z","end_date":"2020-10-19T00:00:00.000Z"}
{"id":2,"name":"Market Expansion","start_date":"2022-08-04T00:00:00.000Z","end_date":"2023-06-20T00:00:00.000Z"}
{"id":3,"name":"AI Integration","start_date":"2020-05-11T00:00:00.000Z","end_date":"2021-07-10T00:00:00.000Z"}
{"id":4,"name":"Cost Reduction","start_date":"2022-01-08T00:00:00.000Z","end_date":"2022-07-12T00:00:00.000Z"}
{"id":5,"name":"Cloud Migration","start_date":"2021-01-11T00:00:00.000Z","end_date":"2021-05-27T00:00:00.000Z"}
{"id":6,"name":"Customer Portal","start_date":"2021-07-13T00:00:00.000Z","end_date":"2022-09-22T00:00:00.000Z"}
{"id":7,"name":"Data Lake","start_date":"2021-02-25T00:00:00.000Z","end_date":"2021-08-21T00:00:00.000Z"}
{"id":8,"name":"UX Overhaul","start_date":"2021-05-20T00:00:00.000Z","end_date":"2022-09-10T00:00:00.000Z"}
{"id":9,"name":"Security Hardening","start_date":"2021-05-28T00:00:00.000Z","end_date":"2022-07-28T00:00:00.000Z"}
{"id":10,"name":"Mobile App Revamp","start_date":"2021-11-17T00:00:00.000Z","end_date":"2022-06-04T00:00:00.000Z"}

View File

@@ -0,0 +1,18 @@
name: projects
columns:
- name: id
type: int
default: null
notNull: true
- name: name
type: varchar(100)
default: null
notNull: true
- name: start_date
type: date
default: null
- name: end_date
type: date
default: null
primaryKey:
- id

View File

@@ -0,0 +1,23 @@
-- >>>
-- autoExecute: true
-- splitterInitialValue: 20%
-- selected-chart: 1
-- <<<
-- Monthly total profit per department. Departments are linked to finance
-- reports through employees and their project assignments; finance_reports
-- is joined on fr.id, which holds the project id (see its foreign key).
SELECT
d.name AS department_name,
FORMAT(fr.date, 'yyyy-MM') AS month,
SUM(fr.profit) AS total_monthly_profit
FROM
departments d
JOIN
employees e ON d.id = e.department_id
JOIN
employee_project ep ON e.id = ep.employee_id
JOIN
finance_reports fr ON ep.project_id = fr.id
GROUP BY
d.name, FORMAT(fr.date, 'yyyy-MM')
ORDER BY
d.name, month;

View File

@@ -37,7 +37,7 @@ services:
- "16009:5556"
mongo:
image: mongo:4.0.12
image: mongo:4.4.29
restart: always
environment:
MONGO_INITDB_ROOT_USERNAME: root

8
e2e-tests/env/charts/.env vendored Normal file
View File

@@ -0,0 +1,8 @@
CONNECTIONS=mysql
LABEL_mysql=MySql-connection
SERVER_mysql=localhost
USER_mysql=root
PASSWORD_mysql=Pwd2020Db
PORT_mysql=16004
ENGINE_mysql=mysql@dbgate-plugin-mysql

2
e2e-tests/env/cloud/.env vendored Normal file
View File

@@ -0,0 +1,2 @@
ALLOW_DBGATE_PRIVATE_CLOUD=1
REDIRECT_TO_DBGATE_CLOUD_LOGIN=1

96
e2e-tests/init/charts.js Normal file
View File

@@ -0,0 +1,96 @@
// Init script for the charts e2e suite: prepares databases and file fixtures.
const path = require('path');
const os = require('os');
const fs = require('fs');
// DbGate keeps user files under ~/.dbgate; fixtures are copied below it.
const baseDir = path.join(os.homedir(), '.dbgate');
const dbgateApi = require('dbgate-api');
dbgateApi.initializeApiEnvironment();
// Register the MySQL driver plugin so the shell API can reach MySQL.
const dbgatePluginMysql = require('dbgate-plugin-mysql');
dbgateApi.registerPlugins(dbgatePluginMysql);
/**
 * Recursively copies the contents of `source` into `target`, creating
 * `target` (and any nested folders) when missing.
 *
 * The previous flat implementation called fs.copyFileSync on every entry and
 * threw EISDIR when the source contained a subdirectory; directories are now
 * descended into instead. Flat folders behave exactly as before.
 *
 * @param {string} source - existing folder to copy from
 * @param {string} target - destination folder, created if absent
 */
async function copyFolder(source, target) {
  if (!fs.existsSync(target)) {
    fs.mkdirSync(target, { recursive: true });
  }
  for (const entry of fs.readdirSync(source, { withFileTypes: true })) {
    const sourcePath = path.join(source, entry.name);
    const targetPath = path.join(target, entry.name);
    if (entry.isDirectory()) {
      await copyFolder(sourcePath, targetPath);
    } else {
      fs.copyFileSync(sourcePath, targetPath);
    }
  }
}
/**
 * Drops, recreates and populates a MySQL database from an SQL dump, using the
 * connection settings from the charts .env file.
 *
 * The connection object was previously duplicated verbatim in all three API
 * calls; it is now built once and spread (with `database`) for the import.
 *
 * @param {string} dbname - database name; interpolated into raw SQL, so only
 *   hard-coded test names may be passed here (never untrusted input)
 * @param {string} inputFile - path to the SQL dump imported into the new db
 */
async function initMySqlDatabase(dbname, inputFile) {
  const connection = {
    server: process.env.SERVER_mysql,
    user: process.env.USER_mysql,
    password: process.env.PASSWORD_mysql,
    port: process.env.PORT_mysql,
    engine: 'mysql@dbgate-plugin-mysql',
  };
  await dbgateApi.executeQuery({
    connection,
    sql: `drop database if exists ${dbname}`,
  });
  await dbgateApi.executeQuery({
    connection,
    sql: `create database ${dbname}`,
  });
  await dbgateApi.importDatabase({
    connection: { ...connection, database: dbname },
    inputFile,
  });
}
/**
 * Prepares all backend state for the charts e2e suite: rebuilds the
 * `charts_sample` MySQL database from its folder model, copies the shared
 * SQL file fixtures into the DbGate files directory, and re-imports the
 * Chinook sample database.
 */
async function run() {
  const mysqlConnection = {
    server: process.env.SERVER_mysql,
    user: process.env.USER_mysql,
    password: process.env.PASSWORD_mysql,
    port: process.env.PORT_mysql,
    engine: 'mysql@dbgate-plugin-mysql',
  };
  // Best-effort cleanup: a failing drop (e.g. first run) is logged, not fatal.
  try {
    await dbgateApi.executeQuery({
      connection: mysqlConnection,
      sql: 'drop database if exists charts_sample',
    });
  } catch (err) {
    console.error('Failed to drop database', err);
  }
  await dbgateApi.executeQuery({
    connection: mysqlConnection,
    sql: 'create database charts_sample',
  });
  // Load the charts sample schema and data from the checked-in folder model.
  await dbgateApi.importDbFromFolder({
    connection: {
      ...mysqlConnection,
      database: 'charts_sample',
    },
    folder: path.resolve(path.join(__dirname, '../data/charts-sample')),
  });
  // Make the shared SQL file fixtures visible inside ~/.dbgate.
  const sqlFixturesSource = path.resolve(path.join(__dirname, '../data/files/sql'));
  const sqlFixturesTarget = path.join(baseDir, 'files-e2etests', 'sql');
  await copyFolder(sqlFixturesSource, sqlFixturesTarget);
  // Rebuild the Chinook sample database from its SQL dump.
  await initMySqlDatabase('MyChinook', path.resolve(path.join(__dirname, '../data/chinook-mysql.sql')));
}
// Hand control to dbgate-api, which executes `run` and handles process exit.
dbgateApi.runScript(run);

View File

@@ -21,6 +21,8 @@
"cy:run:browse-data": "cypress run --spec cypress/e2e/browse-data.cy.js",
"cy:run:team": "cypress run --spec cypress/e2e/team.cy.js",
"cy:run:multi-sql": "cypress run --spec cypress/e2e/multi-sql.cy.js",
"cy:run:cloud": "cypress run --spec cypress/e2e/cloud.cy.js",
"cy:run:charts": "cypress run --spec cypress/e2e/charts.cy.js",
"start:add-connection": "node clearTestingData && cd .. && node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:portal": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/portal/.env node e2e-tests/init/portal.js && env-cmd -f e2e-tests/env/portal/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
@@ -28,6 +30,8 @@
"start:browse-data": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/browse-data/.env node e2e-tests/init/browse-data.js && env-cmd -f e2e-tests/env/browse-data/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:team": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/team/.env node e2e-tests/init/team.js && env-cmd -f e2e-tests/env/team/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:multi-sql": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/multi-sql/.env node e2e-tests/init/multi-sql.js && env-cmd -f e2e-tests/env/multi-sql/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:cloud": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/cloud/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:charts": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/charts/.env node e2e-tests/init/charts.js && env-cmd -f e2e-tests/env/charts/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"test:add-connection": "start-server-and-test start:add-connection http://localhost:3000 cy:run:add-connection",
"test:portal": "start-server-and-test start:portal http://localhost:3000 cy:run:portal",
@@ -35,8 +39,10 @@
"test:browse-data": "start-server-and-test start:browse-data http://localhost:3000 cy:run:browse-data",
"test:team": "start-server-and-test start:team http://localhost:3000 cy:run:team",
"test:multi-sql": "start-server-and-test start:multi-sql http://localhost:3000 cy:run:multi-sql",
"test:cloud": "start-server-and-test start:cloud http://localhost:3000 cy:run:cloud",
"test:charts": "start-server-and-test start:charts http://localhost:3000 cy:run:charts",
"test": "yarn test:add-connection && yarn test:portal && yarn test:oauth && yarn test:browse-data && yarn test:team && yarn test:multi-sql",
"test": "yarn test:add-connection && yarn test:portal && yarn test:oauth && yarn test:browse-data && yarn test:team && yarn test:multi-sql && yarn test:cloud && yarn test:charts",
"test:ci": "yarn test"
},
"dependencies": {}

View File

@@ -118,6 +118,31 @@ describe('Alter table', () => {
})
);
// For each engine that supports table-level comments, verify that setting
// objectComment on the table model produces a working ALTER diff
// (testTableDiff applies the change and checks the resulting structure).
test.each(engines.filter(i => i.supportTableComments).map(engine => [engine.label, engine]))(
'Add comment to table - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.objectComment = 'Added table comment';
});
})
);
// For each engine that supports column-level comments, verify that adding a
// new column carrying columnComment produces a working ALTER diff.
test.each(engines.filter(i => i.supportColumnComments).map(engine => [engine.label, engine]))(
'Add comment to column - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.push({
columnName: 'added',
columnComment: 'Added column comment',
dataType: 'int',
// pairingId links model columns across diffs; a fresh UUID marks a new column.
pairingId: crypto.randomUUID(),
notNull: false,
autoIncrement: false,
});
});
})
);
test.each(
createEnginesColumnsSource(engines.filter(x => !x.skipDropColumn)).filter(
([_label, col, engine]) => !engine.skipPkDrop || !col.endsWith('_pk')

View File

@@ -4,7 +4,7 @@ const { testWrapper } = require('../tools');
const dataReplicator = require('dbgate-api/src/shell/dataReplicator');
const deployDb = require('dbgate-api/src/shell/deployDb');
const storageModel = require('dbgate-api/src/storageModel');
const { runCommandOnDriver, runQueryOnDriver } = require('dbgate-tools');
const { runCommandOnDriver, runQueryOnDriver, adaptDatabaseInfo } = require('dbgate-tools');
describe('Data replicator', () => {
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
@@ -162,7 +162,7 @@ describe('Data replicator', () => {
await deployDb({
systemConnection: conn,
driver,
loadedDbModel: storageModel,
loadedDbModel: adaptDatabaseInfo(storageModel, driver),
targetSchema: engine.defaultSchemaName,
});
@@ -176,11 +176,11 @@ describe('Data replicator', () => {
await queryValue(
`select ~is_disabled as ~val from ~auth_methods where ~amoid='790ca4d2-7f01-4800-955b-d691b890cc50'`
)
).toBeFalsy();
).toBeTruthy();
const DB1 = {
auth_methods: [
{ id: -1, name: 'Anonymous', amoid: '790ca4d2-7f01-4800-955b-d691b890cc50', is_disabled: 1 },
{ id: -1, name: 'Anonymous', amoid: '790ca4d2-7f01-4800-955b-d691b890cc50', is_disabled: 0 },
{ id: 10, name: 'OAuth', amoid: '4269b660-54b6-11ef-a3aa-a9021250bf4b' },
],
auth_methods_config: [{ id: 20, auth_method_id: 10, key: 'oauthClient', value: 'dbgate' }],
@@ -266,7 +266,7 @@ describe('Data replicator', () => {
await queryValue(
`select ~is_disabled as ~val from ~auth_methods where ~amoid='790ca4d2-7f01-4800-955b-d691b890cc50'`
)
).toBeTruthy();
).toEqual('0');
expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('3');
expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('1');

View File

@@ -193,7 +193,6 @@ describe('DB Import/export', () => {
systemConnection: conn,
driver,
folder: path.join(__dirname, '../../e2e-tests/data/my-guitar-shop'),
transformRow: engine.transformModelRow,
});
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~categories`));

View File

@@ -64,6 +64,40 @@ describe('Table create', () => {
})
);
// Create a simple single-column table decorated with table and/or column comments,
// on engines that support either kind of comment.
test.each(
engines.filter(i => i.supportTableComments || i.supportColumnComments).map(engine => [engine.label, engine])
)(
'Simple table with comment - %s',
testWrapper(async (conn, driver, engine) => {
await testTableCreate(engine, conn, driver, {
...(engine.supportTableComments && {
// NOTE(review): hard-coded 'dbo' looks like an SQL Server leftover; it is
// overridden by the defaultSchemaName spread below whenever that is set —
// confirm it is intentional for comment-supporting engines with no default schema.
schemaName: 'dbo',
objectComment: 'table comment',
}),
...(engine.defaultSchemaName && {
schemaName: engine.defaultSchemaName,
}),
columns: [
{
columnName: 'col1',
dataType: 'int',
pureName: 'tested',
// Engines flagged skipNullability cannot express NOT NULL in this test setup.
...(engine.skipNullability ? {} : { notNull: true }),
...(engine.supportColumnComments && {
columnComment: 'column comment',
}),
...(engine.defaultSchemaName && {
schemaName: engine.defaultSchemaName,
}),
},
],
primaryKey: {
columns: [{ columnName: 'col1' }],
},
});
})
);
test.each(engines.filter(x => !x.skipIndexes).map(engine => [engine.label, engine]))(
'Table with index - %s',
testWrapper(async (conn, driver, engine) => {

View File

@@ -443,6 +443,8 @@ const sqlServerEngine = {
supportSchemas: true,
supportRenameSqlObject: true,
defaultSchemaName: 'dbo',
supportTableComments: true,
supportColumnComments: true,
// skipSeparateSchemas: true,
triggers: [
{
@@ -726,16 +728,6 @@ const firebirdEngine = {
// supportRenameSqlObject: true,
skipIncrementalAnalysis: true,
skipRenameTable: true,
// Normalizes model rows for comparison: values shaped like an ISO date-time
// ('YYYY-MM-DDTHH:MM:SS', no zone/fraction) have the 'T' separator replaced with
// a space — presumably to match the engine's native timestamp formatting; confirm
// against the import/export comparison that consumes these rows.
transformModelRow: row => {
return Object.fromEntries(
Object.entries(row).map(([key, value]) => {
// The anchored regex only matches exact ISO date-time strings; other values pass through.
if (/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}$/.test(value)) {
return [key, value.replace('T', ' ')];
}
return [key, value];
})
);
},
// skipDefaultValue: true,
skipDropReferences: true,
};

View File

@@ -1,6 +1,6 @@
{
"private": true,
"version": "6.4.3-premium-beta.4",
"version": "6.6.9",
"name": "dbgate-all",
"workspaces": [
"packages/*",
@@ -43,7 +43,7 @@
"build:plugins:frontend": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn build:frontend",
"build:plugins:backend": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn build:backend",
"build:plugins:frontend:watch": "workspaces-run --parallel --only=\"dbgate-plugin-*\" -- yarn build:frontend:watch",
"storage-json": "dbmodel model-to-json storage-db packages/api/src/storageModel.js --commonjs",
"storage-json": "node packages/dbmodel/bin/dbmodel.js model-to-json storage-db packages/api/src/storageModel.js --commonjs",
"plugins:copydist": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn copydist",
"build:app:local": "yarn plugins:copydist && cd app && yarn build:local",
"start:app:local": "cd app && yarn start:local",
@@ -52,6 +52,7 @@
"generatePadFile": "node generatePadFile",
"fillPackagedPlugins": "node fillPackagedPlugins",
"resetPackagedPlugins": "node resetPackagedPlugins",
"fixYmlHashes": "cd common && yarn init -y && yarn add yaml -W && cd .. && node common/fixYmlHashes.js app/dist",
"prettier": "prettier --write packages/api/src && prettier --write packages/datalib/src && prettier --write packages/filterparser/src && prettier --write packages/sqltree/src && prettier --write packages/tools/src && prettier --write packages/types && prettier --write packages/web/src && prettier --write app/src",
"copy:docker:build": "copyfiles packages/api/dist/* docker -f && copyfiles packages/web/public/* docker -u 2 && copyfiles \"packages/web/public/**/*\" docker -u 2 && copyfiles \"plugins/dist/**/*\" docker/plugins -u 2",
"copy:packer:build": "copyfiles packages/api/dist/* packer/build -f && copyfiles packages/web/public/* packer/build -u 2 && copyfiles \"packages/web/public/**/*\" packer/build -u 2 && copyfiles \"plugins/dist/**/*\" packer/build/plugins -u 2 && copyfiles packer/install-packages.sh packer/build -f && yarn install:drivers:packer",
@@ -72,7 +73,8 @@
"translations:extract": "node common/translations-cli/index.js extract",
"translations:add-missing": "node common/translations-cli/index.js add-missing",
"translations:remove-unused": "node common/translations-cli/index.js remove-unused",
"translations:check": "node common/translations-cli/index.js check"
"translations:check": "node common/translations-cli/index.js check",
"errors": "node common/assign-dbgm-codes.mjs ."
},
"dependencies": {
"concurrently": "^5.1.0",

View File

@@ -1,5 +1,13 @@
DEVMODE=1
SHELL_SCRIPTING=1
ALLOW_DBGATE_PRIVATE_CLOUD=1
DEVWEB=1
LOCAL_AUTH_PROXY=1
# LOCAL_AI_GATEWAY=true
# REDIRECT_TO_DBGATE_CLOUD_LOGIN=1
# PROD_DBGATE_CLOUD=1
# PROD_DBGATE_IDENTITY=1
# LOCAL_DBGATE_CLOUD=1
# LOCAL_DBGATE_IDENTITY=1
@@ -9,7 +17,6 @@ SHELL_SCRIPTING=1
# DISABLE_SHELL=1
# HIDE_APP_EDITOR=1
# DEVWEB=1
# LOGINS=admin,test

View File

@@ -31,7 +31,7 @@
"cors": "^2.8.5",
"cross-env": "^6.0.3",
"dbgate-datalib": "^6.0.0-alpha.1",
"dbgate-query-splitter": "^4.11.5",
"dbgate-query-splitter": "^4.11.7",
"dbgate-sqltree": "^6.0.0-alpha.1",
"dbgate-tools": "^6.0.0-alpha.1",
"debug": "^4.3.4",
@@ -56,7 +56,7 @@
"ncp": "^2.0.0",
"node-cron": "^2.0.3",
"on-finished": "^2.4.1",
"pinomin": "^1.0.4",
"pinomin": "^1.0.5",
"portfinder": "^1.0.28",
"rimraf": "^3.0.0",
"semver": "^7.6.3",
@@ -68,6 +68,7 @@
},
"scripts": {
"start": "env-cmd -f .env node src/index.js --listen-api",
"start:debug": "env-cmd -f .env node --inspect src/index.js --listen-api",
"start:portal": "env-cmd -f env/portal/.env node src/index.js --listen-api",
"start:singledb": "env-cmd -f env/singledb/.env node src/index.js --listen-api",
"start:auth": "env-cmd -f env/auth/.env node src/index.js --listen-api",

View File

@@ -10,7 +10,13 @@ function getTokenSecret() {
return tokenSecret;
}
// Returns a static (non-rotating) token-signing secret.
// SECURITY NOTE(review): the secret is hard-coded in source, so any token signed
// with it can be forged by anyone who can read the code — confirm it is used only
// for non-sensitive purposes and consider sourcing it from configuration.
function getStaticTokenSecret() {
// TODO static not fixed
return '14813c43-a91b-4ad1-9dcd-a81bd7dbb05f';
}
module.exports = {
getTokenLifetime,
getTokenSecret,
getStaticTokenSecret,
};

View File

@@ -10,8 +10,9 @@ const logger = getLogger('authProvider');
class AuthProviderBase {
amoid = 'none';
skipInList = false;
async login(login, password, options = undefined) {
async login(login, password, options = undefined, req = undefined) {
return {
accessToken: jwt.sign(
{
@@ -23,7 +24,7 @@ class AuthProviderBase {
};
}
oauthToken(params) {
oauthToken(params, req) {
return {};
}
@@ -36,12 +37,28 @@ class AuthProviderBase {
return !!req?.user || !!req?.auth;
}
getCurrentPermissions(req) {
// Resolves the permission string for the current login.
// A per-login override (env var LOGIN_PERMISSIONS_<login>) takes precedence over
// the global PERMISSIONS env var.
async getCurrentPermissions(req) {
const login = this.getCurrentLogin(req);
const permissions = process.env[`LOGIN_PERMISSIONS_${login}`];
return permissions || process.env.PERMISSIONS;
}
// Connection-level permission guard; the base provider allows every connection.
// Subclasses are expected to override this for real access control.
async checkCurrentConnectionPermission(req, conid) {
return true;
}
// Database-level permission entries; the base provider declares none.
async getCurrentDatabasePermissions(req) {
return [];
}
// Table-level permission entries; the base provider declares none.
async getCurrentTablePermissions(req) {
return [];
}
// File-level permission entries; the base provider declares none.
async getCurrentFilePermissions(req) {
return [];
}
// Connections to expose on the login page; the base provider exposes none (null).
getLoginPageConnections() {
return null;
}
@@ -94,7 +111,7 @@ class OAuthProvider extends AuthProviderBase {
payload = jwt.decode(id_token);
}
logger.info({ payload }, 'User payload returned from OAUTH');
logger.info({ payload }, 'DBGM-00002 User payload returned from OAUTH');
const login =
process.env.OAUTH_LOGIN_FIELD && payload && payload[process.env.OAUTH_LOGIN_FIELD]

View File

@@ -1,233 +1,99 @@
const fs = require('fs-extra');
const _ = require('lodash');
const path = require('path');
const { appdir } = require('../utility/directories');
const { appdir, filesdir } = require('../utility/directories');
const socket = require('../utility/socket');
const connections = require('./connections');
const {
loadPermissionsFromRequest,
loadFilePermissionsFromRequest,
hasPermission,
getFilePermissionRole,
} = require('../utility/hasPermission');
module.exports = {
folders_meta: true,
async folders() {
const folders = await fs.readdir(appdir());
return [
...folders.map(name => ({
name,
})),
];
},
createFolder_meta: true,
async createFolder({ folder }) {
const name = await this.getNewAppFolder({ name: folder });
await fs.mkdir(path.join(appdir(), name));
socket.emitChanged('app-folders-changed');
this.emitChangedDbApp(folder);
return name;
},
files_meta: true,
async files({ folder }) {
if (!folder) return [];
const dir = path.join(appdir(), folder);
getAllApps_meta: true,
async getAllApps({}, req) {
const dir = path.join(filesdir(), 'apps');
if (!(await fs.exists(dir))) return [];
const files = await fs.readdir(dir);
function fileType(ext, type) {
return files
.filter(name => name.endsWith(ext))
.map(name => ({
name: name.slice(0, -ext.length),
label: path.parse(name.slice(0, -ext.length)).base,
type,
}));
}
return [
...fileType('.command.sql', 'command.sql'),
...fileType('.query.sql', 'query.sql'),
...fileType('.config.json', 'config.json'),
];
},
async emitChangedDbApp(folder) {
const used = await this.getUsedAppFolders();
if (used.includes(folder)) {
socket.emitChanged('used-apps-changed');
}
},
refreshFiles_meta: true,
async refreshFiles({ folder }) {
socket.emitChanged('app-files-changed', { app: folder });
},
refreshFolders_meta: true,
async refreshFolders() {
socket.emitChanged(`app-folders-changed`);
},
deleteFile_meta: true,
async deleteFile({ folder, file, fileType }) {
await fs.unlink(path.join(appdir(), folder, `${file}.${fileType}`));
socket.emitChanged('app-files-changed', { app: folder });
this.emitChangedDbApp(folder);
},
renameFile_meta: true,
async renameFile({ folder, file, newFile, fileType }) {
await fs.rename(
path.join(path.join(appdir(), folder), `${file}.${fileType}`),
path.join(path.join(appdir(), folder), `${newFile}.${fileType}`)
);
socket.emitChanged('app-files-changed', { app: folder });
this.emitChangedDbApp(folder);
},
renameFolder_meta: true,
async renameFolder({ folder, newFolder }) {
const uniqueName = await this.getNewAppFolder({ name: newFolder });
await fs.rename(path.join(appdir(), folder), path.join(appdir(), uniqueName));
socket.emitChanged(`app-folders-changed`);
},
deleteFolder_meta: true,
async deleteFolder({ folder }) {
if (!folder) throw new Error('Missing folder parameter');
await fs.rmdir(path.join(appdir(), folder), { recursive: true });
socket.emitChanged(`app-folders-changed`);
socket.emitChanged('app-files-changed', { app: folder });
socket.emitChanged('used-apps-changed');
},
async getNewAppFolder({ name }) {
if (!(await fs.exists(path.join(appdir(), name)))) return name;
let index = 2;
while (await fs.exists(path.join(appdir(), `${name}${index}`))) {
index += 1;
}
return `${name}${index}`;
},
getUsedAppFolders_meta: true,
async getUsedAppFolders() {
const list = await connections.list();
const apps = [];
for (const connection of list) {
for (const db of connection.databases || []) {
for (const key of _.keys(db || {})) {
if (key.startsWith('useApp:') && db[key]) {
apps.push(key.substring('useApp:'.length));
}
}
}
}
return _.intersection(_.uniq(apps), await fs.readdir(appdir()));
},
getUsedApps_meta: true,
async getUsedApps() {
const apps = await this.getUsedAppFolders();
const res = [];
const loadedPermissions = await loadPermissionsFromRequest(req);
const filePermissions = await loadFilePermissionsFromRequest(req);
for (const folder of apps) {
res.push(await this.loadApp({ folder }));
}
return res;
},
// getAppsForDb_meta: true,
// async getAppsForDb({ conid, database }) {
// const connection = await connections.get({ conid });
// if (!connection) return [];
// const db = (connection.databases || []).find(x => x.name == database);
// const apps = [];
// const res = [];
// if (db) {
// for (const key of _.keys(db || {})) {
// if (key.startsWith('useApp:') && db[key]) {
// apps.push(key.substring('useApp:'.length));
// }
// }
// }
// for (const folder of apps) {
// res.push(await this.loadApp({ folder }));
// }
// return res;
// },
loadApp_meta: true,
async loadApp({ folder }) {
const res = {
queries: [],
commands: [],
name: folder,
};
const dir = path.join(appdir(), folder);
if (await fs.exists(dir)) {
const files = await fs.readdir(dir);
async function processType(ext, field) {
for (const file of files) {
if (file.endsWith(ext)) {
res[field].push({
name: file.slice(0, -ext.length),
sql: await fs.readFile(path.join(dir, file), { encoding: 'utf-8' }),
});
}
}
for (const file of await fs.readdir(dir)) {
if (!hasPermission(`all-disk-files`, loadedPermissions)) {
const role = getFilePermissionRole('apps', file, filePermissions);
if (role == 'deny') continue;
}
const content = await fs.readFile(path.join(dir, file), { encoding: 'utf-8' });
const appJson = JSON.parse(content);
// const app = {
// appid: file,
// name: appJson.applicationName,
// usageRules: appJson.usageRules || [],
// icon: appJson.applicationIcon || 'img app',
// color: appJson.applicationColor,
// queries: Object.values(appJson.files || {})
// .filter(x => x.type == 'query')
// .map(x => ({
// name: x.label,
// sql: x.sql,
// })),
// commands: Object.values(appJson.files || {})
// .filter(x => x.type == 'command')
// .map(x => ({
// name: x.label,
// sql: x.sql,
// })),
// virtualReferences: appJson.virtualReferences,
// dictionaryDescriptions: appJson.dictionaryDescriptions,
// };
const app = {
...appJson,
appid: file,
};
await processType('.command.sql', 'commands');
await processType('.query.sql', 'queries');
res.push(app);
}
try {
res.virtualReferences = JSON.parse(
await fs.readFile(path.join(dir, 'virtual-references.config.json'), { encoding: 'utf-8' })
);
} catch (err) {
res.virtualReferences = [];
}
try {
res.dictionaryDescriptions = JSON.parse(
await fs.readFile(path.join(dir, 'dictionary-descriptions.config.json'), { encoding: 'utf-8' })
);
} catch (err) {
res.dictionaryDescriptions = [];
}
return res;
},
async saveConfigFile(appFolder, filename, filterFunc, newItem) {
const file = path.join(appdir(), appFolder, filename);
let json;
try {
json = JSON.parse(await fs.readFile(file, { encoding: 'utf-8' }));
} catch (err) {
json = [];
createAppFromDb_meta: true,
async createAppFromDb({ appName, server, database }, req) {
const appdir = path.join(filesdir(), 'apps');
if (!fs.existsSync(appdir)) {
await fs.mkdir(appdir);
}
if (filterFunc) {
json = json.filter(filterFunc);
const appId = _.kebabCase(appName);
let suffix = undefined;
while (fs.existsSync(path.join(appdir, `${appId}${suffix || ''}`))) {
if (!suffix) suffix = 2;
else suffix++;
}
const finalAppId = `${appId}${suffix || ''}`;
json = [...json, newItem];
const appJson = {
applicationName: appName,
usageRules: [
{
serverHostsList: server,
databaseNamesList: database,
},
],
};
await fs.writeFile(file, JSON.stringify(json, undefined, 2));
await fs.writeFile(path.join(appdir, `${finalAppId}`), JSON.stringify(appJson, undefined, 2));
socket.emitChanged('app-files-changed', { app: appFolder });
socket.emitChanged('used-apps-changed');
socket.emitChanged(`files-changed`, { folder: 'apps' });
return finalAppId;
},
saveVirtualReference_meta: true,
async saveVirtualReference({ appFolder, schemaName, pureName, refSchemaName, refTableName, columns }) {
await this.saveConfigFile(
appFolder,
'virtual-references.config.json',
async saveVirtualReference({ appid, schemaName, pureName, refSchemaName, refTableName, columns }) {
await this.saveConfigItem(
appid,
'virtualReferences',
columns.length == 1
? x =>
!(
@@ -245,14 +111,17 @@ module.exports = {
columns,
}
);
socket.emitChanged(`files-changed`, { folder: 'apps' });
return true;
},
saveDictionaryDescription_meta: true,
async saveDictionaryDescription({ appFolder, pureName, schemaName, expression, columns, delimiter }) {
await this.saveConfigFile(
appFolder,
'dictionary-descriptions.config.json',
async saveDictionaryDescription({ appid, pureName, schemaName, expression, columns, delimiter }) {
await this.saveConfigItem(
appid,
'dictionaryDescriptions',
x => !(x.schemaName == schemaName && x.pureName == pureName),
{
schemaName,
@@ -263,18 +132,271 @@ module.exports = {
}
);
socket.emitChanged(`files-changed`, { folder: 'apps' });
return true;
},
createConfigFile_meta: true,
async createConfigFile({ appFolder, fileName, content }) {
const file = path.join(appdir(), appFolder, fileName);
if (!(await fs.exists(file))) {
await fs.writeFile(file, JSON.stringify(content, undefined, 2));
socket.emitChanged('app-files-changed', { app: appFolder });
socket.emitChanged('used-apps-changed');
return true;
async saveConfigItem(appid, fieldName, filterFunc, newItem) {
const file = path.join(filesdir(), 'apps', appid);
const appJson = JSON.parse(await fs.readFile(file, { encoding: 'utf-8' }));
let json = appJson[fieldName] || [];
if (filterFunc) {
json = json.filter(filterFunc);
}
return false;
json = [...json, newItem];
await fs.writeFile(
file,
JSON.stringify(
{
...appJson,
[fieldName]: json,
},
undefined,
2
)
);
socket.emitChanged('files-changed', { folder: 'apps' });
},
// folders_meta: true,
// async folders() {
// const folders = await fs.readdir(appdir());
// return [
// ...folders.map(name => ({
// name,
// })),
// ];
// },
// createFolder_meta: true,
// async createFolder({ folder }) {
// const name = await this.getNewAppFolder({ name: folder });
// await fs.mkdir(path.join(appdir(), name));
// socket.emitChanged('app-folders-changed');
// this.emitChangedDbApp(folder);
// return name;
// },
// files_meta: true,
// async files({ folder }) {
// if (!folder) return [];
// const dir = path.join(appdir(), folder);
// if (!(await fs.exists(dir))) return [];
// const files = await fs.readdir(dir);
// function fileType(ext, type) {
// return files
// .filter(name => name.endsWith(ext))
// .map(name => ({
// name: name.slice(0, -ext.length),
// label: path.parse(name.slice(0, -ext.length)).base,
// type,
// }));
// }
// return [
// ...fileType('.command.sql', 'command.sql'),
// ...fileType('.query.sql', 'query.sql'),
// ...fileType('.config.json', 'config.json'),
// ];
// },
// async emitChangedDbApp(folder) {
// const used = await this.getUsedAppFolders();
// if (used.includes(folder)) {
// socket.emitChanged('used-apps-changed');
// }
// },
// refreshFiles_meta: true,
// async refreshFiles({ folder }) {
// socket.emitChanged('app-files-changed', { app: folder });
// },
// refreshFolders_meta: true,
// async refreshFolders() {
// socket.emitChanged(`app-folders-changed`);
// },
// deleteFile_meta: true,
// async deleteFile({ folder, file, fileType }) {
// await fs.unlink(path.join(appdir(), folder, `${file}.${fileType}`));
// socket.emitChanged('app-files-changed', { app: folder });
// this.emitChangedDbApp(folder);
// },
// renameFile_meta: true,
// async renameFile({ folder, file, newFile, fileType }) {
// await fs.rename(
// path.join(path.join(appdir(), folder), `${file}.${fileType}`),
// path.join(path.join(appdir(), folder), `${newFile}.${fileType}`)
// );
// socket.emitChanged('app-files-changed', { app: folder });
// this.emitChangedDbApp(folder);
// },
// renameFolder_meta: true,
// async renameFolder({ folder, newFolder }) {
// const uniqueName = await this.getNewAppFolder({ name: newFolder });
// await fs.rename(path.join(appdir(), folder), path.join(appdir(), uniqueName));
// socket.emitChanged(`app-folders-changed`);
// },
// deleteFolder_meta: true,
// async deleteFolder({ folder }) {
// if (!folder) throw new Error('Missing folder parameter');
// await fs.rmdir(path.join(appdir(), folder), { recursive: true });
// socket.emitChanged(`app-folders-changed`);
// socket.emitChanged('app-files-changed', { app: folder });
// socket.emitChanged('used-apps-changed');
// },
// async getNewAppFolder({ name }) {
// if (!(await fs.exists(path.join(appdir(), name)))) return name;
// let index = 2;
// while (await fs.exists(path.join(appdir(), `${name}${index}`))) {
// index += 1;
// }
// return `${name}${index}`;
// },
// getUsedAppFolders_meta: true,
// async getUsedAppFolders() {
// const list = await connections.list();
// const apps = [];
// for (const connection of list) {
// for (const db of connection.databases || []) {
// for (const key of _.keys(db || {})) {
// if (key.startsWith('useApp:') && db[key]) {
// apps.push(key.substring('useApp:'.length));
// }
// }
// }
// }
// return _.intersection(_.uniq(apps), await fs.readdir(appdir()));
// },
// // getAppsForDb_meta: true,
// // async getAppsForDb({ conid, database }) {
// // const connection = await connections.get({ conid });
// // if (!connection) return [];
// // const db = (connection.databases || []).find(x => x.name == database);
// // const apps = [];
// // const res = [];
// // if (db) {
// // for (const key of _.keys(db || {})) {
// // if (key.startsWith('useApp:') && db[key]) {
// // apps.push(key.substring('useApp:'.length));
// // }
// // }
// // }
// // for (const folder of apps) {
// // res.push(await this.loadApp({ folder }));
// // }
// // return res;
// // },
// loadApp_meta: true,
// async loadApp({ folder }) {
// const res = {
// queries: [],
// commands: [],
// name: folder,
// };
// const dir = path.join(appdir(), folder);
// if (await fs.exists(dir)) {
// const files = await fs.readdir(dir);
// async function processType(ext, field) {
// for (const file of files) {
// if (file.endsWith(ext)) {
// res[field].push({
// name: file.slice(0, -ext.length),
// sql: await fs.readFile(path.join(dir, file), { encoding: 'utf-8' }),
// });
// }
// }
// }
// await processType('.command.sql', 'commands');
// await processType('.query.sql', 'queries');
// }
// try {
// res.virtualReferences = JSON.parse(
// await fs.readFile(path.join(dir, 'virtual-references.config.json'), { encoding: 'utf-8' })
// );
// } catch (err) {
// res.virtualReferences = [];
// }
// try {
// res.dictionaryDescriptions = JSON.parse(
// await fs.readFile(path.join(dir, 'dictionary-descriptions.config.json'), { encoding: 'utf-8' })
// );
// } catch (err) {
// res.dictionaryDescriptions = [];
// }
// return res;
// },
// async saveConfigFile(appFolder, filename, filterFunc, newItem) {
// const file = path.join(appdir(), appFolder, filename);
// let json;
// try {
// json = JSON.parse(await fs.readFile(file, { encoding: 'utf-8' }));
// } catch (err) {
// json = [];
// }
// if (filterFunc) {
// json = json.filter(filterFunc);
// }
// json = [...json, newItem];
// await fs.writeFile(file, JSON.stringify(json, undefined, 2));
// socket.emitChanged('app-files-changed', { app: appFolder });
// socket.emitChanged('used-apps-changed');
// },
// saveDictionaryDescription_meta: true,
// async saveDictionaryDescription({ appFolder, pureName, schemaName, expression, columns, delimiter }) {
// await this.saveConfigFile(
// appFolder,
// 'dictionary-descriptions.config.json',
// x => !(x.schemaName == schemaName && x.pureName == pureName),
// {
// schemaName,
// pureName,
// expression,
// columns,
// delimiter,
// }
// );
// return true;
// },
// createConfigFile_meta: true,
// async createConfigFile({ appFolder, fileName, content }) {
// const file = path.join(appdir(), appFolder, fileName);
// if (!(await fs.exists(file))) {
// await fs.writeFile(file, JSON.stringify(content, undefined, 2));
// socket.emitChanged('app-files-changed', { app: appFolder });
// socket.emitChanged('used-apps-changed');
// return true;
// }
// return false;
// },
};

View File

@@ -102,7 +102,7 @@ module.exports = {
...fileType('.matview.sql', 'matview.sql'),
];
} catch (err) {
logger.error(extractErrorLogData(err), 'Error reading archive files');
logger.error(extractErrorLogData(err), 'DBGM-00001 Error reading archive files');
return [];
}
},

View File

@@ -13,8 +13,21 @@ const {
} = require('../auth/authProvider');
const storage = require('./storage');
const { decryptPasswordString } = require('../utility/crypting');
const { createDbGateIdentitySession, startCloudTokenChecking } = require('../utility/cloudIntf');
const {
createDbGateIdentitySession,
startCloudTokenChecking,
readCloudTokenHolder,
readCloudTestTokenHolder,
} = require('../utility/cloudIntf');
const socket = require('../utility/socket');
const { sendToAuditLog } = require('../utility/auditlog');
const {
isLoginLicensed,
LOGIN_LIMIT_ERROR,
markTokenAsLoggedIn,
markUserAsActive,
markLoginAsLoggedOut,
} = require('../utility/loginchecker');
const logger = getLogger('auth');
@@ -38,6 +51,7 @@ function authMiddleware(req, res, next) {
'/auth/oauth-token',
'/auth/login',
'/auth/redirect',
'/redirect',
'/stream',
'/storage/get-connections-for-login-page',
'/storage/set-admin-password',
@@ -54,6 +68,11 @@ function authMiddleware(req, res, next) {
// const isAdminPage = req.headers['x-is-admin-page'] == 'true';
if (process.env.SKIP_ALL_AUTH) {
// API is not authorized for basic auth
return next();
}
if (process.env.BASIC_AUTH) {
// API is not authorized for basic auth
return next();
@@ -72,6 +91,8 @@ function authMiddleware(req, res, next) {
try {
const decoded = jwt.verify(token, getTokenSecret());
req.user = decoded;
markUserAsActive(decoded.licenseUid, token);
return next();
} catch (err) {
if (skipAuth) {
@@ -79,7 +100,7 @@ function authMiddleware(req, res, next) {
return next();
}
logger.error(extractErrorLogData(err), 'Sending invalid token error');
logger.error(extractErrorLogData(err), 'DBGM-00098 Sending invalid token error');
return unauthorizedResponse(req, res, 'invalid token');
}
@@ -87,12 +108,12 @@ function authMiddleware(req, res, next) {
module.exports = {
oauthToken_meta: true,
async oauthToken(params) {
async oauthToken(params, req) {
const { amoid } = params;
return getAuthProviderById(amoid).oauthToken(params);
return getAuthProviderById(amoid).oauthToken(params, req);
},
login_meta: true,
async login(params) {
async login(params, req) {
const { amoid, login, password, isAdminPage } = params;
if (isAdminPage) {
@@ -102,31 +123,60 @@ module.exports = {
adminPassword = decryptPasswordString(adminConfig?.adminPassword);
}
if (adminPassword && adminPassword == password) {
if (!(await isLoginLicensed(req, `superadmin`))) {
return { error: LOGIN_LIMIT_ERROR };
}
sendToAuditLog(req, {
category: 'auth',
component: 'AuthController',
action: 'login',
event: 'login.admin',
severity: 'info',
message: 'Administration login successful',
});
const licenseUid = `superadmin`;
const accessToken = jwt.sign(
{
login: 'superadmin',
roleId: -3,
licenseUid,
amoid: 'superadmin',
},
getTokenSecret(),
{
expiresIn: getTokenLifetime(),
}
);
markTokenAsLoggedIn(licenseUid, accessToken);
return {
accessToken: jwt.sign(
{
login: 'superadmin',
permissions: await storage.loadSuperadminPermissions(),
roleId: -3,
},
getTokenSecret(),
{
expiresIn: getTokenLifetime(),
}
),
accessToken,
};
}
sendToAuditLog(req, {
category: 'auth',
component: 'AuthController',
action: 'loginFail',
event: 'login.adminFailed',
severity: 'warn',
message: 'Administraton login failed',
});
return { error: 'Login failed' };
}
return getAuthProviderById(amoid).login(login, password);
return getAuthProviderById(amoid).login(login, password, undefined, req);
},
getProviders_meta: true,
getProviders() {
return {
providers: getAuthProviders().map(x => x.toJson()),
providers: getAuthProviders()
.filter(x => !x.skipInList)
.map(x => x.toJson()),
default: getDefaultAuthProvider()?.amoid,
};
},
@@ -138,13 +188,39 @@ module.exports = {
},
createCloudLoginSession_meta: true,
async createCloudLoginSession({ client }) {
const res = await createDbGateIdentitySession(client);
async createCloudLoginSession({ client, redirectUri }) {
const res = await createDbGateIdentitySession(client, redirectUri);
startCloudTokenChecking(res.sid, tokenHolder => {
socket.emit('got-cloud-token', tokenHolder);
socket.emitChanged('cloud-content-changed');
socket.emit('cloud-content-updated');
});
return res;
},
cloudLoginRedirected_meta: true,
// Completes a cloud login after the browser redirect: exchanges the session id
// for the stored token holder.
async cloudLoginRedirected({ sid }) {
const tokenHolder = await readCloudTokenHolder(sid);
return tokenHolder;
},
cloudTestLogin_meta: true,
// Cloud login keyed directly by e-mail — presumably a test-only shortcut; confirm
// it is gated off in production configurations.
async cloudTestLogin({ email }) {
const tokenHolder = await readCloudTestTokenHolder(email);
return tokenHolder;
},
logoutAdmin_meta: true,
// Marks the fixed 'superadmin' license uid as logged out in the login checker.
async logoutAdmin() {
await markLoginAsLoggedOut('superadmin');
return true;
},
logoutUser_meta: true,
// Logs out the calling user; licenseUid comes from the JWT payload that the auth
// middleware attached to req.user (undefined when unauthenticated).
async logoutUser({}, req) {
await markLoginAsLoggedOut(req?.user?.licenseUid);
return true;
},
authMiddleware,
};

View File

@@ -8,6 +8,9 @@ const {
getCloudContent,
putCloudContent,
removeCloudCachedConnection,
getPromoWidgetData,
getPromoWidgetList,
getPromoWidgetPreview,
} = require('../utility/cloudIntf');
const connections = require('./connections');
const socket = require('../utility/socket');
@@ -16,6 +19,7 @@ const { getConnectionLabel, getLogger, extractErrorLogData } = require('dbgate-t
const logger = getLogger('cloud');
const _ = require('lodash');
const fs = require('fs-extra');
const { getAiGatewayServer } = require('../utility/authProxy');
module.exports = {
publicFiles_meta: true,
@@ -44,7 +48,7 @@ module.exports = {
const resp = await callCloudApiGet('content-list');
return resp;
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting cloud content list');
logger.error(extractErrorLogData(err), 'DBGM-00099 Error getting cloud content list');
return [];
}
@@ -58,7 +62,7 @@ module.exports = {
putContent_meta: true,
async putContent({ folid, cntid, content, name, type }) {
const resp = await putCloudContent(folid, cntid, content, name, type);
const resp = await putCloudContent(folid, cntid, content, name, type, {});
socket.emitChanged('cloud-content-changed');
socket.emit('cloud-content-updated');
return resp;
@@ -129,7 +133,11 @@ module.exports = {
undefined,
JSON.stringify(connToSend),
getConnectionLabel(conn),
'connection'
'connection',
{
connectionColor: conn.connectionColor,
connectionEngine: conn.engine,
}
);
return resp;
},
@@ -157,7 +165,11 @@ module.exports = {
cntid,
JSON.stringify(recryptedConn),
getConnectionLabel(recryptedConn),
'connection'
'connection',
{
connectionColor: connection.connectionColor,
connectionEngine: connection.engine,
}
);
if (resp.apiErrorMessage) {
@@ -188,7 +200,10 @@ module.exports = {
...conn,
displayName: getConnectionLabel(conn) + ' - copy',
};
const respPut = await putCloudContent(folid, undefined, JSON.stringify(conn2), conn2.displayName, 'connection');
const respPut = await putCloudContent(folid, undefined, JSON.stringify(conn2), conn2.displayName, 'connection', {
connectionColor: conn.connectionColor,
connectionEngine: conn.engine,
});
return respPut;
},
@@ -224,7 +239,7 @@ module.exports = {
saveFile_meta: true,
async saveFile({ folid, cntid, fileName, data, contentFolder, format }) {
const resp = await putCloudContent(folid, cntid, data, fileName, 'file', contentFolder, format);
const resp = await putCloudContent(folid, cntid, data, fileName, 'file', { contentFolder, contentType: format });
socket.emitChanged('cloud-content-changed');
socket.emit('cloud-content-updated');
return resp;
@@ -247,4 +262,57 @@ module.exports = {
await fs.writeFile(filePath, content);
return true;
},
folderUsers_meta: true,
async folderUsers({ folid }) {
const resp = await callCloudApiGet(`content-folders/users/${folid}`);
return resp;
},
setFolderUserRole_meta: true,
async setFolderUserRole({ folid, email, role }) {
const resp = await callCloudApiPost(`content-folders/set-user-role/${folid}`, { email, role });
return resp;
},
removeFolderUser_meta: true,
async removeFolderUser({ folid, email }) {
const resp = await callCloudApiPost(`content-folders/remove-user/${folid}`, { email });
return resp;
},
getAiGateway_meta: true,
async getAiGateway() {
return getAiGatewayServer();
},
premiumPromoWidget_meta: true,
async premiumPromoWidget() {
const data = await getPromoWidgetData();
if (data?.state != 'data') {
return null;
}
if (data.validTo && new Date().getTime() > new Date(data.validTo).getTime()) {
return null;
}
return data;
},
promoWidgetList_meta: true,
async promoWidgetList() {
return getPromoWidgetList();
},
promoWidgetPreview_meta: true,
async promoWidgetPreview({ campaign, variant }) {
return getPromoWidgetPreview(campaign, variant);
},
// chatStream_meta: {
// raw: true,
// method: 'post',
// },
// chatStream(req, res) {
// callChatStream(req.body, res);
// },
};

View File

@@ -3,7 +3,7 @@ const os = require('os');
const path = require('path');
const axios = require('axios');
const { datadir, getLogsFilePath } = require('../utility/directories');
const { hasPermission } = require('../utility/hasPermission');
const { hasPermission, loadPermissionsFromRequest } = require('../utility/hasPermission');
const socket = require('../utility/socket');
const _ = require('lodash');
const AsyncLock = require('async-lock');
@@ -16,7 +16,7 @@ const connections = require('../controllers/connections');
const { getAuthProviderFromReq } = require('../auth/authProvider');
const { checkLicense, checkLicenseKey } = require('../utility/checkLicense');
const storage = require('./storage');
const { getAuthProxyUrl } = require('../utility/authProxy');
const { getAuthProxyUrl, tryToGetRefreshedLicense } = require('../utility/authProxy');
const { getPublicHardwareFingerprint } = require('../utility/hardwareFingerprint');
const { extractErrorMessage } = require('dbgate-tools');
const {
@@ -29,6 +29,7 @@ const {
} = require('../utility/crypting');
const lock = new AsyncLock();
let cachedSettingsValue = null;
module.exports = {
// settingsValue: {},
@@ -45,7 +46,7 @@ module.exports = {
async get(_params, req) {
const authProvider = getAuthProviderFromReq(req);
const login = authProvider.getCurrentLogin(req);
const permissions = authProvider.getCurrentPermissions(req);
const permissions = await authProvider.getCurrentPermissions(req);
const isUserLoggedIn = authProvider.isUserLoggedIn(req);
const singleConid = authProvider.getSingleConnectionId(req);
@@ -108,6 +109,7 @@ module.exports = {
),
isAdminPasswordMissing,
isInvalidToken: req?.isInvalidToken,
skipAllAuth: !!process.env.SKIP_ALL_AUTH,
adminPasswordState: adminConfig?.adminPasswordState,
storageDatabase: process.env.STORAGE_DATABASE,
logsFilePath: getLogsFilePath(),
@@ -116,7 +118,9 @@ module.exports = {
processArgs.runE2eTests ? 'connections-e2etests.jsonl' : 'connections.jsonl'
),
supportCloudAutoUpgrade: !!process.env.CLOUD_UPGRADE_FILE,
allowPrivateCloud: platformInfo.isElectron || !!process.env.ALLOW_DBGATE_PRIVATE_CLOUD,
...currentVersion,
redirectToDbGateCloudLogin: !!process.env.REDIRECT_TO_DBGATE_CLOUD_LOGIN,
};
return configResult;
@@ -143,6 +147,13 @@ module.exports = {
return res;
},
async getCachedSettings() {
if (!cachedSettingsValue) {
cachedSettingsValue = await this.loadSettings();
}
return cachedSettingsValue;
},
deleteSettings_meta: true,
async deleteSettings() {
await fs.unlink(path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'));
@@ -181,6 +192,7 @@ module.exports = {
return {
...this.fillMissingSettings(JSON.parse(settingsText)),
'other.licenseKey': platformInfo.isElectron ? await this.loadLicenseKey() : undefined,
// 'other.licenseKey': await this.loadLicenseKey(),
};
}
} catch (err) {
@@ -198,21 +210,34 @@ module.exports = {
},
saveLicenseKey_meta: true,
async saveLicenseKey({ licenseKey }) {
const decoded = jwt.decode(licenseKey);
if (!decoded) {
return {
status: 'error',
errorMessage: 'Invalid license key',
};
}
async saveLicenseKey({ licenseKey, forceSave = false, tryToRenew = false }) {
if (!forceSave) {
const decoded = jwt.decode(licenseKey?.trim());
if (!decoded) {
return {
status: 'error',
errorMessage: 'Invalid license key',
};
}
const { exp } = decoded;
if (exp * 1000 < Date.now()) {
return {
status: 'error',
errorMessage: 'License key is expired',
};
const { exp } = decoded;
if (exp * 1000 < Date.now()) {
let renewed = false;
if (tryToRenew) {
const newLicenseKey = await tryToGetRefreshedLicense(licenseKey);
if (newLicenseKey.status == 'ok') {
licenseKey = newLicenseKey.token;
renewed = true;
}
}
if (!renewed) {
return {
status: 'error',
errorMessage: 'License key is expired',
};
}
}
}
try {
@@ -255,7 +280,9 @@ module.exports = {
updateSettings_meta: true,
async updateSettings(values, req) {
if (!hasPermission(`settings/change`, req)) return false;
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!hasPermission(`settings/change`, loadedPermissions)) return false;
cachedSettingsValue = null;
const res = await lock.acquire('settings', async () => {
const currentValue = await this.loadSettings();
@@ -264,7 +291,11 @@ module.exports = {
if (process.env.STORAGE_DATABASE) {
updated = {
...currentValue,
...values,
..._.mapValues(values, v => {
if (v === true) return 'true';
if (v === false) return 'false';
return v;
}),
};
await storage.writeConfig({
group: 'settings',
@@ -282,7 +313,7 @@ module.exports = {
// this.settingsValue = updated;
if (currentValue['other.licenseKey'] != values['other.licenseKey']) {
await this.saveLicenseKey({ licenseKey: values['other.licenseKey'] });
await this.saveLicenseKey({ licenseKey: values['other.licenseKey'], forceSave: true });
socket.emitChanged(`config-changed`);
}
}
@@ -302,7 +333,7 @@ module.exports = {
const resp = await axios.default.get('https://raw.githubusercontent.com/dbgate/dbgate/master/CHANGELOG.md');
return resp.data;
} catch (err) {
return ''
return '';
}
},
@@ -312,6 +343,16 @@ module.exports = {
return resp;
},
getNewLicense_meta: true,
async getNewLicense({ oldLicenseKey }) {
const newLicenseKey = await tryToGetRefreshedLicense(oldLicenseKey);
const res = await checkLicenseKey(newLicenseKey.token);
if (res.status == 'ok') {
res.licenseKey = newLicenseKey.token;
}
return res;
},
recryptDatabaseForExport(db) {
const encryptionKey = generateTransportEncryptionKey();
const transportEncryptor = createTransportEncryptor(encryptionKey);
@@ -352,7 +393,8 @@ module.exports = {
exportConnectionsAndSettings_meta: true,
async exportConnectionsAndSettings(_params, req) {
if (!hasPermission(`admin/config`, req)) {
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!hasPermission(`admin/config`, loadedPermissions)) {
throw new Error('Permission denied: admin/config');
}
@@ -376,7 +418,8 @@ module.exports = {
importConnectionsAndSettings_meta: true,
async importConnectionsAndSettings({ db }, req) {
if (!hasPermission(`admin/config`, req)) {
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!hasPermission(`admin/config`, loadedPermissions)) {
throw new Error('Permission denied: admin/config');
}

View File

@@ -14,7 +14,7 @@ const JsonLinesDatabase = require('../utility/JsonLinesDatabase');
const processArgs = require('../utility/processArgs');
const { safeJsonParse, getLogger, extractErrorLogData } = require('dbgate-tools');
const platformInfo = require('../utility/platformInfo');
const { connectionHasPermission, testConnectionPermission } = require('../utility/hasPermission');
const { connectionHasPermission, testConnectionPermission, loadPermissionsFromRequest } = require('../utility/hasPermission');
const pipeForkLogs = require('../utility/pipeForkLogs');
const requireEngineDriver = require('../utility/requireEngineDriver');
const { getAuthProviderById } = require('../auth/authProvider');
@@ -116,12 +116,12 @@ function getPortalCollections() {
}
}
logger.info({ connections: connections.map(pickSafeConnectionInfo) }, 'Using connections from ENV variables');
logger.info({ connections: connections.map(pickSafeConnectionInfo) }, 'DBGM-00005 Using connections from ENV variables');
const noengine = connections.filter(x => !x.engine);
if (noengine.length > 0) {
logger.warn(
{ connections: noengine.map(x => x._id) },
'Invalid CONNECTIONS configuration, missing ENGINE for connection ID'
'DBGM-00006 Invalid CONNECTIONS configuration, missing ENGINE for connection ID'
);
}
return connections;
@@ -227,6 +227,7 @@ module.exports = {
list_meta: true,
async list(_params, req) {
const storage = require('./storage');
const loadedPermissions = await loadPermissionsFromRequest(req);
const storageConnections = await storage.connections(req);
if (storageConnections) {
@@ -234,9 +235,9 @@ module.exports = {
}
if (portalConnections) {
if (platformInfo.allowShellConnection) return portalConnections;
return portalConnections.map(maskConnection).filter(x => connectionHasPermission(x, req));
return portalConnections.map(maskConnection).filter(x => connectionHasPermission(x, loadedPermissions));
}
return (await this.datastore.find()).filter(x => connectionHasPermission(x, req));
return (await this.datastore.find()).filter(x => connectionHasPermission(x, loadedPermissions));
},
async getUsedEngines() {
@@ -375,7 +376,7 @@ module.exports = {
update_meta: true,
async update({ _id, values }, req) {
if (portalConnections) return;
testConnectionPermission(_id, req);
await testConnectionPermission(_id, req);
const res = await this.datastore.patch(_id, values);
socket.emitChanged('connection-list-changed');
return res;
@@ -392,7 +393,7 @@ module.exports = {
updateDatabase_meta: true,
async updateDatabase({ conid, database, values }, req) {
if (portalConnections) return;
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
const conn = await this.datastore.get(conid);
let databases = (conn && conn.databases) || [];
if (databases.find(x => x.name == database)) {
@@ -410,7 +411,7 @@ module.exports = {
delete_meta: true,
async delete(connection, req) {
if (portalConnections) return;
testConnectionPermission(connection, req);
await testConnectionPermission(connection, req);
const res = await this.datastore.remove(connection._id);
socket.emitChanged('connection-list-changed');
return res;
@@ -452,7 +453,7 @@ module.exports = {
_id: '__model',
};
}
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
return this.getCore({ conid, mask: true });
},
@@ -530,40 +531,40 @@ module.exports = {
socket.emit('got-volatile-token', { strmid, savedConId: conid, volatileConId: volatile._id });
return { success: true };
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB token');
logger.error(extractErrorLogData(err), 'DBGM-00100 Error getting DB token');
return { error: err.message };
}
},
dbloginAuthToken_meta: true,
async dbloginAuthToken({ amoid, code, conid, redirectUri, sid }) {
async dbloginAuthToken({ amoid, code, conid, redirectUri, sid }, req) {
try {
const connection = await this.getCore({ conid });
const driver = requireEngineDriver(connection);
const accessToken = await driver.getAuthTokenFromCode(connection, { code, redirectUri, sid });
const volatile = await this.saveVolatile({ conid, accessToken });
const authProvider = getAuthProviderById(amoid);
const resp = await authProvider.login(null, null, { conid: volatile._id });
const resp = await authProvider.login(null, null, { conid: volatile._id }, req);
return resp;
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB token');
logger.error(extractErrorLogData(err), 'DBGM-00101 Error getting DB token');
return { error: err.message };
}
},
dbloginAuth_meta: true,
async dbloginAuth({ amoid, conid, user, password }) {
async dbloginAuth({ amoid, conid, user, password }, req) {
if (user || password) {
const saveResp = await this.saveVolatile({ conid, user, password, test: true });
if (saveResp.msgtype == 'connected') {
const loginResp = await getAuthProviderById(amoid).login(user, password, { conid: saveResp._id });
const loginResp = await getAuthProviderById(amoid).login(user, password, { conid: saveResp._id }, req);
return loginResp;
}
return saveResp;
}
// user and password is stored in connection, volatile connection is not needed
const loginResp = await getAuthProviderById(amoid).login(null, null, { conid });
const loginResp = await getAuthProviderById(amoid).login(null, null, { conid }, req);
return loginResp;
},

View File

@@ -29,7 +29,7 @@ const generateDeploySql = require('../shell/generateDeploySql');
const { createTwoFilesPatch } = require('diff');
const diff2htmlPage = require('../utility/diff2htmlPage');
const processArgs = require('../utility/processArgs');
const { testConnectionPermission } = require('../utility/hasPermission');
const { testConnectionPermission, hasPermission, loadPermissionsFromRequest, loadTablePermissionsFromRequest, getTablePermissionRole, loadDatabasePermissionsFromRequest, getDatabasePermissionRole, getTablePermissionRoleLevelIndex, testDatabaseRolePermission } = require('../utility/hasPermission');
const { MissingCredentialsError } = require('../utility/exceptions');
const pipeForkLogs = require('../utility/pipeForkLogs');
const crypto = require('crypto');
@@ -41,6 +41,7 @@ const { decryptConnection } = require('../utility/crypting');
const { getSshTunnel } = require('../utility/sshTunnel');
const sessions = require('./sessions');
const jsldata = require('./jsldata');
const { sendToAuditLog } = require('../utility/auditlog');
const logger = getLogger('databaseConnections');
@@ -75,7 +76,7 @@ module.exports = {
handle_error(conid, database, props) {
const { error } = props;
logger.error(`Error in database connection ${conid}, database ${database}: ${error}`);
logger.error(`DBGM-00102 Error in database connection ${conid}, database ${database}: ${error}`);
if (props?.msgid) {
const [resolve, reject] = this.requests[props?.msgid];
reject(error);
@@ -83,8 +84,11 @@ module.exports = {
}
},
handle_response(conid, database, { msgid, ...response }) {
const [resolve, reject] = this.requests[msgid];
const [resolve, reject, additionalData] = this.requests[msgid];
resolve(response);
if (additionalData?.auditLogger) {
additionalData?.auditLogger(response);
}
delete this.requests[msgid];
},
handle_status(conid, database, { status }) {
@@ -96,7 +100,7 @@ module.exports = {
socket.emitChanged(`database-status-changed`, { conid, database });
},
handle_ping() {},
handle_ping() { },
// session event handlers
@@ -140,7 +144,7 @@ module.exports = {
handle_copyStreamError(conid, database, { copyStreamError }) {
const { progressName } = copyStreamError;
const { runid } = progressName;
logger.error(`Error in database connection ${conid}, database ${database}: ${copyStreamError}`);
logger.error(`DBGM-00103 Error in database connection ${conid}, database ${database}: ${copyStreamError}`);
socket.emit(`runner-done-${runid}`);
},
@@ -189,7 +193,7 @@ module.exports = {
if (newOpened.disconnected) return;
const funcName = `handle_${msgtype}`;
if (!this[funcName]) {
logger.error(`Unknown message type ${msgtype} from subprocess databaseConnectionProcess`);
logger.error(`DBGM-00104 Unknown message type ${msgtype} from subprocess databaseConnectionProcess`);
return;
}
@@ -200,7 +204,7 @@ module.exports = {
this.close(conid, database, false);
});
subprocess.on('error', err => {
logger.error(extractErrorLogData(err), 'Error in database connection subprocess');
logger.error(extractErrorLogData(err), 'DBGM-00114 Error in database connection subprocess');
if (newOpened.disconnected) return;
this.close(conid, database, false);
});
@@ -215,14 +219,14 @@ module.exports = {
},
/** @param {import('dbgate-types').OpenedDatabaseConnection} conn */
sendRequest(conn, message) {
sendRequest(conn, message, additionalData = {}) {
const msgid = crypto.randomUUID();
const promise = new Promise((resolve, reject) => {
this.requests[msgid] = [resolve, reject];
this.requests[msgid] = [resolve, reject, additionalData];
try {
conn.subprocess.send({ msgid, ...message });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error sending request do process');
logger.error(extractErrorLogData(err), 'DBGM-00115 Error sending request do process');
this.close(conn.conid, conn.database);
}
});
@@ -231,8 +235,8 @@ module.exports = {
queryData_meta: true,
async queryData({ conid, database, sql }, req) {
testConnectionPermission(conid, req);
logger.info({ conid, database, sql }, 'Processing query');
await testConnectionPermission(conid, req);
logger.info({ conid, database, sql }, 'DBGM-00007 Processing query');
const opened = await this.ensureOpened(conid, database);
// if (opened && opened.status && opened.status.name == 'error') {
// return opened.status;
@@ -242,41 +246,118 @@ module.exports = {
},
sqlSelect_meta: true,
async sqlSelect({ conid, database, select }, req) {
testConnectionPermission(conid, req);
async sqlSelect({ conid, database, select, auditLogSessionGroup }, req) {
await testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid, database);
const res = await this.sendRequest(opened, { msgtype: 'sqlSelect', select });
const res = await this.sendRequest(
opened,
{ msgtype: 'sqlSelect', select },
{
auditLogger:
auditLogSessionGroup && select?.from?.name?.pureName
? response => {
sendToAuditLog(req, {
category: 'dbop',
component: 'DatabaseConnectionsController',
event: 'sql.select',
action: 'select',
severity: 'info',
conid,
database,
schemaName: select?.from?.name?.schemaName,
pureName: select?.from?.name?.pureName,
sumint1: response?.rows?.length,
sessionParam: `${conid}::${database}::${select?.from?.name?.schemaName || '0'}::${select?.from?.name?.pureName
}`,
sessionGroup: auditLogSessionGroup,
message: `Loaded table data from ${select?.from?.name?.pureName}`,
});
}
: null,
}
);
return res;
},
runScript_meta: true,
async runScript({ conid, database, sql, useTransaction }, req) {
testConnectionPermission(conid, req);
logger.info({ conid, database, sql }, 'Processing script');
async runScript({ conid, database, sql, useTransaction, logMessage }, req) {
const loadedPermissions = await loadPermissionsFromRequest(req);
await testConnectionPermission(conid, req, loadedPermissions);
await testDatabaseRolePermission(conid, database, 'run_script', req);
logger.info({ conid, database, sql }, 'DBGM-00008 Processing script');
const opened = await this.ensureOpened(conid, database);
sendToAuditLog(req, {
category: 'dbop',
component: 'DatabaseConnectionsController',
event: 'sql.runscript',
action: 'runscript',
severity: 'info',
conid,
database,
detail: sql,
message: logMessage || `Running SQL script`,
});
const res = await this.sendRequest(opened, { msgtype: 'runScript', sql, useTransaction });
return res;
},
runOperation_meta: true,
async runOperation({ conid, database, operation, useTransaction }, req) {
testConnectionPermission(conid, req);
logger.info({ conid, database, operation }, 'Processing operation');
await testConnectionPermission(conid, req);
logger.info({ conid, database, operation }, 'DBGM-00009 Processing operation');
sendToAuditLog(req, {
category: 'dbop',
component: 'DatabaseConnectionsController',
event: 'sql.runoperation',
action: operation.type,
severity: 'info',
conid,
database,
detail: operation,
message: `Running DB operation: ${operation.type}`,
});
const opened = await this.ensureOpened(conid, database);
const res = await this.sendRequest(opened, { msgtype: 'runOperation', operation, useTransaction });
return res;
},
collectionData_meta: true,
async collectionData({ conid, database, options }, req) {
testConnectionPermission(conid, req);
async collectionData({ conid, database, options, auditLogSessionGroup }, req) {
await testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid, database);
const res = await this.sendRequest(opened, { msgtype: 'collectionData', options });
const res = await this.sendRequest(
opened,
{ msgtype: 'collectionData', options },
{
auditLogger:
auditLogSessionGroup && options?.pureName
? response => {
sendToAuditLog(req, {
category: 'dbop',
component: 'DatabaseConnectionsController',
event: 'nosql.collectionData',
action: 'select',
severity: 'info',
conid,
database,
pureName: options?.pureName,
sumint1: response?.result?.rows?.length,
sessionParam: `${conid}::${database}::${options?.pureName}`,
sessionGroup: auditLogSessionGroup,
message: `Loaded collection data ${options?.pureName}`,
});
}
: null,
}
);
return res.result || null;
},
async loadDataCore(msgtype, { conid, database, ...args }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid, database);
const res = await this.sendRequest(opened, { msgtype, ...args });
if (res.errorMessage) {
@@ -291,7 +372,7 @@ module.exports = {
schemaList_meta: true,
async schemaList({ conid, database }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
return this.loadDataCore('schemaList', { conid, database });
},
@@ -303,43 +384,43 @@ module.exports = {
loadKeys_meta: true,
async loadKeys({ conid, database, root, filter, limit }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
return this.loadDataCore('loadKeys', { conid, database, root, filter, limit });
},
scanKeys_meta: true,
async scanKeys({ conid, database, root, pattern, cursor, count }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
return this.loadDataCore('scanKeys', { conid, database, root, pattern, cursor, count });
},
exportKeys_meta: true,
async exportKeys({ conid, database, options }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
return this.loadDataCore('exportKeys', { conid, database, options });
},
loadKeyInfo_meta: true,
async loadKeyInfo({ conid, database, key }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
return this.loadDataCore('loadKeyInfo', { conid, database, key });
},
loadKeyTableRange_meta: true,
async loadKeyTableRange({ conid, database, key, cursor, count }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
return this.loadDataCore('loadKeyTableRange', { conid, database, key, cursor, count });
},
loadFieldValues_meta: true,
async loadFieldValues({ conid, database, schemaName, pureName, field, search, dataType }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
return this.loadDataCore('loadFieldValues', { conid, database, schemaName, pureName, field, search, dataType });
},
callMethod_meta: true,
async callMethod({ conid, database, method, args }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
return this.loadDataCore('callMethod', { conid, database, method, args });
// const opened = await this.ensureOpened(conid, database);
@@ -352,7 +433,8 @@ module.exports = {
updateCollection_meta: true,
async updateCollection({ conid, database, changeSet }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid, database);
const res = await this.sendRequest(opened, { msgtype: 'updateCollection', changeSet });
if (res.errorMessage) {
@@ -363,6 +445,36 @@ module.exports = {
return res.result || null;
},
saveTableData_meta: true,
async saveTableData({ conid, database, changeSet }, req) {
await testConnectionPermission(conid, req);
const databasePermissions = await loadDatabasePermissionsFromRequest(req);
const tablePermissions = await loadTablePermissionsFromRequest(req);
const fieldsAndRoles = [
[changeSet.inserts, 'create_update_delete'],
[changeSet.deletes, 'create_update_delete'],
[changeSet.updates, 'update_only'],
]
for (const [operations, requiredRole] of fieldsAndRoles) {
for (const operation of operations) {
const role = getTablePermissionRole(conid, database, 'tables', operation.schemaName, operation.pureName, tablePermissions, databasePermissions);
if (getTablePermissionRoleLevelIndex(role) < getTablePermissionRoleLevelIndex(requiredRole)) {
throw new Error('DBGM-00262 Permission not granted');
}
}
}
const opened = await this.ensureOpened(conid, database);
const res = await this.sendRequest(opened, { msgtype: 'saveTableData', changeSet });
if (res.errorMessage) {
return {
errorMessage: res.errorMessage,
};
}
return res.result || null;
},
status_meta: true,
async status({ conid, database }, req) {
if (!conid) {
@@ -371,7 +483,7 @@ module.exports = {
message: 'No connection',
};
}
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
const existing = this.opened.find(x => x.conid == conid && x.database == database);
if (existing) {
return {
@@ -394,14 +506,14 @@ module.exports = {
ping_meta: true,
async ping({ conid, database }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
let existing = this.opened.find(x => x.conid == conid && x.database == database);
if (existing) {
try {
existing.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error pinging DB connection');
logger.error(extractErrorLogData(err), 'DBGM-00116 Error pinging DB connection');
this.close(conid, database);
return {
@@ -422,7 +534,7 @@ module.exports = {
refresh_meta: true,
async refresh({ conid, database, keepOpen }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
if (!keepOpen) this.close(conid, database);
await this.ensureOpened(conid, database);
@@ -436,7 +548,7 @@ module.exports = {
return { status: 'ok' };
}
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
const conn = await this.ensureOpened(conid, database);
conn.subprocess.send({ msgtype: 'syncModel', isFullRefresh });
return { status: 'ok' };
@@ -450,7 +562,7 @@ module.exports = {
try {
existing.subprocess.kill();
} catch (err) {
logger.error(extractErrorLogData(err), 'Error killing subprocess');
logger.error(extractErrorLogData(err), 'DBGM-00117 Error killing subprocess');
}
}
this.opened = this.opened.filter(x => x.conid != conid || x.database != database);
@@ -473,7 +585,7 @@ module.exports = {
disconnect_meta: true,
async disconnect({ conid, database }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
await this.close(conid, database, true);
return { status: 'ok' };
},
@@ -483,8 +595,9 @@ module.exports = {
if (!conid || !database) {
return {};
}
const loadedPermissions = await loadPermissionsFromRequest(req);
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req, loadedPermissions);
if (conid == '__model') {
const model = await importDbModel(database);
const trans = await loadModelTransform(modelTransFile);
@@ -492,6 +605,52 @@ module.exports = {
}
const opened = await this.ensureOpened(conid, database);
sendToAuditLog(req, {
category: 'dbop',
component: 'DatabaseConnectionsController',
action: 'structure',
event: 'dbStructure.get',
severity: 'info',
conid,
database,
sessionParam: `${conid}::${database}`,
sessionGroup: 'getStructure',
message: `Loaded database structure for ${database}`,
});
if (process.env.STORAGE_DATABASE && !hasPermission(`all-tables`, loadedPermissions)) {
// filter databases by permissions
const tablePermissions = await loadTablePermissionsFromRequest(req);
const databasePermissions = await loadDatabasePermissionsFromRequest(req);
const databasePermissionRole = getDatabasePermissionRole(conid, database, databasePermissions);
function applyTablePermissionRole(list, objectTypeField) {
const res = [];
for (const item of list ?? []) {
const tablePermissionRole = getTablePermissionRole(conid, database, objectTypeField, item.schemaName, item.pureName, tablePermissions, databasePermissionRole);
if (tablePermissionRole != 'deny') {
res.push({
...item,
tablePermissionRole,
});
}
}
return res;
}
const res = {
...opened.structure,
tables: applyTablePermissionRole(opened.structure.tables, 'tables'),
views: applyTablePermissionRole(opened.structure.views, 'views'),
procedures: applyTablePermissionRole(opened.structure.procedures, 'procedures'),
functions: applyTablePermissionRole(opened.structure.functions, 'functions'),
triggers: applyTablePermissionRole(opened.structure.triggers, 'triggers'),
collections: applyTablePermissionRole(opened.structure.collections, 'collections'),
}
return res;
}
return opened.structure;
// const existing = this.opened.find((x) => x.conid == conid && x.database == database);
// if (existing) return existing.status;
@@ -506,7 +665,7 @@ module.exports = {
if (!conid) {
return null;
}
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
if (!conid) return null;
const opened = await this.ensureOpened(conid, database);
return opened.serverVersion || null;
@@ -514,7 +673,7 @@ module.exports = {
sqlPreview_meta: true,
async sqlPreview({ conid, database, objects, options }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
// wait for structure
await this.structure({ conid, database });
@@ -525,7 +684,7 @@ module.exports = {
exportModel_meta: true,
async exportModel({ conid, database, outputFolder, schema }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
const realFolder = outputFolder.startsWith('archive:')
? resolveArchiveFolder(outputFolder.substring('archive:'.length))
@@ -543,7 +702,7 @@ module.exports = {
exportModelSql_meta: true,
async exportModelSql({ conid, database, outputFolder, outputFile, schema }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
const connection = await connections.getCore({ conid });
const driver = requireEngineDriver(connection);
@@ -557,7 +716,7 @@ module.exports = {
generateDeploySql_meta: true,
async generateDeploySql({ conid, database, archiveFolder }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid, database);
const res = await this.sendRequest(opened, {
msgtype: 'generateDeploySql',
@@ -722,17 +881,17 @@ module.exports = {
return {
...(command == 'backup'
? driver.backupDatabaseCommand(
connection,
{ outputFile, database, options, selectedTables, skippedTables, argsFormat },
// @ts-ignore
externalTools
)
connection,
{ outputFile, database, options, selectedTables, skippedTables, argsFormat },
// @ts-ignore
externalTools
)
: driver.restoreDatabaseCommand(
connection,
{ inputFile, database, options, argsFormat },
// @ts-ignore
externalTools
)),
connection,
{ inputFile, database, options, argsFormat },
// @ts-ignore
externalTools
)),
transformMessage: driver.transformNativeCommandMessage
? message => driver.transformNativeCommandMessage(message, command)
: null,
@@ -829,8 +988,8 @@ module.exports = {
executeSessionQuery_meta: true,
async executeSessionQuery({ sesid, conid, database, sql }, req) {
testConnectionPermission(conid, req);
logger.info({ sesid, sql }, 'Processing query');
await testConnectionPermission(conid, req);
logger.info({ sesid, sql }, 'DBGM-00010 Processing query');
sessions.dispatchMessage(sesid, 'Query execution started');
const opened = await this.ensureOpened(conid, database);
@@ -841,7 +1000,7 @@ module.exports = {
evalJsonScript_meta: true,
async evalJsonScript({ conid, database, script, runid }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid, database);
opened.subprocess.send({ msgtype: 'evalJsonScript', script, runid });

View File

@@ -1,9 +1,14 @@
const fs = require('fs-extra');
const path = require('path');
const crypto = require('crypto');
const { filesdir, archivedir, resolveArchiveFolder, uploadsdir, appdir } = require('../utility/directories');
const { filesdir, archivedir, resolveArchiveFolder, uploadsdir, appdir, jsldir } = require('../utility/directories');
const getChartExport = require('../utility/getChartExport');
const { hasPermission } = require('../utility/hasPermission');
const {
hasPermission,
loadPermissionsFromRequest,
loadFilePermissionsFromRequest,
getFilePermissionRole,
} = require('../utility/hasPermission');
const socket = require('../utility/socket');
const scheduler = require('./scheduler');
const getDiagramExport = require('../utility/getDiagramExport');
@@ -11,6 +16,9 @@ const apps = require('./apps');
const getMapExport = require('../utility/getMapExport');
const dbgateApi = require('../shell');
const { getLogger } = require('dbgate-tools');
const platformInfo = require('../utility/platformInfo');
const { checkSecureFilePathsWithoutDirectory, checkSecureDirectories } = require('../utility/security');
const { copyAppLogsIntoFile, getRecentAppLogRecords } = require('../utility/appLogStore');
const logger = getLogger('files');
function serialize(format, data) {
@@ -28,7 +36,8 @@ function deserialize(format, text) {
module.exports = {
list_meta: true,
async list({ folder }, req) {
if (!hasPermission(`files/${folder}/read`, req)) return [];
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!hasPermission(`files/${folder}/read`, loadedPermissions)) return [];
const dir = path.join(filesdir(), folder);
if (!(await fs.exists(dir))) return [];
const files = (await fs.readdir(dir)).map(file => ({ folder, file }));
@@ -37,10 +46,11 @@ module.exports = {
listAll_meta: true,
async listAll(_params, req) {
const loadedPermissions = await loadPermissionsFromRequest(req);
const folders = await fs.readdir(filesdir());
const res = [];
for (const folder of folders) {
if (!hasPermission(`files/${folder}/read`, req)) continue;
if (!hasPermission(`files/${folder}/read`, loadedPermissions)) continue;
const dir = path.join(filesdir(), folder);
const files = (await fs.readdir(dir)).map(file => ({ folder, file }));
res.push(...files);
@@ -50,7 +60,11 @@ module.exports = {
delete_meta: true,
async delete({ folder, file }, req) {
if (!hasPermission(`files/${folder}/write`, req)) return false;
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!hasPermission(`files/${folder}/write`, loadedPermissions)) return false;
if (!checkSecureFilePathsWithoutDirectory(folder, file)) {
return false;
}
await fs.unlink(path.join(filesdir(), folder, file));
socket.emitChanged(`files-changed`, { folder });
socket.emitChanged(`all-files-changed`);
@@ -59,7 +73,11 @@ module.exports = {
rename_meta: true,
async rename({ folder, file, newFile }, req) {
if (!hasPermission(`files/${folder}/write`, req)) return false;
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!hasPermission(`files/${folder}/write`, loadedPermissions)) return false;
if (!checkSecureFilePathsWithoutDirectory(folder, file, newFile)) {
return false;
}
await fs.rename(path.join(filesdir(), folder, file), path.join(filesdir(), folder, newFile));
socket.emitChanged(`files-changed`, { folder });
socket.emitChanged(`all-files-changed`);
@@ -77,7 +95,11 @@ module.exports = {
copy_meta: true,
async copy({ folder, file, newFile }, req) {
if (!hasPermission(`files/${folder}/write`, req)) return false;
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!checkSecureFilePathsWithoutDirectory(folder, file, newFile)) {
return false;
}
if (!hasPermission(`files/${folder}/write`, loadedPermissions)) return false;
await fs.copyFile(path.join(filesdir(), folder, file), path.join(filesdir(), folder, newFile));
socket.emitChanged(`files-changed`, { folder });
socket.emitChanged(`all-files-changed`);
@@ -86,6 +108,10 @@ module.exports = {
load_meta: true,
async load({ folder, file, format }, req) {
if (!checkSecureFilePathsWithoutDirectory(folder, file)) {
return false;
}
if (folder.startsWith('archive:')) {
const text = await fs.readFile(path.join(resolveArchiveFolder(folder.substring('archive:'.length)), file), {
encoding: 'utf-8',
@@ -97,7 +123,8 @@ module.exports = {
});
return deserialize(format, text);
} else {
if (!hasPermission(`files/${folder}/read`, req)) return null;
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!hasPermission(`files/${folder}/read`, loadedPermissions)) return null;
const text = await fs.readFile(path.join(filesdir(), folder, file), { encoding: 'utf-8' });
return deserialize(format, text);
}
@@ -105,20 +132,29 @@ module.exports = {
loadFrom_meta: true,
async loadFrom({ filePath, format }, req) {
if (!platformInfo.isElectron) {
// this is available only in electron app
return false;
}
const text = await fs.readFile(filePath, { encoding: 'utf-8' });
return deserialize(format, text);
},
save_meta: true,
async save({ folder, file, data, format }, req) {
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!checkSecureFilePathsWithoutDirectory(folder, file)) {
return false;
}
if (folder.startsWith('archive:')) {
if (!hasPermission(`archive/write`, req)) return false;
if (!hasPermission(`archive/write`, loadedPermissions)) return false;
const dir = resolveArchiveFolder(folder.substring('archive:'.length));
await fs.writeFile(path.join(dir, file), serialize(format, data));
socket.emitChanged(`archive-files-changed`, { folder: folder.substring('archive:'.length) });
return true;
} else if (folder.startsWith('app:')) {
if (!hasPermission(`apps/write`, req)) return false;
if (!hasPermission(`apps/write`, loadedPermissions)) return false;
const app = folder.substring('app:'.length);
await fs.writeFile(path.join(appdir(), app, file), serialize(format, data));
socket.emitChanged(`app-files-changed`, { app });
@@ -126,7 +162,7 @@ module.exports = {
apps.emitChangedDbApp(folder);
return true;
} else {
if (!hasPermission(`files/${folder}/write`, req)) return false;
if (!hasPermission(`files/${folder}/write`, loadedPermissions)) return false;
const dir = path.join(filesdir(), folder);
if (!(await fs.exists(dir))) {
await fs.mkdir(dir);
@@ -143,12 +179,18 @@ module.exports = {
saveAs_meta: true,
async saveAs({ filePath, data, format }) {
if (!platformInfo.isElectron) {
// this is available only in electron app
return false;
}
await fs.writeFile(filePath, serialize(format, data));
},
favorites_meta: true,
async favorites(_params, req) {
if (!hasPermission(`files/favorites/read`, req)) return [];
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!hasPermission(`files/favorites/read`, loadedPermissions)) return [];
const dir = path.join(filesdir(), 'favorites');
if (!(await fs.exists(dir))) return [];
const files = await fs.readdir(dir);
@@ -175,10 +217,10 @@ module.exports = {
},
exportChart_meta: true,
async exportChart({ filePath, title, config, image }) {
async exportChart({ filePath, title, config, image, plugins }) {
const fileName = path.parse(filePath).base;
const imageFile = fileName.replace('.html', '-preview.png');
const html = getChartExport(title, config, imageFile);
const html = getChartExport(title, config, imageFile, plugins);
await fs.writeFile(filePath, html);
if (image) {
const index = image.indexOf('base64,');
@@ -205,16 +247,17 @@ module.exports = {
getFileRealPath_meta: true,
async getFileRealPath({ folder, file }, req) {
const loadedPermissions = await loadPermissionsFromRequest(req);
if (folder.startsWith('archive:')) {
if (!hasPermission(`archive/write`, req)) return false;
if (!hasPermission(`archive/write`, loadedPermissions)) return false;
const dir = resolveArchiveFolder(folder.substring('archive:'.length));
return path.join(dir, file);
} else if (folder.startsWith('app:')) {
if (!hasPermission(`apps/write`, req)) return false;
if (!hasPermission(`apps/write`, loadedPermissions)) return false;
const app = folder.substring('app:'.length);
return path.join(appdir(), app, file);
} else {
if (!hasPermission(`files/${folder}/write`, req)) return false;
if (!hasPermission(`files/${folder}/write`, loadedPermissions)) return false;
const dir = path.join(filesdir(), folder);
if (!(await fs.exists(dir))) {
await fs.mkdir(dir);
@@ -225,7 +268,7 @@ module.exports = {
createZipFromJsons_meta: true,
async createZipFromJsons({ db, filePath }) {
logger.info(`Creating zip file from JSONS ${filePath}`);
logger.info(`DBGM-00011 Creating zip file from JSONS ${filePath}`);
await dbgateApi.zipJsonLinesData(db, filePath);
return true;
},
@@ -251,7 +294,7 @@ module.exports = {
const FOLDERS = ['sql', 'sqlite'];
for (const folder of FOLDERS) {
if (fileName.toLowerCase().endsWith('.' + folder)) {
logger.info(`Saving ${folder} file ${fileName}`);
logger.info(`DBGM-00012 Saving ${folder} file ${fileName}`);
await fs.copyFile(filePath, path.join(filesdir(), folder, fileName));
socket.emitChanged(`files-changed`, { folder: folder });
@@ -263,19 +306,44 @@ module.exports = {
}
}
throw new Error(`${fileName} doesn't have one of supported extensions: ${FOLDERS.join(', ')}`);
throw new Error(`DBGM-00013 ${fileName} doesn't have one of supported extensions: ${FOLDERS.join(', ')}`);
},
exportFile_meta: true,
async exportFile({ folder, file, filePath }, req) {
if (!hasPermission(`files/${folder}/read`, req)) return false;
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!hasPermission(`files/${folder}/read`, loadedPermissions)) return false;
await fs.copyFile(path.join(filesdir(), folder, file), filePath);
return true;
},
simpleCopy_meta: true,
async simpleCopy({ sourceFilePath, targetFilePath }, req) {
if (!platformInfo.isElectron) {
if (!checkSecureDirectories(sourceFilePath, targetFilePath)) {
return false;
}
}
await fs.copyFile(sourceFilePath, targetFilePath);
return true;
},
fillAppLogs_meta: true,
async fillAppLogs({ dateFrom = 0, dateTo = new Date().getTime(), prepareForExport = false }) {
const jslid = crypto.randomUUID();
const outputFile = path.join(jsldir(), `${jslid}.jsonl`);
await copyAppLogsIntoFile(dateFrom, dateTo, outputFile, prepareForExport);
return {
jslid,
};
},
getRecentAppLog_meta: true,
getRecentAppLog({ limit }) {
const res = getRecentAppLogRecords();
if (limit) {
return res.slice(-limit);
}
return res;
},
};

View File

@@ -313,19 +313,9 @@ module.exports = {
return true;
});
processor.finalize();
return processor.charts;
},
detectChartColumns_meta: true,
async detectChartColumns({ jslid }) {
const datastore = new JsonLinesDatastore(getJslFileName(jslid));
const processor = new ChartProcessor();
processor.autoDetectCharts = false;
await datastore.enumRows(row => {
processor.addRow(row);
return true;
});
processor.finalize();
return processor.availableColumns;
return {
charts: processor.charts,
columns: processor.availableColumns,
};
},
};

View File

@@ -7,7 +7,7 @@ const socket = require('../utility/socket');
const compareVersions = require('compare-versions');
const requirePlugin = require('../shell/requirePlugin');
const downloadPackage = require('../utility/downloadPackage');
const { hasPermission } = require('../utility/hasPermission');
const { hasPermission, loadPermissionsFromRequest } = require('../utility/hasPermission');
const _ = require('lodash');
const packagedPluginsContent = require('../packagedPluginsContent');
@@ -118,7 +118,8 @@ module.exports = {
install_meta: true,
async install({ packageName }, req) {
if (!hasPermission(`plugins/install`, req)) return;
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!hasPermission(`plugins/install`, loadedPermissions)) return;
const dir = path.join(pluginsdir(), packageName);
// @ts-ignore
if (!(await fs.exists(dir))) {
@@ -132,7 +133,8 @@ module.exports = {
uninstall_meta: true,
async uninstall({ packageName }, req) {
if (!hasPermission(`plugins/install`, req)) return;
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!hasPermission(`plugins/install`, loadedPermissions)) return;
const dir = path.join(pluginsdir(), packageName);
await fs.rmdir(dir, { recursive: true });
socket.emitChanged(`installed-plugins-changed`);
@@ -143,7 +145,8 @@ module.exports = {
upgrade_meta: true,
async upgrade({ packageName }, req) {
if (!hasPermission(`plugins/install`, req)) return;
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!hasPermission(`plugins/install`, loadedPermissions)) return;
const dir = path.join(pluginsdir(), packageName);
// @ts-ignore
if (await fs.exists(dir)) {

View File

@@ -19,6 +19,9 @@ const {
const { handleProcessCommunication } = require('../utility/processComm');
const processArgs = require('../utility/processArgs');
const platformInfo = require('../utility/platformInfo');
const { checkSecureDirectories, checkSecureDirectoriesInScript } = require('../utility/security');
const { sendToAuditLog, logJsonRunnerScript } = require('../utility/auditlog');
const { testStandardPermission } = require('../utility/hasPermission');
const logger = getLogger('runners');
function extractPlugins(script) {
@@ -46,7 +49,7 @@ require=null;
async function run() {
${script}
await dbgateApi.finalizer.run();
logger.info('Finished job script');
logger.info('DBGM-00014 Finished job script');
}
dbgateApi.runScript(run);
`;
@@ -72,7 +75,8 @@ module.exports = {
dispatchMessage(runid, message) {
if (message) {
if (_.isPlainObject(message)) logger.log(message);
if (_.isPlainObject(message))
logger.log({ ...message, msg: message.msg || message.message || '', message: undefined });
else logger.info(message);
const toEmit = _.isPlainObject(message)
@@ -130,7 +134,7 @@ module.exports = {
const pluginNames = extractPlugins(scriptText);
// console.log('********************** SCRIPT TEXT **********************');
// console.log(scriptText);
logger.info({ scriptFile }, 'Running script');
logger.info({ scriptFile }, 'DBGM-00015 Running script');
// const subprocess = fork(scriptFile, ['--checkParent', '--max-old-space-size=8192'], {
const subprocess = fork(
scriptFile,
@@ -169,7 +173,7 @@ module.exports = {
subprocess.on('exit', code => {
// console.log('... EXITED', code);
this.rejectRequest(runid, { message: 'No data returned, maybe input data source is too big' });
logger.info({ code, pid: subprocess.pid }, 'Exited process');
logger.info({ code, pid: subprocess.pid }, 'DBGM-00016 Exited process');
socket.emit(`runner-done-${runid}`, code);
this.opened = this.opened.filter(x => x.runid != runid);
});
@@ -220,7 +224,7 @@ module.exports = {
subprocess.on('exit', code => {
console.log('... EXITED', code);
logger.info({ code, pid: subprocess.pid }, 'Exited process');
logger.info({ code, pid: subprocess.pid }, 'DBGM-00017 Exited process');
this.dispatchMessage(runid, `Finished external process with code ${code}`);
socket.emit(`runner-done-${runid}`, code);
if (onFinished) {
@@ -256,7 +260,7 @@ module.exports = {
severity: 'error',
message: extractErrorMessage(err),
});
logger.error(extractErrorLogData(err), 'Caught error on stdin');
logger.error(extractErrorLogData(err), 'DBGM-00118 Caught error on stdin');
});
}
@@ -269,18 +273,48 @@ module.exports = {
},
start_meta: true,
async start({ script }) {
async start({ script }, req) {
const runid = crypto.randomUUID();
if (script.type == 'json') {
if (!platformInfo.isElectron) {
if (!checkSecureDirectoriesInScript(script)) {
return { errorMessage: 'Unallowed directories in script' };
}
}
logJsonRunnerScript(req, script);
const js = await jsonScriptToJavascript(script);
return this.startCore(runid, scriptTemplate(js, false));
}
await testStandardPermission('run-shell-script', req);
if (!platformInfo.allowShellScripting) {
sendToAuditLog(req, {
category: 'shell',
component: 'RunnersController',
event: 'script.runFailed',
action: 'script',
severity: 'warn',
detail: script,
message: 'Scripts are not allowed',
});
return { errorMessage: 'Shell scripting is not allowed' };
}
sendToAuditLog(req, {
category: 'shell',
component: 'RunnersController',
event: 'script.run.shell',
action: 'script',
severity: 'info',
detail: script,
message: 'Running JS script',
});
return this.startCore(runid, scriptTemplate(script, false));
},
@@ -317,6 +351,11 @@ module.exports = {
loadReader_meta: true,
async loadReader({ functionName, props }) {
if (!platformInfo.isElectron) {
if (props?.fileName && !checkSecureDirectories(props.fileName)) {
return { errorMessage: 'Unallowed file' };
}
}
const prefix = extractShellApiPlugins(functionName)
.map(packageName => `// @require ${packageName}\n`)
.join('');

View File

@@ -3,7 +3,7 @@ const fs = require('fs-extra');
const path = require('path');
const cron = require('node-cron');
const runners = require('./runners');
const { hasPermission } = require('../utility/hasPermission');
const { hasPermission, loadPermissionsFromRequest } = require('../utility/hasPermission');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('scheduler');
@@ -24,13 +24,14 @@ module.exports = {
if (!match) return;
const pattern = match[1];
if (!cron.validate(pattern)) return;
logger.info(`Schedule script ${file} with pattern ${pattern}`);
logger.info(`DBGM-00018 Schedule script ${file} with pattern ${pattern}`);
const task = cron.schedule(pattern, () => runners.start({ script: text }));
this.tasks.push(task);
},
async reload(_params, req) {
if (!hasPermission('files/shell/read', req)) return;
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!hasPermission('files/shell/read', loadedPermissions)) return;
const shellDir = path.join(filesdir(), 'shell');
await this.unload();
if (!(await fs.exists(shellDir))) return;

View File

@@ -8,10 +8,17 @@ const { handleProcessCommunication } = require('../utility/processComm');
const lock = new AsyncLock();
const config = require('./config');
const processArgs = require('../utility/processArgs');
const { testConnectionPermission } = require('../utility/hasPermission');
const {
testConnectionPermission,
loadPermissionsFromRequest,
hasPermission,
loadDatabasePermissionsFromRequest,
getDatabasePermissionRole,
} = require('../utility/hasPermission');
const { MissingCredentialsError } = require('../utility/exceptions');
const pipeForkLogs = require('../utility/pipeForkLogs');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const { sendToAuditLog } = require('../utility/auditlog');
const logger = getLogger('serverConnection');
@@ -102,7 +109,7 @@ module.exports = {
this.close(conid, false);
});
subprocess.on('error', err => {
logger.error(extractErrorLogData(err), 'Error in server connection subprocess');
logger.error(extractErrorLogData(err), 'DBGM-00119 Error in server connection subprocess');
if (newOpened.disconnected) return;
this.close(conid, false);
});
@@ -120,7 +127,7 @@ module.exports = {
try {
existing.subprocess.kill();
} catch (err) {
logger.error(extractErrorLogData(err), 'Error killing subprocess');
logger.error(extractErrorLogData(err), 'DBGM-00120 Error killing subprocess');
}
}
this.opened = this.opened.filter(x => x.conid != conid);
@@ -134,7 +141,7 @@ module.exports = {
disconnect_meta: true,
async disconnect({ conid }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
await this.close(conid, true);
return { status: 'ok' };
},
@@ -143,14 +150,44 @@ module.exports = {
async listDatabases({ conid }, req) {
if (!conid) return [];
if (conid == '__model') return [];
testConnectionPermission(conid, req);
const loadedPermissions = await loadPermissionsFromRequest(req);
await testConnectionPermission(conid, req, loadedPermissions);
const opened = await this.ensureOpened(conid);
sendToAuditLog(req, {
category: 'serverop',
component: 'ServerConnectionsController',
action: 'listDatabases',
event: 'databases.list',
severity: 'info',
conid,
sessionParam: `${conid}`,
sessionGroup: 'listDatabases',
message: `Loaded databases for connection`,
});
if (process.env.STORAGE_DATABASE && !hasPermission(`all-databases`, loadedPermissions)) {
// filter databases by permissions
const databasePermissions = await loadDatabasePermissionsFromRequest(req);
const res = [];
for (const db of opened?.databases ?? []) {
const databasePermissionRole = getDatabasePermissionRole(db.id, db.name, databasePermissions);
if (databasePermissionRole != 'deny') {
res.push({
...db,
databasePermissionRole,
});
}
}
return res;
}
return opened?.databases ?? [];
},
version_meta: true,
async version({ conid }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid);
return opened?.version ?? null;
},
@@ -172,14 +209,14 @@ module.exports = {
return Promise.resolve();
}
this.lastPinged[conid] = new Date().getTime();
const opened = await this.ensureOpened(conid);
if (!opened) {
return Promise.resolve();
}
try {
const opened = await this.ensureOpened(conid);
if (!opened) {
return Promise.resolve();
}
opened.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error pinging server connection');
logger.error(extractErrorLogData(err), 'DBGM-00121 Error pinging server connection');
this.close(conid);
}
})
@@ -190,7 +227,7 @@ module.exports = {
refresh_meta: true,
async refresh({ conid, keepOpen }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
if (!keepOpen) this.close(conid);
await this.ensureOpened(conid);
@@ -198,7 +235,7 @@ module.exports = {
},
async sendDatabaseOp({ conid, msgtype, name }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid);
if (!opened) {
return null;
@@ -232,7 +269,7 @@ module.exports = {
try {
conn.subprocess.send({ msgid, ...message });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error sending request');
logger.error(extractErrorLogData(err), 'DBGM-00122 Error sending request');
this.close(conn.conid);
}
});
@@ -240,7 +277,7 @@ module.exports = {
},
async loadDataCore(msgtype, { conid, ...args }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid);
if (!opened) {
return null;
@@ -258,13 +295,43 @@ module.exports = {
serverSummary_meta: true,
async serverSummary({ conid }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
logger.info({ conid }, 'DBGM-00260 Processing server summary');
return this.loadDataCore('serverSummary', { conid });
},
listDatabaseProcesses_meta: true,
async listDatabaseProcesses(ctx, req) {
const { conid } = ctx;
// logger.info({ conid }, 'DBGM-00261 Listing processes of database server');
testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid);
if (!opened) {
return null;
}
if (opened.connection.isReadOnly) return false;
return this.sendRequest(opened, { msgtype: 'listDatabaseProcesses' });
},
killDatabaseProcess_meta: true,
async killDatabaseProcess(ctx, req) {
const { conid, pid } = ctx;
testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid);
if (!opened) {
return null;
}
if (opened.connection.isReadOnly) return false;
return this.sendRequest(opened, { msgtype: 'killDatabaseProcess', pid });
},
summaryCommand_meta: true,
async summaryCommand({ conid, command, row }, req) {
testConnectionPermission(conid, req);
await testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid);
if (!opened) {
return null;

View File

@@ -8,9 +8,13 @@ const path = require('path');
const { handleProcessCommunication } = require('../utility/processComm');
const processArgs = require('../utility/processArgs');
const { appdir } = require('../utility/directories');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const { getLogger, extractErrorLogData, removeSqlFrontMatter } = require('dbgate-tools');
const pipeForkLogs = require('../utility/pipeForkLogs');
const config = require('./config');
const { sendToAuditLog } = require('../utility/auditlog');
const { testStandardPermission, testDatabaseRolePermission } = require('../utility/hasPermission');
const { getStaticTokenSecret } = require('../auth/authCommon');
const jwt = require('jsonwebtoken');
const logger = getLogger('sessions');
@@ -146,15 +150,47 @@ module.exports = {
},
executeQuery_meta: true,
async executeQuery({ sesid, sql, autoCommit, limitRows, frontMatter }) {
async executeQuery({ sesid, sql, autoCommit, autoDetectCharts, limitRows, frontMatter }, req) {
let useTokenIsOk = false;
if (frontMatter?.useToken) {
const decoded = jwt.verify(frontMatter.useToken, getStaticTokenSecret());
if (decoded?.['contentHash'] == crypto.createHash('md5').update(removeSqlFrontMatter(sql)).digest('hex')) {
useTokenIsOk = true;
}
}
if (!useTokenIsOk) {
await testStandardPermission('dbops/query', req);
}
const session = this.opened.find(x => x.sesid == sesid);
if (!session) {
throw new Error('Invalid session');
}
if (!useTokenIsOk) {
await testDatabaseRolePermission(session.conid, session.database, 'run_script', req);
}
logger.info({ sesid, sql }, 'Processing query');
sendToAuditLog(req, {
category: 'dbop',
component: 'SessionController',
action: 'executeQuery',
event: 'query.execute',
severity: 'info',
detail: sql,
conid: session.conid,
database: session.database,
message: 'Executing query',
});
logger.info({ sesid, sql }, 'DBGM-00019 Processing query');
this.dispatchMessage(sesid, 'Query execution started');
session.subprocess.send({ msgtype: 'executeQuery', sql, autoCommit, limitRows, frontMatter });
session.subprocess.send({
msgtype: 'executeQuery',
sql,
autoCommit,
autoDetectCharts: autoDetectCharts || !!frontMatter?.['selected-chart'],
limitRows,
frontMatter,
});
return { state: 'ok' };
},
@@ -166,7 +202,7 @@ module.exports = {
throw new Error('Invalid session');
}
logger.info({ sesid, command }, 'Processing control command');
logger.info({ sesid, command }, 'DBGM-00020 Processing control command');
this.dispatchMessage(sesid, `${_.startCase(command)} started`);
session.subprocess.send({ msgtype: 'executeControlCommand', command });
@@ -204,7 +240,7 @@ module.exports = {
throw new Error('Invalid session');
}
logger.info({ sesid }, 'Starting profiler');
logger.info({ sesid }, 'DBGM-00021 Starting profiler');
session.loadingReader_jslid = jslid;
session.subprocess.send({ msgtype: 'startProfiler', jslid });
@@ -251,7 +287,7 @@ module.exports = {
try {
session.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error pinging session');
logger.error(extractErrorLogData(err), 'DBGM-00145 Error pinging session');
return {
status: 'error',

View File

@@ -13,10 +13,6 @@ module.exports = {
return null;
},
async loadSuperadminPermissions() {
return [];
},
getConnectionsForLoginPage_meta: true,
async getConnectionsForLoginPage() {
return null;
@@ -31,6 +27,11 @@ module.exports = {
return {};
},
sendAuditLog_meta: true,
async sendAuditLog({}) {
return null;
},
startRefreshLicense() {},
async getUsedEngines() {

View File

@@ -0,0 +1,6 @@
module.exports = {
list_meta: true,
async list(req) {
return [];
},
};

View File

@@ -1,19 +1,8 @@
const crypto = require('crypto');
const path = require('path');
const { uploadsdir, getLogsFilePath, filesdir } = require('../utility/directories');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const { uploadsdir } = require('../utility/directories');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('uploads');
const axios = require('axios');
const os = require('os');
const fs = require('fs/promises');
const { read } = require('./queryHistory');
const platformInfo = require('../utility/platformInfo');
const _ = require('lodash');
const serverConnections = require('./serverConnections');
const config = require('./config');
const gistSecret = require('../gistSecret');
const currentVersion = require('../currentVersion');
const socket = require('../utility/socket');
module.exports = {
upload_meta: {
@@ -28,7 +17,7 @@ module.exports = {
}
const uploadName = crypto.randomUUID();
const filePath = path.join(uploadsdir(), uploadName);
logger.info(`Uploading file ${data.name}, size=${data.size}`);
logger.info(`DBGM-00025 Uploading file ${data.name}, size=${data.size}`);
data.mv(filePath, () => {
res.json({
@@ -44,91 +33,77 @@ module.exports = {
raw: true,
},
get(req, res) {
if (req.query.file.includes('..') || req.query.file.includes('/') || req.query.file.includes('\\')) {
res.status(400).send('Invalid file path');
return;
}
res.sendFile(path.join(uploadsdir(), req.query.file));
},
async getGistToken() {
const settings = await config.getSettings();
// uploadErrorToGist_meta: true,
// async uploadErrorToGist() {
// const logs = await fs.readFile(getLogsFilePath(), { encoding: 'utf-8' });
// const connections = await serverConnections.getOpenedConnectionReport();
// try {
// const response = await axios.default.post(
// 'https://api.github.com/gists',
// {
// description: `DbGate ${currentVersion.version} error report`,
// public: false,
// files: {
// 'logs.jsonl': {
// content: logs,
// },
// 'os.json': {
// content: JSON.stringify(
// {
// release: os.release(),
// arch: os.arch(),
// machine: os.machine(),
// platform: os.platform(),
// type: os.type(),
// },
// null,
// 2
// ),
// },
// 'platform.json': {
// content: JSON.stringify(
// _.omit(
// {
// ...platformInfo,
// },
// ['defaultKeyfile', 'sshAuthSock']
// ),
// null,
// 2
// ),
// },
// 'connections.json': {
// content: JSON.stringify(connections, null, 2),
// },
// 'version.json': {
// content: JSON.stringify(currentVersion, null, 2),
// },
// },
// },
// {
// headers: {
// Authorization: `token ${await this.getGistToken()}`,
// 'Content-Type': 'application/json',
// Accept: 'application/vnd.github.v3+json',
// },
// }
// );
return settings['other.gistCreateToken'] || gistSecret;
},
// return response.data;
// } catch (err) {
// logger.error(extractErrorLogData(err), 'DBGM-00148 Error uploading gist');
uploadErrorToGist_meta: true,
async uploadErrorToGist() {
const logs = await fs.readFile(getLogsFilePath(), { encoding: 'utf-8' });
const connections = await serverConnections.getOpenedConnectionReport();
try {
const response = await axios.default.post(
'https://api.github.com/gists',
{
description: `DbGate ${currentVersion.version} error report`,
public: false,
files: {
'logs.jsonl': {
content: logs,
},
'os.json': {
content: JSON.stringify(
{
release: os.release(),
arch: os.arch(),
machine: os.machine(),
platform: os.platform(),
type: os.type(),
},
null,
2
),
},
'platform.json': {
content: JSON.stringify(
_.omit(
{
...platformInfo,
},
['defaultKeyfile', 'sshAuthSock']
),
null,
2
),
},
'connections.json': {
content: JSON.stringify(connections, null, 2),
},
'version.json': {
content: JSON.stringify(currentVersion, null, 2),
},
},
},
{
headers: {
Authorization: `token ${await this.getGistToken()}`,
'Content-Type': 'application/json',
Accept: 'application/vnd.github.v3+json',
},
}
);
return response.data;
} catch (err) {
logger.error(extractErrorLogData(err), 'Error uploading gist');
return {
apiErrorMessage: err.message,
};
// console.error('Error creating gist:', error.response ? error.response.data : error.message);
}
},
deleteGist_meta: true,
async deleteGist({ url }) {
const response = await axios.default.delete(url, {
headers: {
Authorization: `token ${await this.getGistToken()}`,
'Content-Type': 'application/json',
Accept: 'application/vnd.github.v3+json',
},
});
return true;
},
// return {
// apiErrorMessage: err.message,
// };
// // console.error('Error creating gist:', error.response ? error.response.data : error.message);
// }
// },
};

View File

@@ -1 +0,0 @@
module.exports = process.env.GIST_UPLOAD_SECRET;

View File

@@ -5,11 +5,12 @@ const moment = require('moment');
const path = require('path');
const { logsdir, setLogsFilePath, getLogsFilePath } = require('./utility/directories');
const currentVersion = require('./currentVersion');
const _ = require('lodash');
const logger = getLogger('apiIndex');
process.on('uncaughtException', err => {
logger.fatal(extractErrorLogData(err), 'Uncaught exception, exiting process');
logger.fatal(extractErrorLogData(err), 'DBGM-00259 Uncaught exception, exiting process');
process.exit(1);
});
@@ -33,6 +34,9 @@ if (processArgs.processDisplayName) {
// }
function configureLogger() {
const { initializeRecentLogProvider, pushToRecentLogs } = require('./utility/appLogStore');
initializeRecentLogProvider();
const logsFilePath = path.join(logsdir(), `${moment().format('YYYY-MM-DD-HH-mm')}-${process.pid}.ndjson`);
setLogsFilePath(logsFilePath);
setLoggerName('main');
@@ -40,6 +44,8 @@ function configureLogger() {
const consoleLogLevel = process.env.CONSOLE_LOG_LEVEL || process.env.LOG_LEVEL || 'info';
const fileLogLevel = process.env.FILE_LOG_LEVEL || process.env.LOG_LEVEL || 'debug';
const streamsByDatePart = {};
const logConfig = {
base: { pid: process.pid },
targets: [
@@ -49,10 +55,35 @@ function configureLogger() {
level: consoleLogLevel,
},
{
type: 'stream',
type: 'objstream',
// @ts-ignore
level: fileLogLevel,
stream: fs.createWriteStream(logsFilePath, { flags: 'a' }),
objstream: {
send(msg) {
const datePart = moment(msg.time).format('YYYY-MM-DD');
if (!streamsByDatePart[datePart]) {
streamsByDatePart[datePart] = fs.createWriteStream(
path.join(logsdir(), `${moment().format('YYYY-MM-DD-HH-mm')}-${process.pid}.ndjson`),
{ flags: 'a' }
);
}
const additionals = {};
const finalMsg =
_.isString(msg.msg) && msg.msg.match(/^DBGM-\d\d\d\d\d/)
? {
...msg,
msg: msg.msg.substring(10).trimStart(),
msgcode: msg.msg.substring(0, 10),
...additionals,
}
: {
...msg,
...additionals,
};
streamsByDatePart[datePart].write(`${JSON.stringify(finalMsg)}\n`);
pushToRecentLogs(finalMsg);
},
},
},
],
};
@@ -101,10 +132,10 @@ function configureLogger() {
if (processArgs.listenApi) {
configureLogger();
logger.info(`Starting API process version ${currentVersion.version}`);
logger.info(`DBGM-00026 Starting API process version ${currentVersion.version}`);
if (process.env.DEBUG_PRINT_ENV_VARIABLES) {
logger.info('Debug print environment variables:');
logger.info('DBGM-00027 Debug print environment variables:');
for (const key of Object.keys(process.env)) {
logger.info(` ${key}: ${JSON.stringify(process.env[key])}`);
}

View File

@@ -6,6 +6,7 @@ const http = require('http');
const cors = require('cors');
const getPort = require('get-port');
const path = require('path');
const fs = require('fs/promises');
const useController = require('./utility/useController');
const socket = require('./utility/socket');
@@ -28,6 +29,8 @@ const files = require('./controllers/files');
const scheduler = require('./controllers/scheduler');
const queryHistory = require('./controllers/queryHistory');
const cloud = require('./controllers/cloud');
const teamFiles = require('./controllers/teamFiles');
const onFinished = require('on-finished');
const processArgs = require('./utility/processArgs');
@@ -44,6 +47,48 @@ const { startCloudFiles } = require('./utility/cloudIntf');
const logger = getLogger('main');
// Serves '/' and any '*.html' page from publicDir, substituting the
// <!--HEAD_SCRIPT--> / <!--BODY_SCRIPT--> placeholders before sending, then
// mounts plain express.static for every other asset (css/js/images/...).
function registerExpressStatic(app, publicDir) {
app.get([getExpressPath('/'), getExpressPath('/*.html')], async (req, res, next) => {
try {
// The root path maps to index.html; explicit *.html requests keep their own path.
const relPath = req.path === getExpressPath('/') ? '/index.html' : req.path;
const filePath = path.join(publicDir, relPath);
let html = await fs.readFile(filePath, 'utf8');
if (process.env.DBGATE_GTM_ID) {
// GTM configured: inject the Google Tag Manager loader at the head placeholder.
// NOTE(review): PAGE_HEAD_SCRIPT is ignored in this branch (only the non-GTM
// branch below honors it) — confirm this is intended.
html = html.replace(
/<!--HEAD_SCRIPT-->/g,
`<!-- Google Tag Manager -->
<script>(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start':
new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0],
j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.async=true;j.src=
'https://www.googletagmanager.com/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f);
})(window,document,'script','dataLayer','${process.env.DBGATE_GTM_ID}');</script>
<!-- End Google Tag Manager -->`
);
// Body placeholder: an explicit PAGE_BODY_SCRIPT wins; otherwise fall back
// to the GTM noscript iframe.
html = html.replace(
/<!--BODY_SCRIPT-->/g,
process.env.PAGE_BODY_SCRIPT ??
`<!-- Google Tag Manager (noscript) -->
<noscript><iframe src="https://www.googletagmanager.com/ns.html?id=${process.env.DBGATE_GTM_ID}" height="0" width="0" style="display:none;visibility:hidden"></iframe></noscript>
<!-- End Google Tag Manager (noscript) -->`
);
} else {
// No GTM: substitute optional custom snippets, defaulting to empty strings.
html = html.replace(/<!--HEAD_SCRIPT-->/g, process.env.PAGE_HEAD_SCRIPT ?? '');
html = html.replace(/<!--BODY_SCRIPT-->/g, process.env.PAGE_BODY_SCRIPT ?? '');
}
res.type('html').send(html);
} catch (err) {
// Missing file: fall through to the static middleware / 404 chain below.
if (err.code === 'ENOENT') return next();
next(err);
}
});
// 2) Static assets for everything else (css/js/images/etc.)
app.use(getExpressPath('/'), express.static(publicDir));
}
function start() {
// console.log('process.argv', process.argv);
@@ -78,22 +123,18 @@ function start() {
if (platformInfo.isDocker) {
// server static files inside docker container
app.use(getExpressPath('/'), express.static('/home/dbgate-docker/public'));
registerExpressStatic(app, '/home/dbgate-docker/public');
} else if (platformInfo.isAwsUbuntuLayout) {
app.use(getExpressPath('/'), express.static('/home/ubuntu/build/public'));
registerExpressStatic(app, '/home/dbgate-docker/public');
registerExpressStatic(app, '/home/ubuntu/build/public');
} else if (platformInfo.isAzureUbuntuLayout) {
app.use(getExpressPath('/'), express.static('/home/azureuser/build/public'));
registerExpressStatic(app, '/home/azureuser/build/public');
} else if (processArgs.runE2eTests) {
app.use(getExpressPath('/'), express.static(path.resolve('packer/build/public')));
registerExpressStatic(app, path.resolve('packer/build/public'));
} else if (platformInfo.isNpmDist) {
app.use(
getExpressPath('/'),
express.static(path.join(__dirname, isProApp() ? '../../dbgate-web-premium/public' : '../../dbgate-web/public'))
);
registerExpressStatic(app, path.join(__dirname, isProApp() ? '../../dbgate-web-premium/public' : '../../dbgate-web/public'));
} else if (process.env.DEVWEB) {
// console.log('__dirname', __dirname);
// console.log(path.join(__dirname, '../../web/public/build'));
app.use(getExpressPath('/'), express.static(path.join(__dirname, '../../web/public')));
registerExpressStatic(app, path.join(__dirname, '../../web/public'));
} else {
app.get(getExpressPath('/'), (req, res) => {
res.send('DbGate API');
@@ -152,15 +193,15 @@ function start() {
if (platformInfo.isDocker) {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (docker build)`);
logger.info(`DBGM-00028 DbGate API listening on port ${port} (docker build)`);
server.listen(port);
} else if (platformInfo.isAwsUbuntuLayout) {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (AWS AMI build)`);
logger.info(`DBGM-00029 DbGate API listening on port ${port} (AWS AMI build)`);
server.listen(port);
} else if (platformInfo.isAzureUbuntuLayout) {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (Azure VM build)`);
logger.info(`DBGM-00030 DbGate API listening on port ${port} (Azure VM build)`);
server.listen(port);
} else if (platformInfo.isNpmDist) {
getPort({
@@ -170,27 +211,27 @@ function start() {
),
}).then(port => {
server.listen(port, () => {
logger.info(`DbGate API listening on port ${port} (NPM build)`);
logger.info(`DBGM-00031 DbGate API listening on port ${port} (NPM build)`);
});
});
} else if (process.env.DEVWEB) {
const port = process.env.PORT || 3000;
logger.info(`DbGate API & web listening on port ${port} (dev web build)`);
logger.info(`DBGM-00032 DbGate API & web listening on port ${port} (dev web build)`);
server.listen(port);
} else {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (dev API build)`);
logger.info(`DBGM-00033 DbGate API listening on port ${port} (dev API build)`);
server.listen(port);
}
function shutdown() {
logger.info('\nShutting down DbGate API server');
logger.info('DBGM-00034 Shutting down DbGate API server');
server.close(() => {
logger.info('Server shut down, terminating');
logger.info('DBGM-00035 Server shut down, terminating');
process.exit(0);
});
setTimeout(() => {
logger.info('Server close timeout, terminating');
logger.info('DBGM-00036 Server close timeout, terminating');
process.exit(0);
}, 1000);
}
@@ -225,6 +266,7 @@ function useAllControllers(app, electron) {
useController(app, electron, '/apps', apps);
useController(app, electron, '/auth', auth);
useController(app, electron, '/cloud', cloud);
useController(app, electron, '/team-files', teamFiles);
}
function setElectronSender(electronSender) {

View File

@@ -6,7 +6,6 @@ const {
extractIntSettingsValue,
getLogger,
isCompositeDbName,
dbNameLogCategory,
extractErrorMessage,
extractErrorLogData,
ScriptWriterEval,
@@ -18,13 +17,14 @@ const requireEngineDriver = require('../utility/requireEngineDriver');
const { connectUtility } = require('../utility/connectUtility');
const { handleProcessCommunication } = require('../utility/processComm');
const generateDeploySql = require('../shell/generateDeploySql');
const { dumpSqlSelect } = require('dbgate-sqltree');
const { dumpSqlSelect, scriptToSql } = require('dbgate-sqltree');
const { allowExecuteCustomScript, handleQueryStream } = require('../utility/handleQueryStream');
const dbgateApi = require('../shell');
const requirePlugin = require('../shell/requirePlugin');
const path = require('path');
const { rundir } = require('../utility/directories');
const fs = require('fs-extra');
const { changeSetToSql } = require('dbgate-datalib');
const logger = getLogger('dbconnProcess');
@@ -45,6 +45,14 @@ function getStatusCounter() {
return statusCounter;
}
// Builds the common structured-log context for this DB connection process:
// the database / connection id / engine being served. Each field stays
// undefined until the corresponding state (dbhan, storedConnection) is set.
function getLogInfo() {
  return {
    database: dbhan?.database,
    conid: dbhan?.conid,
    engine: storedConnection?.engine,
  };
}
async function checkedAsyncCall(promise) {
try {
const res = await promise;
@@ -131,10 +139,10 @@ async function readVersion() {
const driver = requireEngineDriver(storedConnection);
try {
const version = await driver.getVersion(dbhan);
logger.debug(`Got server version: ${version.version}`);
logger.debug(getLogInfo(), `DBGM-00037 Got server version: ${version.version}`);
serverVersion = version;
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB server version');
logger.error(extractErrorLogData(err, getLogInfo()), 'DBGM-00149 Error getting DB server version');
serverVersion = { version: 'Unknown' };
}
process.send({ msgtype: 'version', version: serverVersion });
@@ -148,9 +156,8 @@ async function handleConnect({ connection, structure, globalSettings }) {
const driver = requireEngineDriver(storedConnection);
dbhan = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app'));
logger.debug(
`Connected to database, driver: ${storedConnection.engine}, separate schemas: ${
storedConnection.useSeparateSchemas ? 'YES' : 'NO'
}, 'DB: ${dbNameLogCategory(dbhan.database)} }`
getLogInfo(),
`DBGM-00038 Connected to database, separate schemas: ${storedConnection.useSeparateSchemas ? 'YES' : 'NO'}`
);
dbhan.feedback = feedback => setStatus({ feedback });
await checkedAsyncCall(readVersion());
@@ -257,13 +264,16 @@ async function handleDriverDataCore(msgid, callMethod, { logName }) {
const result = await callMethod(driver);
process.send({ msgtype: 'response', msgid, result: serializeJsTypesForJsonStringify(result) });
} catch (err) {
logger.error(extractErrorLogData(err, { logName }), `Error when handling message ${logName}`);
logger.error(
extractErrorLogData(err, { logName, ...getLogInfo() }),
`DBGM-00150 Error when handling message ${logName}`
);
process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error executing DB data') });
}
}
async function handleSchemaList({ msgid }) {
logger.debug('Loading schema list');
logger.debug(getLogInfo(), 'DBGM-00039 Loading schema list');
return handleDriverDataCore(msgid, driver => driver.listSchemas(dbhan), { logName: 'listSchemas' });
}
@@ -339,6 +349,25 @@ async function handleUpdateCollection({ msgid, changeSet }) {
}
}
// Persists a table-data change set: asks the engine driver to build a save
// script (with the generic changeSetToSql as fallback), renders it to SQL and
// runs it inside a transaction. Replies over IPC with either a bare success
// response or an errorMessage extracted from the failure — never throws.
async function handleSaveTableData({ msgid, changeSet }) {
  await waitStructure();
  try {
    const driver = requireEngineDriver(storedConnection);
    const fallbackSql = () => changeSetToSql(changeSet, analysedStructure, driver.dialect);
    const saveScript = driver.createSaveChangeSetScript(changeSet, analysedStructure, fallbackSql);
    await driver.script(dbhan, scriptToSql(driver, saveScript), { useTransaction: true });
    process.send({ msgtype: 'response', msgid });
  } catch (err) {
    const errorMessage = extractErrorMessage(err, 'Error executing SQL script');
    process.send({ msgtype: 'response', msgid, errorMessage });
  }
}
async function handleSqlPreview({ msgid, objects, options }) {
await waitStructure();
const driver = requireEngineDriver(storedConnection);
@@ -351,7 +380,7 @@ async function handleSqlPreview({ msgid, objects, options }) {
process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated });
if (generator.isUnhandledException) {
setTimeout(async () => {
logger.error('Exiting because of unhandled exception');
logger.error(getLogInfo(), 'DBGM-00151 Exiting because of unhandled exception');
await driver.close(dbhan);
process.exit(0);
}, 500);
@@ -455,6 +484,7 @@ const messageHandlers = {
runScript: handleRunScript,
runOperation: handleRunOperation,
updateCollection: handleUpdateCollection,
saveTableData: handleSaveTableData,
collectionData: handleCollectionData,
loadKeys: handleLoadKeys,
scanKeys: handleScanKeys,
@@ -485,7 +515,7 @@ function start() {
setInterval(async () => {
const time = new Date().getTime();
if (time - lastPing > 40 * 1000) {
logger.info('Database connection not alive, exiting');
logger.info(getLogInfo(), 'DBGM-00040 Database connection not alive, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);
@@ -497,10 +527,10 @@ function start() {
try {
await handleMessage(message);
} catch (err) {
logger.error(extractErrorLogData(err), 'Error in DB connection');
logger.error(extractErrorLogData(err, getLogInfo()), 'DBGM-00041 Error in DB connection');
process.send({
msgtype: 'error',
error: extractErrorMessage(err, 'Error processing message'),
error: extractErrorMessage(err, 'DBGM-00042 Error processing message'),
msgid: message?.msgid,
});
}

View File

@@ -39,7 +39,7 @@ async function handleRefresh() {
name: 'error',
message: err.message,
});
logger.error(extractErrorLogData(err), 'Error refreshing server databases');
logger.error(extractErrorLogData(err), 'DBGM-00152 Error refreshing server databases');
setTimeout(() => process.exit(1), 1000);
}
}
@@ -50,7 +50,7 @@ async function readVersion() {
try {
version = await driver.getVersion(dbhan);
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB server version');
logger.error(extractErrorLogData(err), 'DBGM-00153 Error getting DB server version');
version = { version: 'Unknown' };
}
process.send({ msgtype: 'version', version });
@@ -90,7 +90,7 @@ async function handleConnect(connection) {
name: 'error',
message: err.message,
});
logger.error(extractErrorLogData(err), 'Error connecting to server');
logger.error(extractErrorLogData(err), 'DBGM-00154 Error connecting to server');
setTimeout(() => process.exit(1), 1000);
}
@@ -120,7 +120,7 @@ async function handleDatabaseOp(op, { msgid, name }) {
} else {
const dmp = driver.createDumper();
dmp[op](name);
logger.info({ sql: dmp.s }, 'Running script');
logger.info({ sql: dmp.s }, 'DBGM-00043 Running script');
await driver.query(dbhan, dmp.s, { discardResult: true });
}
await handleRefresh();
@@ -146,6 +146,30 @@ async function handleServerSummary({ msgid }) {
return handleDriverDataCore(msgid, driver => driver.serverSummary(dbhan));
}
// Kills a single server-side database process by pid via the engine driver.
// pid arrives over IPC and may be a string, so it is coerced with Number().
// Always answers the message: { result } on success, { errorMessage } on failure.
async function handleKillDatabaseProcess({ msgid, pid }) {
  await waitConnected();
  const driver = requireEngineDriver(storedConnection);
  const reply = payload => process.send({ msgtype: 'response', msgid, ...payload });
  try {
    reply({ result: await driver.killProcess(dbhan, Number(pid)) });
  } catch (err) {
    reply({ errorMessage: err.message });
  }
}
// Lists the server-side database processes via the engine driver.
// Always answers the message: { result } on success, { errorMessage } on failure.
async function handleListDatabaseProcesses({ msgid }) {
  await waitConnected();
  const driver = requireEngineDriver(storedConnection);
  const reply = payload => process.send({ msgtype: 'response', msgid, ...payload });
  try {
    reply({ result: await driver.listProcesses(dbhan) });
  } catch (err) {
    reply({ errorMessage: err.message });
  }
}
// Thin adapter: runs the driver's summaryCommand for the given row through
// the shared request/response wrapper, which handles serialization and errors.
async function handleSummaryCommand({ msgid, command, row }) {
  const invoke = driver => driver.summaryCommand(dbhan, command, row);
  return handleDriverDataCore(msgid, invoke);
}
@@ -154,6 +178,8 @@ const messageHandlers = {
connect: handleConnect,
ping: handlePing,
serverSummary: handleServerSummary,
killDatabaseProcess: handleKillDatabaseProcess,
listDatabaseProcesses: handleListDatabaseProcesses,
summaryCommand: handleSummaryCommand,
createDatabase: props => handleDatabaseOp('createDatabase', props),
dropDatabase: props => handleDatabaseOp('dropDatabase', props),
@@ -170,7 +196,7 @@ function start() {
setInterval(async () => {
const time = new Date().getTime();
if (time - lastPing > 40 * 1000) {
logger.info('Server connection not alive, exiting');
logger.info('DBGM-00044 Server connection not alive, exiting');
const driver = requireEngineDriver(storedConnection);
if (dbhan) {
await driver.close(dbhan);
@@ -188,7 +214,7 @@ function start() {
name: 'error',
message: err.message,
});
logger.error(extractErrorLogData(err), `Error processing message ${message?.['msgtype']}`);
logger.error(extractErrorLogData(err), `DBGM-00155 Error processing message ${message?.['msgtype']}`);
}
});
}

View File

@@ -117,7 +117,7 @@ async function handleExecuteControlCommand({ command }) {
}
}
async function handleExecuteQuery({ sql, autoCommit, limitRows, frontMatter }) {
async function handleExecuteQuery({ sql, autoCommit, autoDetectCharts, limitRows, frontMatter }) {
lastActivity = new Date().getTime();
await waitConnected();
@@ -146,7 +146,16 @@ async function handleExecuteQuery({ sql, autoCommit, limitRows, frontMatter }) {
...driver.getQuerySplitterOptions('stream'),
returnRichInfo: true,
})) {
await handleQueryStream(dbhan, driver, queryStreamInfoHolder, sqlItem, undefined, limitRows, frontMatter);
await handleQueryStream(
dbhan,
driver,
queryStreamInfoHolder,
sqlItem,
undefined,
limitRows,
frontMatter,
autoDetectCharts
);
// const handler = new StreamHandler(resultIndex);
// const stream = await driver.stream(systemConnection, sqlItem, handler);
// handler.stream = stream;
@@ -221,7 +230,7 @@ function start() {
setInterval(async () => {
const time = new Date().getTime();
if (time - lastPing > 25 * 1000) {
logger.info('Session not alive, exiting');
logger.info('DBGM-00045 Session not alive, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);
@@ -241,7 +250,7 @@ function start() {
!currentProfiler &&
executingScripts == 0
) {
logger.info('Session not active, exiting');
logger.info('DBGM-00046 Session not active, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);

View File

@@ -41,7 +41,7 @@ async function handleStart({ connection, tunnelConfig }) {
tunnelConfig,
});
} catch (err) {
logger.error(extractErrorLogData(err), 'Error creating SSH tunnel connection:');
logger.error(extractErrorLogData(err), 'DBGM-00156 Error creating SSH tunnel connection:');
process.send({
msgtype: 'error',

View File

@@ -10,7 +10,7 @@ const logger = getLogger();
function archiveWriter({ folderName, fileName }) {
const dir = resolveArchiveFolder(folderName);
if (!fs.existsSync(dir)) {
logger.info(`Creating directory ${dir}`);
logger.info(`DBGM-00047 Creating directory ${dir}`);
fs.mkdirSync(dir);
}
const jsonlFile = path.join(dir, `${fileName}.jsonl`);

View File

@@ -83,7 +83,7 @@ async function copyStream(input, output, options) {
});
}
logger.error(extractErrorLogData(err, { progressName }), 'Import/export job failed');
logger.error(extractErrorLogData(err, { progressName }), 'DBGM-00157 Import/export job failed');
// throw err;
}
}

View File

@@ -14,7 +14,7 @@ const crypto = require('crypto');
* @param {object} options.driver - driver object. If not provided, it will be loaded from connection
* @param {object} options.analysedStructure - analysed structure of the database. If not provided, it will be loaded
* @param {string} options.modelFolder - folder with model files (YAML files for tables, SQL files for views, procedures, ...)
* @param {import('dbgate-tools').DatabaseModelFile[]} options.loadedDbModel - loaded database model - collection of yaml and SQL files loaded into array
* @param {import('dbgate-tools').DatabaseModelFile[] | import('dbgate-types').DatabaseInfo} options.loadedDbModel - loaded database model - collection of yaml and SQL files loaded into array
* @param {function[]} options.modelTransforms - array of functions for transforming model
* @param {object} options.dbdiffOptionsExtra - extra options for dbdiff
* @param {string} options.ignoreNameRegex - regex for ignoring objects by name

View File

@@ -28,20 +28,20 @@ async function executeQuery({
useTransaction,
}) {
if (!logScriptItems && !skipLogging) {
logger.info({ sql: getLimitedQuery(sql) }, `Execute query`);
logger.info({ sql: getLimitedQuery(sql) }, `DBGM-00048 Execute query`);
}
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'script'));
if (sqlFile) {
logger.debug(`Loading SQL file ${sqlFile}`);
logger.debug(`DBGM-00049 Loading SQL file ${sqlFile}`);
sql = await fs.readFile(sqlFile, { encoding: 'utf-8' });
}
try {
if (!skipLogging) {
logger.debug(`Running SQL query, length: ${sql.length}`);
logger.debug(`DBGM-00050 Running SQL query, length: ${sql.length}`);
}
await driver.script(dbhan, sql, { logScriptItems, useTransaction });

View File

@@ -23,7 +23,7 @@ const { connectUtility } = require('../utility/connectUtility');
* @param {object} options.driver - driver object. If not provided, it will be loaded from connection
* @param {object} options.analysedStructure - analysed structure of the database. If not provided, it will be loaded
* @param {string} options.modelFolder - folder with model files (YAML files for tables, SQL files for views, procedures, ...)
* @param {import('dbgate-tools').DatabaseModelFile[]} options.loadedDbModel - loaded database model - collection of yaml and SQL files loaded into array
* @param {import('dbgate-tools').DatabaseModelFile[] | import('dbgate-types').DatabaseInfo} options.loadedDbModel - loaded database model - collection of yaml and SQL files loaded into array
* @param {function[]} options.modelTransforms - array of functions for transforming model
* @param {object} options.dbdiffOptionsExtra - extra options for dbdiff
* @param {string} options.ignoreNameRegex - regex for ignoring objects by name

View File

@@ -45,14 +45,14 @@ class ImportStream extends stream.Transform {
}
async function importDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, inputFile }) {
logger.info(`Importing database`);
logger.info(`DBGM-00051 Importing database`);
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
try {
logger.info(`Input file: ${inputFile}`);
logger.info(`DBGM-00052 Input file: ${inputFile}`);
const downloadedFile = await download(inputFile);
logger.info(`Downloaded file: ${downloadedFile}`);
logger.info(`DBGM-00053 Downloaded file: ${downloadedFile}`);
const fileStream = fs.createReadStream(downloadedFile, 'utf-8');
const splittedStream = splitQueryStream(fileStream, {

View File

@@ -3,7 +3,7 @@ const fs = require('fs-extra');
const executeQuery = require('./executeQuery');
const { connectUtility } = require('../utility/connectUtility');
const requireEngineDriver = require('../utility/requireEngineDriver');
const { getAlterDatabaseScript, DatabaseAnalyser, runCommandOnDriver } = require('dbgate-tools');
const { getAlterDatabaseScript, DatabaseAnalyser, runCommandOnDriver, adaptDatabaseInfo } = require('dbgate-tools');
const importDbModel = require('../utility/importDbModel');
const jsonLinesReader = require('./jsonLinesReader');
const tableWriter = require('./tableWriter');
@@ -17,9 +17,8 @@ const copyStream = require('./copyStream');
* @param {object} options.driver - driver object. If not provided, it will be loaded from connection
* @param {string} options.folder - folder with model files (YAML files for tables, SQL files for views, procedures, ...)
* @param {function[]} options.modelTransforms - array of functions for transforming model
* @param {((row: Record<string, any>) => Record<string, any>) | undefined} options.transformRow - function to transform each row
*/
async function importDbFromFolder({ connection, systemConnection, driver, folder, modelTransforms, transformRow }) {
async function importDbFromFolder({ connection, systemConnection, driver, folder, modelTransforms }) {
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
@@ -27,10 +26,7 @@ async function importDbFromFolder({ connection, systemConnection, driver, folder
if (driver?.databaseEngineTypes?.includes('sql')) {
const model = await importDbModel(folder);
let modelAdapted = {
...model,
tables: model.tables.map(table => driver.adaptTableInfo(table)),
};
let modelAdapted = adaptDatabaseInfo(model, driver);
for (const transform of modelTransforms || []) {
modelAdapted = transform(modelAdapted);
}
@@ -78,7 +74,7 @@ async function importDbFromFolder({ connection, systemConnection, driver, folder
for (const table of modelAdapted.tables) {
const fileName = path.join(folder, `${table.pureName}.jsonl`);
if (await fs.exists(fileName)) {
const src = await jsonLinesReader({ fileName, transformRow });
const src = await jsonLinesReader({ fileName });
const dst = await tableWriter({
systemConnection: dbhan,
pureName: table.pureName,
@@ -106,7 +102,7 @@ async function importDbFromFolder({ connection, systemConnection, driver, folder
for (const file of fs.readdirSync(folder)) {
if (!file.endsWith('.jsonl')) continue;
const pureName = path.parse(file).name;
const src = await jsonLinesReader({ fileName: path.join(folder, file), transformRow });
const src = await jsonLinesReader({ fileName: path.join(folder, file) });
const dst = await tableWriter({
systemConnection: dbhan,
pureName,

View File

@@ -6,11 +6,10 @@ const download = require('./download');
const logger = getLogger('jsonLinesReader');
class ParseStream extends stream.Transform {
constructor({ limitRows, transformRow }) {
constructor({ limitRows }) {
super({ objectMode: true });
this.wasHeader = false;
this.limitRows = limitRows;
this.transformRow = transformRow;
this.rowsWritten = 0;
}
_transform(chunk, encoding, done) {
@@ -27,11 +26,7 @@ class ParseStream extends stream.Transform {
this.wasHeader = true;
}
if (!this.limitRows || this.rowsWritten < this.limitRows) {
if (this.transformRow) {
this.push(this.transformRow(obj));
} else {
this.push(obj);
}
this.push(obj);
this.rowsWritten += 1;
}
done();
@@ -44,11 +39,10 @@ class ParseStream extends stream.Transform {
* @param {string} options.fileName - file name or URL
* @param {string} options.encoding - encoding of the file
* @param {number} options.limitRows - maximum number of rows to read
* @param {((row: Record<string, any>) => Record<string, any>) | undefined} options.transformRow - function to transform each row
* @returns {Promise<readerType>} - reader object
*/
async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined, transformRow }) {
logger.info(`Reading file ${fileName}`);
async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) {
logger.info(`DBGM-00054 Reading file ${fileName}`);
const downloadedFile = await download(fileName);
@@ -58,7 +52,7 @@ async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undef
encoding
);
const liner = byline(fileStream);
const parser = new ParseStream({ limitRows, transformRow });
const parser = new ParseStream({ limitRows });
return [liner, parser];
}

View File

@@ -33,7 +33,7 @@ class StringifyStream extends stream.Transform {
* @returns {Promise<writerType>} - writer object
*/
async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true }) {
logger.info(`Writing file ${fileName}`);
logger.info(`DBGM-00055 Writing file ${fileName}`);
const stringify = new StringifyStream({ header });
const fileStream = fs.createWriteStream(fileName, encoding);
return [stringify, fileStream];

View File

@@ -63,7 +63,7 @@ async function jsonReader({
encoding = 'utf-8',
limitRows = undefined,
}) {
logger.info(`Reading file ${fileName}`);
logger.info(`DBGM-00056 Reading file ${fileName}`);
const downloadedFile = await download(fileName);
const fileStream = fs.createReadStream(

View File

@@ -96,7 +96,7 @@ class StringifyStream extends stream.Transform {
* @returns {Promise<writerType>} - writer object
*/
async function jsonWriter({ fileName, jsonStyle, keyField = '_key', rootField, encoding = 'utf-8' }) {
logger.info(`Writing file ${fileName}`);
logger.info(`DBGM-00057 Writing file ${fileName}`);
const stringify = new StringifyStream({ jsonStyle, keyField, rootField });
const fileStream = fs.createWriteStream(fileName, encoding);
return [stringify, fileStream];

View File

@@ -6,13 +6,13 @@ const exportDbModel = require('../utility/exportDbModel');
const logger = getLogger('analyseDb');
async function loadDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, outputDir }) {
logger.debug(`Analysing database`);
logger.debug(`DBGM-00058 Analysing database`);
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
try {
const dbInfo = await driver.analyseFull(dbhan);
logger.debug(`Analyse finished`);
logger.debug(`DBGM-00059 Analyse finished`);
await exportDbModel(dbInfo, outputDir);
} finally {

View File

@@ -132,7 +132,7 @@ async function modifyJsonLinesReader({
mergeKey = null,
mergeMode = 'merge',
}) {
logger.info(`Reading file ${fileName} with change set`);
logger.info(`DBGM-00060 Reading file ${fileName} with change set`);
const fileStream = fs.createReadStream(
fileName,

View File

@@ -29,7 +29,7 @@ async function queryReader({
// if (!sql && !json) {
// throw new Error('One of sql or json must be set');
// }
logger.info({ sql: query || sql }, `Reading query`);
logger.info({ sql: query || sql }, `DBGM-00061 Reading query`);
// else console.log(`Reading query ${JSON.stringify(json)}`);
if (!driver) {

View File

@@ -4,6 +4,7 @@ const { pluginsdir, packagedPluginsDir, getPluginBackendPath } = require('../uti
const platformInfo = require('../utility/platformInfo');
const authProxy = require('../utility/authProxy');
const { getLogger } = require('dbgate-tools');
//
const logger = getLogger('requirePlugin');
const loadedPlugins = {};
@@ -12,6 +13,10 @@ const dbgateEnv = {
dbgateApi: null,
platformInfo,
authProxy,
isProApp: () =>{
const { isProApp } = require('../utility/checkLicense');
return isProApp();
}
};
function requirePlugin(packageName, requiredPlugin = null) {
if (!packageName) throw new Error('Missing packageName in plugin');
@@ -20,7 +25,7 @@ function requirePlugin(packageName, requiredPlugin = null) {
if (requiredPlugin == null) {
let module;
const modulePath = getPluginBackendPath(packageName);
logger.info(`Loading module ${packageName} from ${modulePath}`);
logger.info(`DBGM-00062 Loading module ${packageName} from ${modulePath}`);
try {
// @ts-ignore
module = __non_webpack_require__(modulePath);

View File

@@ -11,7 +11,7 @@ async function runScript(func) {
await func();
process.exit(0);
} catch (err) {
logger.error(extractErrorLogData(err), `Error running script`);
logger.error(extractErrorLogData(err), `DBGM-00158 Error running script`);
process.exit(1);
}
}

Some files were not shown because too many files have changed in this diff Show More