Compare commits

...

860 Commits

Author SHA1 Message Date
Nybkox
576a4fc2e5 feat: add missing triggers and events factories 2025-01-03 16:26:10 +01:00
Jan Prochazka
ceb6a88964 Merge branch 'feature/workflow-refactor' 2024-12-30 15:25:39 +01:00
Jan Prochazka
a39fff8b3a ssh tunnel test disabled 2024-12-30 15:25:23 +01:00
Jan Prochazka
0cf3d5993c generated header 2024-12-30 08:28:25 +01:00
Jan Prochazka
2966484cda v6.1.3-beta.1 2024-12-30 08:23:36 +01:00
Jan Prochazka
ac3bb58106 generated workflows 2024-12-30 08:23:18 +01:00
Jan Prochazka
36e2f1bdee workflow templates 2024-12-30 08:11:18 +01:00
Jan Prochazka
70083bd870 workflows - defs support 2024-12-30 07:05:25 +01:00
Jan Prochazka
f7b39fca26 code cleanup 2024-12-29 09:57:33 +01:00
Jan Prochazka
70873e83bd docker pipeline refactor 2024-12-29 09:57:18 +01:00
Jan Prochazka
2858bba8b2 yaml replace changed 2024-12-28 21:16:52 +01:00
Jan Prochazka
bd0404fbaf fixes 2024-12-28 15:36:05 +01:00
Jan Prochazka
317f6256f6 fix 2024-12-28 15:28:34 +01:00
Jan Prochazka
6c9b738717 fix 2024-12-28 15:10:45 +01:00
Jan Prochazka
2f8c584af5 workflows 2024-12-28 15:03:38 +01:00
Jan Prochazka
d3b417679e workflow processor 2024-12-28 15:00:59 +01:00
Jan Prochazka
00dd8bfc72 fixed links to dbgate.org 2024-12-28 14:01:19 +01:00
Jan Prochazka
e1eea4adf3 fixed links to dbgate.org 2024-12-28 11:30:18 +01:00
Jan Prochazka
fd2d2e90d4 workflow refactor WIP 2024-12-27 08:34:17 +01:00
Jan Prochazka
f97b70ce45 v6.1.2 2024-12-26 08:05:17 +01:00
Jan Prochazka
45d79478fb workflow fixes 2024-12-26 08:05:03 +01:00
Jan Prochazka
a59f42bd62 v6.1.2-premium-beta.18 2024-12-26 07:47:14 +01:00
Jan Prochazka
a189fa7b6d fix 2024-12-26 07:46:58 +01:00
Jan Prochazka
794bc97207 v6.1.2-beta.17 2024-12-26 07:45:27 +01:00
Jan Prochazka
24f7def7ab v6.1.2-premium-beta.16 2024-12-26 07:45:16 +01:00
Jan Prochazka
01071c236e notarization log 2024-12-26 07:44:53 +01:00
Jan Prochazka
a6326ac9fa v6.1.2-premium-beta.15 2024-12-26 07:37:36 +01:00
Jan Prochazka
4c6d409be8 print notarization log 2024-12-26 07:37:19 +01:00
Jan Prochazka
44ad8e51a5 v6.1.2-premium-beta.14 2024-12-26 07:18:26 +01:00
Jan Prochazka
e82578adfb v6.1.2-beta.14 2024-12-26 07:17:52 +01:00
Jan Prochazka
24f28b559e pro app build 2024-12-26 07:17:24 +01:00
Jan Prochazka
b086a5d3d2 prevent double upload snap 2024-12-25 21:32:17 +01:00
Jan Prochazka
bb0b87b770 v6.1.2-beta.13 2024-12-25 20:20:26 +01:00
Jan Prochazka
aa4f82fa98 fix 2024-12-25 20:20:10 +01:00
Jan Prochazka
873ace4170 v6.1.2-beta.12 2024-12-25 19:51:24 +01:00
Jan Prochazka
5a9d8ba0a7 fix 2024-12-25 19:51:16 +01:00
Jan Prochazka
07f0fdcc1c v6.1.2-beta.11 2024-12-25 19:43:27 +01:00
Jan Prochazka
ff7ee4fb98 fix 2024-12-25 19:43:16 +01:00
Jan Prochazka
b1f5b62757 v6.1.2-beta.10 2024-12-25 19:39:37 +01:00
Jan Prochazka
a67c857204 python 3.11 fix 2024-12-25 19:39:26 +01:00
Jan Prochazka
4032c53a30 v6.1.2-beta.9 2024-12-25 12:19:09 +01:00
Jan Prochazka
0a37ed74d9 electron reverted to version 30 2024-12-25 12:18:58 +01:00
Jan Prochazka
f07efe440a v6.1.2-beta.8 2024-12-25 12:09:35 +01:00
Jan Prochazka
2d5e67802d removed mongodb-client-encryption from community build 2024-12-25 12:09:25 +01:00
Jan Prochazka
7b8e013084 v6.1.2-beta.7 2024-12-25 11:29:42 +01:00
Jan Prochazka
b69fc04b5c try to upgrade electron 2024-12-25 11:29:23 +01:00
Jan Prochazka
b6dfdd4741 v6.1.2-beta.6 2024-12-25 11:14:10 +01:00
Jan Prochazka
ff3985a542 upgraded mongodb-client-encryption 2024-12-25 11:13:54 +01:00
Jan Prochazka
9c7e1dcaf1 v6.1.2-beta.5 2024-12-25 10:44:39 +01:00
Jan Prochazka
48eb09687c app build - node version 22 2024-12-25 10:44:02 +01:00
Jan Prochazka
86b339535d updated electron builder 2024-12-25 10:42:49 +01:00
Jan Prochazka
576b6dc774 v6.1.2-beta.4 2024-12-25 10:28:26 +01:00
Jan Prochazka
18ca971f67 try to use electron notarize 2024-12-25 10:28:10 +01:00
Jan Prochazka
3916a5e6e8 fixed ssh tunnel connection test #972 #973 2024-12-25 09:49:23 +01:00
Jan Prochazka
1446fdad5e v6.1.2-beta.3 2024-12-23 09:52:31 +01:00
Jan Prochazka
1478886e04 fix 2024-12-23 09:52:22 +01:00
Jan Prochazka
54d61790a9 v6.1.2-beta.2 2024-12-23 09:27:28 +01:00
Jan Prochazka
bb638fd8db fix 2024-12-23 09:27:14 +01:00
Jan Prochazka
e3a2589517 v6.1.2-beta.1 2024-12-23 09:21:42 +01:00
Jan Prochazka
6f557a6463 print notarization error 2024-12-23 09:21:31 +01:00
Jan Prochazka
323f27f6e4 changelog 2024-12-23 07:06:36 +01:00
Jan Prochazka
8d4a38dccb v6.1.1 2024-12-23 07:06:12 +01:00
SPRINX0\prochazka
54a3b30e83 Merge branch 'master' of https://github.com/dbgate/dbgate 2024-12-20 13:45:41 +01:00
SPRINX0\prochazka
6251bcd8fd changelog 2024-12-20 13:45:39 +01:00
SPRINX0\prochazka
e42099c1a9 v6.1.1-beta.1 2024-12-20 12:57:25 +01:00
Jan Prochazka
b07ee91cf9 fixed oracle export + clickhouse test 2024-12-20 12:23:29 +01:00
Jan Prochazka
62c028fc3c export test finished 2024-12-20 11:33:28 +01:00
SPRINX0\prochazka
07ea575b0f fixed postgres export + export tests #970 2024-12-20 11:17:43 +01:00
SPRINX0\prochazka
ee78786974 #968 mysql table comment fix 2024-12-20 10:47:14 +01:00
SPRINX0\prochazka
e1814663cd trigger fulltext search 2024-12-20 10:40:57 +01:00
SPRINX0\prochazka
6ae3e019ff ux 2024-12-20 10:38:29 +01:00
Jan Prochazka
cadf539c3d fixed diff for triggers 2024-12-20 10:30:05 +01:00
Jan Prochazka
51929d7ef7 some tests fixed 2024-12-20 10:23:49 +01:00
Jan Prochazka
0edc7f077f test trigger createSql 2024-12-20 09:26:53 +01:00
SPRINX0\prochazka
2488cee7ea test definition refactor 2024-12-20 09:03:30 +01:00
SPRINX0\prochazka
edf64db69a search settings improvement 2024-12-20 08:42:30 +01:00
SPRINX0\prochazka
c65bf51dcd Merge branch 'feature/parameters-oracle' 2024-12-20 07:46:51 +01:00
Nybkox
effe235a95 feat: oracle parameters 2024-12-19 16:06:56 +01:00
Nybkox
583d1494c2 feat: fallback to position if param does not have name 2024-12-19 12:50:50 +01:00
Nybkox
983daf0d1c feat: add options position to params typing 2024-12-19 12:09:25 +01:00
Jan Prochazka
756ddf1a8b Merge pull request #969 from dbgate/feature/triggers
Feature/triggers
2024-12-19 12:09:08 +01:00
Nybkox
9ccd647d97 fix: update triggers typing to cover all oracle timing options 2024-12-19 11:55:22 +01:00
Nybkox
dc9bfdc553 fix: update pssql triggers query to match ts def 2024-12-19 10:50:39 +01:00
Nybkox
af4e91faba fix: update oracle triggers test create queries 2024-12-19 10:46:40 +01:00
Nybkox
394020157b fix: update oracle triggers tests expected values 2024-12-19 10:26:48 +01:00
Nybkox
f16aab12e1 fix: add trigger event type to oracle triggers query 2024-12-19 10:17:16 +01:00
Nybkox
e08216d6dd feat: mssql triggers tests 2024-12-19 10:13:15 +01:00
Nybkox
ebb4f8e73e fix: mssql triggers query naming 2024-12-19 10:13:04 +01:00
Nybkox
b9939e5d5f fix: correctly map mysql triggers sql output 2024-12-19 09:58:54 +01:00
Nybkox
9eb3bca8d6 feat: add other create/drop for triggers tests 2024-12-19 09:54:17 +01:00
Nybkox
022f263bf9 fix: update pssql triggers test sql 2024-12-19 09:53:47 +01:00
Nybkox
56972652d5 fix: pssql triggers - map tgtype to timing / event type correctly 2024-12-19 09:53:11 +01:00
SPRINX0\prochazka
d75b9e2688 ace-builds patch #954 2024-12-19 09:21:57 +01:00
Nybkox
d2c5440e39 feat: pssql tirgger test 2024-12-19 08:33:26 +01:00
Jan Prochazka
8ff30e426e fix - expand limited when accessing bykeyboard nav 2024-12-18 14:13:01 +01:00
Jan Prochazka
7cdbef609e arm-64 builds 2024-12-18 10:59:21 +01:00
Jan Prochazka
f6195a468d v6.1.0 2024-12-18 10:37:54 +01:00
Jan Prochazka
c23bf72d55 changelog 2024-12-18 10:35:54 +01:00
Jan Prochazka
c02441402b fixed search in list 2024-12-18 10:22:02 +01:00
Jan Prochazka
b9a8764b55 fixed search 2024-12-18 10:01:52 +01:00
Jan Prochazka
a2374c1981 v6.0.1-beta.6 2024-12-18 09:39:57 +01:00
Jan Prochazka
9cfd5af704 prevent jump to first item when focusing because of mouse 2024-12-18 09:37:40 +01:00
Jan Prochazka
a6f473b8ed better connection UX 2024-12-18 09:08:42 +01:00
Jan Prochazka
e0a74402cb dropdown for default database 2024-12-18 08:43:51 +01:00
Jan Prochazka
c6e57b278e use default database 2024-12-18 08:18:09 +01:00
Jan Prochazka
e63f1f8f09 clickAction refactor, settings - open detail after keyboard navigation 2024-12-18 08:08:45 +01:00
Nybkox
e866c019f0 feat: mysql and oracle triggers tests 2024-12-18 06:09:30 +01:00
Nybkox
5f4bd6d3e3 fix: triggers typing and yaml conv 2024-12-18 06:05:49 +01:00
SPRINX0\prochazka
57da9c9885 changelog wip 2024-12-17 17:17:31 +01:00
SPRINX0\prochazka
e6a3acf4c2 search GUI improved 2024-12-17 16:27:54 +01:00
SPRINX0\prochazka
537869e862 data grid - no rows info 2024-12-17 16:03:21 +01:00
SPRINX0\prochazka
ae2ff7b3b1 fixed display CLOB an NCLOB columns in Oracle #944 2024-12-17 15:31:10 +01:00
SPRINX0\prochazka
1db01dbdb1 optimalization 2024-12-17 14:59:44 +01:00
SPRINX0\prochazka
7988438dc7 fix search connections 2024-12-17 14:43:05 +01:00
SPRINX0\prochazka
3b32823f94 v6.0.1-beta.5 2024-12-17 14:29:56 +01:00
SPRINX0\prochazka
3370c754f2 try to fix postgres plugin 2024-12-17 14:29:45 +01:00
SPRINX0\prochazka
2d84e5a611 dataGrid align numbers right #957 2024-12-17 13:43:21 +01:00
SPRINX0\prochazka
8d5f73849e upgraded ace-builds #954 2024-12-17 12:42:09 +01:00
SPRINX0\prochazka
7a5019164a configurable search in connections 2024-12-17 12:26:50 +01:00
SPRINX0\prochazka
f5733ea2d7 handle camelCase in tokenizer 2024-12-17 10:16:19 +01:00
Nybkox
413287c691 fix: remove triggerName 2024-12-17 10:16:07 +01:00
Nybkox
9e941dfce2 feat: add triggers to ui 2024-12-17 10:16:07 +01:00
Nybkox
d32af771dc fix: add purename to trigger analysers 2024-12-17 10:16:07 +01:00
Nybkox
1c84f40bcf feat: add triggers loading message 2024-12-17 10:16:06 +01:00
Nybkox
671eba22e0 feat: add oracle triggers to analyser 2024-12-17 10:16:06 +01:00
Nybkox
c00cb3076c feat: add mysql triggers to analyser 2024-12-17 10:16:06 +01:00
Nybkox
62daa13e54 feat: add mssql triggers to analyser 2024-12-17 10:16:06 +01:00
Nybkox
4cc0a66a7d feat: add pssql triggers to analyser 2024-12-17 10:16:06 +01:00
SPRINX0\prochazka
92e13220d8 tokenized search in references 2024-12-17 10:12:45 +01:00
SPRINX0\prochazka
2a5fdd852a search tokenizer optimalization 2024-12-17 10:09:52 +01:00
SPRINX0\prochazka
7759fd862f ability to disable tab preview mode 2024-12-17 09:34:16 +01:00
SPRINX0\prochazka
ca5dd0ac30 v6.0.1-beta.4 2024-12-16 17:01:39 +01:00
SPRINX0\prochazka
18be29fd88 Merge branch 'feature/search' 2024-12-16 17:01:26 +01:00
SPRINX0\prochazka
6ea54a5b0a tokenized column search 2024-12-16 17:01:05 +01:00
SPRINX0\prochazka
5d294f6236 camel search tokenizer 2024-12-16 16:58:10 +01:00
SPRINX0\prochazka
5544b6291b search tokenizer 2024-12-16 16:50:17 +01:00
SPRINX0\prochazka
bf4841bca4 filter name optimalization 2024-12-16 16:23:55 +01:00
SPRINX0\prochazka
358a641449 refactor - not working 2024-12-16 15:53:52 +01:00
SPRINX0\prochazka
7e1ceb69ae UX fix 2024-12-16 15:36:26 +01:00
SPRINX0\prochazka
939f04ae62 optimalization 2024-12-16 15:19:54 +01:00
SPRINX0\prochazka
2fc2ac491c find in SQL text 2024-12-16 14:45:38 +01:00
SPRINX0\prochazka
20a5a50516 rename file 2024-12-16 14:10:18 +01:00
SPRINX0\prochazka
0932f4c537 search columns WIP 2024-12-16 14:08:19 +01:00
SPRINX0\prochazka
c46c9a4e16 lisgt matcher refactor 2024-12-16 13:08:59 +01:00
SPRINX0\prochazka
a20b4b3339 search settings 2024-12-16 11:47:53 +01:00
SPRINX0\prochazka
dc302f89c7 Display comments into TABLES and COLUMNS lists #755 2024-12-16 09:46:07 +01:00
SPRINX0\prochazka
f5a2d142e2 fixed [MSSQL] Foreign keys show up in a weird way #734 2024-12-16 08:26:25 +01:00
SPRINX0\prochazka
1020a5820b Merge branch 'master' of https://github.com/dbgate/dbgate 2024-12-13 16:27:17 +01:00
SPRINX0\prochazka
deb13505b8 fixed loading constraints #734 2024-12-13 16:27:14 +01:00
Jan Prochazka
71c06e31f7 sql terminator in oracle routines 2024-12-13 15:06:12 +01:00
Jan Prochazka
a203480a72 oracle function tests 2024-12-13 15:06:12 +01:00
Jan Prochazka
c0fcd681be oracle procedure & function analyser 2024-12-13 15:06:12 +01:00
Jan Prochazka
7402bb6823 test fix 2024-12-13 15:06:11 +01:00
Jan Prochazka
35d791bee4 fixed some tests 2024-12-13 15:06:11 +01:00
Jan Prochazka
ad4a599800 oracle view fix 2024-12-13 15:06:11 +01:00
Jan Prochazka
33db85f03b oracle tests 2024-12-13 15:06:11 +01:00
Jan Prochazka
ec5d05fc26 oracle fixes 2024-12-13 15:06:11 +01:00
Jan Prochazka
face7ecdb5 rename sql object oracle 2024-12-13 15:06:11 +01:00
Jan Prochazka
6035319035 uncommented oracle container for tests 2024-12-13 15:06:11 +01:00
Jan Prochazka
e698da71fb oracle tests - run on CI 2024-12-13 15:06:11 +01:00
Jan Prochazka
b466de781a oracle - rename table, deploy scripts passes 2024-12-13 15:06:11 +01:00
Jan Prochazka
e5d583310d merged tests pipelines 2024-12-13 15:06:11 +01:00
Jan Prochazka
f72dbf19c2 driver tests 2024-12-13 15:06:11 +01:00
Jan Prochazka
10538a04b4 oracle import pass 2024-12-13 15:06:11 +01:00
Jan Prochazka
d71452a397 oracle - implemented scope identity 2024-12-13 15:06:11 +01:00
Jan Prochazka
3f45bfcdd0 select scope identity test 2024-12-13 15:06:11 +01:00
Jan Prochazka
545e9863b6 fix identity test 2024-12-13 15:06:11 +01:00
Jan Prochazka
a37f2a5240 select scope identity test 2024-12-13 15:06:11 +01:00
Jan Prochazka
d6b4c0a96b oracle alter table passes 2024-12-13 15:06:11 +01:00
Jan Prochazka
d56e917b3f oracle query spec passed 2024-12-13 15:06:11 +01:00
Jan Prochazka
860c811504 oracle table-create test passes 2024-12-13 15:06:11 +01:00
Jan Prochazka
c93e8c35ec table-analyse oracle tests pass 2024-12-13 15:06:11 +01:00
Jan Prochazka
5278e5da0c fixed oracle index analyser 2024-12-13 15:06:11 +01:00
Jan Prochazka
e7e3c307fc table analyse refactor (works on oracle) 2024-12-13 15:06:11 +01:00
Jan Prochazka
e9af85038e try to use smaller oracle image 2024-12-13 15:06:11 +01:00
Jan Prochazka
bf04721e36 run oracle tests on CI 2024-12-13 15:06:11 +01:00
SPRINX0\prochazka
a810dc4204 focused connection changes 2024-12-13 10:48:42 +01:00
Jan Prochazka
faad82fc34 Merge pull request #961 from dbgate/feature/postgis-geo
feat: transform geography binary data to wkt
2024-12-12 16:47:29 +01:00
Nybkox
b8ae53db7d feat: transform geography binary data to wkt 2024-12-12 16:37:54 +01:00
Jan Prochazka
1571295ab6 focus UX 2024-12-12 16:29:12 +01:00
Jan Prochazka
d3cc3a92c1 v6.0.1-beta.3 2024-12-12 16:15:18 +01:00
Jan Prochazka
381dc6a535 Add apple silicon only build #949 2024-12-12 16:15:03 +01:00
Jan Prochazka
6b9df571af handler UX scroll problem 2024-12-12 16:06:36 +01:00
Jan Prochazka
897547371e configurable connection click, database click #959 2024-12-12 15:53:00 +01:00
Jan Prochazka
bf85a922ca first oracle test works 2024-12-10 16:03:26 +01:00
Jan Prochazka
00525f6b81 oracle tests WIP 2024-12-10 15:40:15 +01:00
SPRINX0\prochazka
6dd27eb34f basic auth check config #934 2024-12-10 13:11:03 +01:00
SPRINX0\prochazka
0b30386fee e2e test config 2024-12-10 08:48:53 +01:00
SPRINX0\prochazka
1f7f0ea8a2 Merge branch 'feature/e2e-tests' 2024-12-10 08:33:52 +01:00
SPRINX0\prochazka
0ae0cee766 renamed test job 2024-12-10 08:33:28 +01:00
SPRINX0\prochazka
4cbc7f3ae5 connect test 2024-12-09 17:22:29 +01:00
SPRINX0\prochazka
f1cd0ab689 fix tests 2024-12-09 17:13:32 +01:00
SPRINX0\prochazka
7f367a1f84 test fixes 2024-12-09 16:52:02 +01:00
SPRINX0\prochazka
5405b9bf72 cypress run server inline test 2024-12-09 16:32:56 +01:00
SPRINX0\prochazka
75f75d95a6 ux tests 2024-12-09 15:41:50 +01:00
SPRINX0\prochazka
ae5c539e31 UI tests 2024-12-09 15:33:45 +01:00
SPRINX0\prochazka
e7797cedc1 try to fix test 2024-12-09 15:11:38 +01:00
SPRINX0\prochazka
f3c3ddd73a e2e on CI 2024-12-09 14:51:56 +01:00
SPRINX0\prochazka
c201f06103 fixed workflow 2024-12-09 13:45:40 +01:00
SPRINX0\prochazka
111a3a678f cypress test WIP 2024-12-09 13:44:08 +01:00
SPRINX0\prochazka
51c4964003 fixed deps 2024-12-06 16:48:15 +01:00
SPRINX0\prochazka
5fac064a48 update deps 2024-12-06 16:41:22 +01:00
SPRINX0\prochazka
e84f45ae39 fix 2024-12-06 16:39:56 +01:00
SPRINX0\prochazka
7e119d40a4 e2e tests 2024-12-06 16:38:42 +01:00
SPRINX0\prochazka
22b47d1066 e2e test in github 2024-12-06 16:12:51 +01:00
SPRINX0\prochazka
ac0ea6a937 cy - connect to mysql 2024-12-06 15:46:23 +01:00
SPRINX0\prochazka
ed7bfe0c21 e2e tests branch 2024-12-06 14:36:59 +01:00
SPRINX0\prochazka
55cac14ecf v6.0.1-packer-beta.2 2024-12-06 14:14:39 +01:00
SPRINX0\prochazka
691635c5d1 changed volatile deps 2024-12-06 14:14:27 +01:00
SPRINX0\prochazka
9209a2d7d3 v6.0.1-packer-beta.1 2024-12-06 13:16:08 +01:00
Jan Prochazka
05b044d965 Merge pull request #953 from dbgate/featrue/callable-templates
feat: myssql callable template
2024-12-05 15:40:43 +01:00
Nybkox
e231a3a41a feat: pssql callable template 2024-12-05 15:32:33 +01:00
Nybkox
216614a4b1 feat: allow dollar dollar string in query params 2024-12-05 15:32:23 +01:00
Nybkox
ae19d14951 feat: add return type to pssql funcs 2024-12-05 15:31:50 +01:00
Nybkox
7f639361b8 feat: myssql callable template 2024-12-05 12:45:49 +01:00
SPRINX0\prochazka
ba706b85d3 changelog 2024-12-05 12:25:13 +01:00
SPRINX0\prochazka
3b91d921e8 v6.0.0 2024-12-05 12:11:30 +01:00
SPRINX0\prochazka
660555d664 v6.0.0-premium-beta.6 2024-12-05 10:20:52 +01:00
SPRINX0\prochazka
3550710f23 fix 2024-12-05 10:20:18 +01:00
SPRINX0\prochazka
1429c29537 v6.0.0-premium-beta.5 2024-12-05 10:16:26 +01:00
SPRINX0\prochazka
95113490f1 disabled aws build 2024-12-05 10:16:11 +01:00
SPRINX0\prochazka
1eafdb944a fix redirect 2024-12-05 10:15:32 +01:00
SPRINX0\prochazka
a32cd0b2ae typo 2024-12-05 09:56:24 +01:00
SPRINX0\prochazka
249fbf3f96 v6.0.0-beta.4 2024-12-04 14:39:43 +01:00
SPRINX0\prochazka
64877af64a changed build envfrom macos12 to macos14 2024-12-04 14:39:32 +01:00
SPRINX0\prochazka
ff94e46179 v6.0.0-beta.3 2024-12-04 14:19:13 +01:00
SPRINX0\prochazka
921bd4613a fix 2024-12-04 14:17:42 +01:00
SPRINX0\prochazka
6f4a49ea97 refactor 2024-12-04 14:15:13 +01:00
SPRINX0\prochazka
9029fccad4 default parameter encoding for execute scripts 2024-12-04 13:45:12 +01:00
SPRINX0\prochazka
edf3a072c5 tsql call 2024-12-04 13:27:20 +01:00
SPRINX0\prochazka
0fde8c49a7 generate SQL - execute procedure TSQL 2024-12-04 13:15:06 +01:00
SPRINX0\prochazka
767c835a8e parameters in db struct extender 2024-12-04 12:40:12 +01:00
SPRINX0\prochazka
38f0223dc0 UX 2024-12-04 10:51:26 +01:00
SPRINX0\prochazka
ec707b5af3 up-to-date sql object info tab 2024-12-04 10:44:43 +01:00
SPRINX0\prochazka
462d5e3187 removed schemaName from mysql 2024-12-04 10:23:16 +01:00
SPRINX0\prochazka
5cc4c07941 fix - mysql has not schema 2024-12-04 10:16:58 +01:00
SPRINX0\prochazka
20de78f88a table editor - mark tab unsaved 2024-12-04 10:00:03 +01:00
SPRINX0\prochazka
a63d70ca7e handled "driver not found" error 2024-12-04 09:52:06 +01:00
SPRINX0\prochazka
672d6b88b2 Merge branch 'master' of https://github.com/dbgate/dbgate 2024-12-04 07:46:06 +01:00
SPRINX0\prochazka
add1612c92 6.0 changelog 2024-12-04 07:46:04 +01:00
Jan Prochazka
96b964e609 Merge pull request #952 from dbgate/feature/mariadb-indexes-collation
feat: add collation to maria db indexes query and analyser
2024-12-04 07:07:56 +01:00
Nybkox
6b40190097 feat: add collation to maria db indexes query and analyser 2024-12-03 19:28:16 +01:00
SPRINX0\prochazka
f5b0bc5605 v6.0.0-premium-beta.2 2024-12-03 18:00:43 +01:00
SPRINX0\prochazka
64a58252e5 reverted tmp change 2024-12-03 17:56:51 +01:00
SPRINX0\prochazka
46571684f6 v6.0.0-alpha.1 2024-12-03 17:52:09 +01:00
SPRINX0\prochazka
710022539b tmp change 2024-12-03 17:51:36 +01:00
SPRINX0\prochazka
bc75d559b0 fix 2024-12-03 17:50:53 +01:00
SPRINX0\prochazka
a82ee5cc65 v6.0.0-alpha.1 2024-12-03 17:41:51 +01:00
SPRINX0\prochazka
6647dd16f8 tmp version change 2024-12-03 17:41:12 +01:00
SPRINX0\prochazka
6f4e1e07f7 fix - build pro NPM 2024-12-03 17:39:50 +01:00
SPRINX0\prochazka
5cd951a9c1 v6.0.0-alpha.1 2024-12-03 17:25:52 +01:00
SPRINX0\prochazka
f492a215b4 changed NPM versions to 6.0 2024-12-03 17:22:37 +01:00
SPRINX0\prochazka
bbd2d74a28 v5.5.7-beta.69 2024-12-03 17:00:57 +01:00
SPRINX0\prochazka
2b697e21ba Merge branch 'master' of https://github.com/dbgate/dbgate 2024-12-03 17:00:46 +01:00
SPRINX0\prochazka
3a4e4ecbdc fix 2024-12-03 17:00:44 +01:00
SPRINX0\prochazka
05cbb915d6 v5.5.7-alpha.68 2024-12-03 16:38:01 +01:00
SPRINX0\prochazka
26a2bb75fa v5.5.7-beta.67 2024-12-03 16:35:18 +01:00
Jan Prochazka
71b0bb78ec Merge pull request #947 from dbgate/feature/parameters
Feature/parameters
2024-12-03 16:27:28 +01:00
Nybkox
4e4eb39a19 fix: remove distinct from mysql params query 2024-12-03 16:19:57 +01:00
Jan Prochazka
b6399c8271 try to fix test 2024-12-03 16:05:23 +01:00
SPRINX0\prochazka
eb4a764407 correctly show focus in form view 2024-12-03 15:41:46 +01:00
Nybkox
27188eb2c5 fix: normalize type name for mysql params 2024-12-03 15:39:22 +01:00
Nybkox
57a997adc3 fix: expect decimal for numeric params in mysql tests 2024-12-03 15:39:05 +01:00
Nybkox
a72a03cc3a fix: add null safety for mysql function w/o params 2024-12-03 15:33:11 +01:00
Nybkox
bb185d9e9f fix: add join on schema for mysql params query 2024-12-03 15:32:33 +01:00
Nybkox
0298660714 fix: add shchema to mysql programables query 2024-12-03 15:32:21 +01:00
SPRINX0\prochazka
8bf1dbb10d recent & unsaved connection folder 2024-12-03 15:30:02 +01:00
SPRINX0\prochazka
8e5ef98a7c unsaved connections limit 2024-12-03 15:20:28 +01:00
SPRINX0\prochazka
72bd536aec show unsaved connections in connection tree 2024-12-03 14:54:22 +01:00
Nybkox
1a4009a6b2 chore: remove unused query results 2024-12-03 14:13:31 +01:00
Nybkox
e40357c052 fix: replace fullDataType with dataType in mssql params 2024-12-03 14:11:26 +01:00
Nybkox
222ea07cf2 fix: update mssql params queries 2024-12-03 14:11:04 +01:00
SPRINX0\prochazka
6b3f398de3 fixed keyboard navigation 2024-12-03 13:53:06 +01:00
SPRINX0\prochazka
d796fa7ff4 connection menu refactor 2024-12-03 13:37:36 +01:00
SPRINX0\prochazka
6fdfd8717f Open form removed from default actions 2024-12-03 13:31:26 +01:00
Nybkox
81cea4c0f2 fix: pssql params to string helper 2024-12-03 13:07:50 +01:00
SPRINX0\prochazka
6a64633650 fixed menu for non-mac 2024-12-03 13:07:02 +01:00
Nybkox
21702f1593 fix: do not fetch pssql params w/o name 2024-12-03 13:03:05 +01:00
Nybkox
32ddb9c4c7 fix: normalize pssql parameters datatype 2024-12-03 13:02:46 +01:00
Nybkox
10fc62ceb7 fix: do not order pssql params by name 2024-12-03 13:02:40 +01:00
Nybkox
d3018a3136 fix: correct function name in tests 2024-12-03 13:02:39 +01:00
Jan Prochazka
f9bcbd588b mac keyboard menu 2024-12-03 12:52:04 +01:00
Nybkox
5c7d2bfd85 fix: update parameter group keys 2024-12-03 11:01:47 +01:00
SPRINX0\prochazka
788f0ebf77 show error for current connection 2024-12-03 10:53:59 +01:00
Nybkox
0eca5dd95d fix: typos 2024-12-03 10:37:36 +01:00
SPRINX0\prochazka
47322b0bbb expand/collapse tables 2024-12-03 10:23:54 +01:00
SPRINX0\prochazka
518a05a6f0 expand/collapse DB with keyboard 2024-12-03 10:16:51 +01:00
Jan Prochazka
352e426e17 fix 2024-12-03 09:48:56 +01:00
Jan Prochazka
666122f265 add mysql create/drop procedure test 2024-12-03 09:46:12 +01:00
SPRINX0\prochazka
61e32f6d95 query designer moved to premium 2024-12-03 08:39:05 +01:00
SPRINX0\prochazka
7aabc8f0be refresh licenses 2024-12-02 17:36:56 +01:00
Jan Prochazka
a66dc03b99 params test - added object for test parent object match 2024-11-29 10:48:15 +01:00
Jan Prochazka
cf5ecb3150 test create SQL 2024-11-29 10:44:09 +01:00
Jan Prochazka
46c365c5cd mysql parameter types 2024-11-29 10:29:12 +01:00
Jan Prochazka
ea76751e4a postgres function tests 2024-11-29 10:06:34 +01:00
Jan Prochazka
0bef3f8e71 postgre procedure type tests 2024-11-29 09:57:46 +01:00
Jan Prochazka
c26c9fae12 mssql parameters test 2024-11-29 09:35:15 +01:00
Jan Prochazka
446c615bb8 typo 2024-11-29 08:47:13 +01:00
Nybkox
c516873541 feat: add parameters to mysql 2024-11-28 21:35:26 +01:00
Nybkox
d4326de087 fix: proceduresParameters mssql query 2024-11-28 21:07:59 +01:00
Nybkox
eca966bb90 fix: add null safety to getParametersSqlString 2024-11-28 21:07:59 +01:00
Nybkox
262b4732e3 feat: use parameterMode instead of isOutputParameter 2024-11-28 21:07:59 +01:00
Nybkox
7f9a30f568 fix: add missing conditions for pssql parameters query 2024-11-28 21:07:59 +01:00
Nybkox
a89d2e1365 fix: add missing conditions for mssql parameters queries 2024-11-28 21:07:59 +01:00
Nybkox
fcd6f6c8fc fix: correcttly map parameters to object list 2024-11-28 21:07:59 +01:00
Nybkox
8313d7f9f1 fix: update mssql parameters query to match ParameterInfo 2024-11-28 21:07:59 +01:00
Nybkox
3a12601103 feat: add parameters to createSql for pssql 2024-11-28 21:07:59 +01:00
Nybkox
926949dc89 fix: remove redundant field from ParameterInfo 2024-11-28 21:07:59 +01:00
Nybkox
6b155083ef feat: stored procedures and funciton parameters support for pssql 2024-11-28 21:07:59 +01:00
Nybkox
2b2ecac3ab feat: stored procedures and funciton parameters support for mssql 2024-11-28 21:07:59 +01:00
Jan Prochazka
35e9ff607d v5.5.7-beta.66 2024-11-28 18:47:35 +01:00
Jan Prochazka
ae037834f2 connections UX 2024-11-28 18:45:39 +01:00
Jan Prochazka
3ac24436ba focused connection widget 2024-11-28 18:17:22 +01:00
Jan Prochazka
2ca17e826c connection tree UX 2024-11-28 17:53:22 +01:00
SPRINX0\prochazka
4fb6128499 v5.5.7-premium-beta.65 2024-11-28 15:12:52 +01:00
SPRINX0\prochazka
c359332746 v5.5.7-beta.64 2024-11-28 15:11:54 +01:00
SPRINX0\prochazka
1cd8e8e376 typo 2024-11-28 15:02:27 +01:00
SPRINX0\prochazka
48ec2bdac8 connections UX 2024-11-28 14:59:19 +01:00
SPRINX0\prochazka
2283e91532 v5.5.7-beta.63 2024-11-27 18:30:05 +01:00
SPRINX0\prochazka
647894ad60 fix 2024-11-27 18:28:45 +01:00
SPRINX0\prochazka
574573abbb fix 2024-11-27 18:28:17 +01:00
SPRINX0\prochazka
a735a03cd7 fix 2024-11-27 18:27:11 +01:00
SPRINX0\prochazka
83881a0dac docker build fix 2024-11-27 18:26:05 +01:00
SPRINX0\prochazka
c04c6bbd2c v5.5.7-beta.62 2024-11-26 16:20:53 +01:00
SPRINX0\prochazka
42bbbc7ff4 fix job 2024-11-26 16:20:29 +01:00
SPRINX0\prochazka
1ecffeda71 default action improved & configurable 2024-11-26 15:19:39 +01:00
SPRINX0\prochazka
92992d1e95 ctx menu - open in new window 2024-11-26 14:19:20 +01:00
SPRINX0\prochazka
bc9df9750f single connection mode list handler 2024-11-26 12:47:36 +01:00
SPRINX0\prochazka
27e70e8031 new UX - fixed support for singledb connections 2024-11-26 12:29:16 +01:00
SPRINX0\prochazka
c823b8d19a UX 2024-11-26 09:30:37 +01:00
SPRINX0\prochazka
170ff77eec Merge branch 'feature/connection-keyboard-browse' 2024-11-26 09:11:23 +01:00
SPRINX0\prochazka
c241f5c562 focused DB UX 2024-11-26 09:04:13 +01:00
SPRINX0\prochazka
e06d964de4 connections UX WIP 2024-11-25 16:34:09 +01:00
SPRINX0\prochazka
dfdb86de6f db widget UX 2024-11-25 15:52:26 +01:00
SPRINX0\prochazka
a37b74f693 focusable databases WIP 2024-11-25 13:46:16 +01:00
SPRINX0\prochazka
398d9f15df focus connection or database WIP 2024-11-25 11:00:41 +01:00
SPRINX0\prochazka
ab7c2d7a31 set tab preview mode off in markTabUnsaved 2024-11-25 08:31:24 +01:00
Jan Prochazka
090549ff91 v5.5.7-beta.61 2024-11-22 16:20:00 +01:00
Jan Prochazka
10330c6597 Merge branch 'master' of github.com:dbgate/dbgate 2024-11-22 16:19:34 +01:00
Jan Prochazka
da2fe6a891 table & view preview mode switch 2024-11-22 16:19:29 +01:00
Jan Prochazka
01b88221c5 Merge pull request #945 from dbgate/feature-close-tab-hotkey-for-web
feat: add close tab hotkey for web
2024-11-22 15:26:43 +01:00
Nybkox
46d25710b8 feat: add close tab hotkey for web 2024-11-22 15:20:01 +01:00
Jan Prochazka
a40ec7e66b show all databases for filtered connections 2024-11-22 11:40:51 +01:00
Jan Prochazka
c8d2031d24 fix - don't call for DB list when searching in connections list 2024-11-22 11:25:46 +01:00
Jan Prochazka
4f838e0ae3 Merge branch 'feature/tab-preview-mode' 2024-11-22 10:43:24 +01:00
Jan Prochazka
c7cc1b7611 theme changes (hover bg, dark selection bg) 2024-11-22 10:22:38 +01:00
Jan Prochazka
bf67a5f13d UX 2024-11-22 10:15:43 +01:00
Jan Prochazka
e6bbe66873 use keyboard for navigation between searchbox and table list 2024-11-22 10:03:49 +01:00
Jan Prochazka
1a930acf0a tables keyboard navigation 2024-11-22 09:45:01 +01:00
Jan Prochazka
a497467137 fix 2024-11-22 08:45:30 +01:00
Jan Prochazka
b463416633 Merge branch 'master' into feature/tab-preview-mode 2024-11-22 08:35:28 +01:00
Jan Prochazka
ccf6240d65 Merge pull request #914 from dataspun/feature/geometry-autodetect-map
Update postgres plugin drivers.js
2024-11-22 08:28:40 +01:00
Jan Prochazka
5ccc12019d Merge pull request #943 from dbgate/feature-reopen-closed-tab
Feature reopen closed tab
2024-11-22 08:22:48 +01:00
Jan Prochazka
f57fa9aee9 Merge pull request #942 from dbgate/feature-prevent-spawning-same-modal
feat: prevent spawning the same modal if one instance is opened
2024-11-22 08:21:00 +01:00
Nybkox
80e841a43d feat: add reopen closed tab command 2024-11-22 02:19:41 +01:00
Nybkox
e1d759041d feat: prevent closing tab if any modal is open 2024-11-21 18:44:21 +01:00
SPRINX0\prochazka
fd6df055c0 focus tab by enter 2024-11-21 17:07:18 +01:00
SPRINX0\prochazka
669d0b9dac correctly focusing tabs 2024-11-21 16:53:46 +01:00
SPRINX0\prochazka
b9f9501e67 handle tab focus 2024-11-21 16:49:56 +01:00
SPRINX0\prochazka
4b1c021871 Revert "don't focus in tabs"
This reverts commit 90946c582d.
2024-11-21 16:12:27 +01:00
SPRINX0\prochazka
1f79627dbe change selected object when switching tab 2024-11-21 15:47:46 +01:00
SPRINX0\prochazka
dc18be07ce #938 current database is not changed after closing tab 2024-11-21 15:03:43 +01:00
Nybkox
83ac45f8cf feat: invalidate commands on modal show/close 2024-11-21 14:32:05 +01:00
SPRINX0\prochazka
c6cd865663 default action handling 2024-11-21 13:46:19 +01:00
Jan Prochazka
477636e0d7 Merge pull request #941 from dbgate/feature/word-wrap-option
feat: add word wrap option #823
2024-11-21 12:20:18 +01:00
Nybkox
77414ba934 feat: add word wrap option #823 2024-11-21 11:17:51 +01:00
SPRINX0\prochazka
86186072ed db app obj WIP 2024-11-20 17:12:54 +01:00
SPRINX0\prochazka
1216bcf9bf sql object tab refactor 2024-11-20 15:37:01 +01:00
SPRINX0\prochazka
788ea70d32 db app objc refactors 2024-11-20 15:30:47 +01:00
SPRINX0\prochazka
18de37c4e4 sql object tab 2024-11-20 14:53:06 +01:00
SPRINX0\prochazka
aeb81bd97f sql object tab - ability to show template 2024-11-20 14:33:05 +01:00
SPRINX0\prochazka
a68660f1ab sql object tab 2024-11-20 14:24:37 +01:00
SPRINX0\prochazka
5abfa85a0e script templates refactor 2024-11-20 13:51:18 +01:00
SPRINX0\prochazka
794f43d9ae call click when changing table by arrow 2024-11-20 11:25:07 +01:00
SPRINX0\prochazka
4b2f762200 v5.5.7-alpha.60 2024-11-20 10:19:40 +01:00
SPRINX0\prochazka
fc3664571b fixed resolving plugin 2024-11-20 10:18:54 +01:00
SPRINX0\prochazka
5db8f11fd6 dbappobj highlight 2024-11-20 08:16:13 +01:00
SPRINX0\prochazka
598674a7e0 show focus in data grid 2024-11-19 16:35:55 +01:00
SPRINX0\prochazka
af17eceb27 table keyboard navigation WIP 2024-11-19 15:54:18 +01:00
SPRINX0\prochazka
90946c582d don't focus in tabs 2024-11-19 14:13:58 +01:00
SPRINX0\prochazka
d619e0f961 tab preview mode - basic concept #767 2024-11-19 14:10:41 +01:00
SPRINX0\prochazka
08311145c8 docs 2024-11-19 13:04:00 +01:00
SPRINX0\prochazka
a80e37a208 maxMissingTablesRatio parameter 2024-11-19 12:55:42 +01:00
SPRINX0\prochazka
c88114cabe link to G2 2024-11-19 12:24:47 +01:00
SPRINX0\prochazka
e33f3a1492 rename view/procedure/function 2024-11-19 12:18:56 +01:00
SPRINX0\prochazka
8328fdad33 Merge branch 'master' of https://github.com/dbgate/dbgate 2024-11-19 10:35:14 +01:00
Jan Prochazka
8035380e7b commented handle autoincrement change 2024-11-19 10:34:49 +01:00
SPRINX0\prochazka
b4ea528643 fix 2024-11-19 10:32:09 +01:00
Jan Prochazka
b0012872fa force recreate table when changing autoincrement flag 2024-11-19 10:06:52 +01:00
SPRINX0\prochazka
274fb595a2 UX 2024-11-18 17:03:13 +01:00
SPRINX0\prochazka
c7ef4b9231 UX 2024-11-18 17:03:06 +01:00
Jan Prochazka
e64cfce423 fix test 2024-11-18 15:04:37 +01:00
Jan Prochazka
c0c9c7be20 mysql change column to not null 2024-11-18 14:57:02 +01:00
SPRINX0\prochazka
2ae98d0c2d Merge branch 'master' of https://github.com/dbgate/dbgate 2024-11-18 13:47:47 +01:00
SPRINX0\prochazka
a129834c16 fix 2024-11-18 13:47:42 +01:00
Jan Prochazka
71a9d6c5c0 fix 2024-11-18 13:47:23 +01:00
Jan Prochazka
1ce8f6bd1f set NOT NULL for column with default value 2024-11-18 13:41:29 +01:00
Jan Prochazka
85c4821606 added deploy test 2024-11-18 12:48:34 +01:00
Jan Prochazka
06ed9d7dfc fix 2024-11-18 12:40:53 +01:00
SPRINX0\prochazka
90de5edc99 v5.5.7-beta.59 2024-11-18 09:56:49 +01:00
SPRINX0\prochazka
72786e5dbb try to fix build 2024-11-18 09:56:38 +01:00
SPRINX0\prochazka
d92c08548b fix 2024-11-18 09:54:08 +01:00
SPRINX0\prochazka
b1893234c7 horizontal splitter improved 2024-11-18 09:25:58 +01:00
Jan Prochazka
534deff274 v5.5.7-beta.58 2024-11-18 08:31:13 +01:00
Jan Prochazka
bdeffea79c remove cpu-features from build 2024-11-18 08:30:58 +01:00
Jan Prochazka
ab2fdf26d2 v5.5.7-beta.57 2024-11-15 18:02:03 +01:00
Jan Prochazka
0c902f037b removed bufferutil 2024-11-15 18:01:53 +01:00
Jan Prochazka
c0595aec0a code style 2024-11-15 17:16:11 +01:00
Jan Prochazka
6fdb20fc34 v5.5.7-beta.56 2024-11-15 17:14:48 +01:00
Jan Prochazka
8e74031fb1 try to fix electron app 2024-11-15 17:14:32 +01:00
Jan Prochazka
81e4b947b6 apidoc 2024-11-15 16:48:33 +01:00
Jan Prochazka
c5d23410f4 docs 2024-11-15 16:28:00 +01:00
Jan Prochazka
f23575c405 api documentation WIP 2024-11-15 16:09:04 +01:00
Jan Prochazka
8cb98cf643 apidoc 2024-11-15 14:45:31 +01:00
Jan Prochazka
b09a558670 v5.5.7-beta.55 2024-11-15 14:01:00 +01:00
Jan Prochazka
abc3c5f880 fixed windows build 2024-11-15 13:57:24 +01:00
Jan Prochazka
1c0d966c3c fix 2024-11-15 13:52:01 +01:00
Jan Prochazka
20d7264aab v5.5.7-beta.54 2024-11-15 12:10:18 +01:00
Jan Prochazka
4cf987b89a SSH optimization 2024-11-15 12:09:24 +01:00
Jan Prochazka
5bcd3f807d v5.5.7-alpha.53 2024-11-15 12:02:11 +01:00
Jan Prochazka
9e04f2b9c6 fixed typo 2024-11-15 12:01:32 +01:00
Jan Prochazka
a7ceb8951c v5.5.7-alpha.52 2024-11-15 11:51:27 +01:00
Jan Prochazka
7392b223f4 bundling refactor 2024-11-15 11:49:25 +01:00
SPRINX0\prochazka
51ce4f1bb5 fix 2024-11-14 16:48:55 +01:00
SPRINX0\prochazka
059c310aaf fix 2024-11-14 16:38:43 +01:00
SPRINX0\prochazka
11dad8ced3 Merge branch 'feature/npm-refactor-poc' 2024-11-14 15:40:17 +01:00
SPRINX0\prochazka
18ed0fc020 v5.5.7-alpha.51 2024-11-14 14:38:52 +01:00
SPRINX0\prochazka
eaa1b73851 dbgatebuiltin keyword for builtin plugins 2024-11-14 14:37:24 +01:00
SPRINX0\prochazka
5c2c24e009 v5.5.7-alpha.50 2024-11-14 14:14:05 +01:00
SPRINX0\prochazka
1957531600 removed dbgateplugin keyword for builtin plugins 2024-11-14 14:13:47 +01:00
SPRINX0\prochazka
e47a165e11 v5.5.7-alpha.49 2024-11-14 13:40:16 +01:00
SPRINX0\prochazka
3ca1adcb48 docs 2024-11-14 13:39:43 +01:00
SPRINX0\prochazka
66bf1c847a docs 2024-11-14 13:38:26 +01:00
SPRINX0\prochazka
47eea9b9b3 readme 2024-11-14 13:37:46 +01:00
SPRINX0\prochazka
5c5a5f3b53 documentation 2024-11-14 12:53:19 +01:00
SPRINX0\prochazka
1c7729a797 added new plugins to dbgate-serve 2024-11-14 12:35:44 +01:00
SPRINX0\prochazka
cd06f13fcb optimized NPM plugins 2024-11-14 12:27:17 +01:00
SPRINX0\prochazka
632870d448 v5.5.7-alpha.48 2024-11-14 10:48:59 +01:00
SPRINX0\prochazka
762055379a v5.5.7-alpha.47 2024-11-14 10:12:59 +01:00
SPRINX0\prochazka
7374749340 v5.5.7-alpha.46 2024-11-14 10:11:05 +01:00
SPRINX0\prochazka
e379be0107 fix 2024-11-14 10:10:15 +01:00
SPRINX0\prochazka
2784053b83 v5.5.7-alpha.45 2024-11-14 08:28:14 +01:00
SPRINX0\prochazka
12c4a0d498 fix 2024-11-14 08:25:44 +01:00
SPRINX0\prochazka
35b5ea138d v5.5.7-alpha.44 2024-11-14 08:12:47 +01:00
SPRINX0\prochazka
d75e1fc660 fix 2024-11-14 08:12:34 +01:00
SPRINX0\prochazka
3ab6df5da2 v5.5.7-alpha.43 2024-11-13 16:22:38 +01:00
SPRINX0\prochazka
8d4fc391a4 fix 2024-11-13 16:21:08 +01:00
SPRINX0\prochazka
35f9fc3741 v5.5.7-alpha.42 2024-11-13 16:17:50 +01:00
SPRINX0\prochazka
b465f3eb99 v5.5.7-alpha.41 2024-11-13 16:15:16 +01:00
SPRINX0\prochazka
c4e6a90722 v5.5.7-alpha.40 2024-11-13 16:11:11 +01:00
SPRINX0\prochazka
9cc4af2b56 build scripts 2024-11-13 16:10:05 +01:00
SPRINX0\prochazka
5ccdd7633b readme 2024-11-13 15:27:12 +01:00
SPRINX0\prochazka
880f4403cb removed publish dbgate (obsolete) 2024-11-13 15:09:36 +01:00
SPRINX0\prochazka
cba391904a mongodb-client-encryption moved to optionalDependencies 2024-11-13 15:05:30 +01:00
SPRINX0\prochazka
0fc397ace5 v5.5.7-beta.39 2024-11-13 14:53:01 +01:00
SPRINX0\prochazka
56a241b7f4 try to fix build 2024-11-13 14:52:48 +01:00
SPRINX0\prochazka
97eb999e4c v5.5.7-beta.38 2024-11-13 14:34:50 +01:00
SPRINX0\prochazka
9deaa89f21 try to fix build 2024-11-13 14:34:38 +01:00
SPRINX0\prochazka
74bae65e32 fix 2024-11-13 14:26:49 +01:00
SPRINX0\prochazka
7091917578 v5.5.7-beta.37 2024-11-13 13:57:04 +01:00
SPRINX0\prochazka
bbf72d9ed7 removed obsolete package 2024-11-13 13:56:56 +01:00
SPRINX0\prochazka
552d10ef48 v5.5.7-beta.36 2024-11-13 13:34:46 +01:00
SPRINX0\prochazka
7c5479157a fix 2024-11-13 13:34:35 +01:00
SPRINX0\prochazka
2463dba380 v5.5.7-beta.35 2024-11-13 13:28:07 +01:00
SPRINX0\prochazka
2f9209a92d added app to workspace when building electron app 2024-11-13 13:27:52 +01:00
SPRINX0\prochazka
370bd92518 v5.5.7-beta.34 2024-11-13 12:32:46 +01:00
SPRINX0\prochazka
ec083924fc v5.5.7-alpha.33 2024-11-13 12:32:11 +01:00
SPRINX0\prochazka
22b450f7e0 fix 2024-11-13 12:31:28 +01:00
SPRINX0\prochazka
1dd73e7319 v5.5.7-alpha.32 2024-11-13 11:00:18 +01:00
SPRINX0\prochazka
c4fe4b40dd v5.5.7-beta.31 2024-11-13 10:59:59 +01:00
SPRINX0\prochazka
251137ac60 centralized dependencies 2024-11-13 10:57:13 +01:00
SPRINX0\prochazka
0ad7c99274 removed optional dependencies from API and app 2024-11-13 10:48:02 +01:00
SPRINX0\prochazka
f53142d98a removed native module tooling 2024-11-13 10:44:24 +01:00
SPRINX0\prochazka
1f868523b0 NPM plugin refactor 2024-11-13 10:31:01 +01:00
SPRINX0\prochazka
94db02db2e v5.5.7-beta.30 2024-11-13 09:07:09 +01:00
SPRINX0\prochazka
9f0e06e663 native module refactor POC 2024-11-13 09:06:51 +01:00
SPRINX0\prochazka
c6dab85fc2 fixes 2024-11-12 17:38:18 +01:00
SPRINX0\prochazka
59aa2e3f33 v5.5.7-alpha.29 2024-11-12 09:25:27 +01:00
SPRINX0\prochazka
21b26773e6 fix 2024-11-12 09:23:02 +01:00
SPRINX0\prochazka
f308c5f6b0 v5.5.7-alpha.28 2024-11-12 08:51:23 +01:00
SPRINX0\prochazka
2763b6028a data duplicator - folder settings 2024-11-12 08:35:59 +01:00
SPRINX0\prochazka
a3df6d6e7d v5.5.7-alpha.27 2024-11-12 08:27:28 +01:00
SPRINX0\prochazka
473bfcbec5 shell.executeQuery supports sqlFile 2024-11-12 08:26:24 +01:00
SPRINX0\prochazka
8976c9e653 v5.5.7-alpha.26 2024-11-12 08:17:01 +01:00
SPRINX0\prochazka
86795dcc63 fix 2024-11-12 08:16:47 +01:00
SPRINX0\prochazka
9b90f15621 v5.5.7-alpha.25 2024-11-12 08:04:11 +01:00
Jan Prochazka
7d0d9d3e22 try to fix tests 2024-11-11 16:22:18 +01:00
Jan Prochazka
17f0248a3e try to remove tests 2024-11-11 16:14:35 +01:00
Jan Prochazka
25d3dcad59 fix 2024-11-11 16:09:18 +01:00
Jan Prochazka
cbd857422f clickhouse test - removed from scripts deploy 2024-11-11 15:58:10 +01:00
Jan Prochazka
e65b4d0c2a typo 2024-11-11 15:50:46 +01:00
Jan Prochazka
6bcebb63e4 create deploy journal is now warning 2024-11-11 15:50:32 +01:00
Jan Prochazka
ac7708138c added run_count to script driver deployer 2024-11-11 15:47:46 +01:00
Jan Prochazka
9d8ec9cc6b script base deployer 2024-11-11 15:37:58 +01:00
SPRINX0\prochazka
1b8a2cb923 v5.5.7-premium-beta.24 2024-11-11 13:45:01 +01:00
SPRINX0\prochazka
a97ab9c09e Merge branch 'master' of https://github.com/dbgate/dbgate 2024-11-11 13:44:29 +01:00
Jan Prochazka
9a73eb3620 fixed deploy 2024-11-11 13:20:04 +01:00
SPRINX0\prochazka
f50e460335 v5.5.7-premium-beta.23 2024-11-11 13:10:46 +01:00
SPRINX0\prochazka
fa72d9a39f preloaded rows fixes 2024-11-11 13:05:19 +01:00
SPRINX0\prochazka
75b4f49e31 db alter plan improvements 2024-11-11 11:07:57 +01:00
SPRINX0\prochazka
a069093f6b indexes in yaml model 2024-11-11 08:42:00 +01:00
SPRINX0\prochazka
62c741198a deploy: ignoreNameRegex 2024-11-11 08:11:44 +01:00
SPRINX0\prochazka
0266d912e0 fix 2024-11-08 16:05:01 +01:00
SPRINX0\prochazka
55bc0fc93f Merge branch 'master' of https://github.com/dbgate/dbgate 2024-11-08 15:36:09 +01:00
SPRINX0\prochazka
47c00d7eb0 alter plan utility functions 2024-11-08 15:36:07 +01:00
Jan Prochazka
ad9fac861e fixed deploy tests 2024-11-08 14:12:56 +01:00
SPRINX0\prochazka
14afd08fcb removed experimental status of deploy 2024-11-08 12:56:01 +01:00
SPRINX0\prochazka
319580554f fixed mssql primary key respects column order 2024-11-08 12:13:00 +01:00
SPRINX0\prochazka
c750bd04ad export model - filter by schema 2024-11-08 09:12:28 +01:00
SPRINX0\prochazka
bdd55d8432 export model - schema filter WIP 2024-11-07 17:38:22 +01:00
SPRINX0\prochazka
98464e414b v5.5.7-beta.22 2024-11-07 17:18:37 +01:00
SPRINX0\prochazka
2f2d9c45a3 fixed schema select #924 2024-11-07 17:17:56 +01:00
SPRINX0\prochazka
3665a0d064 Merge branch 'feature/duplicator-weak-refs' 2024-11-07 16:56:24 +01:00
SPRINX0\prochazka
c19c69266a shell connection fixes 2024-11-07 16:55:39 +01:00
SPRINX0\prochazka
bafa2c2fff data duplicator fixes 2024-11-07 16:33:57 +01:00
Jan Prochazka
2d823140b9 data duplicator fix 2024-11-07 14:05:13 +01:00
Jan Prochazka
1fb4a06092 data duplicator - handle weak refs 2024-11-07 13:42:41 +01:00
SPRINX0\prochazka
cb450a0313 duplicator weak refs WIP 2024-11-07 13:15:33 +01:00
SPRINX0\prochazka
7aaf6bb024 drop all objects 2024-11-07 10:07:32 +01:00
SPRINX0\prochazka
6a02ba3220 drop all objects WIP 2024-11-06 17:06:58 +01:00
SPRINX0\prochazka
83610783e0 fixed on message click 2024-11-06 16:48:22 +01:00
SPRINX0\prochazka
cec26b0614 filterable messages view 2024-11-06 16:29:20 +01:00
SPRINX0\prochazka
fbf288198b message view UX 2024-11-06 15:39:51 +01:00
SPRINX0\prochazka
193940fd63 view JSON log message 2024-11-06 15:25:43 +01:00
SPRINX0\prochazka
bd169c316a messages view improvements 2024-11-06 14:13:43 +01:00
SPRINX0\prochazka
5315f65cfb table editor fixes 2024-11-06 13:00:53 +01:00
SPRINX0\prochazka
5a859d81d3 Merge branch 'master' of https://github.com/dbgate/dbgate 2024-11-06 12:19:45 +01:00
SPRINX0\prochazka
904fc4d500 missing file 2024-11-06 12:19:42 +01:00
Jan Prochazka
634fe18127 db diff fix 2024-11-06 12:01:43 +01:00
Jan Prochazka
89a9cc4380 Revert "reverted testEqualConstraints"
This reverts commit 57c62fbe27.
2024-11-06 11:21:39 +01:00
Jan Prochazka
57c62fbe27 reverted testEqualConstraints 2024-11-06 11:12:57 +01:00
SPRINX0\prochazka
590eff1e3b Merge branch 'feature/export-model' 2024-11-06 10:54:31 +01:00
SPRINX0\prochazka
a71309a604 fix 2024-11-06 10:52:30 +01:00
SPRINX0\prochazka
343e983b64 export model feature 2024-11-06 10:47:49 +01:00
SPRINX0\prochazka
e31a52b659 export model WIP 2024-11-04 17:03:52 +01:00
SPRINX0\prochazka
41162ee2c3 handling conid==__model 2024-11-04 15:21:36 +01:00
SPRINX0\prochazka
55745c18e9 invalid token fix 2024-11-04 14:17:28 +01:00
SPRINX0\prochazka
7d6b77ad2a v5.5.7-packer-beta.21 2024-11-04 11:03:33 +01:00
SPRINX0\prochazka
90813b23d8 constraint check fixed 2024-11-04 11:03:19 +01:00
SPRINX0\prochazka
a999d29b1d v5.5.7-packer-beta.20 2024-11-04 10:14:32 +01:00
SPRINX0\prochazka
f6f9b0a61a fix 2024-11-04 10:14:13 +01:00
SPRINX0\prochazka
724edf44cb v5.5.7-packer-beta.19 2024-11-04 09:29:29 +01:00
SPRINX0\prochazka
07248ca49f v5.5.7-packer-beta.18 2024-11-04 09:07:27 +01:00
SPRINX0\prochazka
3a068c37b5 build fix 2024-11-04 09:07:12 +01:00
SPRINX0\prochazka
df44e5f6e9 #925 by default without query parameters 2024-11-04 08:59:29 +01:00
SPRINX0\prochazka
9328d966ba v5.5.7-packer-beta.17 2024-11-04 08:44:08 +01:00
SPRINX0\prochazka
1a293deec7 v5.5.7-packer.17 2024-11-04 08:40:11 +01:00
SPRINX0\prochazka
665a70ba3d load model transform 2024-11-01 16:58:25 +01:00
SPRINX0\prochazka
967587c8e4 model transform 2024-11-01 16:57:48 +01:00
SPRINX0\prochazka
4927c13e55 model transform 2024-11-01 15:58:52 +01:00
SPRINX0\prochazka
1f9f997748 compare tab => premium 2024-11-01 13:28:40 +01:00
SPRINX0\prochazka
521e4ea3a2 v5.5.7-alpha.16 2024-11-01 12:39:58 +01:00
SPRINX0\prochazka
941843e4c0 generateDeploySql added to dbgateApi 2024-11-01 12:38:53 +01:00
SPRINX0\prochazka
1434a42421 readme 2024-11-01 10:21:49 +01:00
SPRINX0\prochazka
8f57d3a316 redshift driver should be only for premium 2024-11-01 10:17:13 +01:00
SPRINX0\prochazka
a74b789a8c diff tools fixes 2024-11-01 10:06:23 +01:00
SPRINX0\prochazka
ac4dd37249 fixed YESTERDAY filter parser 2024-11-01 10:06:13 +01:00
Jan Prochazka
75b3b4e012 comment 2024-11-01 08:36:21 +01:00
Jan Prochazka
188ab4c483 fixed view redeploy 2024-11-01 08:34:40 +01:00
Jan Prochazka
f9a562808d alter view test 2024-10-31 17:01:02 +01:00
Jan Prochazka
4ea763124b fixed undelete view for SQL server 2024-10-31 16:34:00 +01:00
Jan Prochazka
836d15c68f delete columns 2024-10-31 15:42:07 +01:00
Jan Prochazka
8ce4c0a7ce test refactor 2024-10-31 15:04:34 +01:00
Jan Prochazka
9613c2c410 undelete view 2024-10-31 15:01:01 +01:00
Jan Prochazka
e5d4bbadc1 deploy test refactor 2024-10-31 14:20:11 +01:00
Jan Prochazka
5d4d2a447a dbdeploy: undelete table works 2024-10-31 14:03:44 +01:00
Jan Prochazka
d905962298 v5.5.7-beta.15 2024-10-31 13:02:24 +01:00
Jan Prochazka
4ab9ad6881 deleted columns prefix 2024-10-31 12:50:08 +01:00
Jan Prochazka
2ce20b5fac mark view as deleted 2024-10-31 12:35:32 +01:00
Jan Prochazka
81297383cb support for rename SQL object (mssql, postgres) 2024-10-31 10:48:32 +01:00
Jan Prochazka
2aed60390c fixed default value diff 2024-10-31 10:00:40 +01:00
Jan Prochazka
67386da136 fixed clickhouse test - skip nullability check 2024-10-31 08:59:12 +01:00
Jan Prochazka
bdc40c2c02 test fix 2024-10-31 08:45:12 +01:00
SPRINX0\prochazka
1d916e43d5 fix - community app should not check license 2024-10-31 08:32:57 +01:00
SPRINX0\prochazka
98bff4925a Merge branch 'master' of https://github.com/dbgate/dbgate 2024-10-30 09:22:45 +01:00
SPRINX0\prochazka
9af2c80b05 v5.5.7-beta.14 2024-10-30 09:22:29 +01:00
Jan Prochazka
3a03c82f8d allowTableMarkDropped WIP 2024-10-30 08:58:05 +01:00
Jan Prochazka
de66e75eb2 undrop WIP 2024-10-29 18:12:05 +01:00
Jan Prochazka
10d79dca4d added test - table is never dropped in deploy db 2024-10-29 18:02:31 +01:00
SPRINX0\prochazka
460f511bf6 autoIndexForeignKeysTransform for db deployer 2024-10-29 16:59:28 +01:00
SPRINX0\prochazka
81207f95d8 fixed default value comparing 2024-10-29 16:35:22 +01:00
Jan Prochazka
3d3aca3290 fixed clickhouse default value handling 2024-10-29 16:18:57 +01:00
Jan Prochazka
d661b9f6a4 fixed mssql defaults 2024-10-29 16:07:51 +01:00
Jan Prochazka
7deeb78d69 default value test + fixed clickhouse default support 2024-10-29 15:52:36 +01:00
Jan Prochazka
4dcf47b81f test fix 2024-10-29 15:22:13 +01:00
Jan Prochazka
a674b9b3a1 check default value 2024-10-29 15:05:43 +01:00
SPRINX0\prochazka
b2aa4d9377 table diff - default values tests 2024-10-29 14:44:28 +01:00
SPRINX0\prochazka
bc4a595815 handler anonymousPrimaryKey for deploy 2024-10-29 14:38:22 +01:00
SPRINX0\prochazka
2704825d03 db deploy fixes 2024-10-29 14:28:26 +01:00
SPRINX0\prochazka
456d3ba42e Merge branch 'master' of https://github.com/dbgate/dbgate 2024-10-29 08:27:04 +01:00
SPRINX0\prochazka
803a272331 #926 fixed App crashes when trying to 'Open Structure' in a readonly connection 2024-10-29 08:26:55 +01:00
Jan Prochazka
89e4bfe3e1 v5.5.7-packer-beta.13 2024-10-25 13:32:47 +02:00
Jan Prochazka
697440693e v5.5.7-packer-beta.12 2024-10-25 12:41:07 +02:00
Jan Prochazka
c0cd04a96f packer - install-packages.sh script 2024-10-25 12:40:51 +02:00
Jan Prochazka
c92f02bdda v5.5.7-packer-beta.11 2024-10-25 11:55:40 +02:00
Jan Prochazka
c4766d163c install fix 2024-10-25 11:54:32 +02:00
Jan Prochazka
18889092aa v5.5.7-packer-beta.10 2024-10-25 11:19:13 +02:00
Jan Prochazka
adafa3c5c4 delete old AMIs script 2024-10-25 11:17:52 +02:00
Jan Prochazka
91994016a7 v5.5.7-packer-beta.9 2024-10-25 09:35:08 +02:00
Jan Prochazka
dfcf253217 v5.5.7-packer-beta.8 2024-10-25 09:17:57 +02:00
Jan Prochazka
bd3503912f Merge tag 'v5.5.7-packer-beta.7'
v5.5.7-packer-beta.7
2024-10-25 09:17:17 +02:00
Jan Prochazka
b062c5fd66 aws region missing 2024-10-25 09:07:28 +02:00
SPRINX0\prochazka
b1cd60d0dd v5.5.7-packer-beta.7 2024-10-24 16:19:28 +02:00
SPRINX0\prochazka
67ac1a1c8d v5.5.7-packet-beta.7 2024-10-24 16:18:13 +02:00
SPRINX0\prochazka
c166eab2e8 v5.5.7-packer-beta.6 2024-10-24 16:06:40 +02:00
SPRINX0\prochazka
c6b3ced493 v5.5.7-packer-beta.5 2024-10-24 16:06:07 +02:00
SPRINX0\prochazka
4e922e806d cloud upgrade from github releases 2024-10-24 15:56:56 +02:00
Jan Prochazka
0e087565b3 v5.5.7-packer-beta.4 2024-10-24 12:17:46 +02:00
Jan Prochazka
104b25d898 v5.5.7-packer-beta.3 2024-10-24 12:11:46 +02:00
Jan Prochazka
9bdff41ec1 fixed packer build 2024-10-24 12:11:35 +02:00
Jan Prochazka
d27b0a7be3 v5.5.7-packer-beta.2 2024-10-24 12:00:55 +02:00
Jan Prochazka
076b20ef6d fix 2024-10-24 12:00:42 +02:00
Jan Prochazka
d93d107039 v5.5.7-packer-beta.1 2024-10-24 11:59:26 +02:00
Jan Prochazka
708dcfd088 packer pipeline 2024-10-24 11:58:44 +02:00
Jan Prochazka
14501a70b9 reporting SSH tunnel errors 2024-10-23 16:35:56 +02:00
Jan Prochazka
3b82679c2d port changed 2024-10-23 12:14:28 +02:00
SPRINX0\prochazka
48d4fb4fec UX 2024-10-23 09:49:10 +02:00
SPRINX0\prochazka
a03ca73d93 admin page workflow 2024-10-23 09:41:39 +02:00
SPRINX0\prochazka
a46e592cfb workflow changes 2024-10-22 17:06:56 +02:00
SPRINX0\prochazka
634bea1bda missing logging 2024-10-22 16:33:47 +02:00
SPRINX0\prochazka
3dfa23a30c sortable object list controls #922 2024-10-22 16:09:48 +02:00
SPRINX0\prochazka
24408dd7c2 empty value testing 2024-10-22 15:23:32 +02:00
Jan Prochazka
32c7919885 special pages workflow changed 2024-10-21 17:36:46 +02:00
Jan Prochazka
967615b6e5 special page refactor 2024-10-21 13:18:16 +02:00
Jan Prochazka
aee3a28465 refactor 2024-10-21 13:07:29 +02:00
Jan Prochazka
3fdf27f820 special page refactor 2024-10-21 13:02:50 +02:00
Jan Prochazka
e9302c7d6f performance fix 2024-10-18 15:08:59 +02:00
Jan Prochazka
c38fe83e48 aws ubuntu layout 2024-10-18 13:57:55 +02:00
Jan Prochazka
adfc427d25 aws AMI layout 2024-10-18 13:32:17 +02:00
Jan Prochazka
3912a58127 build ami script 2024-10-18 11:35:33 +02:00
SPRINX0\prochazka
720d25e838 changelog 2024-10-17 13:33:14 +02:00
SPRINX0\prochazka
bc1d77a6f8 v5.5.6 2024-10-17 13:25:01 +02:00
SPRINX0\prochazka
00a8d472ff v5.5.6-beta.11 2024-10-17 12:53:56 +02:00
SPRINX0\prochazka
5076ee0463 v5.5.6 2024-10-17 12:52:26 +02:00
SPRINX0\prochazka
4a5ddd65f4 v5.5.6-premium-beta.10 2024-10-17 11:58:06 +02:00
SPRINX0\prochazka
ec3c224f44 show storage connection error 2024-10-17 11:56:24 +02:00
SPRINX0\prochazka
39b99ecf8f v5.5.6-premium-beta.9 2024-10-15 16:51:11 +02:00
SPRINX0\prochazka
50232f9b90 v5.5.6-premium-beta.8 2024-10-15 16:24:02 +02:00
SPRINX0\prochazka
66e8cc6e1d v5.5.6-premium-beta.7 2024-10-15 15:10:16 +02:00
SPRINX0\prochazka
f6b4c94d00 fix 2024-10-15 15:10:05 +02:00
SPRINX0\prochazka
f3ce240bcd v5.5.6-premium-beta.6 2024-10-15 14:38:18 +02:00
SPRINX0\prochazka
7be9bf1bab logging 2024-10-15 14:37:16 +02:00
SPRINX0\prochazka
d39871be70 v5.5.6-premium-beta.5 2024-10-15 13:55:19 +02:00
SPRINX0\prochazka
3324eec011 support for DEBUG_PRINT_ENV_VARIABLES 2024-10-15 13:55:00 +02:00
SPRINX0\prochazka
dd9790fca5 removed deprecated mongodb useUnifiedTopology options 2024-10-15 12:30:04 +02:00
SPRINX0\prochazka
41a3769c5f fixed headers sent error 2024-10-15 12:28:47 +02:00
SPRINX0\prochazka
42601ff960 fixed mongo update for mongo4 #916 2024-10-15 10:38:44 +02:00
SPRINX0\prochazka
e54cffbaf3 v5.5.6-beta.4 2024-10-15 10:21:03 +02:00
Jan Prochazka
f8dbad362c Merge branch 'master' of github.com:dbgate/dbgate 2024-10-15 10:20:08 +02:00
SPRINX0\prochazka
925db50418 log 2024-10-15 10:08:15 +02:00
SPRINX0\prochazka
f40db68579 process should exit on unhandled exception 2024-10-15 10:05:18 +02:00
Jan Prochazka
3afde5f1fa v5.5.6-beta.3 2024-10-15 09:44:22 +02:00
Jan Prochazka
b631519009 exit forked process after PIPE is closed #917 #915 2024-10-15 09:41:02 +02:00
SPRINX0\prochazka
f05c60c628 v5.5.6-premium-beta.2 2024-10-14 17:53:51 +02:00
SPRINX0\prochazka
e235de6d56 versionText field added 2024-10-14 17:48:04 +02:00
SPRINX0\prochazka
da47c437e0 v5.5.6-beta.1 2024-10-14 14:29:14 +02:00
SPRINX0\prochazka
f0048bc6cf correctly close connections #920 2024-10-14 14:28:26 +02:00
SPRINX0\prochazka
f80ae284fa correctly close idle connections #920 2024-10-14 14:03:07 +02:00
Jan Prochazka
06753ff312 v5.5.5 2024-10-11 10:22:00 +02:00
Jan Prochazka
1e07614306 changelog 2024-10-11 10:20:37 +02:00
Jan Prochazka
2d716ba43a v5.5.5-premium-beta.5 2024-10-11 09:59:28 +02:00
Jan Prochazka
c39dc6295d css fix 2024-10-11 09:58:54 +02:00
Jan Prochazka
7557666135 v5.5.5-premium-beta.4 2024-10-11 09:40:30 +02:00
SPRINX0\prochazka
5ee65124cb v5.5.5-beta.3 2024-10-10 16:20:01 +02:00
SPRINX0\prochazka
ab79617377 v5.5.5-premium-beta.2 2024-10-10 16:19:41 +02:00
SPRINX0\prochazka
3b2a47a4ef login admin/user switch 2024-10-10 15:39:56 +02:00
SPRINX0\prochazka
7b4d9d8717 quick login buttons 2024-10-10 15:17:54 +02:00
SPRINX0\prochazka
4c9734ac7f fixed hiding columns #887 #911 2024-10-10 13:59:41 +02:00
SPRINX0\prochazka
152e8a80ab title 2024-10-10 13:48:42 +02:00
SPRINX0\prochazka
98a348b091 CSS 2024-10-10 13:37:08 +02:00
SPRINX0\prochazka
e448f63ec3 store query parameters 2024-10-10 13:21:48 +02:00
SPRINX0\prochazka
0b13850eca query parameters #913 2024-10-10 13:09:34 +02:00
SPRINX0\prochazka
de97404602 redis load keys fix 2024-10-10 07:53:41 +02:00
Jeremy
2c0b76fb3f Update drivers.js
add geometry to spatial types
2024-10-09 14:20:44 -07:00
SPRINX0\prochazka
cbd6ce7872 v5.5.5-premium-beta.1 2024-10-09 16:08:30 +02:00
SPRINX0\prochazka
3c479eb33c changeset function 2024-10-09 16:07:18 +02:00
SPRINX0\prochazka
beaff158cc query result - use editor behaviour from driver 2024-10-08 15:12:12 +02:00
SPRINX0\prochazka
6806620d90 fixed datetime filtering #912 2024-10-08 14:38:46 +02:00
SPRINX0\prochazka
4459347169 fix 2024-10-08 13:48:50 +02:00
SPRINX0\prochazka
98e01497e9 checkout source parameter 2024-10-08 13:10:48 +02:00
SPRINX0\prochazka
4ed4c8c32c fix 2024-10-08 12:49:15 +02:00
SPRINX0\prochazka
620acecdff trial days left warning 2024-10-08 12:43:01 +02:00
SPRINX0\prochazka
2d214cfdb3 fix 2024-10-08 10:02:03 +02:00
SPRINX0\prochazka
cd36259739 AWS IAM auth for PostgreSQL 2024-10-08 09:55:51 +02:00
SPRINX0\prochazka
d049d8c571 AWS IAM connection for MySQL 2024-10-08 09:30:51 +02:00
SPRINX0\prochazka
7c51fcad96 AWS IAM WIP 2024-10-07 16:05:12 +02:00
SPRINX0\prochazka
1948c8ef89 fix export dialog for useSeparateSchemas=true 2024-10-07 08:22:35 +02:00
SPRINX0\prochazka
f2b2ac6fd0 v5.5.4 2024-10-04 15:29:08 +02:00
SPRINX0\prochazka
4098f63ce2 changelog 2024-10-04 15:19:31 +02:00
SPRINX0\prochazka
4d903abd85 changelog 2024-10-04 15:17:42 +02:00
SPRINX0\prochazka
f00eb2d3ef v5.5.4-beta.10 2024-10-04 14:54:44 +02:00
SPRINX0\prochazka
6752dcfd39 fixed crash #908 2024-10-04 14:54:29 +02:00
SPRINX0\prochazka
c3022eb80a v5.5.4-premium-beta.9 2024-10-04 14:42:45 +02:00
SPRINX0\prochazka
2f6cbf25df v5.5.5 2024-10-04 14:42:03 +02:00
SPRINX0\prochazka
8dfc2e7bcd handle expired license 2024-10-04 14:32:46 +02:00
Jan Prochazka
fa0ad477cc fixed crash #908 2024-10-04 07:58:32 +02:00
SPRINX0\prochazka
ac6a68c38d v5.5.4-alpha.8 2024-10-03 09:58:24 +02:00
SPRINX0\prochazka
25223471e7 fix deployer 2024-10-03 09:58:03 +02:00
SPRINX0\prochazka
56535b1e6f v5.5.4-alpha.7 2024-10-02 16:04:00 +02:00
SPRINX0\prochazka
131c51f7c4 v5.4.4-alpha.7 2024-10-02 16:02:02 +02:00
SPRINX0\prochazka
a27a2077ed comment 2024-10-02 10:45:06 +02:00
SPRINX0\prochazka
7758fabc89 code style 2024-10-02 10:44:35 +02:00
SPRINX0\prochazka
c9da9bdd23 Merge branch 'master' of https://github.com/dbgate/dbgate 2024-10-02 10:43:32 +02:00
Jan Prochazka
9520b053af Merge pull request #907 from yoadey/master
Fix 727: access_token not a jwt
2024-10-02 10:43:27 +02:00
SPRINX0\prochazka
8a1e717c1b v5.5.4-premium-beta.6 2024-10-02 10:26:43 +02:00
SPRINX0\prochazka
21127f661a better error reporting 2024-10-02 10:25:11 +02:00
yoadey
7d614a2395 Fix 727: access_token not a jwt 2024-10-02 10:24:19 +02:00
SPRINX0\prochazka
669ae024f9 v5.5.4-beta.5 2024-10-01 16:43:09 +02:00
SPRINX0\prochazka
9d8dd558e2 fixed load postgres schema on Azure #906 2024-10-01 16:36:26 +02:00
SPRINX0\prochazka
67f58a8dfe fix 2024-10-01 12:42:56 +02:00
SPRINX0\prochazka
52d230b9e2 v5.5.4-alpha.4 2024-10-01 12:21:57 +02:00
SPRINX0\prochazka
fd2a35fb4a Merge branch 'master' of https://github.com/dbgate/dbgate 2024-10-01 12:17:36 +02:00
Jan Prochazka
d61b5e135f safer env vars in dbmodel connection 2024-10-01 12:17:25 +02:00
SPRINX0\prochazka
ef23b786ac better error reporting 2024-10-01 12:15:22 +02:00
SPRINX0\prochazka
29a66bfcb0 v5.5.4-alpha.3 2024-10-01 11:07:52 +02:00
Jan Prochazka
ef5e30df3d dbmodel - allow connection from env variables 2024-10-01 11:07:16 +02:00
SPRINX0\prochazka
ab28a06bef close dbhandles after shell script (missing tableReader) 2024-10-01 10:56:52 +02:00
SPRINX0\prochazka
9910c54aa6 v5.5.4-alpha.2 2024-10-01 10:39:32 +02:00
Jan Prochazka
6ec431f471 dbmodel fix 2024-10-01 10:38:35 +02:00
Jan Prochazka
3ec7f651c1 not connected deploy test 2024-10-01 10:37:05 +02:00
SPRINX0\prochazka
87aa60bc3e v5.5.4-alpha.1 2024-10-01 09:19:47 +02:00
SPRINX0\prochazka
73874aa5a1 added missing dependency 2024-10-01 09:19:32 +02:00
Jan Prochazka
976438f860 fixed LOGIN & PASSWORD scenario #903 2024-09-27 10:46:33 +02:00
Jan Prochazka
eb095b7c44 changelog 2024-09-27 10:12:07 +02:00
Jan Prochazka
3ca745e74b v5.5.3 2024-09-27 10:11:58 +02:00
Jan Prochazka
040de84d93 changelog 2024-09-27 10:10:31 +02:00
Jan Prochazka
4f1d63440e v5.5.3-beta.4 2024-09-27 08:39:17 +02:00
Jan Prochazka
7c3cf1bb67 Merge branch 'feature/copy-stdin-import' 2024-09-27 08:34:03 +02:00
Jan Prochazka
cbf1b0a3cc import SQL dump tests 2024-09-27 08:33:16 +02:00
Jan Prochazka
5287a86397 import from postgres dump 2024-09-27 08:08:13 +02:00
SPRINX0\prochazka
ae599ac6f6 copy from stdin WIP 2024-09-26 16:06:54 +02:00
SPRINX0\prochazka
a08a8ef208 upgraded dbgate-query-splitter 2024-09-26 15:50:08 +02:00
SPRINX0\prochazka
19a4d97765 postgres copystream support 2024-09-26 15:48:29 +02:00
SPRINX0\prochazka
6f1f5f84c6 fix 2024-09-26 14:52:50 +02:00
SPRINX0\prochazka
e2f352149d fix + ability to choose imported table 2024-09-26 14:49:58 +02:00
SPRINX0\prochazka
1fa39b20d2 fixedTargetName fix 2024-09-26 13:57:10 +02:00
SPRINX0\prochazka
53dc2e6f03 fixed import/export for separate schemas 2024-09-26 13:22:36 +02:00
SPRINX0\prochazka
555f30c0b3 v5.5.3-beta.3 2024-09-26 12:39:45 +02:00
SPRINX0\prochazka
7549d37a04 better column mapping 2024-09-26 12:39:29 +02:00
SPRINX0\prochazka
29072eb71b v5.5.3-beta.2 2024-09-26 12:38:05 +02:00
SPRINX0\prochazka
48e9e77be5 column drop down in column map modal 2024-09-26 12:29:24 +02:00
SPRINX0\prochazka
4dd3f15ba3 better column chooser 2024-09-26 12:14:10 +02:00
SPRINX0\prochazka
3603501ae2 show single schema only if it is default schema 2024-09-26 10:01:02 +02:00
Jan Prochazka
338180a21a fix 2024-09-26 09:44:37 +02:00
Jan Prochazka
28193ed6f3 new_table - id should be not null 2024-09-26 09:41:32 +02:00
Jan Prochazka
0509710602 handle DB errors 2024-09-26 09:38:49 +02:00
Jan Prochazka
a4872b4159 fixed Syntax error when trying to sort by UUID column #895 2024-09-26 09:06:14 +02:00
Jan Prochazka
888f5c6260 v5.5.3-beta.1 2024-09-26 08:20:37 +02:00
Jan Prochazka
7a5abb5f47 Fixed separate schema mode, more logging #894 2024-09-26 08:12:51 +02:00
Jan Prochazka
61a9f02899 fix WIP 2024-09-25 17:01:14 +02:00
Jan Prochazka
354c4201f7 changelog 2024-09-25 12:07:35 +02:00
Jan Prochazka
d8340087c5 v5.5.2 2024-09-25 10:50:18 +02:00
Jan Prochazka
e3249c6d79 fixed readonly connection for MySQL 2024-09-25 10:49:37 +02:00
Jan Prochazka
58e65608e4 fixed postgres connections for readonly connection #900 2024-09-25 10:46:07 +02:00
Jan Prochazka
2b6fdf5a6a readme 2024-09-25 10:32:55 +02:00
Jan Prochazka
967d7849ee azure SQL in changelog 2024-09-25 10:26:38 +02:00
Jan Prochazka
7c476ab2f0 changelog 2024-09-25 09:31:40 +02:00
Jan Prochazka
caaf35e45a v5.5.1 2024-09-25 09:30:23 +02:00
Jan Prochazka
6ddc9ee6c5 fix 2024-09-25 09:25:00 +02:00
Jan Prochazka
39aa250223 changelog 2024-09-25 09:23:04 +02:00
Jan Prochazka
031a92db8e v5.5.0 2024-09-25 09:22:36 +02:00
Jan Prochazka
0c2579897f fixed multiple shortcuts handling #898 2024-09-25 09:10:54 +02:00
Jan Prochazka
b63479bf45 changelog 2024-09-25 08:50:44 +02:00
Jan Prochazka
4c89552265 v5.4.5-beta.15 2024-09-25 08:23:04 +02:00
SPRINX0\prochazka
517002e079 fixed importing mysql dump #702 2024-09-24 15:54:54 +02:00
SPRINX0\prochazka
85bfb1986d fixed redirect_uri parameter #891 2024-09-24 14:45:00 +02:00
SPRINX0\prochazka
632421eb73 copy connection error to clipboard 2024-09-24 14:23:06 +02:00
SPRINX0\prochazka
21365be411 v5.4.5-beta.14 2024-09-24 09:00:23 +02:00
SPRINX0\prochazka
eaa54022fc css fix 2024-09-24 09:00:05 +02:00
SPRINX0\prochazka
55f7f39efd postgres - use current_schema instead of search_path 2024-09-24 08:59:58 +02:00
SPRINX0\prochazka
71e709b346 v5.4.5-beta.13 2024-09-23 09:40:49 +02:00
SPRINX0\prochazka
1d2d295a45 log driver errors, even when they are sent to client 2024-09-23 09:40:29 +02:00
SPRINX0\prochazka
5ed23beff0 v5.4.5-premium-beta.12 2024-09-20 16:48:01 +02:00
SPRINX0\prochazka
43a8db55a2 v5.4.5-beta.11 2024-09-20 16:00:53 +02:00
SPRINX0\prochazka
0a9cba7bf7 quick export from table result #892 2024-09-20 16:00:03 +02:00
SPRINX0\prochazka
02af761bf7 postgre fix 2024-09-20 15:50:16 +02:00
SPRINX0\prochazka
6882a146e7 fixed build error 2024-09-20 15:00:11 +02:00
SPRINX0\prochazka
c9834f9792 Merge branch 'feature/separate-schemas-2' 2024-09-20 14:54:07 +02:00
SPRINX0\prochazka
21b4baf700 v5.4.5-beta.10 2024-09-20 14:52:47 +02:00
SPRINX0\prochazka
d3a24627dd postgres - show system databases when using separate schemas 2024-09-20 14:52:29 +02:00
SPRINX0\prochazka
8aac9cf59d loading schemas indicator + error reporting 2024-09-20 14:40:14 +02:00
SPRINX0\prochazka
ce70b2e71a support separate schemas for mssql 2024-09-20 13:30:39 +02:00
SPRINX0\prochazka
62a5ef60f6 v5.4.5-beta.9 2024-09-20 12:56:32 +02:00
SPRINX0\prochazka
95430e9c11 useSeparateSchemas option for docker 2024-09-20 12:35:02 +02:00
SPRINX0\prochazka
1cee36cc9b force exit after tests 2024-09-20 12:29:29 +02:00
SPRINX0\prochazka
aa475f81a0 uncommented test + fix 2024-09-20 12:19:03 +02:00
SPRINX0\prochazka
1173d5db1d sqlserver fix 2024-09-20 10:49:53 +02:00
SPRINX0\prochazka
f34d0cbb90 fixed SQLite 2024-09-20 10:47:51 +02:00
SPRINX0\prochazka
780d187911 skip SQLite on CI 2024-09-20 10:41:31 +02:00
SPRINX0\prochazka
48d4374346 introduced dbhandle instead of overwriting 3rd party client's fields 2024-09-20 10:27:03 +02:00
Jan Prochazka
6b8b511d0d try to comment problematic test 2024-09-19 19:01:19 +02:00
Jan Prochazka
c6be115634 fixed test 2024-09-19 18:54:29 +02:00
Jan Prochazka
dadde225f1 db handle 2024-09-19 18:53:12 +02:00
Jan Prochazka
31dfc1dc28 try to fix test 2024-09-19 18:42:08 +02:00
Jan Prochazka
1de163af44 unique db name prop 2024-09-19 18:38:30 +02:00
Jan Prochazka
2181eada53 fix 2024-09-19 18:35:36 +02:00
Jan Prochazka
75e63d2710 JEST --detectOpenHandles flag 2024-09-19 18:31:52 +02:00
Jan Prochazka
fbfcdcbc40 Revert "test"
This reverts commit 0238e6a7f1.
2024-09-19 18:30:52 +02:00
Jan Prochazka
0238e6a7f1 test 2024-09-19 18:29:35 +02:00
Jan Prochazka
be17301c91 try to fix test 2024-09-19 18:23:25 +02:00
Jan Prochazka
b1118c7f43 try to fix test 2024-09-19 18:17:30 +02:00
Jan Prochazka
24bf5e5b0c fix 2024-09-19 18:10:27 +02:00
Jan Prochazka
122471f81f neutral schema cond 2024-09-19 18:03:20 +02:00
Jan Prochazka
a6136cee25 skipSeparateSchemas flag 2024-09-19 17:07:34 +02:00
Jan Prochazka
83357ba2cc fix 2024-09-19 16:05:54 +02:00
Jan Prochazka
4fe10b26b0 postgre analyser fix 2024-09-19 16:00:54 +02:00
Jan Prochazka
485f6c9759 v5.4.5-beta.8 2024-09-19 15:22:00 +02:00
Jan Prochazka
732c5b763b fix 2024-09-19 15:21:41 +02:00
Jan Prochazka
4431d08a88 separate schema selector in frontend 2024-09-19 15:19:16 +02:00
Jan Prochazka
cb7224ac94 fix 2024-09-19 14:23:34 +02:00
Jan Prochazka
66b39c1f80 fixes 2024-09-19 14:17:11 +02:00
Jan Prochazka
b30f139b5d postgre analyser supports composite db names 2024-09-19 14:15:22 +02:00
Jan Prochazka
f39ec26c29 UI fix 2024-09-19 13:43:55 +02:00
Jan Prochazka
8c3c32aeba default schema refactor 2024-09-19 13:41:49 +02:00
Jan Prochazka
9eb27f5e92 refresh schema list 2024-09-19 11:15:44 +02:00
Jan Prochazka
3e5b45de8f schemaList moved from dbinfo to separate request 2024-09-19 10:59:09 +02:00
Jan Prochazka
e7b4a6ffcc Merge branch 'feature/db-schema' 2024-09-19 09:53:55 +02:00
Jan Prochazka
e7ec75138d fix 2024-09-19 09:52:54 +02:00
Jan Prochazka
6c4679d83b fixed scenario after save table 2024-09-19 09:32:49 +02:00
Jan Prochazka
5f23b29c4e create table in multi-schema 2024-09-19 09:24:08 +02:00
Jan Prochazka
55db98fe1b removed unused imports 2024-09-19 09:02:20 +02:00
Jan Prochazka
f7c5ffa0ce create table - changed workflow 2024-09-19 09:00:13 +02:00
Jan Prochazka
d1e98e5640 fixed incremental analysis when changed schema+test 2024-09-18 16:30:45 +02:00
Jan Prochazka
e785fdf9b7 test fix for clickhouse 2024-09-18 16:16:46 +02:00
Jan Prochazka
fc0db925c5 schema analyser test 2024-09-18 16:01:02 +02:00
SPRINX0\prochazka
5ab686b721 schema update in database analyser 2024-09-18 15:37:34 +02:00
SPRINX0\prochazka
327d43096f schema selector is cached by conid and database 2024-09-18 14:07:33 +02:00
SPRINX0\prochazka
1f7b632553 fix - show schema selector, when no schema is available 2024-09-18 14:01:58 +02:00
SPRINX0\prochazka
592d7987ab show objects by schemas 2024-09-18 13:50:02 +02:00
SPRINX0\prochazka
c429424fda v5.4.5-beta.7 2024-09-17 17:17:32 +02:00
SPRINX0\prochazka
0b4709d383 import/export tab title 2024-09-17 17:14:57 +02:00
SPRINX0\prochazka
336929ff3f export menu changed 2024-09-17 16:56:41 +02:00
SPRINX0\prochazka
677f83cc4b fixed filtering in json columns for postgres #889 2024-09-17 16:42:07 +02:00
SPRINX0\prochazka
5c58c35a64 Merge branch 'feature/import-export' 2024-09-17 16:17:46 +02:00
SPRINX0\prochazka
b346a458a6 fix 2024-09-17 16:00:59 +02:00
SPRINX0\prochazka
226512a4ca removed open wizard from shell function 2024-09-17 15:50:01 +02:00
SPRINX0\prochazka
a0527d78e9 save import/export jobs 2024-09-17 15:47:40 +02:00
SPRINX0\prochazka
3357295d98 removed ImportExportModal 2024-09-17 15:19:45 +02:00
SPRINX0\prochazka
fc6a43b4fe download fileat first in imports 2024-09-17 15:06:54 +02:00
SPRINX0\prochazka
260b2e4b12 JSON export - support for object style, key field, root field 2024-09-17 14:28:31 +02:00
Jan Prochazka
f080b18d3f refactor 2024-09-17 13:47:28 +02:00
Jan Prochazka
56f015ffd5 JSON import rootField support 2024-09-17 13:23:51 +02:00
Jan Prochazka
fd8a28831e JSON object import 2024-09-17 12:52:53 +02:00
Jan Prochazka
503e09ddd1 import test small refactor 2024-09-17 12:40:41 +02:00
Jan Prochazka
880912806a JSON import 2024-09-17 12:32:03 +02:00
SPRINX0\prochazka
665ce22741 JSON import 2024-09-17 12:16:59 +02:00
SPRINX0\prochazka
e5c9ec7681 Merge branch 'feature/import-export' of https://github.com/dbgate/dbgate into feature/import-export 2024-09-17 10:45:43 +02:00
SPRINX0\prochazka
74fceeec78 fix 2024-09-17 10:45:41 +02:00
Jan Prochazka
77d60ccfa5 auto-detect CSV delimiter in test 2024-09-17 10:29:49 +02:00
SPRINX0\prochazka
0c2b25f79a auto-detect CSV delimiter 2024-09-17 10:28:58 +02:00
Jan Prochazka
4065e05013 CSV import fixed 2024-09-17 09:59:47 +02:00
Jan Prochazka
319a7fd003 csv import test (failing) 2024-09-16 18:50:14 +02:00
Jan Prochazka
26c01f43f9 drag & drop file to export/import tab 2024-09-16 17:28:30 +02:00
Jan Prochazka
88d7e07bea fixed upload file 2024-09-16 17:16:54 +02:00
SPRINX0\prochazka
a9a5a3491e showModal(ImportExportModal => openImportExportTab 2024-09-16 13:03:49 +02:00
SPRINX0\prochazka
d255273368 open import/export tab function 2024-09-16 12:47:13 +02:00
SPRINX0\prochazka
a7846b4adf import export tab working 2024-09-16 12:15:43 +02:00
SPRINX0\prochazka
ce431e6e21 fix 2024-09-16 10:25:52 +02:00
SPRINX0\prochazka
f8e39a2a5d runtests on feature branch 2024-09-16 10:25:30 +02:00
SPRINX0\prochazka
e5135b1a9d run tests on feature branch 2024-09-16 10:23:31 +02:00
459 changed files with 50427 additions and 6561 deletions

View File

@@ -1,86 +1,86 @@
# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
name: Electron app BETA
on:
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+'
jobs:
build:
runs-on: ${{ matrix.os }}
runs-on: '${{ matrix.os }}'
strategy:
fail-fast: false
matrix:
os: [macos-12, windows-2022, ubuntu-22.04]
# os: [macOS-10.15]
os:
- macos-14
- windows-2022
- ubuntu-22.04
steps:
- name: Install python 3.11 (MacOS)
if: matrix.os == 'macos-14'
run: |
brew install python@3.11
echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
- name: Context
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
GITHUB_CONTEXT: '${{ toJson(github) }}'
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
- name: yarn adjustPackageJson
node-version: 22.x
- name: adjustPackageJson
run: |
yarn adjustPackageJson
node adjustPackageJson --community
- name: setUpdaterChannel beta
run: |
node setUpdaterChannel beta
- name: yarn set timeout
run: |
yarn config set network-timeout 100000
- name: yarn install
run: |
yarn install
- name: setCurrentVersion
run: |
yarn setCurrentVersion
- name: printSecrets
run: |
yarn printSecrets
yarn printSecrets
env:
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
- name: fillNativeModulesElectron
run: |
yarn fillNativeModulesElectron
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
- name: fillPackagedPlugins
run: |
yarn fillPackagedPlugins
- name: Install Snapcraft
if: matrix.os == 'ubuntu-22.04'
uses: samuelmeuli/action-snapcraft@v1
- name: Publish
run: |
yarn run build:app
env:
GH_TOKEN: ${{ secrets.GH_TOKEN }} # token for electron publish
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
# WIN_CSC_LINK: ${{ secrets.WINCERT_CERTIFICATE }}
# WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_PASSWORD }}
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
- name: publishSnap
if: matrix.os == 'ubuntu-22.04'
run: |
snapcraft upload --release=beta app/dist/*.snap
env:
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
GH_TOKEN: '${{ secrets.GH_TOKEN }}'
WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
APPLE_ID: '${{ secrets.APPLE_ID }}'
APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
- name: Copy artifacts
run: |
mkdir artifacts
@@ -92,8 +92,10 @@ jobs:
cp app/dist/*win*.exe artifacts/dbgate-beta.exe || true
cp app/dist/*win_x64.zip artifacts/dbgate-windows-beta.zip || true
cp app/dist/*win_arm64.zip artifacts/dbgate-windows-beta-arm64.zip || true
cp app/dist/*-mac_x64.dmg artifacts/dbgate-beta.dmg || true
cp app/dist/*-mac_universal.dmg artifacts/dbgate-beta.dmg || true
cp app/dist/*-mac_x64.dmg artifacts/dbgate-beta-x64.dmg || true
cp app/dist/*-mac_arm64.dmg artifacts/dbgate-beta-arm64.dmg || true
mv app/dist/*.snap artifacts/dbgate-beta.snap || true
mv app/dist/*.exe artifacts/ || true
mv app/dist/*.zip artifacts/ || true
@@ -106,18 +108,21 @@ jobs:
mv app/dist/*.yml artifacts/ || true
rm artifacts/builder-debug.yml
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}
name: '${{ matrix.os }}'
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
if: 'startsWith(github.ref, ''refs/tags/'')'
with:
files: 'artifacts/**'
files: artifacts/**
prerelease: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
- name: Print content of notarization-error.log
if: failure() && matrix.os == 'macos-14'
run: |
find . -type f -name "notarization-error.log" -exec echo "=== Start of {} ===" \; -exec cat {} \; -exec echo "=== End of {} ===" \;

View File

@@ -1,43 +1,44 @@
# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
name: Electron app PREMIUM BETA
on:
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
jobs:
build:
runs-on: ${{ matrix.os }}
runs-on: '${{ matrix.os }}'
strategy:
fail-fast: false
matrix:
# os: [windows-2022]
# os: [ubuntu-22.04]
# os: [windows-2022, ubuntu-22.04]
os: [macos-12, windows-2022, ubuntu-22.04]
# os: [macOS-10.15]
os:
- macos-14
- windows-2022
- ubuntu-22.04
steps:
- name: Install python 3.11 (MacOS)
if: matrix.os == 'macos-14'
run: |
brew install python@3.11
echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
- name: Context
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
GITHUB_CONTEXT: '${{ toJson(github) }}'
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- name: Checkout dbgate/dbgate-pro
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
token: '${{ secrets.GH_TOKEN }}'
path: dbgate-pro
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -49,81 +50,82 @@ jobs:
yarn
node sync.js --nowatch
cd ..
- name: adjustPackageJson
run: |
cd ..
cd dbgate-merged
- name: yarn adjustPackageJson
run: |
cd ..
cd dbgate-merged
yarn adjustPackageJson
- name: adjustPackageJsonPremium
run: |
cd ..
cd dbgate-merged
node adjustPackageJsonPremium
node adjustPackageJson --premium
- name: setUpdaterChannel premium-beta
run: |
cd ..
cd dbgate-merged
node setUpdaterChannel premium-beta
- name: yarn set timeout
run: |
cd ..
cd dbgate-merged
yarn config set network-timeout 100000
- name: yarn install
run: |
cd ..
cd dbgate-merged
yarn install
- name: setCurrentVersion
run: |
cd ..
cd dbgate-merged
yarn setCurrentVersion
- name: printSecrets
run: |
cd ..
cd dbgate-merged
yarn printSecrets
yarn printSecrets
env:
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
- name: fillNativeModulesElectron
run: |
cd ..
cd dbgate-merged
yarn fillNativeModulesElectron
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
- name: fillPackagedPlugins
run: |
cd ..
cd dbgate-merged
yarn fillPackagedPlugins
- name: Publish
run: |
cd ..
cd dbgate-merged
yarn run build:app
env:
GH_TOKEN: ${{ secrets.GH_TOKEN }} # token for electron publish
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
# WIN_CSC_LINK: ${{ secrets.WINCERT_CERTIFICATE }}
# WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_PASSWORD }}
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
GH_TOKEN: '${{ secrets.GH_TOKEN }}'
WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
APPLE_ID: '${{ secrets.APPLE_ID }}'
APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
- name: Copy artifacts
run: |
mkdir artifacts
cp ../dbgate-merged/app/dist/*.deb artifacts/dbgate-premium-beta.deb || true
cp ../dbgate-merged/app/dist/*x86*.AppImage artifacts/dbgate-premium-beta.AppImage || true
cp ../dbgate-merged/app/dist/*arm64*.AppImage artifacts/dbgate-premium-beta-arm64.AppImage || true
cp ../dbgate-merged/app/dist/*armv7l*.AppImage artifacts/dbgate-premium-beta-armv7l.AppImage || true
cp ../dbgate-merged/app/dist/*win*.exe artifacts/dbgate-premium-beta.exe || true
cp ../dbgate-merged/app/dist/*-mac_x64.dmg artifacts/dbgate-premium-beta.dmg || true
cp ../dbgate-merged/app/dist/*win_x64.zip artifacts/dbgate-windows-premium-beta.zip || true
cp ../dbgate-merged/app/dist/*win_arm64.zip artifacts/dbgate-windows-premium-beta-arm64.zip || true
cp ../dbgate-merged/app/dist/*-mac_universal.dmg artifacts/dbgate-premium-beta.dmg || true
cp ../dbgate-merged/app/dist/*-mac_x64.dmg artifacts/dbgate-premium-beta-x64.dmg || true
cp ../dbgate-merged/app/dist/*-mac_arm64.dmg artifacts/dbgate-premium-beta-arm64.dmg || true
mv ../dbgate-merged/app/dist/*.snap artifacts/dbgate-premium-beta.snap || true
mv ../dbgate-merged/app/dist/*.exe artifacts/ || true
mv ../dbgate-merged/app/dist/*.zip artifacts/ || true
@@ -136,18 +138,23 @@ jobs:
mv ../dbgate-merged/app/dist/*.yml artifacts/ || true
rm artifacts/builder-debug.yml
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}
name: '${{ matrix.os }}'
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
if: 'startsWith(github.ref, ''refs/tags/'')'
with:
files: 'artifacts/**'
files: artifacts/**
prerelease: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
- name: Print content of notarization-error.log
if: failure() && matrix.os == 'macos-14'
run: |
cd ..
cd dbgate-merged
find . -type f -name "notarization-error.log" -exec echo "=== Start of {} ===" \; -exec cat {} \; -exec echo "=== End of {} ===" \;

View File

@@ -1,44 +1,44 @@
# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
name: Electron app PREMIUM
on:
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
# - 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10
# branches:
# - production
jobs:
build:
runs-on: ${{ matrix.os }}
runs-on: '${{ matrix.os }}'
strategy:
fail-fast: false
matrix:
# os: [ubuntu-22.04, windows-2016]
os: [macos-12, windows-2022, ubuntu-22.04]
os:
- macos-14
- windows-2022
- ubuntu-22.04
steps:
- name: Install python 3.11 (MacOS)
if: matrix.os == 'macos-14'
run: |
brew install python@3.11
echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
- name: Context
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
GITHUB_CONTEXT: '${{ toJson(github) }}'
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- name: Checkout dbgate/dbgate-pro
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
token: '${{ secrets.GH_TOKEN }}'
path: dbgate-pro
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -50,106 +50,111 @@ jobs:
yarn
node sync.js --nowatch
cd ..
- name: adjustPackageJson
run: |
cd ..
cd dbgate-merged
- name: yarn adjustPackageJson
run: |
cd ..
cd dbgate-merged
yarn adjustPackageJson
- name: yarn adjustPackageJsonPremium
run: |
cd ..
cd dbgate-merged
node adjustPackageJsonPremium
node adjustPackageJson --premium
- name: setUpdaterChannel premium
run: |
cd ..
cd dbgate-merged
node setUpdaterChannel premium
- name: yarn set timeout
run: |
cd ..
cd dbgate-merged
yarn config set network-timeout 100000
- name: yarn install
run: |
cd ..
cd dbgate-merged
yarn install
- name: setCurrentVersion
run: |
cd ..
cd dbgate-merged
yarn setCurrentVersion
- name: printSecrets
run: |
cd ..
cd dbgate-merged
yarn printSecrets
env:
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
- name: fillNativeModulesElectron
run: |
cd ..
cd dbgate-merged
yarn fillNativeModulesElectron
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
- name: fillPackagedPlugins
run: |
cd ..
cd dbgate-merged
yarn fillPackagedPlugins
- name: Publish
run: |
cd ..
cd dbgate-merged
yarn run build:app
env:
GH_TOKEN: ${{ secrets.GH_TOKEN }} # token for electron publish
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
# WIN_CSC_LINK: ${{ secrets.WINCERT_CERTIFICATE }}
# WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_PASSWORD }}
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
GH_TOKEN: '${{ secrets.GH_TOKEN }}'
WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
APPLE_ID: '${{ secrets.APPLE_ID }}'
APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
- name: Copy artifacts
run: |
mkdir artifacts
cp ../dbgate-merged/app/dist/*.deb artifacts/dbgate-premium-latest.deb || true
cp ../dbgate-merged/app/dist/*x86*.AppImage artifacts/dbgate-premium-latest.AppImage || true
cp ../dbgate-merged/app/dist/*.exe artifacts/dbgate-premium-latest.exe || true
cp ../dbgate-merged/app/dist/*win_x64.zip artifacts/dbgate-premium-windows-latest.zip || true
cp ../dbgate-merged/app/dist/*win_arm64.zip artifacts/dbgate-premium-windows-latest-arm64.zip || true
cp ../dbgate-merged/app/dist/*arm64*.AppImage artifacts/dbgate-premium-latest-arm64.AppImage || true
cp ../dbgate-merged/app/dist/*armv7l*.AppImage artifacts/dbgate-premium-latest-armv7l.AppImage || true
cp ../dbgate-merged/app/dist/*win*.exe artifacts/dbgate-premium-latest.exe || true
cp ../dbgate-merged/app/dist/*win_x64.zip artifacts/dbgate-windows-premium-latest.zip || true
cp ../dbgate-merged/app/dist/*win_arm64.zip artifacts/dbgate-windows-premium-latest-arm64.zip || true
cp ../dbgate-merged/app/dist/*-mac_universal.dmg artifacts/dbgate-premium-latest.dmg || true
cp ../dbgate-merged/app/dist/*-mac_x64.dmg artifacts/dbgate-premium-latest-x64.dmg || true
cp ../dbgate-merged/app/dist/*-mac_arm64.dmg artifacts/dbgate-premium-latest-arm64.dmg || true
mv ../dbgate-merged/app/dist/*.snap artifacts/dbgate-premium-latest.snap || true
mv ../dbgate-merged/app/dist/*.exe artifacts/ || true
mv ../dbgate-merged/app/dist/*.zip artifacts/ || true
mv ../dbgate-merged/app/dist/*.tar.gz artifacts/ || true
mv ../dbgate-merged/app/dist/*.AppImage artifacts/ || true
mv ../dbgate-merged/app/dist/*.deb artifacts/ || true
mv ../dbgate-merged/app/dist/*.snap artifacts/ || true
mv ../dbgate-merged/app/dist/*.dmg artifacts/ || true
mv ../dbgate-merged/app/dist/*.blockmap artifacts/ || true
mv ../dbgate-merged/app/dist/*.yml artifacts/ || true
rm artifacts/builder-debug.yml
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}
name: '${{ matrix.os }}'
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
if: 'startsWith(github.ref, ''refs/tags/'')'
with:
files: 'artifacts/**'
files: artifacts/**
prerelease: false
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
- name: Print content of notarization-error.log
if: failure() && matrix.os == 'macos-14'
run: |
cd ..
cd dbgate-merged
find . -type f -name "notarization-error.log" -exec echo "=== Start of {} ===" \; -exec cat {} \; -exec echo "=== End of {} ===" \;

View File

@@ -1,93 +1,85 @@
# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
name: Electron app
on:
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
# - 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10
# branches:
# - production
jobs:
build:
runs-on: ${{ matrix.os }}
runs-on: '${{ matrix.os }}'
strategy:
fail-fast: false
matrix:
# os: [ubuntu-22.04, windows-2016]
os: [macos-12, windows-2022, ubuntu-22.04]
os:
- macos-14
- windows-2022
- ubuntu-22.04
steps:
- name: Install python 3.11 (MacOS)
if: matrix.os == 'macos-14'
run: |
brew install python@3.11
echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
- name: Context
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
GITHUB_CONTEXT: '${{ toJson(github) }}'
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
- name: yarn adjustPackageJson
node-version: 22.x
- name: adjustPackageJson
run: |
yarn adjustPackageJson
node adjustPackageJson --community
- name: yarn set timeout
run: |
yarn config set network-timeout 100000
- name: yarn install
run: |
# yarn --version
# yarn config set network-timeout 300000
yarn install
- name: setCurrentVersion
run: |
yarn setCurrentVersion
- name: printSecrets
run: |
yarn printSecrets
yarn printSecrets
env:
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
- name: fillNativeModulesElectron
run: |
yarn fillNativeModulesElectron
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
- name: fillPackagedPlugins
run: |
yarn fillPackagedPlugins
- name: Install Snapcraft
if: matrix.os == 'ubuntu-22.04'
uses: samuelmeuli/action-snapcraft@v1
- name: Publish
run: |
yarn run build:app
env:
GH_TOKEN: ${{ secrets.GH_TOKEN }} # token for electron publish
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
# WIN_CSC_LINK: ${{ secrets.WINCERT_CERTIFICATE }}
# WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_PASSWORD }}
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
GH_TOKEN: '${{ secrets.GH_TOKEN }}'
WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
APPLE_ID: '${{ secrets.APPLE_ID }}'
APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
- name: generatePadFile
run: |
yarn generatePadFile
- name: publishSnap
if: matrix.os == 'ubuntu-22.04'
run: |
snapcraft upload --release=stable app/dist/*.snap
env:
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
- name: Copy artifacts
run: |
mkdir artifacts
@@ -96,74 +88,44 @@ jobs:
cp app/dist/*x86*.AppImage artifacts/dbgate-latest.AppImage || true
cp app/dist/*arm64*.AppImage artifacts/dbgate-latest-arm64.AppImage || true
cp app/dist/*armv7l*.AppImage artifacts/dbgate-latest-armv7l.AppImage || true
cp app/dist/*.exe artifacts/dbgate-latest.exe || true
cp app/dist/*win*.exe artifacts/dbgate-latest.exe || true
cp app/dist/*win_x64.zip artifacts/dbgate-windows-latest.zip || true
cp app/dist/*win_arm64.zip artifacts/dbgate-windows-latest-arm64.zip || true
cp app/dist/*-mac_universal.dmg artifacts/dbgate-latest.dmg || true
cp app/dist/*-mac_x64.dmg artifacts/dbgate-latest-x64.dmg || true
cp app/dist/*-mac_arm64.dmg artifacts/dbgate-latest-arm64.dmg || true
mv app/dist/*.snap artifacts/dbgate-latest.snap || true
mv app/dist/*.exe artifacts/ || true
mv app/dist/*.zip artifacts/ || true
mv app/dist/*.tar.gz artifacts/ || true
mv app/dist/*.AppImage artifacts/ || true
mv app/dist/*.deb artifacts/ || true
mv app/dist/*.snap artifacts/ || true
mv app/dist/*.dmg artifacts/ || true
mv app/dist/*.snap artifacts/dbgate-latest.snap || true
mv app/dist/*.blockmap artifacts/ || true
mv app/dist/*.yml artifacts/ || true
rm artifacts/builder-debug.yml
# - name: Copy artifacts Linux, MacOs
# if: matrix.os != 'windows-2016'
# run: |
# mkdir artifacts
# cp app/dist/*.AppImage artifacts/ || true
# cp app/dist/*.dmg artifacts/ || true
# cp app/dist/*.deb artifacts/ || true
# mv app/dist/*.deb artifacts/dbgate-linux.deb || true
# mv app/dist/*.AppImage artifacts/dbgate-linux.AppImage || true
# mv app/dist/*.dmg artifacts/dbgate-mac.dmg || true
# - name: Copy artifacts Win
# if: matrix.os == 'windows-2016'
# run: |
# mkdir artifacts
# cp app/dist/*.exe artifacts/ || true
# mv app/dist/*.exe artifacts/dbgate-windows.exe
# mv app/dist/latest.yml artifacts/latest.yml || true
- name: Copy PAD file
if: matrix.os == 'windows-2022'
run: |
mv app/dist/dbgate-pad.xml artifacts/ || true
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}
name: '${{ matrix.os }}'
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
if: 'startsWith(github.ref, ''refs/tags/'')'
with:
files: 'artifacts/**'
files: artifacts/**
prerelease: false
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
- name: Print content of notarization-error.log
if: failure() && matrix.os == 'macos-14'
run: |
# - name: Create Release
# id: create_release
# uses: actions/create-release@v1
# env:
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# with:
# tag_name: ${{ github.ref }}
# release_name: Release ${{ github.ref }}
# draft: false
# prerelease: false
find . -type f -name "notarization-error.log" -exec echo "=== Start of {} ===" \; -exec cat {} \; -exec echo "=== End of {} ===" \;

View File

@@ -0,0 +1,123 @@
# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
name: AWS image PREMIUM
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-packer-beta.[0-9]+'
jobs:
build:
runs-on: '${{ matrix.os }}'
strategy:
matrix:
os:
- ubuntu-22.04
steps:
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
uses: actions/setup-node@v1
with:
node-version: 18.x
- name: Setup `packer`
uses: hashicorp/setup-packer@main
with:
version: latest
- name: Checkout dbgate/dbgate-pro
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: '${{ secrets.GH_TOKEN }}'
path: dbgate-pro
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
mv dbgate-pro/* ../dbgate-pro/
cd ..
mkdir dbgate-merged
cd dbgate-pro
cd sync
yarn
node sync.js --nowatch
cd ..
- name: adjustPackageJson
run: |
cd ..
cd dbgate-merged
node adjustPackageJson --premium
- name: yarn install
run: |
cd ..
cd dbgate-merged
yarn install
- name: setCurrentVersion
run: |
cd ..
cd dbgate-merged
yarn setCurrentVersion
- name: printSecrets
run: |
cd ..
cd dbgate-merged
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
- name: Prepare packer build
run: |
cd ..
cd dbgate-merged
yarn run prepare:packer
cd packer
zip -r cloud-build.zip build
- name: Copy artifacts
run: |
mkdir artifacts
cp ../dbgate-merged/packer/cloud-build.zip artifacts/cloud-build.zip || true
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: '${{ matrix.os }}'
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: 'startsWith(github.ref, ''refs/tags/'')'
with:
files: artifacts/**
prerelease: true
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
- name: Run `packer init`
run: |
cd ../dbgate-merged/packer
packer init ./aws-ubuntu.pkr.hcl
env:
AWS_ACCESS_KEY_ID: '${{secrets.AWS_ACCESS_KEY_ID}}'
AWS_SECRET_ACCESS_KEY: '${{secrets.AWS_SECRET_ACCESS_KEY}}'
AWS_DEFAULT_REGION: '${{secrets.AWS_DEFAULT_REGION}}'
- name: Run `packer build`
run: |
cd ../dbgate-merged/packer
packer build ./aws-ubuntu.pkr.hcl
env:
AWS_ACCESS_KEY_ID: '${{secrets.AWS_ACCESS_KEY_ID}}'
AWS_SECRET_ACCESS_KEY: '${{secrets.AWS_SECRET_ACCESS_KEY}}'
AWS_DEFAULT_REGION: '${{secrets.AWS_DEFAULT_REGION}}'
- name: Install jq
run: |
sudo apt-get install jq -y
- name: Delete old AMIs
run: |
cd ../dbgate-merged/packer
chmod +x delete-old-amis.sh
./delete-old-amis.sh
env:
AWS_ACCESS_KEY_ID: '${{secrets.AWS_ACCESS_KEY_ID}}'
AWS_SECRET_ACCESS_KEY: '${{secrets.AWS_SECRET_ACCESS_KEY}}'
AWS_DEFAULT_REGION: '${{secrets.AWS_DEFAULT_REGION}}'

View File

@@ -1,35 +1,32 @@
# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
name: Docker image PREMIUM
on:
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
jobs:
build:
runs-on: ${{ matrix.os }}
runs-on: '${{ matrix.os }}'
strategy:
matrix:
os: [ubuntu-22.04]
os:
- ubuntu-22.04
steps:
- name: Context
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
GITHUB_CONTEXT: '${{ toJson(github) }}'
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Docker meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
dbgate/dbgate-premium
images: dbgate/dbgate-premium
flavor: |
latest=false
tags: |
@@ -37,19 +34,16 @@ jobs:
type=match,pattern=\d+.\d+.\d+,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
type=raw,value=latest,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
- name: Use Node.js 18.x
uses: actions/setup-node@v1
with:
node-version: 18.x
- name: Checkout dbgate/dbgate-pro
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
token: '${{ secrets.GH_TOKEN }}'
path: dbgate-pro
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -61,44 +55,51 @@ jobs:
yarn
node sync.js --nowatch
cd ..
- name: adjustPackageJson
run: |
cd ..
cd dbgate-merged
node adjustPackageJson --premium
- name: yarn install
run: |
cd ..
cd dbgate-merged
yarn install
# yarn --version
# yarn config set network-timeout 300000
yarn install
- name: setCurrentVersion
run: |
cd ..
cd dbgate-merged
yarn setCurrentVersion
- name: printSecrets
run: |
cd ..
cd dbgate-merged
yarn printSecrets
yarn printSecrets
env:
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
- name: Prepare docker image
run: |
cd ..
cd dbgate-merged
yarn run prepare:docker
yarn run prepare:docker
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
username: '${{ secrets.DOCKER_USERNAME }}'
password: '${{ secrets.DOCKER_PASSWORD }}'
- name: Build and push
uses: docker/build-push-action@v3
with:
push: true
context: ../dbgate-merged/docker
tags: ${{ steps.meta.outputs.tags }}
platforms: linux/amd64,linux/arm64
tags: '${{ steps.meta.outputs.tags }}'
platforms: 'linux/amd64,linux/arm64'

View File

@@ -1,35 +1,32 @@
name: Docker image
on:
# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
name: Docker image Community
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+'
jobs:
build:
runs-on: ${{ matrix.os }}
runs-on: '${{ matrix.os }}'
strategy:
matrix:
os: [ubuntu-22.04]
os:
- ubuntu-22.04
steps:
- name: Context
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
GITHUB_CONTEXT: '${{ toJson(github) }}'
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Docker meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
dbgate/dbgate
images: dbgate/dbgate
flavor: |
latest=false
tags: |
@@ -37,7 +34,6 @@ jobs:
type=match,pattern=\d+.\d+.\d+,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
type=raw,value=latest,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
- name: Docker alpine meta
id: alpmeta
uses: docker/metadata-action@v4
@@ -51,51 +47,53 @@ jobs:
type=match,pattern=\d+.\d+.\d+,suffix=-alpine,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
type=raw,value=alpine,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
- name: Use Node.js 18.x
uses: actions/setup-node@v1
with:
node-version: 18.x
- name: adjustPackageJson
run: |
node adjustPackageJson --community
- name: yarn install
run: |
# yarn --version
# yarn config set network-timeout 300000
yarn install
- name: setCurrentVersion
run: |
yarn setCurrentVersion
- name: printSecrets
run: |
yarn printSecrets
yarn printSecrets
env:
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
- name: Prepare docker image
run: |
yarn run prepare:docker
yarn run prepare:docker
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
username: '${{ secrets.DOCKER_USERNAME }}'
password: '${{ secrets.DOCKER_PASSWORD }}'
- name: Build and push
uses: docker/build-push-action@v3
with:
push: true
context: ./docker
tags: ${{ steps.meta.outputs.tags }}
platforms: linux/amd64,linux/arm64,linux/arm/v7
tags: '${{ steps.meta.outputs.tags }}'
platforms: 'linux/amd64,linux/arm64,linux/arm/v7'
- name: Build and push alpine
uses: docker/build-push-action@v3
with:
push: true
context: ./docker
file: ./docker/Dockerfile-alpine
tags: ${{ steps.alpmeta.outputs.tags }}
platforms: linux/amd64,linux/arm64,linux/arm/v7
tags: '${{ steps.alpmeta.outputs.tags }}'
platforms: 'linux/amd64,linux/arm64,linux/arm/v7'

99
.github/workflows/build-npm-pro.yaml vendored Normal file
View File

@@ -0,0 +1,99 @@
# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
name: NPM packages PREMIUM
'on':
  push:
    tags:
      # release tags and alpha pre-release tags trigger the premium NPM publish
      - 'v[0-9]+.[0-9]+.[0-9]+'
      - 'v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+'
jobs:
  build:
    runs-on: '${{ matrix.os }}'
    strategy:
      matrix:
        os:
          - ubuntu-22.04
    steps:
      # Dump the GitHub event context for debugging the workflow run
      - name: Context
        env:
          GITHUB_CONTEXT: '${{ toJson(github) }}'
        run: echo "$GITHUB_CONTEXT"
      - uses: actions/checkout@v2
        with:
          fetch-depth: 1
      - name: Use Node.js 18.x
        uses: actions/setup-node@v1
        with:
          node-version: 18.x
      # Check out the private premium repository next to the community one
      - name: Checkout dbgate/dbgate-pro
        uses: actions/checkout@v2
        with:
          repository: dbgate/dbgate-pro
          token: '${{ secrets.GH_TOKEN }}'
          path: dbgate-pro
      # Merge community + premium sources into ../dbgate-merged
      # (sync.js presumably copies/merges sources — confirm against dbgate-pro/sync)
      - name: Merge dbgate/dbgate-pro
        run: |
          mkdir ../dbgate-pro
          mv dbgate-pro/* ../dbgate-pro/
          cd ..
          mkdir dbgate-merged
          cd dbgate-pro
          cd sync
          yarn
          node sync.js --nowatch
          cd ..
      - name: adjustNpmPackageJsonPremium
        run: |
          cd ..
          cd dbgate-merged
          node adjustNpmPackageJsonPremium
      - name: Configure NPM token
        env:
          NPM_TOKEN: '${{ secrets.NPM_TOKEN }}'
        run: |
          cd ..
          cd dbgate-merged
          npm config set '//registry.npmjs.org/:_authToken' "${NPM_TOKEN}"
      # dbmodel is intentionally excluded from the premium NPM publish
      - name: Remove dbmodel - should be not published
        run: |
          cd ..
          cd dbgate-merged
          rm -rf packages/dbmodel
      - name: yarn install
        run: |
          cd ..
          cd dbgate-merged
          yarn install
      - name: setCurrentVersion
        run: |
          cd ..
          cd dbgate-merged
          yarn setCurrentVersion
      - name: printSecrets
        run: |
          cd ..
          cd dbgate-merged
          yarn printSecrets
        env:
          GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
      # Publish premium packages from the merged tree
      - name: Publish dbgate-api-premium
        run: |
          cd ..
          cd dbgate-merged/packages/api
          npm publish
      - name: Publish dbgate-web-premium
        run: |
          cd ..
          cd dbgate-merged/packages/web
          npm publish
      - name: Publish dbgate-serve-premium
        run: |
          cd ..
          cd dbgate-merged/packages/serve
          npm publish
      - name: Publish dbgate-plugin-cosmosdb
        run: |
          cd ..
          cd dbgate-merged/plugins/dbgate-plugin-cosmosdb
          npm publish

View File

@@ -1,31 +1,23 @@
# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
name: NPM packages
# on: [push]
on:
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+'
# on:
# push:
# branches:
# - production
jobs:
build:
runs-on: ${{ matrix.os }}
runs-on: '${{ matrix.os }}'
strategy:
matrix:
os: [ubuntu-22.04]
os:
- ubuntu-22.04
steps:
- name: Context
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
GITHUB_CONTEXT: '${{ toJson(github) }}'
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -34,37 +26,30 @@ jobs:
uses: actions/setup-node@v1
with:
node-version: 18.x
- name: Configure NPM token
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NPM_TOKEN: '${{ secrets.NPM_TOKEN }}'
run: |
npm config set '//registry.npmjs.org/:_authToken' "${NPM_TOKEN}"
- name: yarn install
run: |
yarn install
- name: setCurrentVersion
run: |
yarn setCurrentVersion
- name: printSecrets
run: |
yarn printSecrets
yarn printSecrets
env:
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
- name: Publish types
working-directory: packages/types
run: |
npm publish
- name: Publish tools
working-directory: packages/tools
run: |
npm publish
- name: Publish sqltree
working-directory: packages/sqltree
run: |
@@ -74,87 +59,66 @@ jobs:
working-directory: packages/api
run: |
npm publish
- name: Publish datalib
working-directory: packages/datalib
run: |
npm publish
- name: Publish filterparser
working-directory: packages/filterparser
run: |
npm publish
- name: Publish web
working-directory: packages/web
run: |
npm publish
- name: Publish dbgate (obsolete)
working-directory: packages/dbgate
run: |
npm publish
- name: Publish dbgate-serve
working-directory: packages/serve
run: |
npm publish
- name: Publish dbmodel
working-directory: packages/dbmodel
run: |
npm publish
- name: Publish dbgate-plugin-csv
working-directory: plugins/dbgate-plugin-csv
run: |
npm publish
- name: Publish dbgate-plugin-xml
working-directory: plugins/dbgate-plugin-xml
run: |
npm publish
- name: Publish dbgate-plugin-excel
working-directory: plugins/dbgate-plugin-excel
run: |
npm publish
- name: Publish dbgate-plugin-mssql
working-directory: plugins/dbgate-plugin-mssql
run: |
npm publish
- name: Publish dbgate-plugin-mysql
working-directory: plugins/dbgate-plugin-mysql
run: |
npm publish
- name: Publish dbgate-plugin-mongo
working-directory: plugins/dbgate-plugin-mongo
run: |
npm publish
- name: Publish dbgate-plugin-postgres
working-directory: plugins/dbgate-plugin-postgres
run: |
npm publish
- name: Publish dbgate-plugin-sqlite
working-directory: plugins/dbgate-plugin-sqlite
run: |
npm publish
- name: Publish dbgate-plugin-redis
working-directory: plugins/dbgate-plugin-redis
run: |
npm publish
- name: Publish dbgate-plugin-oracle
working-directory: plugins/dbgate-plugin-oracle
run: |
npm publish
- name: Publish dbgate-plugin-clickhouse
working-directory: plugins/dbgate-plugin-clickhouse
run: |

View File

@@ -1,31 +1,43 @@
# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
name: Run tests
on:
'on':
push:
branches:
- master
- develop
- feature/**
jobs:
test-runner:
runs-on: ubuntu-latest
container: node:18
container: 'node:18'
steps:
- name: Context
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- name: Install dependencies for cypress
run: |
apt-get update
apt-get install -y xvfb libgtk2.0-0 libgtk-3-0 libgbm-dev libnotify-dev libnss3 libxss1 libasound2 libxtst6
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: yarn install
run: |
yarn install
- name: Build packer dist for cypress
run: |
yarn prepare:packer
- name: yarn install cypress
run: |
cd e2e-tests
yarn install
- name: Run Cypress tests
run: |
cd e2e-tests
yarn test:ci
- name: Integration tests
run: |
cd integration-tests
yarn test:ci
# yarn wait:ci
- name: Filter parser tests
if: always()
run: |
@@ -39,49 +51,42 @@ jobs:
- uses: tanmen/jest-reporter@v1
if: always()
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
github-token: '${{ secrets.GITHUB_TOKEN }}'
result-file: integration-tests/result.json
action-name: Integration tests
- uses: tanmen/jest-reporter@v1
if: always()
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
github-token: '${{ secrets.GITHUB_TOKEN }}'
result-file: packages/filterparser/result.json
action-name: Filter parser test results
- uses: tanmen/jest-reporter@v1
if: always()
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
github-token: '${{ secrets.GITHUB_TOKEN }}'
result-file: packages/datalib/result.json
action-name: Datalib (perspectives) test results
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: Pwd2020Db
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
options: '--health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5'
mysql:
image: mysql:8.0.18
image: 'mysql:8.0.18'
env:
MYSQL_ROOT_PASSWORD: Pwd2020Db
mssql:
image: mcr.microsoft.com/mssql/server
env:
ACCEPT_EULA: Y
ACCEPT_EULA: 'Y'
SA_PASSWORD: Pwd2020Db
MSSQL_PID: Express
clickhouse:
image: bitnami/clickhouse:24.8.4
image: 'bitnami/clickhouse:24.8.4'
env:
CLICKHOUSE_ADMIN_PASSWORD: Pwd2020Db
# cockroachdb:
# image: cockroachdb/cockroach
oracle:
image: 'gvenzl/oracle-xe:21-slim'
env:
ORACLE_PASSWORD: Pwd2020Db

2
.gitignore vendored
View File

@@ -28,8 +28,6 @@ docker/plugins
npm-debug.log*
yarn-debug.log*
yarn-error.log*
app/src/nativeModulesContent.js
packages/api/src/nativeModulesContent.js
packages/api/src/packagedPluginsContent.js
.VSCodeCounter

View File

@@ -8,10 +8,132 @@ Builds:
- linux - application for linux
- win - application for Windows
### 6.1.1
- ADDED: Trigger support (SQL Server, PostgreSQL, MySQL, Oracle)
- FIXED: PostgreSQL and Oracle export #970
- FIXED: Cursor Becomes Stuck When Escaping "Case" #954
- CHANGED: Default search criteria for tables are names only
- FIXED: Search in packed list
### 6.1.0
- ADDED: Fulltext search in DB model and connections, highlight searched names
- ADDED: Tab preview mode configuration #963
- CHANGED: Single-click to open server connection/database + ability to configure this #959
- ADDED: Option to align numbers to right in data grid #957
- FIXED: Cursor Becomes Stuck When Escaping "Case" #954
- ADDED: Postgres GEOGRAPHY types are shown on map, even when executing query #948
- FIXED: Error displaying CLOB and NCLOB in Oracle
- FIXED: Analysing of foreign keys in Postgres and MS SQL, when the same FKS are used across different schemas
- ADDED: Support of views, procedures, functions to Oracle. Added integration tests for Oracle
- ADDED: Display "No rows" message, quick add new row
- ADDED: Choose default database from list
- ADDED: Default database is automatically selected on connect
- ADDED: Apple-Silicon-only build for Mac #949
- ADDED: Display comment into tables and column list #755
### 6.0.0
- ADDED: Order or filter the indexes for huge tables #922
- ADDED: Empty string filters
- CHANGED: (Premium) Workflow for new installation (used in Docker and AWS distribution)
- ADDED: Show stored procedure and function parameters (MySQL, PostgreSQL, SQL Server, MariaDB) #348
- FIXED: Selected database has changed when closing database grouped tab #983
- ADDED: Add line break option to editor #823
- ADDED: Order or filter the indexes for huge tables #922
- ADDED: Preview mode for the top bar tab like vscode #767
- ADDED: Keyboard navigation between connections, databases and tables
- FIXED: Fixed some issues in connection search
- FIXED: Schema selection in Export does not provide all schemas #924
- CHANGED: Standardized Window menu in MacOS app
- FIXED: Typecast ::date is treated as a parameter #925
- FIXED: App crashes when trying to 'Open Structure' in a readonly connection #926
- FIXED: Selected database has changed when closing database grouped tab #938
- CHANGED: (Premium) Query designer and Query perspective designer moved to Premium edition
- CHANGED: (Premium) Compare database tool - many improvements, moved to Premium edition
- ADDED: (Premium) Export DB model - exporting model to YAML folder, JSON or SQL folder
- CHANGED: Model deployer - many improvements, support of rename missing objects
- ADDED: (Premium) Premium NPM distribution
- CHANGED: (Premium) Amazon Redshift driver moved to Premium edition
- ADDED: Generated API documentation https://dbgate.org/docs/apidoc.html
- ADDED: NPM distribution now supports all dbgate database connectors, many improvements NPM packages
- CHANGED: Optimized size of NPM plugins (eg. dbgate-plugin-mssql from 1.34 MB to 71 kB)
- CHANGED: Unsaved connections are now shown in "Recent and unsaved" folder after disconnect
- FIXED: Correctly show focused control, as defined by UX standards
- ADDED: Data duplicator - weak references
- ADDED: View JSON detail of log messages from export/import jobs and query executions
- ADDED: Rename procedure/function context menu
- ADDED: Show SQL quick view
### 5.5.6
- FIXED: DbGate process consumes 100% after UI closed - Mac, Linux (#917, #915)
- FIXED: Correctly closing connection behind SSH tunnel (#920)
- FIXED: Updating MongoDB documents on MongoDB 4 (#916)
- FIXED: (Premium) DbGate container correctly waits for underlying storage database, if database container is started after dbgate container is started
- FIXED: (Premium) Better handling of connection storage errors
### 5.5.5
- ADDED: AWS IAM authentication for MySQL, MariaDB, PostgreSQL (Premium)
- FIXED: Datetime filtering #912
- FIXED: Load redis keys
- ADDED: Query parameters #913
- FIXED: Data grid with hidden columns #911
- ADDED: Added buttons for one-click authentication methods (Anonymous, OAuth) (Team Premium)
- ADDED: Link for switching Admin/user login (Team Premium)
- FIXED: Save connection params in administration for MS SQL and Postgres storages (Team Premium)
### 5.5.4
- FIXED: correct handling when use LOGIN and PASSWORD env variables #903
- FIXED: fixed problems in dbmodel commandline tool
- ADDED: dbmodel - allow connection defined in environment variables
- FIXED: Load postgres schema on Azure #906
- FIXED: Oauth2 in combination with Google doesn't log payload #727
- CHANGED: Improved error reporting for unhandled errors
- CHANGED: Don't restart docker container in case of unhandled error
- FIXED: Crash when displaying specific data values from MongoDB #908
- ADDED: (Premium) Show purchase button after trial license is expired
### 5.5.3
- FIXED: Separate schema mode #894 - for databases with many schemas
- FIXED: Sort by UUID column in PostgreSQL #895
- ADDED: Load pg_dump outputs #893
- ADDED: Improved column mapping in import/export #330
- FIXED: Fixed some errors in create-table workflow
- CHANGED: Show single schema by default only if all objects are from default schema
- FIXED: MS Entra authentication for Azure SQL
### 5.5.2
- FIXED: MySQL, PostgreSQL readonly connections #900
### 5.5.1
- ADDED: Clickhouse support (#532)
- ADDED: MySQL - specify table engine, show table engine in table list
- FIXED: Hidden primary key name in PK editor for DB engines with anonymous PK (MySQL)
- CHANGED: Import/export dialog is now a tab instead of modal
- ADDED: Saving import/export job
- REMOVED: Ability to reopen export/import wizard from generated script. This was a bit hack, now you could save import/export job instead
- ADDED: Autodetect CSV delimiter
- FIXED: Import CSV files with spaces around quotes
- ADDED: JSON file import
- ADDED: JSON export can export objects with ID field used as object key
- ADDED: JSON and JSON lines imports supports importing from web URL
- FIXED: Editing imported URL in job editor
- ADDED: Quick export from table data grid (#892)
- CHANGED: Create table workflow is reworked, you can specify schema and table name in table editor
- FIXED: After saving new table, table editor is reset to empty state
- ADDED: (PostgreSQL, SQL Server) - ability to filter objects by schema
- ADDED: (PostgreSQL, SQL Server) - Use separate schemas option - for databases with lot of schemas, only selected schema is loaded
- FIXED: Internal refactor of drivers, client objects are not more messed up with auxiliary fields
- ADDED: Copy connection error to clipboard after clicking on error icon
- FIXED: (MySQL) Fixed importing SQL dump exported from mysqldump (#702)
- FIXED: (PostgreSQL) Fixed filtering JSONB fields (#889)
- FIXED: OIDC authentication not working anymore (#891)
- ADDED: Added tests for import from CSV and JSON
- FIXED: multiple shortcuts handling #898
- ADDED: (Premium) MS Entra authentication for Azure SQL databases
### 5.4.4
- CHANGED: Improved autoupdate, notification is now in app
- CHANGED: Default behaviour of autoupdate, new version is downloaded after click of "Download" button
- ADDED: Ability to configure autoupdate (check only, check+download, don't check)\
- ADDED: Ability to configure autoupdate (check only, check+download, don't check)
- ADDED: Option to run check for new version manually
- FIXED: Fixed autoupgrade channel for premium edition
- FIXED: Fixes following issues: #886, #865, #782, #375

View File

@@ -17,6 +17,8 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
* Try it online - [demo.dbgate.org](https://demo.dbgate.org) - online demo application
* **Download** application for Windows, Linux or Mac from [dbgate.org](https://dbgate.org/download/)
* Run web version as [NPM package](https://www.npmjs.com/package/dbgate-serve) or as [docker image](https://hub.docker.com/r/dbgate/dbgate)
* Use nodeJs [scripting interface](https://dbgate.org/docs/scripting) ([API documentation](https://dbgate.org/docs/apidoc))
* [Recommend DbGate](https://testimonial.to/dbgate) | [Rate on G2](https://www.g2.com/products/dbgate/reviews)
## Supported databases
* MySQL
@@ -26,10 +28,11 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
* MongoDB
* Redis
* SQLite
* Amazon Redshift
* Amazon Redshift (Premium)
* CockroachDB
* MariaDB
* CosmosDB (Premium)
* ClickHouse
<!-- Learn more about DbGate features at the [DbGate website](https://dbgate.org/), or try our online [demo application](https://demo.dbgate.org) -->
@@ -69,8 +72,8 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
* Redis tree view, generate script from keys, run Redis script
* Runs as application for Windows, Linux and Mac. Or in Docker container on server and in web Browser on client.
* Import, export from/to CSV, Excel, JSON, NDJSON, XML
* Free table editor - quick table data editing (cleanup data after import/before export, prototype tables etc.)
* Archives - backup your data in NDJSON files on local filesystem (or on DbGate server, when using web application)
* NDJSON data viewer and editor - browse NDJSON data, edit data and structure directly on NDJSON files. Works also for big NDJSON files
* Charts, export chart to HTML page
* For detailed info, how to run DbGate in docker container, visit [docker hub](https://hub.docker.com/r/dbgate/dbgate)
* Extensible plugin architecture
@@ -84,7 +87,7 @@ Any contributions are welcome. If you want to contribute without coding, conside
* Create issue, if you find problem in app, or you have idea to new feature. If issue already exists, you could leave comment on it, to prioritise most wanted issues
* Create some tutorial video on [youtube](https://www.youtube.com/playlist?list=PLCo7KjCVXhr0RfUSjM9wJMsp_ShL1q61A)
* Become a backer on [GitHub sponsors](https://github.com/sponsors/dbgate) or [Open collective](https://opencollective.com/dbgate)
* Where a small coding is acceptable for you, you could [create plugin](https://dbgate.org/docs/plugin-development.html). Plugins for new themes can be created actually without JS coding
* Where a small coding is acceptable for you, you could [create plugin](https://dbgate.org/docs/plugin-development). Plugins for new themes can be created actually without JS coding
Thank you!

View File

@@ -1,12 +1,73 @@
const fs = require('fs');
const path = require('path');
const volatilePackages = require('./common/volatilePackages');
function adjustFile(file) {
function adjustFile(file, isApp = false) {
const json = JSON.parse(fs.readFileSync(file, { encoding: 'utf-8' }));
function processPackageFile(packageFile) {
const pluginJson = JSON.parse(fs.readFileSync(packageFile, { encoding: 'utf-8' }));
for (const depkey of ['dependencies', 'optionalDependencies']) {
for (const dependency of Object.keys(pluginJson[depkey] || {})) {
if (!volatilePackages.includes(dependency)) {
// add only voletile packages
continue;
}
if (!json[depkey]) {
json[depkey] = {};
}
if (json[depkey][dependency]) {
if (json[depkey][dependency] != pluginJson[depkey][dependency]) {
console.log(`Dependency ${dependency} in ${packageName} is different from ${file}`);
}
continue;
}
json[depkey][dependency] = pluginJson[depkey][dependency];
}
}
}
for (const packageName of fs.readdirSync('plugins')) {
if (!packageName.startsWith('dbgate-plugin-')) continue;
processPackageFile(path.join('plugins', packageName, 'package.json'));
}
if (isApp) {
// add volatile dependencies from api to app
processPackageFile(path.join('packages', 'api', 'package.json'));
}
if (process.platform != 'win32') {
delete json.optionalDependencies.msnodesqlv8;
}
if (process.argv.includes('--community')) {
delete json.optionalDependencies['mongodb-client-encryption'];
}
if (isApp && process.argv.includes('--premium')) {
json.build.win.target = [
{
target: 'nsis',
arch: ['x64'],
},
];
json.build.linux.target = [
{
target: 'AppImage',
arch: ['x64'],
},
];
json.name = 'dbgate-premium';
json.build.artifactName = 'dbgate-premium-${version}-${os}_${arch}.${ext}';
json.build.appId = 'org.dbgate.premium';
json.build.productName = 'DbGate Premium';
}
fs.writeFileSync(file, JSON.stringify(json, null, 2), 'utf-8');
}
adjustFile('packages/api/package.json');
adjustFile('app/package.json');
adjustFile('app/package.json', true);
fs.writeFileSync('common/useBundleExternals.js', "module.exports = 'true';", 'utf-8');

View File

@@ -1,6 +1,6 @@
{
"name": "dbgate",
"version": "5.0.0-alpha.1",
"version": "6.0.0-alpha.1",
"private": true,
"author": "Jan Prochazka <jenasoft.database@gmail.com>",
"description": "Opensource database administration tool",
@@ -19,7 +19,6 @@
"artifactName": "dbgate-${version}-${os}_${arch}.${ext}",
"appId": "org.dbgate",
"productName": "DbGate",
"afterSign": "electron-builder-notarize",
"asarUnpack": "**/*.node",
"mac": {
"category": "database",
@@ -38,9 +37,11 @@
"target": "default",
"arch": [
"universal",
"x64"
"x64",
"arm64"
]
}
},
"notarize": true
},
"linux": {
"target": [
@@ -109,7 +110,8 @@
"files": [
"packages",
"src",
"icon.png"
"icon.png",
"!node_modules/cpu-features/build/**"
]
},
"homepage": "./",
@@ -128,12 +130,6 @@
"copyfiles": "^2.2.0",
"cross-env": "^6.0.3",
"electron": "30.0.2",
"electron-builder": "23.1.0",
"electron-builder-notarize": "^1.5.2"
},
"optionalDependencies": {
"better-sqlite3": "9.6.0",
"msnodesqlv8": "^4.2.1",
"oracledb": "^6.6.0"
"electron-builder": "25.1.8"
}
}

View File

@@ -108,7 +108,7 @@ function commandItem(item) {
}
function buildMenu() {
let template = _cloneDeepWith(mainMenuDefinition({ editMenu: true }), item => {
let template = _cloneDeepWith(mainMenuDefinition({ editMenu: true, isMac: isMac() }), item => {
if (item.divider) {
return { type: 'separator' };
}
@@ -430,7 +430,6 @@ function createWindow() {
);
global.API_PACKAGE = apiPackage;
global.NATIVE_MODULES = path.join(__dirname, 'nativeModules');
// console.log('global.API_PACKAGE', global.API_PACKAGE);
const api = require(apiPackage);

View File

@@ -1,4 +1,4 @@
module.exports = ({ editMenu }) => [
module.exports = ({ editMenu, isMac }) => [
{
label: 'File',
submenu: [
@@ -9,9 +9,9 @@ module.exports = ({ editMenu }) => [
{ command: 'new.queryDesign', hideDisabled: true },
{ command: 'new.diagram', hideDisabled: true },
{ command: 'new.perspective', hideDisabled: true },
{ command: 'new.freetable', hideDisabled: true },
{ command: 'new.shell', hideDisabled: true },
{ command: 'new.jsonl', hideDisabled: true },
{ command: 'new.modelTransform', hideDisabled: true },
{ divider: true },
{ command: 'file.open', hideDisabled: true },
{ command: 'file.openArchive', hideDisabled: true },
@@ -24,20 +24,6 @@ module.exports = ({ editMenu }) => [
{ command: 'app.disconnect', hideDisabled: true, skipInApp: true },
],
},
{
label: 'Window',
submenu: [
{ command: 'tabs.closeTab', hideDisabled: false },
{ command: 'tabs.closeAll', hideDisabled: false },
{ command: 'tabs.closeTabsWithCurrentDb', hideDisabled: false },
{ command: 'tabs.closeTabsButCurrentDb', hideDisabled: false },
{ divider: true },
{ command: 'app.zoomIn', hideDisabled: true },
{ command: 'app.zoomOut', hideDisabled: true },
{ command: 'app.zoomReset', hideDisabled: true },
],
},
editMenu
? {
label: 'Edit',
@@ -75,6 +61,15 @@ module.exports = ({ editMenu }) => [
{ divider: true },
{ command: 'theme.changeTheme', hideDisabled: true },
{ command: 'settings.show' },
{ divider: true },
{ command: 'tabs.closeTab', hideDisabled: false },
{ command: 'tabs.closeAll', hideDisabled: false },
{ command: 'tabs.closeTabsWithCurrentDb', hideDisabled: false },
{ command: 'tabs.closeTabsButCurrentDb', hideDisabled: false },
{ divider: true },
{ command: 'app.zoomIn', hideDisabled: true },
{ command: 'app.zoomOut', hideDisabled: true },
{ command: 'app.zoomReset', hideDisabled: true },
],
},
{
@@ -94,6 +89,14 @@ module.exports = ({ editMenu }) => [
{ command: 'app.resetSettings', hideDisabled: true },
],
},
...(isMac
? [
{
role: 'window',
submenu: [{ role: 'minimize' }, { role: 'zoom' }, { type: 'separator' }, { role: 'front' }],
},
]
: []),
{
label: 'Help',
submenu: [

View File

@@ -1,3 +0,0 @@
const content = require('./nativeModulesContent');
module.exports = content;

View File

@@ -0,0 +1,9 @@
// this file is generated automatically by script fillNativeModules.js, do not edit it manually
const content = {};
content['better-sqlite3'] = () => require('better-sqlite3');
content['oracledb'] = () => require('oracledb');
module.exports = content;

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,19 @@
const useBundleExternals = require('./useBundleExternals');
const getBundleExternals = require('./getBundleExternals');
// Builds a webpack "externals" map for the given package.json.
// When bundled externals are enabled (useBundleExternals == 'true'),
// the fixed volatile-package list is used; otherwise every dependency
// and optional dependency of the package is marked as a commonjs external.
function buildExternalsFromDependencies(packageJson) {
  if (useBundleExternals === 'true') {
    return getBundleExternals();
  }
  const { dependencies, optionalDependencies } = packageJson;
  const externals = {};
  const allDeps = [...Object.keys(dependencies || {}), ...Object.keys(optionalDependencies || {})];
  for (const dep of allDeps) {
    externals[dep] = `commonjs ${dep}`;
  }
  return externals;
}

module.exports = buildExternalsFromDependencies;

View File

@@ -0,0 +1,33 @@
const directory = process.argv[2];
const fs = require('fs');
const volatilePackages = require('./volatilePackages');
const apiPackageJson = JSON.parse(fs.readFileSync(`packages/api/package.json`, { encoding: 'utf-8' }));
const dependencies = {};
const optionalDependencies = {};
for (const pkg of volatilePackages) {
if (pkg == 'msnodesqlv8' && process.platform != 'win32') {
continue;
}
if (apiPackageJson.dependencies[pkg]) {
dependencies[pkg] = apiPackageJson.dependencies[pkg];
}
if (apiPackageJson.optionalDependencies?.[pkg]) {
optionalDependencies[pkg] = apiPackageJson.optionalDependencies[pkg];
}
}
fs.writeFileSync(
`${directory}/package.json`,
JSON.stringify(
{
dependencies,
optionalDependencies,
},
null,
2
),
'utf-8'
);

View File

@@ -0,0 +1,10 @@
const volatilePackages = require('./volatilePackages');
// Produces the webpack "externals" map for all volatile packages,
// marking each one as a commonjs external so it is resolved from
// node_modules at runtime instead of being bundled.
function getBundleExternals() {
  const externals = {};
  for (const pkg of volatilePackages) {
    externals[pkg] = `commonjs ${pkg}`;
  }
  return externals;
}

module.exports = getBundleExternals;

174
common/processWorkflows.js Normal file
View File

@@ -0,0 +1,174 @@
const fs = require('fs');
const path = require('path');
const yaml = require('js-yaml');
const _ = require('lodash');

// Input directory with workflow templates; output directory with the
// generated GitHub Actions workflow files.
const indir = path.resolve(path.join(__dirname, '..', 'workflow-templates'));
const outdir = path.resolve(path.join(__dirname, '..', '.github', 'workflows'));

// Named fragments collected from `_module` template files; referenced from
// other templates via `_include`.
const includes = {};

// Header prepended to every generated workflow file.
const HEADER = `# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
`;
// Loads every `_module` template file from the input directory and registers
// its named fragments in the module-level `includes` map. The `_module`
// marker key itself is not registered.
function readIncludes() {
  for (const file of fs.readdirSync(indir)) {
    const parsed = yaml.load(fs.readFileSync(path.join(indir, file), { encoding: 'utf-8' }));
    if (!parsed._module) {
      continue;
    }
    for (const [key, fragment] of Object.entries(parsed)) {
      if (key !== '_module') {
        includes[key] = fragment;
      }
    }
  }
}
// Set by processJsonStep whenever a pass changed anything; processJson uses
// it to decide whether another pass is needed.
let modified = false;

// Returns true when the given `_if` condition holds for the current template
// arguments. Only string conditions (def names) are supported; anything else
// never matches.
function conditionMatch(condition, args) {
  if (!_.isString(condition)) {
    return false;
  }
  return args.defs.includes(condition);
}
/**
 * Performs one rewrite pass over a template JSON tree via lodash cloneDeepWith.
 *
 * Handled constructs:
 *  - array items with `_if`: kept (without `_if`) only when the condition matches,
 *  - array items with `_replace` / `_include`: spliced in; array fragments are flattened,
 *  - plain objects keyed by def names: collapsed to the branch of the active def
 *    (or dropped entirely when no active def matches),
 *  - strings: literal substitutions from args.stringReplace,
 *  - object values with `_include` / `_replace`: replaced by the referenced fragment.
 *
 * Sets the module-level `modified` flag whenever anything changed, so the
 * caller knows another pass is needed.
 */
function processJsonStep(json, args) {
  return _.cloneDeepWith(json, value => {
    if (_.isArray(value)) {
      const res = [];
      let arrayModified = false;
      for (const item of value) {
        if (item._if) {
          modified = true;
          arrayModified = true;
          if (conditionMatch(item._if, args)) {
            res.push(_.omit(item, ['_if']));
          }
        } else if (item._replace || item._include) {
          const replaceWith = item._replace ? args.replace?.[item._replace] : includes[item._include];
          if (replaceWith) {
            modified = true;
            arrayModified = true;
            if (_.isArray(replaceWith)) {
              res.push(...replaceWith);
            } else {
              res.push(replaceWith);
            }
          } else {
            res.push(item);
          }
        } else {
          res.push(item);
        }
      }
      if (arrayModified) {
        return res;
      }
      return undefined;
    }
    if (_.isPlainObject(value)) {
      // object whose keys include def names: collapse it to the branch
      // matching one of the active defs (drop it when none matches)
      if (_.intersection(args.allDefs ?? [], Object.keys(value))?.length > 0) {
        modified = true;
        for (const key in value) {
          if (args.defs.includes(key)) {
            return value[key];
          }
        }
        return undefined;
      }
    }
    if (_.isString(value)) {
      let stringModified = false;
      for (const key of Object.keys(args.stringReplace ?? {})) {
        if (value.includes(key)) {
          modified = true;
          stringModified = true;
          value = value.replaceAll(key, args.stringReplace[key]);
        }
      }
      if (stringModified) {
        return value;
      }
      return undefined;
    }
    if (value?._include) {
      modified = true;
      return includes[value._include];
    }
    if (value?._replace) {
      modified = true;
      // FIX: was `args?.replace[value?._replace]`, which throws a TypeError
      // when args.replace is undefined; use optional chaining on the map,
      // consistent with the array branch above.
      return args.replace?.[value._replace];
    }
  });
}
// Repeatedly applies processJsonStep until a pass makes no further changes,
// bounded by a fixed iteration cap to protect against cyclic includes.
function processJson(json, args = {}) {
  const MAX_STEPS = 64;
  let step = 0;
  do {
    modified = false;
    json = processJsonStep(json, args);
    step += 1;
  } while (modified && step < MAX_STEPS);
  return json;
}
// Generates workflow files in the output directory from every template in the
// input directory. `_module` files are skipped (they only provide includes).
// Plain templates are expanded 1:1 under their own file name; templates with
// a top-level `_templates` key are expanded once per named variant, each
// variant supplying its own defs / replace / string-replace arguments and
// target file name.
function processFiles() {
  const dumpOptions = {
    // -1 disables line wrapping so long expressions are not folded by js-yaml
    lineWidth: -1,
  };
  for (const file of fs.readdirSync(indir)) {
    const text = fs.readFileSync(path.join(indir, file), { encoding: 'utf-8' });
    const json = yaml.load(text);
    if (json._module) {
      continue;
    }
    if (json._templates) {
      // allDefs collects every def name used by any variant, so that
      // variant-keyed objects can be recognized (and collapsed) during processing
      const allDefs = Object.keys(json._templates);
      for (const key in json._templates) {
        allDefs.push(...(json._templates[key].defs ?? []));
      }
      for (const key in json._templates) {
        const args = {
          // the variant name itself acts as an active def
          defs: [key, ...(json._templates[key]?.defs ?? [])],
          replace: json._templates[key]?.replace,
          stringReplace: json._templates[key]?.['string-replace'],
          allDefs,
        };
        const converted = processJson(_.omit(json, ['_templates']), args);
        const out = path.join(outdir, json._templates[key].file);
        fs.writeFileSync(out, HEADER + yaml.dump(converted, dumpOptions));
      }
    } else {
      fs.writeFileSync(path.join(outdir, file), HEADER + yaml.dump(processJson(json), dumpOptions));
    }
  }
}
// Removes every previously generated workflow file from the output directory,
// so stale workflows do not survive a template rename.
function deleteOldFiles() {
  for (const entry of fs.readdirSync(outdir)) {
    fs.unlinkSync(path.join(outdir, entry));
  }
}
// Entry point: wipe previously generated workflows, load shared include
// fragments, then regenerate all workflow files from the templates.
function run() {
  deleteOldFiles();
  readIncludes();
  processFiles();
}

run();

View File

@@ -0,0 +1 @@
// NOTE(review): exports the STRING 'false', not the boolean — presumably a
// build-time stub substituted for another module; confirm consumers expect a
// string here before changing it.
module.exports = 'false';

View File

@@ -0,0 +1,25 @@
// these packages will be never bundled with webpack
// They are native, platform-specific, or otherwise loaded with require() at
// runtime, so the build keeps them external instead of compiling them in.
const volatilePackages = [
  '@clickhouse/client',
  'bson', // this package is already bundled and is used in mongodb
  'mongodb',
  'mongodb-client-encryption',
  'tedious',
  'msnodesqlv8',
  'mysql2',
  'oracledb',
  'pg-copy-streams',
  'pg',
  'ioredis',
  'node-redis-dump2',
  'better-sqlite3',
  '@azure/cosmos',
  '@aws-sdk/rds-signer',
  'activedirectory2',
  'axios',
  'ssh2',
  'wkx',
];

module.exports = volatilePackages;

View File

@@ -0,0 +1,29 @@
const { defineConfig } = require('cypress');
const killPort = require('kill-port');
const { clearTestingData } = require('./e2eTestTools');
const waitOn = require('wait-on');
const { exec } = require('child_process');
module.exports = defineConfig({
e2e: {
setupNodeEvents(on, config) {
// implement node event listeners here
on('before:spec', async details => {
await clearTestingData();
if (config.isInteractive) {
await killPort(3000);
serverProcess = exec('yarn start');
await waitOn({ resources: ['http://localhost:3000'] });
serverProcess.stdout.on('data', data => {
console.log(data.toString());
});
serverProcess.stderr.on('data', data => {
console.error(data.toString());
});
}
});
},
},
});

View File

@@ -0,0 +1,70 @@
// E2E smoke tests for application startup: the app loads with no database
// selected, and a new MySQL connection can be created, saved and opened.
describe('Initialization', () => {
  it('successfully loads', () => {
    cy.visit('http://localhost:3000');
    cy.contains('Database not selected');
  });

  it('adds connection', () => {
    // runOnCI switches between a service hostname (CI) and a localhost port
    // mapping (local docker-compose) — presumably matching the CI compose
    // file; confirm the 'mysql' hostname against the CI setup.
    const runOnCI = Cypress.env('runOnCI');
    cy.visit('http://localhost:3000');
    // cy.get('[data-testid=ConnectionList_buttonNewConnection]').click();
    cy.get('[data-testid=ConnectionDriverFields_connectionType]').select('MySQL');
    cy.get('[data-testid=ConnectionDriverFields_user]').clear().type('root');
    cy.get('[data-testid=ConnectionDriverFields_password]').clear().type('Pwd2020Db');
    if (runOnCI) {
      cy.get('[data-testid=ConnectionDriverFields_server]').clear().type('mysql');
    } else {
      cy.get('[data-testid=ConnectionDriverFields_port]').clear().type('16004');
    }
    cy.get('[data-testid=ConnectionDriverFields_displayName]').clear().type('test-mysql-1');
    cy.get('[data-testid=ConnectionTab_buttonSave]').click();
    cy.get('[data-testid=ConnectionTab_buttonConnect]').click();
    // Seeing a system schema in the tree proves the connection is live.
    cy.contains('performance_schema');
  });

  // NOTE(review): SSH-tunnel test intentionally disabled.
  // it('SSH connection', () => {
  //   const runOnCI = Cypress.env('runOnCI');
  //   cy.get('body')
  //     .trigger('keydown', {
  //       key: 'F1',
  //       code: 'F1',
  //       which: 112,
  //       keyCode: 112,
  //       bubbles: true,
  //     })
  //     .trigger('keyup', {
  //       key: 'F1',
  //       code: 'F1',
  //       which: 112,
  //       keyCode: 112,
  //       bubbles: true,
  //     });
  //   cy.get('body').type('Close all');
  //   cy.get('body').type('{enter}');
  //   cy.visit('http://localhost:3000');
  //   cy.get('[data-testid=ConnectionList_buttonNewConnection]').click();
  //   cy.get('[data-testid=ConnectionDriverFields_connectionType]').select('MySQL');
  //   cy.get('[data-testid=ConnectionDriverFields_user]').clear().type('root');
  //   cy.get('[data-testid=ConnectionDriverFields_password]').clear().type('root');
  //   cy.get('[data-testid=ConnectionSshTunnelFields_sshLogin]').clear().type('root');
  //   cy.get('[data-testid=ConnectionSshTunnelFields_sshPassword]').clear().type('root');
  //   if (runOnCI) {
  //     cy.get('[data-testid=ConnectionSshTunnelFields_sshHost]').clear().type('mysql-ssh');
  //   } else {
  //     cy.get('[data-testid=ConnectionSshTunnelFields_sshPort]').clear().type('16006');
  //   }
  //   cy.get('[data-testid=ConnectionDriverFields_displayName]').clear().type('test-mysql-ssh-1');
  //   cy.get('[data-testid=ConnectionTab_buttonSave]').click();
  //   cy.get('[data-testid=ConnectionTab_buttonConnect]').click();
  //   cy.contains('performance_schema');
  // });

  // it('import chinook DB', () => {
  //   cy.visit('http://localhost:3000');
  //   cy.get('[data-testid=ConnectionTab_buttonConnect]').click();
  // });
});

View File

@@ -0,0 +1,5 @@
{
"name": "Using fixtures to represent data",
"email": "hello@cypress.io",
"body": "Fixtures are a great way to mock data for responses to routes"
}

View File

@@ -0,0 +1,25 @@
// ***********************************************
// This example commands.js shows you how to
// create various custom commands and overwrite
// existing commands.
//
// For more comprehensive examples of custom
// commands please read more here:
// https://on.cypress.io/custom-commands
// ***********************************************
//
//
// -- This is a parent command --
// Cypress.Commands.add('login', (email, password) => { ... })
//
//
// -- This is a child command --
// Cypress.Commands.add('drag', { prevSubject: 'element'}, (subject, options) => { ... })
//
//
// -- This is a dual command --
// Cypress.Commands.add('dismiss', { prevSubject: 'optional'}, (subject, options) => { ... })
//
//
// -- This will overwrite an existing command --
// Cypress.Commands.overwrite('visit', (originalFn, url, options) => { ... })

View File

@@ -0,0 +1,20 @@
// ***********************************************************
// This example support/e2e.js is processed and
// loaded automatically before your test files.
//
// This is a great place to put global configuration and
// behavior that modifies Cypress.
//
// You can change the location of this file or turn off
// automatically serving support files with the
// 'supportFile' configuration option.
//
// You can read more here:
// https://on.cypress.io/configuration
// ***********************************************************
// Import commands.js using ES2015 syntax:
import './commands'
// Alternatively you can use CommonJS syntax:
// require('./commands')

View File

@@ -0,0 +1,25 @@
# Databases used by the e2e test suite. Host ports in the 16000+ range avoid
# clashing with locally installed database servers.
version: '3'
services:
  postgres:
    image: postgres
    restart: always
    environment:
      POSTGRES_PASSWORD: Pwd2020Db
    ports:
      - 16000:5432
  mariadb:
    image: mariadb
    # NOTE(review): --default-authentication-plugin is a MySQL option;
    # confirm the MariaDB image accepts (or ignores) it.
    command: --default-authentication-plugin=mysql_native_password
    restart: always
    ports:
      - 16004:3306
    environment:
      - MYSQL_ROOT_PASSWORD=Pwd2020Db
  mysql-ssh:
    # Custom image (see ./mysql-ssh) running MySQL behind an SSH server,
    # used for SSH-tunnel connection tests.
    build: mysql-ssh
    restart: always
    ports:
      - 16005:3306
      - 16006:22

29
e2e-tests/e2eTestTools.js Normal file
View File

@@ -0,0 +1,29 @@
const path = require('path');
const os = require('os');
const fs = require('fs');

// DbGate keeps its runtime data (saved connections, settings) under ~/.dbgate.
const baseDir = path.join(os.homedir(), '.dbgate');

// function createTimeStamp() {
//   const now = new Date();
//   const year = now.getFullYear();
//   const month = String(now.getMonth() + 1).padStart(2, '0'); // month is 0-indexed
//   const day = String(now.getDate()).padStart(2, '0');
//   const hours = String(now.getHours()).padStart(2, '0');
//   const minutes = String(now.getMinutes()).padStart(2, '0');
//   const seconds = String(now.getSeconds()).padStart(2, '0');
//   // Assemble the date and time into a file-name-safe timestamp
//   const ts = `${year}-${month}-${day}_${hours}-${minutes}-${seconds}`;
//   return ts;
// }

// Deletes the connection list persisted by previous e2e runs so each spec
// starts with no saved connections.
function clearTestingData() {
  if (fs.existsSync(path.join(baseDir, 'connections-e2etests.jsonl'))) {
    fs.unlinkSync(path.join(baseDir, 'connections-e2etests.jsonl'));
  }
}

module.exports = {
  clearTestingData,
};

View File

@@ -0,0 +1,22 @@
# Test-only image: a MySQL server reachable through SSH, used by the e2e
# SSH-tunnel connection tests. Credentials are intentionally weak (root/root).
FROM ubuntu:22.04

RUN apt-get update && \
    apt-get install -y openssh-server mysql-server && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

# sshd refuses to start without its privilege-separation directory.
RUN mkdir /var/run/sshd
# Allow password login as root (acceptable only because this is a test image).
RUN sed -i 's/#PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config
RUN echo 'root:root' | chpasswd

# Switch the MySQL root account to password auth so clients can log in
# as root/root over the tunnel.
RUN service mysql start && \
    mysql -uroot -e "ALTER USER 'root'@'localhost' IDENTIFIED WITH mysql_native_password BY 'root'; FLUSH PRIVILEGES;" && \
    service mysql stop

EXPOSE 22 3306

COPY start.sh /start.sh
RUN chmod +x /start.sh
CMD ["/start.sh"]

View File

@@ -0,0 +1,7 @@
#!/bin/bash
# Container entrypoint: start SSH and MySQL as background services, then
# block forever so the container stays alive.
service ssh start
service mysql start
tail -f /dev/null

21
e2e-tests/package.json Normal file
View File

@@ -0,0 +1,21 @@
{
"name": "e2e-tests",
"version": "1.0.0",
"main": "index.js",
"license": "GPL",
"devDependencies": {
"axios": "^1.7.9",
"cross-env": "^7.0.3",
"cypress": "^13.16.1",
"kill-port": "^2.0.1",
"start-server-and-test": "^2.0.8"
},
"scripts": {
"cy:open": "cypress open --config experimentalInteractiveRunEvents=true",
"cy:run": "cypress run",
"cy:run:ci": "cypress run --env runOnCI=true",
"start": "cd .. && node packer/build/bundle.js --listen-api --run-e2e-tests",
"test:ci": "start-server-and-test start http://localhost:3000 cy:run:ci",
"test": "start-server-and-test start http://localhost:3000 cy:run"
}
}

1363
e2e-tests/yarn.lock Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,24 +0,0 @@
const fs = require('fs');

// Generates the nativeModulesContent.js files, which map native-module names
// to lazy require() thunks. Platform-specific drivers are included only where
// they can actually load (msnodesqlv8 is Windows-only).
let fillContent = '';
if (process.platform === 'win32') {
  fillContent += `content.msnodesqlv8 = () => require('msnodesqlv8');\n`;
}
fillContent += `content['better-sqlite3'] = () => require('better-sqlite3');\n`;
fillContent += `content['oracledb'] = () => require('oracledb');\n`;

const getContent = empty => `
// this file is generated automatically by script fillNativeModules.js, do not edit it manually

const content = {};

${empty ? '' : fillContent}

module.exports = content;
`;

// Electron builds get an empty map for the API package (natives are packaged
// separately there); the app bundle always receives the full map.
// `includes` already returns a boolean, so the former `? true : false` was redundant.
fs.writeFileSync('packages/api/src/nativeModulesContent.js', getContent(process.argv.includes('--electron')));
fs.writeFileSync('app/src/nativeModulesContent.js', getContent(false));

View File

@@ -3,24 +3,33 @@ const _ = require('lodash');
const fp = require('lodash/fp');
const { testWrapper } = require('../tools');
const engines = require('../engines');
const { getAlterDatabaseScript, extendDatabaseInfo, generateDbPairingId } = require('dbgate-tools');
const {
getAlterDatabaseScript,
extendDatabaseInfo,
generateDbPairingId,
formatQueryWithoutParams,
runCommandOnDriver,
} = require('dbgate-tools');
function flatSource() {
const initSql = ['CREATE TABLE ~t1 (~id int primary key)', 'CREATE TABLE ~t2 (~id int primary key)'];
function flatSource(engineCond = x => !x.skipReferences) {
return _.flatten(
engines
.filter(x => !x.skipReferences)
.filter(engineCond)
.map(engine => (engine.objects || []).map(object => [engine.label, object.type, object, engine]))
);
}
async function testDatabaseDiff(conn, driver, mangle, createObject = null) {
await driver.query(conn, `create table t1 (id int not null primary key)`);
await runCommandOnDriver(conn, driver, `create table ~t1 (~id int not null primary key)`);
await driver.query(
await runCommandOnDriver(
conn,
`create table t2 (
id int not null primary key,
t1_id int null references t1(id)
driver,
`create table ~t2 (
~id int not null primary key,
~t1_id int null references ~t1(~id)
)`
);
@@ -61,9 +70,29 @@ describe('Alter database', () => {
db => {
_.remove(db[type], x => x.pureName == 'obj1');
},
object.create1
formatQueryWithoutParams(driver, object.create1)
);
expect(db[type].length).toEqual(0);
})
);
test.each(flatSource(x => x.supportRenameSqlObject))(
'Rename object - %s - %s',
testWrapper(async (conn, driver, type, object, engine) => {
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
await runCommandOnDriver(conn, driver, object.create1);
const structure = extendDatabaseInfo(await driver.analyseFull(conn));
const dmp = driver.createDumper();
dmp.renameSqlObject(structure[type][0], 'renamed1');
await driver.query(conn, dmp.s);
const structure2 = await driver.analyseFull(conn);
expect(structure2[type].length).toEqual(1);
expect(structure2[type][0].pureName).toEqual('renamed1');
})
);
});

View File

@@ -4,15 +4,26 @@ const fp = require('lodash/fp');
const { testWrapper } = require('../tools');
const engines = require('../engines');
const crypto = require('crypto');
const { getAlterTableScript, extendDatabaseInfo, generateDbPairingId } = require('dbgate-tools');
const {
getAlterTableScript,
extendDatabaseInfo,
generateDbPairingId,
formatQueryWithoutParams,
} = require('dbgate-tools');
function pickImportantTableInfo(engine, table) {
const props = ['columnName'];
const props = ['columnName', 'defaultValue'];
if (!engine.skipNullability) props.push('notNull');
if (!engine.skipAutoIncrement) props.push('autoIncrement');
return {
pureName: table.pureName,
columns: table.columns.filter(x => x.columnName != 'rowid').map(fp.pick(props)),
columns: table.columns
.filter(x => x.columnName != 'rowid')
.map(fp.pick(props))
.map(props => _.omitBy(props, x => x == null))
.map(props =>
_.omitBy(props, (v, k) => k == 'defaultValue' && v == 'NULL' && engine.setNullDefaultInsteadOfDrop)
),
};
}
@@ -22,27 +33,36 @@ function checkTableStructure(engine, t1, t2) {
}
async function testTableDiff(engine, conn, driver, mangle) {
await driver.query(conn, `create table t0 (id int not null primary key)`);
await driver.query(conn, formatQueryWithoutParams(driver, `create table ~t0 (~id int not null primary key)`));
await driver.query(
conn,
`create table t1 (
col_pk int not null primary key,
col_std int,
col_def int default 12,
${engine.skipReferences ? '' : 'col_fk int references t0(id),'}
col_idx int,
col_uq int ${engine.skipUnique ? '' : 'unique'} ,
col_ref int ${engine.skipUnique ? '' : 'unique'}
formatQueryWithoutParams(
driver,
`create table ~t1 (
~col_pk int not null primary key,
~col_std int,
~col_def int default 12,
${engine.skipReferences ? '' : '~col_fk int references ~t0(~id),'}
~col_idx int,
~col_uq int ${engine.skipUnique ? '' : 'unique'} ,
~col_ref int ${engine.skipUnique ? '' : 'unique'}
)`
)
);
if (!engine.skipIndexes) {
await driver.query(conn, `create index idx1 on t1(col_idx)`);
await driver.query(conn, formatQueryWithoutParams(driver, `create index ~idx1 on ~t1(~col_idx)`));
}
if (!engine.skipReferences) {
await driver.query(conn, `create table t2 (id int not null primary key, fkval int null references t1(col_ref))`);
await driver.query(
conn,
formatQueryWithoutParams(
driver,
`create table ~t2 (~id int not null primary key, ~fkval int null references ~t1(~col_ref))`
)
);
}
const tget = x => x.tables.find(y => y.pureName == 't1');
@@ -136,4 +156,40 @@ describe('Alter table', () => {
});
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Add default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_std').defaultValue = '123';
});
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Unset default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = undefined;
});
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Change default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = '567';
});
})
);
// test.each(engines.map(engine => [engine.label, engine]))(
// 'Change autoincrement - %s',
// testWrapper(async (conn, driver, engine) => {
// await testTableDiff(engine, conn, driver, tbl => {
// tbl.columns.find(x => x.columnName == 'col_pk').autoIncrement = true;
// });
// })
// );
});

View File

@@ -2,7 +2,7 @@ const engines = require('../engines');
const stream = require('stream');
const { testWrapper } = require('../tools');
const dataDuplicator = require('dbgate-api/src/shell/dataDuplicator');
const { runCommandOnDriver } = require('dbgate-tools');
const { runCommandOnDriver, runQueryOnDriver } = require('dbgate-tools');
describe('Data duplicator', () => {
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
@@ -84,11 +84,77 @@ describe('Data duplicator', () => {
],
});
const res1 = await driver.query(conn, `select count(*) as cnt from t1`);
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
expect(res1.rows[0].cnt.toString()).toEqual('6');
const res2 = await driver.query(conn, `select count(*) as cnt from t2`);
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
expect(res2.rows[0].cnt.toString()).toEqual('6');
})
);
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
'Skip nullable weak refs - %s',
testWrapper(async (conn, driver, engine) => {
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't1',
columns: [
{ columnName: 'id', dataType: 'int', notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
})
);
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't2',
columns: [
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
{ columnName: 'valfk', dataType: 'int', notNull: false },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
})
);
runCommandOnDriver(conn, driver, dmp => dmp.put("insert into ~t1 (~id, ~val) values (1, 'first')"));
const gett2 = () =>
stream.Readable.from([
{ __isStreamHeader: true, __isDynamicStructure: true },
{ id: 1, val: 'v1', valfk: 1 },
{ id: 2, val: 'v2', valfk: 2 },
]);
await dataDuplicator({
systemConnection: conn,
driver,
items: [
{
name: 't2',
operation: 'copy',
openStream: gett2,
},
],
options: {
setNullForUnresolvedNullableRefs: true,
},
});
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
expect(res1.rows[0].cnt.toString()).toEqual('1');
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
expect(res2.rows[0].cnt.toString()).toEqual('2');
const res3 = await runQueryOnDriver(conn, driver, dmp =>
dmp.put(`select count(*) as ~cnt from ~t2 where ~valfk is not null`)
);
expect(res3.rows[0].cnt.toString()).toEqual('1');
})
);
});

View File

@@ -0,0 +1,144 @@
const engines = require('../engines');
const stream = require('stream');
const { testWrapper } = require('../tools');
const tableWriter = require('dbgate-api/src/shell/tableWriter');
const tableReader = require('dbgate-api/src/shell/tableReader');
const copyStream = require('dbgate-api/src/shell/copyStream');
const importDatabase = require('dbgate-api/src/shell/importDatabase');
const fakeObjectReader = require('dbgate-api/src/shell/fakeObjectReader');
const { runQueryOnDriver, runCommandOnDriver } = require('dbgate-tools');

// Object-mode readable producing six (id, country) rows preceded by a
// stream-header record that declares the column list.
function createImportStream() {
  const pass = new stream.PassThrough({
    objectMode: true,
  });
  pass.write({ columns: [{ columnName: 'id' }, { columnName: 'country' }], __isStreamHeader: true });
  pass.write({ id: 1, country: 'Czechia' });
  pass.write({ id: 2, country: 'Austria' });
  pass.write({ country: 'Germany', id: 3 });
  pass.write({ country: 'Romania', id: 4 });
  pass.write({ country: 'Great Britain', id: 5 });
  pass.write({ country: 'Bosna, Hecegovina', id: 6 });
  pass.end();
  return pass;
}

// Object-mode writable sink that collects every chunk into resultArray so
// tests can assert on the exported rows.
function createExportStream() {
  const writable = new stream.Writable({ objectMode: true });
  writable.resultArray = [];
  writable._write = (chunk, encoding, callback) => {
    writable.resultArray.push(chunk);
    callback();
  };
  return writable;
}

// Import/export round-trip tests run against every configured engine.
describe('DB Import/export', () => {
  test.each(engines.map(engine => [engine.label, engine]))(
    'Import one table - %s',
    testWrapper(async (conn, driver, engine) => {
      // const reader = await fakeObjectReader({ delay: 10 });
      // const reader = await fakeObjectReader();
      const reader = createImportStream();
      const writer = await tableWriter({
        systemConnection: conn,
        driver,
        pureName: 't1',
        createIfNotExists: true,
      });
      await copyStream(reader, writer);
      const res = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
      expect(res.rows[0].cnt.toString()).toEqual('6');
    })
  );

  test.each(engines.map(engine => [engine.label, engine]))(
    'Import two tables - %s',
    testWrapper(async (conn, driver, engine) => {
      // const reader = await fakeObjectReader({ delay: 10 });
      // const reader = await fakeObjectReader();
      const reader1 = createImportStream();
      const writer1 = await tableWriter({
        systemConnection: conn,
        driver,
        pureName: 't1',
        createIfNotExists: true,
      });
      await copyStream(reader1, writer1);
      const reader2 = createImportStream();
      const writer2 = await tableWriter({
        systemConnection: conn,
        driver,
        pureName: 't2',
        createIfNotExists: true,
      });
      await copyStream(reader2, writer2);
      const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
      expect(res1.rows[0].cnt.toString()).toEqual('6');
      const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
      expect(res2.rows[0].cnt.toString()).toEqual('6');
    })
  );

  // Only engines that ship a dump fixture (engine.dumpFile) run this one;
  // each engine provides its own verification queries in dumpChecks.
  test.each(engines.filter(x => x.dumpFile).map(engine => [engine.label, engine]))(
    'Import SQL dump - %s',
    testWrapper(async (conn, driver, engine) => {
      // const reader = await fakeObjectReader({ delay: 10 });
      // const reader = await fakeObjectReader();
      await importDatabase({
        systemConnection: conn,
        driver,
        inputFile: engine.dumpFile,
      });
      const structure = await driver.analyseFull(conn);
      for (const check of engine.dumpChecks || []) {
        const res = await driver.query(conn, check.sql);
        expect(res.rows[0].res.toString()).toEqual(check.res);
      }
      // const res1 = await driver.query(conn, `select count(*) as cnt from t1`);
      // expect(res1.rows[0].cnt.toString()).toEqual('6');
      // const res2 = await driver.query(conn, `select count(*) as cnt from t2`);
      // expect(res2.rows[0].cnt.toString()).toEqual('6');
    })
  );

  test.each(engines.map(engine => [engine.label, engine]))(
    'Export one table - %s',
    testWrapper(async (conn, driver, engine) => {
      // const reader = await fakeObjectReader({ delay: 10 });
      // const reader = await fakeObjectReader();
      await runCommandOnDriver(conn, driver, 'create table ~t1 (~id int primary key, ~country varchar(100))');
      const data = [
        [1, 'Czechia'],
        [2, 'Austria'],
        [3, 'Germany'],
        [4, 'Romania'],
        [5, 'Great Britain'],
        [6, 'Bosna, Hecegovina'],
      ];
      for (const row of data) {
        await runCommandOnDriver(conn, driver, dmp =>
          dmp.put('insert into ~t1(~id, ~country) values (%v, %v)', ...row)
        );
      }
      const reader = await tableReader({
        systemConnection: conn,
        driver,
        pureName: 't1',
      });
      const writer = createExportStream();
      await copyStream(reader, writer);
      // Header records are filtered out; the remaining chunks must round-trip
      // exactly to the inserted data.
      expect(writer.resultArray.filter(x => !x.__isStreamHeader).map(row => [row.id, row.country])).toEqual(data);
    })
  );
});

View File

@@ -1,75 +0,0 @@
const engines = require('../engines');
const stream = require('stream');
const { testWrapper } = require('../tools');
const tableWriter = require('dbgate-api/src/shell/tableWriter');
const copyStream = require('dbgate-api/src/shell/copyStream');
const fakeObjectReader = require('dbgate-api/src/shell/fakeObjectReader');
function createImportStream() {
const pass = new stream.PassThrough({
objectMode: true,
});
pass.write({ columns: [{ columnName: 'id' }, { columnName: 'country' }], __isStreamHeader: true });
pass.write({ id: 1, country: 'Czechia' });
pass.write({ id: 2, country: 'Austria' });
pass.write({ country: 'Germany', id: 3 });
pass.write({ country: 'Romania', id: 4 });
pass.write({ country: 'Great Britain', id: 5 });
pass.write({ country: 'Bosna, Hecegovina', id: 6 });
pass.end();
return pass;
}
describe('DB Import', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Import one table - %s',
testWrapper(async (conn, driver, engine) => {
// const reader = await fakeObjectReader({ delay: 10 });
// const reader = await fakeObjectReader();
const reader = createImportStream();
const writer = await tableWriter({
systemConnection: conn,
driver,
pureName: 't1',
createIfNotExists: true,
});
await copyStream(reader, writer);
const res = await driver.query(conn, `select count(*) as cnt from t1`);
expect(res.rows[0].cnt.toString()).toEqual('6');
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Import two tables - %s',
testWrapper(async (conn, driver, engine) => {
// const reader = await fakeObjectReader({ delay: 10 });
// const reader = await fakeObjectReader();
const reader1 = createImportStream();
const writer1 = await tableWriter({
systemConnection: conn,
driver,
pureName: 't1',
createIfNotExists: true,
});
await copyStream(reader1, writer1);
const reader2 = createImportStream();
const writer2 = await tableWriter({
systemConnection: conn,
driver,
pureName: 't2',
createIfNotExists: true,
});
await copyStream(reader2, writer2);
const res1 = await driver.query(conn, `select count(*) as cnt from t1`);
expect(res1.rows[0].cnt.toString()).toEqual('6');
const res2 = await driver.query(conn, `select count(*) as cnt from t2`);
expect(res2.rows[0].cnt.toString()).toEqual('6');
})
);
});

View File

@@ -1,58 +1,176 @@
/// TODO
const { testWrapper } = require('../tools');
const { testWrapper, testWrapperPrepareOnly } = require('../tools');
const _ = require('lodash');
const engines = require('../engines');
const deployDb = require('dbgate-api/src/shell/deployDb');
const { databaseInfoFromYamlModel } = require('dbgate-tools');
const { databaseInfoFromYamlModel, runQueryOnDriver, formatQueryWithoutParams } = require('dbgate-tools');
const generateDeploySql = require('dbgate-api/src/shell/generateDeploySql');
const connectUtility = require('dbgate-api/src/utility/connectUtility');
function checkStructure(structure, model) {
function checkStructure(
engine,
structure,
model,
{ checkRenameDeletedObjects = false, disallowExtraObjects = false } = {}
) {
const expected = databaseInfoFromYamlModel(model);
expect(structure.tables.length).toEqual(expected.tables.length);
for (const [realTable, expectedTable] of _.zip(
_.sortBy(structure.tables, 'pureName'),
_.sortBy(expected.tables, 'pureName')
)) {
expect(realTable.columns.length).toBeGreaterThanOrEqual(expectedTable.columns.length);
for (const expectedTable of expected.tables) {
const realTable = structure.tables.find(x => x.pureName == expectedTable.pureName);
for (const column of expectedTable.columns) {
const realColumn = realTable.columns.find(x => x.columnName == column.columnName);
expect(realColumn).toBeTruthy();
if (!engine.skipNullability) {
expect(realColumn.notNull).toEqual(column.notNull);
}
}
for (const realColumn of realTable.columns) {
const column = expectedTable.columns.find(x => x.columnName == realColumn.columnName);
if (!column) {
if (checkRenameDeletedObjects) {
expect(realColumn.columnName).toMatch(/^_deleted_/);
}
if (disallowExtraObjects) {
expect(realColumn).toBeFalsy();
}
}
}
}
for (const realTable of structure.tables) {
const expectedTable = expected.tables.find(x => x.pureName == realTable.pureName);
if (!expectedTable) {
if (checkRenameDeletedObjects) {
expect(realTable.pureName).toMatch(/^_deleted_/);
}
if (disallowExtraObjects) {
expect(realTable).toBeFalsy();
}
}
}
for (const expectedView of expected.views) {
const realView = structure.views.find(x => x.pureName == expectedView.pureName);
expect(realView).toBeTruthy();
}
for (const realView of structure.views) {
const expectedView = expected.views.find(x => x.pureName == realView.pureName);
if (!expectedView) {
if (disallowExtraObjects) {
expect(realView).toBeFalsy();
}
}
}
}
async function testDatabaseDeploy(conn, driver, dbModelsYaml, testEmptyLastScript) {
// function convertObjectText(text, driver) {
// if (!text) return undefined;
// text = formatQueryWithoutParams(driver, text);
// if (driver.dialect.requireFromDual && text.startsWith('create view ') && !text.includes('from')) {
// text = text + ' from dual';
// }
// return text;
// }
// function convertModelToEngine(model, driver) {
// return model.map(x => ({
// ...x,
// text: convertObjectText(x.text, driver),
// }));
// }
function convertModelToEngine(model, driver) {
return model.map(x => ({
...x,
text: x.text ? formatQueryWithoutParams(driver, x.text) : undefined,
}));
}
async function testDatabaseDeploy(engine, conn, driver, dbModelsYaml, options) {
const { testEmptyLastScript, finalCheckAgainstModel, markDeleted, allowDropStatements } = options || {};
let index = 0;
const dbdiffOptionsExtra = markDeleted
? {
deletedTablePrefix: '_deleted_',
deletedColumnPrefix: '_deleted_',
deletedSqlObjectPrefix: '_deleted_',
}
: {};
dbdiffOptionsExtra.schemaMode = 'ignore';
for (const loadedDbModel of dbModelsYaml) {
const { sql, isEmpty } = await generateDeploySql({
systemConnection: conn,
driver,
loadedDbModel,
});
console.debug('Generated deploy script:', sql);
expect(sql.toUpperCase().includes('DROP ')).toBeFalsy();
if (_.isString(loadedDbModel)) {
await driver.script(conn, formatQueryWithoutParams(driver, loadedDbModel));
} else {
const { sql, isEmpty } = await generateDeploySql({
systemConnection: conn.isPreparedOnly ? undefined : conn,
connection: conn.isPreparedOnly ? conn : undefined,
driver,
loadedDbModel: convertModelToEngine(loadedDbModel, driver),
dbdiffOptionsExtra,
});
console.debug('Generated deploy script:', sql);
if (!allowDropStatements) {
expect(sql.toUpperCase().includes('DROP ')).toBeFalsy();
}
console.log('dbModelsYaml.length', dbModelsYaml.length, index);
if (testEmptyLastScript && index == dbModelsYaml.length - 1) {
expect(isEmpty).toBeTruthy();
console.log('dbModelsYaml.length', dbModelsYaml.length, index);
if (testEmptyLastScript && index == dbModelsYaml.length - 1) {
expect(isEmpty).toBeTruthy();
}
await deployDb({
systemConnection: conn.isPreparedOnly ? undefined : conn,
connection: conn.isPreparedOnly ? conn : undefined,
driver,
loadedDbModel: convertModelToEngine(loadedDbModel, driver),
dbdiffOptionsExtra,
});
}
await deployDb({
systemConnection: conn,
driver,
loadedDbModel,
});
index++;
}
const structure = await driver.analyseFull(conn);
checkStructure(structure, dbModelsYaml[dbModelsYaml.length - 1]);
const dbhan = conn.isPreparedOnly ? await connectUtility(driver, conn, 'read') : conn;
const structure = await driver.analyseFull(dbhan);
if (conn.isPreparedOnly) await driver.close(dbhan);
checkStructure(
engine,
structure,
convertModelToEngine(finalCheckAgainstModel ?? _.findLast(dbModelsYaml, x => _.isArray(x)), driver),
options
);
}
describe('Deploy database', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Deploy database simple - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(conn, driver, [
await testDatabaseDeploy(engine, conn, driver, [
[
{
name: 't1.table.yaml',
json: {
name: 't1',
columns: [{ name: 'id', type: 'int' }],
primaryKey: ['id'],
},
},
],
]);
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Deploy database simple - %s - not connected',
testWrapperPrepareOnly(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
[
{
name: 't1.table.yaml',
@@ -71,6 +189,7 @@ describe('Deploy database', () => {
'Deploy database simple twice - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
engine,
conn,
driver,
[
@@ -95,7 +214,7 @@ describe('Deploy database', () => {
},
],
],
true
{ testEmptyLastScript: true }
);
})
);
@@ -103,7 +222,7 @@ describe('Deploy database', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Add column - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(conn, driver, [
await testDatabaseDeploy(engine, conn, driver, [
[
{
name: 't1.table.yaml',
@@ -135,6 +254,7 @@ describe('Deploy database', () => {
'Dont drop column - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
engine,
conn,
driver,
[
@@ -162,7 +282,7 @@ describe('Deploy database', () => {
},
],
],
true
{ testEmptyLastScript: true }
);
})
);
@@ -171,6 +291,7 @@ describe('Deploy database', () => {
'Foreign keys - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
engine,
conn,
driver,
[
@@ -217,7 +338,7 @@ describe('Deploy database', () => {
},
],
],
true
{ testEmptyLastScript: true }
);
})
);
@@ -225,7 +346,7 @@ describe('Deploy database', () => {
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
'Deploy preloaded data - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(conn, driver, [
await testDatabaseDeploy(engine, conn, driver, [
[
{
name: 't1.table.yaml',
@@ -246,7 +367,7 @@ describe('Deploy database', () => {
],
]);
const res = await driver.query(conn, `select count(*) as cnt from t1`);
const res = await runQueryOnDriver(conn, driver, `select count(*) as ~cnt from ~t1`);
expect(res.rows[0].cnt.toString()).toEqual('3');
})
);
@@ -254,7 +375,7 @@ describe('Deploy database', () => {
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
'Deploy preloaded data - update - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(conn, driver, [
await testDatabaseDeploy(engine, conn, driver, [
[
{
name: 't1.table.yaml',
@@ -293,15 +414,15 @@ describe('Deploy database', () => {
],
]);
const res = await driver.query(conn, `select val from t1 where id = 2`);
const res = await runQueryOnDriver(conn, driver, `select ~val from ~t1 where ~id = 2`);
expect(res.rows[0].val.toString()).toEqual('5');
})
);
test.each(engines.enginesPostgre.map(engine => [engine.label, engine]))(
test.each([engines.postgreSqlEngine].map(engine => [engine.label, engine]))(
'Current timestamp default value - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(conn, driver, [
await testDatabaseDeploy(engine, conn, driver, [
[
{
name: 't1.table.yaml',
@@ -321,9 +442,354 @@ describe('Deploy database', () => {
],
]);
await driver.query(conn, `insert into t1 (id) values (1)`);
const res = await driver.query(conn, ` select val from t1 where id = 1`);
await runQueryOnDriver(conn, driver, `insert into ~t1 (~id) values (1)`);
const res = await runQueryOnDriver(conn, driver, ` select ~val from ~t1 where ~id = 1`);
expect(res.rows[0].val.toString().substring(0, 2)).toEqual('20');
})
);
test.each(engines.filter(x => !x.skipChangeColumn).map(engine => [engine.label, engine]))(
'Change column to NOT NULL column with default - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
[
{
name: 't1.table.yaml',
json: {
name: 't1',
columns: [
{ name: 'id', type: 'int', notNull: true },
{ name: 'val', type: 'int' },
],
primaryKey: ['id'],
},
},
],
'insert into ~t1 (~id, ~val) values (1, 1); insert into ~t1 (~id) values (2)',
[
{
name: 't1.table.yaml',
json: {
name: 't1',
columns: [
{ name: 'id', type: 'int', notNull: true },
{ name: 'val', type: 'int', notNull: true, default: '20' },
],
primaryKey: ['id'],
},
},
],
'insert into ~t1 (~id) values (3);',
]);
const res1 = await runQueryOnDriver(conn, driver, `select ~val from ~t1 where ~id = 1`);
expect(res1.rows[0].val).toEqual(1);
const res2 = await runQueryOnDriver(conn, driver, `select ~val from ~t1 where ~id = 2`);
expect(res2.rows[0].val).toEqual(20);
const res3 = await runQueryOnDriver(conn, driver, `select ~val from ~t1 where ~id = 3`);
expect(res2.rows[0].val).toEqual(20);
})
);
const T1 = {
name: 't1.table.yaml',
json: {
name: 't1',
columns: [
{ name: 'id', type: 'int' },
{ name: 'val', type: 'int' },
],
primaryKey: ['id'],
},
};
const T2 = {
name: 't2.table.yaml',
json: {
name: 't2',
columns: [
{ name: 'id', type: 'int' },
{ name: 'val', type: 'int' },
],
primaryKey: ['id'],
},
};
const T1_DELETED = {
name: '_deleted_t1.table.yaml',
json: {
name: '_deleted_t1',
columns: [
{ name: 'id', type: 'int' },
{ name: 'val', type: 'int' },
],
primaryKey: ['id'],
},
};
const T1_NO_VAL = {
name: 't1.table.yaml',
json: {
name: 't1',
columns: [{ name: 'id', type: 'int' }],
primaryKey: ['id'],
},
};
const T1_DELETED_VAL = {
name: 't1.table.yaml',
json: {
name: 't1',
columns: [
{ name: 'id', type: 'int' },
{ name: '_deleted_val', type: 'int' },
],
primaryKey: ['id'],
},
};
const V1 = {
name: 'v1.view.sql',
text: 'create view ~v1 as select * from ~t1',
};
const V1_VARIANT2 = {
name: 'v1.view.sql',
text: 'create view ~v1 as select ~id + ~id ~idsum from ~t1',
};
const V1_DELETED = {
name: '_deleted_v1.view.sql',
text: 'create view ~_deleted_v1 as select * from ~t1',
};
test.each(engines.map(engine => [engine.label, engine]))(
'Dont remove column - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], [T1_NO_VAL]], {
finalCheckAgainstModel: [T1],
disallowExtraObjects: true,
});
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Dont remove table - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], []], {
finalCheckAgainstModel: [T1],
disallowExtraObjects: true,
});
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Mark table removed - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], [], []], {
markDeleted: true,
disallowExtraObjects: true,
finalCheckAgainstModel: [T1_DELETED],
});
})
);
test.each(engines.filter(engine => engine.supportRenameSqlObject).map(engine => [engine.label, engine]))(
'Mark view removed - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1, V1], [T1], [T1]], {
markDeleted: true,
disallowExtraObjects: true,
finalCheckAgainstModel: [T1, V1_DELETED],
});
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Mark column removed - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], [T1_NO_VAL]], {
markDeleted: true,
disallowExtraObjects: true,
finalCheckAgainstModel: [T1_DELETED_VAL],
});
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Undelete table - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
engine,
conn,
driver,
[
[T1],
// delete table
[],
// undelete table
[T1],
],
{
markDeleted: true,
disallowExtraObjects: true,
}
);
})
);
test.each(engines.filter(engine => engine.supportRenameSqlObject).map(engine => [engine.label, engine]))(
'Undelete view - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1, V1], [T1], [T1, V1]], {
markDeleted: true,
disallowExtraObjects: true,
allowDropStatements: true,
});
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Undelete column - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], [T1_NO_VAL], [T1]], {
markDeleted: true,
disallowExtraObjects: true,
});
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'View redeploy - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
engine,
conn,
driver,
[
[T1, V1],
[T1, V1],
[T1, V1],
],
{
markDeleted: true,
disallowExtraObjects: true,
allowDropStatements: true,
}
);
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Change view - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
engine,
conn,
driver,
[
[T1, V1],
[T1, V1_VARIANT2],
],
{
markDeleted: true,
disallowExtraObjects: true,
allowDropStatements: true,
}
);
})
);
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
'Script drived deploy - basic predeploy - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
[
{
name: '1.predeploy.sql',
text: 'create table ~t1 (~id int primary key); insert into ~t1 (~id) values (1);',
},
],
]);
const res1 = await runQueryOnDriver(conn, driver, 'SELECT COUNT(*) AS ~cnt FROM ~t1');
expect(res1.rows[0].cnt == 1).toBeTruthy();
const res2 = await runQueryOnDriver(conn, driver, 'SELECT COUNT(*) AS ~cnt FROM ~dbgate_deploy_journal');
expect(res2.rows[0].cnt == 1).toBeTruthy();
})
);
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
'Script drived deploy - install+uninstall - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
[
{
name: 't1.uninstall.sql',
text: 'drop table ~t1',
},
{
name: 't1.install.sql',
text: 'create table ~t1 (~id int primary key); insert into ~t1 (~id) values (1)',
},
{
name: 't2.once.sql',
text: 'create table ~t2 (~id int primary key); insert into ~t2 (~id) values (1)',
},
],
[
{
name: 't1.uninstall.sql',
text: 'drop table ~t1',
},
{
name: 't1.install.sql',
text: 'create table ~t1 (~id int primary key, ~val int); insert into ~t1 (~id, ~val) values (1, 11)',
},
{
name: 't2.once.sql',
text: 'insert into ~t2 (~id) values (2)',
},
],
]);
const res1 = await runQueryOnDriver(conn, driver, 'SELECT ~val from ~t1 where ~id = 1');
expect(res1.rows[0].val == 11).toBeTruthy();
const res2 = await runQueryOnDriver(conn, driver, 'SELECT COUNT(*) AS ~cnt FROM ~t2');
expect(res2.rows[0].cnt == 1).toBeTruthy();
const res3 = await runQueryOnDriver(conn, driver, 'SELECT COUNT(*) AS ~cnt FROM ~dbgate_deploy_journal');
expect(res3.rows[0].cnt == 3).toBeTruthy();
const res4 = await runQueryOnDriver(
conn,
driver,
"SELECT ~run_count from ~dbgate_deploy_journal where ~name = 't2.once.sql'"
);
expect(res4.rows[0].run_count == 1).toBeTruthy();
const res5 = await runQueryOnDriver(
conn,
driver,
"SELECT ~run_count from ~dbgate_deploy_journal where ~name = 't1.install.sql'"
);
expect(res5.rows[0].run_count == 2).toBeTruthy();
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Mark table removed, one remains - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1, T2], [T2], [T2]], {
markDeleted: true,
disallowExtraObjects: true,
finalCheckAgainstModel: [T1_DELETED, T2],
});
})
);
});

View File

@@ -0,0 +1,151 @@
const dbgateApi = require('dbgate-api/src/shell');
// const jsonLinesWriter = require('dbgate-api/src/shell/jsonLinesWriter');
const tmp = require('tmp');
// const dbgatePluginCsv = require('dbgate-plugin-csv/src/backend');
const fs = require('fs');
const requirePlugin = require('dbgate-api/src/shell/requirePlugin');
const CSV_DATA = `Issue Number; Title; Github URL; Labels; State; Created At; Updated At; Reporter; Assignee
801; "Does it 'burst' the database on startup or first lUI load ? "; https://github.com/dbgate/dbgate/issues/801; ""; open; 05/23/2024; 05/23/2024; rgarrigue;
799; "BUG: latest AppImage crashes on opening in Fedora 39"; https://github.com/dbgate/dbgate/issues/799; ""; open; 05/21/2024; 05/24/2024; BenGraham-Git;
798; "MongoDB write operations fail"; https://github.com/dbgate/dbgate/issues/798; "bug,solved"; open; 05/21/2024; 05/24/2024; mahmed0715;
797; "BUG: Unable to open SQL files"; https://github.com/dbgate/dbgate/issues/797; "bug"; open; 05/20/2024; 05/21/2024; cesarValdivia;
795; "BUG: MS SQL Server connection error (KEY_USAGE_BIT_INCORRECT)"; https://github.com/dbgate/dbgate/issues/795; ""; open; 05/20/2024; 05/20/2024; keskinonur;
794; "GLIBC_2.29' not found and i have 2.31"; https://github.com/dbgate/dbgate/issues/794; ""; closed; 05/20/2024; 05/21/2024; MFdanGM;
793; "BUG: PostgresSQL doesn't show tables when connected"; https://github.com/dbgate/dbgate/issues/793; ""; open; 05/20/2024; 05/22/2024; stomper013;
792; "FEAT: Wayland support"; https://github.com/dbgate/dbgate/issues/792; ""; closed; 05/19/2024; 05/21/2024; VosaXalo;
`;
async function getReaderRows(reader) {
const jsonLinesFileName = tmp.tmpNameSync();
const writer = await dbgateApi.jsonLinesWriter({
fileName: jsonLinesFileName,
});
await dbgateApi.copyStream(reader, writer);
const jsonData = fs.readFileSync(jsonLinesFileName, 'utf-8');
const rows = jsonData
.split('\n')
.filter(x => x.trim() !== '')
.map(x => JSON.parse(x));
return rows;
}
test('csv import test', async () => {
const dbgatePluginCsv = requirePlugin('dbgate-plugin-csv');
const csvFileName = tmp.tmpNameSync();
fs.writeFileSync(csvFileName, CSV_DATA);
const reader = await dbgatePluginCsv.shellApi.reader({
fileName: csvFileName,
});
const rows = await getReaderRows(reader);
expect(rows[0].columns).toEqual([
{ columnName: 'Issue Number' },
{ columnName: 'Title' },
{ columnName: 'Github URL' },
{ columnName: 'Labels' },
{ columnName: 'State' },
{ columnName: 'Created At' },
{ columnName: 'Updated At' },
{ columnName: 'Reporter' },
{ columnName: 'Assignee' },
]);
expect(rows.length).toEqual(9);
expect(rows[1]).toEqual({
'Issue Number': '801',
Title: "Does it 'burst' the database on startup or first lUI load ? ",
'Github URL': 'https://github.com/dbgate/dbgate/issues/801',
Labels: '',
State: 'open',
'Created At': '05/23/2024',
'Updated At': '05/23/2024',
Reporter: 'rgarrigue',
Assignee: '',
});
});
test('JSON array import test', async () => {
const jsonFileName = tmp.tmpNameSync();
fs.writeFileSync(
jsonFileName,
JSON.stringify([
{ id: 1, val: 'v1' },
{ id: 2, val: 'v2' },
])
);
const reader = await dbgateApi.jsonReader({
fileName: jsonFileName,
});
const rows = await getReaderRows(reader);
expect(rows.length).toEqual(2);
expect(rows).toEqual([
{ id: 1, val: 'v1' },
{ id: 2, val: 'v2' },
]);
});
test('JSON object import test', async () => {
const jsonFileName = tmp.tmpNameSync();
fs.writeFileSync(
jsonFileName,
JSON.stringify({
k1: { id: 1, val: 'v1' },
k2: { id: 2, val: 'v2' },
})
);
const reader = await dbgateApi.jsonReader({
fileName: jsonFileName,
jsonStyle: 'object',
keyField: 'mykey',
});
const rows = await getReaderRows(reader);
expect(rows.length).toEqual(2);
expect(rows).toEqual([
{ mykey: 'k1', id: 1, val: 'v1' },
{ mykey: 'k2', id: 2, val: 'v2' },
]);
});
test('JSON filtered object import test', async () => {
const jsonFileName = tmp.tmpNameSync();
fs.writeFileSync(
jsonFileName,
JSON.stringify({
filtered: {
k1: { id: 1, val: 'v1' },
k2: { id: 2, val: 'v2' },
},
})
);
const reader = await dbgateApi.jsonReader({
fileName: jsonFileName,
jsonStyle: 'object',
keyField: 'mykey',
rootField: 'filtered',
});
const rows = await getReaderRows(reader);
expect(rows.length).toEqual(2);
expect(rows).toEqual([
{ mykey: 'k1', id: 1, val: 'v1' },
{ mykey: 'k2', id: 2, val: 'v2' },
]);
});

View File

@@ -1,8 +1,9 @@
const { testWrapper } = require('../tools');
const engines = require('../engines');
const _ = require('lodash');
const { formatQueryWithoutParams, runCommandOnDriver } = require('dbgate-tools');
const initSql = ['CREATE TABLE t1 (id int primary key)', 'CREATE TABLE t2 (id int primary key)'];
const initSql = ['CREATE TABLE ~t1 (~id int primary key)', 'CREATE TABLE ~t2 (~id int primary key)'];
function flatSource() {
return _.flatten(
@@ -10,6 +11,18 @@ function flatSource() {
);
}
function flatSourceParameters() {
return _.flatten(
engines.map(engine =>
(engine.parameters || []).map(parameter => [engine.label, parameter.testName, parameter, engine])
)
);
}
function flatSourceTriggers() {
return _.flatten(engines.map(engine => (engine.triggers || []).map(trigger => [engine.label, trigger, engine])));
}
const obj1Match = expect.objectContaining({
pureName: 'obj1',
});
@@ -26,9 +39,9 @@ describe('Object analyse', () => {
test.each(flatSource())(
'Full analysis - %s - %s',
testWrapper(async (conn, driver, type, object, engine) => {
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
await driver.query(conn, object.create1, { discardResult: true });
await runCommandOnDriver(conn, driver, object.create1);
const structure = await driver.analyseFull(conn);
expect(structure[type].length).toEqual(1);
@@ -39,11 +52,11 @@ describe('Object analyse', () => {
test.each(flatSource())(
'Incremental analysis - add - %s - %s',
testWrapper(async (conn, driver, type, object, engine) => {
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
await driver.query(conn, object.create2, { discardResult: true });
await runCommandOnDriver(conn, driver, object.create2);
const structure1 = await driver.analyseFull(conn);
await driver.query(conn, object.create1, { discardResult: true });
await runCommandOnDriver(conn, driver, object.create1);
const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2[type].length).toEqual(2);
@@ -54,12 +67,12 @@ describe('Object analyse', () => {
test.each(flatSource())(
'Incremental analysis - drop - %s - %s',
testWrapper(async (conn, driver, type, object, engine) => {
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
await driver.query(conn, object.create1, { discardResult: true });
await driver.query(conn, object.create2, { discardResult: true });
await runCommandOnDriver(conn, driver, object.create1);
await runCommandOnDriver(conn, driver, object.create2);
const structure1 = await driver.analyseFull(conn);
await driver.query(conn, object.drop2, { discardResult: true });
await runCommandOnDriver(conn, driver, object.drop2);
const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2[type].length).toEqual(1);
@@ -70,15 +83,15 @@ describe('Object analyse', () => {
test.each(flatSource())(
'Create SQL - add - %s - %s',
testWrapper(async (conn, driver, type, object, engine) => {
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
await driver.query(conn, object.create1, { discardResult: true });
await runCommandOnDriver(conn, driver, object.create1);
const structure1 = await driver.analyseFull(conn);
await driver.query(conn, object.drop1, { discardResult: true });
await runCommandOnDriver(conn, driver, object.drop1);
const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2[type].length).toEqual(0);
await driver.query(conn, structure1[type][0].createSql, { discardResult: true });
await driver.script(conn, structure1[type][0].createSql);
const structure3 = await driver.analyseIncremental(conn, structure2);
@@ -86,4 +99,75 @@ describe('Object analyse', () => {
expect(structure3[type][0]).toEqual(type.includes('views') ? view1Match : obj1Match);
})
);
test.each(flatSourceParameters())(
'Test parameters simple analyse - %s - %s',
testWrapper(async (conn, driver, testName, parameter, engine) => {
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
for (const sql of engine.parametersOtherSql) await runCommandOnDriver(conn, driver, sql);
await runCommandOnDriver(conn, driver, parameter.create);
const structure = await driver.analyseFull(conn);
const parameters = structure[parameter.objectTypeField].find(x => x.pureName == 'obj1').parameters;
expect(parameters.length).toEqual(parameter.list.length);
for (let i = 0; i < parameters.length; i += 1) {
expect(parameters[i]).toEqual(expect.objectContaining(parameter.list[i]));
}
})
);
test.each(flatSourceParameters())(
'Test parameters create SQL - %s - %s',
testWrapper(async (conn, driver, testName, parameter, engine) => {
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
for (const sql of engine.parametersOtherSql) await runCommandOnDriver(conn, driver, sql);
await runCommandOnDriver(conn, driver, parameter.create);
const structure1 = await driver.analyseFull(conn);
await runCommandOnDriver(conn, driver, parameter.drop);
const obj = structure1[parameter.objectTypeField].find(x => x.pureName == 'obj1');
await driver.script(conn, obj.createSql, { discardResult: true });
const structure2 = await driver.analyseFull(conn);
const parameters = structure2[parameter.objectTypeField].find(x => x.pureName == 'obj1').parameters;
expect(parameters.length).toEqual(parameter.list.length);
for (let i = 0; i < parameters.length; i += 1) {
expect(parameters[i]).toEqual(expect.objectContaining(parameter.list[i]));
}
})
);
test.each(flatSourceTriggers())(
'Test triggers - %s - %s',
testWrapper(async (conn, driver, trigger) => {
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
const { triggerOtherDropSql, triggerOtherCreateSql, create, drop, expected, objectTypeField } = trigger;
if (triggerOtherCreateSql) await runCommandOnDriver(conn, driver, triggerOtherCreateSql);
await runCommandOnDriver(conn, driver, create);
const structure = await driver.analyseFull(conn);
await runCommandOnDriver(conn, driver, drop);
if (triggerOtherDropSql) await runCommandOnDriver(conn, driver, triggerOtherDropSql);
const createdTrigger = structure[objectTypeField].find(x => x.pureName == expected.pureName);
expect(createdTrigger).toEqual(expect.objectContaining(expected));
// test trigger createSql
if (triggerOtherCreateSql) await runCommandOnDriver(conn, driver, triggerOtherCreateSql);
await driver.script(conn, createdTrigger.createSql);
const structure2 = await driver.analyseFull(conn);
const createdTrigger2 = structure2[objectTypeField].find(x => x.pureName == expected.pureName);
expect(createdTrigger2).toEqual(expect.objectContaining(expected));
})
);
});
console.log(flatSourceTriggers());

View File

@@ -1,11 +1,12 @@
const engines = require('../engines');
const { splitQuery } = require('dbgate-query-splitter');
const { testWrapper } = require('../tools');
const { runQueryOnDriver, runCommandOnDriver, formatQueryWithoutParams } = require('dbgate-tools');
const initSql = [
'CREATE TABLE t1 (id int primary key)',
'INSERT INTO t1 (id) VALUES (1)',
'INSERT INTO t1 (id) VALUES (2)',
'CREATE TABLE ~t1 (~id int primary key)',
'INSERT INTO ~t1 (~id) VALUES (1)',
'INSERT INTO ~t1 (~id) VALUES (2)',
];
expect.extend({
@@ -51,7 +52,7 @@ class StreamHandler {
function executeStreamItem(driver, conn, sql) {
return new Promise(resolve => {
const handler = new StreamHandler(resolve);
driver.stream(conn, sql, handler);
driver.stream(conn, formatQueryWithoutParams(driver, sql), handler);
});
}
@@ -68,9 +69,11 @@ describe('Query', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Simple query - %s',
testWrapper(async (conn, driver, engine) => {
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
for (const sql of initSql) {
await runCommandOnDriver(conn, driver, dmp => dmp.put(sql));
}
const res = await driver.query(conn, 'SELECT id FROM t1 ORDER BY id');
const res = await runQueryOnDriver(conn, driver, dmp => dmp.put('SELECT ~id FROM ~t1 ORDER BY ~id'));
expect(res.columns).toEqual([
expect.objectContaining({
columnName: 'id',
@@ -91,8 +94,11 @@ describe('Query', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Simple stream query - %s',
testWrapper(async (conn, driver, engine) => {
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
const results = await executeStream(driver, conn, 'SELECT id FROM t1 ORDER BY id');
for (const sql of initSql) {
await runCommandOnDriver(conn, driver, dmp => dmp.put(sql));
}
const results = await executeStream(driver, conn, 'SELECT ~id FROM ~t1 ORDER BY ~id');
expect(results.length).toEqual(1);
const res = results[0];
@@ -104,11 +110,14 @@ describe('Query', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'More queries - %s',
testWrapper(async (conn, driver, engine) => {
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
for (const sql of initSql) {
await runCommandOnDriver(conn, driver, dmp => dmp.put(sql));
}
const results = await executeStream(
driver,
conn,
'SELECT id FROM t1 ORDER BY id; SELECT id FROM t1 ORDER BY id DESC'
'SELECT ~id FROM ~t1 ORDER BY ~id; SELECT ~id FROM ~t1 ORDER BY ~id DESC'
);
expect(results.length).toEqual(2);
@@ -128,7 +137,7 @@ describe('Query', () => {
const results = await executeStream(
driver,
conn,
'CREATE TABLE t1 (id int primary key); INSERT INTO t1 (id) VALUES (1); INSERT INTO t1 (id) VALUES (2); SELECT id FROM t1 ORDER BY id; '
'CREATE TABLE ~t1 (~id int primary key); INSERT INTO ~t1 (~id) VALUES (1); INSERT INTO ~t1 (~id) VALUES (2); SELECT ~id FROM ~t1 ORDER BY ~id; '
);
expect(results.length).toEqual(1);
@@ -144,7 +153,7 @@ describe('Query', () => {
const results = await executeStream(
driver,
conn,
'CREATE TABLE t1 (id int); INSERT INTO t1 (id) VALUES (1); INSERT INTO t1 (id) VALUES (2) '
'CREATE TABLE ~t1 (~id int); INSERT INTO ~t1 (~id) VALUES (1); INSERT INTO ~t1 (~id) VALUES (2) '
);
expect(results.length).toEqual(0);
})
@@ -153,16 +162,57 @@ describe('Query', () => {
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
'Save data query - %s',
testWrapper(async (conn, driver, engine) => {
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
for (const sql of initSql) {
await runCommandOnDriver(conn, driver, dmp => dmp.put(sql));
}
await driver.script(
conn,
'INSERT INTO t1 (id) VALUES (3);INSERT INTO t1 (id) VALUES (4);UPDATE t1 SET id=10 WHERE id=1;DELETE FROM t1 WHERE id=2;',
formatQueryWithoutParams(
driver,
'INSERT INTO ~t1 (~id) VALUES (3);INSERT INTO ~t1 (~id) VALUES (4);UPDATE ~t1 SET ~id=10 WHERE ~id=1;DELETE FROM ~t1 WHERE ~id=2;'
),
{ discardResult: true }
);
const res = await driver.query(conn, 'SELECT COUNT(*) AS cnt FROM t1');
const res = await runQueryOnDriver(conn, driver, dmp => dmp.put('SELECT COUNT(*) AS ~cnt FROM ~t1'));
// console.log(res);
expect(res.rows[0].cnt == 3).toBeTruthy();
})
);
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
'Select scope identity - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't1',
columns: [
{ columnName: 'id', dataType: 'int', notNull: true, autoIncrement: true },
{ columnName: 'val', dataType: 'varchar(50)' },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
})
);
const structure = await driver.analyseFull(conn);
const table = structure.tables.find(x => x.pureName == 't1');
let res;
if (driver.dialect.requireStandaloneSelectForScopeIdentity) {
await runCommandOnDriver(conn, driver, dmp => dmp.put("INSERT INTO ~t1 (~val) VALUES ('aaa')"));
res = await runQueryOnDriver(conn, driver, dmp => dmp.selectScopeIdentity(table));
} else {
res = await runQueryOnDriver(conn, driver, dmp => {
dmp.putCmd("INSERT INTO ~t1 (~val) VALUES ('aaa')");
dmp.selectScopeIdentity(table);
});
}
const row = res.rows[0];
const keys = Object.keys(row);
expect(keys.length).toEqual(1);
expect(row[keys[0]] == 1).toBeTruthy();
})
);
});

View File

@@ -0,0 +1,90 @@
const stableStringify = require('json-stable-stringify');
const _ = require('lodash');
const fp = require('lodash/fp');
const { testWrapper, extractConnection } = require('../tools');
const engines = require('../engines');
const { runCommandOnDriver } = require('dbgate-tools');
async function baseStructure(conn, driver) {
await driver.query(conn, `create table t1 (id int not null primary key)`);
await driver.query(
conn,
`create table t2 (
id int not null primary key,
t1_id int
)`
);
}
describe('Schema tests', () => {
test.each(engines.filter(x => x.supportSchemas).map(engine => [engine.label, engine]))(
'Create schema - %s',
testWrapper(async (conn, driver, engine) => {
await baseStructure(conn, driver);
const structure1 = await driver.analyseFull(conn);
const schemas1 = await driver.listSchemas(conn);
expect(schemas1.find(x => x.schemaName == 'myschema')).toBeFalsy();
const count = schemas1.length;
expect(structure1.tables.length).toEqual(2);
await runCommandOnDriver(conn, driver, dmp => dmp.createSchema('myschema'));
const structure2 = await driver.analyseIncremental(conn, structure1);
const schemas2 = await driver.listSchemas(conn);
expect(schemas2.find(x => x.schemaName == 'myschema')).toBeTruthy();
expect(schemas2.length).toEqual(count + 1);
expect(schemas2.find(x => x.isDefault).schemaName).toEqual(engine.defaultSchemaName);
expect(structure2).toBeNull();
})
);
test.each(engines.filter(x => x.supportSchemas).map(engine => [engine.label, engine]))(
'Drop schema - %s',
testWrapper(async (conn, driver, engine) => {
await baseStructure(conn, driver);
await runCommandOnDriver(conn, driver, dmp => dmp.createSchema('myschema'));
const structure1 = await driver.analyseFull(conn);
const schemas1 = await driver.listSchemas(conn);
expect(schemas1.find(x => x.schemaName == 'myschema')).toBeTruthy();
expect(structure1.tables.length).toEqual(2);
await runCommandOnDriver(conn, driver, dmp => dmp.dropSchema('myschema'));
const structure2 = await driver.analyseIncremental(conn, structure1);
const schemas2 = await driver.listSchemas(conn);
expect(schemas2.find(x => x.schemaName == 'myschema')).toBeFalsy();
expect(structure2).toBeNull();
})
);
test.each(engines.filter(x => x.supportSchemas && !x.skipSeparateSchemas).map(engine => [engine.label, engine]))(
'Table inside schema - %s',
testWrapper(async (handle, driver, engine) => {
await baseStructure(handle, driver);
await runCommandOnDriver(handle, driver, dmp => dmp.createSchema('myschema'));
const schemaConnDef = {
...extractConnection(engine),
database: `${handle.database}::myschema`,
};
const schemaConn = await driver.connect(schemaConnDef);
await driver.query(schemaConn, `create table myschema.myt1 (id int not null primary key)`);
const structure1 = await driver.analyseFull(schemaConn);
expect(structure1.tables.length).toEqual(1);
expect(structure1.tables[0].pureName).toEqual('myt1');
})
);
});
describe('Base analyser test', () => {
test.each(engines.filter(x => !x.skipIncrementalAnalysis).map(engine => [engine.label, engine]))(
'Structure without change - %s',
testWrapper(async (conn, driver, engine) => {
await baseStructure(conn, driver);
const structure1 = await driver.analyseFull(conn);
expect(structure1.tables.length).toEqual(2);
const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2).toBeNull();
})
);
});

View File

@@ -1,28 +1,34 @@
const { runCommandOnDriver } = require('dbgate-tools');
const engines = require('../engines');
const { testWrapper } = require('../tools');
const t1Sql = 'CREATE TABLE t1 (id int not null primary key, val1 varchar(50))';
const ix1Sql = 'CREATE index ix1 ON t1(val1, id)';
const t1Sql = 'CREATE TABLE ~t1 (~id int not null primary key, ~val1 varchar(50))';
const ix1Sql = 'CREATE index ~ix1 ON ~t1(~val1, ~id)';
const t2Sql = engine =>
`CREATE TABLE t2 (id int not null primary key, val2 varchar(50) ${engine.skipUnique ? '' : 'unique'})`;
const t3Sql = 'CREATE TABLE t3 (id int not null primary key, valfk int, foreign key (valfk) references t2(id))';
`CREATE TABLE ~t2 (~id int not null primary key, ~val2 varchar(50) ${engine.skipUnique ? '' : 'unique'})`;
const t3Sql = 'CREATE TABLE ~t3 (~id int not null primary key, ~valfk int, foreign key (~valfk) references ~t2(~id))';
const t4Sql = 'CREATE TABLE ~t4 (~id int not null primary key, ~valdef int default 12 not null)';
// const fkSql = 'ALTER TABLE t3 ADD FOREIGN KEY (valfk) REFERENCES t2(id)'
const txMatch = (engine, tname, vcolname, nextcol) =>
const txMatch = (engine, tname, vcolname, nextcol, defaultValue) =>
expect.objectContaining({
pureName: tname,
columns: [
expect.objectContaining({
columnName: 'id',
dataType: expect.stringMatching(/int.*/i),
dataType: expect.stringMatching(/int.*|number/i),
...(engine.skipNullability ? {} : { notNull: true }),
}),
expect.objectContaining({
columnName: vcolname,
...(engine.skipNullability ? {} : { notNull: false }),
dataType: engine.skipStringLength
? expect.stringMatching(/.*string|char.*/i)
: expect.stringMatching(/.*char.*\(50\)/i),
...(engine.skipNullability ? {} : { notNull: !!defaultValue }),
...(defaultValue
? { defaultValue }
: {
dataType: engine.skipStringLength
? expect.stringMatching(/.*string|char.*/i)
: expect.stringMatching(/.*char.*\(50\)/i),
}),
}),
...(nextcol
? [
@@ -48,14 +54,16 @@ const txMatch = (engine, tname, vcolname, nextcol) =>
const t1Match = engine => txMatch(engine, 't1', 'val1');
const t2Match = engine => txMatch(engine, 't2', 'val2');
const t2NextColMatch = engine => txMatch(engine, 't2', 'val2', true);
const t4Match = engine => txMatch(engine, 't4', 'valdef', null, '12');
describe('Table analyse', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Table structure - full analysis - %s',
testWrapper(async (conn, driver, engine) => {
await driver.query(conn, t1Sql);
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
const structure = await driver.analyseFull(conn);
console.log(JSON.stringify(structure, null, 2));
expect(structure.tables.length).toEqual(1);
expect(structure.tables[0]).toEqual(t1Match(engine));
@@ -65,13 +73,13 @@ describe('Table analyse', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Table add - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await driver.query(conn, t2Sql(engine));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
const structure1 = await driver.analyseFull(conn);
expect(structure1.tables.length).toEqual(1);
expect(structure1.tables[0]).toEqual(t2Match(engine));
await driver.query(conn, t1Sql);
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2.tables.length).toEqual(2);
@@ -83,14 +91,14 @@ describe('Table analyse', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Table remove - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await driver.query(conn, t1Sql);
await driver.query(conn, t2Sql(engine));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
const structure1 = await driver.analyseFull(conn);
expect(structure1.tables.length).toEqual(2);
expect(structure1.tables.find(x => x.pureName == 't1')).toEqual(t1Match(engine));
expect(structure1.tables.find(x => x.pureName == 't2')).toEqual(t2Match(engine));
await driver.query(conn, 'DROP TABLE t2');
await runCommandOnDriver(conn, driver, dmp => dmp.put('DROP TABLE ~t2'));
const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2.tables.length).toEqual(1);
@@ -101,15 +109,14 @@ describe('Table analyse', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Table change - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await driver.query(conn, t1Sql);
await driver.query(conn, t2Sql(engine));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
const structure1 = await driver.analyseFull(conn);
if (engine.dbSnapshotBySeconds) await new Promise(resolve => setTimeout(resolve, 1100));
await driver.query(
conn,
`ALTER TABLE t2 ADD ${engine.alterTableAddColumnSyntax ? 'COLUMN' : ''} nextcol varchar(50)`
await runCommandOnDriver(conn, driver, dmp =>
dmp.put(`ALTER TABLE ~t2 ADD ${engine.alterTableAddColumnSyntax ? 'COLUMN' : ''} ~nextcol varchar(50)`)
);
const structure2 = await driver.analyseIncremental(conn, structure1);
@@ -124,8 +131,8 @@ describe('Table analyse', () => {
test.each(engines.filter(x => !x.skipIndexes).map(engine => [engine.label, engine]))(
'Index - full analysis - %s',
testWrapper(async (conn, driver, engine) => {
await driver.query(conn, t1Sql);
await driver.query(conn, ix1Sql);
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(ix1Sql));
const structure = await driver.analyseFull(conn);
const t1 = structure.tables.find(x => x.pureName == 't1');
@@ -139,7 +146,7 @@ describe('Table analyse', () => {
test.each(engines.filter(x => !x.skipUnique).map(engine => [engine.label, engine]))(
'Unique - full analysis - %s',
testWrapper(async (conn, driver, engine) => {
await driver.query(conn, t2Sql(engine));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
const structure = await driver.analyseFull(conn);
const t2 = structure.tables.find(x => x.pureName == 't2');
@@ -153,8 +160,8 @@ describe('Table analyse', () => {
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
'Foreign key - full analysis - %s',
testWrapper(async (conn, driver, engine) => {
await driver.query(conn, t2Sql(engine));
await driver.query(conn, t3Sql);
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t3Sql));
// await driver.query(conn, fkSql);
const structure = await driver.analyseFull(conn);
@@ -169,4 +176,16 @@ describe('Table analyse', () => {
);
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Table structure - default value - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t4Sql));
const structure = await driver.analyseFull(conn);
expect(structure.tables.length).toEqual(1);
expect(structure.tables[0]).toEqual(t4Match(engine));
})
);
});

View File

@@ -2,7 +2,7 @@ const _ = require('lodash');
const fp = require('lodash/fp');
const engines = require('../engines');
const { testWrapper } = require('../tools');
const { extendDatabaseInfo } = require('dbgate-tools');
const { extendDatabaseInfo, runCommandOnDriver } = require('dbgate-tools');
function createExpector(value) {
return _.cloneDeepWith(value, x => {
@@ -25,7 +25,7 @@ function checkTableStructure2(t1, t2) {
}
async function testTableCreate(conn, driver, table) {
await driver.query(conn, `create table t0 (id int not null primary key)`);
await runCommandOnDriver(conn, driver, dmp => dmp.put('create table ~t0 (~id int not null primary key)'));
const dmp = driver.createDumper();
const table1 = {

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,21 +1,21 @@
version: '3'
services:
# postgres:
# image: postgres
# restart: always
# environment:
# POSTGRES_PASSWORD: Pwd2020Db
# ports:
# - 15000:5432
postgres:
image: postgres
restart: always
environment:
POSTGRES_PASSWORD: Pwd2020Db
ports:
- 15000:5432
# mariadb:
# image: mariadb
# command: --default-authentication-plugin=mysql_native_password
# restart: always
# ports:
# - 15004:3306
# environment:
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
mariadb:
image: mariadb
command: --default-authentication-plugin=mysql_native_password
restart: always
ports:
- 15004:3306
environment:
- MYSQL_ROOT_PASSWORD=Pwd2020Db
# mysql:
# image: mysql:8.0.18
@@ -26,13 +26,13 @@ services:
# environment:
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
clickhouse:
image: bitnami/clickhouse:24.8.4
restart: always
ports:
- 15005:8123
environment:
- CLICKHOUSE_ADMIN_PASSWORD=Pwd2020Db
# clickhouse:
# image: bitnami/clickhouse:24.8.4
# restart: always
# ports:
# - 15005:8123
# environment:
# - CLICKHOUSE_ADMIN_PASSWORD=Pwd2020Db
# mssql:
# image: mcr.microsoft.com/mssql/server
@@ -70,3 +70,9 @@ services:
# - cockroachdb
# restart: on-failure
oracle:
image: gvenzl/oracle-xe:21-slim
environment:
ORACLE_PASSWORD: Pwd2020Db
ports:
- 15006:1521

View File

@@ -1,9 +1,9 @@
const views = {
type: 'views',
create1: 'CREATE VIEW obj1 AS SELECT id FROM t1',
create2: 'CREATE VIEW obj2 AS SELECT id FROM t2',
drop1: 'DROP VIEW obj1',
drop2: 'DROP VIEW obj2',
create1: 'CREATE VIEW ~obj1 AS SELECT ~id FROM ~t1',
create2: 'CREATE VIEW ~obj2 AS SELECT ~id FROM ~t2',
drop1: 'DROP VIEW ~obj1',
drop2: 'DROP VIEW ~obj2',
};
const matviews = {
type: 'matviews',
@@ -13,163 +13,587 @@ const matviews = {
drop2: 'DROP MATERIALIZED VIEW obj2',
};
const engines = [
{
label: 'MySQL',
connection: {
engine: 'mysql@dbgate-plugin-mysql',
password: 'Pwd2020Db',
user: 'root',
server: 'mysql',
port: 3306,
},
local: {
server: 'localhost',
port: 15001,
},
// skipOnCI: true,
objects: [views],
dbSnapshotBySeconds: true,
const mysqlEngine = {
label: 'MySQL',
connection: {
engine: 'mysql@dbgate-plugin-mysql',
password: 'Pwd2020Db',
user: 'root',
server: 'mysql',
port: 3306,
},
{
label: 'MariaDB',
connection: {
engine: 'mariadb@dbgate-plugin-mysql',
password: 'Pwd2020Db',
user: 'root',
server: 'mysql',
port: 3306,
},
local: {
server: 'localhost',
port: 15004,
},
skipOnCI: true,
objects: [views],
dbSnapshotBySeconds: true,
local: {
server: 'localhost',
port: 15001,
},
{
label: 'PostgreSQL',
connection: {
engine: 'postgres@dbgate-plugin-postgres',
password: 'Pwd2020Db',
user: 'postgres',
server: 'postgres',
port: 5432,
objects: [
views,
{
type: 'procedures',
create1: 'CREATE PROCEDURE obj1() BEGIN SELECT * FROM t1; END',
create2: 'CREATE PROCEDURE obj2() BEGIN SELECT * FROM t2; END',
drop1: 'DROP PROCEDURE obj1',
drop2: 'DROP PROCEDURE obj2',
},
local: {
server: 'localhost',
port: 15000,
],
dbSnapshotBySeconds: true,
dumpFile: 'data/chinook-mysql.sql',
dumpChecks: [
{
sql: 'select count(*) as res from genre',
res: '25',
},
objects: [
views,
matviews,
{
type: 'procedures',
create1: 'CREATE PROCEDURE obj1() LANGUAGE SQL AS $$ select * from t1 $$',
create2: 'CREATE PROCEDURE obj2() LANGUAGE SQL AS $$ select * from t2 $$',
drop1: 'DROP PROCEDURE obj1',
drop2: 'DROP PROCEDURE obj2',
],
parametersOtherSql: ['CREATE PROCEDURE obj2(a int, b int) BEGIN SELECT * FROM t1; END'],
parameters: [
{
testName: 'simple',
create: 'CREATE PROCEDURE obj1(a int) BEGIN SELECT * FROM t1; END',
drop: 'DROP PROCEDURE obj1',
objectTypeField: 'procedures',
list: [
{
parameterName: 'a',
parameterMode: 'IN',
dataType: 'int',
},
],
},
{
testName: 'paramTypes',
create: 'CREATE PROCEDURE obj1(a int, b varchar(50), c numeric(10,2)) BEGIN SELECT * FROM t1; END',
drop: 'DROP PROCEDURE obj1',
objectTypeField: 'procedures',
list: [
{
parameterName: 'a',
parameterMode: 'IN',
dataType: 'int',
},
{
parameterName: 'b',
parameterMode: 'IN',
dataType: 'varchar(50)',
},
{
parameterName: 'c',
parameterMode: 'IN',
dataType: 'decimal(10,2)',
},
],
},
{
testName: 'paramModes',
create: 'CREATE PROCEDURE obj1(IN a int, OUT b int, INOUT c int) BEGIN SELECT * FROM t1; END',
drop: 'DROP PROCEDURE obj1',
objectTypeField: 'procedures',
list: [
{
parameterName: 'a',
parameterMode: 'IN',
dataType: 'int',
},
{
parameterName: 'b',
parameterMode: 'OUT',
dataType: 'int',
},
{
parameterName: 'c',
parameterMode: 'INOUT',
dataType: 'int',
},
],
},
],
triggers: [
{
testName: 'triggers insert after',
create: 'CREATE TRIGGER obj1 AFTER INSERT ON t1 FOR EACH ROW BEGIN END',
drop: 'DROP TRIGGER obj1;',
objectTypeField: 'triggers',
expected: {
pureName: 'obj1',
eventType: 'INSERT',
triggerTiming: 'AFTER',
},
{
type: 'functions',
create1:
'CREATE FUNCTION obj1() returns int LANGUAGE plpgsql AS $$ declare res integer; begin select count(*) into res from t1; return res; end; $$',
create2:
'CREATE FUNCTION obj2() returns int LANGUAGE plpgsql AS $$ declare res integer; begin select count(*) into res from t2; return res; end; $$',
drop1: 'DROP FUNCTION obj1',
drop2: 'DROP FUNCTION obj2',
},
{
testName: 'triggers insert before',
create: 'CREATE TRIGGER obj1 BEFORE INSERT ON t1 FOR EACH ROW BEGIN END',
drop: 'DROP TRIGGER obj1;',
objectTypeField: 'triggers',
expected: {
pureName: 'obj1',
eventType: 'INSERT',
triggerTiming: 'BEFORE',
},
],
},
],
};
const mariaDbEngine = {
label: 'MariaDB',
connection: {
engine: 'mariadb@dbgate-plugin-mysql',
password: 'Pwd2020Db',
user: 'root',
server: 'mysql',
port: 3306,
},
{
label: 'SQL Server',
connection: {
engine: 'mssql@dbgate-plugin-mssql',
password: 'Pwd2020Db',
user: 'sa',
server: 'mssql',
port: 1433,
local: {
server: 'localhost',
port: 15004,
},
objects: [views],
dbSnapshotBySeconds: true,
dumpFile: 'data/chinook-mysql.sql',
dumpChecks: [
{
sql: 'select count(*) as res from genre',
res: '25',
},
local: {
server: 'localhost',
port: 15002,
],
};
const postgreSqlEngine = {
label: 'PostgreSQL',
connection: {
engine: 'postgres@dbgate-plugin-postgres',
password: 'Pwd2020Db',
user: 'postgres',
server: 'postgres',
port: 5432,
},
local: {
server: 'localhost',
port: 15000,
},
objects: [
views,
matviews,
{
type: 'procedures',
create1: 'CREATE PROCEDURE obj1() LANGUAGE SQL AS $$ select * from t1 $$',
create2: 'CREATE PROCEDURE obj2() LANGUAGE SQL AS $$ select * from t2 $$',
drop1: 'DROP PROCEDURE obj1',
drop2: 'DROP PROCEDURE obj2',
},
objects: [
views,
{
type: 'procedures',
create1: 'CREATE PROCEDURE obj1 AS SELECT id FROM t1',
create2: 'CREATE PROCEDURE obj2 AS SELECT id FROM t2',
drop1: 'DROP PROCEDURE obj1',
drop2: 'DROP PROCEDURE obj2',
{
type: 'functions',
create1:
'CREATE FUNCTION obj1() returns int LANGUAGE plpgsql AS $$ declare res integer; begin select count(*) into res from t1; return res; end; $$',
create2:
'CREATE FUNCTION obj2() returns int LANGUAGE plpgsql AS $$ declare res integer; begin select count(*) into res from t2; return res; end; $$',
drop1: 'DROP FUNCTION obj1',
drop2: 'DROP FUNCTION obj2',
},
],
supportSchemas: true,
supportRenameSqlObject: true,
defaultSchemaName: 'public',
dumpFile: 'data/chinook-postgre.sql',
dumpChecks: [
{
sql: 'select count(*) as res from "public"."Genre"',
res: '25',
},
],
parametersOtherSql: ['CREATE PROCEDURE obj2(a integer, b integer) LANGUAGE SQL AS $$ select * from t1 $$'],
parameters: [
{
testName: 'simple',
create: 'CREATE PROCEDURE obj1(a integer) LANGUAGE SQL AS $$ select * from t1 $$',
drop: 'DROP PROCEDURE obj1',
objectTypeField: 'procedures',
list: [
{
parameterName: 'a',
parameterMode: 'IN',
dataType: 'integer',
},
],
},
{
testName: 'dataTypes',
create: 'CREATE PROCEDURE obj1(a integer, b varchar(20), c numeric(18,2)) LANGUAGE SQL AS $$ select * from t1 $$',
drop: 'DROP PROCEDURE obj1',
objectTypeField: 'procedures',
list: [
{
parameterName: 'a',
parameterMode: 'IN',
dataType: 'integer',
},
{
parameterName: 'b',
parameterMode: 'IN',
dataType: 'varchar',
},
{
parameterName: 'c',
parameterMode: 'IN',
dataType: 'numeric',
},
],
},
{
testName: 'paramModes',
create: 'CREATE PROCEDURE obj1(IN a integer, INOUT b integer) LANGUAGE SQL AS $$ select * from t1 $$',
drop: 'DROP PROCEDURE obj1',
objectTypeField: 'procedures',
list: [
{
parameterName: 'a',
parameterMode: 'IN',
dataType: 'integer',
},
{
parameterName: 'b',
parameterMode: 'INOUT',
dataType: 'integer',
},
],
},
{
testName: 'paramModesFunction',
objectTypeField: 'functions',
create: `
create or replace function obj1(
out min_len int,
out max_len int)
language plpgsql
as $$
begin
select min(id),
max(id)
into min_len, max_len
from t1;
end;$$`,
drop: 'DROP FUNCTION obj1',
list: [
{
parameterName: 'min_len',
parameterMode: 'OUT',
dataType: 'integer',
},
{
parameterName: 'max_len',
parameterMode: 'OUT',
dataType: 'integer',
},
],
},
],
triggers: [
{
testName: 'triggers after each row',
create: `CREATE TRIGGER obj1
AFTER INSERT ON t1
FOR EACH ROW
EXECUTE FUNCTION test_function();
`,
drop: 'DROP TRIGGER obj1 ON t1;',
triggerOtherCreateSql: `CREATE OR REPLACE FUNCTION test_function()
RETURNS TRIGGER AS $$
BEGIN
END;
$$ LANGUAGE plpgsql;`,
triggerOtherDropSql: 'DROP FUNCTION test_function',
objectTypeField: 'triggers',
expected: {
pureName: 'obj1',
eventType: 'INSERT',
triggerTiming: 'AFTER',
},
],
},
{
testName: 'triggers before each row',
create: `CREATE TRIGGER obj1
BEFORE INSERT ON t1
FOR EACH ROW
EXECUTE FUNCTION test_function();
`,
drop: 'DROP TRIGGER obj1 ON t1;',
triggerOtherCreateSql: `CREATE OR REPLACE FUNCTION test_function()
RETURNS TRIGGER AS $$
BEGIN
END;
$$ LANGUAGE plpgsql;`,
triggerOtherDropSql: 'DROP FUNCTION test_function',
objectTypeField: 'triggers',
expected: {
pureName: 'obj1',
eventType: 'INSERT',
triggerTiming: 'BEFORE',
},
},
],
};
const sqlServerEngine = {
label: 'SQL Server',
connection: {
engine: 'mssql@dbgate-plugin-mssql',
password: 'Pwd2020Db',
user: 'sa',
server: 'mssql',
port: 1433,
},
{
label: 'SQLite',
generateDbFile: true,
connection: {
engine: 'sqlite@dbgate-plugin-sqlite',
},
objects: [views],
local: {
server: 'localhost',
port: 15002,
},
{
label: 'CockroachDB',
connection: {
engine: 'cockroach@dbgate-plugin-postgres',
user: 'root',
server: 'cockroachdb',
port: 26257,
objects: [
views,
{
type: 'procedures',
create1: 'CREATE PROCEDURE obj1 AS SELECT id FROM t1',
create2: 'CREATE PROCEDURE obj2 AS SELECT id FROM t2',
drop1: 'DROP PROCEDURE obj1',
drop2: 'DROP PROCEDURE obj2',
},
local: {
server: 'localhost',
port: 15003,
{
type:'triggers',
create1: 'CREATE TRIGGER obj1 ON t1 AFTER INSERT AS BEGIN SELECT * FROM t1 END',
create2: 'CREATE TRIGGER obj2 ON t2 AFTER INSERT AS BEGIN SELECT * FROM t2 END',
drop1: 'DROP TRIGGER obj1',
drop2: 'DROP TRIGGER obj2',
}
],
parametersOtherSql: ['CREATE PROCEDURE obj2 (@p1 int, @p2 int) AS SELECT id from t1'],
parameters: [
{
testName: 'simple',
create: 'CREATE PROCEDURE obj1 (@param1 int) AS SELECT id from t1',
drop: 'DROP PROCEDURE obj1',
objectTypeField: 'procedures',
list: [
{
parameterName: '@param1',
parameterMode: 'IN',
dataType: 'int',
},
],
},
skipOnCI: true,
objects: [views, matviews],
{
testName: 'dataTypes',
create: 'CREATE PROCEDURE obj1 (@p1 bit, @p2 nvarchar(20), @p3 decimal(18,2), @p4 float) AS SELECT id from t1',
drop: 'DROP PROCEDURE obj1',
objectTypeField: 'procedures',
list: [
{
parameterName: '@p1',
parameterMode: 'IN',
dataType: 'bit',
},
{
parameterName: '@p2',
parameterMode: 'IN',
dataType: 'nvarchar(20)',
},
{
parameterName: '@p3',
parameterMode: 'IN',
dataType: 'decimal(18,2)',
},
{
parameterName: '@p4',
parameterMode: 'IN',
dataType: 'float',
},
],
},
{
testName: 'outputParam',
create: 'CREATE PROCEDURE obj1 (@p1 int OUTPUT) AS SELECT id from t1',
drop: 'DROP PROCEDURE obj1',
objectTypeField: 'procedures',
list: [
{
parameterName: '@p1',
parameterMode: 'OUT',
dataType: 'int',
},
],
},
],
supportSchemas: true,
supportRenameSqlObject: true,
defaultSchemaName: 'dbo',
// skipSeparateSchemas: true,
triggers: [
{
testName: 'triggers before each row',
create: `CREATE TRIGGER obj1 ON t1 AFTER INSERT AS BEGIN SELECT * FROM t1 END`,
drop: 'DROP TRIGGER obj1',
objectTypeField: 'triggers',
expected: {
pureName: 'obj1',
eventType: 'INSERT',
triggerTiming: 'AFTER',
},
},
{
testName: 'triggers before each row',
create: `CREATE TRIGGER obj1 ON t1 AFTER UPDATE AS BEGIN SELECT * FROM t1 END`,
drop: 'DROP TRIGGER obj1',
objectTypeField: 'triggers',
expected: {
pureName: 'obj1',
eventType: 'UPDATE',
triggerTiming: 'AFTER',
},
},
],
};
const sqliteEngine = {
label: 'SQLite',
generateDbFile: true,
connection: {
engine: 'sqlite@dbgate-plugin-sqlite',
},
{
label: 'ClickHouse',
connection: {
engine: 'clickhouse@dbgate-plugin-clickhouse',
databaseUrl: 'http://clickhouse:8123',
password: 'Pwd2020Db',
},
local: {
databaseUrl: 'http://localhost:15005',
},
skipOnCI: false,
objects: [views],
skipDataModifications: true,
skipReferences: true,
skipIndexes: true,
skipNullability: true,
skipUnique: true,
skipAutoIncrement: true,
skipPkColumnTesting: true,
skipDataDuplicator: true,
skipStringLength: true,
alterTableAddColumnSyntax: true,
dbSnapshotBySeconds: true,
objects: [views],
skipOnCI: false,
skipChangeColumn: true,
};
const cockroachDbEngine = {
label: 'CockroachDB',
connection: {
engine: 'cockroach@dbgate-plugin-postgres',
user: 'root',
server: 'cockroachdb',
port: 26257,
},
local: {
server: 'localhost',
port: 15003,
},
objects: [views, matviews],
};
const clickhouseEngine = {
label: 'ClickHouse',
connection: {
engine: 'clickhouse@dbgate-plugin-clickhouse',
databaseUrl: 'http://clickhouse:8123',
password: 'Pwd2020Db',
},
local: {
databaseUrl: 'http://localhost:15005',
},
objects: [views],
skipDataModifications: true,
skipReferences: true,
skipIndexes: true,
skipNullability: true,
skipUnique: true,
skipAutoIncrement: true,
skipPkColumnTesting: true,
skipDataDuplicator: true,
skipStringLength: true,
alterTableAddColumnSyntax: true,
dbSnapshotBySeconds: true,
skipChangeColumn: true,
};
const oracleEngine = {
label: 'Oracle',
connection: {
engine: 'oracle@dbgate-plugin-oracle',
password: 'Pwd2020Db',
user: 'system',
server: 'oracle',
port: 1521,
serviceName: 'xe',
},
local: {
server: 'localhost',
port: 15006,
},
skipOnCI: false,
dbSnapshotBySeconds: true,
setNullDefaultInsteadOfDrop: true,
skipIncrementalAnalysis: true,
objects: [
views,
{
type: 'procedures',
create1: 'CREATE PROCEDURE ~obj1 AS BEGIN SELECT ~id FROM ~t1 END',
create2: 'CREATE PROCEDURE ~obj2 AS BEGIN SELECT ~id FROM ~t2 END',
drop1: 'DROP PROCEDURE ~obj1',
drop2: 'DROP PROCEDURE ~obj2',
},
{
type: 'functions',
create1:
'CREATE FUNCTION ~obj1 RETURN NUMBER IS v_count NUMBER; \n BEGIN SELECT COUNT(*) INTO v_count FROM ~t1;\n RETURN v_count;\n END ~obj1',
create2:
'CREATE FUNCTION ~obj2 RETURN NUMBER IS v_count NUMBER; \n BEGIN SELECT COUNT(*) INTO v_count FROM ~t2;\n RETURN v_count;\n END ~obj2',
drop1: 'DROP FUNCTION ~obj1',
drop2: 'DROP FUNCTION ~obj2',
},
],
triggers: [
{
testName: 'triggers after each row',
create: 'CREATE OR REPLACE TRIGGER obj1 AFTER INSERT ON "t1" FOR EACH ROW BEGIN END obj1;',
drop: 'DROP TRIGGER obj1;',
objectTypeField: 'triggers',
expected: {
pureName: 'OBJ1',
eventType: 'INSERT',
triggerTiming: 'AFTER EACH ROW',
},
},
{
testName: 'triggers before each row',
create: 'CREATE OR REPLACE TRIGGER obj1 BEFORE INSERT ON "t1" FOR EACH ROW BEGIN END obj1;',
drop: 'DROP TRIGGER obj1;',
objectTypeField: 'triggers',
expected: {
pureName: 'OBJ1',
eventType: 'INSERT',
triggerTiming: 'BEFORE EACH ROW',
},
},
],
};
const enginesOnCi = [
// all engines, which would be run on GitHub actions
mysqlEngine,
// mariaDbEngine,
postgreSqlEngine,
sqlServerEngine,
sqliteEngine,
// cockroachDbEngine,
clickhouseEngine,
oracleEngine,
];
const filterLocal = [
// filter local testing
'-MySQL',
'-MariaDB',
'-PostgreSQL',
'-SQL Server',
'-SQLite',
'-CockroachDB',
'ClickHouse',
const enginesOnLocal = [
// all engines, which would be run on local test
// mysqlEngine,
// mariaDbEngine,
// postgreSqlEngine,
// sqlServerEngine,
// sqliteEngine,
// cockroachDbEngine,
// clickhouseEngine,
oracleEngine,
];
const enginesPostgre = engines.filter(x => x.label == 'PostgreSQL');
module.exports = process.env.CITEST ? enginesOnCi : enginesOnLocal;
module.exports = process.env.CITEST
? engines.filter(x => !x.skipOnCI)
: engines.filter(x => filterLocal.find(y => x.label == y));
module.exports.enginesPostgre = enginesPostgre;
module.exports.mysqlEngine = mysqlEngine;
module.exports.mariaDbEngine = mariaDbEngine;
module.exports.postgreSqlEngine = postgreSqlEngine;
module.exports.sqlServerEngine = sqlServerEngine;
module.exports.sqliteEngine = sqliteEngine;
module.exports.cockroachDbEngine = cockroachDbEngine;
module.exports.clickhouseEngine = clickhouseEngine;
module.exports.oracleEngine = oracleEngine;

View File

@@ -1,6 +1,6 @@
{
"name": "dbgate-integration-tests",
"version": "5.0.0-alpha.1",
"version": "6.0.0-alpha.1",
"homepage": "https://dbgate.org/",
"repository": {
"type": "git",
@@ -13,7 +13,7 @@
"wait:ci": "cross-env DEVMODE=1 CITEST=1 node wait.js",
"test:local": "cross-env DEVMODE=1 LOCALTEST=1 jest",
"test:local:path": "cross-env DEVMODE=1 LOCALTEST=1 jest --runTestsByPath __tests__/data-duplicator.spec.js",
"test:ci": "cross-env DEVMODE=1 CITEST=1 jest --runInBand --json --outputFile=result.json --testLocationInResults",
"test:ci": "cross-env DEVMODE=1 CITEST=1 jest --runInBand --json --outputFile=result.json --testLocationInResults --detectOpenHandles --forceExit",
"run:local": "docker-compose down && docker-compose up -d && yarn wait:local && yarn test:local"
},
"jest": {
@@ -22,6 +22,7 @@
"devDependencies": {
"cross-env": "^7.0.3",
"jest": "^27.0.1",
"pino-pretty": "^11.2.2"
"pino-pretty": "^11.2.2",
"tmp": "^0.2.3"
}
}

View File

@@ -1,4 +1,10 @@
global.DBGATE_PACKAGES = {
'dbgate-tools': require('dbgate-tools'),
'dbgate-sqltree': require('dbgate-sqltree'),
};
const { prettyFactory } = require('pino-pretty');
const tmp = require('tmp');
const pretty = prettyFactory({
colorize: true,
@@ -20,3 +26,5 @@ global.console = {
process.stdout.write(messages.join(' ') + '\n');
},
};
tmp.setGracefulCleanup();

View File

@@ -1,15 +1,12 @@
global.DBGATE_PACKAGES = {
'dbgate-tools': require('dbgate-tools'),
'dbgate-sqltree': require('dbgate-sqltree'),
};
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
const crypto = require('crypto');
function randomDbName() {
function randomDbName(dialect) {
const generatedKey = crypto.randomBytes(6);
const newKey = generatedKey.toString('hex');
return `db${newKey}`;
const res = `db${newKey}`;
if (dialect.upperCaseAllDbObjectNames) return res.toUpperCase();
return res;
}
function extractConnection(engine) {
@@ -37,23 +34,50 @@ async function connect(engine, database) {
return conn;
} else {
const conn = await driver.connect(connection);
await driver.query(conn, `CREATE DATABASE ${database}`);
const dmp = driver.createDumper();
dmp.createDatabase(database);
await driver.query(conn, dmp.s);
await driver.close(conn);
const res = await driver.connect({
...connection,
database,
database: (driver.dialect.userDatabaseNamePrefix ?? '') + database,
});
return res;
}
}
async function prepareConnection(engine, database) {
const connection = extractConnection(engine);
const driver = requireEngineDriver(connection);
if (engine.generateDbFile) {
return {
...connection,
databaseFile: `dbtemp/${database}`,
isPreparedOnly: true,
};
} else {
const conn = await driver.connect(connection);
const dmp = driver.createDumper();
dmp.createDatabase(database);
await driver.query(conn, dmp.s);
await driver.close(conn);
return {
...connection,
database: (driver.dialect.userDatabaseNamePrefix ?? '') + database,
isPreparedOnly: true,
};
}
}
const testWrapper =
body =>
async (label, ...other) => {
const engine = other[other.length - 1];
const driver = requireEngineDriver(engine.connection);
const conn = await connect(engine, randomDbName());
const conn = await connect(engine, randomDbName(driver.dialect));
try {
await body(conn, driver, ...other);
} finally {
@@ -61,9 +85,19 @@ const testWrapper =
}
};
const testWrapperPrepareOnly =
body =>
async (label, ...other) => {
const engine = other[other.length - 1];
const driver = requireEngineDriver(engine.connection);
const conn = await prepareConnection(engine, randomDbName(driver.dialect));
await body(conn, driver, ...other);
};
module.exports = {
randomDbName,
connect,
extractConnection,
testWrapper,
testWrapperPrepareOnly,
};

View File

@@ -1,6 +1,6 @@
{
"private": true,
"version": "5.4.5-beta.6",
"version": "6.1.3-beta.1",
"name": "dbgate-all",
"workspaces": [
"packages/*",
@@ -21,6 +21,7 @@
"start:api:auth": "yarn workspace dbgate-api start:auth | pino-pretty",
"start:api:dblogin": "yarn workspace dbgate-api start:dblogin | pino-pretty",
"start:api:storage": "yarn workspace dbgate-api start:storage | pino-pretty",
"start:api:storage:built": "yarn workspace dbgate-api start:storage:built | pino-pretty",
"sync:pro": "cd sync && yarn start",
"start:web": "yarn workspace dbgate-web dev",
"start:sqltree": "yarn workspace dbgate-sqltree start",
@@ -34,8 +35,10 @@
"build:lib": "yarn build:sqltree && yarn build:tools && yarn build:filterparser && yarn build:datalib",
"build:app": "yarn plugins:copydist && cd app && yarn install && yarn build",
"build:api": "yarn workspace dbgate-api build",
"build:web:docker": "yarn workspace dbgate-web build",
"build:api:doc": "yarn workspace dbgate-api build:doc",
"build:web": "yarn workspace dbgate-web build",
"build:plugins:frontend": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn build:frontend",
"build:plugins:backend": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn build:backend",
"build:plugins:frontend:watch": "workspaces-run --parallel --only=\"dbgate-plugin-*\" -- yarn build:frontend:watch",
"storage-json": "dbmodel model-to-json storage-db packages/api/src/storageModel.js --commonjs",
"plugins:copydist": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn copydist",
@@ -44,22 +47,23 @@
"setCurrentVersion": "node setCurrentVersion",
"printSecrets": "node printSecrets",
"generatePadFile": "node generatePadFile",
"adjustPackageJson": "node adjustPackageJson",
"fillNativeModules": "node fillNativeModules",
"fillNativeModulesElectron": "node fillNativeModules --electron",
"fillPackagedPlugins": "node fillPackagedPlugins",
"resetPackagedPlugins": "node resetPackagedPlugins",
"prettier": "prettier --write packages/api/src && prettier --write packages/datalib/src && prettier --write packages/filterparser/src && prettier --write packages/sqltree/src && prettier --write packages/tools/src && prettier --write packages/types && prettier --write packages/web/src && prettier --write app/src",
"copy:docker:build": "copyfiles packages/api/dist/* docker -f && copyfiles packages/web/public/* docker -u 2 && copyfiles \"packages/web/public/**/*\" docker -u 2 && copyfiles \"plugins/dist/**/*\" docker/plugins -u 2",
"install:drivers:docker": "cd docker && yarn init --yes && yarn add better-sqlite3 && yarn add oracledb && cd ..",
"prepare:docker": "yarn plugins:copydist && yarn build:web:docker && yarn build:api && yarn copy:docker:build && yarn install:drivers:docker",
"copy:packer:build": "copyfiles packages/api/dist/* packer/build -f && copyfiles packages/web/public/* packer/build -u 2 && copyfiles \"packages/web/public/**/*\" packer/build -u 2 && copyfiles \"plugins/dist/**/*\" packer/build/plugins -u 2 && copyfiles packer/install-packages.sh packer/build -f && yarn install:drivers:packer",
"install:drivers:docker": "node common/defineVolatileDependencies.js docker && cd docker && yarn install && cd ..",
"install:drivers:packer": "node common/defineVolatileDependencies.js packer/build",
"prepare:docker": "yarn plugins:copydist && yarn build:web && yarn build:api && yarn copy:docker:build && yarn install:drivers:docker",
"prepare:packer": "yarn plugins:copydist && yarn build:web && yarn build:api && yarn copy:packer:build",
"start": "concurrently --kill-others-on-fail \"yarn start:api\" \"yarn start:web\"",
"lib": "concurrently --kill-others-on-fail \"yarn start:sqltree\" \"yarn start:filterparser\" \"yarn start:datalib\" \"yarn start:tools\" \"yarn build:plugins:frontend:watch\"",
"ts:api": "yarn workspace dbgate-api ts",
"ts:web": "yarn workspace dbgate-web ts",
"ts": "yarn ts:api && yarn ts:web",
"postinstall": "yarn resetPackagedPlugins && yarn build:lib && patch-package && yarn fillNativeModules && yarn build:plugins:frontend",
"dbgate-serve": "node packages/dbgate/bin/dbgate-serve.js"
"postinstall": "yarn resetPackagedPlugins && yarn build:lib && patch-package && yarn build:plugins:frontend",
"dbgate-serve": "node packages/dbgate/bin/dbgate-serve.js",
"workflows": "node common/processWorkflows.js"
},
"dependencies": {
"concurrently": "^5.1.0",

View File

@@ -1,6 +1,8 @@
DEVMODE=1
SHELL_SCRIPTING=1
# CLOUD_UPGRADE_FILE=c:\test\upg\upgrade.zip
# PERMISSIONS=~widgets/app,~widgets/plugins
# DISABLE_SHELL=1
# HIDE_APP_EDITOR=1

View File

@@ -12,15 +12,14 @@ This example exports table Customer info CSV file.
```javascript
const dbgateApi = require('dbgate-api');
const dbgatePluginMssql = require("dbgate-plugin-mssql");
const dbgatePluginMysql = require("dbgate-plugin-mysql");
const dbgatePluginCsv = require("dbgate-plugin-csv");
dbgateApi.registerPlugins(dbgatePluginMssql);
dbgateApi.registerPlugins(dbgatePluginMysql);
async function run() {
const reader = await dbgateApi.tableReader({
connection: { server: 'localhost', engine: 'mssql', user: 'sa', password: 'xxxx', database: 'Chinook' },
schemaName: 'dbo',
connection: { server: 'localhost', engine: 'mysql@dbgate-plugin-mysql', user: 'root', password: 'xxxx', database: 'Chinook' },
pureName: 'Customer',
});
const writer = await dbgatePluginCsv.shellApi.writer({ fileName: 'Customer.csv' });
@@ -59,8 +58,8 @@ Copies data from reader into writer. Reader and writer should be created from fu
Reads table or view.
```js
const reader = await dbgateApi.tableReader({
connection: { server: 'localhost', engine: 'mssql' | 'postgres' | 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
schemaName: 'dbo',
connection: { server: 'localhost', engine: 'postgres@dbgate-plugin-postgres', user: 'root', password: 'xxxx', database: 'DB_NAME' },
schemaName: 'public',
pureName: 'Customer',
});
```
@@ -69,7 +68,7 @@ Reads table or view.
Executes query and reads its result.
```js
const reader = await dbgateApi.tableReader({
connection: { server: 'localhost', engine: 'mssql' | 'postgres' | 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
connection: { server: 'localhost', engine: 'mysql@dbgate-plugin-mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
sql: 'SELECT * FROM Album',
});
```
@@ -81,8 +80,7 @@ Imports data into table. Options are optional, default values are false.
- createIfNotExists - create table, if not exists
```js
const reader = await dbgateApi.tableWriter({
connection: { server: 'localhost', engine: 'mssql' | 'postgres' | 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
schemaName: 'dbo',
connection: { server: 'localhost', engine: 'mysql@dbgate-plugin-mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
pureName: 'Customer',
options: {
dropIfExists: false,

11
packages/api/doctpl.hbs Normal file
View File

@@ -0,0 +1,11 @@
---
layout: docs
title: API documentation
order: 21
docs_left: true
hide_hero: true
---
# API Documentation
{{>main}}

View File

@@ -1,60 +1,47 @@
DEVMODE=1
CONNECTIONS=mysql,postgres,postgres1,mongo,mongo2,mysqlssh,sqlite,relational
CONNECTIONS=mysql,postgres,mongo,redis,mssql,oracle
LABEL_mysql=MySql localhost
SERVER_mysql=localhost
LABEL_mysql=MySql
SERVER_mysql=dbgatedckstage1.sprinx.cz
USER_mysql=root
PASSWORD_mysql=test
PORT_mysql=3307
PASSWORD_mysql=Pwd2020Db
PORT_mysql=3306
ENGINE_mysql=mysql@dbgate-plugin-mysql
LABEL_postgres=Postgres localhost
SERVER_postgres=localhost
LABEL_postgres=Postgres
SERVER_postgres=dbgatedckstage1.sprinx.cz
USER_postgres=postgres
PASSWORD_postgres=Pwd2020Db
PORT_postgres=5432
ENGINE_postgres=postgres@dbgate-plugin-postgres
LABEL_postgres1=Postgres localhost test DB
SERVER_postgres1=localhost
USER_postgres1=postgres
PASSWORD_postgres1=Pwd2020Db
PORT_postgres1=5432
ENGINE_postgres1=postgres@dbgate-plugin-postgres
DATABASE_postgres1=test
LABEL_mongo=Mongo URL
URL_mongo=mongodb://localhost:27017
LABEL_mongo=Mongo
SERVER_mongo=dbgatedckstage1.sprinx.cz
USER_mongo=root
PASSWORD_mongo=Pwd2020Db
PORT_mongo=27017
ENGINE_mongo=mongo@dbgate-plugin-mongo
LABEL_mongo2=Mongo Server
SERVER_mongo2=localhost
ENGINE_mongo2=mongo@dbgate-plugin-mongo
LABEL_redis=Redis
SERVER_redis=dbgatedckstage1.sprinx.cz
ENGINE_redis=redis@dbgate-plugin-redis
PORT_redis=6379
LABEL_mysqlssh=MySql SSH
SERVER_mysqlssh=localhost
USER_mysqlssh=root
PASSWORD_mysqlssh=xxx
PORT_mysqlssh=3316
ENGINE_mysqlssh=mysql@dbgate-plugin-mysql
USE_SSH_mysqlssh=1
SSH_HOST_mysqlssh=demo.dbgate.org
SSH_PORT_mysqlssh=22
SSH_MODE_mysqlssh=userPassword
SSH_LOGIN_mysqlssh=root
SSH_PASSWORD_mysqlssh=xxx
LABEL_mssql=SQL Server
SERVER_mssql=dbgatedckstage1.sprinx.cz
USER_mssql=sa
PASSWORD_mssql=Pwd2020Db
PORT_mssql=1433
ENGINE_mssql=mssql@dbgate-plugin-mssql
LABEL_sqlite=sqlite
FILE_sqlite=/home/jena/.dbgate/files/sqlite/feeds.sqlite
ENGINE_sqlite=sqlite@dbgate-plugin-sqlite
LABEL_relational=Relational dataset repo
SERVER_relational=relational.fit.cvut.cz
USER_relational=guest
PASSWORD_relational=relational
ENGINE_relational=mariadb@dbgate-plugin-mysql
READONLY_relational=1
LABEL_oracle=Oracle
SERVER_oracle=dbgatedckstage1.sprinx.cz
USER_oracle=system
PASSWORD_oracle=Pwd2020Db
PORT_oracle=1521
ENGINE_oracle=oracle@dbgate-plugin-oracle
SERVICE_NAME_oracle=xe
# SETTINGS_dataGrid.showHintColumns=1

View File

@@ -12,6 +12,6 @@ DBCONFIG_mysql=[{"name":"Chinook","connectionColor":"cyan"}]
SINGLE_CONNECTION=mysql
SINGLE_DATABASE=Chinook
# SINGLE_DATABASE=Chinook
PERMISSIONS=files/charts/read

View File

@@ -1,7 +1,7 @@
{
"name": "dbgate-api",
"main": "src/index.js",
"version": "5.0.0-alpha.1",
"version": "6.0.0-alpha.1",
"homepage": "https://dbgate.org/",
"repository": {
"type": "git",
@@ -16,20 +16,23 @@
"export",
"dbgate"
],
"files": [
"src"
],
"dependencies": {
"@aws-sdk/rds-signer": "^3.665.0",
"activedirectory2": "^2.1.0",
"async-lock": "^1.2.4",
"async-lock": "^1.2.6",
"axios": "^0.21.1",
"body-parser": "^1.19.0",
"bufferutil": "^4.0.1",
"byline": "^5.0.0",
"compare-versions": "^3.6.0",
"cors": "^2.8.5",
"cross-env": "^6.0.3",
"dbgate-datalib": "^5.0.0-alpha.1",
"dbgate-query-splitter": "^4.10.3",
"dbgate-sqltree": "^5.0.0-alpha.1",
"dbgate-tools": "^5.0.0-alpha.1",
"dbgate-datalib": "^6.0.0-alpha.1",
"dbgate-query-splitter": "^4.11.2",
"dbgate-sqltree": "^6.0.0-alpha.1",
"dbgate-tools": "^6.0.0-alpha.1",
"debug": "^4.3.4",
"diff": "^5.0.0",
"diff2html": "^3.4.13",
@@ -55,8 +58,10 @@
"pinomin": "^1.0.4",
"portfinder": "^1.0.28",
"rimraf": "^3.0.0",
"semver": "^7.6.3",
"simple-encryptor": "^4.0.0",
"ssh2": "^1.11.0",
"stream-json": "^1.8.0",
"tar": "^6.0.5"
},
"scripts": {
@@ -67,24 +72,22 @@
"start:dblogin": "env-cmd -f env/dblogin/.env node src/index.js --listen-api",
"start:filedb": "env-cmd node src/index.js /home/jena/test/chinook/Chinook.db --listen-api",
"start:storage": "env-cmd -f env/storage/.env node src/index.js --listen-api",
"start:storage:built": "env-cmd -f env/storage/.env cross-env DEVMODE= BUILTWEBMODE=1 node dist/bundle.js --listen-api",
"start:singleconn": "env-cmd node src/index.js --server localhost --user root --port 3307 --engine mysql@dbgate-plugin-mysql --password test --listen-api",
"ts": "tsc",
"build": "webpack"
"build": "webpack",
"build:doc": "jsdoc2md --template doctpl.hbs ./src/shell/* > ../../../dbgate.github.io/_docs/apidoc.md"
},
"devDependencies": {
"@types/fs-extra": "^9.0.11",
"@types/lodash": "^4.14.149",
"dbgate-types": "^5.0.0-alpha.1",
"dbgate-types": "^6.0.0-alpha.1",
"env-cmd": "^10.1.0",
"jsdoc-to-markdown": "^9.0.5",
"node-loader": "^1.0.2",
"nodemon": "^2.0.2",
"typescript": "^4.4.3",
"webpack": "^5.91.0",
"webpack-cli": "^5.1.4"
},
"optionalDependencies": {
"better-sqlite3": "9.6.0",
"msnodesqlv8": "^4.2.1",
"oracledb": "^6.6.0"
}
}

View File

@@ -83,9 +83,16 @@ class OAuthProvider extends AuthProviderBase {
)}&client_id=${process.env.OAUTH_CLIENT_ID}&client_secret=${process.env.OAUTH_CLIENT_SECRET}${scopeParam}`
);
const { access_token, refresh_token } = resp.data;
const { access_token, refresh_token, id_token } = resp.data;
const payload = jwt.decode(access_token);
let payload = jwt.decode(access_token);
// Fallback to id_token in case the access_token is not a JWT
// https://www.oauth.com/oauth2-servers/access-tokens/
// https://github.com/dbgate/dbgate/issues/727
if (!payload && id_token) {
payload = jwt.decode(id_token);
}
logger.info({ payload }, 'User payload returned from OAUTH');
@@ -198,6 +205,19 @@ class LoginsProvider extends AuthProviderBase {
amoid = 'logins';
async login(login, password, options = undefined) {
if (login && password && process.env['LOGIN'] == login && process.env['PASSWORD'] == password) {
return {
accessToken: jwt.sign(
{
amoid: this.amoid,
login,
},
getTokenSecret(),
{ expiresIn: getTokenLifetime() }
),
};
}
if (password == process.env[`LOGIN_PASSWORD_${login}`]) {
return {
accessToken: jwt.sign(
@@ -210,6 +230,7 @@ class LoginsProvider extends AuthProviderBase {
),
};
}
return { error: 'Invalid credentials' };
}
@@ -250,11 +271,10 @@ function hasEnvLogins() {
return false;
}
function detectEnvAuthProvider() {
function detectEnvAuthProviderCore() {
if (process.env.AUTH_PROVIDER) {
return process.env.AUTH_PROVIDER;
}
if (process.env.STORAGE_DATABASE) {
return 'denyall';
}
@@ -270,6 +290,14 @@ function detectEnvAuthProvider() {
return 'none';
}
function detectEnvAuthProvider() {
const authProvider = detectEnvAuthProviderCore();
if (process.env.BASIC_AUTH && authProvider != 'logins' && authProvider != 'ad') {
throw new Error(`BASIC_AUTH is not supported with ${authProvider} auth provider`);
}
return authProvider;
}
function createEnvAuthProvider() {
const authProvider = detectEnvAuthProvider();
switch (authProvider) {

View File

@@ -6,7 +6,7 @@ const { archivedir, clearArchiveLinksCache, resolveArchiveFolder } = require('..
const socket = require('../utility/socket');
const loadFilesRecursive = require('../utility/loadFilesRecursive');
const getJslFileName = require('../utility/getJslFileName');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const dbgateApi = require('../shell');
const jsldata = require('./jsldata');
const platformInfo = require('../utility/platformInfo');
@@ -74,7 +74,7 @@ module.exports = {
...fileType('.matview.sql', 'matview.sql'),
];
} catch (err) {
logger.error({ err }, 'Error reading archive files');
logger.error(extractErrorLogData(err), 'Error reading archive files');
return [];
}
},

View File

@@ -1,7 +1,7 @@
const axios = require('axios');
const jwt = require('jsonwebtoken');
const getExpressPath = require('../utility/getExpressPath');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const AD = require('activedirectory2').promiseWrapper;
const crypto = require('crypto');
const { getTokenSecret, getTokenLifetime } = require('../auth/authCommon');
@@ -22,7 +22,8 @@ function unauthorizedResponse(req, res, text) {
// if (req.path == getExpressPath('/connections/list')) {
// return res.json([]);
// }
return res.sendStatus(401).send(text);
return res.status(401).send(text);
}
function authMiddleware(req, res, next) {
@@ -35,8 +36,9 @@ function authMiddleware(req, res, next) {
'/auth/login',
'/auth/redirect',
'/stream',
'storage/get-connections-for-login-page',
'auth/get-providers',
'/storage/get-connections-for-login-page',
'/storage/set-admin-password',
'/auth/get-providers',
'/connections/dblogin-web',
'/connections/dblogin-app',
'/connections/dblogin-auth',
@@ -68,10 +70,11 @@ function authMiddleware(req, res, next) {
return next();
} catch (err) {
if (skipAuth) {
req.isInvalidToken = true;
return next();
}
logger.error({ err }, 'Sending invalid token error');
logger.error(extractErrorLogData(err), 'Sending invalid token error');
return unauthorizedResponse(req, res, 'invalid token');
}
@@ -88,7 +91,12 @@ module.exports = {
const { amoid, login, password, isAdminPage } = params;
if (isAdminPage) {
if (process.env.ADMIN_PASSWORD && process.env.ADMIN_PASSWORD == password) {
let adminPassword = process.env.ADMIN_PASSWORD;
if (!adminPassword) {
const adminConfig = await storage.readConfig({ group: 'admin' });
adminPassword = adminConfig?.adminPassword;
}
if (adminPassword && adminPassword == password) {
return {
accessToken: jwt.sign(
{

View File

@@ -9,6 +9,7 @@ const _ = require('lodash');
const AsyncLock = require('async-lock');
const jwt = require('jsonwebtoken');
const processArgs = require('../utility/processArgs');
const currentVersion = require('../currentVersion');
const platformInfo = require('../utility/platformInfo');
const connections = require('../controllers/connections');
@@ -17,6 +18,7 @@ const { checkLicense, checkLicenseKey } = require('../utility/checkLicense');
const storage = require('./storage');
const { getAuthProxyUrl } = require('../utility/authProxy');
const { getPublicHardwareFingerprint } = require('../utility/hardwareFingerprint');
const { extractErrorMessage } = require('dbgate-tools');
const lock = new AsyncLock();
@@ -39,10 +41,12 @@ module.exports = {
const isUserLoggedIn = authProvider.isUserLoggedIn(req);
const singleConid = authProvider.getSingleConnectionId(req);
const storageConnectionError = storage.getStorageConnectionError();
const singleConnection = singleConid
? await connections.getCore({ conid: singleConid })
: connections.singleConnection;
const singleConnection =
singleConid && !storageConnectionError
? await connections.getCore({ conid: singleConid })
: connections.singleConnection;
let configurationError = null;
if (process.env.STORAGE_DATABASE && process.env.BASIC_AUTH) {
@@ -50,10 +54,25 @@ module.exports = {
'Basic authentication is not allowed, when using storage. Cannot use both STORAGE_DATABASE and BASIC_AUTH';
}
const checkedLicense = await checkLicense();
const isLicenseValid = checkedLicense?.status == 'ok';
if (storageConnectionError && !configurationError) {
configurationError = extractErrorMessage(storageConnectionError);
}
return {
const checkedLicense = storageConnectionError ? null : await checkLicense();
const isLicenseValid = checkedLicense?.status == 'ok';
const logoutUrl = storageConnectionError ? null : await authProvider.getLogoutUrl();
const adminConfig = storageConnectionError ? null : await storage.readConfig({ group: 'admin' });
storage.startRefreshLicense();
const isAdminPasswordMissing = !!(
process.env.STORAGE_DATABASE &&
!process.env.ADMIN_PASSWORD &&
!process.env.BASIC_AUTH &&
!adminConfig?.adminPasswordState
);
const configResult = {
runAsPortal: !!connections.portalConnections,
singleDbConnection: connections.singleDbConnection,
singleConnection: singleConnection,
@@ -64,24 +83,34 @@ module.exports = {
isDocker: platformInfo.isDocker,
isElectron: platformInfo.isElectron,
isLicenseValid,
isLicenseExpired: checkedLicense?.isExpired,
trialDaysLeft:
checkedLicense?.licenseTypeObj?.isTrial && !checkedLicense?.isExpired ? checkedLicense?.daysLeft : null,
checkedLicense,
configurationError,
logoutUrl: await authProvider.getLogoutUrl(),
logoutUrl,
permissions,
login,
// ...additionalConfigProps,
isBasicAuth: !!process.env.BASIC_AUTH,
isAdminLoginForm: !!(
process.env.STORAGE_DATABASE &&
process.env.ADMIN_PASSWORD &&
!process.env.BASIC_AUTH &&
checkedLicense?.type == 'premium'
(process.env.ADMIN_PASSWORD || adminConfig?.adminPasswordState == 'set') &&
!process.env.BASIC_AUTH
),
isAdminPasswordMissing,
isInvalidToken: req?.isInvalidToken,
adminPasswordState: adminConfig?.adminPasswordState,
storageDatabase: process.env.STORAGE_DATABASE,
logsFilePath: getLogsFilePath(),
connectionsFilePath: path.join(datadir(), 'connections.jsonl'),
connectionsFilePath: path.join(
datadir(),
processArgs.runE2eTests ? 'connections-e2etests.jsonl' : 'connections.jsonl'
),
...currentVersion,
};
return configResult;
},
logout_meta: {

View File

@@ -12,7 +12,7 @@ const { pickSafeConnectionInfo } = require('../utility/crypting');
const JsonLinesDatabase = require('../utility/JsonLinesDatabase');
const processArgs = require('../utility/processArgs');
const { safeJsonParse, getLogger } = require('dbgate-tools');
const { safeJsonParse, getLogger, extractErrorLogData } = require('dbgate-tools');
const platformInfo = require('../utility/platformInfo');
const { connectionHasPermission, testConnectionPermission } = require('../utility/hasPermission');
const pipeForkLogs = require('../utility/pipeForkLogs');
@@ -76,6 +76,7 @@ function getPortalCollections() {
allowedDatabases: process.env[`ALLOWED_DATABASES_${id}`]?.replace(/\|/g, '\n'),
allowedDatabasesRegex: process.env[`ALLOWED_DATABASES_REGEX_${id}`],
parent: process.env[`PARENT_${id}`] || undefined,
useSeparateSchemas: !!process.env[`USE_SEPARATE_SCHEMAS_${id}`],
// SSH tunnel
useSshTunnel: process.env[`USE_SSH_${id}`],
@@ -198,8 +199,11 @@ module.exports = {
const dir = datadir();
if (!portalConnections) {
// @ts-ignore
this.datastore = new JsonLinesDatabase(path.join(dir, 'connections.jsonl'));
this.datastore = new JsonLinesDatabase(
path.join(dir, processArgs.runE2eTests ? 'connections-e2etests.jsonl' : 'connections.jsonl')
);
}
await this.checkUnsavedConnectionsLimit();
},
list_meta: true,
@@ -218,7 +222,7 @@ module.exports = {
},
test_meta: true,
test(connection) {
test({ connection, requestDbList }) {
const subprocess = fork(
global['API_PACKAGE'] || process.argv[1],
[
@@ -233,7 +237,7 @@ module.exports = {
}
);
pipeForkLogs(subprocess);
subprocess.send(connection);
subprocess.send({ ...connection, requestDbList });
return new Promise(resolve => {
subprocess.on('message', resp => {
if (handleProcessCommunication(resp, subprocess)) return;
@@ -299,6 +303,32 @@ module.exports = {
return res;
},
async checkUnsavedConnectionsLimit() {
if (!this.datastore) {
return;
}
const MAX_UNSAVED_CONNECTIONS = 5;
await this.datastore.transformAll(connections => {
const count = connections.filter(x => x.unsaved).length;
if (count > MAX_UNSAVED_CONNECTIONS) {
const res = [];
let unsavedToSkip = count - MAX_UNSAVED_CONNECTIONS;
for (const item of connections) {
if (item.unsaved) {
if (unsavedToSkip > 0) {
unsavedToSkip--;
} else {
res.push(item);
}
} else {
res.push(item);
}
}
return res;
}
});
},
update_meta: true,
async update({ _id, values }, req) {
if (portalConnections) return;
@@ -367,6 +397,11 @@ module.exports = {
get_meta: true,
async get({ conid }, req) {
if (conid == '__model') {
return {
_id: '__model',
};
}
testConnectionPermission(conid, req);
return this.getCore({ conid, mask: true });
},
@@ -429,7 +464,7 @@ module.exports = {
socket.emit('got-volatile-token', { strmid, savedConId: conid, volatileConId: volatile._id });
return { success: true };
} catch (err) {
logger.error({ err }, 'Error getting DB token');
logger.error(extractErrorLogData(err), 'Error getting DB token');
return { error: err.message };
}
},
@@ -445,7 +480,7 @@ module.exports = {
const resp = await authProvider.login(null, null, { conid: volatile._id });
return resp;
} catch (err) {
logger.error({ err }, 'Error getting DB token');
logger.error(extractErrorLogData(err), 'Error getting DB token');
return { error: err.message };
}
},
@@ -477,4 +512,10 @@ module.exports = {
}
return null;
},
reloadConnectionList_meta: true,
async reloadConnectionList() {
if (portalConnections) return;
await this.datastore.unload();
},
};

View File

@@ -12,6 +12,8 @@ const {
extendDatabaseInfo,
modelCompareDbDiffOptions,
getLogger,
extractErrorLogData,
filterStructureBySchema,
} = require('dbgate-tools');
const { html, parse } = require('diff2html');
const { handleProcessCommunication } = require('../utility/processComm');
@@ -30,6 +32,8 @@ const { testConnectionPermission } = require('../utility/hasPermission');
const { MissingCredentialsError } = require('../utility/exceptions');
const pipeForkLogs = require('../utility/pipeForkLogs');
const crypto = require('crypto');
const loadModelTransform = require('../utility/loadModelTransform');
const exportDbModelSql = require('../utility/exportDbModelSql');
const logger = getLogger('databaseConnections');
@@ -146,7 +150,7 @@ module.exports = {
try {
conn.subprocess.send({ msgid, ...message });
} catch (err) {
logger.error({ err }, 'Error sending request to process');
logger.error(extractErrorLogData(err), 'Error sending request to process');
this.close(conn.conid, conn.database);
}
});
@@ -213,6 +217,17 @@ module.exports = {
return res.result || null;
},
schemaList_meta: true,
async schemaList({ conid, database }, req) {
testConnectionPermission(conid, req);
return this.loadDataCore('schemaList', { conid, database });
},
dispatchDatabaseChangedEvent_meta: true,
dispatchDatabaseChangedEvent({ event, conid, database }) {
socket.emitChanged(event, { conid, database });
},
loadKeys_meta: true,
async loadKeys({ conid, database, root, filter }, req) {
testConnectionPermission(conid, req);
@@ -307,7 +322,7 @@ module.exports = {
try {
existing.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error({ err }, 'Error pinging DB connection');
logger.error(extractErrorLogData(err), 'Error pinging DB connection');
this.close(conid, database);
return {
@@ -337,6 +352,11 @@ module.exports = {
syncModel_meta: true,
async syncModel({ conid, database, isFullRefresh }, req) {
if (conid == '__model') {
socket.emitChanged('database-structure-changed', { conid, database });
return { status: 'ok' };
}
testConnectionPermission(conid, req);
const conn = await this.ensureOpened(conid, database);
conn.subprocess.send({ msgtype: 'syncModel', isFullRefresh });
@@ -351,7 +371,7 @@ module.exports = {
try {
existing.subprocess.kill();
} catch (err) {
logger.error({ err }, 'Error killing subprocess');
logger.error(extractErrorLogData(err), 'Error killing subprocess');
}
}
this.opened = this.opened.filter(x => x.conid != conid || x.database != database);
@@ -380,11 +400,12 @@ module.exports = {
},
structure_meta: true,
async structure({ conid, database }, req) {
async structure({ conid, database, modelTransFile = null }, req) {
testConnectionPermission(conid, req);
if (conid == '__model') {
const model = await importDbModel(database);
return model;
const trans = await loadModelTransform(modelTransFile);
return trans ? trans(model) : model;
}
const opened = await this.ensureOpened(conid, database);
@@ -420,14 +441,35 @@ module.exports = {
},
exportModel_meta: true,
async exportModel({ conid, database }, req) {
async exportModel({ conid, database, outputFolder, schema }, req) {
testConnectionPermission(conid, req);
const archiveFolder = await archive.getNewArchiveFolder({ database });
await fs.mkdir(path.join(archivedir(), archiveFolder));
const realFolder = outputFolder.startsWith('archive:')
? resolveArchiveFolder(outputFolder.substring('archive:'.length))
: outputFolder;
const model = await this.structure({ conid, database });
await exportDbModel(model, path.join(archivedir(), archiveFolder));
socket.emitChanged(`archive-folders-changed`);
return { archiveFolder };
const filteredModel = schema ? filterStructureBySchema(model, schema) : model;
await exportDbModel(extendDatabaseInfo(filteredModel), realFolder);
if (outputFolder.startsWith('archive:')) {
socket.emitChanged(`archive-files-changed`, { folder: outputFolder.substring('archive:'.length) });
}
return { status: 'ok' };
},
exportModelSql_meta: true,
async exportModelSql({ conid, database, outputFolder, outputFile, schema }, req) {
testConnectionPermission(conid, req);
const connection = await connections.getCore({ conid });
const driver = requireEngineDriver(connection);
const model = await this.structure({ conid, database });
const filteredModel = schema ? filterStructureBySchema(model, schema) : model;
await exportDbModelSql(extendDatabaseInfo(filteredModel), driver, outputFolder, outputFile);
return { status: 'ok' };
},
generateDeploySql_meta: true,

View File

@@ -18,11 +18,14 @@ function readFirstLine(file) {
}
if (reader.hasNextLine()) {
reader.nextLine((err, line) => {
if (err) reject(err);
resolve(line);
if (err) {
reader.close(() => reject(err)); // Ensure reader is closed on error
return;
}
reader.close(() => resolve(line)); // Ensure reader is closed after reading
});
} else {
resolve(null);
reader.close(() => resolve(null)); // Properly close if no lines are present
}
});
});

View File

@@ -94,7 +94,7 @@ module.exports = {
if (!manifest.keywords) {
continue;
}
if (!manifest.keywords.includes('dbgateplugin')) {
if (!manifest.keywords.includes('dbgateplugin') && !manifest.keywords.includes('dbgatebuiltin')) {
continue;
}
const readmeFile = path.join(isPackaged ? packagedPluginsDir() : pluginsdir(), packageName, 'README.md');

View File

@@ -12,6 +12,7 @@ const {
jsonScriptToJavascript,
getLogger,
safeJsonParse,
pinoLogRecordToMessageRecord,
} = require('dbgate-tools');
const { handleProcessCommunication } = require('../utility/processComm');
const processArgs = require('../utility/processArgs');
@@ -68,18 +69,20 @@ module.exports = {
dispatchMessage(runid, message) {
if (message) {
const json = safeJsonParse(message.message);
if (_.isPlainObject(message)) logger.log(message);
else logger.info(message);
if (json) logger.log(json);
else logger.info(message.message);
const toEmit = _.isPlainObject(message)
? {
time: new Date(),
...message,
}
: {
message,
time: new Date(),
};
const toEmit = {
time: new Date(),
...message,
message: json ? json.msg : message.message,
};
if (json && json.level >= 50) {
if (toEmit.level >= 50) {
toEmit.severity = 'error';
}
@@ -108,7 +111,7 @@ module.exports = {
const scriptFile = path.join(uploadsdir(), runid + '.js');
fs.writeFileSync(`${scriptFile}`, scriptText);
fs.mkdirSync(directory);
const pluginNames = _.union(fs.readdirSync(pluginsdir()), packagedPluginList);
const pluginNames = extractPlugins(scriptText);
logger.info({ scriptFile }, 'Running script');
// const subprocess = fork(scriptFile, ['--checkParent', '--max-old-space-size=8192'], {
const subprocess = fork(
@@ -131,7 +134,16 @@ module.exports = {
}
);
const pipeDispatcher = severity => data => {
return this.dispatchMessage(runid, { severity, message: data.toString().trim() });
const json = safeJsonParse(data, null);
if (json) {
return this.dispatchMessage(runid, pinoLogRecordToMessageRecord(json));
} else {
return this.dispatchMessage(runid, {
message: json == null ? data.toString().trim() : null,
severity,
});
}
};
byline(subprocess.stdout).on('data', pipeDispatcher('info'));
@@ -165,7 +177,7 @@ module.exports = {
start_meta: true,
async start({ script }) {
const runid = crypto.randomUUID()
const runid = crypto.randomUUID();
if (script.type == 'json') {
const js = jsonScriptToJavascript(script);

View File

@@ -11,7 +11,7 @@ const processArgs = require('../utility/processArgs');
const { testConnectionPermission } = require('../utility/hasPermission');
const { MissingCredentialsError } = require('../utility/exceptions');
const pipeForkLogs = require('../utility/pipeForkLogs');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('serverConnection');
@@ -52,7 +52,7 @@ module.exports = {
if (existing) return existing;
const connection = await connections.getCore({ conid });
if (!connection) {
throw new Error(`Connection with conid="${conid}" not fund`);
throw new Error(`Connection with conid="${conid}" not found`);
}
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
@@ -112,7 +112,7 @@ module.exports = {
try {
existing.subprocess.kill();
} catch (err) {
logger.error({ err }, 'Error killing subprocess');
logger.error(extractErrorLogData(err), 'Error killing subprocess');
}
}
this.opened = this.opened.filter(x => x.conid != conid);
@@ -134,6 +134,7 @@ module.exports = {
listDatabases_meta: true,
async listDatabases({ conid }, req) {
if (!conid) return [];
if (conid == '__model') return [];
testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid);
return opened.databases;
@@ -167,12 +168,12 @@ module.exports = {
try {
opened.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error({ err }, 'Error pinging server connection');
logger.error(extractErrorLogData(err), 'Error pinging server connection');
this.close(conid);
}
})
);
socket.setStreamIdFilter(strmid, { conid: conidArray });
socket.setStreamIdFilter(strmid, { conid: [...(conidArray ?? []), '__model'] });
return { status: 'ok' };
},
@@ -217,7 +218,7 @@ module.exports = {
try {
conn.subprocess.send({ msgid, ...message });
} catch (err) {
logger.error({ err }, 'Error sending request');
logger.error(extractErrorLogData(err), 'Error sending request');
this.close(conn.conid);
}
});

View File

@@ -8,7 +8,7 @@ const path = require('path');
const { handleProcessCommunication } = require('../utility/processComm');
const processArgs = require('../utility/processArgs');
const { appdir } = require('../utility/directories');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const pipeForkLogs = require('../utility/pipeForkLogs');
const config = require('./config');
@@ -222,7 +222,7 @@ module.exports = {
try {
session.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error({ err }, 'Error pinging session');
logger.error(extractErrorLogData(err), 'Error pinging session');
return {
status: 'error',

View File

@@ -17,4 +17,15 @@ module.exports = {
async getConnectionsForLoginPage() {
return null;
},
getStorageConnectionError() {
return null;
},
readConfig_meta: true,
async readConfig({ group }) {
return {};
},
startRefreshLicense() {},
};

View File

@@ -1,7 +1,7 @@
const crypto = require('crypto');
const path = require('path');
const { uploadsdir, getLogsFilePath } = require('../utility/directories');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('uploads');
const axios = require('axios');
const os = require('os');
@@ -110,7 +110,7 @@ module.exports = {
return response.data;
} catch (err) {
logger.error({ err }, 'Error uploading gist');
logger.error(extractErrorLogData(err), 'Error uploading gist');
return {
apiErrorMessage: err.message,

View File

@@ -1,5 +1,5 @@
module.exports = {
version: '5.0.0-alpha.1',
buildTime: '2021-04-17T07:22:49.702Z'
version: '6.0.0-alpha.1',
buildTime: '2024-12-01T00:00:00Z'
};

View File

@@ -1,10 +1,17 @@
const { setLogConfig, getLogger, setLoggerName } = require('dbgate-tools');
const { setLogConfig, getLogger, setLoggerName, extractErrorLogData } = require('dbgate-tools');
const processArgs = require('./utility/processArgs');
const fs = require('fs');
const moment = require('moment');
const path = require('path');
const { logsdir, setLogsFilePath, getLogsFilePath } = require('./utility/directories');
const { createLogger } = require('pinomin');
const currentVersion = require('./currentVersion');
const logger = getLogger('apiIndex');
process.on('uncaughtException', err => {
logger.fatal(extractErrorLogData(err), 'Uncaught exception, exiting process');
process.exit(1);
});
if (processArgs.startProcess) {
setLoggerName(processArgs.startProcess.replace(/Process$/, ''));
@@ -94,10 +101,17 @@ function configureLogger() {
if (processArgs.listenApi) {
configureLogger();
logger.info(`Starting API process version ${currentVersion.version}`);
if (process.env.DEBUG_PRINT_ENV_VARIABLES) {
logger.info('Debug print environment variables:');
for (const key of Object.keys(process.env)) {
logger.info(` ${key}: ${JSON.stringify(process.env[key])}`);
}
}
}
const shell = require('./shell/index');
const currentVersion = require('./currentVersion');
global.DBGATE_PACKAGES = {
'dbgate-tools': require('dbgate-tools'),

View File

@@ -28,6 +28,7 @@ const files = require('./controllers/files');
const scheduler = require('./controllers/scheduler');
const queryHistory = require('./controllers/queryHistory');
const onFinished = require('on-finished');
const processArgs = require('./utility/processArgs');
const { rundir } = require('./utility/directories');
const platformInfo = require('./utility/platformInfo');
@@ -35,6 +36,8 @@ const getExpressPath = require('./utility/getExpressPath');
const _ = require('lodash');
const { getLogger } = require('dbgate-tools');
const { getDefaultAuthProvider } = require('./auth/authProvider');
const startCloudUpgradeTimer = require('./utility/cloudUpgrade');
const { isProApp } = require('./utility/checkLicense');
const logger = getLogger('main');
@@ -73,8 +76,15 @@ function start() {
if (platformInfo.isDocker) {
// server static files inside docker container
app.use(getExpressPath('/'), express.static('/home/dbgate-docker/public'));
} else if (platformInfo.isAwsUbuntuLayout) {
app.use(getExpressPath('/'), express.static('/home/ubuntu/build/public'));
} else if (processArgs.runE2eTests) {
app.use(getExpressPath('/'), express.static(path.resolve('packer/build/public')));
} else if (platformInfo.isNpmDist) {
app.use(getExpressPath('/'), express.static(path.join(__dirname, '../../dbgate-web/public')));
app.use(
getExpressPath('/'),
express.static(path.join(__dirname, isProApp() ? '../../dbgate-web-premium/public' : '../../dbgate-web/public'))
);
} else if (process.env.DEVWEB) {
// console.log('__dirname', __dirname);
// console.log(path.join(__dirname, '../../web/public/build'));
@@ -126,6 +136,10 @@ function start() {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (docker build)`);
server.listen(port);
} else if (platformInfo.isAwsUbuntuLayout) {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (AWS AMI build)`);
server.listen(port);
} else if (platformInfo.isNpmDist) {
getPort({
port: parseInt(
@@ -162,6 +176,10 @@ function start() {
process.on('SIGINT', shutdown);
process.on('SIGTERM', shutdown);
process.on('SIGBREAK', shutdown);
if (process.env.CLOUD_UPGRADE_FILE) {
startCloudUpgradeTimer();
}
}
function useAllControllers(app, electron) {

View File

@@ -1,13 +0,0 @@
const argIndex = process.argv.indexOf('--native-modules');
const redirectFile = global['NATIVE_MODULES'] || (argIndex > 0 ? process.argv[argIndex + 1] : null);
function requireDynamic(file) {
try {
// @ts-ignore
return __non_webpack_require__(redirectFile);
} catch (err) {
return require(redirectFile);
}
}
module.exports = redirectFile ? requireDynamic(redirectFile) : require('./nativeModulesContent');

View File

@@ -0,0 +1,9 @@
// this file is generated automatically by script fillNativeModules.js, do not edit it manually
// Map of native module package name -> lazy loader thunk; wrapping each
// require in a function defers loading the native binding until a consumer
// actually invokes the thunk.
const content = {};
content['better-sqlite3'] = () => require('better-sqlite3');
content['oracledb'] = () => require('oracledb');
module.exports = content;

View File

@@ -17,12 +17,19 @@ Platform: ${process.platform}
function start() {
childProcessChecker();
process.on('message', async connection => {
// @ts-ignore
const { requestDbList } = connection;
if (handleProcessCommunication(connection)) return;
try {
const driver = requireEngineDriver(connection);
const conn = await connectUtility(driver, connection, 'app');
const res = await driver.getVersion(conn);
process.send({ msgtype: 'connected', ...res });
const dbhan = await connectUtility(driver, connection, 'app');
const res = await driver.getVersion(dbhan);
let databases = undefined;
if (requestDbList) {
databases = await driver.listDatabases(dbhan);
}
process.send({ msgtype: 'connected', ...res, databases });
await driver.close(dbhan);
} catch (e) {
console.error(e);
process.send({

View File

@@ -1,7 +1,15 @@
const stableStringify = require('json-stable-stringify');
const { splitQuery } = require('dbgate-query-splitter');
const childProcessChecker = require('../utility/childProcessChecker');
const { extractBoolSettingsValue, extractIntSettingsValue, getLogger } = require('dbgate-tools');
const {
extractBoolSettingsValue,
extractIntSettingsValue,
getLogger,
isCompositeDbName,
dbNameLogCategory,
extractErrorMessage,
extractErrorLogData,
} = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const { handleProcessCommunication } = require('../utility/processComm');
@@ -11,7 +19,7 @@ const { dumpSqlSelect } = require('dbgate-sqltree');
const logger = getLogger('dbconnProcess');
let systemConnection;
let dbhan;
let storedConnection;
let afterConnectCallbacks = [];
let afterAnalyseCallbacks = [];
@@ -35,7 +43,7 @@ async function checkedAsyncCall(promise) {
} catch (err) {
setStatus({
name: 'error',
message: err.message,
message: extractErrorMessage(err, 'Checked call error'),
});
// console.error(err);
setTimeout(() => process.exit(1), 1000);
@@ -46,10 +54,16 @@ async function checkedAsyncCall(promise) {
let loadingModel = false;
async function handleFullRefresh() {
if (storedConnection.useSeparateSchemas && !isCompositeDbName(dbhan?.database)) {
resolveAnalysedPromises();
// skip loading DB structure
return;
}
loadingModel = true;
const driver = requireEngineDriver(storedConnection);
setStatusName('loadStructure');
analysedStructure = await checkedAsyncCall(driver.analyseFull(systemConnection, serverVersion));
analysedStructure = await checkedAsyncCall(driver.analyseFull(dbhan, serverVersion));
analysedTime = new Date().getTime();
process.send({ msgtype: 'structure', structure: analysedStructure });
process.send({ msgtype: 'structureTime', analysedTime });
@@ -60,12 +74,15 @@ async function handleFullRefresh() {
}
async function handleIncrementalRefresh(forceSend) {
if (storedConnection.useSeparateSchemas && !isCompositeDbName(dbhan?.database)) {
resolveAnalysedPromises();
// skip loading DB structure
return;
}
loadingModel = true;
const driver = requireEngineDriver(storedConnection);
setStatusName('checkStructure');
const newStructure = await checkedAsyncCall(
driver.analyseIncremental(systemConnection, analysedStructure, serverVersion)
);
const newStructure = await checkedAsyncCall(driver.analyseIncremental(dbhan, analysedStructure, serverVersion));
analysedTime = new Date().getTime();
if (newStructure != null) {
analysedStructure = newStructure;
@@ -103,7 +120,8 @@ function setStatusName(name) {
async function readVersion() {
const driver = requireEngineDriver(storedConnection);
const version = await driver.getVersion(systemConnection);
const version = await driver.getVersion(dbhan);
logger.debug(`Got server version: ${version.version}`);
process.send({ msgtype: 'version', version });
serverVersion = version;
}
@@ -114,8 +132,13 @@ async function handleConnect({ connection, structure, globalSettings }) {
if (!structure) setStatusName('pending');
const driver = requireEngineDriver(storedConnection);
systemConnection = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app'));
systemConnection.feedback = feedback => setStatus({ feedback });
dbhan = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app'));
logger.debug(
`Connected to database, driver: ${storedConnection.engine}, separate schemas: ${
storedConnection.useSeparateSchemas ? 'YES' : 'NO'
}, 'DB: ${dbNameLogCategory(dbhan.database)} }`
);
dbhan.feedback = feedback => setStatus({ feedback });
await checkedAsyncCall(readVersion());
if (structure) {
analysedStructure = structure;
@@ -138,7 +161,7 @@ async function handleConnect({ connection, structure, globalSettings }) {
}
function waitConnected() {
if (systemConnection) return Promise.resolve();
if (dbhan) return Promise.resolve();
return new Promise((resolve, reject) => {
afterConnectCallbacks.push([resolve, reject]);
});
@@ -163,10 +186,14 @@ async function handleRunScript({ msgid, sql, useTransaction }, skipReadonlyCheck
const driver = requireEngineDriver(storedConnection);
try {
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
await driver.script(systemConnection, sql, { useTransaction });
await driver.script(dbhan, sql, { useTransaction });
process.send({ msgtype: 'response', msgid });
} catch (err) {
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
process.send({
msgtype: 'response',
msgid,
errorMessage: extractErrorMessage(err, 'Error executing SQL script'),
});
}
}
@@ -175,10 +202,14 @@ async function handleRunOperation({ msgid, operation, useTransaction }, skipRead
const driver = requireEngineDriver(storedConnection);
try {
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
await driver.operation(systemConnection, operation, { useTransaction });
await driver.operation(dbhan, operation, { useTransaction });
process.send({ msgtype: 'response', msgid });
} catch (err) {
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
process.send({
msgtype: 'response',
msgid,
errorMessage: extractErrorMessage(err, 'Error executing DB operation'),
});
}
}
@@ -188,10 +219,14 @@ async function handleQueryData({ msgid, sql }, skipReadonlyCheck = false) {
try {
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
// console.log(sql);
const res = await driver.query(systemConnection, sql);
const res = await driver.query(dbhan, sql);
process.send({ msgtype: 'response', msgid, ...res });
} catch (err) {
process.send({ msgtype: 'response', msgid, errorMessage: err.message || 'Error executing SQL script' });
process.send({
msgtype: 'response',
msgid,
errorMessage: extractErrorMessage(err, 'Error executing SQL script'),
});
}
}
@@ -202,52 +237,64 @@ async function handleSqlSelect({ msgid, select }) {
return handleQueryData({ msgid, sql: dmp.s }, true);
}
async function handleDriverDataCore(msgid, callMethod) {
async function handleDriverDataCore(msgid, callMethod, { logName }) {
await waitConnected();
const driver = requireEngineDriver(storedConnection);
try {
const result = await callMethod(driver);
process.send({ msgtype: 'response', msgid, result });
} catch (err) {
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
logger.error(extractErrorLogData(err, { logName }), `Error when handling message ${logName}`);
process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error executing DB data') });
}
}
async function handleSchemaList({ msgid }) {
logger.debug('Loading schema list');
return handleDriverDataCore(msgid, driver => driver.listSchemas(dbhan), { logName: 'listSchemas' });
}
async function handleCollectionData({ msgid, options }) {
return handleDriverDataCore(msgid, driver => driver.readCollection(systemConnection, options));
return handleDriverDataCore(msgid, driver => driver.readCollection(dbhan, options), { logName: 'readCollection' });
}
async function handleLoadKeys({ msgid, root, filter }) {
return handleDriverDataCore(msgid, driver => driver.loadKeys(systemConnection, root, filter));
return handleDriverDataCore(msgid, driver => driver.loadKeys(dbhan, root, filter), { logName: 'loadKeys' });
}
async function handleExportKeys({ msgid, options }) {
return handleDriverDataCore(msgid, driver => driver.exportKeys(systemConnection, options));
return handleDriverDataCore(msgid, driver => driver.exportKeys(dbhan, options), { logName: 'exportKeys' });
}
async function handleLoadKeyInfo({ msgid, key }) {
return handleDriverDataCore(msgid, driver => driver.loadKeyInfo(systemConnection, key));
return handleDriverDataCore(msgid, driver => driver.loadKeyInfo(dbhan, key), { logName: 'loadKeyInfo' });
}
async function handleCallMethod({ msgid, method, args }) {
return handleDriverDataCore(msgid, driver => {
if (storedConnection.isReadOnly) {
throw new Error('Connection is read only, cannot call custom methods');
}
return handleDriverDataCore(
msgid,
driver => {
if (storedConnection.isReadOnly) {
throw new Error('Connection is read only, cannot call custom methods');
}
ensureExecuteCustomScript(driver);
return driver.callMethod(systemConnection, method, args);
});
ensureExecuteCustomScript(driver);
return driver.callMethod(dbhan, method, args);
},
{ logName: `callMethod:${method}` }
);
}
async function handleLoadKeyTableRange({ msgid, key, cursor, count }) {
return handleDriverDataCore(msgid, driver => driver.loadKeyTableRange(systemConnection, key, cursor, count));
return handleDriverDataCore(msgid, driver => driver.loadKeyTableRange(dbhan, key, cursor, count), {
logName: 'loadKeyTableRange',
});
}
async function handleLoadFieldValues({ msgid, schemaName, pureName, field, search }) {
return handleDriverDataCore(msgid, driver =>
driver.loadFieldValues(systemConnection, { schemaName, pureName }, field, search)
);
return handleDriverDataCore(msgid, driver => driver.loadFieldValues(dbhan, { schemaName, pureName }, field, search), {
logName: 'loadFieldValues',
});
}
function ensureExecuteCustomScript(driver) {
@@ -264,10 +311,10 @@ async function handleUpdateCollection({ msgid, changeSet }) {
const driver = requireEngineDriver(storedConnection);
try {
ensureExecuteCustomScript(driver);
const result = await driver.updateCollection(systemConnection, changeSet);
const result = await driver.updateCollection(dbhan, changeSet);
process.send({ msgtype: 'response', msgid, result });
} catch (err) {
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error updating collection') });
}
}
@@ -277,18 +324,24 @@ async function handleSqlPreview({ msgid, objects, options }) {
try {
const dmp = driver.createDumper();
const generator = new SqlGenerator(analysedStructure, options, objects, dmp, driver, systemConnection);
const generator = new SqlGenerator(analysedStructure, options, objects, dmp, driver, dbhan);
await generator.dump();
process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated });
if (generator.isUnhandledException) {
setTimeout(() => {
setTimeout(async () => {
logger.error('Exiting because of unhandled exception');
await driver.close(dbhan);
process.exit(0);
}, 500);
}
} catch (err) {
process.send({ msgtype: 'response', msgid, isError: true, errorMessage: err.message });
process.send({
msgtype: 'response',
msgid,
isError: true,
errorMessage: extractErrorMessage(err, 'Error generating SQL preview'),
});
}
}
@@ -297,14 +350,19 @@ async function handleGenerateDeploySql({ msgid, modelFolder }) {
try {
const res = await generateDeploySql({
systemConnection,
systemConnection: dbhan,
connection: storedConnection,
analysedStructure,
modelFolder,
});
process.send({ ...res, msgtype: 'response', msgid });
} catch (err) {
process.send({ msgtype: 'response', msgid, isError: true, errorMessage: err.message });
process.send({
msgtype: 'response',
msgid,
isError: true,
errorMessage: extractErrorMessage(err, 'Error generating deploy SQL'),
});
}
}
@@ -337,6 +395,7 @@ const messageHandlers = {
loadFieldValues: handleLoadFieldValues,
sqlSelect: handleSqlSelect,
exportKeys: handleExportKeys,
schemaList: handleSchemaList,
// runCommand: handleRunCommand,
};
@@ -348,10 +407,12 @@ async function handleMessage({ msgtype, ...other }) {
function start() {
childProcessChecker();
setInterval(() => {
setInterval(async () => {
const time = new Date().getTime();
if (time - lastPing > 40 * 1000) {
logger.info('Database connection not alive, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);
}
}, 10 * 1000);
@@ -361,8 +422,8 @@ function start() {
try {
await handleMessage(message);
} catch (err) {
logger.error({ err }, 'Error in DB connection');
process.send({ msgtype: 'error', error: err.message });
logger.error(extractErrorLogData(err), 'Error in DB connection');
process.send({ msgtype: 'error', error: extractErrorMessage(err, 'Error processing message') });
}
});
}

View File

@@ -1,12 +1,12 @@
const stableStringify = require('json-stable-stringify');
const { extractBoolSettingsValue, extractIntSettingsValue, getLogger } = require('dbgate-tools');
const { extractBoolSettingsValue, extractIntSettingsValue, getLogger, extractErrorLogData } = require('dbgate-tools');
const childProcessChecker = require('../utility/childProcessChecker');
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const { handleProcessCommunication } = require('../utility/processComm');
const logger = getLogger('srvconnProcess');
let systemConnection;
let dbhan;
let storedConnection;
let lastDatabases = null;
let lastStatus = null;
@@ -16,7 +16,7 @@ let afterConnectCallbacks = [];
async function handleRefresh() {
const driver = requireEngineDriver(storedConnection);
try {
let databases = await driver.listDatabases(systemConnection);
let databases = await driver.listDatabases(dbhan);
if (storedConnection?.allowedDatabases?.trim()) {
const allowedDatabaseList = storedConnection.allowedDatabases
.split('\n')
@@ -39,14 +39,14 @@ async function handleRefresh() {
name: 'error',
message: err.message,
});
// console.error(err);
logger.error(extractErrorLogData(err), 'Error refreshing server databases');
setTimeout(() => process.exit(1), 1000);
}
}
async function readVersion() {
const driver = requireEngineDriver(storedConnection);
const version = await driver.getVersion(systemConnection);
const version = await driver.getVersion(dbhan);
process.send({ msgtype: 'version', version });
}
@@ -70,7 +70,7 @@ async function handleConnect(connection) {
const driver = requireEngineDriver(storedConnection);
try {
systemConnection = await connectUtility(driver, storedConnection, 'app');
dbhan = await connectUtility(driver, storedConnection, 'app');
readVersion();
handleRefresh();
if (extractBoolSettingsValue(globalSettings, 'connection.autoRefresh', false)) {
@@ -84,7 +84,7 @@ async function handleConnect(connection) {
name: 'error',
message: err.message,
});
// console.error(err);
logger.error(extractErrorLogData(err), 'Error connecting to server');
setTimeout(() => process.exit(1), 1000);
}
@@ -95,7 +95,7 @@ async function handleConnect(connection) {
}
function waitConnected() {
if (systemConnection) return Promise.resolve();
if (dbhan) return Promise.resolve();
return new Promise((resolve, reject) => {
afterConnectCallbacks.push([resolve, reject]);
});
@@ -108,14 +108,14 @@ function handlePing() {
async function handleDatabaseOp(op, { msgid, name }) {
try {
const driver = requireEngineDriver(storedConnection);
systemConnection = await connectUtility(driver, storedConnection, 'app');
dbhan = await connectUtility(driver, storedConnection, 'app');
if (driver[op]) {
await driver[op](systemConnection, name);
await driver[op](dbhan, name);
} else {
const dmp = driver.createDumper();
dmp[op](name);
logger.info({ sql: dmp.s }, 'Running script');
await driver.query(systemConnection, dmp.s, { discardResult: true });
await driver.query(dbhan, dmp.s, { discardResult: true });
}
await handleRefresh();
@@ -137,11 +137,11 @@ async function handleDriverDataCore(msgid, callMethod) {
}
async function handleServerSummary({ msgid }) {
return handleDriverDataCore(msgid, driver => driver.serverSummary(systemConnection));
return handleDriverDataCore(msgid, driver => driver.serverSummary(dbhan));
}
async function handleSummaryCommand({ msgid, command, row }) {
return handleDriverDataCore(msgid, driver => driver.summaryCommand(systemConnection, command, row));
return handleDriverDataCore(msgid, driver => driver.summaryCommand(dbhan, command, row));
}
const messageHandlers = {
@@ -161,10 +161,12 @@ async function handleMessage({ msgtype, ...other }) {
function start() {
childProcessChecker();
setInterval(() => {
setInterval(async () => {
const time = new Date().getTime();
if (time - lastPing > 40 * 1000) {
logger.info('Server connection not alive, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);
}
}, 10 * 1000);
@@ -178,6 +180,7 @@ function start() {
name: 'error',
message: err.message,
});
logger.error(extractErrorLogData(err), `Error processing message ${message?.['msgtype']}`);
}
});
}

View File

@@ -14,7 +14,7 @@ const { getLogger, extractIntSettingsValue, extractBoolSettingsValue } = require
const logger = getLogger('sessionProcess');
let systemConnection;
let dbhan;
let storedConnection;
let afterConnectCallbacks = [];
// let currentHandlers = [];
@@ -177,7 +177,7 @@ function handleStream(driver, resultIndexHolder, sqlItem) {
return new Promise((resolve, reject) => {
const start = sqlItem.trimStart || sqlItem.start;
const handler = new StreamHandler(resultIndexHolder, resolve, start && start.line);
driver.stream(systemConnection, sqlItem.text, handler);
driver.stream(dbhan, sqlItem.text, handler);
});
}
@@ -196,7 +196,7 @@ async function handleConnect(connection) {
storedConnection = connection;
const driver = requireEngineDriver(storedConnection);
systemConnection = await connectUtility(driver, storedConnection, 'app');
dbhan = await connectUtility(driver, storedConnection, 'app');
for (const [resolve] of afterConnectCallbacks) {
resolve();
}
@@ -210,7 +210,7 @@ async function handleConnect(connection) {
// }
function waitConnected() {
if (systemConnection) return Promise.resolve();
if (dbhan) return Promise.resolve();
return new Promise((resolve, reject) => {
afterConnectCallbacks.push([resolve, reject]);
});
@@ -230,7 +230,7 @@ async function handleStartProfiler({ jslid }) {
const writer = new TableWriter();
writer.initializeFromReader(jslid);
currentProfiler = await driver.startProfiler(systemConnection, {
currentProfiler = await driver.startProfiler(dbhan, {
row: data => writer.rowFromReader(data),
});
currentProfiler.writer = writer;
@@ -241,7 +241,7 @@ async function handleStopProfiler({ jslid }) {
const driver = requireEngineDriver(storedConnection);
currentProfiler.writer.close();
driver.stopProfiler(systemConnection, currentProfiler);
driver.stopProfiler(dbhan, currentProfiler);
currentProfiler = null;
}
@@ -304,7 +304,7 @@ async function handleExecuteReader({ jslid, sql, fileName }) {
const writer = new TableWriter();
writer.initializeFromReader(jslid);
const reader = await driver.readQuery(systemConnection, sql);
const reader = await driver.readQuery(dbhan, sql);
reader.on('data', data => {
writer.rowFromReader(data);
@@ -340,10 +340,12 @@ function start() {
lastPing = new Date().getTime();
setInterval(() => {
setInterval(async () => {
const time = new Date().getTime();
if (time - lastPing > 25 * 1000) {
logger.info('Session not alive, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);
}
@@ -362,6 +364,8 @@ function start() {
executingScripts == 0
) {
logger.info('Session not active, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);
}
}, 10 * 1000);

View File

@@ -3,7 +3,7 @@ const platformInfo = require('../utility/platformInfo');
const childProcessChecker = require('../utility/childProcessChecker');
const { handleProcessCommunication } = require('../utility/processComm');
const { SSHConnection } = require('../utility/SSHConnection');
const { getLogger } = require('dbgate-tools');
const { getLogger, extractErrorLogData, extractErrorMessage } = require('dbgate-tools');
const logger = getLogger('sshProcess');
@@ -40,13 +40,13 @@ async function handleStart({ connection, tunnelConfig }) {
tunnelConfig,
});
} catch (err) {
logger.error({ err }, 'Error creating SSH tunnel connection:');
logger.error(extractErrorLogData(err), 'Error creating SSH tunnel connection:');
process.send({
msgtype: 'error',
connection,
tunnelConfig,
errorMessage: err.message,
errorMessage: extractErrorMessage(err.message),
});
}
}

View File

@@ -0,0 +1,19 @@
/**
 * Creates a database-model transform that adds an index for every foreign key
 * of every table (index name is the FK constraint name prefixed with `IX_`,
 * over the same columns). Existing indexes are preserved; the input model is
 * not mutated (new database/table objects are returned).
 * @returns {(database: object) => object} transform function for a database model
 */
const autoIndexForeignKeysTransform = () => database => {
  return {
    ...database,
    tables: database.tables.map(table => {
      // Guard both lists: tables without indexes OR without foreignKeys are
      // valid model inputs (previously a missing foreignKeys array crashed).
      const foreignKeys = table.foreignKeys || [];
      return {
        ...table,
        indexes: [
          ...(table.indexes || []),
          ...foreignKeys.map(fk => ({
            constraintName: `IX_${fk.constraintName}`,
            columns: fk.columns.map(x => ({ columnName: x.columnName })),
          })),
        ],
      };
    }),
  };
};
module.exports = autoIndexForeignKeysTransform;

View File

@@ -2,6 +2,13 @@ const EnsureStreamHeaderStream = require('../utility/EnsureStreamHeaderStream');
const Stream = require('stream');
const ColumnMapTransformStream = require('../utility/ColumnMapTransformStream');
/**
* Copies reader to writer. Used for import, export tables and transfer data between tables
* @param {readerType} input - reader object
* @param {writerType} output - writer object
* @param {object} options - options
* @returns {Promise}
*/
function copyStream(input, output, options) {
const { columns } = options || {};

View File

@@ -12,6 +12,7 @@ const { resolveArchiveFolder } = require('../utility/directories');
async function dataDuplicator({
connection,
archive,
folder,
items,
options,
analysedStructure = null,
@@ -19,32 +20,44 @@ async function dataDuplicator({
systemConnection,
}) {
if (!driver) driver = requireEngineDriver(connection);
const pool = systemConnection || (await connectUtility(driver, connection, 'write'));
logger.info(`Connected.`);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
if (!analysedStructure) {
analysedStructure = await driver.analyseFull(pool);
try {
logger.info(`Connected.`);
if (!analysedStructure) {
analysedStructure = await driver.analyseFull(dbhan);
}
const sourceDir = archive
? resolveArchiveFolder(archive)
: folder?.startsWith('archive:')
? resolveArchiveFolder(folder.substring('archive:'.length))
: folder;
const dupl = new DataDuplicator(
dbhan,
driver,
analysedStructure,
items.map(item => ({
name: item.name,
operation: item.operation,
matchColumns: item.matchColumns,
openStream:
item.openStream || (() => jsonLinesReader({ fileName: path.join(sourceDir, `${item.name}.jsonl`) })),
})),
stream,
copyStream,
options
);
await dupl.run();
} finally {
if (!systemConnection) {
await driver.close(dbhan);
}
}
const dupl = new DataDuplicator(
pool,
driver,
analysedStructure,
items.map(item => ({
name: item.name,
operation: item.operation,
matchColumns: item.matchColumns,
openStream:
item.openStream ||
(() => jsonLinesReader({ fileName: path.join(resolveArchiveFolder(archive), `${item.name}.jsonl`) })),
})),
stream,
copyStream,
options
);
await dupl.run();
}
module.exports = dataDuplicator;

View File

@@ -0,0 +1,21 @@
/**
 * Creates a database-model transform that replaces the data type of every
 * column whose type equals `oldType` (case-insensitive) with `newType`.
 * Columns with a different or missing dataType are returned unchanged; the
 * input model is not mutated.
 * @param {string} oldType - data type to replace (case-insensitive match)
 * @param {string} newType - replacement data type
 * @returns {(database: object) => object} transform function for a database model
 */
const dataTypeMapperTransform = (oldType, newType) => database => {
  // Hoist the loop-invariant lowercase conversion out of the per-column check.
  const oldTypeLower = oldType?.toLowerCase();
  return {
    ...database,
    tables: database.tables.map(table => {
      return {
        ...table,
        columns: table.columns.map(column => {
          // Require a concrete match: previously a column without a dataType
          // matched an undefined oldType (undefined === undefined) and was
          // rewritten to newType by accident.
          if (oldTypeLower != null && column.dataType?.toLowerCase() === oldTypeLower) {
            return {
              ...column,
              dataType: newType,
            };
          }
          return column;
        }),
      };
    }),
  };
};
module.exports = dataTypeMapperTransform;

View File

@@ -1,17 +1,73 @@
const generateDeploySql = require('./generateDeploySql');
const executeQuery = require('./executeQuery');
const { ScriptDrivedDeployer } = require('dbgate-datalib');
const connectUtility = require('../utility/connectUtility');
const requireEngineDriver = require('../utility/requireEngineDriver');
const loadModelFolder = require('../utility/loadModelFolder');
const crypto = require('crypto');
async function deployDb({ connection, systemConnection, driver, analysedStructure, modelFolder, loadedDbModel }) {
const { sql } = await generateDeploySql({
connection,
systemConnection,
driver,
analysedStructure,
modelFolder,
loadedDbModel,
});
// console.log('RUNNING DEPLOY SCRIPT:', sql);
await executeQuery({ connection, systemConnection, driver, sql });
/**
* Deploys database model stored in modelFolder (table as yamls) to database
* @param {object} options
* @param {connectionType} options.connection - connection object
* @param {object} options.systemConnection - system connection (result of driver.connect). If not provided, new connection will be created
* @param {object} options.driver - driver object. If not provided, it will be loaded from connection
* @param {object} options.analysedStructure - analysed structure of the database. If not provided, it will be loaded
* @param {string} options.modelFolder - folder with model files (YAML files for tables, SQL files for views, procedures, ...)
* @param {import('dbgate-tools').DatabaseModelFile[]} options.loadedDbModel - loaded database model - collection of yaml and SQL files loaded into array
* @param {function[]} options.modelTransforms - array of functions for transforming model
* @param {object} options.dbdiffOptionsExtra - extra options for dbdiff
* @param {string} options.ignoreNameRegex - regex for ignoring objects by name
* @param {string} options.targetSchema - target schema for deployment
* @param {number} options.maxMissingTablesRatio - maximum ratio of missing tables in database. Safety check, if missing ratio is highe, deploy is stopped (preventing accidental drop of all tables)
*/
async function deployDb({
  connection,
  systemConnection,
  driver,
  analysedStructure,
  modelFolder,
  loadedDbModel,
  modelTransforms,
  dbdiffOptionsExtra,
  ignoreNameRegex = '',
  targetSchema = null,
  maxMissingTablesRatio = undefined,
}) {
  // Resolve driver from the connection when not supplied explicitly.
  if (!driver) driver = requireEngineDriver(connection);
  // Reuse caller's connection when given; otherwise open our own (closed in finally).
  const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
  try {
    // ScriptDrivedDeployer runs the model's pre/post deploy scripts around the
    // generated diff SQL; model source is either the preloaded array or the
    // files loaded from modelFolder (empty list when neither is given).
    // NOTE(review): `crypto` is passed in, presumably for script change
    // detection inside the deployer - confirm in ScriptDrivedDeployer.
    const scriptDeployer = new ScriptDrivedDeployer(
      dbhan,
      driver,
      Array.isArray(loadedDbModel) ? loadedDbModel : modelFolder ? await loadModelFolder(modelFolder) : [],
      crypto
    );
    await scriptDeployer.runPre();
    // Generate the diff SQL against the current structure using the same
    // connection handle, then execute it.
    const { sql } = await generateDeploySql({
      connection,
      systemConnection: dbhan,
      driver,
      analysedStructure,
      modelFolder,
      loadedDbModel,
      modelTransforms,
      dbdiffOptionsExtra,
      ignoreNameRegex,
      targetSchema,
      maxMissingTablesRatio,
    });
    // console.log('RUNNING DEPLOY SCRIPT:', sql);
    await executeQuery({ connection, systemConnection: dbhan, driver, sql, logScriptItems: true });
    await scriptDeployer.runPost();
  } finally {
    // Only close connections we opened ourselves.
    if (!systemConnection) {
      await driver.close(dbhan);
    }
  }
}
module.exports = deployDb;

View File

@@ -0,0 +1,51 @@
const executeQuery = require('./executeQuery');
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const { getLogger, extendDatabaseInfo } = require('dbgate-tools');
const logger = getLogger('dropAllDbObjects');
/**
* Drops all database objects
* @param {object} options
* @param {connectionType} options.connection - connection object
* @param {object} options.systemConnection - system connection (result of driver.connect). If not provided, new connection will be created
* @param {object} options.driver - driver object. If not provided, it will be loaded from connection
* @param {object} options.analysedStructure - analysed structure of the database. If not provided, it will be loaded
* @returns {Promise}
*/
async function dropAllDbObjects({ connection, systemConnection, driver, analysedStructure }) {
  // Resolve driver from the connection when not supplied explicitly.
  if (!driver) driver = requireEngineDriver(connection);
  // Reuse caller's connection when given; otherwise open our own (closed in finally).
  const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
  try {
    logger.info(`Connected.`);

    if (!analysedStructure) {
      analysedStructure = await driver.analyseFull(dbhan);
    }
    analysedStructure = extendDatabaseInfo(analysedStructure);

    const dmp = driver.createDumper();
    // Drop foreign keys first so tables can be dropped in any order.
    for (const table of analysedStructure.tables) {
      for (const fk of table.foreignKeys || []) {
        dmp.dropForeignKey(fk);
      }
    }
    for (const table of analysedStructure.tables) {
      dmp.dropTable(table);
    }
    // Drop every other SQL object category the dumper knows (views,
    // procedures, ...); getSqlObjectSqlName filters out non-object fields.
    for (const field of Object.keys(analysedStructure)) {
      if (dmp.getSqlObjectSqlName(field)) {
        for (const obj of analysedStructure[field]) {
          dmp.dropSqlObject(obj);
        }
      }
    }
    // Fix: execute on the connection we already hold (previously the raw
    // `systemConnection` param was forwarded, so when it was undefined
    // executeQuery opened a second, separate connection).
    await executeQuery({ connection, systemConnection: dbhan, driver, sql: dmp.s, logScriptItems: true });
  } finally {
    // Fix: previously the locally-opened connection was never closed (leak).
    if (!systemConnection) {
      await driver.close(dbhan);
    }
  }
}
module.exports = dropAllDbObjects;

View File

@@ -27,15 +27,23 @@ async function dumpDatabase({
logger.info(`Dumping database`);
if (!driver) driver = requireEngineDriver(connection);
const pool = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
logger.info(`Connected.`);
const dumper = await driver.createBackupDumper(pool, {
outputFile,
databaseName,
schemaName,
});
await doDump(dumper);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
try {
logger.info(`Connected.`);
const dumper = await driver.createBackupDumper(dbhan, {
outputFile,
databaseName,
schemaName,
});
await doDump(dumper);
} finally {
if (!systemConnection) {
await driver.close(dbhan);
}
}
}
module.exports = dumpDatabase;

View File

@@ -1,17 +1,49 @@
const fs = require('fs-extra');
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const { getLogger } = require('dbgate-tools');
const { getLogger, getLimitedQuery } = require('dbgate-tools');
const logger = getLogger('execQuery');
async function executeQuery({ connection = undefined, systemConnection = undefined, driver = undefined, sql }) {
logger.info({ sql }, `Execute query`);
/**
* Executes SQL query
* @param {object} options
* @param {connectionType} [options.connection] - connection object
* @param {object} [options.systemConnection] - system connection (result of driver.connect). If not provided, new connection will be created
* @param {object} [options.driver] - driver object. If not provided, it will be loaded from connection
* @param {string} [options.sql] - SQL query
* @param {string} [options.sqlFile] - SQL file
* @param {boolean} [options.logScriptItems] - whether to log script items instead of whole script
*/
async function executeQuery({
connection = undefined,
systemConnection = undefined,
driver = undefined,
sql,
sqlFile = undefined,
logScriptItems = false,
}) {
if (!logScriptItems) {
logger.info({ sql: getLimitedQuery(sql) }, `Execute query`);
}
if (!driver) driver = requireEngineDriver(connection);
const pool = systemConnection || (await connectUtility(driver, connection, 'script'));
logger.info(`Connected.`);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'script'));
await driver.script(pool, sql);
if (sqlFile) {
logger.debug(`Loading SQL file ${sqlFile}`);
sql = await fs.readFile(sqlFile, { encoding: 'utf-8' });
}
try {
logger.info(`Connected.`);
await driver.script(dbhan, sql, { logScriptItems });
} finally {
if (!systemConnection) {
await driver.close(dbhan);
}
}
}
module.exports = executeQuery;

View File

@@ -6,11 +6,30 @@ const {
extendDatabaseInfo,
modelCompareDbDiffOptions,
enrichWithPreloadedRows,
skipNamesInStructureByRegex,
replaceSchemaInStructure,
filterStructureBySchema,
skipDbGateInternalObjects,
} = require('dbgate-tools');
const importDbModel = require('../utility/importDbModel');
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
/**
 * Generates query for deploying model into database
 * @param {object} options
 * @param {connectionType} options.connection - connection object
 * @param {object} options.systemConnection - system connection (result of driver.connect). If not provided, new connection will be created
 * @param {object} options.driver - driver object. If not provided, it will be loaded from connection
 * @param {object} options.analysedStructure - analysed structure of the database. If not provided, it will be loaded
 * @param {string} options.modelFolder - folder with model files (YAML files for tables, SQL files for views, procedures, ...)
 * @param {import('dbgate-tools').DatabaseModelFile[]} options.loadedDbModel - loaded database model - collection of yaml and SQL files loaded into array
 * @param {function[]} options.modelTransforms - array of functions for transforming model
 * @param {object} options.dbdiffOptionsExtra - extra options for dbdiff
 * @param {string} options.ignoreNameRegex - regex for ignoring objects by name
 * @param {string} options.targetSchema - target schema for deployment
 * @param {number} options.maxMissingTablesRatio - maximum ratio of missing tables in database. Safety check, if missing ratio is higher, deploy is stopped (preventing accidental drop of all tables)
 */
async function generateDeploySql({
  connection,
  systemConnection = undefined,
  // NOTE(review): this parameter line was hidden behind a diff hunk header; inferred from the `if (!driver)` guard below - confirm against upstream
  driver = undefined,
  analysedStructure = undefined,
  modelFolder = undefined,
  loadedDbModel = undefined,
  modelTransforms = undefined,
  dbdiffOptionsExtra = {},
  ignoreNameRegex = '',
  targetSchema = null,
  maxMissingTablesRatio = undefined,
}) {
  if (!driver) driver = requireEngineDriver(connection);
  const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));

  // Without an explicit target schema on a multi-schema engine, diffing would be ambiguous
  // (unless the caller opted out of schema comparison via schemaMode)
  if (
    driver?.dialect?.multipleSchema &&
    !targetSchema &&
    dbdiffOptionsExtra?.['schemaMode'] !== 'ignore' &&
    dbdiffOptionsExtra?.['schemaMode'] !== 'ignoreImplicit'
  ) {
    throw new Error('targetSchema is required for databases with multiple schemas');
  }

  try {
    if (!analysedStructure) {
      analysedStructure = await driver.analyseFull(dbhan);
    }

    let deployedModelSource = loadedDbModel
      ? databaseInfoFromYamlModel(loadedDbModel)
      : await importDbModel(modelFolder);

    if (ignoreNameRegex) {
      analysedStructure = skipNamesInStructureByRegex(analysedStructure, new RegExp(ignoreNameRegex, 'i'));
      deployedModelSource = skipNamesInStructureByRegex(deployedModelSource, new RegExp(ignoreNameRegex, 'i'));
    }
    analysedStructure = skipDbGateInternalObjects(analysedStructure);

    for (const transform of modelTransforms || []) {
      deployedModelSource = transform(deployedModelSource);
    }

    if (targetSchema) {
      deployedModelSource = replaceSchemaInStructure(deployedModelSource, targetSchema);
      analysedStructure = filterStructureBySchema(analysedStructure, targetSchema);
    }

    const deployedModel = generateDbPairingId(extendDatabaseInfo(deployedModelSource));
    const currentModel = generateDbPairingId(extendDatabaseInfo(analysedStructure));
    const opts = {
      ...modelCompareDbDiffOptions,

      // deploy is additive by design - never drop or rename existing objects
      noDropTable: true,
      noDropColumn: true,
      noDropConstraint: true,
      noDropSqlObject: true,
      noRenameTable: true,
      noRenameColumn: true,

      ...dbdiffOptionsExtra,
    };
    const currentModelPaired = matchPairedObjects(deployedModel, currentModel, opts);
    const currentModelPairedPreloaded = await enrichWithPreloadedRows(deployedModel, currentModelPaired, dbhan, driver);

    // Safety check: abort when too many DB tables have no counterpart in the model,
    // which usually means a wrong model folder was deployed by mistake
    if (maxMissingTablesRatio != null) {
      const missingTables = currentModelPaired.tables.filter(
        x => !deployedModel.tables.find(y => y.pairingId == x.pairingId)
      );
      const missingTableCount = missingTables.length;
      const missingTablesRatio = missingTableCount / (currentModelPaired.tables.length || 1);
      if (missingTablesRatio > maxMissingTablesRatio) {
        throw new Error(`Too many missing tables (${missingTablesRatio * 100}%), aborting deploy`);
      }
    }

    const res = getAlterDatabaseScript(
      currentModelPairedPreloaded,
      deployedModel,
      opts,
      currentModelPairedPreloaded,
      deployedModel,
      driver
    );
    return res;
  } finally {
    // Close only connections we opened ourselves; caller owns systemConnection
    if (!systemConnection) {
      await driver.close(dbhan);
    }
  }
}

module.exports = generateDeploySql;

View File

@@ -9,14 +9,28 @@ const { getLogger } = require('dbgate-tools');
const logger = getLogger('importDb');
class ImportStream extends stream.Transform {
constructor(pool, driver) {
constructor(dbhan, driver) {
super({ objectMode: true });
this.pool = pool;
this.dbhan = dbhan;
this.driver = driver;
this.writeQueryStream = null;
}
async _transform(chunk, encoding, cb) {
try {
await this.driver.script(this.pool, chunk);
if (chunk.specialMarker == 'copy_stdin_start') {
this.writeQueryStream = await this.driver.writeQueryFromStream(this.dbhan, chunk.text);
} else if (chunk.specialMarker == 'copy_stdin_line') {
this.writeQueryStream.write(chunk.text);
} else if (chunk.specialMarker == 'copy_stdin_end') {
this.writeQueryStream.end();
await new Promise((resolve, reject) => {
this.writeQueryStream.on('finish', resolve);
this.writeQueryStream.on('error', reject);
});
this.writeQueryStream = null;
} else {
await this.driver.script(this.dbhan, chunk.text, { queryOptions: { importSqlDump: true } });
}
} catch (err) {
this.emit('error', err.message);
}
/**
 * Imports SQL dump file into database.
 * NOTE(review): the signature line was truncated in the diff view; the parameter list
 * below is reconstructed from the names used in the body - confirm against upstream.
 * @param {object} options
 * @param {connectionType} [options.connection] - connection object
 * @param {object} [options.systemConnection] - system connection (result of driver.connect). If not provided, new connection will be created
 * @param {object} [options.driver] - driver object. If not provided, it will be loaded from connection
 * @param {string} [options.inputFile] - file name or URL of the SQL dump
 */
async function importDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, inputFile }) {
  logger.info(`Importing database`);

  if (!driver) driver = requireEngineDriver(connection);
  const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
  try {
    logger.info(`Connected.`);

    logger.info(`Input file: ${inputFile}`);
    // download() resolves URLs to a local temporary file; local paths pass through
    const downloadedFile = await download(inputFile);
    logger.info(`Downloaded file: ${downloadedFile}`);

    const fileStream = fs.createReadStream(downloadedFile, 'utf-8');
    // returnRichInfo makes the splitter emit objects with specialMarker fields,
    // which ImportStream uses to route COPY ... FROM STDIN payload lines
    const splittedStream = splitQueryStream(fileStream, {
      ...driver.getQuerySplitterOptions('import'),
      returnRichInfo: true,
    });
    const importStream = new ImportStream(dbhan, driver);
    // @ts-ignore
    splittedStream.pipe(importStream);
    await awaitStreamEnd(importStream);
  } finally {
    // Close only connections we opened ourselves; caller owns systemConnection
    if (!systemConnection) {
      await driver.close(dbhan);
    }
  }
}

module.exports = importDatabase;

View File

@@ -6,7 +6,7 @@ const copyStream = require('./copyStream');
const fakeObjectReader = require('./fakeObjectReader');
const consoleObjectWriter = require('./consoleObjectWriter');
const jsonLinesWriter = require('./jsonLinesWriter');
const jsonArrayWriter = require('./jsonArrayWriter');
const jsonWriter = require('./jsonWriter');
const jsonLinesReader = require('./jsonLinesReader');
const sqlDataWriter = require('./sqlDataWriter');
const jslDataReader = require('./jslDataReader');
@@ -29,6 +29,12 @@ const modifyJsonLinesReader = require('./modifyJsonLinesReader');
const dataDuplicator = require('./dataDuplicator');
const dbModelToJson = require('./dbModelToJson');
const jsonToDbModel = require('./jsonToDbModel');
const jsonReader = require('./jsonReader');
const dataTypeMapperTransform = require('./dataTypeMapperTransform');
const sqlTextReplacementTransform = require('./sqlTextReplacementTransform');
const autoIndexForeignKeysTransform = require('./autoIndexForeignKeysTransform');
const generateDeploySql = require('./generateDeploySql');
const dropAllDbObjects = require('./dropAllDbObjects');
const dbgateApi = {
queryReader,
@@ -37,8 +43,9 @@ const dbgateApi = {
tableReader,
copyStream,
jsonLinesWriter,
jsonArrayWriter,
jsonLinesReader,
jsonReader,
jsonWriter,
sqlDataWriter,
fakeObjectReader,
consoleObjectWriter,
@@ -61,6 +68,11 @@ const dbgateApi = {
dataDuplicator,
dbModelToJson,
jsonToDbModel,
dataTypeMapperTransform,
sqlTextReplacementTransform,
autoIndexForeignKeysTransform,
generateDeploySql,
dropAllDbObjects,
};
requirePlugin.initializeDbgateApi(dbgateApi);

View File

@@ -1,52 +0,0 @@
const { getLogger } = require('dbgate-tools');
const fs = require('fs');
const stream = require('stream');
const logger = getLogger('jsonArrayWriter');
/**
 * Object-mode transform that serializes incoming row objects into a single
 * JSON array written as text: `[\n row,\n row \n]\n`, or `[]\n` when no rows arrive.
 * The very first chunk is dropped when it carries `__isStreamHeader`.
 */
class StringifyStream extends stream.Transform {
  constructor() {
    super({ objectMode: true });
    this.wasHeader = false;
    this.wasRecord = false;
  }

  _transform(chunk, encoding, done) {
    // Only the first chunk may be a header; suppress it from the output.
    const suppressed = !this.wasHeader && chunk.__isStreamHeader;
    this.wasHeader = true;

    if (!suppressed) {
      // Opening bracket before the first row, comma separator before the rest.
      this.push(this.wasRecord ? ',\n' : '[\n');
      this.wasRecord = true;
      this.push(JSON.stringify(chunk));
    }
    done();
  }

  _flush(done) {
    // Close the array - or emit an empty one when nothing was written.
    this.push(this.wasRecord ? '\n]\n' : '[]\n');
    done();
  }
}
/**
 * Writer factory: returns an object-mode stream that serializes rows written
 * to it as a JSON array into the given file.
 * @param {object} options
 * @param {string} options.fileName - target file path
 * @param {string} [options.encoding='utf-8'] - output file encoding
 * @returns {Promise<stream.Transform>} stream to pipe rows into; its `finisher`
 *   property holds the underlying file stream to await for completion
 */
async function jsonArrayWriter({ fileName, encoding = 'utf-8' }) {
  logger.info(`Writing file ${fileName}`);
  const target = fs.createWriteStream(fileName, encoding);
  const serializer = new StringifyStream();
  serializer.pipe(target);
  // Consumers wait on `finisher` so the file is fully flushed before continuing.
  serializer['finisher'] = target;
  return serializer;
}

View File

@@ -2,6 +2,7 @@ const fs = require('fs');
const stream = require('stream');
const byline = require('byline');
const { getLogger } = require('dbgate-tools');
const download = require('./download');
const logger = getLogger('jsonLinesReader');
class ParseStream extends stream.Transform {
@@ -32,11 +33,21 @@ class ParseStream extends stream.Transform {
}
}
/**
 * Reader function, which reads a JSONL file or URL. JSONL format - text file, every line is a JSON encoded row.
* @param {Object} options
* @param {string} options.fileName - file name or URL
* @param {string} options.encoding - encoding of the file
* @param {number} options.limitRows - maximum number of rows to read
* @returns {Promise<readerType>} - reader object
*/
async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) {
logger.info(`Reading file ${fileName}`);
const downloadedFile = await download(fileName);
const fileStream = fs.createReadStream(
fileName,
downloadedFile,
// @ts-ignore
encoding
);

Some files were not shown because too many files have changed in this diff Show More