Compare commits

...

266 Commits

Author SHA1 Message Date
Nybkox
1e67ca3794 feat: default value for language select 2025-03-04 22:05:04 +01:00
Nybkox
ceebf6dbe1 chore: add logging to translations:check ok scenario 2025-03-04 22:05:04 +01:00
Nybkox
8d4f9fd953 fix: use simple languagnes names 2025-03-04 22:05:04 +01:00
Nybkox
1c3032068e feat: remove unused by default when extracting 2025-03-04 21:17:01 +01:00
Nybkox
7b4b72166f chore: move sortJsonKeys helper 2025-03-04 20:54:42 +01:00
Nybkox
707e5bb8b0 chore: update git ignore 2025-03-04 20:53:53 +01:00
Nybkox
ad5d364c57 chore: extract translations 2025-03-04 20:53:53 +01:00
Nybkox
138fadf672 feat: compile messages 2025-03-04 20:53:53 +01:00
Nybkox
82eabc41fe feat: sort translation json keys alphabetically 2025-03-04 20:53:52 +01:00
Nybkox
3e6aab6b00 feat: basic translations to ui 2025-03-04 20:53:52 +01:00
Nybkox
5396b3f1fb feat: add translations:check command 2025-03-04 20:53:52 +01:00
Nybkox
b1ba887922 feat: separate remove-unused command 2025-03-04 20:53:52 +01:00
Nybkox
93a1c593fe feat: add basic language switch to settings 2025-03-04 20:53:52 +01:00
Nybkox
b7044248cb feat: add translations api for fe 2025-03-04 20:53:52 +01:00
Nybkox
ea5e2f660b feat: add --removeUnused flag to extract translations 2025-03-04 20:53:52 +01:00
Nybkox
e9779a3d2f feat: add add-missing command to translations cli 2025-03-04 20:53:52 +01:00
Nybkox
1c6ec0f8e3 refactor: add index.js to translations-cli, add translations:extract to package.json 2025-03-04 20:53:52 +01:00
Nybkox
84bd81e525 feat: throw when found the same translation key with different default values 2025-03-04 20:53:52 +01:00
Nybkox
a84cbee9db feat: basic translations extract 2025-03-04 20:53:52 +01:00
CI workflows
97b16c8c0c chore: auto-update github workflows 2025-03-04 15:43:56 +00:00
CI workflows
0a6a35b022 Update pro ref 2025-03-04 15:43:43 +00:00
SPRINX0\prochazka
6565b4101b SYNC: try to fix local e2e tests 2025-03-04 15:43:27 +00:00
SPRINX0\prochazka
53dc50c0dd Merge branch 'feature/impexp' 2025-03-04 15:55:10 +01:00
SPRINX0\prochazka
7b56485c74 report progress for quick exports 2025-03-04 15:51:43 +01:00
SPRINX0\prochazka
cfc9b809fc key hack - correct reporting progress rows 2025-03-04 15:30:18 +01:00
SPRINX0\prochazka
4015e2566e import/export progress reporter 2025-03-04 15:21:22 +01:00
SPRINX0\prochazka
1d474a967c report read row count, if written row count not available 2025-03-04 15:17:58 +01:00
SPRINX0\prochazka
bffc34485a report wwritten rows 2025-03-04 15:08:24 +01:00
SPRINX0\prochazka
3bf22a8606 import/export log messages 2025-03-04 14:29:52 +01:00
SPRINX0\prochazka
257ffa3cc4 show import/export error 2025-03-04 14:26:11 +01:00
SPRINX0\prochazka
0c104d5d29 progress indicator in exports 2025-03-04 13:55:36 +01:00
Jan Prochazka
2a59faec17 Merge pull request #1063 from nyaaao/cassandra-local-data-center-env
feat: configure cassandra local datacenter via environment variable
2025-03-04 12:55:20 +01:00
nyaaao
00534f7edd feat: allow specifying cassandra local datacenter via environment variable 2025-03-04 13:28:29 +02:00
SPRINX0\prochazka
4006f03444 removed invalid param 2025-03-04 10:06:05 +01:00
SPRINX0\prochazka
beca5c6e45 using stream.pipeline for better handling errors 2025-03-04 09:51:29 +01:00
SPRINX0\prochazka
69f781d3de handle copyStreamError 2025-03-04 08:58:04 +01:00
SPRINX0\prochazka
3f3160406f propagate error in mongo stream 2025-03-03 16:05:12 +01:00
CI workflows
8067cff9bd chore: auto-update github workflows 2025-03-03 14:15:13 +00:00
CI workflows
7d776bb2af Update pro ref 2025-03-03 14:14:57 +00:00
SPRINX0\prochazka
880bb0d7cb SYNC: Change or add option to disable Shift + Space for AI Assistant #1060 2025-03-03 14:14:41 +00:00
SPRINX0\prochazka
3aea01fb78 v6.2.2-packer-beta.4 2025-03-03 10:34:57 +01:00
SPRINX0\prochazka
7025f4701d Merge branch 'master' of https://github.com/dbgate/dbgate 2025-03-03 10:34:32 +01:00
CI workflows
9d98b06132 chore: auto-update github workflows 2025-03-03 09:34:17 +00:00
SPRINX0\prochazka
47cb83c1ff Merge branch 'master' of https://github.com/dbgate/dbgate 2025-03-03 10:34:12 +01:00
CI workflows
6ff8847251 Update pro ref 2025-03-03 09:34:01 +00:00
SPRINX0\prochazka
9d9367d127 v6.2.2-packer-beta.3 2025-03-03 10:21:24 +01:00
SPRINX0\prochazka
ae7c1ae666 Merge branch 'master' of https://github.com/dbgate/dbgate 2025-03-03 10:21:08 +01:00
CI workflows
44fe3cb7bd chore: auto-update github workflows 2025-03-03 09:16:39 +00:00
CI workflows
bc783eb511 Update pro ref 2025-03-03 09:16:24 +00:00
SPRINX0\prochazka
0491bd5364 v6.2.2-packer-beta.2 2025-03-03 09:42:30 +01:00
SPRINX0\prochazka
cf47cccc97 v6.2.2-packer.1 2025-03-03 09:35:47 +01:00
CI workflows
c65ab35107 chore: auto-update github workflows 2025-03-03 08:33:14 +00:00
CI workflows
b8ea16f4d4 Update pro ref 2025-03-03 08:32:57 +00:00
Jan Prochazka
b40b5f0c1c v6.2.1 2025-02-28 10:55:29 +01:00
Jan Prochazka
b193e29fdb changelog 2025-02-28 10:55:07 +01:00
Jan Prochazka
1b602c134f v6.2.1-premium-beta.11 2025-02-28 10:31:38 +01:00
Jan Prochazka
e462c6d412 v6.2.1-premium-beta.10 2025-02-28 10:28:55 +01:00
Jan Prochazka
d0c83f3c96 Merge pull request #1058 from dbgate/feature/xml-fixes
Feature/xml fixes
2025-02-28 10:28:21 +01:00
Jan Prochazka
0547aa2095 xml format indent 2025-02-28 10:20:18 +01:00
Nybkox
322907b972 feat: add themes to xml view 2025-02-27 17:12:27 +01:00
Nybkox
189b9a7ad6 feat: add xml-formatter 2025-02-27 15:55:06 +01:00
SPRINX0\prochazka
1d02927f6b scroll in xml cell data view 2025-02-27 12:44:34 +01:00
CI workflows
2ad739419a chore: auto-update github workflows 2025-02-27 09:06:11 +00:00
SPRINX0\prochazka
093ace1a89 updated build version 2025-02-27 10:05:45 +01:00
SPRINX0\prochazka
4c982a762b v6.2.1-premium-beta.9 2025-02-27 09:33:08 +01:00
SPRINX0\prochazka
a30efd6e81 allow SQLite on web #956 2025-02-27 09:26:38 +01:00
SPRINX0\prochazka
1c0fc0bff2 removed onsolete code 2025-02-27 08:21:13 +01:00
SPRINX0\prochazka
7da0c204df Merge branch 'feature/impexp' 2025-02-27 08:20:05 +01:00
SPRINX0\prochazka
a35421d8ab import fixed 2025-02-27 08:19:49 +01:00
SPRINX0\prochazka
450ce53dcf import WIP 2025-02-26 16:43:24 +01:00
SPRINX0\prochazka
e80ccfcb19 fixed open query from datagrid 2025-02-26 13:37:21 +01:00
SPRINX0\prochazka
9ad612bd78 Merge branch 'master' of https://github.com/dbgate/dbgate 2025-02-26 10:26:03 +01:00
CI workflows
288857999f Update pro ref 2025-02-26 09:25:50 +00:00
SPRINX0\prochazka
217823652c v6.2.1-beta.8 2025-02-26 10:24:32 +01:00
SPRINX0\prochazka
268e1670c7 v6.2.1-premium-beta.7 2025-02-26 10:23:53 +01:00
SPRINX0\prochazka
ce7b155f72 remove armv7l app build 2025-02-26 10:20:56 +01:00
SPRINX0\prochazka
2a71d84580 Merge branch 'master' of https://github.com/dbgate/dbgate 2025-02-26 10:14:03 +01:00
CI workflows
8f73dce934 chore: auto-update github workflows 2025-02-26 09:13:58 +00:00
CI workflows
5953e32988 Update ref to 6b5b092b9da447d89e64b30bfa2910833bf73312 2025-02-26 09:13:38 +00:00
SPRINX0\prochazka
df74322b05 v6.2.1-premium-beta.6 2025-02-26 10:02:05 +01:00
SPRINX0\prochazka
22d941ae45 v6.2.1-beta.5 2025-02-26 08:37:19 +01:00
CI workflows
4fd7c1c4e9 chore: auto-update github workflows 2025-02-26 07:37:04 +00:00
SPRINX0\prochazka
ee1aa6d42c workflow change 2025-02-26 08:36:39 +01:00
SPRINX0\prochazka
1343aba26e try to fix docker build 2025-02-26 08:33:50 +01:00
SPRINX0\prochazka
1483d84d8c v6.2.1-beta.4 2025-02-25 18:38:49 +01:00
CI workflows
02d58d66c9 chore: auto-update github workflows 2025-02-25 17:38:32 +00:00
SPRINX0\prochazka
2eb6ca28f7 full container - removed linux/arm/v7 docker platform 2025-02-25 18:38:12 +01:00
SPRINX0\prochazka
7575a9ce02 v6.2.1-beta.3 2025-02-25 18:17:00 +01:00
CI workflows
039b832be2 chore: auto-update github workflows 2025-02-25 17:16:32 +00:00
SPRINX0\prochazka
e0ed025507 added setup qemu 2025-02-25 18:15:52 +01:00
SPRINX0\prochazka
f2bd08f02a mixed mongoDB export #1044 2025-02-25 17:59:31 +01:00
SPRINX0\prochazka
63314b54bc v6.2.1-premium-beta.2 2025-02-25 17:24:36 +01:00
SPRINX0\prochazka
65165cd15f v6.2.1-beta.1 2025-02-25 16:58:44 +01:00
SPRINX0\prochazka
d5ddbca067 Merge branch 'master' of https://github.com/dbgate/dbgate 2025-02-25 15:20:07 +01:00
SPRINX0\prochazka
517027e1fe SYNC: clearTestingData 2025-02-25 15:19:58 +01:00
SPRINX0\prochazka
b843ccc898 SYNC: skip AI test 2025-02-25 14:11:43 +00:00
SPRINX0\prochazka
fffd81267d SYNC: sqlite - passed transaction test 2025-02-25 14:11:41 +00:00
SPRINX0\prochazka
7d26dd5348 upgraded better-sqlite3 2025-02-25 14:33:56 +01:00
Jan Prochazka
a4d3189dac import models tests fixed 2025-02-25 13:58:03 +01:00
SPRINX0\prochazka
ec02743f83 SYNC: test fix 2025-02-25 11:38:55 +00:00
CI workflows
10e807751a chore: auto-update github workflows 2025-02-25 11:18:31 +00:00
SPRINX0\prochazka
a8e827b5e2 fixed port for e2e-mssql 2025-02-25 12:18:11 +01:00
CI workflows
f0eaba5857 chore: auto-update github workflows 2025-02-25 11:13:14 +00:00
SPRINX0\prochazka
7516b056a5 fixed workflow 2025-02-25 12:12:56 +01:00
CI workflows
312c3b8295 chore: auto-update github workflows 2025-02-25 11:09:55 +00:00
SPRINX0\prochazka
0a777dc909 fix 2025-02-25 12:09:36 +01:00
CI workflows
ec24376608 chore: auto-update github workflows 2025-02-25 11:06:08 +00:00
SPRINX0\prochazka
c9f45da217 fix 2025-02-25 12:05:10 +01:00
SPRINX0\prochazka
34cfbabf51 fix 2025-02-25 11:05:29 +01:00
SPRINX0\prochazka
f7e7a4c99c fix 2025-02-25 11:02:09 +01:00
SPRINX0\prochazka
58c318b5cf process templates fix 2025-02-25 11:00:31 +01:00
SPRINX0\prochazka
10b1644d8d fix 2025-02-25 10:57:22 +01:00
SPRINX0\prochazka
599f59fc39 test change 2025-02-25 10:55:35 +01:00
SPRINX0\prochazka
da4b8f6cb9 Merge branch 'master' of https://github.com/dbgate/dbgate 2025-02-25 10:53:19 +01:00
SPRINX0\prochazka
5f2e882ed9 process templates 2025-02-25 10:53:13 +01:00
CI workflows
f20abe8878 Update ref to 83c1c85eb2761a642c17b2f0f882970718e0afca 2025-02-25 09:45:10 +00:00
SPRINX0\prochazka
a6ab37f21c SYNC: transaction test works for oracle 2025-02-25 09:28:12 +00:00
SPRINX0\prochazka
13fc3ee25e SYNC: fixed test 2025-02-25 09:28:11 +00:00
SPRINX0\prochazka
2268086eac SYNC: autocommit icons 2025-02-25 09:28:10 +00:00
SPRINX0\prochazka
9e9dec5389 SYNC: autocommit for Oracle 2025-02-25 09:28:08 +00:00
SPRINX0\prochazka
11d193a6dd SYNC: commit/rollback - control commands 2025-02-25 09:28:07 +00:00
SPRINX0\prochazka
f81207737c SYNC: INSERT SQL template 2025-02-25 09:28:06 +00:00
SPRINX0\prochazka
191c25a26b SYNC: autocommit WIP 2025-02-24 16:13:12 +00:00
Jan Prochazka
9c7727b7f3 oracle insert bulk inserts fix 2025-02-24 16:28:19 +01:00
Jan Prochazka
8bea9b8312 import data fixes 2025-02-24 16:14:51 +01:00
SPRINX0\prochazka
805a063fa1 SYNC: fixes 2025-02-24 15:14:22 +00:00
SPRINX0\prochazka
20a1cc89ae SYNC: import model+data test 2025-02-24 14:27:20 +00:00
SPRINX0\prochazka
963994b1e5 SYNC: some fixes 2025-02-24 14:19:17 +00:00
SPRINX0\prochazka
afcb226111 SYNC: mutli-sql tests 2025-02-24 13:18:52 +00:00
SPRINX0\prochazka
5b246fe44c SYNC: multi tests 2025-02-24 12:46:20 +00:00
SPRINX0\prochazka
d79f82e9b8 SYNC: fixed test 2025-02-24 12:27:03 +00:00
SPRINX0\prochazka
bdd9dc8c9d SYNC: multi-sql 2025-02-24 11:21:02 +00:00
Veronika Kincova
8bdd24aa1e SYNC: [skip ci] Drop table test 2025-02-24 10:23:50 +00:00
Veronika Kincova
13c0da5c8a SYNC: [skip ci] Only fix 2025-02-24 10:07:07 +00:00
Veronika Kincova
d31cfb15db SYNC: [skip ci] Backup - Truncate table test 2025-02-24 10:07:02 +00:00
SPRINX0\prochazka
8453bd3c26 SYNC: [skip ci] cy:open in root 2025-02-24 08:25:41 +00:00
SPRINX0\prochazka
457bf311c3 Commit/rollback in query editor #1039 2025-02-21 17:30:16 +01:00
SPRINX0\prochazka
ea2ec9ef54 refresh testid 2025-02-21 16:50:59 +01:00
SPRINX0\prochazka
d0c84ea469 better workflow for askUser, when closing window 2025-02-21 16:11:23 +01:00
SPRINX0\prochazka
cdec56b661 SYNC: [skip ci] Show server name alongside database name in title of the tab group #1041 2025-02-21 14:08:32 +00:00
SPRINX0\prochazka
8bc2d7cb05 update ref 2025-02-21 13:42:11 +01:00
SPRINX0\prochazka
21f3de0a18 Merge branch 'master' of https://github.com/dbgate/dbgate 2025-02-21 13:30:42 +01:00
SPRINX0\prochazka
65b497c9f1 update pro ref 2025-02-21 13:30:40 +01:00
SPRINX0\prochazka
f37f1e794e SYNC: [skip ci] fixed FK joining #1051 2025-02-21 12:04:30 +00:00
SPRINX0\prochazka
0096a40132 removed obsolete query designer 2025-02-21 12:50:53 +01:00
SPRINX0\prochazka
b6b9b67836 fixed crash after filtering mongo array columns #1049 2025-02-21 12:38:22 +01:00
SPRINX0\prochazka
2b80a98a96 fix 2025-02-21 12:28:15 +01:00
SPRINX0\prochazka
8c5d47bfa0 run with push 2025-02-21 10:37:18 +01:00
SPRINX0\prochazka
00c0a0505f run with push 2025-02-21 10:28:17 +01:00
SPRINX0\prochazka
01ba8b4f3f commit info 2025-02-21 10:24:42 +01:00
SPRINX0\prochazka
919249215b fix 2025-02-21 10:20:41 +01:00
SPRINX0\prochazka
a62394a67c changed used secret 2025-02-21 10:13:56 +01:00
SPRINX0\prochazka
332fff279b fix 2025-02-21 09:53:20 +01:00
SPRINX0\prochazka
27df4c435f use different secret 2025-02-21 09:52:30 +01:00
SPRINX0\prochazka
a9998ebca7 tmp 2025-02-21 09:38:23 +01:00
SPRINX0\prochazka
82a8baabef fix 2025-02-21 09:37:17 +01:00
SPRINX0\prochazka
2af6d4ef29 fix 2025-02-21 09:35:56 +01:00
SPRINX0\prochazka
c2526c1e2d diflow config 2025-02-21 09:31:35 +01:00
SPRINX0\prochazka
6021fa721e diflow CI 2025-02-19 16:44:13 +01:00
SPRINX0\prochazka
0bd59dc0f8 filter by column data type fix 2025-02-17 11:29:31 +01:00
Jan Prochazka
1af0c34ac0 Merge branch 'master' of github.com:dbgate/dbgate 2025-02-15 09:35:28 +01:00
Jan Prochazka
77f5b6d0c8 cassandra in readme 2025-02-15 09:34:53 +01:00
SPRINX0\prochazka
cb3fdf1112 changelog 2025-02-14 15:05:45 +01:00
SPRINX0\prochazka
482d3d9188 v6.2.0 2025-02-14 14:56:52 +01:00
SPRINX0\prochazka
bb4c64f478 v6.1.7-premium-beta.10 2025-02-14 14:34:52 +01:00
SPRINX0\prochazka
a9d6e42add changelog 2025-02-14 14:34:08 +01:00
SPRINX0\prochazka
5fa6cf8eca Revert "build zip for mac (for autoupgrade)"
This reverts commit 6388129e27.
2025-02-14 14:27:19 +01:00
SPRINX0\prochazka
7c0de7b634 v6.1.7-beta.9 2025-02-14 14:06:13 +01:00
SPRINX0\prochazka
691f3238c5 v6.1.7-premium-beta.8 2025-02-14 14:05:56 +01:00
SPRINX0\prochazka
43163c787d renamed file 2025-02-14 14:04:25 +01:00
SPRINX0\prochazka
a86693d950 v6.1.7-premium-beta.7 2025-02-14 13:56:07 +01:00
SPRINX0\prochazka
6388129e27 build zip for mac (for autoupgrade) 2025-02-14 13:55:47 +01:00
SPRINX0\prochazka
7788806a8d update pro ref 2025-02-14 13:50:07 +01:00
SPRINX0\prochazka
30037dad83 better connection error reporting 2025-02-14 13:27:56 +01:00
SPRINX0\prochazka
d49c7d5e45 workflows 2025-02-14 12:49:11 +01:00
SPRINX0\prochazka
502109cdcb publish new plugins 2025-02-14 12:48:17 +01:00
SPRINX0\prochazka
3de775a163 ai assistant UX 2025-02-14 12:28:02 +01:00
SPRINX0\prochazka
d4de917895 AI UX 2025-02-14 08:57:20 +01:00
SPRINX0\prochazka
35cb718f1f last used actions changed in switch button 2025-02-13 16:23:18 +01:00
SPRINX0\prochazka
60a72f4d45 fix data grid join problem #565 2025-02-13 15:54:40 +01:00
SPRINX0\prochazka
6ae536d035 datagrid: open real query in case of error 2025-02-13 15:47:55 +01:00
SPRINX0\prochazka
23daa7a91c fixes 2025-02-13 15:32:36 +01:00
SPRINX0\prochazka
771f93b5fa updated pro ref 2025-02-13 14:35:31 +01:00
SPRINX0\prochazka
19826636c7 uncommented builds 2025-02-12 14:03:49 +01:00
SPRINX0\prochazka
57ba9b52ba v6.1.7-packer-beta.6 2025-02-12 13:57:26 +01:00
SPRINX0\prochazka
38c0d9bbc3 pro update 2025-02-12 13:57:01 +01:00
SPRINX0\prochazka
5fb3e0b941 v6.1.7-packer-beta.5 2025-02-12 13:43:41 +01:00
SPRINX0\prochazka
9aa09d138a update pro ref 2025-02-12 13:43:19 +01:00
SPRINX0\prochazka
f48efcc02d fix 2025-02-12 13:42:58 +01:00
SPRINX0\prochazka
1703e9e8c6 v6.1.7-packer-beta.4 2025-02-12 12:05:53 +01:00
SPRINX0\prochazka
940cf02935 build cloud 2025-02-12 12:05:40 +01:00
SPRINX0\prochazka
355aefc62a delete old azure VMs 2025-02-12 12:02:06 +01:00
SPRINX0\prochazka
0c0c9b2dad v6.1.7-packer-beta.3 2025-02-12 10:38:00 +01:00
SPRINX0\prochazka
d3df844917 update pro ref 2025-02-12 10:37:36 +01:00
SPRINX0\prochazka
e227f1f182 build script 2025-02-12 10:36:54 +01:00
SPRINX0\prochazka
f97b49bc53 Merge branch 'master' of https://github.com/dbgate/dbgate 2025-02-12 09:47:50 +01:00
SPRINX0\prochazka
788b6def6c azure ubuntu support 2025-02-12 09:47:44 +01:00
Jan Prochazka
8550707485 Merge pull request #1038 from dbgate/feature/xml-preview
feat: add xml preview
2025-02-11 17:03:18 +01:00
Nybkox
aa8dfa1c87 feat: add xml preview 2025-02-11 16:51:39 +01:00
Jan Prochazka
7b68dd0f47 Merge pull request #1037 from dbgate/feature/fix-modal-scroll
style(ModalBase): keep header and footer in place, scroll only content
2025-02-11 12:20:41 +01:00
Nybkox
f4c9c880bc style(ModalBase): keep header and footer in place, scroll only content 2025-02-11 12:19:04 +01:00
Jan Prochazka
cac3e6efec Merge pull request #1036 from dbgate/feature/fix-mysql-triggers-creation
fix: correct sql for creating new trigger in ui for mysql
2025-02-11 12:16:03 +01:00
Nybkox
d76786e344 fix: correct sql for creating new trigger in ui for mysql 2025-02-11 11:59:35 +01:00
Nybkox
171b967cc4 style(ModalBase): add max-height to normal modals 2025-02-11 11:54:31 +01:00
SPRINX0\prochazka
b5766494e1 try to fix cypress test 2025-02-11 10:54:44 +01:00
SPRINX0\prochazka
b72af50a85 v6.1.7-premium-beta.2 2025-02-11 10:45:39 +01:00
Jan Prochazka
63b66fe301 Merge pull request #1010 from dbgate/feature/cassandra
Feature/cassandra
2025-02-11 10:40:03 +01:00
Nybkox
244cb79774 fix: conditionally remove table aliases from column expressions 2025-02-11 09:54:55 +01:00
SPRINX0\prochazka
8d20b7d19f Merge branch 'master' into feature/cassandra 2025-02-11 08:37:15 +01:00
SPRINX0\prochazka
dc9da0b866 data modification test 2025-02-11 08:22:59 +01:00
Jan Prochazka
3b72a9751c fixed import test for oracle 2025-02-10 16:48:10 +01:00
SPRINX0\prochazka
bb3cb23ee0 Merge branch 'master' into feature/cassandra 2025-02-10 15:27:45 +01:00
SPRINX0\prochazka
737436d7e7 fix 2025-02-10 15:23:32 +01:00
SPRINX0\prochazka
bccfa79bb6 ref update 2025-02-10 15:23:14 +01:00
SPRINX0\prochazka
6bf5c58ec1 fix 2025-02-10 15:22:49 +01:00
SPRINX0\prochazka
151230098c fix 2025-02-10 14:29:10 +01:00
SPRINX0\prochazka
5debd8e115 fix 2025-02-10 14:28:52 +01:00
SPRINX0\prochazka
83633536db Merge branch 'master' into feature/cassandra 2025-02-10 14:23:16 +01:00
SPRINX0\prochazka
c187d902d2 Merge branch 'ai-assistant' 2025-02-10 14:19:46 +01:00
Nybkox
e2547c7295 feat: add test for importing data to an existing table 2025-02-06 11:11:06 +01:00
Nybkox
359557e321 feat: pass dataType to bulk insert value put 2025-02-06 11:09:14 +01:00
Nybkox
94cccf9e17 fix(cassandra): check column name correctly 2025-02-06 11:05:24 +01:00
Nybkox
13e4c2de03 fix(createBulkInsertStreamBase): save checked structure 2025-02-06 11:04:41 +01:00
Nybkox
844d7025fa feat(cassandra): parse error line from message 2025-02-06 10:38:12 +01:00
Nybkox
0f01f35d19 fix(cassandra): use put raw for numeric data types 2025-02-06 10:09:19 +01:00
Nybkox
505c219cfd chore: remove test logging 2025-02-06 09:49:14 +01:00
Nybkox
c40d745a3a fix: update deploy databse skip tests conditions 2025-02-06 09:43:46 +01:00
Nybkox
13ee14c752 fix: do not insert second id column 2025-02-06 09:39:49 +01:00
Nybkox
1d1e488755 feat: add cassandra to workflows 2025-02-06 09:18:38 +01:00
Nybkox
d9537e5fd4 feat: run cassandra tests on ci 2025-02-06 09:15:23 +01:00
Nybkox
5fc5497a9e fix(table-create): remove nullable if engine skips nullability 2025-02-06 09:15:23 +01:00
Nybkox
fd70df0f43 feat(cassandara): forceSortStructureColumns 2025-02-06 09:15:23 +01:00
Nybkox
c16d47473d feat: add forceSortStructureColumns for test engines 2025-02-06 09:15:23 +01:00
Nybkox
f5e2300460 fix(cassandra): use text type for string in table anylyse 2025-02-06 09:15:23 +01:00
Nybkox
73a3e8d498 fix(cassandra): ignore default value if engine skips it in table analyse 2025-02-06 09:15:23 +01:00
Nybkox
bb38b93927 feat: add forceSortResults for cassandra 2025-02-06 09:15:23 +01:00
Nybkox
979f2c7dac fix: do not insert another pk if there is no uuid pk 2025-02-06 09:15:23 +01:00
Nybkox
808b774ad1 fix: create sql based on engine config in table-analyse 2025-02-06 09:15:23 +01:00
Nybkox
38ce62185e fix(cassandra): do not add 2nd id column 2025-02-06 09:15:23 +01:00
Nybkox
ac4e411d41 feat: skip delpoy tests 2025-02-06 09:15:23 +01:00
Nybkox
fdc784f42d feat: skip data duplicator for cassandra 2025-02-06 09:15:23 +01:00
Nybkox
edcaf585ea refactor: remove redundant conditions 2025-02-06 09:15:23 +01:00
Nybkox
cdafe7e5f2 fix: force columns sorting when comparing 2025-02-06 09:15:23 +01:00
Nybkox
40cfe63b6f fix: skipDataModifications for cassandra 2025-02-06 09:15:23 +01:00
Nybkox
2fee584da9 fix: skip auto increment for cassandra 2025-02-06 09:15:23 +01:00
Nybkox
8b1d8d6d71 chore: add missing types for test engings exports 2025-02-06 09:15:23 +01:00
Nybkox
f63b61681f feat: force usage of text type for strings in cassandra tests 2025-02-06 09:15:23 +01:00
Nybkox
de4cea86da feat: skip order by for cassandra 2025-02-06 09:15:23 +01:00
Nybkox
c0ca84f347 fix: prevent setting autoincrement when it is disabled in dialect 2025-02-06 09:15:23 +01:00
Nybkox
9c5a2c79f5 feat: add recordset and row zipping for cassandra query 2025-02-06 09:15:23 +01:00
Nybkox
8359746f47 feat: basic cassandra tests setup 2025-02-06 09:15:23 +01:00
Nybkox
516393856d feat: add TestEngineInfo typing 2025-02-06 09:15:23 +01:00
Nybkox
e7e57414b5 fix: update sheduler events info typing 2025-02-06 09:15:23 +01:00
Nybkox
687669330d fix: set createColumnWithColumnKeyword to false for cassandra 2025-02-06 09:15:23 +01:00
Nybkox
d3147d3969 feat: add dropDatabse to cassandra dumper 2025-02-06 09:15:23 +01:00
Nybkox
ca8b7911a6 fix: do not filter keyspaces 2025-02-06 09:15:23 +01:00
Nybkox
3d23a13c34 fix: remove port from cassandra 2025-02-06 09:15:23 +01:00
Nybkox
cf8a104183 fix: use keyspace and replication in createDatabase command for cassandra 2025-02-06 09:15:23 +01:00
Nybkox
bd7aa7884a fix: update engines typing after rebase 2025-02-06 09:15:23 +01:00
Nybkox
f2db514e45 feat: generateDefaultValueForUuid, set 'uuid()' for cassandra 2025-02-06 09:15:23 +01:00
Nybkox
d35801dcc7 feat: disable renaming tables for cassandra 2025-02-06 09:15:23 +01:00
Nybkox
50f9e025c4 fix: remove old way of adding way uuids 2025-02-06 09:15:23 +01:00
SPRINX0\prochazka
09fa3ce438 feat: dumper data type handling 2025-02-06 09:15:23 +01:00
Nybkox
bcf89b1f09 WIP 2025-02-06 09:15:23 +01:00
Nybkox
645a1d35e8 feat: add omitTableAliases to sql dialect 2025-02-06 09:15:23 +01:00
Nybkox
1154eff459 feat: allow specifing dbhan client type 2025-02-06 09:15:23 +01:00
216 changed files with 5388 additions and 1409 deletions

View File

@@ -5,10 +5,10 @@ name: Electron app BETA
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
@@ -24,7 +24,7 @@ jobs:
echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -58,7 +58,7 @@ jobs:
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
- name: fillPackagedPlugins
run: |
@@ -71,16 +71,16 @@ jobs:
yarn run build:app
env:
GH_TOKEN: '${{ secrets.GH_TOKEN }}'
WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
APPLE_ID: '${{ secrets.APPLE_ID }}'
APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
GH_TOKEN: ${{ secrets.GH_TOKEN }}
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
- name: Copy artifacts
run: |
mkdir artifacts
@@ -111,16 +111,16 @@ jobs:
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: '${{ matrix.os }}'
name: ${{ matrix.os }}
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: 'startsWith(github.ref, ''refs/tags/'')'
if: startsWith(github.ref, 'refs/tags/')
with:
files: artifacts/**
prerelease: true
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Print content of notarization-error.log
if: failure() && matrix.os == 'macos-14'
run: |

View File

@@ -5,10 +5,10 @@ name: Electron app PREMIUM BETA
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
@@ -24,7 +24,7 @@ jobs:
echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -37,9 +37,9 @@ jobs:
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: '${{ secrets.GH_TOKEN }}'
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: a2f824dc711b510a5e8235d3faf4aafab1965184
ref: 21048330597124a88fa1b8447e0bc18666eb69c5
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -88,7 +88,7 @@ jobs:
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
- name: fillPackagedPlugins
run: |
cd ..
@@ -102,16 +102,16 @@ jobs:
yarn run build:app
env:
GH_TOKEN: '${{ secrets.GH_TOKEN }}'
WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
APPLE_ID: '${{ secrets.APPLE_ID }}'
APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
GH_TOKEN: ${{ secrets.GH_TOKEN }}
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
- name: Copy artifacts
run: |
mkdir artifacts
@@ -142,16 +142,16 @@ jobs:
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: '${{ matrix.os }}'
name: ${{ matrix.os }}
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: 'startsWith(github.ref, ''refs/tags/'')'
if: startsWith(github.ref, 'refs/tags/')
with:
files: artifacts/**
prerelease: true
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Print content of notarization-error.log
if: failure() && matrix.os == 'macos-14'
run: |

View File

@@ -5,10 +5,10 @@ name: Electron app PREMIUM
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
@@ -24,7 +24,7 @@ jobs:
echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -37,9 +37,9 @@ jobs:
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: '${{ secrets.GH_TOKEN }}'
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: a2f824dc711b510a5e8235d3faf4aafab1965184
ref: 21048330597124a88fa1b8447e0bc18666eb69c5
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -88,7 +88,7 @@ jobs:
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
- name: fillPackagedPlugins
run: |
cd ..
@@ -102,16 +102,16 @@ jobs:
yarn run build:app
env:
GH_TOKEN: '${{ secrets.GH_TOKEN }}'
WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
APPLE_ID: '${{ secrets.APPLE_ID }}'
APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
GH_TOKEN: ${{ secrets.GH_TOKEN }}
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
- name: Copy artifacts
run: |
mkdir artifacts
@@ -142,16 +142,16 @@ jobs:
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: '${{ matrix.os }}'
name: ${{ matrix.os }}
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: 'startsWith(github.ref, ''refs/tags/'')'
if: startsWith(github.ref, 'refs/tags/')
with:
files: artifacts/**
prerelease: false
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Print content of notarization-error.log
if: failure() && matrix.os == 'macos-14'
run: |

View File

@@ -5,10 +5,10 @@ name: Electron app
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
@@ -24,7 +24,7 @@ jobs:
echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -54,7 +54,7 @@ jobs:
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
- name: fillPackagedPlugins
run: |
@@ -67,16 +67,16 @@ jobs:
yarn run build:app
env:
GH_TOKEN: '${{ secrets.GH_TOKEN }}'
WIN_CSC_LINK: '${{ secrets.WINCERT_2025 }}'
WIN_CSC_KEY_PASSWORD: '${{ secrets.WINCERT_2025_PASSWORD }}'
CSC_LINK: '${{ secrets.APPLECERT_CERTIFICATE }}'
CSC_KEY_PASSWORD: '${{ secrets.APPLECERT_PASSWORD }}'
APPLE_ID: '${{ secrets.APPLE_ID }}'
APPLE_TEAM_ID: '${{ secrets.APPLE_TEAM_ID }}'
APPLE_ID_PASSWORD: '${{ secrets.APPLE_ID_PASSWORD }}'
SNAPCRAFT_STORE_CREDENTIALS: '${{secrets.SNAPCRAFT_LOGIN}}'
APPLE_APP_SPECIFIC_PASSWORD: '${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}'
GH_TOKEN: ${{ secrets.GH_TOKEN }}
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
- name: generatePadFile
run: |
yarn generatePadFile
@@ -114,16 +114,16 @@ jobs:
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: '${{ matrix.os }}'
name: ${{ matrix.os }}
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: 'startsWith(github.ref, ''refs/tags/'')'
if: startsWith(github.ref, 'refs/tags/')
with:
files: artifacts/**
prerelease: false
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Print content of notarization-error.log
if: failure() && matrix.os == 'macos-14'
run: |

View File

@@ -1,15 +1,15 @@
# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
name: AWS image PREMIUM
name: Cloud images PREMIUM
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-packer-beta.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+
- v[0-9]+.[0-9]+.[0-9]+-packer-beta.[0-9]+
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
strategy:
matrix:
os:
@@ -17,7 +17,7 @@ jobs:
steps:
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -26,6 +26,9 @@ jobs:
uses: actions/setup-node@v1
with:
node-version: 18.x
- name: Install jq
run: |
sudo apt-get install jq -y
- name: Setup `packer`
uses: hashicorp/setup-packer@main
with:
@@ -34,9 +37,9 @@ jobs:
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: '${{ secrets.GH_TOKEN }}'
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: a2f824dc711b510a5e8235d3faf4aafab1965184
ref: 21048330597124a88fa1b8447e0bc18666eb69c5
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -69,7 +72,7 @@ jobs:
cd dbgate-merged
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
- name: Prepare packer build
run: |
cd ..
@@ -84,41 +87,56 @@ jobs:
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: '${{ matrix.os }}'
name: ${{ matrix.os }}
path: artifacts
- name: Release
uses: softprops/action-gh-release@v1
if: 'startsWith(github.ref, ''refs/tags/'')'
if: startsWith(github.ref, 'refs/tags/')
with:
files: artifacts/**
prerelease: true
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
- name: Run `packer init`
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Run `packer init` for Azure
run: |
cd ../dbgate-merged/packer
packer init ./azure-ubuntu.pkr.hcl
- name: Run `packer build` for Azure
run: |
cd ../dbgate-merged/packer
packer build ./azure-ubuntu.pkr.hcl
- name: Run `packer init` for AWS
run: |
cd ../dbgate-merged/packer
packer init ./aws-ubuntu.pkr.hcl
env:
AWS_ACCESS_KEY_ID: '${{secrets.AWS_ACCESS_KEY_ID}}'
AWS_SECRET_ACCESS_KEY: '${{secrets.AWS_SECRET_ACCESS_KEY}}'
AWS_DEFAULT_REGION: '${{secrets.AWS_DEFAULT_REGION}}'
- name: Run `packer build`
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}}
- name: Run `packer build` for AWS
run: |
cd ../dbgate-merged/packer
packer build ./aws-ubuntu.pkr.hcl
env:
AWS_ACCESS_KEY_ID: '${{secrets.AWS_ACCESS_KEY_ID}}'
AWS_SECRET_ACCESS_KEY: '${{secrets.AWS_SECRET_ACCESS_KEY}}'
AWS_DEFAULT_REGION: '${{secrets.AWS_DEFAULT_REGION}}'
- name: Install jq
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}}
- name: Delete old Azure VMs
run: |
sudo apt-get install jq -y
- name: Delete old AMIs
cd ../dbgate-merged/packer
chmod +x delete-old-azure-images.sh
./delete-old-azure-images.sh
env:
AZURE_CLIENT_ID: ${{secrets.AZURE_CLIENT_ID}}
AZURE_CLIENT_SECRET: ${{secrets.AZURE_CLIENT_SECRET}}
AZURE_TENANT_ID: ${{secrets.AZURE_TENANT_ID}}
AZURE_SUBSCRIPTION_ID: ${{secrets.AZURE_SUBSCRIPTION_ID}}
- name: Delete old AMIs (AWS)
run: |
cd ../dbgate-merged/packer
chmod +x delete-old-amis.sh
./delete-old-amis.sh
env:
AWS_ACCESS_KEY_ID: '${{secrets.AWS_ACCESS_KEY_ID}}'
AWS_SECRET_ACCESS_KEY: '${{secrets.AWS_SECRET_ACCESS_KEY}}'
AWS_DEFAULT_REGION: '${{secrets.AWS_DEFAULT_REGION}}'
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}}

View File

@@ -5,11 +5,11 @@ name: Docker image PREMIUM
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+
- v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
strategy:
matrix:
os:
@@ -17,7 +17,7 @@ jobs:
steps:
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -34,17 +34,17 @@ jobs:
type=match,pattern=\d+.\d+.\d+,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
type=raw,value=latest,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- name: Checkout dbgate/dbgate-pro
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: '${{ secrets.GH_TOKEN }}'
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: a2f824dc711b510a5e8235d3faf4aafab1965184
ref: 21048330597124a88fa1b8447e0bc18666eb69c5
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -83,24 +83,26 @@ jobs:
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
- name: Prepare docker image
run: |
cd ..
cd dbgate-merged
yarn run prepare:docker
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: '${{ secrets.DOCKER_USERNAME }}'
password: '${{ secrets.DOCKER_PASSWORD }}'
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push
uses: docker/build-push-action@v3
with:
push: true
context: ../dbgate-merged/docker
tags: '${{ steps.meta.outputs.tags }}'
platforms: 'linux/amd64,linux/arm64'
tags: ${{ steps.meta.outputs.tags }}
platforms: linux/amd64,linux/arm64

View File

@@ -5,11 +5,11 @@ name: Docker image Community
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+
- v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
strategy:
matrix:
os:
@@ -17,7 +17,7 @@ jobs:
steps:
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -47,10 +47,10 @@ jobs:
type=match,pattern=\d+.\d+.\d+,suffix=-alpine,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
type=raw,value=alpine,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- name: adjustPackageJson
run: |
@@ -70,30 +70,32 @@ jobs:
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
- name: Prepare docker image
run: |
yarn run prepare:docker
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: '${{ secrets.DOCKER_USERNAME }}'
password: '${{ secrets.DOCKER_PASSWORD }}'
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push
uses: docker/build-push-action@v3
with:
push: true
context: ./docker
tags: '${{ steps.meta.outputs.tags }}'
platforms: 'linux/amd64,linux/arm64,linux/arm/v7'
tags: ${{ steps.meta.outputs.tags }}
platforms: linux/amd64,linux/arm64,linux/arm/v7
- name: Build and push alpine
uses: docker/build-push-action@v3
with:
push: true
context: ./docker
file: ./docker/Dockerfile-alpine
tags: '${{ steps.alpmeta.outputs.tags }}'
platforms: 'linux/amd64,linux/arm64,linux/arm/v7'
tags: ${{ steps.alpmeta.outputs.tags }}
platforms: linux/amd64,linux/arm64,linux/arm/v7

View File

@@ -5,11 +5,11 @@ name: NPM packages PREMIUM
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+
- v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
strategy:
matrix:
os:
@@ -17,7 +17,7 @@ jobs:
steps:
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -30,9 +30,9 @@ jobs:
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: '${{ secrets.GH_TOKEN }}'
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: a2f824dc711b510a5e8235d3faf4aafab1965184
ref: 21048330597124a88fa1b8447e0bc18666eb69c5
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -51,7 +51,7 @@ jobs:
node adjustNpmPackageJsonPremium
- name: Configure NPM token
env:
NPM_TOKEN: '${{ secrets.NPM_TOKEN }}'
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
cd ..
cd dbgate-merged
@@ -77,7 +77,7 @@ jobs:
cd dbgate-merged
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
- name: Publish dbgate-api-premium
run: |
cd ..

View File

@@ -5,11 +5,11 @@ name: NPM packages
'on':
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+'
- v[0-9]+.[0-9]+.[0-9]+
- v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+
jobs:
build:
runs-on: '${{ matrix.os }}'
runs-on: ${{ matrix.os }}
strategy:
matrix:
os:
@@ -17,7 +17,7 @@ jobs:
steps:
- name: Context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v2
with:
@@ -28,7 +28,7 @@ jobs:
node-version: 18.x
- name: Configure NPM token
env:
NPM_TOKEN: '${{ secrets.NPM_TOKEN }}'
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
npm config set '//registry.npmjs.org/:_authToken' "${NPM_TOKEN}"
- name: yarn install
@@ -41,7 +41,7 @@ jobs:
run: |
yarn printSecrets
env:
GIST_UPLOAD_SECRET: '${{secrets.GIST_UPLOAD_SECRET}}'
GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
- name: Publish types
working-directory: packages/types
run: |
@@ -123,3 +123,11 @@ jobs:
working-directory: plugins/dbgate-plugin-clickhouse
run: |
npm publish
- name: Publish dbgate-plugin-dbf
working-directory: plugins/dbgate-plugin-dbf
run: |
npm publish
- name: Publish dbgate-plugin-cassandra
working-directory: plugins/dbgate-plugin-cassandra
run: |
npm publish

View File

@@ -30,8 +30,8 @@ jobs:
uses: docker/login-action@v2
with:
registry: ghcr.io
username: '${{ github.actor }}'
password: '${{ secrets.GITHUB_TOKEN }}'
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Push mysql-ssh-login to GHCR
run: |
docker tag dbgate/mysql-ssh-login:latest ghcr.io/dbgate/mysql-ssh-login:latest

.github/workflows/diflow.yaml (new file, 36 lines)
View File

@@ -0,0 +1,36 @@
# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
name: Diflow merge
'on':
push:
branches:
- master
jobs:
merge:
runs-on: ubuntu-latest
steps:
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 22.x
- name: Checkout dbgate/diflow
uses: actions/checkout@v2
with:
repository: dbgate/diflow
path: diflow
- name: Diflow install
run: |
cd diflow
npm install
npm run build
- name: Git config
run: |
git config --global user.email "info@dbgate.io"
git config --global user.name "Diflow"
- name: Diflow run
run: |
cd diflow
node dist/diflow.js sync -r https://DIFLOW_GIT_SECRET@github.com/dbgate/dbgate-diflow-config.git -b master
env:
DIFLOW_GIT_SECRET: ${{ secrets.DIFLOW_GIT_SECRET }}

View File

@@ -24,9 +24,9 @@ jobs:
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: '${{ secrets.GH_TOKEN }}'
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: a2f824dc711b510a5e8235d3faf4aafab1965184
ref: 21048330597124a88fa1b8447e0bc18666eb69c5
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -78,25 +78,25 @@ jobs:
ports:
- '16000:5432'
mysql-cypress:
image: 'mysql:8.0.18'
image: mysql:8.0.18
ports:
- '16004:3306'
env:
MYSQL_ROOT_PASSWORD: Pwd2020Db
mysql-ssh-login:
image: 'ghcr.io/dbgate/mysql-ssh-login:latest'
image: ghcr.io/dbgate/mysql-ssh-login:latest
ports:
- '16012:22'
mysql-ssh-keyfile:
image: 'ghcr.io/dbgate/mysql-ssh-keyfile:latest'
image: ghcr.io/dbgate/mysql-ssh-keyfile:latest
ports:
- '16008:22'
dex:
image: 'ghcr.io/dbgate/dex:latest'
image: ghcr.io/dbgate/dex:latest
ports:
- '16009:5556'
mongo:
image: 'mongo:4.0.12'
image: mongo:4.0.12
env:
MONGO_INITDB_ROOT_USERNAME: root
MONGO_INITDB_ROOT_PASSWORD: Pwd2020Db
@@ -106,3 +106,17 @@ jobs:
image: redis
ports:
- '16011:6379'
mssql:
image: mcr.microsoft.com/mssql/server
ports:
- '16014:1433'
env:
ACCEPT_EULA: 'Y'
SA_PASSWORD: Pwd2020Db
MSSQL_PID: Express
oracle:
image: gvenzl/oracle-xe:21-slim
env:
ORACLE_PASSWORD: Pwd2020Db
ports:
- '16013:1521'

View File

@@ -0,0 +1,48 @@
# --------------------------------------------------------------------------------------------
# This file is generated. Do not edit manually
# --------------------------------------------------------------------------------------------
name: Update Workflows
'on':
push:
branches:
- master
paths:
- workflow-templates/**
jobs:
update-workflows:
runs-on: ubuntu-latest
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
token: ${{ secrets.WORKFLOW_CHANGE_ACCESS_TOKEN }}
- name: Set up Node
uses: actions/setup-node@v3
with:
node-version: 22
- name: Install dependencies
run: |
rm package.json
rm yarn.lock
yarn add -W js-yaml lodash
- name: Run workflow generation
run: |
node common/processWorkflows.js
- name: Git config
run: |
git config --global user.email "info@dbgate.io"
git config --global user.name "CI workflows"
- name: Commit changes
run: |
# Only commit if there are changes
if [[ -n "$(git status --porcelain)" ]]; then
git add .github/workflows
git commit -m "chore: auto-update github workflows"
else
echo "No changes to commit"
fi
- name: Push changes
uses: ad-m/github-push-action@v0.6.0
with:
github_token: ${{ secrets.WORKFLOW_CHANGE_ACCESS_TOKEN }}
branch: master

View File

@@ -40,19 +40,19 @@ jobs:
- uses: tanmen/jest-reporter@v1
if: always()
with:
github-token: '${{ secrets.GITHUB_TOKEN }}'
github-token: ${{ secrets.GITHUB_TOKEN }}
result-file: integration-tests/result.json
action-name: Integration tests
- uses: tanmen/jest-reporter@v1
if: always()
with:
github-token: '${{ secrets.GITHUB_TOKEN }}'
github-token: ${{ secrets.GITHUB_TOKEN }}
result-file: packages/filterparser/result.json
action-name: Filter parser test results
- uses: tanmen/jest-reporter@v1
if: always()
with:
github-token: '${{ secrets.GITHUB_TOKEN }}'
github-token: ${{ secrets.GITHUB_TOKEN }}
result-file: packages/datalib/result.json
action-name: Datalib (perspectives) test results
services:
@@ -64,7 +64,7 @@ jobs:
ports:
- '15000:5432'
mysql-integr:
image: 'mysql:8.0.18'
image: mysql:8.0.18
env:
MYSQL_ROOT_PASSWORD: Pwd2020Db
ports:
@@ -78,14 +78,18 @@ jobs:
ports:
- '15002:1433'
clickhouse-integr:
image: 'bitnami/clickhouse:24.8.4'
image: bitnami/clickhouse:24.8.4
env:
CLICKHOUSE_ADMIN_PASSWORD: Pwd2020Db
ports:
- '15005:8123'
oracle-integr:
image: 'gvenzl/oracle-xe:21-slim'
image: gvenzl/oracle-xe:21-slim
env:
ORACLE_PASSWORD: Pwd2020Db
ports:
- '15006:1521'
cassandradb:
image: cassandra:5.0.2
ports:
- '15942:9042'

.gitignore (3 lines changed)
View File

@@ -32,4 +32,5 @@ packages/api/src/packagedPluginsContent.js
.VSCodeCounter
packages/web/public/*.html
e2e-tests/screenshots/*.png
e2e-tests/screenshots/*.png
.aider*

View File

@@ -8,6 +8,30 @@ Builds:
- linux - application for linux
- win - application for Windows
### 6.2.1
- ADDED: Commit/rollback and autocommit in scripts #1039
- FIXED: Doesn't import all the records from MongoDB #1044
- ADDED: Show server name alongside database name in title of the tab group #1041
- ADDED: Can't open Sqlite through web #956
- FIXED: Crashed after text input at columns search #1049
- FIXED: Incorrect autojoin for foreign keys with more columns #1051
- FIXED: Scroll in XML cell view, XML view respect themes
- REMOVED: armv7l build for Linux (because of problems with glibc compatibility)
- CHANGED: Upgraded to node:22 for docker builds
- CHANGED: Upgraded SQLite engine version (better-sqlite3@11.8.1)
### 6.2.0
- ADDED: Query AI Assistant (Premium)
- ADDED: Cassandra database support
- ADDED: XML cell data view
- FIXED: Filtering by value in Oracle #1009
- FIXED: Operand type clash: uniqueidentifier is incompatible with int #565
- FIXED: UX in administration
- FIXED: Error reporting of broken connections (sometimes it caused infinite loading of data grid)
- ADDED: Azure managed identity support (Team Premium)
- ADDED: Expanded JSON cell view
- CHANGED: Open real executed query, when datagrid shows loading error
### 6.1.6
- FIXED: Hotfix build process for premium edition
@@ -306,7 +330,7 @@ Builds:
- FIXED: Creating SQLite autoincrement column
- FIXED: Better error reporting from exports/import/dulicator
- CHANGED: Optimalizede OracleDB analysing algorithm
- ADDED: Mutli column filter for perspectives
- ADDED: Multi column filter for perspectives
- FIXED: Fixed some scenarios using tables from different DBs
- FIXED: Sessions with long-running queries are not killed

View File

@@ -33,6 +33,7 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
* MariaDB
* CosmosDB (Premium)
* ClickHouse
* Apache Cassandra
<!-- Learn more about DbGate features at the [DbGate website](https://dbgate.org/), or try our online [demo application](https://demo.dbgate.org) -->

View File

@@ -51,7 +51,6 @@
"target": "AppImage",
"arch": [
"x64",
"armv7l",
"arm64"
]
},

View File

@@ -0,0 +1,43 @@
//@ts-check
const { getDefaultTranslations, getLanguageTranslations } = require('./helpers');

/**
 * @param {string} language
 */
function getMissingTranslations(language) {
  const source = getDefaultTranslations();
  /** @type {Record<string, string>} */
  let target;
  try {
    target = getLanguageTranslations(language);
  } catch {
    console.log(`Language ${language} not found, creating a new one`);
    target = {};
  }

  let added = 0;
  let removed = 0;

  for (const key in source) {
    if (!target[key]) {
      target[key] = `*** ${source[key]}`;
      added++;
    }
  }

  for (const key in target) {
    if (!source[key]) {
      delete target[key];
      removed++;
    }
  }

  const newLength = Object.keys(target).length;
  return { result: target, stats: { added, removed, newLength } };
}

module.exports = {
  getMissingTranslations,
};
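
A minimal usage sketch of getMissingTranslations (not part of the diff): the en.json/de.json contents below are hypothetical, only the "*** " marker and the added/removed bookkeeping come from the function above.

// Assumed translation files under translations/:
//   en.json (default): { "app.save": "Save", "app.cancel": "Cancel" }
//   de.json (partial):  { "app.save": "Speichern", "app.obsolete": "Veraltet" }
const { getMissingTranslations } = require('./addMissing');

const { result, stats } = getMissingTranslations('de');
// result:
//   "app.save"   -> "Speichern"    (kept, already translated)
//   "app.cancel" -> "*** Cancel"   (copied from the default language, prefixed as untranslated)
//   "app.obsolete" is deleted because the key no longer exists in en.json
// stats: { added: 1, removed: 1, newLength: 2 }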

View File

@@ -0,0 +1,16 @@
// @ts-check
//
const defaultLanguage = 'en';

/** @typedef {{ extensions: string[], directories: string[] }} ExtractConfig */

/** @type {ExtractConfig} */
const defaultExtractConfig = {
  extensions: ['.js', '.ts', '.svelte'],
  directories: ['app', 'packages/web'],
};

module.exports = {
  defaultLanguage,
  defaultExtractConfig,
};

View File

@@ -0,0 +1,84 @@
//@ts-check
const fs = require('fs');
const { promisify } = require('util');
const { getFiles } = require('./helpers');

const readFilePromise = promisify(fs.readFile);

const translationRegex = /_t\(\s*['"]([^'"]+)['"]\s*,\s*\{\s*defaultMessage\s*:\s*['"]([^'"]+)['"]\s*\}/g;

/**
 * @param {string} file
 *
 * @returns {Promise<Record<string, string>>}
 */
async function extractTranslationsFromFile(file) {
  /** @type {Record<string, string>} */
  const translations = {};
  const content = await readFilePromise(file, 'utf-8');
  let match;

  while ((match = translationRegex.exec(content)) !== null) {
    const [_, key, defaultText] = match;
    translations[key] = defaultText;
  }

  return translations;
}

/** @typedef {{ ignoreDuplicates?: boolean }} ExtractOptions */

/**
 * @param {string[]} directories
 * @param {string[]} extensions
 * @param {ExtractOptions} options
 *
 * @returns {Promise<Record<string, string>>}
 */
async function extractAllTranslations(directories, extensions, options = {}) {
  const { ignoreDuplicates } = options;
  try {
    /** @type {Record<string, string>} */
    const allTranslations = {};
    /** @type {Record<string, string[]>} */
    const translationKeyToFiles = {};

    for (const dir of directories) {
      const files = await getFiles(dir, extensions);
      for (const file of files) {
        const fileTranslations = await extractTranslationsFromFile(file);
        for (const key in fileTranslations) {
          if (!translationKeyToFiles[key]) {
            translationKeyToFiles[key] = [];
          }
          translationKeyToFiles[key].push(file);

          if (!ignoreDuplicates && allTranslations[key] && allTranslations[key] !== fileTranslations[key]) {
            console.error(
              `Different translations for the same key [${key}] found. ${file}: ${
                fileTranslations[key]
              }. Previous value: ${allTranslations[key]} was found in ${translationKeyToFiles[key].join(', ')}`
            );
            throw new Error(`Duplicate translation key found: ${key}`);
          }

          allTranslations[key] = fileTranslations[key];
        }
      }
    }

    return allTranslations;
  } catch (error) {
    console.error('Error extracting translations:', error);
    throw error;
  }
}

module.exports = {
  extractTranslationsFromFile,
  extractAllTranslations,
};
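
As a quick illustration of what the extractor picks up (hypothetical snippet, not from the diff), translationRegex matches _t() calls whose key and defaultMessage are plain string literals:

// Matched: key "settings.language", default text "Language".
const label = _t('settings.language', { defaultMessage: 'Language' });

// Also matched across lines, since \s* in the regex allows arbitrary whitespace:
const title = _t('settings.title', {
  defaultMessage: 'Settings'
});

// Not matched: the key must be a quoted literal, so dynamic keys are skipped.
const dynamic = _t(`settings.${name}`, { defaultMessage: 'Unknown' });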

View File

@@ -0,0 +1,194 @@
//@ts-check
const path = require('path');
const fs = require('fs');
const { defaultLanguage } = require('./constants');
const sortJsonKeysAlphabetically = require('./sortJsonKeysAlphabetically');
/**
* @param {string} file
* @param {string[]} extensions
*
* @returns {boolean}
*/
function hasValidExtension(file, extensions) {
return extensions.includes(path.extname(file).toLowerCase());
}
/**
* @param {string} dir
* @param {string[]} extensions
*
* @returns {Promise<string[]>}
*/
async function getFiles(dir, extensions) {
const files = await fs.promises.readdir(dir);
const allFiles = await Promise.all(
files.map(async file => {
const filePath = path.join(dir, file);
const stats = await fs.promises.stat(filePath);
if (stats.isDirectory()) {
return getFiles(filePath, extensions);
} else if (stats.isFile() && hasValidExtension(file, extensions)) {
return filePath;
}
return null;
})
);
const validFiles = /** @type {string[]} */ (allFiles.flat().filter(file => file !== null));
return validFiles;
}
/**
* @param {string | string[]} value
*
* @returns {string}
*/
function formatDefaultValue(value) {
if (Array.isArray(value)) {
return value.join(', ');
}
return value;
}
const scriptDir = getScriptDir();
/** @param {string} file
*
* @returns {string}
*/
function resolveFile(file) {
if (path.isAbsolute(file)) {
return file;
}
return path.resolve(scriptDir, '..', '..', file);
}
/** @param {string[]} dirs
*
* @returns {string[]}
*/
function resolveDirs(dirs) {
return dirs.map(resolveFile);
}
/**
* @param {string[]} extensions
*
* @returns {string[]}
*/
function resolveExtensions(extensions) {
return extensions.map(ext => (ext.startsWith('.') ? ext : `.${ext}`));
}
function getScriptDir() {
if (require.main?.filename) {
return path.dirname(require.main.filename);
}
if ('pkg' in process && process.pkg) {
return path.dirname(process.execPath);
}
return __dirname;
}
/**
* @param {string} file
*/
function ensureFileDirExists(file) {
const dir = path.dirname(file);
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true });
}
}
/**
* @param {Record<string, string>} existingTranslations - Previously extracted translations
* @param {Record<string, string>} newTranslations - Newly extracted translations
* @returns {{ added: string[], removed: string[], updated: string[] }} Translation changes
*/
const getTranslationChanges = (existingTranslations, newTranslations) => {
const existingKeys = new Set(Object.keys(existingTranslations || {}));
const newKeys = new Set(Object.keys(newTranslations));
const added = [...newKeys].filter(key => !existingKeys.has(key));
const removed = [...existingKeys].filter(key => !newKeys.has(key));
const updated = [...newKeys].filter(
key => existingKeys.has(key) && existingTranslations[key] !== newTranslations[key]
);
return { added, removed, updated };
};
function getDefaultTranslations() {
return getLanguageTranslations(defaultLanguage);
}
/**
* @param {string} language
*
* @returns {Record<string, string>}
*/
function getLanguageTranslations(language) {
const file = resolveFile(`translations/${language}.json`);
const content = fs.readFileSync(file, 'utf-8');
return JSON.parse(content);
}
/**
* @param {string} language
* @param {Record<string, string>} translations
*/
function setLanguageTranslations(language, translations) {
const file = resolveFile(`translations/${language}.json`);
const sorted = sortJsonKeysAlphabetically(translations);
fs.writeFileSync(file, JSON.stringify(sorted, null, 2));
}
/**
* @param {string} language
* @param {Record<string, string>} newTranslations
*/
function updateLanguageTranslations(language, newTranslations) {
const translations = getLanguageTranslations(language);
const updatedTranslations = { ...translations, ...newTranslations };
const sorted = sortJsonKeysAlphabetically(updatedTranslations);
setLanguageTranslations(language, sorted);
}
function getAllLanguages() {
const dir = resolveFile('translations');
const files = fs.readdirSync(dir);
const languages = files.filter(file => file.endsWith('.json')).map(file => file.replace('.json', ''));
return languages;
}
function getAllNonDefaultLanguages() {
return getAllLanguages().filter(language => language !== defaultLanguage);
}
module.exports = {
hasValidExtension,
getFiles,
formatDefaultValue,
resolveFile,
resolveDirs,
resolveExtensions,
ensureFileDirExists,
getTranslationChanges,
getDefaultTranslations,
getLanguageTranslations,
setLanguageTranslations,
updateLanguageTranslations,
getAllLanguages,
getAllNonDefaultLanguages,
};
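A small usage sketch of getTranslationChanges with invented keys, assuming the module above is the CLI's helpers file; it shows how the added/removed/updated buckets are derived from two translation maps.

const { getTranslationChanges } = require('./helpers');

const existing = { 'app.title': 'DbGate', 'app.quit': 'Quit' };
const extracted = { 'app.title': 'DbGate SQL client', 'app.open': 'Open' };

const { added, removed, updated } = getTranslationChanges(existing, extracted);
// added   -> ['app.open']   (only in the new extraction)
// removed -> ['app.quit']   (no longer referenced in source files)
// updated -> ['app.title']  (same key, different default value)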

View File

@@ -0,0 +1,3 @@
const { program } = require('./program');
program.parse();

View File

@@ -0,0 +1,163 @@
//@ts-check
const fs = require('fs');
const { program } = require('commander');
const {
resolveDirs,
resolveExtensions,
getTranslationChanges,
setLanguageTranslations,
getAllNonDefaultLanguages,
updateLanguageTranslations,
getDefaultTranslations,
} = require('./helpers');
const { extractAllTranslations } = require('./extract');
const { getMissingTranslations } = require('./addMissing');
const { defaultLanguage, defaultExtractConfig } = require('./constants');
const { removeUnusedAllTranslations, removeUnusedForSingleLanguage } = require('./removeUnused');
/**
* @typedef {import('./constants').ExtractConfig & { verbose?: boolean, ignoreUnused?: boolean }} ExtractOptions
*/
program.name('dbgate-translations-cli').description('CLI tool for managing translations').version('1.0.0');
program
.command('extract')
.description('Extract translation keys from source files')
.option('-d, --directories <directories...>', 'directories to search', defaultExtractConfig.directories)
.option('-e, --extensions <extensions...>', 'file extensions to process', defaultExtractConfig.extensions)
.option('-r, --ignoreUnused', 'Ignore unused keys in the output file')
.option('-v, --verbose', 'verbose mode')
.action(async (/** @type {ExtractOptions} */ options) => {
try {
const { directories, extensions, verbose, ignoreUnused } = options;
const resolvedDirectories = resolveDirs(directories);
const resolvedExtensions = resolveExtensions(extensions);
const extractedTranslations = await extractAllTranslations(resolvedDirectories, resolvedExtensions);
const defaultTranslations = getDefaultTranslations();
const { added, removed, updated } = getTranslationChanges(defaultTranslations, extractedTranslations);
console.log('\nTranslation changes:');
console.log(`- Added: ${added.length} keys`);
console.log(`- ${ignoreUnused ? 'Unused' : 'Removed'}: ${removed.length} keys`);
console.log(`- Updated: ${updated.length} keys`);
console.log(`- Total: ${Object.keys(extractedTranslations).length} keys`);
if (verbose) {
if (added.length > 0) {
console.log('\nNew keys:');
added.forEach(key => console.log(` + ${key}`));
}
if (removed.length > 0) {
console.log('\nRemoved keys:');
removed.forEach(key => console.log(` - ${key}`));
}
if (updated.length > 0) {
console.log('\nUpdated keys:');
updated.forEach(key => {
console.log(` ~ ${key}`);
console.log(`      Old: ${defaultTranslations[key]}`);
console.log(` New: ${extractedTranslations[key]}`);
});
}
}
if (ignoreUnused) {
console.log('New translations were saved. Unused keys are kept.\n');
updateLanguageTranslations(defaultLanguage, extractedTranslations);
if (verbose) {
console.log('\nUnused keys:');
for (const key of removed) {
console.log(`${key}: "${defaultTranslations[key]}"`);
}
}
} else {
console.log('Unused keys were removed.\n');
setLanguageTranslations(defaultLanguage, extractedTranslations);
}
} catch (error) {
console.error(error);
console.error('Error during extraction:', error.message);
process.exit(1);
}
});
const ALL_LANGUAGES = 'all';
/**
* @param {string} target
*/
function addMissingTranslations(target) {
console.log(`Adding missing keys for language: ${target}`);
const { result, stats } = getMissingTranslations(target);
console.log(`Added: ${stats.added}, Removed: ${stats.removed}, Total: ${stats.newLength}`);
setLanguageTranslations(target, result);
console.log(`New translations for ${target} were saved.`);
}
program
.command('add-missing')
.description('Add missing keys for a language to the translation file')
.option('-t, --target <target>', 'language to add missing translations to', ALL_LANGUAGES)
.action(options => {
try {
const { target } = options;
const languages = getAllNonDefaultLanguages();
if (target === ALL_LANGUAGES) {
console.log('Adding missing keys for all languages\n');
for (const language of languages) {
addMissingTranslations(language);
console.log();
}
} else {
addMissingTranslations(target);
}
} catch (error) {
console.error(error);
console.error('Error during add-missing:', error.message);
process.exit(1);
}
});
program
.command('remove-unused')
.description('Remove unused keys from the translation files')
.option('-t, --target <target>', 'language to remove unused translations from', ALL_LANGUAGES)
.action(async options => {
try {
const { target } = options;
if (target === ALL_LANGUAGES) {
console.log('Removing unused keys from all languages\n');
await removeUnusedAllTranslations();
} else {
await removeUnusedForSingleLanguage(target);
}
} catch (error) {
console.error(error);
console.error('Error during remove-unused:', error.message);
process.exit(1);
}
});
program
.command('check')
.description('Check if there are multiple default values for the same key')
.action(async () => {
try {
await extractAllTranslations(defaultExtractConfig.directories, defaultExtractConfig.extensions);
console.log('No problems found while extracting translations.');
} catch (error) {
console.error(error);
console.error('Error during check:', error.message);
process.exit(1);
}
});
module.exports = { program };
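Assuming the three-line entry file shown earlier just calls program.parse(), the commands registered above can be invoked roughly as follows; the 'es' target is an invented example.

// node index.js extract                 -> re-extract keys, dropping unused ones
// node index.js extract -v -r           -> verbose, keep unused keys in the file
// node index.js add-missing -t es       -> add missing keys to a hypothetical es.json
// node index.js remove-unused           -> prune unused keys from every language file
// node index.js check                   -> exits non-zero when a key has conflicting default values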

View File

@@ -0,0 +1,46 @@
// @ts-check
const { defaultExtractConfig } = require('./constants');
const { extractAllTranslations } = require('./extract');
const { getLanguageTranslations, getAllLanguages, setLanguageTranslations } = require('./helpers');
const { directories, extensions } = defaultExtractConfig;
/**
* @param {string} language
* @param {Record<string, string>} source
*/
function getUsedTranslations(language, source) {
const languageTranslations = getLanguageTranslations(language);
for (const key in languageTranslations) {
if (!(key in source)) {
delete languageTranslations[key];
}
}
return languageTranslations;
}
async function removeUnusedAllTranslations() {
const source = await extractAllTranslations(directories, extensions);
const languages = getAllLanguages();
for (const language of languages) {
const newTranslations = getUsedTranslations(language, source);
setLanguageTranslations(language, newTranslations);
}
}
/**
* @param {string} language
*/
async function removeUnusedForSingleLanguage(language) {
const source = await extractAllTranslations(directories, extensions);
const newTranslations = getUsedTranslations(language, source);
setLanguageTranslations(language, newTranslations);
}
module.exports = {
removeUnusedAllTranslations,
removeUnusedForSingleLanguage,
};

View File

@@ -0,0 +1,24 @@
// @ts-check
/**
* @param {object|string} json
* @returns {object}
*/
function sortJsonKeysAlphabetically(json) {
const obj = typeof json === 'string' ? JSON.parse(json) : json;
if (obj === null || typeof obj !== 'object' || Array.isArray(obj)) {
return obj;
}
const sortedObj = Object.keys(obj)
.sort()
.reduce((result, key) => {
result[key] = obj[key];
return result;
}, {});
return sortedObj;
}
module.exports = sortJsonKeysAlphabetically;
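A quick behavioural sketch with made-up inputs, covering the three branches of sortJsonKeysAlphabetically:

const sortJsonKeysAlphabetically = require('./sortJsonKeysAlphabetically');

sortJsonKeysAlphabetically({ b: '2', a: '1' });         // -> { a: '1', b: '2' }
sortJsonKeysAlphabetically('{"z":"last","a":"first"}'); // JSON strings are parsed first
sortJsonKeysAlphabetically(['unchanged']);              // arrays and primitives pass through unchanged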

View File

@@ -1,6 +1,6 @@
# this compose file is for testing purposes only
# use it for testing docker containers built on local machine
version: "3"
version: '3'
services:
dbgate:
build: docker
@@ -15,31 +15,31 @@ services:
volumes:
- dbgate-data:/root/.dbgate
# environment:
# WEB_ROOT: /dbgate
# CONNECTIONS: mssql
# LABEL_mssql: MS Sql
# SERVER_mssql: mssql
# USER_mssql: sa
# PORT_mssql: 1433
# PASSWORD_mssql: Pwd2020Db
# ENGINE_mssql: mssql@dbgate-plugin-mssql
# proxy:
# # image: nginx
# build: test/nginx
# ports:
# - 8082:80
# CONNECTIONS: mssql
# LABEL_mssql: MS Sql
# SERVER_mssql: mssql
# USER_mssql: sa
# PORT_mssql: 1433
# PASSWORD_mssql: Pwd2020Db
# ENGINE_mssql: mssql@dbgate-plugin-mssql
proxy:
# image: nginx
build: test/nginx
ports:
- 8082:80
# volumes:
# - /home/jena/test/chinook:/mnt/sqt
# environment:
# CONNECTIONS: sqlite
volumes:
- /home/jena/test/chinook:/mnt/sqt
environment:
CONNECTIONS: sqlite
# LABEL_sqlite: sqt
# FILE_sqlite: /mnt/sqt/Chinook.db
# ENGINE_sqlite: sqlite@dbgate-plugin-sqlite
LABEL_sqlite: sqt
FILE_sqlite: /mnt/sqt/Chinook.db
ENGINE_sqlite: sqlite@dbgate-plugin-sqlite
# mssql:
# image: mcr.microsoft.com/mssql/server
@@ -51,4 +51,5 @@ services:
volumes:
dbgate-data:
driver: local
driver: local

View File

@@ -1,21 +1,10 @@
FROM ubuntu:22.04
FROM node:22
RUN apt-get update && apt-get install -y \
curl \
gnupg \
iputils-ping \
iproute2 \
unixodbc \
gcc \
g++ \
make
RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | gpg --dearmor -o /usr/share/keyrings/nodesource-archive-keyring.gpg \
&& echo "deb [signed-by=/usr/share/keyrings/nodesource-archive-keyring.gpg] https://deb.nodesource.com/node_18.x jammy main" | tee /etc/apt/sources.list.d/nodesource.list \
&& echo "deb-src [signed-by=/usr/share/keyrings/nodesource-archive-keyring.gpg] https://deb.nodesource.com/node_18.x jammy main" | tee -a /etc/apt/sources.list.d/nodesource.list \
&& apt-get update && apt-get install -y nodejs \
&& rm -rf /var/lib/apt/lists/* \
&& npm install -g yarn
&& rm -rf /var/lib/apt/lists/*
WORKDIR /home/dbgate-docker

View File

@@ -0,0 +1,7 @@
module.exports = {
mysql: true,
postgres: true,
mssql: true,
oracle: true,
sqlite: true,
};

View File

@@ -0,0 +1,25 @@
const path = require('path');
const os = require('os');
const fs = require('fs');
const baseDir = path.join(os.homedir(), '.dbgate');
function clearTestingData() {
if (fs.existsSync(path.join(baseDir, 'connections-e2etests.jsonl'))) {
fs.unlinkSync(path.join(baseDir, 'connections-e2etests.jsonl'));
}
if (fs.existsSync(path.join(baseDir, 'settings-e2etests.json'))) {
fs.unlinkSync(path.join(baseDir, 'settings-e2etests.json'));
}
if (fs.existsSync(path.join(baseDir, 'files-e2etests'))) {
fs.rmdirSync(path.join(baseDir, 'files-e2etests'), { recursive: true });
}
if (fs.existsSync(path.join(baseDir, 'archive-e2etests'))) {
fs.rmdirSync(path.join(baseDir, 'archive-e2etests'), { recursive: true });
}
if (fs.existsSync(path.join(__dirname, '../my_guitar_shop.db'))) {
fs.unlinkSync(path.join(__dirname, '../my_guitar_shop.db'));
}
}
clearTestingData();

View File

@@ -1,6 +1,5 @@
const { defineConfig } = require('cypress');
const killPort = require('kill-port');
const { clearTestingData } = require('./e2eTestTools');
const waitOn = require('wait-on');
const { exec } = require('child_process');
const fs = require('fs');
@@ -14,11 +13,14 @@ module.exports = defineConfig({
// implement node event listeners here
on('before:spec', async details => {
await clearTestingData();
// console.log('********************* DETAILS *********************', JSON.stringify(details));
if (config.isInteractive) {
await killPort(3000);
try {
await killPort(3000);
} catch (e) {
console.warn('Error killing process on port 3000:', e.message);
}
switch (details.fileName) {
case 'add-connection':
serverProcess = exec('yarn start:add-connection');
@@ -35,6 +37,9 @@ module.exports = defineConfig({
case 'team':
serverProcess = exec('yarn start:team');
break;
case 'multi-sql':
serverProcess = exec('yarn start:multi-sql');
break;
}
await waitOn({ resources: ['http://localhost:3000'] });

View File

@@ -350,11 +350,13 @@ describe('Data browser data', () => {
cy.themeshot('comparesettings');
});
it.only('Query editor - AI assistant', () => {
it.skip('Query editor - AI assistant', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewQuery').click();
cy.testid('QueryTab_switchAiAssistantButton').click();
cy.testid('QueryAiAssistant_allowSendToAiServiceButton').click();
cy.testid('ConfirmModal_okButton').click();
cy.testid('QueryAiAssistant_promptInput').type('album names');
cy.testid('QueryAiAssistant_queryFromQuestionButton').click();
cy.contains('Use this').click();
@@ -362,4 +364,87 @@ describe('Data browser data', () => {
cy.contains('Balls to the Wall');
cy.themeshot('aiassistant');
});
it('Modify data', () => {
// TODO FIX: delete references cascade not working
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.contains('Employee').click();
cy.contains('Rows: 8');
cy.contains('Laura').click();
cy.contains('Laura').click();
cy.get('body').realType('Jane');
// cy.contains('Peacock').click();
// cy.testid('TableDataTab_deleteSelectedRows').click();
cy.contains('King').click();
cy.testid('TableDataTab_deleteSelectedRows').click();
cy.testid('TableDataTab_insertNewRow').click();
cy.get('body').realType('Novak');
cy.get('body').realPress('{enter}');
cy.realPress(['ArrowRight']);
cy.get('body').realType('Karel');
cy.testid('TableDataTab_save').click();
cy.contains('INSERT INTO `Employee`');
cy.contains("SET `FirstName`='Jane'");
cy.contains('DELETE FROM `Employee`');
cy.themeshot('modifydata');
// cy.testid('ConfirmSqlModal_okButton').click();
// cy.contains('Cannot delete or update a parent row')
// cy.testid('ConfirmSqlModal_okButton').click();
// cy.testid('TableDataTab_save').click();
// cy.testid('ConfirmSqlModal_deleteReferencesCascade').click();
// cy.testid('ConfirmSqlModal_okButton').click();
cy.testid('ConfirmSqlModal_okButton').click();
cy.contains('Novak');
cy.contains('Rows: 8');
});
// it('Import', () => {
// TBC after Import FIX
// cy.contains('MySql-connection').click();
// cy.contains('MyChinook').click();
// cy.contains('Customer').rightclickclick();
// cy.contains('Import').click();
// cy.get('input[type=file]').selectFile('cypress/fixtures/Customer_add.csv');
// cy.get('table tbody tr').eq(1).within(() => {
// cy.get('select').select('Append data');
// });
// });
it('Backup table', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.contains('Customer').rightclick();
cy.contains('backup').click();
cy.testid('ConfirmSqlModal_okButton').click();
cy.contains('_Customer').should('be.visible');
});
it('Truncate table', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.contains('_Customer').click();
cy.contains('Leonie').click();
cy.contains('_Customer').rightclick();
cy.contains('Truncate table').click();
cy.testid('ConfirmSqlModal_okButton').click();
cy.contains('Leonie').click();
cy.testid('TableDataTab_refreshGrid').click();
cy.contains('No rows loaded');
});
it('Drop table', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.contains('_Customer').rightclick();
cy.contains('Drop table').click();
cy.testid('ConfirmSqlModal_okButton').click();
cy.contains('_Customer').should('not.exist');
});
});

View File

@@ -0,0 +1,101 @@
const localconfig = require('../../.localconfig');
const { formatQueryWithoutParams } = require('dbgate-tools');
global.DBGATE_PACKAGES = {
'dbgate-tools': require('dbgate-tools'),
};
function requireEngineDriver(engine) {
const [shortName, packageName] = engine.split('@');
const plugin = require(`../../../plugins/${packageName}/src/frontend/index`);
const driver = plugin.drivers?.find(x => x.engine == engine);
if (driver) {
return driver;
}
throw new Error(`Could not find engine driver ${engine}`);
}
Cypress.on('uncaught:exception', (err, runnable) => {
// if the error message matches the one about WorkerGlobalScope importScripts
if (err.message.includes("Failed to execute 'importScripts' on 'WorkerGlobalScope'")) {
// return false to let Cypress know we intentionally want to ignore this error
return false;
}
// otherwise let Cypress throw the error
});
beforeEach(() => {
cy.visit('http://localhost:3000');
cy.viewport(1250, 900);
});
function multiTest(testName, testDefinition) {
if (localconfig.mysql) {
it(testName + ' MySQL', () => testDefinition('MySql-connection', 'mysql@dbgate-plugin-mysql'));
}
if (localconfig.postgres) {
it(testName + ' Postgres', () => testDefinition('Postgres-connection', 'postgres@dbgate-plugin-postgres'));
}
if (localconfig.mssql) {
it(testName + ' Mssql', () => testDefinition('Mssql-connection', 'mssql@dbgate-plugin-mssql'));
}
if (localconfig.oracle) {
it(testName + ' Oracle', () =>
testDefinition('Oracle-connection', 'oracle@dbgate-plugin-oracle', {
databaseName: 'C##MY_GUITAR_SHOP',
implicitTransactions: true,
})
);
}
if (localconfig.sqlite) {
it(testName + ' Sqlite', () => testDefinition('Sqlite-connection', 'sqlite@dbgate-plugin-sqlite'));
}
}
describe('Multi-sql tests', () => {
multiTest('Transactions', (connectionName, engine, options = {}) => {
const driver = requireEngineDriver(engine);
const databaseName = options.databaseName ?? 'my_guitar_shop';
const implicitTransactions = options.implicitTransactions ?? false;
cy.contains(connectionName).click();
cy.contains(databaseName).click();
cy.testid('TabsPanel_buttonNewQuery').click();
cy.wait(1000);
cy.get('body').type(
formatQueryWithoutParams(driver, "INSERT INTO ~categories (~category_id, ~category_name) VALUES (5, 'test');")
);
// rollback
if (!implicitTransactions) {
cy.testid('QueryTab_beginTransactionButton').click();
cy.contains('Begin Transaction finished');
}
cy.testid('QueryTab_executeButton').click();
cy.contains('Query execution finished');
cy.testid('QueryTab_rollbackTransactionButton').click();
cy.contains('Rollback Transaction finished');
// should contain 4 rows
cy.testid('SqlObjectList_container').contains('categories').click();
cy.contains('Guitars').click();
cy.testid('TableDataTab_refreshGrid').click();
cy.contains('Rows: 4');
// commit
cy.contains('Query #1').click();
if (!implicitTransactions) {
cy.testid('QueryTab_beginTransactionButton').click();
cy.contains('Begin Transaction finished');
}
cy.testid('QueryTab_executeButton').click();
cy.contains('Query execution finished');
cy.testid('QueryTab_commitTransactionButton').click();
cy.contains('Commit Transaction finished');
// should contain 5 rows
cy.testid('SqlObjectList_container').contains('categories').click();
cy.contains('Guitars').click();
cy.testid('TableDataTab_refreshGrid').click();
cy.contains('Rows: 5');
});
});
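For orientation only: with a hypothetical .localconfig enabling just MySQL and SQLite, the multiTest call above would register two Cypress specs, one per enabled engine.

// module.exports = { mysql: true, postgres: false, mssql: false, oracle: false, sqlite: true };
// -> it('Transactions MySQL', ...)  runs against 'mysql@dbgate-plugin-mysql'
// -> it('Transactions Sqlite', ...) runs against 'sqlite@dbgate-plugin-sqlite'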

View File

@@ -0,0 +1,13 @@
{"__isStreamHeader":true,"pureName":"addresses","tableRowCount":"12","tableEngine":"InnoDB","objectComment":"","modifyDate":"2025-02-03 02:56:32","objectId":"addresses","contentHash":"2025-02-03 02:56:32","columns":[{"notNull":true,"autoIncrement":true,"columnName":"address_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"customer_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"line1","columnComment":"","dataType":"varchar(60)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":false,"autoIncrement":false,"columnName":"line2","columnComment":"","dataType":"varchar(60)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"city","columnComment":"","dataType":"varchar(40)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"state","columnComment":"","dataType":"varchar(2)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"zip_code","columnComment":"","dataType":"varchar(10)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"phone","columnComment":"","dataType":"varchar(12)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"disabled","columnComment":"","dataType":"tinyint","defaultValue":"0","isUnsigned":false,"isZerofill":false}],"primaryKey":{"constraintName":"PRIMARY","pureName":"addresses","constraintType":"primaryKey","columns":[{"columnName":"address_id"}]},"foreignKeys":[{"constraintName":"addresses_fk_customers","constraintType":"foreignKey","pureName":"addresses","refTableName":"customers","updateAction":"NO ACTION","deleteAction":"NO ACTION","columns":[{"columnName":"customer_id","refColumnName":"customer_id"}]}],"indexes":[{"constraintName":"addresses_fk_customers","indexType":"BTREE","isUnique":false,"columns":[{"columnName":"customer_id","isDescending":0}]}],"uniques":[],"engine":"mysql@dbgate-plugin-mysql"}
{"address_id":1,"customer_id":1,"line1":"100 East Ridgewood Ave.","line2":"","city":"Paramus","state":"NJ","zip_code":"07652","phone":"201-653-4472","disabled":0}
{"address_id":2,"customer_id":1,"line1":"21 Rosewood Rd.","line2":"","city":"Woodcliff Lake","state":"NJ","zip_code":"07677","phone":"201-653-4472","disabled":0}
{"address_id":3,"customer_id":2,"line1":"16285 Wendell St.","line2":"","city":"Omaha","state":"NE","zip_code":"68135","phone":"402-896-2576","disabled":0}
{"address_id":4,"customer_id":3,"line1":"19270 NW Cornell Rd.","line2":"","city":"Beaverton","state":"OR","zip_code":"97006","phone":"503-654-1291","disabled":0}
{"address_id":5,"customer_id":4,"line1":"186 Vermont St.","line2":"Apt. 2","city":"San Francisco","state":"CA","zip_code":"94110","phone":"415-292-6651","disabled":0}
{"address_id":6,"customer_id":4,"line1":"1374 46th Ave.","line2":"","city":"San Francisco","state":"CA","zip_code":"94129","phone":"415-292-6651","disabled":0}
{"address_id":7,"customer_id":5,"line1":"6982 Palm Ave.","line2":"","city":"Fresno","state":"CA","zip_code":"93711","phone":"559-431-2398","disabled":0}
{"address_id":8,"customer_id":6,"line1":"23 Mountain View St.","line2":"","city":"Denver","state":"CO","zip_code":"80208","phone":"303-912-3852","disabled":0}
{"address_id":9,"customer_id":7,"line1":"7361 N. 41st St.","line2":"Apt. B","city":"New York","state":"NY","zip_code":"10012","phone":"212-335-2093","disabled":0}
{"address_id":10,"customer_id":7,"line1":"3829 Broadway Ave.","line2":"Suite 2","city":"New York","state":"NY","zip_code":"10012","phone":"212-239-1208","disabled":0}
{"address_id":11,"customer_id":8,"line1":"2381 Buena Vista St.","line2":"","city":"Los Angeles","state":"CA","zip_code":"90023","phone":"213-772-5033","disabled":0}
{"address_id":12,"customer_id":8,"line1":"291 W. Hollywood Blvd.","line2":"","city":"Los Angeles","state":"CA","zip_code":"90024","phone":"213-391-2938","disabled":0}

View File

@@ -0,0 +1,46 @@
name: addresses
columns:
- name: address_id
type: int
default: null
autoIncrement: true
notNull: true
- name: customer_id
type: int
default: null
notNull: true
references: customers
- name: line1
type: varchar(60)
default: null
notNull: true
- name: line2
type: varchar(60)
default: null
- name: city
type: varchar(40)
default: null
notNull: true
- name: state
type: varchar(2)
default: null
notNull: true
- name: zip_code
type: varchar(10)
default: null
notNull: true
- name: phone
type: varchar(12)
default: null
notNull: true
- name: disabled
type: int
default: 0
notNull: true
primaryKey:
- address_id
indexes:
- name: addresses_fk_customers
unique: false
columns:
- customer_id

View File

@@ -0,0 +1,4 @@
{"__isStreamHeader":true,"pureName":"administrators","tableRowCount":"3","tableEngine":"InnoDB","objectComment":"","modifyDate":"2025-02-03 02:56:33","objectId":"administrators","contentHash":"2025-02-03 02:56:33","columns":[{"notNull":true,"autoIncrement":true,"columnName":"admin_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"email_address","columnComment":"","dataType":"varchar(255)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"password","columnComment":"","dataType":"varchar(255)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"first_name","columnComment":"","dataType":"varchar(255)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"last_name","columnComment":"","dataType":"varchar(255)","defaultValue":null,"isUnsigned":false,"isZerofill":false}],"primaryKey":{"constraintName":"PRIMARY","pureName":"administrators","constraintType":"primaryKey","columns":[{"columnName":"admin_id"}]},"foreignKeys":[],"indexes":[],"uniques":[],"engine":"mysql@dbgate-plugin-mysql"}
{"admin_id":1,"email_address":"admin@myguitarshop.com","password":"6a718fbd768c2378b511f8249b54897f940e9022","first_name":"Admin","last_name":"User"}
{"admin_id":2,"email_address":"joel@murach.com","password":"971e95957d3b74d70d79c20c94e9cd91b85f7aae","first_name":"Joel","last_name":"Murach"}
{"admin_id":3,"email_address":"mike@murach.com","password":"3f2975c819cefc686282456aeae3a137bf896ee8","first_name":"Mike","last_name":"Murach"}

View File

@@ -0,0 +1,25 @@
name: administrators
columns:
- name: admin_id
type: int
default: null
autoIncrement: true
notNull: true
- name: email_address
type: varchar(255)
default: null
notNull: true
- name: password
type: varchar(255)
default: null
notNull: true
- name: first_name
type: varchar(255)
default: null
notNull: true
- name: last_name
type: varchar(255)
default: null
notNull: true
primaryKey:
- admin_id

View File

@@ -0,0 +1,5 @@
{"__isStreamHeader":true,"pureName":"categories","tableRowCount":"4","tableEngine":"InnoDB","objectComment":"","modifyDate":"2025-02-03 02:56:32","objectId":"categories","contentHash":"2025-02-03 02:56:32","columns":[{"notNull":true,"autoIncrement":true,"columnName":"category_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"category_name","columnComment":"","dataType":"varchar(255)","defaultValue":null,"isUnsigned":false,"isZerofill":false}],"primaryKey":{"constraintName":"PRIMARY","pureName":"categories","constraintType":"primaryKey","columns":[{"columnName":"category_id"}]},"foreignKeys":[],"indexes":[],"uniques":[{"constraintName":"category_name","columns":[{"columnName":"category_name"}]}],"engine":"mysql@dbgate-plugin-mysql"}
{"category_id":2,"category_name":"Basses"}
{"category_id":3,"category_name":"Drums"}
{"category_id":1,"category_name":"Guitars"}
{"category_id":4,"category_name":"Keyboards"}

View File

@@ -0,0 +1,12 @@
name: categories
columns:
- name: category_id
type: int
default: null
notNull: true
- name: category_name
type: varchar(255)
default: null
notNull: true
primaryKey:
- category_id

View File

@@ -0,0 +1,9 @@
{"__isStreamHeader":true,"pureName":"customers","tableRowCount":"8","tableEngine":"InnoDB","objectComment":"","modifyDate":"2025-02-03 02:56:32","objectId":"customers","contentHash":"2025-02-03 02:56:32","columns":[{"notNull":true,"autoIncrement":true,"columnName":"customer_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"email_address","columnComment":"","dataType":"varchar(255)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"password","columnComment":"","dataType":"varchar(60)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"first_name","columnComment":"","dataType":"varchar(60)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"last_name","columnComment":"","dataType":"varchar(60)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":false,"autoIncrement":false,"columnName":"shipping_address_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":false,"autoIncrement":false,"columnName":"billing_address_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false}],"primaryKey":{"constraintName":"PRIMARY","pureName":"customers","constraintType":"primaryKey","columns":[{"columnName":"customer_id"}]},"foreignKeys":[],"indexes":[],"uniques":[{"constraintName":"email_address","columns":[{"columnName":"email_address"}]}],"engine":"mysql@dbgate-plugin-mysql"}
{"customer_id":1,"email_address":"allan.sherwood@yahoo.com","password":"650215acec746f0e32bdfff387439eefc1358737","first_name":"Allan","last_name":"Sherwood","shipping_address_id":1,"billing_address_id":2}
{"customer_id":2,"email_address":"barryz@gmail.com","password":"3f563468d42a448cb1e56924529f6e7bbe529cc7","first_name":"Barry","last_name":"Zimmer","shipping_address_id":3,"billing_address_id":3}
{"customer_id":3,"email_address":"christineb@solarone.com","password":"ed19f5c0833094026a2f1e9e6f08a35d26037066","first_name":"Christine","last_name":"Brown","shipping_address_id":4,"billing_address_id":4}
{"customer_id":4,"email_address":"david.goldstein@hotmail.com","password":"b444ac06613fc8d63795be9ad0beaf55011936ac","first_name":"David","last_name":"Goldstein","shipping_address_id":5,"billing_address_id":6}
{"customer_id":5,"email_address":"erinv@gmail.com","password":"109f4b3c50d7b0df729d299bc6f8e9ef9066971f","first_name":"Erin","last_name":"Valentino","shipping_address_id":7,"billing_address_id":7}
{"customer_id":6,"email_address":"frankwilson@sbcglobal.net","password":"3ebfa301dc59196f18593c45e519287a23297589","first_name":"Frank Lee","last_name":"Wilson","shipping_address_id":8,"billing_address_id":8}
{"customer_id":7,"email_address":"gary_hernandez@yahoo.com","password":"1ff2b3704aede04eecb51e50ca698efd50a1379b","first_name":"Gary","last_name":"Hernandez","shipping_address_id":9,"billing_address_id":10}
{"customer_id":8,"email_address":"heatheresway@mac.com","password":"911ddc3b8f9a13b5499b6bc4638a2b4f3f68bf23","first_name":"Heather","last_name":"Esway","shipping_address_id":11,"billing_address_id":12}

View File

@@ -0,0 +1,31 @@
name: customers
columns:
- name: customer_id
type: int
default: null
autoIncrement: true
notNull: true
- name: email_address
type: varchar(255)
default: null
notNull: true
- name: password
type: varchar(60)
default: null
notNull: true
- name: first_name
type: varchar(60)
default: null
notNull: true
- name: last_name
type: varchar(60)
default: null
notNull: true
- name: shipping_address_id
type: int
default: null
- name: billing_address_id
type: int
default: null
primaryKey:
- customer_id

View File

@@ -0,0 +1,13 @@
{"__isStreamHeader":true,"pureName":"order_items","tableRowCount":"12","tableEngine":"InnoDB","objectComment":"","modifyDate":"2025-02-03 02:56:33","objectId":"order_items","contentHash":"2025-02-03 02:56:33","columns":[{"notNull":true,"autoIncrement":true,"columnName":"item_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"order_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"product_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"item_price","columnComment":"","dataType":"decimal(10,2)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"discount_amount","columnComment":"","dataType":"decimal(10,2)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"quantity","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false}],"primaryKey":{"constraintName":"PRIMARY","pureName":"order_items","constraintType":"primaryKey","columns":[{"columnName":"item_id"}]},"foreignKeys":[{"constraintName":"items_fk_products","constraintType":"foreignKey","pureName":"order_items","refTableName":"products","updateAction":"NO ACTION","deleteAction":"NO ACTION","columns":[{"columnName":"product_id","refColumnName":"product_id"}]},{"constraintName":"items_fk_orders","constraintType":"foreignKey","pureName":"order_items","refTableName":"orders","updateAction":"NO ACTION","deleteAction":"NO ACTION","columns":[{"columnName":"order_id","refColumnName":"order_id"}]}],"indexes":[{"constraintName":"items_fk_orders","indexType":"BTREE","isUnique":false,"columns":[{"columnName":"order_id","isDescending":0}]},{"constraintName":"items_fk_products","indexType":"BTREE","isUnique":false,"columns":[{"columnName":"product_id","isDescending":0}]}],"uniques":[],"engine":"mysql@dbgate-plugin-mysql"}
{"item_id":1,"order_id":1,"product_id":2,"item_price":"1199.00","discount_amount":"359.70","quantity":1}
{"item_id":2,"order_id":2,"product_id":4,"item_price":"489.99","discount_amount":"186.20","quantity":1}
{"item_id":3,"order_id":3,"product_id":3,"item_price":"2517.00","discount_amount":"1308.84","quantity":1}
{"item_id":4,"order_id":3,"product_id":6,"item_price":"415.00","discount_amount":"161.85","quantity":1}
{"item_id":5,"order_id":4,"product_id":2,"item_price":"1199.00","discount_amount":"359.70","quantity":2}
{"item_id":6,"order_id":5,"product_id":5,"item_price":"299.00","discount_amount":"0.00","quantity":1}
{"item_id":7,"order_id":6,"product_id":5,"item_price":"299.00","discount_amount":"0.00","quantity":1}
{"item_id":8,"order_id":7,"product_id":1,"item_price":"699.00","discount_amount":"209.70","quantity":1}
{"item_id":9,"order_id":7,"product_id":7,"item_price":"799.99","discount_amount":"240.00","quantity":1}
{"item_id":10,"order_id":7,"product_id":9,"item_price":"699.99","discount_amount":"210.00","quantity":1}
{"item_id":11,"order_id":8,"product_id":10,"item_price":"799.99","discount_amount":"120.00","quantity":1}
{"item_id":12,"order_id":9,"product_id":1,"item_price":"699.00","discount_amount":"209.70","quantity":1}

View File

@@ -0,0 +1,40 @@
name: order_items
columns:
- name: item_id
type: int
default: null
autoIncrement: true
notNull: true
- name: order_id
type: int
default: null
notNull: true
references: orders
- name: product_id
type: int
default: null
notNull: true
references: products
- name: item_price
type: decimal(10,2)
default: null
notNull: true
- name: discount_amount
type: decimal(10,2)
default: null
notNull: true
- name: quantity
type: int
default: null
notNull: true
primaryKey:
- item_id
indexes:
- name: items_fk_orders
unique: false
columns:
- order_id
- name: items_fk_products
unique: false
columns:
- product_id

View File

@@ -0,0 +1,10 @@
{"__isStreamHeader":true,"pureName":"orders","tableRowCount":"9","tableEngine":"InnoDB","objectComment":"","modifyDate":"2025-02-03T02:56:32","objectId":"orders","contentHash":"2025-02-03 02:56:32","columns":[{"notNull":true,"autoIncrement":true,"columnName":"order_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"customer_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"order_date","columnComment":"","dataType":"datetime","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"ship_amount","columnComment":"","dataType":"decimal(10,2)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"tax_amount","columnComment":"","dataType":"decimal(10,2)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":false,"autoIncrement":false,"columnName":"ship_date","columnComment":"","dataType":"datetime","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"ship_address_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"card_type","columnComment":"","dataType":"varchar(50)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"card_number","columnComment":"","dataType":"char(16)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"card_expires","columnComment":"","dataType":"char(7)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"billing_address_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false}],"primaryKey":{"constraintName":"PRIMARY","pureName":"orders","constraintType":"primaryKey","columns":[{"columnName":"order_id"}]},"foreignKeys":[{"constraintName":"orders_fk_customers","constraintType":"foreignKey","pureName":"orders","refTableName":"customers","updateAction":"NO ACTION","deleteAction":"NO ACTION","columns":[{"columnName":"customer_id","refColumnName":"customer_id"}]}],"indexes":[{"constraintName":"orders_fk_customers","indexType":"BTREE","isUnique":false,"columns":[{"columnName":"customer_id","isDescending":0}]}],"uniques":[],"engine":"mysql@dbgate-plugin-mysql"}
{"order_id":1,"customer_id":1,"order_date":"2018-03-28T09:40:28","ship_amount":"5.00","tax_amount":"32.32","ship_date":"2018-03-30T15:32:51","ship_address_id":1,"card_type":"Visa","card_number":"4111111111111111","card_expires":"04/2020","billing_address_id":2}
{"order_id":2,"customer_id":2,"order_date":"2018-03-28T11:23:20","ship_amount":"5.00","tax_amount":"0.00","ship_date":"2018-03-29T12:52:14","ship_address_id":3,"card_type":"Visa","card_number":"4012888888881881","card_expires":"08/2019","billing_address_id":3}
{"order_id":3,"customer_id":1,"order_date":"2018-03-29T09:44:58","ship_amount":"10.00","tax_amount":"89.92","ship_date":"2018-03-31T09:11:41","ship_address_id":1,"card_type":"Visa","card_number":"4111111111111111","card_expires":"04/2017","billing_address_id":2}
{"order_id":4,"customer_id":3,"order_date":"2018-03-30T15:22:31","ship_amount":"5.00","tax_amount":"0.00","ship_date":"2018-04-03T16:32:21","ship_address_id":4,"card_type":"American Express","card_number":"378282246310005","card_expires":"04/2016","billing_address_id":4}
{"order_id":5,"customer_id":4,"order_date":"2018-03-31T05:43:11","ship_amount":"5.00","tax_amount":"0.00","ship_date":"2018-04-02T14:21:12","ship_address_id":5,"card_type":"Visa","card_number":"4111111111111111","card_expires":"04/2019","billing_address_id":6}
{"order_id":6,"customer_id":5,"order_date":"2018-03-31T18:37:22","ship_amount":"5.00","tax_amount":"0.00","ship_date":null,"ship_address_id":7,"card_type":"Discover","card_number":"6011111111111117","card_expires":"04/2019","billing_address_id":7}
{"order_id":7,"customer_id":6,"order_date":"2018-04-01T23:11:12","ship_amount":"15.00","tax_amount":"0.00","ship_date":"2018-04-03T10:21:35","ship_address_id":8,"card_type":"MasterCard","card_number":"5555555555554444","card_expires":"04/2019","billing_address_id":8}
{"order_id":8,"customer_id":7,"order_date":"2018-04-02T11:26:38","ship_amount":"5.00","tax_amount":"0.00","ship_date":null,"ship_address_id":9,"card_type":"Visa","card_number":"4012888888881881","card_expires":"04/2019","billing_address_id":10}
{"order_id":9,"customer_id":4,"order_date":"2018-04-03T12:22:31","ship_amount":"5.00","tax_amount":"0.00","ship_date":null,"ship_address_id":5,"card_type":"Visa","card_number":"4111111111111111","card_expires":"04/2019","billing_address_id":6}

View File

@@ -0,0 +1,54 @@
name: orders
columns:
- name: order_id
type: int
default: null
autoIncrement: true
notNull: true
- name: customer_id
type: int
default: null
notNull: true
references: customers
- name: order_date
type: datetime
default: null
notNull: true
- name: ship_amount
type: decimal(10,2)
default: null
notNull: true
- name: tax_amount
type: decimal(10,2)
default: null
notNull: true
- name: ship_date
type: datetime
default: null
- name: ship_address_id
type: int
default: null
notNull: true
- name: card_type
type: varchar(50)
default: null
notNull: true
- name: card_number
type: char(16)
default: null
notNull: true
- name: card_expires
type: char(7)
default: null
notNull: true
- name: billing_address_id
type: int
default: null
notNull: true
primaryKey:
- order_id
indexes:
- name: orders_fk_customers
unique: false
columns:
- customer_id

View File

@@ -0,0 +1,11 @@
{"__isStreamHeader":true,"pureName":"products","tableRowCount":"10","tableEngine":"InnoDB","objectComment":"","modifyDate":"2025-02-03 02:56:32","objectId":"products","contentHash":"2025-02-03 02:56:32","columns":[{"notNull":true,"autoIncrement":true,"columnName":"product_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"category_id","columnComment":"","dataType":"int","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"product_code","columnComment":"","dataType":"varchar(10)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"product_name","columnComment":"","dataType":"varchar(255)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"description","columnComment":"","dataType":"text","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"list_price","columnComment":"","dataType":"decimal(10,2)","defaultValue":null,"isUnsigned":false,"isZerofill":false},{"notNull":true,"autoIncrement":false,"columnName":"discount_percent","columnComment":"","dataType":"decimal(10,2)","defaultValue":"0.00","isUnsigned":false,"isZerofill":false},{"notNull":false,"autoIncrement":false,"columnName":"date_added","columnComment":"","dataType":"datetime","defaultValue":null,"isUnsigned":false,"isZerofill":false}],"primaryKey":{"constraintName":"PRIMARY","pureName":"products","constraintType":"primaryKey","columns":[{"columnName":"product_id"}]},"foreignKeys":[{"constraintName":"products_fk_categories","constraintType":"foreignKey","pureName":"products","refTableName":"categories","updateAction":"NO ACTION","deleteAction":"NO ACTION","columns":[{"columnName":"category_id","refColumnName":"category_id"}]}],"indexes":[{"constraintName":"products_fk_categories","indexType":"BTREE","isUnique":false,"columns":[{"columnName":"category_id","isDescending":0}]}],"uniques":[{"constraintName":"product_code","columns":[{"columnName":"product_code"}]}],"engine":"mysql@dbgate-plugin-mysql"}
{"product_id":1,"category_id":1,"product_code":"strat","product_name":"Fender Stratocaster","description":"The Fender Stratocaster is the electric guitar design that changed the world. New features include a tinted neck, parchment pickguard and control knobs, and a '70s-style logo. Includes select alder body, 21-fret maple neck with your choice of a rosewood or maple fretboard, 3 single-coil pickups, vintage-style tremolo, and die-cast tuning keys. This guitar features a thicker bridge block for increased sustain and a more stable point of contact with the strings. At this low price, why play anything but the real thing?\r\n\r\nFeatures:\r\n\r\n* New features:\r\n* Thicker bridge block\r\n* 3-ply parchment pick guard\r\n* Tinted neck","list_price":"699.00","discount_percent":"30.00","date_added":"2017-10-30T09:32:40"}
{"product_id":2,"category_id":1,"product_code":"les_paul","product_name":"Gibson Les Paul","description":"This Les Paul guitar offers a carved top and humbucking pickups. It has a simple yet elegant design. Cutting-yet-rich tone?the hallmark of the Les Paul?pours out of the 490R and 498T Alnico II magnet humbucker pickups, which are mounted on a carved maple top with a mahogany back. The faded finish models are equipped with BurstBucker Pro pickups and a mahogany top. This guitar includes a Gibson hardshell case (Faded and satin finish models come with a gig bag) and a limited lifetime warranty.\r\n\r\nFeatures:\r\n\r\n* Carved maple top and mahogany back (Mahogany top on faded finish models)\r\n* Mahogany neck, '59 Rounded Les Paul\r\n* Rosewood fingerboard (Ebony on Alpine white)\r\n* Tune-O-Matic bridge with stopbar\r\n* Chrome or gold hardware\r\n* 490R and 498T Alnico 2 magnet humbucker pickups (BurstBucker Pro on faded finish models)\r\n* 2 volume and 2 tone knobs, 3-way switch","list_price":"1199.00","discount_percent":"30.00","date_added":"2017-12-05T16:33:13"}
{"product_id":3,"category_id":1,"product_code":"sg","product_name":"Gibson SG","description":"This Gibson SG electric guitar takes the best of the '62 original and adds the longer and sturdier neck joint of the late '60s models. All the classic features you'd expect from a historic guitar. Hot humbuckers go from rich, sweet lightning to warm, tingling waves of sustain. A silky-fast rosewood fretboard plays like a dream. The original-style beveled mahogany body looks like a million bucks. Plus, Tune-O-Matic bridge and chrome hardware. Limited lifetime warranty. Includes hardshell case.\r\n\r\nFeatures:\r\n\r\n* Double-cutaway beveled mahogany body\r\n* Set mahogany neck with rounded '50s profile\r\n* Bound rosewood fingerboard with trapezoid inlays\r\n* Tune-O-Matic bridge with stopbar tailpiece\r\n* Chrome hardware\r\n* 490R humbucker in the neck position\r\n* 498T humbucker in the bridge position\r\n* 2 volume knobs, 2 tone knobs, 3-way switch\r\n* 24-3/4\" scale","list_price":"2517.00","discount_percent":"52.00","date_added":"2018-02-04T11:04:31"}
{"product_id":4,"category_id":1,"product_code":"fg700s","product_name":"Yamaha FG700S","description":"The Yamaha FG700S solid top acoustic guitar has the ultimate combo for projection and pure tone. The expertly braced spruce top speaks clearly atop the rosewood body. It has a rosewood fingerboard, rosewood bridge, die-cast tuners, body and neck binding, and a tortoise pickguard.\r\n\r\nFeatures:\r\n\r\n* Solid Sitka spruce top\r\n* Rosewood back and sides\r\n* Rosewood fingerboard\r\n* Rosewood bridge\r\n* White/black body and neck binding\r\n* Die-cast tuners\r\n* Tortoise pickguard\r\n* Limited lifetime warranty","list_price":"489.99","discount_percent":"38.00","date_added":"2018-06-01T11:12:59"}
{"product_id":5,"category_id":1,"product_code":"washburn","product_name":"Washburn D10S","description":"The Washburn D10S acoustic guitar is superbly crafted with a solid spruce top and mahogany back and sides for exceptional tone. A mahogany neck and rosewood fingerboard make fretwork a breeze, while chrome Grover-style machines keep you perfectly tuned. The Washburn D10S comes with a limited lifetime warranty.\r\n\r\nFeatures:\r\n\r\n * Spruce top\r\n * Mahogany back, sides\r\n * Mahogany neck Rosewood fingerboard\r\n * Chrome Grover-style machines","list_price":"299.00","discount_percent":"0.00","date_added":"2018-07-30T13:58:35"}
{"product_id":6,"category_id":1,"product_code":"rodriguez","product_name":"Rodriguez Caballero 11","description":"Featuring a carefully chosen, solid Canadian cedar top and laminated bubinga back and sides, the Caballero 11 classical guitar is a beauty to behold and play. The headstock and fretboard are of Indian rosewood. Nickel-plated tuners and Silver-plated frets are installed to last a lifetime. The body binding and wood rosette are exquisite.\r\n\r\nThe Rodriguez Guitar is hand crafted and glued to create precise balances. From the invisible careful sanding, even inside the body, that ensures the finished instrument's purity of tone, to the beautifully unique rosette inlays around the soundhole and on the back of the neck, each guitar is a credit to its luthier and worthy of being handed down from one generation to another.\r\n\r\nThe tone, resonance and beauty of fine guitars are all dependent upon the wood from which they are made. The wood used in the construction of Rodriguez guitars is carefully chosen and aged to guarantee the highest quality. No wood is purchased before the tree has been cut down, and at least 2 years must elapse before the tree is turned into lumber. The wood has to be well cut from the log. The grain must be close and absolutely vertical. The shop is totally free from humidity.","list_price":"415.00","discount_percent":"39.00","date_added":"2018-07-30T14:12:41"}
{"product_id":7,"category_id":2,"product_code":"precision","product_name":"Fender Precision","description":"The Fender Precision bass guitar delivers the sound, look, and feel today's bass players demand. This bass features that classic P-Bass old-school design. Each Precision bass boasts contemporary features and refinements that make it an excellent value. Featuring an alder body and a split single-coil pickup, this classic electric bass guitar lives up to its Fender legacy.\r\n\r\nFeatures:\r\n\r\n* Body: Alder\r\n* Neck: Maple, modern C shape, tinted satin urethane finish\r\n* Fingerboard: Rosewood or maple (depending on color)\r\n* 9-1/2\" Radius (241 mm)\r\n* Frets: 20 Medium-jumbo frets\r\n* Pickups: 1 Standard Precision Bass split single-coil pickup (Mid)\r\n* Controls: Volume, Tone\r\n* Bridge: Standard vintage style with single groove saddles\r\n* Machine heads: Standard\r\n* Hardware: Chrome\r\n* Pickguard: 3-Ply Parchment\r\n* Scale Length: 34\" (864 mm)\r\n* Width at Nut: 1-5/8\" (41.3 mm)\r\n* Unique features: Knurled chrome P Bass knobs, Fender transition logo","list_price":"799.99","discount_percent":"30.00","date_added":"2018-06-01T11:29:35"}
{"product_id":8,"category_id":2,"product_code":"hofner","product_name":"Hofner Icon","description":"With authentic details inspired by the original, the Hofner Icon makes the legendary violin bass available to the rest of us. Don't get the idea that this a just a \"nowhere man\" look-alike. This quality instrument features a real spruce top and beautiful flamed maple back and sides. The semi-hollow body and set neck will give you the warm, round tone you expect from the violin bass.\r\n\r\nFeatures:\r\n\r\n* Authentic details inspired by the original\r\n* Spruce top\r\n* Flamed maple back and sides\r\n* Set neck\r\n* Rosewood fretboard\r\n* 30\" scale\r\n* 22 frets\r\n* Dot inlay","list_price":"499.99","discount_percent":"25.00","date_added":"2018-07-30T14:18:33"}
{"product_id":9,"category_id":3,"product_code":"ludwig","product_name":"Ludwig 5-piece Drum Set with Cymbals","description":"This product includes a Ludwig 5-piece drum set and a Zildjian starter cymbal pack.\r\n\r\nWith the Ludwig drum set, you get famous Ludwig quality. This set features a bass drum, two toms, a floor tom, and a snare?each with a wrapped finish. Drum hardware includes LA214FP bass pedal, snare stand, cymbal stand, hi-hat stand, and a throne.\r\n\r\nWith the Zildjian cymbal pack, you get a 14\" crash, 18\" crash/ride, and a pair of 13\" hi-hats. Sound grooves and round hammer strikes in a simple circular pattern on the top surface of these cymbals magnify the basic sound of the distinctive alloy.\r\n\r\nFeatures:\r\n\r\n* Famous Ludwig quality\r\n* Wrapped finishes\r\n* 22\" x 16\" kick drum\r\n* 12\" x 10\" and 13\" x 11\" toms\r\n* 16\" x 16\" floor tom\r\n* 14\" x 6-1/2\" snare drum kick pedal\r\n* Snare stand\r\n* Straight cymbal stand hi-hat stand\r\n* FREE throne","list_price":"699.99","discount_percent":"30.00","date_added":"2018-07-30T12:46:40"}
{"product_id":10,"category_id":3,"product_code":"tama","product_name":"Tama 5-Piece Drum Set with Cymbals","description":"The Tama 5-piece Drum Set is the most affordable Tama drum kit ever to incorporate so many high-end features.\r\n\r\nWith over 40 years of experience, Tama knows what drummers really want. Which is why, no matter how long you've been playing the drums, no matter what budget you have to work with, Tama has the set you need, want, and can afford. Every aspect of the modern drum kit was exhaustively examined and reexamined and then improved before it was accepted as part of the Tama design. Which is why, if you start playing Tama now as a beginner, you'll still enjoy playing it when you've achieved pro-status. That's how good these groundbreaking new drums are.\r\n\r\nOnly Tama comes with a complete set of genuine Meinl HCS cymbals. These high-quality brass cymbals are made in Germany and are sonically matched so they sound great together. They are even lathed for a more refined tonal character. The set includes 14\" hi-hats, 16\" crash cymbal, and a 20\" ride cymbal.\r\n\r\nFeatures:\r\n\r\n* 100% poplar 6-ply/7.5mm shells\r\n* Precise bearing edges\r\n* 100% glued finishes\r\n* Original small lugs\r\n* Drum heads\r\n* Accu-tune bass drum hoops\r\n* Spur brackets\r\n* Tom holder\r\n* Tom brackets","list_price":"799.99","discount_percent":"15.00","date_added":"2018-07-30T13:14:15"}

View File

@@ -0,0 +1,42 @@
name: products
columns:
- name: product_id
type: int
default: null
autoIncrement: true
notNull: true
- name: category_id
type: int
default: null
notNull: true
references: categories
- name: product_code
type: varchar(10)
default: null
notNull: true
- name: product_name
type: varchar(255)
default: null
notNull: true
- name: description
type: text
default: null
notNull: true
- name: list_price
type: decimal(10,2)
default: null
notNull: true
- name: discount_percent
type: decimal(10,2)
default: '0.00'
notNull: true
- name: date_added
type: datetime
default: null
primaryKey:
- product_id
indexes:
- name: products_fk_categories
unique: false
columns:
- category_id

View File

@@ -22,7 +22,7 @@ services:
restart: always
ports:
- 16005:3306
- "16012:22"
- "16015:22"
mysql-ssh-keyfile:
build: containers/mysql-ssh-keyfile
@@ -49,3 +49,20 @@ services:
image: redis
ports:
- 16011:6379
mssql:
image: mcr.microsoft.com/mssql/server
restart: always
ports:
- 16014:1433
environment:
- ACCEPT_EULA=Y
- SA_PASSWORD=Pwd2020Db
- MSSQL_PID=Express
oracle:
image: gvenzl/oracle-xe:21-slim
environment:
ORACLE_PASSWORD: Pwd2020Db
ports:
- 16013:1521

View File

@@ -1,35 +0,0 @@
const path = require('path');
const os = require('os');
const fs = require('fs');
const baseDir = path.join(os.homedir(), '.dbgate');
// function createTimeStamp() {
// const now = new Date();
// const year = now.getFullYear();
// const month = String(now.getMonth() + 1).padStart(2, '0'); // month is 0-indexed
// const day = String(now.getDate()).padStart(2, '0');
// const hours = String(now.getHours()).padStart(2, '0');
// const minutes = String(now.getMinutes()).padStart(2, '0');
// const seconds = String(now.getSeconds()).padStart(2, '0');
// // Assemble the date and time into the file name
// const ts = `${year}-${month}-${day}_${hours}-${minutes}-${seconds}`;
// return ts;
// }
function clearTestingData() {
if (fs.existsSync(path.join(baseDir, 'connections-e2etests.jsonl'))) {
fs.unlinkSync(path.join(baseDir, 'connections-e2etests.jsonl'));
}
if (fs.existsSync(path.join(baseDir, 'files-e2etests'))) {
fs.rmdirSync(path.join(baseDir, 'files-e2etests'), { recursive: true });
}
if (fs.existsSync(path.join(baseDir, 'archive-e2etests'))) {
fs.rmdirSync(path.join(baseDir, 'archive-e2etests'), { recursive: true });
}
}
module.exports = {
clearTestingData,
};

e2e-tests/env/multi-sql/.env vendored Normal file
View File

@@ -0,0 +1,36 @@
CONNECTIONS=mysql,postgres,mssql,oracle,sqlite
LABEL_mysql=MySql-connection
SERVER_mysql=localhost
USER_mysql=root
PASSWORD_mysql=Pwd2020Db
PORT_mysql=16004
ENGINE_mysql=mysql@dbgate-plugin-mysql
DBCONFIG_mysql=[{"name":"MyChinook","connectionColor":"cyan"}]
LABEL_postgres=Postgres-connection
SERVER_postgres=localhost
USER_postgres=postgres
PASSWORD_postgres=Pwd2020Db
PORT_postgres=16000
ENGINE_postgres=postgres@dbgate-plugin-postgres
DBCONFIG_postgres=[{"name":"PgChinook","connectionColor":"red"}]
LABEL_oracle=Oracle-connection
SERVER_oracle=localhost
USER_oracle=system
PASSWORD_oracle=Pwd2020Db
PORT_oracle=16013
ENGINE_oracle=oracle@dbgate-plugin-oracle
SERVICE_NAME_oracle=xe
LABEL_mssql=Mssql-connection
SERVER_mssql=localhost
USER_mssql=sa
PASSWORD_mssql=Pwd2020Db
PORT_mssql=16014
ENGINE_mssql=mssql@dbgate-plugin-mssql
LABEL_sqlite=Sqlite-connection
FILE_sqlite=my_guitar_shop.db
ENGINE_sqlite=sqlite@dbgate-plugin-sqlite
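
Each name in CONNECTIONS doubles as a suffix: the init script below and the portal connection loader later in this diff read the per-connection settings as process.env[`SERVER_${id}`], process.env[`USER_${id}`] and so on. A minimal sketch of that lookup, assuming only the variables defined above (the helper name is illustrative):

// builds a connection object for one id from the suffixed variables above
function connectionFromEnv(id) {
  return {
    label: process.env[`LABEL_${id}`],
    server: process.env[`SERVER_${id}`],
    user: process.env[`USER_${id}`],
    password: process.env[`PASSWORD_${id}`],
    port: process.env[`PORT_${id}`],
    engine: process.env[`ENGINE_${id}`],
  };
}

const connections = process.env.CONNECTIONS.split(',').map(connectionFromEnv);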

110
e2e-tests/init/multi-sql.js Normal file
View File

@@ -0,0 +1,110 @@
const path = require('path');
const localconfig = require('../.localconfig');
const dbgateApi = require('dbgate-api');
dbgateApi.initializeApiEnvironment();
const dbgatePluginMysql = require('dbgate-plugin-mysql');
dbgateApi.registerPlugins(dbgatePluginMysql);
const dbgatePluginPostgres = require('dbgate-plugin-postgres');
dbgateApi.registerPlugins(dbgatePluginPostgres);
async function createDb(connection, dropDbSql, createDbSql, database = 'my_guitar_shop') {
if (dropDbSql) {
try {
await dbgateApi.executeQuery({
connection,
sql: dropDbSql,
});
} catch (err) {
console.error('Failed to drop database', err);
}
}
if (createDbSql) {
await dbgateApi.executeQuery({
connection,
sql: createDbSql,
});
}
await dbgateApi.importDbFromFolder({
connection: {
...connection,
database,
},
folder: path.resolve(path.join(__dirname, '../data/my-guitar-shop')),
});
}
async function run() {
if (localconfig.postgres) {
await createDb(
{
server: process.env.SERVER_postgres,
user: process.env.USER_postgres,
password: process.env.PASSWORD_postgres,
port: process.env.PORT_postgres,
engine: 'postgres@dbgate-plugin-postgres',
},
'drop database if exists my_guitar_shop',
'create database my_guitar_shop'
);
}
if (localconfig.mysql) {
await createDb(
{
server: process.env.SERVER_mysql,
user: process.env.USER_mysql,
password: process.env.PASSWORD_mysql,
port: process.env.PORT_mysql,
engine: 'mysql@dbgate-plugin-mysql',
},
'drop database if exists my_guitar_shop',
'create database my_guitar_shop'
);
}
if (localconfig.mssql) {
await createDb(
{
server: process.env.SERVER_mssql,
user: process.env.USER_mssql,
password: process.env.PASSWORD_mssql,
port: process.env.PORT_mssql,
engine: 'mssql@dbgate-plugin-mssql',
},
'drop database if exists my_guitar_shop',
'create database my_guitar_shop'
);
}
if (localconfig.oracle) {
await createDb(
{
server: process.env.SERVER_oracle,
user: process.env.USER_oracle,
password: process.env.PASSWORD_oracle,
port: process.env.PORT_oracle,
engine: 'oracle@dbgate-plugin-oracle',
},
'DROP USER c##my_guitar_shop CASCADE',
'CREATE USER c##my_guitar_shop IDENTIFIED BY my_guitar_shop DEFAULT TABLESPACE users TEMPORARY TABLESPACE temp QUOTA 10M ON users',
'C##my_guitar_shop'
);
}
if (localconfig.sqlite) {
await createDb(
{
databaseFile: process.env.FILE_sqlite,
singleDatabase: true,
engine: 'sqlite@dbgate-plugin-sqlite',
},
null,
null
);
}
}
dbgateApi.runScript(run);

View File

@@ -20,20 +20,23 @@
"cy:run:oauth": "cypress run --spec cypress/e2e/oauth.cy.js",
"cy:run:browse-data": "cypress run --spec cypress/e2e/browse-data.cy.js",
"cy:run:team": "cypress run --spec cypress/e2e/team.cy.js",
"cy:run:multi-sql": "cypress run --spec cypress/e2e/multi-sql.cy.js",
"start:add-connection": "cd .. && node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:portal": "cd .. && env-cmd -f e2e-tests/env/portal/.env node e2e-tests/init/portal.js && env-cmd -f e2e-tests/env/portal/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:oauth": "cd .. && env-cmd -f e2e-tests/env/oauth/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:browse-data": "cd .. && env-cmd -f e2e-tests/env/browse-data/.env node e2e-tests/init/browse-data.js && env-cmd -f e2e-tests/env/browse-data/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:team": "cd .. && env-cmd -f e2e-tests/env/team/.env node e2e-tests/init/team.js && env-cmd -f e2e-tests/env/team/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:add-connection": "node clearTestingData && cd .. && node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:portal": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/portal/.env node e2e-tests/init/portal.js && env-cmd -f e2e-tests/env/portal/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:oauth": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/oauth/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:browse-data": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/browse-data/.env node e2e-tests/init/browse-data.js && env-cmd -f e2e-tests/env/browse-data/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:team": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/team/.env node e2e-tests/init/team.js && env-cmd -f e2e-tests/env/team/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:multi-sql": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/multi-sql/.env node e2e-tests/init/multi-sql.js && env-cmd -f e2e-tests/env/multi-sql/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"test:add-connection": "start-server-and-test start:add-connection http://localhost:3000 cy:run:add-connection",
"test:portal": "start-server-and-test start:portal http://localhost:3000 cy:run:portal",
"test:oauth": "start-server-and-test start:oauth http://localhost:3000 cy:run:oauth",
"test:browse-data": "start-server-and-test start:browse-data http://localhost:3000 cy:run:browse-data",
"test:team": "start-server-and-test start:team http://localhost:3000 cy:run:team",
"test:multi-sql": "start-server-and-test start:multi-sql http://localhost:3000 cy:run:multi-sql",
"test": "yarn test:add-connection && yarn test:portal && yarn test:oauth && yarn test:browse-data && yarn test:team",
"test": "yarn test:add-connection && yarn test:portal && yarn test:oauth && yarn test:browse-data && yarn test:team && yarn test:multi-sql",
"test:ci": "yarn test"
},
"dependencies": {}

View File

@@ -76,26 +76,23 @@ describe('Alter database', () => {
})
);
const objectsSupportingRename = flatSource(x => x.supportRenameSqlObject);
if (objectsSupportingRename.length > 0) {
test.each(objectsSupportingRename)(
'Rename object - %s - %s',
testWrapper(async (conn, driver, type, object, engine) => {
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
test.each(flatSource(x => x.supportRenameSqlObject))(
'Rename object - %s - %s',
testWrapper(async (conn, driver, type, object, engine) => {
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
await runCommandOnDriver(conn, driver, object.create1);
await runCommandOnDriver(conn, driver, object.create1);
const structure = extendDatabaseInfo(await driver.analyseFull(conn));
const structure = extendDatabaseInfo(await driver.analyseFull(conn));
const dmp = driver.createDumper();
dmp.renameSqlObject(structure[type][0], 'renamed1');
const dmp = driver.createDumper();
dmp.renameSqlObject(structure[type][0], 'renamed1');
await driver.query(conn, dmp.s);
await driver.query(conn, dmp.s);
const structure2 = await driver.analyseFull(conn);
expect(structure2[type].length).toEqual(1);
expect(structure2[type][0].pureName).toEqual('renamed1');
})
);
}
const structure2 = await driver.analyseFull(conn);
expect(structure2[type].length).toEqual(1);
expect(structure2[type][0].pureName).toEqual('renamed1');
})
);
});

View File

@@ -1,7 +1,7 @@
const stableStringify = require('json-stable-stringify');
const _ = require('lodash');
const fp = require('lodash/fp');
const { testWrapper } = require('../tools');
const { testWrapper, removeNotNull, transformSqlForEngine } = require('../tools');
const engines = require('../engines');
const crypto = require('crypto');
const {
@@ -19,6 +19,7 @@ function pickImportantTableInfo(engine, table) {
pureName: table.pureName,
columns: table.columns
.filter(x => x.columnName != 'rowid')
.sort((a, b) => a.columnName.localeCompare(b.columnName))
.map(fp.pick(props))
.map(props => _.omitBy(props, x => x == null))
.map(props =>
@@ -33,36 +34,36 @@ function checkTableStructure(engine, t1, t2) {
}
async function testTableDiff(engine, conn, driver, mangle) {
await driver.query(conn, formatQueryWithoutParams(driver, `create table ~t0 (~id int not null primary key)`));
const initQuery = formatQueryWithoutParams(driver, `create table ~t0 (~id int not null primary key)`);
await driver.query(conn, transformSqlForEngine(engine, initQuery));
await driver.query(
conn,
formatQueryWithoutParams(
driver,
`create table ~t1 (
const query = formatQueryWithoutParams(
driver,
`create table ~t1 (
~col_pk int not null primary key,
~col_std int,
~col_def int default 12,
~col_def int ${engine.skipDefaultValue ? '' : 'default 12'},
${engine.skipReferences ? '' : '~col_fk int references ~t0(~id),'}
~col_idx int,
~col_uq int ${engine.skipUnique ? '' : 'unique'} ,
~col_ref int ${engine.skipUnique ? '' : 'unique'}
)`
)
);
await driver.query(conn, transformSqlForEngine(engine, query));
if (!engine.skipIndexes) {
await driver.query(conn, formatQueryWithoutParams(driver, `create index ~idx1 on ~t1(~col_idx)`));
const query = formatQueryWithoutParams(driver, `create index ~idx1 on ~t1(~col_idx)`);
await driver.query(conn, transformSqlForEngine(engine, query));
}
if (!engine.skipReferences) {
await driver.query(
conn,
formatQueryWithoutParams(
driver,
`create table ~t2 (~id int not null primary key, ~fkval int null references ~t1(~col_ref))`
)
const query = formatQueryWithoutParams(
driver,
`create table ~t2 (~id int not null primary key, ~fkval int null references ~t1(~col_ref))`
);
await driver.query(conn, transformSqlForEngine(engine, query));
}
const tget = x => x.tables.find(y => y.pureName == 't1');
@@ -89,14 +90,12 @@ const TESTED_COLUMNS = ['col_pk', 'col_std', 'col_def', 'col_fk', 'col_ref', 'co
// const TESTED_COLUMNS = ['col_std'];
// const TESTED_COLUMNS = ['col_ref'];
function engines_columns_source() {
function create_engines_columns_source(engines) {
return _.flatten(
engines.map(engine =>
TESTED_COLUMNS.filter(col => !col.endsWith('_pk') || !engine.skipPkColumnTesting).map(column => [
engine.label,
column,
engine,
])
TESTED_COLUMNS.filter(col => col.endsWith('_pk') || !engine.skipNonPkRename)
.filter(col => !col.endsWith('_pk') || !engine.skipPkColumnTesting)
.map(column => [engine.label, column, engine])
)
);
}
@@ -117,26 +116,45 @@ describe('Alter table', () => {
})
);
test.each(engines_columns_source())(
'Drop column - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(engine, conn, driver, tbl => (tbl.columns = tbl.columns.filter(x => x.columnName != column)));
})
const columnsSource = create_engines_columns_source(engines);
const droppableColumnsSource = columnsSource.filter(
([_label, col, engine]) => !engine.skipPkDrop || !col.endsWith('_pk')
);
const hasDroppableColumns = droppableColumnsSource.length > 0;
test.each(engines_columns_source())(
'Change nullability - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(
engine,
conn,
driver,
tbl => (tbl.columns = tbl.columns.map(x => (x.columnName == column ? { ...x, notNull: true } : x)))
);
})
);
if (hasDroppableColumns) {
test.each(droppableColumnsSource)(
'Drop column - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(
engine,
conn,
driver,
tbl => (tbl.columns = tbl.columns.filter(x => x.columnName != column))
);
})
);
}
test.each(engines_columns_source())(
const hasEnginesWithNullable = engines.filter(x => !x.skipNullable).length > 0;
if (hasEnginesWithNullable) {
const source = create_engines_columns_source(engines.filter(x => !x.skipNullable));
test.each(source)(
'Change nullability - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(
engine,
conn,
driver,
tbl => (tbl.columns = tbl.columns.map(x => (x.columnName == column ? { ...x, notNull: true } : x)))
);
})
);
}
test.each(columnsSource)(
'Rename column - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(
@@ -157,32 +175,37 @@ describe('Alter table', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Add default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_std').defaultValue = '123';
});
})
);
const enginesWithDefault = engines.filter(x => !x.skipDefaultValue);
const hasEnginesWithDefault = enginesWithDefault.length > 0;
test.each(engines.map(engine => [engine.label, engine]))(
'Unset default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = undefined;
});
})
);
if (hasEnginesWithDefault) {
test.each(enginesWithDefault.map(engine => [engine.label, engine]))(
'Add default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_std').defaultValue = '123';
});
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Change default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = '567';
});
})
);
test.each(enginesWithDefault.map(engine => [engine.label, engine]))(
'Unset default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = undefined;
});
})
);
test.each(enginesWithDefault.map(engine => [engine.label, engine]))(
'Change default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = '567';
});
})
);
}
// test.each(engines.map(engine => [engine.label, engine]))(
// 'Change autoincrement - %s',

View File

@@ -1,11 +1,12 @@
const engines = require('../engines');
const stream = require('stream');
const path = require('path');
const { testWrapper } = require('../tools');
const tableWriter = require('dbgate-api/src/shell/tableWriter');
const tableReader = require('dbgate-api/src/shell/tableReader');
const copyStream = require('dbgate-api/src/shell/copyStream');
const importDatabase = require('dbgate-api/src/shell/importDatabase');
const fakeObjectReader = require('dbgate-api/src/shell/fakeObjectReader');
const importDbFromFolder = require('dbgate-api/src/shell/importDbFromFolder');
const { runQueryOnDriver, runCommandOnDriver } = require('dbgate-tools');
function createImportStream() {
@@ -54,6 +55,30 @@ describe('DB Import/export', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
`Import to existing table - %s`,
testWrapper(async (conn, driver, engine) => {
await runQueryOnDriver(conn, driver, dmp =>
dmp.put(
`create table ~t1 (~id int primary key, ~country %s)`,
engine.useTextTypeForStrings ? 'text' : 'varchar(50)'
)
);
const reader = createImportStream();
const writer = await tableWriter({
systemConnection: conn,
driver,
pureName: 't1',
createIfNotExists: true,
});
await copyStream(reader, writer);
const res = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
expect(res.rows[0].cnt.toString()).toEqual('6');
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Import two tables - %s',
testWrapper(async (conn, driver, engine) => {
@@ -84,39 +109,48 @@ describe('DB Import/export', () => {
expect(res2.rows[0].cnt.toString()).toEqual('6');
})
);
const enginesWithDumpFile = engines.filter(x => x.dumpFile);
const hasEnginesWithDumpFile = enginesWithDumpFile.length > 0;
test.each(engines.filter(x => x.dumpFile).map(engine => [engine.label, engine]))(
'Import SQL dump - %s',
testWrapper(async (conn, driver, engine) => {
// const reader = await fakeObjectReader({ delay: 10 });
// const reader = await fakeObjectReader();
await importDatabase({
systemConnection: conn,
driver,
inputFile: engine.dumpFile,
});
if (hasEnginesWithDumpFile) {
test.each(enginesWithDumpFile.map(engine => [engine.label, engine]))(
'Import SQL dump - %s',
testWrapper(async (conn, driver, engine) => {
// const reader = await fakeObjectReader({ delay: 10 });
// const reader = await fakeObjectReader();
await importDatabase({
systemConnection: conn,
driver,
inputFile: engine.dumpFile,
});
const structure = await driver.analyseFull(conn);
const structure = await driver.analyseFull(conn);
for (const check of engine.dumpChecks || []) {
const res = await driver.query(conn, check.sql);
expect(res.rows[0].res.toString()).toEqual(check.res);
}
for (const check of engine.dumpChecks || []) {
const res = await driver.query(conn, check.sql);
expect(res.rows[0].res.toString()).toEqual(check.res);
}
// const res1 = await driver.query(conn, `select count(*) as cnt from t1`);
// expect(res1.rows[0].cnt.toString()).toEqual('6');
// const res1 = await driver.query(conn, `select count(*) as cnt from t1`);
// expect(res1.rows[0].cnt.toString()).toEqual('6');
// const res2 = await driver.query(conn, `select count(*) as cnt from t2`);
// expect(res2.rows[0].cnt.toString()).toEqual('6');
})
);
// const res2 = await driver.query(conn, `select count(*) as cnt from t2`);
// expect(res2.rows[0].cnt.toString()).toEqual('6');
})
);
}
test.each(engines.map(engine => [engine.label, engine]))(
'Export one table - %s',
testWrapper(async (conn, driver, engine) => {
// const reader = await fakeObjectReader({ delay: 10 });
// const reader = await fakeObjectReader();
await runCommandOnDriver(conn, driver, 'create table ~t1 (~id int primary key, ~country varchar(100))');
await runCommandOnDriver(
conn,
driver,
`create table ~t1 (~id int primary key, ~country ${engine.useTextTypeForStrings ? 'text' : 'varchar(100)'})`
);
const data = [
[1, 'Czechia'],
[2, 'Austria'],
@@ -138,7 +172,27 @@ describe('DB Import/export', () => {
const writer = createExportStream();
await copyStream(reader, writer);
expect(writer.resultArray.filter(x => !x.__isStreamHeader).map(row => [row.id, row.country])).toEqual(data);
const result = writer.resultArray.filter(x => !x.__isStreamHeader).map(row => [row.id, row.country]);
if (engine.forceSortResults) {
result.sort((a, b) => a[0] - b[0]);
}
expect(result).toEqual(data);
})
);
test.each(engines.filter(engine => !engine.skipImportModel).map(engine => [engine.label, engine]))(
'Import guitar shop - schema + data - %s',
testWrapper(async (conn, driver, engine) => {
await importDbFromFolder({
systemConnection: conn,
driver,
folder: path.join(__dirname, '../../e2e-tests/data/my-guitar-shop'),
});
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~categories`));
expect(res1.rows[0].cnt.toString()).toEqual('4');
})
);
});

View File

@@ -149,7 +149,7 @@ async function testDatabaseDeploy(engine, conn, driver, dbModelsYaml, options) {
}
describe('Deploy database', () => {
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Deploy database simple - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -167,7 +167,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Deploy database simple - %s - not connected',
testWrapperPrepareOnly(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -185,7 +185,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Deploy database simple twice - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
@@ -219,7 +219,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Add column - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -250,7 +250,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Dont drop column - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
@@ -287,7 +287,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(x => !x.skipReferences)
.map(engine => [engine.label, engine])
)(
'Foreign keys - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
@@ -343,7 +348,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(x => !x.skipDataModifications)
.map(engine => [engine.label, engine])
)(
'Deploy preloaded data - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -372,7 +382,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(x => !x.skipDataModifications)
.map(engine => [engine.label, engine])
)(
'Deploy preloaded data - update - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -448,7 +463,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(x => !x.skipChangeColumn).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(x => !x.skipChangeColumn && !x.skipNullability)
.map(engine => [engine.label, engine])
)(
'Change column to NOT NULL column with default - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -566,7 +586,7 @@ describe('Deploy database', () => {
text: 'create view ~_deleted_v1 as select * from ~t1',
};
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Dont remove column - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], [T1_NO_VAL]], {
@@ -576,7 +596,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Dont remove table - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], []], {
@@ -586,7 +606,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Mark table removed - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], [], []], {
@@ -597,7 +617,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(engine => engine.supportRenameSqlObject).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(engine => engine.supportRenameSqlObject)
.map(engine => [engine.label, engine])
)(
'Mark view removed - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1, V1], [T1], [T1]], {
@@ -608,7 +633,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Mark column removed - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], [T1_NO_VAL]], {
@@ -619,7 +644,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Undelete table - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
@@ -641,7 +666,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(engine => engine.supportRenameSqlObject).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(engine => engine.supportRenameSqlObject)
.map(engine => [engine.label, engine])
)(
'Undelete view - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1, V1], [T1], [T1, V1]], {
@@ -652,7 +682,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Undelete column - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], [T1_NO_VAL], [T1]], {
@@ -662,7 +692,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'View redeploy - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
@@ -683,7 +713,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Change view - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
@@ -703,7 +733,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(x => !x.skipDataModifications)
.map(engine => [engine.label, engine])
)(
'Script driven deploy - basic predeploy - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -723,7 +758,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(x => !x.skipDataModifications)
.map(engine => [engine.label, engine])
)(
'Script driven deploy - install+uninstall - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -782,7 +822,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Mark table removed, one remains - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1, T2], [T2], [T2]], {

View File

@@ -73,7 +73,9 @@ describe('Query', () => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(sql));
}
const res = await runQueryOnDriver(conn, driver, dmp => dmp.put('SELECT ~id FROM ~t1 ORDER BY ~id'));
const res = await runQueryOnDriver(conn, driver, dmp =>
dmp.put(`SELECT ~id FROM ~t1 ${engine.skipOrderBy ? '' : 'ORDER BY ~id'}`)
);
expect(res.columns).toEqual([
expect.objectContaining({
columnName: 'id',
@@ -98,7 +100,11 @@ describe('Query', () => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(sql));
}
const results = await executeStream(driver, conn, 'SELECT ~id FROM ~t1 ORDER BY ~id');
const results = await executeStream(
driver,
conn,
`SELECT ~id FROM ~t1 ${engine.skipOrderBy ? '' : 'ORDER BY ~id'}`
);
expect(results.length).toEqual(1);
const res = results[0];
@@ -107,7 +113,7 @@ describe('Query', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipOrderBy).map(engine => [engine.label, engine]))(
'More queries - %s',
testWrapper(async (conn, driver, engine) => {
for (const sql of initSql) {
@@ -137,7 +143,9 @@ describe('Query', () => {
const results = await executeStream(
driver,
conn,
'CREATE TABLE ~t1 (~id int primary key); INSERT INTO ~t1 (~id) VALUES (1); INSERT INTO ~t1 (~id) VALUES (2); SELECT ~id FROM ~t1 ORDER BY ~id; '
`CREATE TABLE ~t1 (~id int primary key); INSERT INTO ~t1 (~id) VALUES (1); INSERT INTO ~t1 (~id) VALUES (2); SELECT ~id FROM ~t1 ${
engine.skipOrderBy ? '' : 'ORDER BY ~id'
}; `
);
expect(results.length).toEqual(1);
@@ -188,7 +196,7 @@ describe('Query', () => {
pureName: 't1',
columns: [
{ columnName: 'id', dataType: 'int', notNull: true, autoIncrement: true },
{ columnName: 'val', dataType: 'varchar(50)' },
{ columnName: 'val', dataType: engine.useTextTypeForStrings ? 'text' : 'varchar(50)' },
],
primaryKey: {
columns: [{ columnName: 'id' }],

View File

@@ -2,12 +2,37 @@ const { runCommandOnDriver } = require('dbgate-tools');
const engines = require('../engines');
const { testWrapper } = require('../tools');
const t1Sql = 'CREATE TABLE ~t1 (~id int not null primary key, ~val1 varchar(50))';
/**
* @param {import('dbgate-types').TestEngineInfo} engine
*/
const t1Sql = engine =>
`CREATE TABLE ~t1 (~id int ${engine.skipNullability ? '' : 'not null'} primary key, ~val1 ${
engine.useTextTypeForStrings ? 'text' : 'varchar(50)'
})`;
const ix1Sql = 'CREATE index ~ix1 ON ~t1(~val1, ~id)';
/**
* @param {import('dbgate-types').TestEngineInfo} engine
*/
const t2Sql = engine =>
`CREATE TABLE ~t2 (~id int not null primary key, ~val2 varchar(50) ${engine.skipUnique ? '' : 'unique'})`;
const t3Sql = 'CREATE TABLE ~t3 (~id int not null primary key, ~valfk int, foreign key (~valfk) references ~t2(~id))';
const t4Sql = 'CREATE TABLE ~t4 (~id int not null primary key, ~valdef int default 12 not null)';
`CREATE TABLE ~t2 (~id int ${engine.skipNullability ? '' : 'not null'} primary key, ~val2 ${
engine.useTextTypeForStrings ? 'text' : 'varchar(50)'
} ${engine.skipUnique ? '' : 'unique'})`;
/**
* @param {import('dbgate-types').TestEngineInfo} engine
*/
const t3Sql = engine =>
`CREATE TABLE ~t3 (~id int ${
engine.skipNullability ? '' : 'not null'
} primary key, ~valfk int, foreign key (~valfk) references ~t2(~id))`;
/**
* @param {import('dbgate-types').TestEngineInfo} engine
*/
const t4Sql = engine =>
`CREATE TABLE ~t4 (~id int ${engine.skipNullability ? '' : 'not null'} primary key, ~valdef int default 12 ${
engine.skipNullability ? '' : 'not null'
})`;
// const fkSql = 'ALTER TABLE t3 ADD FOREIGN KEY (valfk) REFERENCES t2(id)'
const txMatch = (engine, tname, vcolname, nextcol, defaultValue) =>
@@ -22,12 +47,12 @@ const txMatch = (engine, tname, vcolname, nextcol, defaultValue) =>
expect.objectContaining({
columnName: vcolname,
...(engine.skipNullability ? {} : { notNull: !!defaultValue }),
...(defaultValue
...(defaultValue && !engine.skipDefaultValue
? { defaultValue }
: {
dataType: engine.skipStringLength
? expect.stringMatching(/.*string|char.*/i)
: expect.stringMatching(/.*char.*\(50\)/i),
? expect.stringMatching(/.*string|char.*|text/i)
: expect.stringMatching(/.*char.*\(50\)|text/i),
}),
}),
...(nextcol
@@ -36,8 +61,8 @@ const txMatch = (engine, tname, vcolname, nextcol, defaultValue) =>
columnName: 'nextcol',
...(engine.skipNullability ? {} : { notNull: false }),
dataType: engine.skipStringLength
? expect.stringMatching(/.*string.*|char.*/i)
: expect.stringMatching(/.*char.*\(50\).*/i),
? expect.stringMatching(/.*string.*|char.*|text/i)
: expect.stringMatching(/.*char.*\(50\).*|text/i),
}),
]
: []),
@@ -60,10 +85,9 @@ describe('Table analyse', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Table structure - full analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));
const structure = await driver.analyseFull(conn);
console.log(JSON.stringify(structure, null, 2));
expect(structure.tables.length).toEqual(1);
expect(structure.tables[0]).toEqual(t1Match(engine));
@@ -79,7 +103,7 @@ describe('Table analyse', () => {
expect(structure1.tables.length).toEqual(1);
expect(structure1.tables[0]).toEqual(t2Match(engine));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));
const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2.tables.length).toEqual(2);
@@ -91,7 +115,7 @@ describe('Table analyse', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Table remove - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
const structure1 = await driver.analyseFull(conn);
expect(structure1.tables.length).toEqual(2);
@@ -109,14 +133,18 @@ describe('Table analyse', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Table change - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
const structure1 = await driver.analyseFull(conn);
if (engine.dbSnapshotBySeconds) await new Promise(resolve => setTimeout(resolve, 1100));
await runCommandOnDriver(conn, driver, dmp =>
dmp.put(`ALTER TABLE ~t2 ADD ${engine.alterTableAddColumnSyntax ? 'COLUMN' : ''} ~nextcol varchar(50)`)
dmp.put(
`ALTER TABLE ~t2 ADD ${engine.alterTableAddColumnSyntax ? 'COLUMN' : ''} ~nextcol ${
engine.useTextTypeForStrings ? 'text' : 'varchar(50)'
}`
)
);
const structure2 = await driver.analyseIncremental(conn, structure1);
@@ -124,14 +152,25 @@ describe('Table analyse', () => {
expect(structure2.tables.length).toEqual(2);
expect(structure2.tables.find(x => x.pureName == 't1')).toEqual(t1Match(engine));
expect(structure2.tables.find(x => x.pureName == 't2')).toEqual(t2NextColMatch(engine));
const t2 = structure2.tables.find(x => x.pureName == 't2');
const t2ColumnsOrder = ['id', 'val2', 'nextcol'];
const t2Enhanced = engine.forceSortStructureColumns
? {
...t2,
columns: t2.columns.sort(
(a, b) => t2ColumnsOrder.indexOf(a.columnName) - t2ColumnsOrder.indexOf(b.columnName)
),
}
: t2;
expect(t2Enhanced).toEqual(t2NextColMatch(engine));
})
);
test.each(engines.filter(x => !x.skipIndexes).map(engine => [engine.label, engine]))(
'Index - full analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));
await runCommandOnDriver(conn, driver, dmp => dmp.put(ix1Sql));
const structure = await driver.analyseFull(conn);
@@ -161,7 +200,7 @@ describe('Table analyse', () => {
'Foreign key - full analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t3Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t3Sql(engine)));
// await driver.query(conn, fkSql);
const structure = await driver.analyseFull(conn);
@@ -177,10 +216,10 @@ describe('Table analyse', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(engine => !engine.skipDefaultValue).map(engine => [engine.label, engine]))(
'Table structure - default value - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t4Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t4Sql(engine)));
const structure = await driver.analyseFull(conn);

View File

@@ -24,8 +24,10 @@ function checkTableStructure2(t1, t2) {
expect(t2).toEqual(createExpector(omitTableSpecificInfo(t1)));
}
async function testTableCreate(conn, driver, table) {
await runCommandOnDriver(conn, driver, dmp => dmp.put('create table ~t0 (~id int not null primary key)'));
async function testTableCreate(engine, conn, driver, table) {
await runCommandOnDriver(conn, driver, dmp =>
dmp.put(`create table ~t0 (~id int ${engine.skipNullability ? '' : 'not null'} primary key)`)
);
const dmp = driver.createDumper();
const table1 = {
@@ -47,12 +49,12 @@ describe('Table create', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Simple table - %s',
testWrapper(async (conn, driver, engine) => {
await testTableCreate(conn, driver, {
await testTableCreate(engine, conn, driver, {
columns: [
{
columnName: 'col1',
dataType: 'int',
notNull: true,
...(engine.skipNullability ? {} : { notNull: true }),
},
],
primaryKey: {
@@ -65,7 +67,7 @@ describe('Table create', () => {
test.each(engines.filter(x => !x.skipIndexes).map(engine => [engine.label, engine]))(
'Table with index - %s',
testWrapper(async (conn, driver, engine) => {
await testTableCreate(conn, driver, {
await testTableCreate(engine, conn, driver, {
columns: [
{
columnName: 'col1',
@@ -95,7 +97,7 @@ describe('Table create', () => {
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
'Table with foreign key - %s',
testWrapper(async (conn, driver, engine) => {
await testTableCreate(conn, driver, {
await testTableCreate(engine, conn, driver, {
columns: [
{
columnName: 'col1',
@@ -125,7 +127,7 @@ describe('Table create', () => {
test.each(engines.filter(x => !x.skipUnique).map(engine => [engine.label, engine]))(
'Table with unique - %s',
testWrapper(async (conn, driver, engine) => {
await testTableCreate(conn, driver, {
await testTableCreate(engine, conn, driver, {
columns: [
{
columnName: 'col1',

View File

@@ -1,35 +1,41 @@
version: '3'
services:
postgres:
image: postgres
restart: always
environment:
POSTGRES_PASSWORD: Pwd2020Db
ports:
- 15000:5432
mariadb:
image: mariadb
command: --default-authentication-plugin=mysql_native_password
restart: always
ports:
- 15004:3306
environment:
- MYSQL_ROOT_PASSWORD=Pwd2020Db
# postgres:
# image: postgres
# restart: always
# environment:
# POSTGRES_PASSWORD: Pwd2020Db
# ports:
# - 15000:5432
#
# mariadb:
# image: mariadb
# command: --default-authentication-plugin=mysql_native_password
# restart: always
# ports:
# - 15004:3306
# environment:
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
# mysql:
# image: mysql:8.0.18
# command: --default-authentication-plugin=mysql_native_password
# restart: always
# ports:
# ports:
# - 15001:3306
# environment:
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
#
cassandradb:
image: cassandra:5.0.2
ports:
- 15942:9042
# clickhouse:
# image: bitnami/clickhouse:24.8.4
# restart: always
# ports:
# ports:
# - 15005:8123
# environment:
# - CLICKHOUSE_ADMIN_PASSWORD=Pwd2020Db
@@ -37,19 +43,18 @@ services:
# mssql:
# image: mcr.microsoft.com/mssql/server
# restart: always
# ports:
# ports:
# - 15002:1433
# environment:
# - ACCEPT_EULA=Y
# - SA_PASSWORD=Pwd2020Db
# - MSSQL_PID=Express
# cockroachdb:
# image: cockroachdb/cockroach
# ports:
# - 15003:26257
# command: start-single-node --insecure
# mongodb:
# image: mongo:4.0.12
# restart: always
@@ -59,11 +64,10 @@ services:
# ports:
# - 27017:27017
# cockroachdb-init:
# image: cockroachdb/cockroach
# # build: cockroach
# # entrypoint: /cockroach/init.sh
# # entrypoint: /cockroach/init.sh
# entrypoint: ./cockroach sql --insecure --host="cockroachdb" --execute="CREATE DATABASE IF NOT EXISTS test;"
# depends_on:

View File

@@ -1,3 +1,4 @@
// @ts-check
const views = {
type: 'views',
create1: 'CREATE VIEW ~obj1 AS SELECT ~id FROM ~t1',
@@ -13,6 +14,7 @@ const matviews = {
drop2: 'DROP MATERIALIZED VIEW obj2',
};
/** @type {import('dbgate-types').TestEngineInfo} */
const mysqlEngine = {
label: 'MySQL',
connection: {
@@ -160,6 +162,7 @@ const mysqlEngine = {
],
};
/** @type {import('dbgate-types').TestEngineInfo} */
const mariaDbEngine = {
label: 'MariaDB',
connection: {
@@ -180,6 +183,7 @@ const mariaDbEngine = {
],
};
/** @type {import('dbgate-types').TestEngineInfo} */
const postgreSqlEngine = {
label: 'PostgreSQL',
connection: {
@@ -352,6 +356,7 @@ $$ LANGUAGE plpgsql;`,
],
};
/** @type {import('dbgate-types').TestEngineInfo} */
const sqlServerEngine = {
label: 'SQL Server',
connection: {
@@ -465,6 +470,7 @@ const sqlServerEngine = {
],
};
/** @type {import('dbgate-types').TestEngineInfo} */
const sqliteEngine = {
label: 'SQLite',
generateDbFile: true,
@@ -500,6 +506,7 @@ const sqliteEngine = {
],
};
/** @type {import('dbgate-types').TestEngineInfo} */
const cockroachDbEngine = {
label: 'CockroachDB',
connection: {
@@ -511,6 +518,7 @@ const cockroachDbEngine = {
objects: [views, matviews],
};
/** @type {import('dbgate-types').TestEngineInfo} */
const clickhouseEngine = {
label: 'ClickHouse',
connection: {
@@ -531,8 +539,10 @@ const clickhouseEngine = {
alterTableAddColumnSyntax: true,
dbSnapshotBySeconds: true,
skipChangeColumn: true,
skipImportModel: true,
};
/** @type {import('dbgate-types').TestEngineInfo} */
const oracleEngine = {
label: 'Oracle',
connection: {
@@ -592,6 +602,41 @@ const oracleEngine = {
],
};
/** @type {import('dbgate-types').TestEngineInfo} */
const cassandraEngine = {
label: 'Cassandra',
connection: {
server: 'localhost:15942',
engine: 'cassandra@dbgate-plugin-cassandra',
},
removeNotNull: true,
alterTableAddColumnSyntax: false,
skipOnCI: false,
skipReferences: true,
// dbSnapshotBySeconds: true,
// setNullDefaultInsteadOfDrop: true,
skipIncrementalAnalysis: true,
skipNonPkRename: true,
skipPkDrop: true,
skipDefaultValue: true,
skipNullability: true,
skipUnique: true,
skipIndexes: true,
skipOrderBy: true,
skipAutoIncrement: true,
skipDataModifications: true,
skipDataDuplicator: true,
skipDeploy: true,
skipImportModel: true,
forceSortResults: true,
forceSortStructureColumns: true,
useTextTypeForStrings: true,
objects: [],
};
const enginesOnCi = [
// all engines, which would be run on GitHub actions
mysqlEngine,
@@ -602,20 +647,23 @@ const enginesOnCi = [
// cockroachDbEngine,
clickhouseEngine,
oracleEngine,
cassandraEngine,
];
const enginesOnLocal = [
// all engines, which would be run on local test
mysqlEngine,
// cassandraEngine,
// mysqlEngine,
// mariaDbEngine,
// postgreSqlEngine,
// sqlServerEngine,
sqliteEngine,
// sqliteEngine,
// cockroachDbEngine,
// clickhouseEngine,
clickhouseEngine,
// oracleEngine,
];
/** @type {import('dbgate-types').TestEngineInfo[] & Record<string, import('dbgate-types').TestEngineInfo>} */
module.exports = process.env.CITEST ? enginesOnCi : enginesOnLocal;
module.exports.mysqlEngine = mysqlEngine;
@@ -626,3 +674,4 @@ module.exports.sqliteEngine = sqliteEngine;
module.exports.cockroachDbEngine = cockroachDbEngine;
module.exports.clickhouseEngine = clickhouseEngine;
module.exports.oracleEngine = oracleEngine;
module.exports.cassandraEngine = cassandraEngine;
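
The new Cassandra entry relies entirely on these capability flags; the spec files earlier in this diff consume them with filter-then-map before handing the label/engine pairs to test.each. A minimal sketch of that gating, reusing names from deploy.spec.js above (the test body is omitted):

const engines = require('../engines');
const { testWrapper } = require('../tools');

// engines that opt out via skipDeploy (e.g. the new cassandraEngine) never reach the test
const deployableEngines = engines.filter(e => !e.skipDeploy).map(engine => [engine.label, engine]);

test.each(deployableEngines)(
  'Deploy database simple - %s',
  testWrapper(async (conn, driver, engine) => {
    // body omitted; see the deploy.spec.js hunks earlier in this diff
  })
);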

View File

@@ -1,3 +1,4 @@
// @ts-check
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
const crypto = require('crypto');
@@ -81,9 +82,27 @@ const testWrapperPrepareOnly =
await body(conn, driver, ...other);
};
/** @param {string} sql
* @returns {string} */
const removeNotNull = sql => sql.replace(/not null/gi, '');
/** @param {import('dbgate-types').TestEngineInfo} engine
* @param {string} sql
* @returns {string} */
const transformSqlForEngine = (engine, sql) => {
let result = sql;
if (engine.removeNotNull) {
result = removeNotNull(result);
}
return result;
};
module.exports = {
randomDbName,
connect,
testWrapper,
testWrapperPrepareOnly,
removeNotNull,
transformSqlForEngine,
};
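
A short usage note for the new helpers, reusing names from the hunks above: for an engine with removeNotNull set (such as the new cassandraEngine), transformSqlForEngine strips the NOT NULL constraints that CQL does not support.

const { transformSqlForEngine } = require('../tools');
const { cassandraEngine } = require('../engines');

const sql = 'create table t0 (id int not null primary key)';
// cassandraEngine.removeNotNull is true, so the constraint is removed (leaving a double space)
transformSqlForEngine(cassandraEngine, sql);
// => 'create table t0 (id int  primary key)'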

View File

@@ -1,6 +1,6 @@
{
"private": true,
"version": "6.1.7-premium-beta.1",
"version": "6.2.2-packer-beta.4",
"name": "dbgate-all",
"workspaces": [
"packages/*",
@@ -58,6 +58,7 @@
"install:drivers:packer": "node common/defineVolatileDependencies.js packer/build",
"prepare:docker": "yarn plugins:copydist && yarn build:web && yarn build:api && yarn copy:docker:build && yarn install:drivers:docker",
"prepare:packer": "yarn plugins:copydist && yarn build:web && yarn build:api && yarn copy:packer:build",
"build:e2e": "yarn build:lib && yarn prepare:packer",
"start": "concurrently --kill-others-on-fail \"yarn start:api\" \"yarn start:web\"",
"lib": "concurrently --kill-others-on-fail \"yarn start:sqltree\" \"yarn start:filterparser\" \"yarn start:datalib\" \"yarn start:tools\" \"yarn build:plugins:frontend:watch\"",
"ts:api": "yarn workspace dbgate-api ts",
@@ -65,7 +66,12 @@
"ts": "yarn ts:api && yarn ts:web",
"postinstall": "yarn resetPackagedPlugins && yarn build:lib && patch-package && yarn build:plugins:frontend",
"dbgate-serve": "node packages/dbgate/bin/dbgate-serve.js",
"workflows": "node common/processWorkflows.js"
"workflows": "node common/processWorkflows.js",
"cy:open": "cd e2e-tests && yarn cy:open",
"translations:extract": "node common/translations-cli/index.js extract",
"translations:add-missing": "node common/translations-cli/index.js add-missing",
"translations:remove-unused": "node common/translations-cli/index.js remove-unused",
"translations:check": "node common/translations-cli/index.js check"
},
"dependencies": {
"concurrently": "^5.1.0",

View File

@@ -136,7 +136,7 @@ module.exports = {
deleteSettings_meta: true,
async deleteSettings() {
await fs.unlink(path.join(datadir(), 'settings.json'));
await fs.unlink(path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'));
return true;
},
@@ -161,7 +161,10 @@ module.exports = {
async loadSettings() {
try {
const settingsText = await fs.readFile(path.join(datadir(), 'settings.json'), { encoding: 'utf-8' });
const settingsText = await fs.readFile(
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
{ encoding: 'utf-8' }
);
return {
...this.fillMissingSettings(JSON.parse(settingsText)),
'other.licenseKey': platformInfo.isElectron ? await this.loadLicenseKey() : undefined,
@@ -247,7 +250,10 @@ module.exports = {
...currentValue,
..._.omit(values, ['other.licenseKey']),
};
await fs.writeFile(path.join(datadir(), 'settings.json'), JSON.stringify(updated, undefined, 2));
await fs.writeFile(
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
JSON.stringify(updated, undefined, 2)
);
// this.settingsValue = updated;
if (currentValue['other.licenseKey'] != values['other.licenseKey']) {

View File

@@ -77,6 +77,7 @@ function getPortalCollections() {
allowedDatabasesRegex: process.env[`ALLOWED_DATABASES_REGEX_${id}`],
parent: process.env[`PARENT_${id}`] || undefined,
useSeparateSchemas: !!process.env[`USE_SEPARATE_SCHEMAS_${id}`],
localDataCenter: process.env[`LOCAL_DATA_CENTER_${id}`],
// SSH tunnel
useSshTunnel: process.env[`USE_SSH_${id}`],
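
The new localDataCenter field follows the same suffix convention as the other portal connection variables. A hypothetical portal .env fragment for a Cassandra connection (the id and values are illustrative; datacenter1 is Cassandra's default data center name):

CONNECTIONS=cassandra1
SERVER_cassandra1=localhost
PORT_cassandra1=9042
ENGINE_cassandra1=cassandra@dbgate-plugin-cassandra
LOCAL_DATA_CENTER_cassandra1=datacenter1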

View File

@@ -71,6 +71,11 @@ module.exports = {
handle_error(conid, database, props) {
const { error } = props;
logger.error(`Error in database connection ${conid}, database ${database}: ${error}`);
if (props?.msgid) {
const [resolve, reject] = this.requests[props?.msgid];
reject(error);
delete this.requests[props?.msgid];
}
},
handle_response(conid, database, { msgid, ...response }) {
const [resolve, reject] = this.requests[msgid];

View File

@@ -94,14 +94,26 @@ module.exports = {
handle_ping() {},
handle_freeData(runid, { freeData }) {
const [resolve, reject] = this.requests[runid];
const { resolve } = this.requests[runid];
resolve(freeData);
delete this.requests[runid];
},
handle_copyStreamError(runid, { copyStreamError }) {
const { reject, exitOnStreamError } = this.requests[runid] || {};
if (exitOnStreamError) {
reject(copyStreamError);
delete this.requests[runid];
}
},
handle_progress(runid, progressData) {
socket.emit(`runner-progress-${runid}`, progressData);
},
rejectRequest(runid, error) {
if (this.requests[runid]) {
const [resolve, reject] = this.requests[runid];
const { reject } = this.requests[runid];
reject(error);
delete this.requests[runid];
}
@@ -113,6 +125,8 @@ module.exports = {
fs.writeFileSync(`${scriptFile}`, scriptText);
fs.mkdirSync(directory);
const pluginNames = extractPlugins(scriptText);
// console.log('********************** SCRIPT TEXT **********************');
// console.log(scriptText);
logger.info({ scriptFile }, 'Running script');
// const subprocess = fork(scriptFile, ['--checkParent', '--max-old-space-size=8192'], {
const subprocess = fork(
@@ -150,11 +164,13 @@ module.exports = {
byline(subprocess.stdout).on('data', pipeDispatcher('info'));
byline(subprocess.stderr).on('data', pipeDispatcher('error'));
subprocess.on('exit', code => {
// console.log('... EXITED', code);
this.rejectRequest(runid, { message: 'No data returned, maybe input data source is too big' });
logger.info({ code, pid: subprocess.pid }, 'Exited process');
socket.emit(`runner-done-${runid}`, code);
});
subprocess.on('error', error => {
// console.log('... ERROR subprocess', error);
this.rejectRequest(runid, { message: error && (error.message || error.toString()) });
console.error('... ERROR subprocess', error);
this.dispatchMessage({
@@ -231,7 +247,7 @@ module.exports = {
const promise = new Promise((resolve, reject) => {
const runid = crypto.randomUUID();
this.requests[runid] = [resolve, reject];
this.requests[runid] = { resolve, reject, exitOnStreamError: true };
this.startCore(runid, loaderScriptTemplate(prefix, functionName, props, runid));
});
return promise;

View File

@@ -56,12 +56,19 @@ module.exports = {
handle_done(sesid, props) {
socket.emit(`session-done-${sesid}`);
if (!props.skipFinishedMessage) {
this.dispatchMessage(sesid, 'Query execution finished');
if (props.controlCommand) {
this.dispatchMessage(sesid, `${_.startCase(props.controlCommand)} finished`);
} else {
this.dispatchMessage(sesid, 'Query execution finished');
}
}
const session = this.opened.find(x => x.sesid == sesid);
if (session.loadingReader_jslid) {
socket.emit(`session-jslid-done-${session.loadingReader_jslid}`);
}
if (props.autoCommit) {
this.executeControlCommand({ sesid, command: 'commitTransaction' });
}
if (session.killOnDone) {
this.kill({ sesid });
}
@@ -131,7 +138,7 @@ module.exports = {
},
executeQuery_meta: true,
async executeQuery({ sesid, sql }) {
async executeQuery({ sesid, sql, autoCommit }) {
const session = this.opened.find(x => x.sesid == sesid);
if (!session) {
throw new Error('Invalid session');
@@ -139,7 +146,21 @@ module.exports = {
logger.info({ sesid, sql }, 'Processing query');
this.dispatchMessage(sesid, 'Query execution started');
session.subprocess.send({ msgtype: 'executeQuery', sql });
session.subprocess.send({ msgtype: 'executeQuery', sql, autoCommit });
return { state: 'ok' };
},
executeControlCommand_meta: true,
async executeControlCommand({ sesid, command }) {
const session = this.opened.find(x => x.sesid == sesid);
if (!session) {
throw new Error('Invalid session');
}
logger.info({ sesid, command }, 'Processing control command');
this.dispatchMessage(sesid, `${_.startCase(command)} started`);
session.subprocess.send({ msgtype: 'executeControlCommand', command });
return { state: 'ok' };
},

View File

@@ -78,6 +78,8 @@ function start() {
app.use(getExpressPath('/'), express.static('/home/dbgate-docker/public'));
} else if (platformInfo.isAwsUbuntuLayout) {
app.use(getExpressPath('/'), express.static('/home/ubuntu/build/public'));
} else if (platformInfo.isAzureUbuntuLayout) {
app.use(getExpressPath('/'), express.static('/home/azureuser/build/public'));
} else if (processArgs.runE2eTests) {
app.use(getExpressPath('/'), express.static(path.resolve('packer/build/public')));
} else if (platformInfo.isNpmDist) {
@@ -140,6 +142,10 @@ function start() {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (AWS AMI build)`);
server.listen(port);
} else if (platformInfo.isAzureUbuntuLayout) {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (Azure VM build)`);
server.listen(port);
} else if (platformInfo.isNpmDist) {
getPort({
port: parseInt(

View File

@@ -213,13 +213,12 @@ async function handleRunOperation({ msgid, operation, useTransaction }, skipRead
}
}
async function handleQueryData({ msgid, sql }, skipReadonlyCheck = false) {
async function handleQueryData({ msgid, sql, range }, skipReadonlyCheck = false) {
await waitConnected();
const driver = requireEngineDriver(storedConnection);
try {
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
// console.log(sql);
const res = await driver.query(dbhan, sql);
const res = await driver.query(dbhan, sql, { range });
process.send({ msgtype: 'response', msgid, ...res });
} catch (err) {
process.send({
@@ -234,7 +233,7 @@ async function handleSqlSelect({ msgid, select }) {
const driver = requireEngineDriver(storedConnection);
const dmp = driver.createDumper();
dumpSqlSelect(dmp, select);
return handleQueryData({ msgid, sql: dmp.s }, true);
return handleQueryData({ msgid, sql: dmp.s, range: select.range }, true);
}
async function handleDriverDataCore(msgid, callMethod, { logName }) {
@@ -340,6 +339,7 @@ async function handleSqlPreview({ msgid, objects, options }) {
}, 500);
}
} catch (err) {
console.error(err);
process.send({
msgtype: 'response',
msgid,
@@ -427,7 +427,11 @@ function start() {
await handleMessage(message);
} catch (err) {
logger.error(extractErrorLogData(err), 'Error in DB connection');
process.send({ msgtype: 'error', error: extractErrorMessage(err, 'Error processing message') });
process.send({
msgtype: 'error',
error: extractErrorMessage(err, 'Error processing message'),
msgid: message?.msgid,
});
}
});
}

View File

@@ -245,7 +245,47 @@ async function handleStopProfiler({ jslid }) {
currentProfiler = null;
}
async function handleExecuteQuery({ sql }) {
async function handleExecuteControlCommand({ command }) {
lastActivity = new Date().getTime();
await waitConnected();
const driver = requireEngineDriver(storedConnection);
if (command == 'commitTransaction' && !allowExecuteCustomScript(driver)) {
process.send({
msgtype: 'info',
info: {
message: 'Connection without read-only sessions is read only',
severity: 'error',
},
});
process.send({ msgtype: 'done', skipFinishedMessage: true });
return;
//process.send({ msgtype: 'error', error: e.message });
}
executingScripts++;
try {
const dmp = driver.createDumper();
switch (command) {
case 'commitTransaction':
await dmp.commitTransaction();
break;
case 'rollbackTransaction':
await dmp.rollbackTransaction();
break;
case 'beginTransaction':
await dmp.beginTransaction();
break;
}
await driver.query(dbhan, dmp.s, { discardResult: true });
process.send({ msgtype: 'done', controlCommand: command });
} finally {
executingScripts--;
}
}
async function handleExecuteQuery({ sql, autoCommit }) {
lastActivity = new Date().getTime();
await waitConnected();
@@ -279,7 +319,7 @@ async function handleExecuteQuery({ sql }) {
// handler.stream = stream;
// resultIndex = handler.resultIndex;
}
process.send({ msgtype: 'done' });
process.send({ msgtype: 'done', autoCommit });
} finally {
executingScripts--;
}
@@ -323,6 +363,7 @@ function handlePing() {
const messageHandlers = {
connect: handleConnect,
executeQuery: handleExecuteQuery,
executeControlCommand: handleExecuteControlCommand,
executeReader: handleExecuteReader,
startProfiler: handleStartProfiler,
stopProfiler: handleStopProfiler,
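
Pieced together from the sessions controller hunk earlier and the worker hunk above, the new autoCommit option is an assumed round trip between the API process and the session subprocess:

// executeQuery({ sesid, sql, autoCommit: true })                       // API controller (sessions)
//   -> subprocess.send({ msgtype: 'executeQuery', sql, autoCommit })
// handleExecuteQuery({ sql, autoCommit })                              // session worker
//   -> process.send({ msgtype: 'done', autoCommit: true })
// handle_done(sesid, props) sees props.autoCommit                      // API controller
//   -> executeControlCommand({ sesid, command: 'commitTransaction' })
// handleExecuteControlCommand({ command })                             // session worker
//   -> dmp.commitTransaction(); driver.query(dbhan, dmp.s, { discardResult: true })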

View File

@@ -1,6 +1,25 @@
const EnsureStreamHeaderStream = require('../utility/EnsureStreamHeaderStream');
const Stream = require('stream');
const ColumnMapTransformStream = require('../utility/ColumnMapTransformStream');
const streamPipeline = require('../utility/streamPipeline');
const { getLogger, extractErrorLogData, RowProgressReporter } = require('dbgate-tools');
const logger = getLogger('copyStream');
const stream = require('stream');
class ReportingTransform extends stream.Transform {
constructor(reporter, options = {}) {
super({ ...options, objectMode: true });
this.reporter = reporter;
}
_transform(chunk, encoding, callback) {
this.reporter.add(1);
this.push(chunk);
callback();
}
_flush(callback) {
this.reporter.finish();
callback();
}
}
/**
* Copies reader to writer. Used for import, export tables and transfer data between tables
@@ -9,10 +28,23 @@ const ColumnMapTransformStream = require('../utility/ColumnMapTransformStream');
* @param {object} options - options
* @returns {Promise}
*/
function copyStream(input, output, options) {
const { columns } = options || {};
async function copyStream(input, output, options) {
const { columns, progressName } = options || {};
if (progressName) {
process.send({
msgtype: 'progress',
progressName,
status: 'running',
});
}
const transforms = [];
if (progressName) {
const reporter = new RowProgressReporter(progressName, 'readRowCount');
transforms.push(new ReportingTransform(reporter));
}
if (columns) {
transforms.push(new ColumnMapTransformStream(columns));
}
@@ -20,36 +52,37 @@ function copyStream(input, output, options) {
transforms.push(new EnsureStreamHeaderStream());
}
// return new Promise((resolve, reject) => {
// Stream.pipeline(input, ...transforms, output, err => {
// if (err) {
// reject(err);
// } else {
// resolve();
// }
// });
// });
return new Promise((resolve, reject) => {
const finisher = output['finisher'] || output;
finisher.on('finish', resolve);
finisher.on('error', reject);
let lastStream = input;
for (const tran of transforms) {
lastStream.pipe(tran);
lastStream = tran;
}
lastStream.pipe(output);
// if (output.requireFixedStructure) {
// const ensureHeader = new EnsureStreamHeaderStream();
// input.pipe(ensureHeader);
// ensureHeader.pipe(output);
// } else {
// input.pipe(output);
// }
});
try {
await streamPipeline(input, transforms, output);
if (progressName) {
process.send({
msgtype: 'progress',
progressName,
status: 'done',
});
}
} catch (err) {
process.send({
msgtype: 'copyStreamError',
copyStreamError: {
message: err.message,
...err,
},
});
if (progressName) {
process.send({
msgtype: 'progress',
progressName,
status: 'error',
errorMessage: err.message,
});
}
logger.error(extractErrorLogData(err, { progressName }), 'Import/export job failed');
// throw err;
}
}
module.exports = copyStream;
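
With progressName set, copyStream now reports progress from the script process via process.send while rows pass through ReportingTransform. A sketch of how an import step might use it, assuming the jsonLinesReader and tableWriter factories shown elsewhere in this diff (file and table names are illustrative):

const copyStream = require('./copyStream');
const jsonLinesReader = require('./jsonLinesReader');
const tableWriter = require('./tableWriter');

async function importCustomers({ connection, driver }) {
  const src = await jsonLinesReader({ fileName: '/tmp/customers.jsonl' });
  const dst = await tableWriter({ connection, driver, pureName: 'customers' });

  // Emits { msgtype: 'progress', progressName: 'customers', status: 'running' } before the copy,
  // row-count updates via RowProgressReporter while it runs, and status 'done' or 'error' afterwards.
  await copyStream(src, dst, { progressName: 'customers' });
}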

View File

@@ -24,8 +24,6 @@ async function dataDuplicator({
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
try {
logger.info(`Connected.`);
if (!analysedStructure) {
analysedStructure = await driver.analyseFull(dbhan);
}

View File

@@ -19,8 +19,6 @@ async function dropAllDbObjects({ connection, systemConnection, driver, analysed
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
logger.info(`Connected.`);
if (!analysedStructure) {
analysedStructure = await driver.analyseFull(dbhan);
}

View File

@@ -31,8 +31,6 @@ async function dumpDatabase({
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
try {
logger.info(`Connected.`);
const dumper = await driver.createBackupDumper(dbhan, {
outputFile,
databaseName,

View File

@@ -36,7 +36,7 @@ async function executeQuery({
}
try {
logger.info(`Connected.`);
logger.debug(`Running SQL query, length: ${sql.length}`);
await driver.script(dbhan, sql, { logScriptItems });
} finally {

View File

@@ -5,6 +5,7 @@ const { splitQueryStream } = require('dbgate-query-splitter/lib/splitQueryStream
const download = require('./download');
const stream = require('stream');
const { getLogger } = require('dbgate-tools');
const streamPipeline = require('../utility/streamPipeline');
const logger = getLogger('importDb');
@@ -43,25 +44,12 @@ class ImportStream extends stream.Transform {
}
}
function awaitStreamEnd(stream) {
return new Promise((resolve, reject) => {
stream.once('end', () => {
resolve(true);
});
stream.once('error', err => {
reject(err);
});
});
}
async function importDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, inputFile }) {
logger.info(`Importing database`);
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
try {
logger.info(`Connected.`);
logger.info(`Input file: ${inputFile}`);
const downloadedFile = await download(inputFile);
logger.info(`Downloaded file: ${downloadedFile}`);
@@ -72,9 +60,8 @@ async function importDatabase({ connection = undefined, systemConnection = undef
returnRichInfo: true,
});
const importStream = new ImportStream(dbhan, driver);
// @ts-ignore
splittedStream.pipe(importStream);
await awaitStreamEnd(importStream);
await streamPipeline(splittedStream, importStream);
} finally {
if (!systemConnection) {
await driver.close(dbhan);

View File

@@ -0,0 +1,110 @@
const path = require('path');
const fs = require('fs-extra');
const executeQuery = require('./executeQuery');
const { connectUtility } = require('../utility/connectUtility');
const requireEngineDriver = require('../utility/requireEngineDriver');
const { getAlterDatabaseScript, DatabaseAnalyser, runCommandOnDriver } = require('dbgate-tools');
const importDbModel = require('../utility/importDbModel');
const jsonLinesReader = require('./jsonLinesReader');
const tableWriter = require('./tableWriter');
const copyStream = require('./copyStream');
/**
* Deploys a database model stored in the given folder (tables as YAML files) to the database
* @param {object} options
* @param {connectionType} options.connection - connection object
* @param {object} options.systemConnection - system connection (result of driver.connect). If not provided, new connection will be created
* @param {object} options.driver - driver object. If not provided, it will be loaded from connection
* @param {string} options.folder - folder with model files (YAML files for tables, SQL files for views, procedures, ...)
* @param {function[]} options.modelTransforms - array of functions for transforming model
*/
async function importDbFromFolder({ connection, systemConnection, driver, folder, modelTransforms }) {
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
try {
const model = await importDbModel(folder);
let modelAdapted = {
...model,
tables: model.tables.map(table => driver.adaptTableInfo(table)),
};
for (const transform of modelTransforms || []) {
modelAdapted = transform(modelAdapted);
}
const modelNoFk = {
...modelAdapted,
tables: modelAdapted.tables.map(table => ({
...table,
foreignKeys: [],
})),
};
// const plan = createAlterDatabasePlan(
// DatabaseAnalyser.createEmptyStructure(),
// driver.dialect.enableAllForeignKeys ? modelAdapted : modelNoFk,
// {},
// DatabaseAnalyser.createEmptyStructure(),
// driver.dialect.enableAllForeignKeys ? modelAdapted : modelNoFk,
// driver
// );
// const dmp1 = driver.createDumper({ useHardSeparator: true });
// if (driver.dialect.enableAllForeignKeys) {
// dmp1.enableAllForeignKeys(false);
// }
// plan.run(dmp1);
// if (driver.dialect.enableAllForeignKeys) {
// dmp1.enableAllForeignKeys(true);
// }
const { sql } = getAlterDatabaseScript(
DatabaseAnalyser.createEmptyStructure(),
driver.dialect.enableAllForeignKeys ? modelAdapted : modelNoFk,
{},
DatabaseAnalyser.createEmptyStructure(),
driver.dialect.enableAllForeignKeys ? modelAdapted : modelNoFk,
driver
);
// console.log('CREATING STRUCTURE:', sql);
await executeQuery({ connection, systemConnection: dbhan, driver, sql, logScriptItems: true });
if (driver.dialect.enableAllForeignKeys) {
await runCommandOnDriver(dbhan, driver, dmp => dmp.enableAllForeignKeys(false));
}
for (const table of modelAdapted.tables) {
const fileName = path.join(folder, `${table.pureName}.jsonl`);
if (await fs.exists(fileName)) {
const src = await jsonLinesReader({ fileName });
const dst = await tableWriter({
systemConnection: dbhan,
pureName: table.pureName,
driver,
targetTableStructure: table,
});
await copyStream(src, dst);
}
}
if (driver.dialect.enableAllForeignKeys) {
await runCommandOnDriver(dbhan, driver, dmp => dmp.enableAllForeignKeys(true));
} else if (driver.dialect.createForeignKey) {
const dmp = driver.createDumper();
for (const table of modelAdapted.tables) {
for (const fk of table.foreignKeys) {
dmp.createForeignKey(fk);
}
}
// create foreign keys
await executeQuery({ connection, systemConnection: dbhan, driver, sql: dmp.s, logScriptItems: true });
}
} finally {
if (!systemConnection) {
await driver.close(dbhan);
}
}
}
module.exports = importDbFromFolder;
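
A rough usage sketch of the new importDbFromFolder API, following the JSDoc above; the connection object, folder layout and transform are illustrative and not taken from this diff:

const importDbFromFolder = require('./importDbFromFolder');

async function deployModel() {
  await importDbFromFolder({
    connection: { engine: 'postgres@dbgate-plugin-postgres', server: 'localhost', database: 'target_db' },
    folder: '/path/to/model', // *.yaml table definitions, *.sql objects, optional <table>.jsonl data
    modelTransforms: [
      // e.g. drop a table from the deployed model
      model => ({ ...model, tables: model.tables.filter(t => t.pureName != 'migrations') }),
    ],
  });
}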

View File

@@ -35,6 +35,7 @@ const sqlTextReplacementTransform = require('./sqlTextReplacementTransform');
const autoIndexForeignKeysTransform = require('./autoIndexForeignKeysTransform');
const generateDeploySql = require('./generateDeploySql');
const dropAllDbObjects = require('./dropAllDbObjects');
const importDbFromFolder = require('./importDbFromFolder');
const dbgateApi = {
queryReader,
@@ -73,6 +74,7 @@ const dbgateApi = {
autoIndexForeignKeysTransform,
generateDeploySql,
dropAllDbObjects,
importDbFromFolder,
};
requirePlugin.initializeDbgateApi(dbgateApi);

View File

@@ -53,8 +53,7 @@ async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undef
);
const liner = byline(fileStream);
const parser = new ParseStream({ limitRows });
liner.pipe(parser);
return parser;
return [liner, parser];
}
module.exports = jsonLinesReader;
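
Readers in this changeset stop pre-piping their internal streams and return the stages as an array instead; the actual piping is deferred to streamPipeline (added later in this diff), which flattens nested arrays. A tiny sketch of what a consumer now gets, under that assumption (someWriter is a placeholder):

// Before: the reader returned a single parser stream already piped to the line splitter.
// After:  it returns the stages; copyStream/streamPipeline flatten and pipe them.
const source = await jsonLinesReader({ fileName: '/tmp/rows.jsonl', limitRows: 100 });
// source is [linerStream, parserStream]
await copyStream(source, someWriter, {}); // someWriter may itself be a [transform, fileStream] pair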

View File

@@ -10,7 +10,6 @@ const download = require('./download');
const logger = getLogger('jsonReader');
class ParseStream extends stream.Transform {
constructor({ limitRows, jsonStyle, keyField }) {
super({ objectMode: true });
@@ -72,8 +71,12 @@ async function jsonReader({
// @ts-ignore
encoding
);
const parseJsonStream = parser();
fileStream.pipe(parseJsonStream);
const resultPipe = [fileStream, parseJsonStream];
// fileStream.pipe(parseJsonStream);
const parseStream = new ParseStream({ limitRows, jsonStyle, keyField });
@@ -81,15 +84,20 @@ async function jsonReader({
if (rootField) {
const filterStream = pick({ filter: rootField });
parseJsonStream.pipe(filterStream);
filterStream.pipe(tramsformer);
} else {
parseJsonStream.pipe(tramsformer);
resultPipe.push(filterStream);
// parseJsonStream.pipe(filterStream);
// filterStream.pipe(tramsformer);
}
// else {
// parseJsonStream.pipe(tramsformer);
// }
tramsformer.pipe(parseStream);
resultPipe.push(tramsformer);
resultPipe.push(parseStream);
return parseStream;
// tramsformer.pipe(parseStream);
return resultPipe;
}
module.exports = jsonReader;

View File

@@ -99,9 +99,10 @@ async function jsonWriter({ fileName, jsonStyle, keyField = '_key', rootField, e
logger.info(`Writing file ${fileName}`);
const stringify = new StringifyStream({ jsonStyle, keyField, rootField });
const fileStream = fs.createWriteStream(fileName, encoding);
stringify.pipe(fileStream);
stringify['finisher'] = fileStream;
return stringify;
return [stringify, fileStream];
// stringify.pipe(fileStream);
// stringify['finisher'] = fileStream;
// return stringify;
}
module.exports = jsonWriter;

View File

@@ -6,15 +6,13 @@ const exportDbModel = require('../utility/exportDbModel');
const logger = getLogger('analyseDb');
async function loadDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, outputDir }) {
logger.info(`Analysing database`);
logger.debug(`Analysing database`);
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
try {
logger.info(`Connected.`);
const dbInfo = await driver.analyseFull(dbhan);
logger.info(`Analyse finished`);
logger.debug(`Analyse finished`);
await exportDbModel(dbInfo, outputDir);
} finally {

View File

@@ -141,8 +141,9 @@ async function modifyJsonLinesReader({
);
const liner = byline(fileStream);
const parser = new ParseStream({ limitRows, changeSet, mergedRows, mergeKey, mergeMode });
liner.pipe(parser);
return parser;
return [liner, parser];
// liner.pipe(parser);
// return parser;
}
module.exports = modifyJsonLinesReader;

View File

@@ -30,7 +30,6 @@ async function queryReader({
const driver = requireEngineDriver(connection);
const pool = await connectUtility(driver, connection, queryType == 'json' ? 'read' : 'script');
logger.info(`Connected.`);
const reader =
queryType == 'json' ? await driver.readJsonQuery(pool, query) : await driver.readQuery(pool, query || sql);
return reader;

View File

@@ -44,9 +44,10 @@ async function sqlDataWriter({ fileName, dataName, driver, encoding = 'utf-8' })
logger.info(`Writing file ${fileName}`);
const stringify = new SqlizeStream({ fileName, dataName });
const fileStream = fs.createWriteStream(fileName, encoding);
stringify.pipe(fileStream);
stringify['finisher'] = fileStream;
return stringify;
return [stringify, fileStream];
// stringify.pipe(fileStream);
// stringify['finisher'] = fileStream;
// return stringify;
}
module.exports = sqlDataWriter;

View File

@@ -18,7 +18,6 @@ async function tableReader({ connection, systemConnection, pureName, schemaName,
driver = requireEngineDriver(connection);
}
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
logger.info(`Connected.`);
const fullName = { pureName, schemaName };

View File

@@ -15,6 +15,7 @@ const logger = getLogger('tableWriter');
* @param {boolean} options.truncate - truncate table before insert
* @param {boolean} options.createIfNotExists - create table if not exists
* @param {boolean} options.commitAfterInsert - commit transaction after insert
* @param {any} options.targetTableStructure - target table structure (don't analyse if given)
* @returns {Promise<writerType>} - writer object
*/
async function tableWriter({ connection, schemaName, pureName, driver, systemConnection, ...options }) {
@@ -25,7 +26,6 @@ async function tableWriter({ connection, schemaName, pureName, driver, systemCon
}
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
logger.info(`Connected.`);
return await driver.writeTable(dbhan, { schemaName, pureName }, options);
}

View File

@@ -82,6 +82,9 @@ function packagedPluginsDir() {
if (platformInfo.isAwsUbuntuLayout) {
return '/home/ubuntu/build/plugins';
}
if (platformInfo.isAzureUbuntuLayout) {
return '/home/azureuser/build/plugins';
}
if (platformInfo.isNpmDist) {
// node_modules
return global['PLUGINS_DIR'];

View File

@@ -73,6 +73,7 @@ async function getPublicHardwareFingerprint() {
region: fingerprint.region,
isDocker: platformInfo.isDocker,
isAwsUbuntuLayout: platformInfo.isAwsUbuntuLayout,
isAzureUbuntuLayout: platformInfo.isAzureUbuntuLayout,
isElectron: platformInfo.isElectron,
},
};

View File

@@ -15,6 +15,7 @@ const isNpmDist = !!global['IS_NPM_DIST'];
const isDbModel = !!global['IS_DB_MODEL'];
const isForkedApi = processArgs.isForkedApi;
const isAwsUbuntuLayout = fs.existsSync('/home/ubuntu/build/public');
const isAzureUbuntuLayout = fs.existsSync('/home/azureuser/build/public');
// function moduleAvailable(name) {
// try {
@@ -57,6 +58,7 @@ const platformInfo = {
allowConnectionFromEnvVariables: !!isDbModel,
defaultKeyfile: path.join(os.homedir(), '.ssh/id_rsa'),
isAwsUbuntuLayout,
isAzureUbuntuLayout,
};
module.exports = platformInfo;

View File

@@ -0,0 +1,18 @@
const stream = require('stream');
const _ = require('lodash');
function streamPipeline(...processedStreams) {
const streams = _.flattenDeep(processedStreams);
return new Promise((resolve, reject) => {
// @ts-ignore
stream.pipeline(...streams, err => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
module.exports = streamPipeline;
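
The helper wraps Node's stream.pipeline in a promise and deep-flattens its arguments, which is what lets readers and writers above return arrays of stages. Roughly, with placeholder stream variables:

// liner -> parser -> column map -> stringify -> file, as one error-propagating pipeline
await streamPipeline(
  [linerStream, parserStream],          // e.g. from jsonLinesReader
  new ColumnMapTransformStream(columns),
  [stringifyStream, fileStream]         // e.g. from jsonWriter
);
// Resolves when the last stage finishes and rejects if any stage errors,
// replacing the manual .pipe() chains and the output['finisher'] workaround.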

View File

@@ -86,7 +86,7 @@ module.exports = function useController(app, electron, route, controller) {
detail: err.detail,
});
} else {
res.status(500).json({ apiErrorMessage: err.message });
res.status(500).json({ apiErrorMessage: (_.isString(err) ? err : err.message) ?? 'Unknown error' });
}
}
});

View File

@@ -9,7 +9,7 @@ import {
AllowIdentityInsert,
Expression,
} from 'dbgate-sqltree';
import type { NamedObjectInfo, DatabaseInfo, TableInfo } from 'dbgate-types';
import type { NamedObjectInfo, DatabaseInfo, TableInfo, SqlDialect } from 'dbgate-types';
import { JsonDataObjectUpdateCommand } from 'dbgate-tools';
export interface ChangeSetItem {
@@ -230,41 +230,79 @@ export function batchUpdateChangeSet(
return changeSet;
}
function extractFields(item: ChangeSetItem, allowNulls = true, allowedDocumentColumns: string[] = []): UpdateField[] {
function extractFields(
item: ChangeSetItem,
allowNulls = true,
allowedDocumentColumns: string[] = [],
table?: TableInfo,
dialect?: SqlDialect
): UpdateField[] {
const allFields = {
...item.fields,
};
function isUuidColumn(columnName: string): boolean {
return table?.columns.find(x => x.columnName == columnName)?.dataType.toLowerCase() == 'uuid';
}
function createUpdateField(targetColumn: string): UpdateField {
const shouldGenerateDefaultValue =
isUuidColumn(targetColumn) && allFields[targetColumn] == null && dialect?.generateDefaultValueForUuid;
if (shouldGenerateDefaultValue) {
return {
targetColumn,
sql: dialect?.generateDefaultValueForUuid,
exprType: 'raw',
};
}
return {
targetColumn,
exprType: 'value',
value: allFields[targetColumn],
dataType: table?.columns?.find(x => x.columnName == targetColumn)?.dataType,
};
}
for (const docField in item.document || {}) {
if (allowedDocumentColumns.includes(docField)) {
allFields[docField] = item.document[docField];
}
}
return _.keys(allFields)
.filter(targetColumn => allowNulls || allFields[targetColumn] != null)
.map(targetColumn => ({
targetColumn,
exprType: 'value',
value: allFields[targetColumn],
}));
const columnNames = Object.keys(allFields);
if (dialect?.generateDefaultValueForUuid && table) {
columnNames.push(...table.columns.map(i => i.columnName));
}
return _.uniq(columnNames)
.filter(
targetColumn =>
allowNulls ||
allFields[targetColumn] != null ||
(isUuidColumn(targetColumn) && dialect?.generateDefaultValueForUuid)
)
.map(targetColumn => createUpdateField(targetColumn));
}
function changeSetInsertToSql(
item: ChangeSetItem,
dbinfo: DatabaseInfo = null
dbinfo: DatabaseInfo = null,
dialect: SqlDialect = null
): [AllowIdentityInsert, Insert, AllowIdentityInsert] {
const table = dbinfo?.tables?.find(x => x.schemaName == item.schemaName && x.pureName == item.pureName);
const fields = extractFields(
item,
false,
table?.columns?.map(x => x.columnName)
table?.columns?.map(x => x.columnName),
table,
dialect
);
if (fields.length == 0) return null;
let autoInc = false;
if (table) {
const autoIncCol = table.columns.find(x => x.autoIncrement);
// console.log('autoIncCol', autoIncCol);
if (autoIncCol && fields.find(x => x.targetColumn == autoIncCol.columnName)) {
autoInc = true;
}
@@ -299,19 +337,28 @@ function changeSetInsertToSql(
];
}
export function extractChangeSetCondition(item: ChangeSetItem, alias?: string): Condition {
export function extractChangeSetCondition(
item: ChangeSetItem,
alias?: string,
table?: TableInfo,
dialect?: SqlDialect
): Condition {
function getColumnCondition(columnName: string): Condition {
const dataType = table?.columns?.find(x => x.columnName == columnName)?.dataType;
const value = item.condition[columnName];
const expr: Expression = {
exprType: 'column',
columnName,
source: {
name: {
pureName: item.pureName,
schemaName: item.schemaName,
},
alias,
},
source: dialect?.omitTableBeforeColumn
? undefined
: {
name: {
pureName: item.pureName,
schemaName: item.schemaName,
},
alias,
},
};
if (value == null) {
return {
@@ -325,6 +372,7 @@ export function extractChangeSetCondition(item: ChangeSetItem, alias?: string):
left: expr,
right: {
exprType: 'value',
dataType,
value,
},
};
@@ -366,7 +414,7 @@ function compileSimpleChangeSetCondition(fields: { [column: string]: string }):
};
}
function changeSetUpdateToSql(item: ChangeSetItem, dbinfo: DatabaseInfo = null): Update {
function changeSetUpdateToSql(item: ChangeSetItem, dbinfo: DatabaseInfo = null, dialect: SqlDialect = null): Update {
const table = dbinfo?.tables?.find(x => x.schemaName == item.schemaName && x.pureName == item.pureName);
const autoIncCol = table?.columns?.find(x => x.autoIncrement);
@@ -382,13 +430,16 @@ function changeSetUpdateToSql(item: ChangeSetItem, dbinfo: DatabaseInfo = null):
fields: extractFields(
item,
true,
table?.columns?.map(x => x.columnName).filter(x => x != autoIncCol?.columnName)
table?.columns?.map(x => x.columnName).filter(x => x != autoIncCol?.columnName),
table
),
where: extractChangeSetCondition(item),
where: extractChangeSetCondition(item, undefined, table, dialect),
};
}
function changeSetDeleteToSql(item: ChangeSetItem): Delete {
function changeSetDeleteToSql(item: ChangeSetItem, dbinfo: DatabaseInfo = null, dialect: SqlDialect = null): Delete {
const table = dbinfo?.tables?.find(x => x.schemaName == item.schemaName && x.pureName == item.pureName);
return {
from: {
name: {
@@ -397,16 +448,16 @@ function changeSetDeleteToSql(item: ChangeSetItem): Delete {
},
},
commandType: 'delete',
where: extractChangeSetCondition(item),
where: extractChangeSetCondition(item, undefined, table, dialect),
};
}
export function changeSetToSql(changeSet: ChangeSet, dbinfo: DatabaseInfo): Command[] {
export function changeSetToSql(changeSet: ChangeSet, dbinfo: DatabaseInfo, dialect: SqlDialect): Command[] {
return _.compact(
_.flatten([
...(changeSet.inserts.map(item => changeSetInsertToSql(item, dbinfo)) as any),
...changeSet.updates.map(item => changeSetUpdateToSql(item, dbinfo)),
...changeSet.deletes.map(changeSetDeleteToSql),
...(changeSet.inserts.map(item => changeSetInsertToSql(item, dbinfo, dialect)) as any),
...changeSet.updates.map(item => changeSetUpdateToSql(item, dbinfo, dialect)),
...changeSet.deletes.map(item => changeSetDeleteToSql(item, dbinfo, dialect)),
])
);
}
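
The dialect now reaches change-set SQL generation, so inserts can fall back to a dialect-supplied default for uuid columns and value expressions can carry their dataType. A hedged illustration, assuming a dialect whose generateDefaultValueForUuid is gen_random_uuid() (the exact SQL text depends on the engine's dumper):

// Change-set item with no value for the uuid primary key "id":
const item = { pureName: 'users', schemaName: 'public', fields: { name: 'Alice' } };

// extractFields would now emit, in addition to the "name" value field:
//   { targetColumn: 'id', exprType: 'raw', sql: 'gen_random_uuid()' }
// so the generated INSERT would resemble:
//   INSERT INTO "public"."users" ("name", "id") VALUES ('Alice', gen_random_uuid());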

View File

@@ -253,7 +253,12 @@ export abstract class GridDisplay {
orCondition.conditions.push(
_.cloneDeepWith(condition, (expr: Expression) => {
if (expr.exprType == 'placeholder') {
return this.createColumnExpression(column, { alias: 'basetbl' }, undefined, 'filter');
return this.createColumnExpression(
column,
!this.dialect.omitTableAliases ? { alias: 'basetbl' } : undefined,
undefined,
'filter'
);
}
})
);
@@ -364,7 +369,7 @@ export abstract class GridDisplay {
}
getColumns(columnFilter) {
return this.columns.filter(col => filterName(columnFilter, col.columnName));
return this.columns.filter(col => filterName(columnFilter, col.columnName?.toString()));
}
getGridColumns() {
@@ -584,7 +589,7 @@ export abstract class GridDisplay {
}
return {
exprType: 'column',
alias: alias || col.columnName,
...(!this.dialect.omitTableAliases && { alias: alias || col.columnName }),
source,
...col,
};
@@ -597,9 +602,16 @@ export abstract class GridDisplay {
commandType: 'select',
from: {
name: _.pick(name, ['schemaName', 'pureName']),
alias: 'basetbl',
...(!this.dialect.omitTableAliases && { alias: 'basetbl' }),
},
columns: columns.map(col => this.createColumnExpression(col, { alias: 'basetbl' }, undefined, 'view')),
columns: columns.map(col =>
this.createColumnExpression(
col,
!this.dialect.omitTableAliases ? { alias: 'basetbl' } : undefined,
undefined,
'view'
)
),
orderBy: this.driver?.requiresDefaultSortCriteria
? [
{
@@ -611,7 +623,10 @@ export abstract class GridDisplay {
: null,
};
const displayedColumnInfo = _.keyBy(
this.columns.map(col => ({ ...col, sourceAlias: 'basetbl' })),
this.columns.map(col => ({
...col,
...(!this.dialect.omitTableAliases && { sourceAlias: 'basetbl' }),
})),
'uniqueName'
);
this.processReferences(select, displayedColumnInfo, options);
@@ -639,7 +654,7 @@ export abstract class GridDisplay {
? x
: {
...x,
source: { alias: 'basetbl' },
...(!this.dialect.omitTableAliases && { source: { alias: 'basetbl' } }),
}
)
: [
@@ -695,6 +710,12 @@ export abstract class GridDisplay {
// return sql;
}
getPageQueryText(offset: number, count: number) {
const select = this.getPageQuery(offset, count);
const sql = treeToSql(this.driver, select, dumpSqlSelect);
return sql;
}
getExportQuery(postprocessSelect = null) {
const select = this.createSelect({ isExport: true });
if (!select) return null;
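
The repeated ...(!this.dialect.omitTableAliases && { ... }) construct is the usual conditional-spread idiom: when the condition is falsy the expression evaluates to false, and spreading false into an object literal adds nothing, so dialects that cannot handle table aliases get a select with no alias/sourceAlias keys at all rather than undefined values. For example:

const omitTableAliases = true;
const from = {
  name: { pureName: 'customers' },
  ...(!omitTableAliases && { alias: 'basetbl' }),
};
// omitTableAliases === true  -> { name: { pureName: 'customers' } }
// omitTableAliases === false -> { name: { pureName: 'customers' }, alias: 'basetbl' }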

View File

@@ -149,7 +149,8 @@ export class TableGridDisplay extends GridDisplay {
},
right: {
exprType: 'column',
columnName: table.primaryKey.columns[0].columnName,
columnName:
column.foreignKey?.columns?.[0]?.refColumnName ?? table.primaryKey?.columns?.[0]?.columnName,
source: { name: table, alias: childAlias },
},
},
@@ -274,7 +275,11 @@ export class TableGridDisplay extends GridDisplay {
const refTableInfo = this.dbinfo.tables.find(
x => x.schemaName == res.foreignKey.refSchemaName && x.pureName == res.foreignKey.refTableName
);
if (refTableInfo && isTableColumnUnique(refTableInfo, res.foreignKey.columns[0].refColumnName)) {
if (
refTableInfo &&
res.foreignKey.columns.length == 1 &&
isTableColumnUnique(refTableInfo, res.foreignKey.columns[0].refColumnName)
) {
res.isForeignKeyUnique = true;
}
}

View File

@@ -52,6 +52,8 @@ export function dumpSqlSelect(dmp: SqlDumper, cmd: Select) {
if (cmd.range) {
if (dmp.dialect.offsetFetchRangeSyntax) {
dmp.put('^offset %s ^rows ^fetch ^next %s ^rows ^only', cmd.range.offset, cmd.range.limit);
} else if (dmp.dialect.offsetNotSupported) {
dmp.put('^limit %s', cmd.range.limit + cmd.range.offset);
} else {
dmp.put('^limit %s ^offset %s ', cmd.range.limit, cmd.range.offset);
}

View File

@@ -21,7 +21,14 @@ export function dumpSqlExpression(dmp: SqlDumper, expr: Expression) {
break;
case 'value':
dmp.put('%v', expr.value);
if (expr.dataType) {
dmp.put('%V', {
value: expr.value,
dataType: expr.dataType,
});
} else {
dmp.put('%v', expr.value);
}
break;
case 'raw':

View File

@@ -182,6 +182,7 @@ export interface ColumnRefExpression {
export interface ValueExpression {
exprType: 'value';
value: any;
dataType?: string;
}
export interface PlaceholderExpression {

View File

@@ -41,12 +41,13 @@ export class ScriptWriter {
this.packageNames.push(packageName);
}
copyStream(sourceVar, targetVar, colmapVar = null) {
if (colmapVar) {
this._put(`await dbgateApi.copyStream(${sourceVar}, ${targetVar}, {columns: ${colmapVar}});`);
} else {
this._put(`await dbgateApi.copyStream(${sourceVar}, ${targetVar});`);
}
copyStream(sourceVar, targetVar, colmapVar = null, progressName?: string) {
let opts = '{';
if (colmapVar) opts += `columns: ${colmapVar}, `;
if (progressName) opts += `progressName: "${progressName}", `;
opts += '}';
this._put(`await dbgateApi.copyStream(${sourceVar}, ${targetVar}, ${opts});`);
}
dumpDatabase(options) {
@@ -117,12 +118,13 @@ export class ScriptWriterJson {
});
}
copyStream(sourceVar, targetVar, colmapVar = null) {
copyStream(sourceVar, targetVar, colmapVar = null, progressName?: string) {
this.commands.push({
type: 'copyStream',
sourceVar,
targetVar,
colmapVar,
progressName,
});
}
@@ -183,7 +185,7 @@ export function jsonScriptToJavascript(json) {
script.assignValue(cmd.variableName, cmd.jsonValue);
break;
case 'copyStream':
script.copyStream(cmd.sourceVar, cmd.targetVar, cmd.colmapVar);
script.copyStream(cmd.sourceVar, cmd.targetVar, cmd.colmapVar, cmd.progressName);
break;
case 'endLine':
script.endLine();
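
On the script-generation side, progressName is simply appended to the options literal of the emitted copyStream call, so a generated import line now looks roughly like the following (variable and progress names are illustrative):

await dbgateApi.copyStream(queryReader_1, tableWriter_1, {columns: colmap_1, progressName: "public.customers", });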

Some files were not shown because too many files have changed in this diff.