Compare commits

...

246 Commits

Author SHA1 Message Date
Jan Prochazka
f2d15e2f84 preinstall plugins 2020-11-27 21:26:52 +01:00
Jan Prochazka
c4ab06a14b plugin fixes 2020-11-27 18:41:23 +01:00
Jan Prochazka
6729317fcb remove pacote - blocks webpack build 2020-11-26 17:24:30 +01:00
Jan Prochazka
3533683a32 fixed require problem in webpacked app 2020-11-26 15:42:50 +01:00
Jan Prochazka
d7ceb297e9 removed bindings to engines 2020-11-26 14:25:51 +01:00
Jan Prochazka
06e98cff9f fixes 2020-11-26 14:23:31 +01:00
Jan Prochazka
c96cb08cfd backend - using engine driver from plugin 2020-11-24 20:42:02 +01:00
Jan Prochazka
424aff5d93 frontend - removed references to dbgate-engines 2020-11-24 20:15:07 +01:00
Jan Prochazka
644a35d8c3 remove refs 2020-11-24 19:43:28 +01:00
Jan Prochazka
fb3af22302 reduced package size 2020-11-24 19:17:27 +01:00
Jan Prochazka
556a35f4ba reduce dbgate-tools package size 2020-11-24 19:06:05 +01:00
Jan Prochazka
5862a2cdc4 plugin list 2020-11-24 18:42:03 +01:00
Jan Prochazka
eef3195ee1 support webpack minimalize for frontend plugins 2020-11-23 21:01:53 +01:00
Jan Prochazka
ea2996c9b3 fix 2020-11-23 20:51:28 +01:00
Jan Prochazka
88cf6d35ed refactor - default dbgate engine moved to dbgate-tools 2020-11-23 20:49:25 +01:00
Jan Prochazka
d3cfc44fd9 disable axios cache 2020-11-22 18:00:01 +01:00
Jan Prochazka
994195667d fix 2020-11-22 17:29:33 +01:00
Jan Prochazka
a22320e141 removed MS excel support, moved to plugin 2020-11-22 17:25:41 +01:00
Jan Prochazka
e23e749cc5 CSV - completely remove, logic moved to plugin 2020-11-22 09:17:51 +01:00
Jan Prochazka
3cdba4339f uploads - moved logic to FE because of plugins 2020-11-22 09:14:19 +01:00
Jan Prochazka
7d1c0c5c18 runner openreader - support for plugins 2020-11-22 09:03:16 +01:00
Jan Prochazka
286cac066c backend exporters/importers from plugins 2020-11-21 21:16:22 +01:00
Jan Prochazka
6ed3eaa896 useExtensions fixed (extensions in context) 2020-11-21 20:20:47 +01:00
Jan Prochazka
5ec39054a3 extensions refactor 2020-11-21 20:12:33 +01:00
Jan Prochazka
3009724a82 removed fileformatbase 2020-11-21 18:03:35 +01:00
Jan Prochazka
8ab8034060 usePlugins 2020-11-21 17:40:16 +01:00
Jan Prochazka
e14165c403 install/uninstall plugin 2020-11-21 17:33:59 +01:00
Jan Prochazka
1f4a93f1d5 download plugin, show readme 2020-11-21 10:32:25 +01:00
Jan Prochazka
3771134b1c search plugins, plugin tab 2020-11-21 10:01:19 +01:00
Jan Prochazka
e2ee1f7561 v3.7.33 2020-11-19 15:15:06 +01:00
Jan Prochazka
b92e28695e jsl data filter (archive, query result) 2020-11-19 15:09:31 +01:00
Jan Prochazka
4a7d45e4d0 fix 2020-11-19 12:48:21 +01:00
Jan Prochazka
9a2520a10a datagrid context menu 2020-11-19 12:42:13 +01:00
Jan Prochazka
682f53881d export from free table editor 2020-11-19 12:34:03 +01:00
Jan Prochazka
03451c6897 single file options in excel export 2020-11-19 12:07:30 +01:00
Jan Prochazka
7ccb1d9c90 next csv params 2020-11-19 11:36:34 +01:00
Jan Prochazka
0cd3e393e8 csv delimiter configuration 2020-11-19 11:24:05 +01:00
Jan Prochazka
da805db44b using xlsx lib instead of exceljs, export excel files 2020-11-19 10:30:06 +01:00
Jan Prochazka
6324fd1de4 file format refactor 2020-11-19 08:36:56 +01:00
Jan Prochazka
64362cdf13 rename 2020-11-19 08:02:16 +01:00
Jan Prochazka
ac049f43a3 file formats refactor 2020-11-19 07:59:03 +01:00
Jan Prochazka
ef3c96f574 readme 2020-11-17 18:48:26 +01:00
Jan Prochazka
d0acbb4054 fix 2020-11-17 09:11:25 +01:00
Jan Prochazka
03c8c8c21f npm links 2020-11-17 09:10:06 +01:00
Jan Prochazka
f8bc990f81 missing dependency 2020-11-17 09:01:28 +01:00
Jan Prochazka
ea2709086c missing dependency 2020-11-17 08:57:08 +01:00
Jan Prochazka
f98f4414e4 missing dependency 2020-11-17 08:40:08 +01:00
Jan Prochazka
0486e9cc8c reduced API package size 2020-11-17 08:34:15 +01:00
Jan Prochazka
a3a4923397 npm packages 2020-11-17 08:27:14 +01:00
Jan Prochazka
b51a343323 npm packages - keywords 2020-11-17 07:48:45 +01:00
Jan Prochazka
61cb445237 package version 2020-11-17 07:38:58 +01:00
Jan Prochazka
3a83418fe4 documentation 2020-11-17 07:37:32 +01:00
Jan Prochazka
b849e5f81c v3.7.32 2020-11-16 21:59:27 +01:00
Jan Prochazka
f7ebcd9537 renamed dbgate packages, because of npmjs policy 2020-11-16 21:59:08 +01:00
Jan Prochazka
09b26319f0 readme 2020-11-16 21:51:04 +01:00
Jan Prochazka
2a032dfc33 readme 2020-11-16 21:35:18 +01:00
Jan Prochazka
5384677c6c readme 2020-11-16 20:20:08 +01:00
Jan Prochazka
7572cd273f readme, screenshot 2020-11-16 20:18:49 +01:00
Jan Prochazka
aee0a0fe2e import-export wizard design 2020-11-16 19:51:46 +01:00
Jan Prochazka
e1732d83fb v3.7.31 2020-11-15 20:55:52 +01:00
Jan Prochazka
86325701b3 fix 2020-11-15 20:48:41 +01:00
Jan Prochazka
72776f3297 refresh archive folder after import 2020-11-15 20:46:42 +01:00
Jan Prochazka
801bf05a31 import export- cancelable, better design 2020-11-15 18:55:42 +01:00
Jan Prochazka
eaf45d8768 import into archive improvement 2020-11-15 17:18:33 +01:00
Jan Prochazka
075146403a preview in import dialog 2020-11-15 09:43:44 +01:00
Jan Prochazka
844ebf129a fix 2020-11-14 18:03:54 +01:00
Jan Prochazka
b1ce2f7b90 v3.7.30 2020-11-13 19:18:39 +01:00
Jan Prochazka
17c4d21347 fix 2020-11-13 18:58:32 +01:00
Jan Prochazka
7f7d39cfc2 dark mode fixes 2020-11-13 18:43:06 +01:00
Jan Prochazka
08efc787c7 v3.7.29 2020-11-12 15:58:41 +01:00
Jan Prochazka
8b610cdf32 styling 2020-11-12 15:58:01 +01:00
Jan Prochazka
2eca08944f theme 2020-11-12 15:38:14 +01:00
Jan Prochazka
5eace3e332 theme 2020-11-12 15:28:37 +01:00
Jan Prochazka
f30e7da503 theme - statusbar, icons 2020-11-12 14:51:27 +01:00
Jan Prochazka
a8d88d05db theme - modals, react select, tables 2020-11-12 14:20:02 +01:00
Jan Prochazka
a49f429f13 editor theme 2020-11-12 11:55:42 +01:00
Jan Prochazka
aa0501a729 scrollbar styling 2020-11-12 11:35:39 +01:00
Jan Prochazka
7658a2838a theme 2020-11-12 11:04:44 +01:00
Jan Prochazka
c7b693cfb6 theme 2020-11-12 09:46:01 +01:00
Jan Prochazka
4ef7f275e6 themable colors 2020-11-11 18:30:44 +01:00
Jan Prochazka
98d7b3c6b9 theme basic 2020-11-10 21:21:22 +01:00
Jan Prochazka
c8f7dc3d2c icon names refactor 2020-11-10 18:21:29 +01:00
Jan Prochazka
1c2dedfef3 using fonticon instead of span 2020-11-10 17:34:00 +01:00
Jan Prochazka
1169e23997 fix 2020-11-10 17:15:14 +01:00
Jan Prochazka
1de4294a4e v3.7.28 2020-11-09 21:07:54 +01:00
Jan Prochazka
f890edacea icon cleanup 2020-11-09 21:07:32 +01:00
Jan Prochazka
00d693e9e4 mdi font icons 2020-11-09 21:03:52 +01:00
Jan Prochazka
ca6d552f5b font icons instead of SVG icons 2020-11-09 20:49:06 +01:00
Jan Prochazka
a7fcf1d3a3 used mdi icons instead of font awesome 2020-11-09 19:33:46 +01:00
Jan Prochazka
4bf797c27d mdi icons 2020-11-09 18:42:16 +01:00
Jan Prochazka
7819cc9541 clean temp files 2020-11-09 18:03:18 +01:00
Jan Prochazka
caf773bd64 v3.7.27 2020-11-09 08:25:15 +01:00
Jan Prochazka
1379ba5026 build fix 2020-11-09 08:24:49 +01:00
Jan Prochazka
593e919e32 v3.7.26 2020-11-08 20:54:59 +01:00
Jan Prochazka
21c26067ef usability 2020-11-08 20:52:11 +01:00
Jan Prochazka
fd12eef0fc modal style, import-export configurator style 2020-11-08 20:50:12 +01:00
Jan Prochazka
6fb314c414 import using drag & drop 2020-11-07 21:52:35 +01:00
Jan Prochazka
c65806fd89 directory cleanup on startup 2020-11-07 17:54:43 +01:00
Jan Prochazka
307aaa2801 upload proof of concept 2020-11-07 11:43:31 +01:00
Jan Prochazka
19dadcd4ae export/import fix 2020-11-07 10:30:12 +01:00
Jan Prochazka
d234226750 cell data autodetect format, collapsed views in detail 2020-11-07 10:27:08 +01:00
Jan Prochazka
c57ec68916 v3.7.25 2020-11-05 17:42:56 +01:00
Jan Prochazka
dfc8c75d76 fix 2020-11-05 14:38:01 +01:00
Jan Prochazka
399d194771 json cell data view 2020-11-05 14:33:51 +01:00
Jan Prochazka
7b64e33e92 cell data view 2020-11-05 14:11:31 +01:00
Jan Prochazka
42ffd49f6e code cleanup 2020-11-05 12:42:05 +01:00
Jan Prochazka
3982a28549 code cleanup 2020-11-05 12:38:24 +01:00
Jan Prochazka
f5e243a77f resizable widgets in grid 2020-11-05 12:33:31 +01:00
Jan Prochazka
7888cf6714 resizable widgets 2020-11-05 12:17:34 +01:00
Jan Prochazka
fd9fa0c95a v3.7.24 2020-11-01 15:54:00 +01:00
Jan Prochazka
0d2120e96b fix 2020-11-01 15:52:31 +01:00
Jan Prochazka
229f0ea9c1 fix 2020-11-01 12:59:26 +01:00
Jan Prochazka
c9308255a7 macro icon 2020-11-01 12:54:27 +01:00
Jan Prochazka
8ff44e41b1 extract date fields macro 2020-11-01 12:46:34 +01:00
Jan Prochazka
ab2fb3bf97 data macros (able to change columns) + duplicate column macro 2020-11-01 12:09:40 +01:00
Jan Prochazka
d5b8433c17 rows macros 2020-11-01 11:26:46 +01:00
Jan Prochazka
cb0aee6476 handle macro errors 2020-11-01 10:47:13 +01:00
Jan Prochazka
4efa87c3c8 macro checkbox args + search & replace improved 2020-11-01 10:22:14 +01:00
Jan Prochazka
20180fe4c4 macro default parameter values 2020-11-01 09:36:45 +01:00
Jan Prochazka
80e17eff39 column copy, fix column rename 2020-10-31 11:37:44 +01:00
Jan Prochazka
9bed46fe01 fix 2020-10-31 11:20:34 +01:00
Jan Prochazka
44059f1215 next macros 2020-10-31 11:04:10 +01:00
Jan Prochazka
4593ab7c46 macro - preview modified cells 2020-10-31 10:29:10 +01:00
Jan Prochazka
68cf397473 execute macro 2020-10-31 10:01:55 +01:00
Jan Prochazka
cc385c12ec macro preview 2020-10-31 09:04:59 +01:00
Jan Prochazka
d243e8cee5 editable flag moved to grider 2020-10-30 19:33:21 +01:00
Jan Prochazka
5f56aa2cf6 macro detail view 2020-10-30 19:27:19 +01:00
Jan Prochazka
ce38f7da4c open selection in free table editor 2020-10-29 10:59:50 +01:00
Jan Prochazka
3f14fec678 better loading free table data 2020-10-29 10:07:09 +01:00
Jan Prochazka
b39af32426 v3.7.23 2020-10-28 18:51:40 +01:00
Jan Prochazka
f81cefa8cb save button in free table grid 2020-10-28 18:47:29 +01:00
Jan Prochazka
8a2b6f3f37 free table editor - save and load 2020-10-28 18:42:02 +01:00
Jan Prochazka
2ba0c2cc46 free table editing operations 2020-10-27 21:31:22 +01:00
Jan Prochazka
6e4a53a2ab free table column editor 2020-10-26 18:47:02 +01:00
Jan Prochazka
c80510c37b free table infrastructure 2020-10-25 09:31:00 +01:00
Jan Prochazka
857f3fb4f7 Merge branch 'grid-refactor' 2020-10-25 07:14:28 +01:00
Jan Prochazka
22a263a598 code cleanup 2020-10-24 21:36:17 +02:00
Jan Prochazka
f9f2a501ab paste using grider implementation 2020-10-24 21:27:56 +02:00
Jan Prochazka
45d172d0b1 grider refactor WIP 2020-10-24 21:05:24 +02:00
Jan Prochazka
00453ae379 basic updates again working 2020-10-24 20:31:03 +02:00
Jan Prochazka
abc007753a grider refactor 2020-10-24 18:35:26 +02:00
Jan Prochazka
b314e363cd loading grid - split into JslDataGridCode and SqlDataGridCore 2020-10-24 09:32:06 +02:00
Jan Prochazka
b439c7bb70 data grid refactor - working read only 2020-10-22 15:22:58 +02:00
Jan Prochazka
7704e9b305 Merge branch 'master' into grid-refactor 2020-10-22 12:32:05 +02:00
Jan Prochazka
9adf7a6ae2 datastore 2020-10-22 11:27:23 +02:00
Jan Prochazka
7681f9e1ec jsl data source - renamed private methods 2020-10-22 10:04:52 +02:00
Jan Prochazka
541f064ddb jsl data store fix - uses lock, tested OK on all problematic queries 2020-10-22 10:02:11 +02:00
Jan Prochazka
61b4bf91b0 jsl data refactor 2020-10-22 09:39:18 +02:00
Jan Prochazka
da1617729b jsl data refactor 2020-10-22 08:23:53 +02:00
Jan Prochazka
c4914429ce v3.7.22 2020-10-19 17:21:59 +02:00
Jan Prochazka
1a54d6bab0 close all fix 2020-10-19 17:11:27 +02:00
Jan Prochazka
a7ed6bf62b loading data grid 2020-10-18 18:13:58 +02:00
Jan Prochazka
6792b652fb import from archive 2020-10-18 11:53:35 +02:00
Jan Prochazka
e833853d3f better tabs panel grouping 2020-10-18 10:50:48 +02:00
Jan Prochazka
69ea8010d2 archive 2020-10-18 10:36:24 +02:00
Jan Prochazka
b0f0710a75 archive, export into archive 2020-10-17 17:59:36 +02:00
Jan Prochazka
39a4c39b6d v3.7.21 2020-10-10 22:33:38 +02:00
Jan Prochazka
74d3407048 local storage garbage collector + delete older tabs 2020-10-10 18:25:14 +02:00
Jan Prochazka
a07d99c731 recently closed tabs 2020-10-10 17:43:16 +02:00
Jan Prochazka
598c48069a memorize grid config 2020-10-10 16:31:11 +02:00
Jan Prochazka
7aec8ccf99 table child config refactor 2020-10-10 16:15:43 +02:00
Jan Prochazka
5638706252 v3.7.20 2020-10-08 15:44:55 +02:00
Jan Prochazka
474d3962e2 show generated query 2020-10-08 15:42:53 +02:00
Jan Prochazka
3819bf9bd7 export query 2020-10-08 15:34:06 +02:00
Jan Prochazka
360a4ef1bc #40 export query result 2020-10-04 09:10:14 +02:00
Jan Prochazka
34fff77f66 #39 navigation between grid and filters 2020-10-03 11:14:04 +02:00
Jan Prochazka
c13f8b4786 #38 2020-10-02 13:51:56 +02:00
Jan Prochazka
73b5b86ace open related table column menu #41 2020-10-02 13:24:55 +02:00
Jan Prochazka
c2316c7006 v3.7.19 2020-09-28 14:38:08 +02:00
Jan Prochazka
6548400b96 bulk inserter - fixes for mysql and postgres 2020-09-28 14:33:47 +02:00
Jan Prochazka
29a7b68b59 driver commons refactor, default bulkinserter for mysql and postgres 2020-09-28 13:54:31 +02:00
Jan Prochazka
5359f850dd bulk insert refactor 2020-09-28 12:58:45 +02:00
Jan Prochazka
36dffe0a0f postgres - stream query reader 2020-09-28 12:09:01 +02:00
Jan Prochazka
a88e38dcf7 postgre incremental analysis, fixed mysql incremental analysis 2020-09-28 11:33:25 +02:00
Jan Prochazka
fbd963bfb1 #37 2020-09-27 09:05:00 +02:00
Jan Prochazka
154a4fc7d9 #37 2020-09-27 08:28:59 +02:00
Jan Prochazka
5fa36c40f2 import export modal - ability to run job directly 2020-09-26 18:15:29 +02:00
Jan Prochazka
eb3aec0978 v3.7.18 2020-09-24 14:56:54 +02:00
Jan Prochazka
f9b3691a58 fixed potential problem 2020-09-24 14:11:23 +02:00
Jan Prochazka
55eda6c1b0 #34 2020-09-24 13:52:32 +02:00
Jan Prochazka
6b04593343 #33 2020-09-24 12:49:18 +02:00
Jan Prochazka
e9cbd72100 v3.7.17 2020-09-24 11:16:20 +02:00
Jan Prochazka
dacb810768 added Loading dbgate... text 2020-09-24 11:14:46 +02:00
Jan Prochazka
61e881d4d9 #33 2020-09-24 10:44:20 +02:00
Jan Prochazka
c00ccd61bb style fix 2020-09-24 09:29:04 +02:00
Jan Prochazka
36dfa7c740 v3.7.16 2020-06-29 20:22:18 +02:00
Jan Prochazka
71861779e8 postgre analyser 2020-06-29 20:21:37 +02:00
Jan Prochazka
3c1be39976 postgre analyser 2020-06-29 19:49:54 +02:00
Jan Prochazka
eaaa7beaa1 fixes 2020-06-29 08:07:50 +02:00
Jan Prochazka
abf7ad478d mysql view format 2020-06-28 21:33:13 +02:00
Jan Prochazka
0c3a2fb047 fix 2020-06-28 21:28:14 +02:00
Jan Prochazka
a85ad0a1f0 mysql - analyse views 2020-06-28 21:19:51 +02:00
Jan Prochazka
4e1ee72d4d kill query 2020-06-28 15:33:59 +02:00
Jan Prochazka
8e9b6d5ea2 mysql - analyse modifications 2020-06-28 15:22:34 +02:00
Jan Prochazka
536ee6678f mysql - analyse views, procedures, functions 2020-06-28 13:58:56 +02:00
Jan Prochazka
dc4fbe21de fixes + UX 2020-06-27 12:50:45 +02:00
Jan Prochazka
f78b1adefa fix 2020-06-27 10:33:51 +02:00
Jan Prochazka
d754896f88 fix 2020-06-27 09:02:26 +02:00
Jan Prochazka
c38aac4015 group by date - postgres, mysql 2020-06-26 15:48:02 +02:00
Jan Prochazka
bf24796899 type analysers for mysql, postgre 2020-06-26 15:28:44 +02:00
Jan Prochazka
5e0563c42c v3.7.15 2020-06-21 22:07:56 +02:00
Jan Prochazka
ca2d182e83 fix 2020-06-21 22:07:38 +02:00
Jan Prochazka
387867b1ae build fix 2020-06-21 22:00:59 +02:00
Jan Prochazka
ae2ee7c6e2 v3.7.14 2020-06-21 21:55:57 +02:00
Jan Prochazka
da4370a420 build fix 2020-06-21 21:55:37 +02:00
Jan Prochazka
8131df6a2e v3.7.13 2020-06-21 21:52:02 +02:00
Jan Prochazka
b01fec4adc build fix 2020-06-21 21:51:45 +02:00
Jan Prochazka
bd56587517 v3.7.12 2020-06-21 21:45:38 +02:00
Jan Prochazka
72d38e4b8c grouping - work with datetimes 2020-06-21 21:44:54 +02:00
Jan Prochazka
9cd2e68f0b group by fix 2020-06-21 20:04:00 +02:00
Jan Prochazka
e1eb8ffd56 group by - count on identity 2020-06-21 10:41:26 +02:00
Jan Prochazka
0e1e3b9ed7 grid - grouping 2020-06-21 10:36:43 +02:00
Jan Prochazka
425e58627f v3.7.11 2020-06-18 21:51:04 +02:00
Jan Prochazka
bfc6f2a8a8 better type handling, shell-tableReader for preserving table structure 2020-06-18 21:50:37 +02:00
Jan Prochazka
cecb88f024 #32 2020-06-18 21:18:30 +02:00
Jan Prochazka
ac9bd62ecf v3.7.10 2020-06-18 20:51:52 +02:00
Jan Prochazka
0d755fa8fc utility functions, treat view as table 2020-06-18 20:39:35 +02:00
Jan Prochazka
10d8a40d1c fix 2020-06-18 17:15:53 +02:00
Jan Prochazka
217be698af fixed bug with mssql connector 2020-06-18 17:11:08 +02:00
Jan Prochazka
061921fd6c export database popup menu 2020-06-18 16:53:03 +02:00
Jan Prochazka
07287e5f7f create database modal 2020-06-18 16:15:39 +02:00
Jan Prochazka
ec8b034541 excel import fix 2020-06-18 15:55:30 +02:00
Jan Prochazka
694e76b654 load wizard from shell window 2020-06-18 15:35:57 +02:00
Jan Prochazka
8425fc46a7 working excel import 2020-06-18 15:08:58 +02:00
Jan Prochazka
967c5860c9 imp exp - transfer from one DB to another 2020-06-18 12:17:55 +02:00
Jan Prochazka
3f40996d2d imp exp configurator 2020-06-18 11:42:02 +02:00
Jan Prochazka
a9ce93cd67 import-export - work with schema (mssql) 2020-06-18 09:38:08 +02:00
Jan Prochazka
759754c437 v3.7.9 2020-06-11 21:29:52 +02:00
Jan Prochazka
b873dd75d3 json export in configurator, prepare for table mapping 2020-06-11 21:26:35 +02:00
Jan Prochazka
b520501d1f json lines reader, writer 2020-06-11 20:52:57 +02:00
Jan Prochazka
41ee6e9b91 excel sheet reader 2020-06-11 15:58:19 +02:00
Jan Prochazka
5c1920d60d Revert "fixed incorrect paste on data grid"
This reverts commit dda614165f.
2020-06-11 15:43:51 +02:00
Jan Prochazka
bab14883a2 fix pasting unwanted text 2020-06-11 15:43:39 +02:00
Jan Prochazka
dda614165f fixed incorrect paste on data grid 2020-06-11 15:28:12 +02:00
Jan Prochazka
38b6350ef8 mssql bulk table writer 2020-06-11 13:58:34 +02:00
Jan Prochazka
a86f7e96ca rename 2020-06-11 10:23:37 +02:00
Jan Prochazka
dc7c44b797 csvReader follows dbgate stream api 2020-06-11 10:19:38 +02:00
Jan Prochazka
f68bdafd9f scripting engine 2020-06-11 10:09:04 +02:00
Jan Prochazka
ca079d5dce jsl data - first line is structure 2020-06-10 21:53:43 +02:00
Jan Prochazka
1695fb2fd8 new streams 2020-06-10 21:35:25 +02:00
Jan Prochazka
26120969de byline parsing script output 2020-06-10 20:44:31 +02:00
Jan Prochazka
eb7c65dc95 basic auth for dbgate web 2020-06-10 19:58:05 +02:00
Jan Prochazka
235a9ff92d v3.7.8 2020-06-10 18:49:23 +02:00
Jan Prochazka
63b95c6793 build docker image 2020-06-10 18:48:54 +02:00
Jan Prochazka
b91bf13281 v3.7.7 2020-06-09 20:54:39 +02:00
Jan Prochazka
594837573e #28 2020-06-09 20:54:14 +02:00
322 changed files with 9876 additions and 5358 deletions

View File

@@ -4,8 +4,13 @@ name: Docker image
on:
push:
branches:
- production
tags:
- 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10
# on:
# push:
# branches:
# - production
jobs:
build:

View File

@@ -1,40 +1,32 @@
[![styled with prettier](https://img.shields.io/badge/styled_with-prettier-ff69b4.svg)](https://github.com/prettier/prettier)
[![Donate](https://img.shields.io/badge/donate-paypal-blue.svg)](https://paypal.me/JanProchazkaCz/30eur)
[![NPM version](https://img.shields.io/npm/v/dbgate-api.svg)](https://www.npmjs.com/package/dbgate-api)
# DbGate - database administration tool
DbGate is a fast and efficient database administration tool. It is focused on working with data (filtering, editing, master/detail views, etc.)
**Try it online** - https://dbgate.org
**Try it online** - https://demo.dbgate.org - online demo application
## Currently implemented features
## Features
* Support for Microsoft SQL Server, PostgreSQL, MySQL
* Table data browsing - filtering, sorting, adding related columns using foreign keys
* Table data browsing - filtering, sorting, related columns using foreign keys
* Master/detail views
* Browsing objects - tables, views, procedures, functions
* Table data editing, with SQL change script preview
* SQL editor, execute SQL script, SQL code formatter
* SQL editor, execute SQL script, SQL code formatter, SQL code completion, SQL join wizard
* Runs as a desktop application for Windows, Linux and Mac, or in a Docker container on the server with a web browser on the client.
* Import, export from/to CSV, Excel, JSON
* Free table editor - quick table data editing (cleanup data after import/before export, prototype tables etc.)
* Archives - backup your data in JSON files on local filesystem (or on DbGate server, when using web application)
* Light and dark theme
* For detailed info on how to run DbGate in a Docker container, visit [Docker Hub](https://hub.docker.com/r/dbgate/dbgate)
## Current support for database engines
| Action | Microsoft SQL Server | PostgreSQL | MySQL |
|---|---|---|---|
| Tables | yes | yes | yes |
| Columns | yes | yes | yes |
| Primary keys | yes | yes | yes |
| Foreign keys | yes | yes | yes |
| Run query | yes | yes | yes |
| Data types | yes | no | no |
| Views | yes | no | no |
| Stored procedures | yes | no | no |
| Functions | yes | no | no |
![Screenshot](https://raw.githubusercontent.com/dbshell/dbgate/master/screenshot.png)
## Design goals
* Application simplicity - DbGate takes the best and only the best from old [DbGate](http://www.jenasoft.com/dbgate), [DatAdmin](http://www.jenasoft.com/datadmin) and [DbMouse](http://www.jenasoft.com/dbmouse) . First will be implemented the most used features from this software.
* Minimal dependencies - so that the software can be developed in future without problems with obsolete libraries
* Application simplicity - DbGate takes the best and only the best from old [DbGate](http://www.jenasoft.com/dbgate), [DatAdmin](http://www.jenasoft.com/datadmin) and [DbMouse](http://www.jenasoft.com/dbmouse) .
* Minimal dependencies
* Frontend - React, styled-components, socket.io
* Backend - NodeJs, ExpressJs, socket.io, database connection drivers
* JavaScript + TypeScript
@@ -43,17 +35,29 @@ DbGate is fast and efficient database administration tool. It is focused to work
* Platform independent - runs as a web application in a single Docker container on the server, or as a desktop application using the Electron platform on Linux, Windows and Mac
## How Can I Contribute?
You're welcome to contribute to this project! Especially with these topics:
You're welcome to contribute to this project! Below are some ideas for how to contribute:
* Bug fixing
* Test Mac edition
* Styles, graphics
* Better MySQL, PostgreSQL support
* Improve linux package build, add to APT repository
* Auto-upgrade of electron application
* Support for new import/export formats
Any help is appreciated!
Feel free to report issues and open merge requests.
## Roadmap
| Feature | Complexity | Schedule |
|---|---|---|
| Query designer | medium | December 2020 |
| Table designer (structure editor) | big | January 2021 |
| Filter SQL result sets | small | November 2020 |
| Filtering, sorting in free table editor | small | November 2020 |
| Using tedious driver instead of mssql | small | January 2021 |
| Support for SQLite | big | 2021 |
## How to run development environment
```sh
@@ -90,11 +94,14 @@ yarn start:app:local
```
## Packages
* api - backend, Javascript, ExpressJS
* datalib - TypeScript library for utility classes
* electron - application (JavaScript)
* engines - drivers for database engine (mssql, mysql, postgres), analysing database structure, creating specific queries (JavaScript)
* filterparser - TypeScript library for parsing data filter expressions using parsimmon
* sqltree - JSON representation of SQL query, functions converting to SQL (TypeScript)
* types - common TypeScript definitions
* web - frontend in React (JavaScript)
Some dbgate packages can also be used without DbGate. You can find them in the [NPM repository](https://www.npmjs.com/search?q=keywords:dbgate)
* [api](https://github.com/dbshell/dbgate/tree/master/packages/api) - backend, Javascript, ExpressJS [![NPM version](https://img.shields.io/npm/v/dbgate-api.svg)](https://www.npmjs.com/package/dbgate-api)
* [datalib](https://github.com/dbshell/dbgate/tree/master/packages/datalib) - TypeScript library for utility classes
* [app](https://github.com/dbshell/dbgate/tree/master/app) - application (JavaScript)
* [engines](https://github.com/dbshell/dbgate/tree/master/packages/engines) - drivers for database engine (mssql, mysql, postgres), analysing database structure, creating specific queries (JavaScript) [![NPM version](https://img.shields.io/npm/v/dbgate-engines.svg)](https://www.npmjs.com/package/dbgate-engines)
* [filterparser](https://github.com/dbshell/dbgate/tree/master/packages/filterparser) - TypeScript library for parsing data filter expressions using parsimmon
* [sqltree](https://github.com/dbshell/dbgate/tree/master/packages/sqltree) - JSON representation of SQL query, functions converting to SQL (TypeScript) [![NPM version](https://img.shields.io/npm/v/dbgate-sqltree.svg)](https://www.npmjs.com/package/dbgate-sqltree)
* [types](https://github.com/dbshell/dbgate/tree/master/packages/types) - common TypeScript definitions [![NPM version](https://img.shields.io/npm/v/dbgate-types.svg)](https://www.npmjs.com/package/dbgate-types)
* [web](https://github.com/dbshell/dbgate/tree/master/packages/web) - frontend in React (JavaScript)
* [tools](https://github.com/dbshell/dbgate/tree/master/packages/tools) - various tools [![NPM version](https://img.shields.io/npm/v/dbgate-tools.svg)](https://www.npmjs.com/package/dbgate-tools)

View File

@@ -1,6 +1,6 @@
{
"name": "dbgate",
"version": "3.7.6",
"version": "3.7.33",
"private": true,
"author": "Jan Prochazka <jenasoft.database@gmail.com>",
"dependencies": {

View File

@@ -1,23 +1,25 @@
{
"private": true,
"name": "@dbgate/all",
"name": "dbgate-all",
"workspaces": [
"packages/*"
],
"scripts": {
"start:api": "yarn workspace @dbgate/api start",
"start:api:portal": "yarn workspace @dbgate/api start:portal",
"start:web": "yarn workspace @dbgate/web start",
"start:sqltree": "yarn workspace @dbgate/sqltree start",
"start:datalib": "yarn workspace @dbgate/datalib start",
"start:filterparser": "yarn workspace @dbgate/filterparser start",
"build:sqltree": "yarn workspace @dbgate/sqltree build",
"build:datalib": "yarn workspace @dbgate/datalib build",
"build:filterparser": "yarn workspace @dbgate/filterparser build",
"build:lib": "yarn build:sqltree && yarn build:filterparser && yarn build:datalib",
"start:api": "yarn workspace dbgate-api start",
"start:api:portal": "yarn workspace dbgate-api start:portal",
"start:web": "yarn workspace dbgate-web start",
"start:sqltree": "yarn workspace dbgate-sqltree start",
"start:tools": "yarn workspace dbgate-tools start",
"start:datalib": "yarn workspace dbgate-datalib start",
"start:filterparser": "yarn workspace dbgate-filterparser start",
"build:sqltree": "yarn workspace dbgate-sqltree build",
"build:datalib": "yarn workspace dbgate-datalib build",
"build:filterparser": "yarn workspace dbgate-filterparser build",
"build:tools": "yarn workspace dbgate-tools build",
"build:lib": "yarn build:tools && yarn build:sqltree && yarn build:filterparser && yarn build:datalib",
"build:app": "cd app && yarn install && yarn build",
"build:api": "yarn workspace @dbgate/api build",
"build:web:docker": "yarn workspace @dbgate/web build:docker",
"build:api": "yarn workspace dbgate-api build",
"build:web:docker": "yarn workspace dbgate-web build:docker",
"build:app:local": "cd app && yarn build:local",
"start:app:local": "cd app && yarn start:local",
@@ -26,9 +28,9 @@
"prepare": "yarn build:lib",
"start": "concurrently --kill-others-on-fail \"yarn start:api\" \"yarn start:web\"",
"lib": "concurrently --kill-others-on-fail \"yarn start:sqltree\" \"yarn start:filterparser\" \"yarn start:datalib\"",
"ts:api": "yarn workspace @dbgate/api ts",
"ts:web": "yarn workspace @dbgate/web ts",
"lib": "concurrently --kill-others-on-fail \"yarn start:sqltree\" \"yarn start:filterparser\" \"yarn start:datalib\" \"yarn start:tools\"",
"ts:api": "yarn workspace dbgate-api ts",
"ts:web": "yarn workspace dbgate-web ts",
"ts": "yarn ts:api && yarn ts:web",
"postinstall": "patch-package"
},

View File

@@ -15,7 +15,7 @@ PORT_postgres=5433
ENGINE_postgres=postgres
TOOLBAR=home
ICON_home=fas fa-home
ICON_home=mdi mdi-home
TITLE_home=Home
PAGE_home=home.html
STARTUP_PAGES=home

View File

@@ -1,21 +0,0 @@
module.exports = {
env: {
node: true,
commonjs: true,
es6: true,
jquery: false,
jest: true,
jasmine: true,
},
extends: 'eslint:recommended',
globals: {
Atomics: 'readonly',
SharedArrayBuffer: 'readonly',
},
parserOptions: {
ecmaVersion: 2018,
},
rules: {
'no-unused-vars': 'warn',
},
};

packages/api/.npmignore (new file, +2 lines)
View File

@@ -0,0 +1,2 @@
dist
.vscode

packages/api/README.md (new file, +146 lines)
View File

@@ -0,0 +1,146 @@
# dbgate-api
Allows running DbGate data-manipulation scripts.
## Installation
yarn add dbgate-api
## Usage
This example exports the table Customer into a CSV file.
```javascript
const dbgateApi = require('dbgate-api');
async function run() {
const reader = await dbgateApi.tableReader({
connection: { server: 'localhost', engine: 'mssql', user: 'sa', password: 'xxxx', database: 'Chinook' },
schemaName: 'dbo',
pureName: 'Customer',
});
const writer = await dbgateApi.csvWriter({ fileName: 'Customer.csv' });
await dbgateApi.copyStream(reader, writer);
console.log('Finished job script');
}
dbgateApi.runScript(run);
```
A silly example that runs without any dependencies: it copies data from [fakeObjectReader](https://github.com/dbshell/dbgate/blob/master/packages/api/src/shell/fakeObjectReader.js) to [consoleObjectWriter](https://github.com/dbshell/dbgate/blob/master/packages/api/src/shell/consoleObjectWriter.js).
```javascript
const dbgateApi = require('dbgate-api');
async function run() {
const reader = await dbgateApi.fakeObjectReader();
const writer = await dbgateApi.consoleObjectWriter();
await dbgateApi.copyStream(reader, writer);
console.log('Finished job script');
}
dbgateApi.runScript(run);
```
## dbgateApi functions
### dbgateApi.copyStream
Copies data from a reader into a writer. The reader and writer should be created with the functions listed below.
```js
await dbgateApi.copyStream(reader, writer);
```
### dbgateApi.tableReader
Reads a table or view.
```js
const reader = await dbgateApi.tableReader({
connection: { server: 'localhost', engine: 'mssql' | 'postgres' | 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
schemaName: 'dbo',
pureName: 'Customer',
});
```
### dbgateApi.queryReader
Executes a query and reads its result.
```js
const reader = await dbgateApi.queryReader({
connection: { server: 'localhost', engine: 'mssql' | 'postgres' | 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
sql: 'SELECT * FROM Album',
});
```
### dbgateApi.tableWriter
Imports data into a table. The options object is optional; each option defaults to false.
- dropIfExists - if the table already exists, it is dropped before import
- truncate - deletes the table content before import
- createIfNotExists - creates the table if it does not exist
```js
const writer = await dbgateApi.tableWriter({
connection: { server: 'localhost', engine: 'mssql' | 'postgres' | 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
schemaName: 'dbo',
pureName: 'Customer',
options: {
dropIfExists: false,
truncate: false,
createIfNotExists: false,
}
});
```
### dbgateApi.csvReader
Reads a CSV file.
```js
const reader = await dbgateApi.csvReader({
fileName: '/home/root/test.csv',
encoding: 'utf-8',
header: true,
delimiter: ',',
quoted: false,
limitRows: null
});
```
### dbgateApi.csvWriter
Writes a CSV file.
```js
const writer = await dbgateApi.csvWriter({
fileName: '/home/root/test.csv',
encoding: 'utf-8',
header: true,
delimiter: ',',
quoted: false
});
```
### dbgateApi.jsonLinesReader
Reads a JSON Lines data file. The first line may contain the structure; every following line contains one row as a JSON-serialized object.
```js
const reader = await dbgateApi.jsonLinesReader({
fileName: '/home/root/test.jsonl',
encoding: 'utf-8',
header: true,
limitRows: null
});
```
### dbgateApi.jsonLinesWriter
Writes a JSON Lines data file. The first line may contain the structure; every following line contains one row as a JSON-serialized object.
```js
const writer = await dbgateApi.jsonLinesWriter({
fileName: '/home/root/test.jsonl',
encoding: 'utf-8',
header: true
});
```
### dbgateApi.excelSheetReader
Reads tabular data from one sheet of an MS Excel file.
```js
const reader = await dbgateApi.excelSheetReader({
fileName: '/home/root/test.xlsx',
sheetName: 'Album',
limitRows: null
});
```
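The readers and writers above can be combined freely with `copyStream`. As a closing illustration, here is a minimal sketch (not taken from this repository; connection details, file, sheet and table names are placeholders) that imports one Excel sheet into a database table:
```js
const dbgateApi = require('dbgate-api');

async function run() {
  // Read one sheet from an Excel workbook (placeholder path and sheet name)
  const reader = await dbgateApi.excelSheetReader({
    fileName: '/home/root/test.xlsx',
    sheetName: 'Album',
  });

  // Write the rows into a table, creating it first if it does not exist
  const writer = await dbgateApi.tableWriter({
    connection: { server: 'localhost', engine: 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
    pureName: 'Album',
    options: { createIfNotExists: true },
  });

  await dbgateApi.copyStream(reader, writer);
  console.log('Finished job script');
}

dbgateApi.runScript(run);
```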

View File

@@ -1,28 +1,44 @@
{
"name": "@dbgate/api",
"name": "dbgate-api",
"main": "src/index.js",
"version": "0.1.0",
"private": true,
"version": "1.0.5",
"homepage": "https://dbgate.org/",
"repository": {
"type": "git",
"url": "https://github.com/dbshell/dbgate.git"
},
"funding": "https://www.paypal.com/paypalme/JanProchazkaCz/30eur",
"author": "Jan Prochazka",
"license": "GPL",
"keywords": [
"sql",
"json",
"import",
"export",
"dbgate"
],
"dependencies": {
"@dbgate/engines": "^0.1.0",
"@dbgate/sqltree": "^0.1.0",
"async-lock": "^1.2.4",
"axios": "^0.19.0",
"body-parser": "^1.19.0",
"bufferutil": "^4.0.1",
"byline": "^5.0.0",
"cors": "^2.8.5",
"cross-env": "^6.0.3",
"csv": "^5.3.2",
"dbgate-sqltree": "^1.0.0",
"dbgate-tools": "^1.0.0",
"eslint": "^6.8.0",
"express": "^4.17.1",
"express-basic-auth": "^1.2.0",
"express-fileupload": "^1.2.0",
"find-free-port": "^2.0.0",
"fs-extra": "^8.1.0",
"http": "^0.0.0",
"line-reader": "^0.4.0",
"mssql": "^6.0.1",
"mysql": "^2.17.1",
"lodash": "^4.17.15",
"ncp": "^2.0.0",
"nedb-promises": "^4.0.1",
"pg": "^7.17.0",
"pg-query-stream": "^3.1.1"
"tar": "^6.0.5"
},
"scripts": {
"start": "nodemon src/index.js",
@@ -31,8 +47,8 @@
"build": "webpack"
},
"devDependencies": {
"@dbgate/types": "^0.1.0",
"@types/lodash": "^4.14.149",
"dbgate-types": "^1.0.0",
"env-cmd": "^10.1.0",
"nodemon": "^2.0.2",
"typescript": "^3.7.4",

View File

@@ -0,0 +1,98 @@
const fs = require('fs-extra');
const stream = require('stream');
const readline = require('readline');
const path = require('path');
const { formatWithOptions } = require('util');
const { archivedir } = require('../utility/directories');
const socket = require('../utility/socket');
const JsonLinesDatastore = require('../utility/JsonLinesDatastore');
const { saveFreeTableData } = require('../utility/freeTableStorage');
module.exports = {
folders_meta: 'get',
async folders() {
const folders = await fs.readdir(archivedir());
return [
{
name: 'default',
type: 'jsonl',
},
...folders
.filter((x) => x != 'default')
.map((name) => ({
name,
type: 'jsonl',
})),
];
},
createFolder_meta: 'post',
async createFolder({ folder }) {
await fs.mkdir(path.join(archivedir(), folder));
socket.emitChanged('archive-folders-changed');
return true;
},
files_meta: 'get',
async files({ folder }) {
const dir = path.join(archivedir(), folder);
if (!(await fs.exists(dir))) return [];
const files = await fs.readdir(dir);
return files
.filter((name) => name.endsWith('.jsonl'))
.map((name) => ({
name: name.slice(0, -'.jsonl'.length),
type: 'jsonl',
}));
},
refreshFiles_meta: 'post',
async refreshFiles({ folder }) {
socket.emitChanged(`archive-files-changed-${folder}`);
},
refreshFolders_meta: 'post',
async refreshFolders() {
socket.emitChanged(`archive-folders-changed`);
},
deleteFile_meta: 'post',
async deleteFile({ folder, file }) {
await fs.unlink(path.join(archivedir(), folder, `${file}.jsonl`));
socket.emitChanged(`archive-files-changed-${folder}`);
},
deleteFolder_meta: 'post',
async deleteFolder({ folder }) {
if (!folder) throw new Error('Missing folder parameter');
await fs.rmdir(path.join(archivedir(), folder), { recursive: true });
socket.emitChanged(`archive-folders-changed`);
},
saveFreeTable_meta: 'post',
async saveFreeTable({ folder, file, data }) {
saveFreeTableData(path.join(archivedir(), folder, `${file}.jsonl`), data);
return true;
},
loadFreeTable_meta: 'post',
async loadFreeTable({ folder, file }) {
return new Promise((resolve, reject) => {
const fileStream = fs.createReadStream(path.join(archivedir(), folder, `${file}.jsonl`));
const liner = readline.createInterface({
input: fileStream,
});
let structure = null;
const rows = [];
liner.on('line', (line) => {
const data = JSON.parse(line);
if (structure) rows.push(data);
else structure = data;
});
liner.on('close', () => {
resolve({ structure, rows });
fileStream.close();
});
});
},
};

View File

@@ -2,10 +2,10 @@ const uuidv1 = require('uuid/v1');
const connections = require('./connections');
const socket = require('../utility/socket');
const { fork } = require('child_process');
const DatabaseAnalyser = require('@dbgate/engines/default/DatabaseAnalyser');
const { DatabaseAnalyser } = require('dbgate-tools');
module.exports = {
/** @type {import('@dbgate/types').OpenedDatabaseConnection[]} */
/** @type {import('dbgate-types').OpenedDatabaseConnection[]} */
opened: [],
closed: [],
requests: {},
@@ -67,7 +67,7 @@ module.exports = {
return newOpened;
},
/** @param {import('@dbgate/types').OpenedDatabaseConnection} conn */
/** @param {import('dbgate-types').OpenedDatabaseConnection} conn */
sendRequest(conn, message) {
const msgid = uuidv1();
const promise = new Promise((resolve, reject) => {

View File

@@ -1,95 +1,148 @@
const path = require('path');
const fs = require('fs');
const lineReader = require('line-reader');
const { jsldir } = require('../utility/directories');
const _ = require('lodash');
const DatastoreProxy = require('../utility/DatastoreProxy');
const { saveFreeTableData } = require('../utility/freeTableStorage');
const getJslFileName = require('../utility/getJslFileName');
const JsonLinesDatastore = require('../utility/JsonLinesDatastore');
const socket = require('../utility/socket');
module.exports = {
openedReaders: {},
closeReader(jslid) {
// console.log('CLOSING READER');
if (!this.openedReaders[jslid]) return Promise.resolve();
return new Promise((resolve, reject) => {
this.openedReaders[jslid].reader.close((err) => {
if (err) reject(err);
delete this.openedReaders[jslid];
resolve();
});
});
},
readLine(jslid) {
if (!this.openedReaders[jslid]) return Promise.reject();
return new Promise((resolve, reject) => {
const { reader } = this.openedReaders[jslid];
if (!reader.hasNextLine()) {
function readFirstLine(file) {
return new Promise((resolve, reject) => {
lineReader.open(file, (err, reader) => {
if (err) reject(err);
if (reader.hasNextLine()) {
reader.nextLine((err, line) => {
if (err) reject(err);
resolve(line);
});
} else {
resolve(null);
return;
}
reader.nextLine((err, line) => {
this.openedReaders[jslid].readedCount += 1;
if (err) reject(err);
resolve(line);
});
});
},
});
}
openReader(jslid) {
// console.log('OPENING READER');
console.log('OPENING READER, LINES=', fs.readFileSync(path.join(jsldir(), `${jslid}.jsonl`), 'utf-8').split('\n').length);
const file = path.join(jsldir(), `${jslid}.jsonl`);
return new Promise((resolve, reject) =>
lineReader.open(file, (err, reader) => {
if (err) reject(err);
resolve();
this.openedReaders[jslid] = {
reader,
readedCount: 0,
};
})
);
},
module.exports = {
datastores: {},
async ensureReader(jslid, offset) {
if (this.openedReaders[jslid] && this.openedReaders[jslid].readedCount > offset) {
await this.closeReader(jslid);
}
if (!this.openedReaders[jslid]) {
await this.openReader(jslid);
}
while (this.openedReaders[jslid].readedCount < offset) {
await this.readLine(jslid);
// closeReader(jslid) {
// // console.log('CLOSING READER');
// if (!this.openedReaders[jslid]) return Promise.resolve();
// return new Promise((resolve, reject) => {
// this.openedReaders[jslid].reader.close((err) => {
// if (err) reject(err);
// delete this.openedReaders[jslid];
// resolve();
// });
// });
// },
// readLine(readerInfo) {
// return new Promise((resolve, reject) => {
// const { reader } = readerInfo;
// if (!reader.hasNextLine()) {
// resolve(null);
// return;
// }
// reader.nextLine((err, line) => {
// if (readerInfo.readedSchemaRow) readerInfo.readedDataRowCount += 1;
// else readerInfo.readedSchemaRow = true;
// if (err) reject(err);
// resolve(line);
// });
// });
// },
// openReader(jslid) {
// // console.log('OPENING READER');
// // console.log(
// // 'OPENING READER, LINES=',
// // fs.readFileSync(path.join(jsldir(), `${jslid}.jsonl`), 'utf-8').split('\n').length
// // );
// const file = getJslFileName(jslid);
// return new Promise((resolve, reject) =>
// lineReader.open(file, (err, reader) => {
// if (err) reject(err);
// const readerInfo = {
// reader,
// readedDataRowCount: 0,
// readedSchemaRow: false,
// isReading: true,
// };
// this.openedReaders[jslid] = readerInfo;
// resolve(readerInfo);
// })
// );
// },
// async ensureReader(jslid, offset) {
// if (this.openedReaders[jslid] && this.openedReaders[jslid].readedDataRowCount > offset) {
// await this.closeReader(jslid);
// }
// let readerInfo = this.openedReaders[jslid];
// if (!this.openedReaders[jslid]) {
// readerInfo = await this.openReader(jslid);
// }
// readerInfo.isReading = true;
// if (!readerInfo.readedSchemaRow) {
// await this.readLine(readerInfo); // skip structure
// }
// while (readerInfo.readedDataRowCount < offset) {
// await this.readLine(readerInfo);
// }
// return readerInfo;
// },
async ensureDatastore(jslid) {
let datastore = this.datastores[jslid];
if (!datastore) {
datastore = new JsonLinesDatastore(getJslFileName(jslid));
// datastore = new DatastoreProxy(getJslFileName(jslid));
this.datastores[jslid] = datastore;
}
return datastore;
},
getInfo_meta: 'get',
getInfo({ jslid }) {
const file = path.join(jsldir(), `${jslid}.jsonl.info`);
return JSON.parse(fs.readFileSync(file, 'utf-8'));
async getInfo({ jslid }) {
const file = getJslFileName(jslid);
const firstLine = await readFirstLine(file);
if (firstLine) return JSON.parse(firstLine);
return null;
},
getRows_meta: 'get',
async getRows({ jslid, offset, limit }) {
await this.ensureReader(jslid, offset);
const res = [];
for (let i = 0; i < limit; i += 1) {
const line = await this.readLine(jslid);
if (line == null) break;
res.push(JSON.parse(line));
}
return res;
getRows_meta: 'post',
async getRows({ jslid, offset, limit, filters }) {
const datastore = await this.ensureDatastore(jslid);
return datastore.getRows(offset, limit, _.isEmpty(filters) ? null : filters);
},
getStats_meta: 'get',
getStats({ jslid }) {
const file = path.join(jsldir(), `${jslid}.jsonl.stats`);
return JSON.parse(fs.readFileSync(file, 'utf-8'));
const file = `${getJslFileName(jslid)}.stats`;
if (fs.existsSync(file)) return JSON.parse(fs.readFileSync(file, 'utf-8'));
return {};
},
async notifyChangedStats(stats) {
console.log('SENDING STATS', JSON.stringify(stats));
await this.closeReader(stats.jslid);
const datastore = this.datastores[stats.jslid];
if (datastore) await datastore.notifyChanged();
socket.emit(`jsldata-stats-${stats.jslid}`, stats);
// const readerInfo = this.openedReaders[stats.jslid];
// if (readerInfo && readerInfo.isReading) {
// readerInfo.closeAfterReadAndSendStats = stats;
// } else {
// await this.closeReader(stats.jslid);
// socket.emit(`jsldata-stats-${stats.jslid}`, stats);
// }
},
saveFreeTable_meta: 'post',
async saveFreeTable({ jslid, data }) {
saveFreeTableData(getJslFileName(jslid), data);
return true;
},
};

View File

@@ -0,0 +1,150 @@
const fs = require('fs-extra');
const axios = require('axios');
const path = require('path');
const { pluginsdir, datadir } = require('../utility/directories');
const socket = require('../utility/socket');
const requirePlugin = require('../shell/requirePlugin');
const downloadPackage = require('../utility/downloadPackage');
// async function loadPackageInfo(dir) {
// const readmeFile = path.join(dir, 'README.md');
// const packageFile = path.join(dir, 'package.json');
// if (!(await fs.exists(packageFile))) {
// return null;
// }
// let readme = null;
// let manifest = null;
// if (await fs.exists(readmeFile)) readme = await fs.readFile(readmeFile, { encoding: 'utf-8' });
// if (await fs.exists(packageFile)) manifest = JSON.parse(await fs.readFile(packageFile, { encoding: 'utf-8' }));
// return {
// readme,
// manifest,
// };
// }
const preinstallPlugins = [
'dbgate-plugin-mssql',
'dbgate-plugin-mysql',
'dbgate-plugin-postgres',
'dbgate-plugin-csv',
'dbgate-plugin-excel',
];
module.exports = {
script_meta: 'get',
async script({ packageName }) {
const file = path.join(pluginsdir(), packageName, 'dist', 'frontend.js');
const data = await fs.readFile(file, {
encoding: 'utf-8',
});
return data;
},
search_meta: 'get',
async search({ filter }) {
// DOCS: https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md#get-v1search
const resp = await axios.default.get(
`http://registry.npmjs.com/-/v1/search?text=${encodeURIComponent(filter)}+keywords:dbgateplugin&size=25&from=0`
);
const { objects } = resp.data || {};
return (objects || []).map((x) => x.package);
},
info_meta: 'get',
async info({ packageName }) {
try {
const infoResp = await axios.default.get(`https://registry.npmjs.org/${packageName}`);
const { latest } = infoResp.data['dist-tags'];
const manifest = infoResp.data.versions[latest];
const { readme } = infoResp.data;
return {
readme,
manifest,
};
} catch (err) {
return {
state: 'error',
error: err.message,
};
}
// const dir = path.join(pluginstmpdir(), packageName);
// if (!(await fs.exists(dir))) {
// await downloadPackage(packageName, dir);
// }
// return await loadPackageInfo(dir);
// return await {
// ...loadPackageInfo(dir),
// installed: loadPackageInfo(path.join(pluginsdir(), packageName)),
// };
},
installed_meta: 'get',
async installed() {
const files = await fs.readdir(pluginsdir());
const res = [];
for (const packageName of files) {
const manifest = await fs
.readFile(path.join(pluginsdir(), packageName, 'package.json'))
.then((x) => JSON.parse(x));
const readmeFile = path.join(pluginsdir(), packageName, 'README.md');
if (await fs.exists(readmeFile)) {
manifest.readme = await fs.readFile(readmeFile, { encoding: 'utf-8' });
}
res.push(manifest);
}
return res;
// const res = await Promise.all(
// files.map((packageName) =>
// fs.readFile(path.join(pluginsdir(), packageName, 'package.json')).then((x) => JSON.parse(x))
// )
// );
},
install_meta: 'post',
async install({ packageName }) {
const dir = path.join(pluginsdir(), packageName);
if (!(await fs.exists(dir))) {
await downloadPackage(packageName, dir);
}
socket.emitChanged(`installed-plugins-changed`);
},
uninstall_meta: 'post',
async uninstall({ packageName }) {
const dir = path.join(pluginsdir(), packageName);
await fs.rmdir(dir, { recursive: true });
socket.emitChanged(`installed-plugins-changed`);
this.removedPlugins.push(packageName);
await fs.writeFile(path.join(datadir(), 'removed-plugins'), this.removedPlugins.join('\n'));
},
command_meta: 'post',
async command({ packageName, command, args }) {
const content = requirePlugin(packageName);
return content.commands[command](args);
},
async _init() {
const installed = await this.installed();
try {
this.removedPlugins = (await fs.readFile(path.join(datadir(), 'removed-plugins'), { encoding: 'utf-8' })).split(
'\n'
);
} catch (err) {
this.removedPlugins = [];
}
for (const packageName of preinstallPlugins) {
if (this.removedPlugins.includes(packageName)) continue;
try {
console.log('Preinstalling plugin', packageName);
await this.install({ packageName });
} catch (err) {
console.error('Error preinstalling plugin', packageName, err);
}
}
},
};

View File

@@ -2,25 +2,56 @@ const _ = require('lodash');
const path = require('path');
const fs = require('fs-extra');
const uuidv1 = require('uuid/v1');
const byline = require('byline');
const socket = require('../utility/socket');
const { fork } = require('child_process');
const { rundir, uploadsdir } = require('../utility/directories');
const { rundir, uploadsdir, pluginsdir } = require('../utility/directories');
const { extractShellApiPlugins, extractShellApiFunctionName } = require('dbgate-tools');
function extractPlugins(script) {
const requireRegex = /\s*\/\/\s*@require\s+([^\s]+)\s*\n/g;
const matches = [...script.matchAll(requireRegex)];
return matches.map((x) => x[1]);
}
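// For illustration (hypothetical script, not part of this diff): a shell script
// beginning with the line below would make extractPlugins() return
// ['dbgate-plugin-csv'], so the requirePluginsTemplate below requires and
// registers that plugin before the script body runs:
//
//   // @require dbgate-plugin-csv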
const requirePluginsTemplate = (plugins) =>
plugins
.map(
(packageName) => `const ${_.camelCase(packageName)} = require(process.env.PLUGIN_${_.camelCase(packageName)});\n`
)
.join('') + `dbgateApi.registerPlugins(${plugins.map((x) => _.camelCase(x)).join(',')});\n`;
const scriptTemplate = (script) => `
const dbgateApi = require(process.env.DBGATE_API || "@dbgate/api");
const dbgateApi = require(process.env.DBGATE_API);
${requirePluginsTemplate(extractPlugins(script))}
require=null;
async function run() {
${script}
await dbgateApi.finalizer.run();
console.log('Finished job script');
}
dbgateApi.runScript(run);
`;
const loaderScriptTemplate = (functionName, props, runid) => `
const dbgateApi = require(process.env.DBGATE_API);
${requirePluginsTemplate(extractShellApiPlugins(functionName, props))}
require=null;
async function run() {
const reader=await ${extractShellApiFunctionName(functionName)}(${JSON.stringify(props)});
const writer=await dbgateApi.collectorWriter({runid: '${runid}'});
await dbgateApi.copyStream(reader, writer);
}
dbgateApi.runScript(run);
`;
module.exports = {
/** @type {import('@dbgate/types').OpenedRunner[]} */
/** @type {import('dbgate-types').OpenedRunner[]} */
opened: [],
requests: {},
dispatchMessage(runid, message) {
console.log('DISPATCHING', message);
if (message) console.log('...', message.message);
if (_.isString(message)) {
socket.emit(`runner-info-${runid}`, {
message,
@@ -39,35 +70,48 @@ module.exports = {
handle_ping() {},
start_meta: 'post',
async start({ script }) {
const runid = uuidv1();
handle_freeData(runid, { freeData }) {
const [resolve, reject] = this.requests[runid];
resolve(freeData);
delete this.requests[runid];
},
rejectRequest(runid, error) {
if (this.requests[runid]) {
const [resolve, reject] = this.requests[runid];
reject(error);
delete this.requests[runid];
}
},
startCore(runid, scriptText) {
const directory = path.join(rundir(), runid);
const scriptFile = path.join(uploadsdir(), runid + '.js');
fs.writeFileSync(`${scriptFile}`, scriptTemplate(script));
fs.writeFileSync(`${scriptFile}`, scriptText);
fs.mkdirSync(directory);
const pluginNames = fs.readdirSync(pluginsdir());
console.log(`RUNNING SCRIPT ${scriptFile}`);
const subprocess = fork(scriptFile, ['--checkParent'], {
cwd: directory,
stdio: ['ignore', 'pipe', 'pipe', 'ipc'],
env: {
DBGATE_API: process.argv[1],
..._.fromPairs(pluginNames.map((name) => [`PLUGIN_${_.camelCase(name)}`, path.join(pluginsdir(), name)])),
},
});
const pipeDispatcher = (severity) => (data) =>
data
.toString()
.split('\n')
.forEach((message) => {
if (message.trim()) this.dispatchMessage(runid, { severity, message: message.trim() });
});
this.dispatchMessage(runid, { severity, message: data.toString().trim() });
subprocess.stdout.on('data', pipeDispatcher('info'));
subprocess.stderr.on('data', pipeDispatcher('error'));
byline(subprocess.stdout).on('data', pipeDispatcher('info'));
byline(subprocess.stderr).on('data', pipeDispatcher('error'));
subprocess.on('exit', (code) => {
this.rejectRequest(runid, { message: 'No data returned, maybe input data source is too big' });
console.log('... EXIT process', code);
socket.emit(`runner-done-${runid}`, code);
});
subprocess.on('error', (error) => {
this.rejectRequest(runid, { message: error && (error.message || error.toString()) });
console.error('... ERROR subprocess', error);
this.dispatchMessage({
severity: 'error',
message: error.toString(),
@@ -85,6 +129,12 @@ module.exports = {
return newOpened;
},
start_meta: 'post',
async start({ script }) {
const runid = uuidv1();
return this.startCore(runid, scriptTemplate(script));
},
cancel_meta: 'post',
async cancel({ runid }) {
const runner = this.opened.find((x) => x.runid == runid);
@@ -110,4 +160,14 @@ module.exports = {
}
return res;
},
loadReader_meta: 'post',
async loadReader({ functionName, props }) {
const promise = new Promise((resolve, reject) => {
const runid = uuidv1();
this.requests[runid] = [resolve, reject];
this.startCore(runid, loaderScriptTemplate(functionName, props, runid));
});
return promise;
},
};
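// Orientation note (summary of the additions above, not code from this diff):
// loadReader() registers a resolve/reject pair under a fresh runid, startCore()
// forks the generated loader script, the child's freeData message resolves the
// promise via handle_freeData(), and an exit without data rejects it through
// rejectRequest().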

View File

@@ -99,4 +99,11 @@ module.exports = {
await this.ensureOpened(conid);
return { status: 'ok' };
},
createDatabase_meta: 'post',
async createDatabase({ conid, name }) {
const opened = await this.ensureOpened(conid);
opened.subprocess.send({ msgtype: 'createDatabase', name });
return { status: 'ok' };
},
};

View File

@@ -6,7 +6,7 @@ const { fork } = require('child_process');
const jsldata = require('./jsldata');
module.exports = {
/** @type {import('@dbgate/types').OpenedSession[]} */
/** @type {import('dbgate-types').OpenedSession[]} */
opened: [],
// handle_error(sesid, props) {
@@ -50,8 +50,8 @@ module.exports = {
},
handle_recordset(sesid, props) {
const { jslid } = props;
socket.emit(`session-recordset-${sesid}`, { jslid });
const { jslid, resultIndex } = props;
socket.emit(`session-recordset-${sesid}`, { jslid, resultIndex });
},
handle_stats(sesid, stats) {
@@ -105,6 +105,16 @@ module.exports = {
return { state: 'ok' };
},
kill_meta: 'post',
async kill({ sesid }) {
const session = this.opened.find((x) => x.sesid == sesid);
if (!session) {
throw new Error('Invalid session');
}
session.subprocess.kill();
return { state: 'ok' };
},
// runCommand_meta: 'post',
// async runCommand({ conid, database, sql }) {
// console.log(`Running SQL command , conid=${conid}, database=${database}, sql=${sql}`);

View File

@@ -0,0 +1,28 @@
const path = require('path');
const { uploadsdir } = require('../utility/directories');
const uuidv1 = require('uuid/v1');
module.exports = {
upload_meta: {
method: 'post',
raw: true,
},
upload(req, res) {
const { data } = req.files || {};
if (!data) {
res.json(null);
return;
}
const uploadName = uuidv1();
const filePath = path.join(uploadsdir(), uploadName);
console.log(`Uploading file ${data.name}, size=${data.size}`);
data.mv(filePath, () => {
res.json({
originalName: data.name,
uploadName,
filePath,
});
});
},
};

View File

@@ -12,7 +12,7 @@ if (argument && argument.endsWith('Process')) {
const module = proc[argument];
module.start();
} else if (!module['parent']) {
} else if (!module['parent'] && !process.argv.includes('--checkParent')) {
const main = require('./main');
main.start(argument);

View File

@@ -1,5 +1,7 @@
const express = require('express');
const basicAuth = require('express-basic-auth');
const bodyParser = require('body-parser');
const fileUpload = require('express-fileupload');
const http = require('http');
const cors = require('cors');
const io = require('socket.io');
@@ -18,6 +20,9 @@ const sessions = require('./controllers/sessions');
const runners = require('./controllers/runners');
const jsldata = require('./controllers/jsldata');
const config = require('./controllers/config');
const archive = require('./controllers/archive');
const uploads = require('./controllers/uploads');
const plugins = require('./controllers/plugins');
const { rundir } = require('./utility/directories');
@@ -29,8 +34,27 @@ function start(argument = null) {
const server = http.createServer(app);
socket.set(io(server));
if (process.env.LOGIN && process.env.PASSWORD) {
app.use(
basicAuth({
users: {
[process.env.LOGIN]: process.env.PASSWORD,
},
challenge: true,
realm: 'DbGate Web App',
})
);
}
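// Usage note (assumption based on the check above): setting both LOGIN and
// PASSWORD environment variables, e.g. LOGIN=admin PASSWORD=secret, enables
// HTTP basic auth for the whole web app.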
app.use(cors());
app.use(bodyParser.json());
app.use(bodyParser.json({ limit: '50mb' }));
app.use(
'/uploads',
fileUpload({
limits: { fileSize: 4 * 1024 * 1024 },
})
);
useController(app, '/connections', connections);
useController(app, '/server-connections', serverConnections);
@@ -40,6 +64,9 @@ function start(argument = null) {
useController(app, '/runners', runners);
useController(app, '/jsldata', jsldata);
useController(app, '/config', config);
useController(app, '/archive', archive);
useController(app, '/uploads', uploads);
useController(app, '/plugins', plugins);
if (process.env.PAGES_DIRECTORY) {
app.use('/pages', express.static(process.env.PAGES_DIRECTORY));

View File

@@ -1,13 +1,12 @@
const engines = require('@dbgate/engines');
const driverConnect = require('../utility/driverConnect');
const childProcessChecker = require('../utility/childProcessChecker');
const requireEngineDriver = require('../utility/requireEngineDriver');
function start() {
childProcessChecker();
process.on('message', async (connection) => {
try {
const driver = engines(connection);
const conn = await driverConnect(driver, connection);
const driver = requireEngineDriver(connection);
const conn = await driver.connect(connection);
const res = await driver.getVersion(conn);
process.send({ msgtype: 'connected', ...res });
} catch (e) {

View File

@@ -1,7 +1,6 @@
const engines = require('@dbgate/engines');
const stableStringify = require('json-stable-stringify');
const driverConnect = require('../utility/driverConnect');
const childProcessChecker = require('../utility/childProcessChecker');
const requireEngineDriver = require('../utility/requireEngineDriver');
let systemConnection;
let storedConnection;
@@ -26,14 +25,14 @@ async function checkedAsyncCall(promise) {
}
async function handleFullRefresh() {
const driver = engines(storedConnection);
const driver = requireEngineDriver(storedConnection);
analysedStructure = await checkedAsyncCall(driver.analyseFull(systemConnection));
process.send({ msgtype: 'structure', structure: analysedStructure });
setStatusName('ok');
}
async function handleIncrementalRefresh() {
const driver = engines(storedConnection);
const driver = requireEngineDriver(storedConnection);
const newStructure = await checkedAsyncCall(driver.analyseIncremental(systemConnection, analysedStructure));
if (newStructure != null) {
analysedStructure = newStructure;
@@ -58,8 +57,8 @@ async function handleConnect({ connection, structure }) {
lastPing = new Date().getTime();
if (!structure) setStatusName('pending');
const driver = engines(storedConnection);
systemConnection = await checkedAsyncCall(driverConnect(driver, storedConnection));
const driver = requireEngineDriver(storedConnection);
systemConnection = await checkedAsyncCall(driver.connect(storedConnection));
if (structure) {
analysedStructure = structure;
handleIncrementalRefresh();
@@ -82,7 +81,7 @@ function waitConnected() {
async function handleQueryData({ msgid, sql }) {
await waitConnected();
const driver = engines(storedConnection);
const driver = requireEngineDriver(storedConnection);
try {
const res = await driver.query(systemConnection, sql);
process.send({ msgtype: 'response', msgid, ...res });

View File

@@ -2,10 +2,12 @@ const connectProcess = require('./connectProcess');
const databaseConnectionProcess = require('./databaseConnectionProcess');
const serverConnectionProcess = require('./serverConnectionProcess');
const sessionProcess = require('./sessionProcess');
const jslDatastoreProcess = require('./jslDatastoreProcess');
module.exports = {
connectProcess,
databaseConnectionProcess,
serverConnectionProcess,
sessionProcess,
jslDatastoreProcess,
};

View File

@@ -0,0 +1,58 @@
const childProcessChecker = require('../utility/childProcessChecker');
const JsonLinesDatastore = require('../utility/JsonLinesDatastore');
let lastPing = null;
let datastore = new JsonLinesDatastore();
function handlePing() {
lastPing = new Date().getTime();
}
function handleOpen({ file }) {
handlePing();
datastore = new JsonLinesDatastore(file);
}
async function handleRead({ msgid, offset, limit }) {
handlePing();
const rows = await datastore.getRows(offset, limit);
process.send({ msgtype: 'response', msgid, rows });
}
async function handleNotify({ msgid }) {
await datastore.notifyChanged();
process.send({ msgtype: 'notify', msgid });
}
const messageHandlers = {
open: handleOpen,
read: handleRead,
ping: handlePing,
notify: handleNotify,
};
async function handleMessage({ msgtype, ...other }) {
const handler = messageHandlers[msgtype];
await handler(other);
}
function start() {
childProcessChecker();
setInterval(() => {
const time = new Date().getTime();
if (time - lastPing > 60 * 1000) {
process.exit(0);
}
}, 60 * 1000);
process.on('message', async (message) => {
try {
await handleMessage(message);
} catch (e) {
process.send({ msgtype: 'error', error: e.message });
}
});
}
module.exports = { start };

View File

@@ -1,7 +1,6 @@
const engines = require('@dbgate/engines');
const stableStringify = require('json-stable-stringify');
const driverConnect = require('../utility/driverConnect');
const childProcessChecker = require('../utility/childProcessChecker');
const requireEngineDriver = require('../utility/requireEngineDriver');
let systemConnection;
let storedConnection;
@@ -10,7 +9,7 @@ let lastStatus = null;
let lastPing = null;
async function handleRefresh() {
const driver = engines(storedConnection);
const driver = requireEngineDriver(storedConnection);
try {
const databases = await driver.listDatabases(systemConnection);
setStatusName('ok');
@@ -46,9 +45,9 @@ async function handleConnect(connection) {
setStatusName('pending');
lastPing = new Date().getTime();
const driver = engines(storedConnection);
const driver = requireEngineDriver(storedConnection);
try {
systemConnection = await driverConnect(driver, storedConnection);
systemConnection = await driver.connect(storedConnection);
handleRefresh();
setInterval(handleRefresh, 30 * 1000);
} catch (err) {
@@ -65,9 +64,18 @@ function handlePing() {
lastPing = new Date().getTime();
}
async function handleCreateDatabase({ name }) {
const driver = requireEngineDriver(storedConnection);
systemConnection = await driver.connect(storedConnection);
console.log(`RUNNING SCRIPT: CREATE DATABASE ${driver.dialect.quoteIdentifier(name)}`);
await driver.query(systemConnection, `CREATE DATABASE ${driver.dialect.quoteIdentifier(name)}`);
await handleRefresh();
}
const messageHandlers = {
connect: handleConnect,
ping: handlePing,
createDatabase: handleCreateDatabase,
};
async function handleMessage({ msgtype, ...other }) {

View File

@@ -1,12 +1,12 @@
const engines = require('@dbgate/engines');
const uuidv1 = require('uuid/v1');
const path = require('path');
const fs = require('fs');
const _ = require('lodash');
const childProcessChecker = require('../utility/childProcessChecker');
const goSplit = require('../utility/goSplit');
const driverConnect = require('../utility/driverConnect');
const { jsldir } = require('../utility/directories');
const requireEngineDriver = require('../utility/requireEngineDriver');
let systemConnection;
let storedConnection;
@@ -14,15 +14,16 @@ let afterConnectCallbacks = [];
let currentHandlers = [];
class TableWriter {
constructor(columns) {
constructor(columns, resultIndex) {
this.jslid = uuidv1();
this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
this.currentStream = fs.createWriteStream(this.currentFile);
this.currentRowCount = 0;
this.currentChangeIndex = 0;
fs.writeFileSync(`${this.currentFile}.info`, JSON.stringify(columns));
this.currentChangeIndex = 1;
fs.writeFileSync(this.currentFile, JSON.stringify({ columns }) + '\n');
this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
this.writeCurrentStats(false, false);
process.send({ msgtype: 'recordset', jslid: this.jslid });
this.resultIndex = resultIndex;
process.send({ msgtype: 'recordset', jslid: this.jslid, resultIndex });
}
row(row) {
@@ -63,7 +64,7 @@ class TableWriter {
}
class StreamHandler {
constructor() {
constructor(resultIndex) {
this.recordset = this.recordset.bind(this);
this.row = this.row.bind(this);
// this.error = this.error.bind(this);
@@ -72,6 +73,7 @@ class StreamHandler {
// use this for cancelling
this.stream = null;
this.plannedStats = false;
this.resultIndex = resultIndex;
currentHandlers = [...currentHandlers, this];
}
@@ -84,7 +86,7 @@ class StreamHandler {
recordset(columns) {
this.closeCurrentWriter();
this.currentWriter = new TableWriter(columns);
this.currentWriter = new TableWriter(columns, this.resultIndex);
// this.writeCurrentStats();
@@ -96,7 +98,8 @@ class StreamHandler {
}
row(row) {
// console.log('ACCEPT ROW', row);
this.currentWriter.row(row);
if (this.currentWriter) this.currentWriter.row(row);
else if (row.message) process.send({ msgtype: 'info', info: { message: row.message } });
// this.onRow(this.jslid);
}
// error(error) {
@@ -115,8 +118,8 @@ class StreamHandler {
async function handleConnect(connection) {
storedConnection = connection;
const driver = engines(storedConnection);
systemConnection = await driverConnect(driver, storedConnection);
const driver = requireEngineDriver(storedConnection);
systemConnection = await driver.connect(storedConnection);
for (const [resolve] of afterConnectCallbacks) {
resolve();
}
@@ -138,11 +141,15 @@ function waitConnected() {
async function handleExecuteQuery({ sql }) {
await waitConnected();
const driver = engines(storedConnection);
const driver = requireEngineDriver(storedConnection);
const handler = new StreamHandler();
const stream = await driver.stream(systemConnection, sql, handler);
handler.stream = stream;
let resultIndex = 0;
for (const sqlItem of goSplit(sql)) {
const handler = new StreamHandler(resultIndex);
const stream = await driver.stream(systemConnection, sqlItem, handler);
handler.stream = stream;
resultIndex += 1;
}
}
const messageHandlers = {

View File

@@ -0,0 +1,11 @@
const path = require('path');
const { archivedir } = require('../utility/directories');
const jsonLinesReader = require('./jsonLinesReader');
function archiveReader({ folderName, fileName, ...other }) {
const jsonlFile = path.join(archivedir(), folderName, `${fileName}.jsonl`);
const res = jsonLinesReader({ fileName: jsonlFile, ...other });
return res;
}
module.exports = archiveReader;

View File

@@ -0,0 +1,19 @@
const path = require('path');
const fs = require('fs');
const { archivedir } = require('../utility/directories');
// const socket = require('../utility/socket');
const jsonLinesWriter = require('./jsonLinesWriter');
function archiveWriter({ folderName, fileName }) {
const dir = path.join(archivedir(), folderName);
if (!fs.existsSync(dir)) {
console.log(`Creating directory ${dir}`);
fs.mkdirSync(dir);
}
const jsonlFile = path.join(dir, `${fileName}.jsonl`);
const res = jsonLinesWriter({ fileName: jsonlFile });
// socket.emitChanged(`archive-files-changed-${folderName}`);
return res;
}
module.exports = archiveWriter;
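
The two helpers above wrap the JSONL reader/writer (shown later in this diff) with archive-folder path resolution. A minimal round-trip sketch, assuming an illustrative 'default/countries' archive; the 'finisher' property is the underlying file stream set by jsonLinesWriter:

const archiveWriter = require('./archiveWriter');
const archiveReader = require('./archiveReader');

async function demo() {
  const writer = await archiveWriter({ folderName: 'default', fileName: 'countries' });
  writer.write({ columns: [{ columnName: 'id' }, { columnName: 'country' }] }); // structure line
  writer.write({ id: 1, country: 'Czechia' });
  writer.end();
  await new Promise((resolve) => writer['finisher'].on('finish', resolve)); // wait for file flush
  const reader = await archiveReader({ folderName: 'default', fileName: 'countries' });
  reader.on('data', (row) => console.log(row)); // first object is the structure line
}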

View File

@@ -0,0 +1,33 @@
const stream = require('stream');
class CollectorWriterStream extends stream.Writable {
constructor(options) {
super(options);
this.rows = [];
this.structure = null;
this.runid = options.runid;
}
_write(chunk, enc, next) {
if (!this.structure) this.structure = chunk;
else this.rows.push(chunk);
next();
}
_final(callback) {
process.send({
msgtype: 'freeData',
runid: this.runid,
freeData: { rows: this.rows, structure: this.structure },
});
callback();
}
}
async function collectorWriter({ runid }) {
return new CollectorWriterStream({
objectMode: true,
runid,
});
}
module.exports = collectorWriter;

View File

@@ -0,0 +1,16 @@
const stream = require('stream');
class ObjectWriterStream extends stream.Writable {
_write(chunk, enc, next) {
console.log(JSON.stringify(chunk));
next();
}
}
async function consoleObjectWriter() {
return new ObjectWriterStream({
objectMode: true,
});
}
module.exports = consoleObjectWriter;

View File

@@ -1,13 +0,0 @@
const csv = require('csv');
const fs = require('fs');
async function csvWriter({ fileName, encoding = 'utf-8', ...options }) {
console.log(`Writing file ${fileName}`);
const csvStream = csv.stringify(options);
const fileStream = fs.createWriteStream(fileName, encoding);
csvStream.pipe(fileStream);
csvStream['finisher'] = fileStream;
return csvStream;
}
module.exports = csvWriter;

View File

@@ -5,10 +5,13 @@ async function fakeObjectReader({ delay = 0 } = {}) {
objectMode: true,
});
function doWrite() {
pass.write({ columns: [{ columnName: 'id' }, { columnName: 'country' }] });
pass.write({ id: 1, country: 'Czechia' });
pass.write({ id: 2, country: 'Austria' });
pass.write({ id: 3, country: 'Germany' });
pass.write({ id: 4, country: 'Romania' });
pass.write({ country: 'Germany', id: 3 });
pass.write({ country: 'Romania', id: 4 });
pass.write({ country: 'Great Britain', id: 5 });
pass.write({ country: 'Bosna, Hercegovina', id: 6 });
pass.end();
}

View File

@@ -0,0 +1,12 @@
const finalizers = [];
module.exports = {
async run() {
for (const func of finalizers) {
await func();
}
},
register(func) {
finalizers.push(func);
},
};
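
A minimal usage sketch of the registry above; the caller that awaits run() (presumably runScript) is not shown in this diff, so the wiring here is an assumption:

const finalizer = require('./finalizer');

// Register cleanup work from anywhere in a shell script.
finalizer.register(async () => {
  console.log('closing connections'); // e.g. disconnect pools the script opened
});

// At the end of the script, run the callbacks in registration order:
finalizer.run().then(() => process.exit(0));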

View File

@@ -1,13 +1,38 @@
const queryReader = require('./queryReader');
const csvWriter = require('./csvWriter');
const runScript = require('./runScript');
const tableWriter = require('./tableWriter');
const tableReader = require('./tableReader');
const copyStream = require('./copyStream');
const fakeObjectReader = require('./fakeObjectReader');
const consoleObjectWriter = require('./consoleObjectWriter');
const jsonLinesWriter = require('./jsonLinesWriter');
const jsonLinesReader = require('./jsonLinesReader');
const jslDataReader = require('./jslDataReader');
const archiveWriter = require('./archiveWriter');
const archiveReader = require('./archiveReader');
const collectorWriter = require('./collectorWriter');
const finalizer = require('./finalizer');
const registerPlugins = require('./registerPlugins');
const requirePlugin = require('./requirePlugin');
module.exports = {
const dbgateApi = {
queryReader,
csvWriter,
runScript,
tableWriter,
tableReader,
copyStream,
jsonLinesWriter,
jsonLinesReader,
fakeObjectReader,
consoleObjectWriter,
jslDataReader,
archiveWriter,
archiveReader,
collectorWriter,
finalizer,
registerPlugins,
};
requirePlugin.initialize(dbgateApi);
module.exports = dbgateApi;

View File

@@ -0,0 +1,9 @@
const getJslFileName = require('../utility/getJslFileName');
const jsonLinesReader = require('./jsonLinesReader');
function jslDataReader({ jslid, ...other }) {
const fileName = getJslFileName(jslid);
return jsonLinesReader({ fileName, ...other });
}
module.exports = jslDataReader;

View File

@@ -0,0 +1,37 @@
const fs = require('fs');
const stream = require('stream');
const byline = require('byline');
class ParseStream extends stream.Transform {
constructor({ header, limitRows }) {
super({ objectMode: true });
this.header = header;
this.wasHeader = false;
this.limitRows = limitRows;
this.rowsWritten = 0;
}
_transform(chunk, encoding, done) {
const obj = JSON.parse(chunk);
if (!this.wasHeader) {
if (!this.header) this.push({ columns: Object.keys(obj).map((columnName) => ({ columnName })) });
this.wasHeader = true;
}
if (!this.limitRows || this.rowsWritten < this.limitRows) {
this.push(obj);
this.rowsWritten += 1;
}
done();
}
}
async function jsonLinesReader({ fileName, encoding = 'utf-8', header = true, limitRows = undefined }) {
console.log(`Reading file ${fileName}`);
const fileStream = fs.createReadStream(fileName, encoding);
const liner = byline(fileStream);
const parser = new ParseStream({ header, limitRows });
liner.pipe(parser);
return parser;
}
module.exports = jsonLinesReader;

View File

@@ -0,0 +1,30 @@
const fs = require('fs');
const stream = require('stream');
class StringifyStream extends stream.Transform {
constructor({ header }) {
super({ objectMode: true });
this.header = header;
this.wasHeader = false;
}
_transform(chunk, encoding, done) {
if (!this.wasHeader) {
if (this.header) this.push(JSON.stringify(chunk) + '\n');
this.wasHeader = true;
} else {
this.push(JSON.stringify(chunk) + '\n');
}
done();
}
}
async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true }) {
console.log(`Writing file ${fileName}`);
const stringify = new StringifyStream({ header });
const fileStream = fs.createWriteStream(fileName, encoding);
stringify.pipe(fileStream);
stringify['finisher'] = fileStream;
return stringify;
}
module.exports = jsonLinesWriter;
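
Since both ends are object-mode streams, the reader can be piped straight into the writer. A sketch copying one .jsonl file to another (paths illustrative):

const jsonLinesReader = require('./jsonLinesReader');
const jsonLinesWriter = require('./jsonLinesWriter');

async function copyJsonl(source, target) {
  const reader = await jsonLinesReader({ fileName: source, limitRows: 100 }); // the limit counts the leading structure line too
  const writer = await jsonLinesWriter({ fileName: target });
  reader.pipe(writer);
  // 'finisher' holds the underlying file stream; wait for it to flush
  await new Promise((resolve, reject) => {
    writer['finisher'].on('finish', resolve);
    writer['finisher'].on('error', reject);
  });
}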

View File

@@ -1,14 +1,12 @@
const driverConnect = require('../utility/driverConnect');
const engines = require('@dbgate/engines');
const requireEngineDriver = require("../utility/requireEngineDriver");
async function queryReader({ connection, sql }) {
console.log(`Reading query ${sql}`);
const driver = engines(connection);
const pool = await driverConnect(driver, connection);
const driver = requireEngineDriver(connection);
const pool = await driver.connect(connection);
console.log(`Connected.`);
return await driver.readableStream(pool, sql);
return await driver.readQuery(pool, sql);
}
module.exports = queryReader;

View File

@@ -0,0 +1,9 @@
const requirePlugin = require('./requirePlugin');
function registerPlugins(...plugins) {
for (const plugin of plugins) {
requirePlugin(plugin.packageName, plugin);
}
}
module.exports = registerPlugins;
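
A sketch of pre-registering a bundled plugin so later requirePlugin calls hit the cache instead of pluginsdir; the package name is illustrative:

const dbgateApi = require('./index');

// The plugin object must expose packageName; that is the key requirePlugin caches under.
dbgateApi.registerPlugins(require('dbgate-plugin-csv'));
// From now on, requirePlugin('dbgate-plugin-csv') returns this instance without touching disk.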

View File

@@ -0,0 +1,37 @@
const path = require('path');
const { pluginsdir } = require('../utility/directories');
const loadedPlugins = {};
const dbgateEnv = {
dbgateApi: null,
};
function requirePlugin(packageName, requiredPlugin = null) {
if (!packageName) throw new Error('Missing packageName in plugin');
if (loadedPlugins[packageName]) return loadedPlugins[packageName];
if (requiredPlugin == null) {
let module;
const modulePath = path.join(pluginsdir(), packageName, 'dist', 'backend.js');
console.log(`Loading module ${packageName} from ${modulePath}`);
try {
// @ts-ignore
module = __non_webpack_require__(modulePath);
} catch (err) {
console.error('Failed to load webpacked module', err);
module = require(modulePath);
}
requiredPlugin = module.__esModule ? module.default : module;
}
loadedPlugins[packageName] = requiredPlugin;
if (requiredPlugin.initialize) requiredPlugin.initialize(dbgateEnv);
return requiredPlugin;
}
requirePlugin.initialize = (value) => {
dbgateEnv.dbgateApi = value;
};
module.exports = requirePlugin;
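
For reference, a sketch of the shape a plugin's dist/backend.js needs to satisfy this loader, inferred from the code above; the package name and the contributed fields are hypothetical:

// Hypothetical dist/backend.js of a plugin package
let dbgateApi = null;

module.exports = {
  packageName: 'dbgate-plugin-example', // read by registerPlugins above (hypothetical name)
  initialize(dbgateEnv) {
    // requirePlugin injects the shell API here right after loading
    dbgateApi = dbgateEnv.dbgateApi;
  },
  // plus whatever the plugin contributes: driver, readers, writers, ...
};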

View File

@@ -0,0 +1,28 @@
const { quoteFullName } = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
async function tableReader({ connection, pureName, schemaName }) {
const driver = requireEngineDriver(connection);
const pool = await driver.connect(connection);
console.log(`Connected.`);
const fullName = { pureName, schemaName };
const table = await driver.analyseSingleObject(pool, fullName, 'tables');
const query = `select * from ${quoteFullName(driver.dialect, fullName)}`;
if (table) {
console.log(`Reading table ${table.pureName}`);
// @ts-ignore
return await driver.readQuery(pool, query, table);
}
const view = await driver.analyseSingleObject(pool, fullName, 'views');
if (view) {
console.log(`Reading view ${view.pureName}`);
// @ts-ignore
return await driver.readQuery(pool, query, view);
}
return await driver.readQuery(pool, query);
}
module.exports = tableReader;

View File

@@ -0,0 +1,12 @@
const requireEngineDriver = require("../utility/requireEngineDriver");
async function tableWriter({ connection, schemaName, pureName, ...options }) {
console.log(`Write table ${schemaName}.${pureName}`);
const driver = requireEngineDriver(connection);
const pool = await driver.connect(connection);
console.log(`Connected.`);
return await driver.writeTable(pool, { schemaName, pureName }, options);
}
module.exports = tableWriter;

View File

@@ -0,0 +1,75 @@
const { fork } = require('child_process');
const uuidv1 = require('uuid/v1');
class DatastoreProxy {
constructor(file) {
this.subprocess = null;
this.disconnected = false;
this.file = file;
this.requests = {};
this.handle_response = this.handle_response.bind(this);
this.handle_ping = this.handle_ping.bind(this);
this.notifyChangedCallback = null;
}
handle_response({ msgid, rows }) {
const [resolve, reject] = this.requests[msgid];
resolve(rows);
delete this.requests[msgid];
}
handle_ping() {}
handle_notify({ msgid }) {
const [resolve, reject] = this.requests[msgid];
resolve();
delete this.requests[msgid];
}
async ensureSubprocess() {
if (!this.subprocess) {
this.subprocess = fork(process.argv[1], ['jslDatastoreProcess']);
// @ts-ignore
this.subprocess.on('message', ({ msgtype, ...message }) => {
// if (this.disconnected) return;
this[`handle_${msgtype}`](message);
});
this.subprocess.on('exit', () => {
// if (this.disconnected) return;
this.subprocess = null;
});
this.subprocess.send({ msgtype: 'open', file: this.file });
}
return this.subprocess;
}
async getRows(offset, limit) {
await this.ensureSubprocess();
const msgid = uuidv1();
const promise = new Promise((resolve, reject) => {
this.requests[msgid] = [resolve, reject];
this.subprocess.send({ msgtype: 'read', msgid, offset, limit });
});
return promise;
}
async notifyChangedCore() {
const msgid = uuidv1();
const promise = new Promise((resolve, reject) => {
this.requests[msgid] = [resolve, reject];
this.subprocess.send({ msgtype: 'notify', msgid });
});
return promise;
}
async notifyChanged(callback) {
this.notifyChangedCallback = callback;
await this.notifyChangedCore();
const call = this.notifyChangedCallback;
this.notifyChangedCallback = null;
if (call) call();
}
}
module.exports = DatastoreProxy;
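
Each request allocates a msgid, parks its resolve/reject pair in this.requests, and the matching child response settles the promise. A usage sketch, assuming the process entry point dispatches '*Process' arguments as shown near the top of this diff (file path illustrative):

const DatastoreProxy = require('./DatastoreProxy');

async function demo() {
  const proxy = new DatastoreProxy('/tmp/query-result.jsonl'); // illustrative path
  const rows = await proxy.getRows(0, 100); // first call forks jslDatastoreProcess and sends 'open'
  console.log(`got ${rows.length} rows`);
}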

View File

@@ -0,0 +1,156 @@
const lineReader = require('line-reader');
const AsyncLock = require('async-lock');
const lock = new AsyncLock();
const stableStringify = require('json-stable-stringify');
const { evaluateCondition } = require('dbgate-sqltree');
async function fetchNextLine(reader) {
return new Promise((resolve, reject) => {
if (!reader.hasNextLine()) {
resolve(null);
return;
}
reader.nextLine((err, line) => {
if (err) {
reject(err);
} else {
resolve(line);
}
});
});
}
class JsonLinesDatastore {
constructor(file) {
this.file = file;
this.reader = null;
this.readedDataRowCount = 0;
this.readedSchemaRow = false;
this.notifyChangedCallback = null;
this.currentFilter = null;
}
_closeReader() {
if (!this.reader) return;
const reader = this.reader;
this.reader = null;
this.readedDataRowCount = 0;
this.readedSchemaRow = false;
this.currentFilter = null;
reader.close(() => {});
}
async notifyChanged(callback) {
this.notifyChangedCallback = callback;
await lock.acquire('reader', async () => {
this._closeReader();
});
const call = this.notifyChangedCallback;
this.notifyChangedCallback = null;
if (call) call();
}
async _openReader() {
return new Promise((resolve, reject) =>
lineReader.open(this.file, (err, reader) => {
if (err) reject(err);
resolve(reader);
})
);
}
async _readLine(parse) {
for (;;) {
const line = await fetchNextLine(this.reader);
if (!line) {
// EOF
return null;
}
if (!this.readedSchemaRow) {
this.readedSchemaRow = true;
return true;
}
if (this.currentFilter) {
const parsedLine = JSON.parse(line);
if (evaluateCondition(this.currentFilter, parsedLine)) {
this.readedDataRowCount += 1;
return parse ? parsedLine : true;
}
} else {
this.readedDataRowCount += 1;
return parse ? JSON.parse(line) : true;
}
}
}
async _ensureReader(offset, filter) {
if (this.readedDataRowCount > offset || stableStringify(filter) != stableStringify(this.currentFilter)) {
this._closeReader();
}
if (!this.reader) {
const reader = await this._openReader();
this.reader = reader;
this.currentFilter = filter;
}
if (!this.readedSchemaRow) {
await this._readLine(false); // skip structure
}
while (this.readedDataRowCount < offset) {
await this._readLine(false);
}
}
async getRows(offset, limit, filter) {
const res = [];
await lock.acquire('reader', async () => {
await this._ensureReader(offset, filter);
for (let i = 0; i < limit; i += 1) {
const line = await this._readLine(true);
if (line == null) break;
res.push(line);
}
});
// console.log('RETURN', res.length);
return res;
}
}
module.exports = JsonLinesDatastore;
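
The datastore keeps its line reader open between calls and only reopens the file when the requested offset moves backwards or the filter changes; forward paging just skips lines. A sketch with an illustrative path:

const JsonLinesDatastore = require('./JsonLinesDatastore');

async function demo() {
  const ds = new JsonLinesDatastore('/tmp/data.jsonl');
  const page1 = await ds.getRows(0, 50);  // skips the structure line, then reads 50 data rows
  const page2 = await ds.getRows(50, 50); // same reader continues; no reopen
  const back = await ds.getRows(0, 50);   // offset went backwards -> file is reopened
}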

View File

@@ -0,0 +1,24 @@
const fs = require('fs-extra');
const path = require('path');
const ageSeconds = 3600;
async function cleanDirectory(directory) {
const files = await fs.readdir(directory);
const now = new Date().getTime();
for (const file of files) {
const full = path.join(directory, file);
const stat = await fs.stat(full);
const mtime = stat.mtime.getTime();
const expirationTime = mtime + ageSeconds * 1000;
if (now > expirationTime) {
if (stat.isDirectory()) {
await fs.rmdir(full, { recursive: true });
} else {
await fs.unlink(full);
}
}
}
}
module.exports = cleanDirectory;
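
Entries older than ageSeconds (one hour) are removed, recursively for directories. ensureDirectory in directories.js (next file) calls this for the jsl, run and uploads folders; standalone usage would look like:

const cleanDirectory = require('./cleanDirectory');
const { rundir } = require('./directories');

cleanDirectory(rundir()).catch((err) => console.error('Directory cleanup failed', err));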

View File

@@ -1,43 +1,49 @@
const os = require('os');
const path = require('path');
const fs = require('fs');
const cleanDirectory = require('./cleanDirectory');
let createdDatadir = false;
const createDirectories = {};
const ensureDirectory = (dir, clean) => {
if (!createDirectories[dir]) {
if (clean && fs.existsSync(dir)) {
console.log(`Cleaning directory ${dir}`);
cleanDirectory(dir);
}
if (!fs.existsSync(dir)) {
console.log(`Creating directory ${dir}`);
fs.mkdirSync(dir);
}
createDirectories[dir] = true;
}
};
function datadir() {
const dir = path.join(os.homedir(), 'dbgate-data');
if (!createdDatadir) {
if (!fs.existsSync(dir)) {
console.log(`Creating data directory ${dir}`);
fs.mkdirSync(dir);
}
createdDatadir = true;
}
ensureDirectory(dir);
return dir;
}
const dirFunc = (dirname) => () => {
const dirFunc = (dirname, clean = false) => () => {
const dir = path.join(datadir(), dirname);
if (!createDirectories[dirname]) {
if (!fs.existsSync(dir)) {
console.log(`Creating jsl directory ${dir}`);
fs.mkdirSync(dir);
}
createDirectories[dirname] = true;
}
ensureDirectory(dir, clean);
return dir;
};
const jsldir = dirFunc('jsl');
const rundir = dirFunc('run');
const uploadsdir = dirFunc('uploads');
const jsldir = dirFunc('jsl', true);
const rundir = dirFunc('run', true);
const uploadsdir = dirFunc('uploads', true);
const pluginsdir = dirFunc('plugins');
const archivedir = dirFunc('archive');
module.exports = {
datadir,
jsldir,
rundir,
uploadsdir,
archivedir,
ensureDirectory,
pluginsdir,
};

View File

@@ -0,0 +1,64 @@
// const pacote = require('pacote');
const axios = require('axios');
// const tarballExtract = require('tarball-extract');
const uuidv1 = require('uuid/v1');
const path = require('path');
const fs = require('fs');
const zlib = require('zlib');
const tar = require('tar');
const ncp = require('ncp').ncp;
const { uploadsdir } = require('./directories');
function extractTarball(tmpFile, destination) {
return new Promise((resolve, reject) => {
fs.createReadStream(tmpFile)
.pipe(zlib.createGunzip())
.pipe(tar.extract({ cwd: destination }))
.on('error', (err) => reject(err))
.on('end', () => resolve());
});
}
function saveStreamToFile(pipedStream, fileName) {
return new Promise((resolve, reject) => {
const fileStream = fs.createWriteStream(fileName);
fileStream.on('close', () => resolve());
pipedStream.pipe(fileStream);
});
}
function copyDirectory(source, target) {
return new Promise((resolve, reject) => {
ncp(source, target, (err) => {
if (err) reject(err);
resolve();
});
});
}
async function downloadPackage(packageName, directory) {
// await pacote.extract(packageName, directory);
const infoResp = await axios.default.get(`https://registry.npmjs.org/${packageName}`);
const { latest } = infoResp.data['dist-tags'] || {};
if (!latest) return false;
const tarball = infoResp.data.versions[latest].dist.tarball;
const tmpFile = path.join(uploadsdir(), uuidv1() + '.tgz');
console.log(`Downloading tarball ${tarball} into ${tmpFile}`);
const tarballResp = await axios.default({
method: 'get',
url: tarball,
responseType: 'stream',
});
await saveStreamToFile(tarballResp.data, tmpFile);
const tmpDir = path.join(uploadsdir(), uuidv1());
fs.mkdirSync(tmpDir);
await extractTarball(tmpFile, tmpDir);
await copyDirectory(path.join(tmpDir, 'package'), directory);
return true;
}
module.exports = downloadPackage;
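
A sketch of installing a plugin through this helper: it resolves the latest dist-tag from the npm registry, downloads and unpacks the tarball, and copies the extracted package/ folder into the target directory. The plugin name below is illustrative:

const path = require('path');
const downloadPackage = require('./downloadPackage');
const { pluginsdir } = require('./directories');

async function installPlugin(packageName) {
  const target = path.join(pluginsdir(), packageName);
  const ok = await downloadPackage(packageName, target);
  if (!ok) throw new Error(`Package ${packageName} has no latest dist-tag`);
}

installPlugin('dbgate-plugin-csv'); // illustrative package name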

View File

@@ -1,21 +0,0 @@
const mssql = require('mssql');
const mysql = require('mysql');
const pg = require('pg');
const pgQueryStream = require('pg-query-stream');
const fs = require('fs');
const stream = require('stream');
const nativeModules = {
mssql,
mysql,
pg,
pgQueryStream,
fs,
stream,
};
function driverConnect(driver, connection) {
return driver.connect(nativeModules, connection);
}
module.exports = driverConnect;

View File

@@ -0,0 +1,15 @@
const fs = require('fs-extra');
async function saveFreeTableData(file, data) {
const { structure, rows } = data;
const fileStream = fs.createWriteStream(file);
await fileStream.write(JSON.stringify(structure) + '\n');
for (const row of rows) {
await fileStream.write(JSON.stringify(row) + '\n');
}
await fileStream.close();
}
module.exports = {
saveFreeTableData,
};

View File

@@ -0,0 +1,12 @@
const path = require('path');
const { jsldir, archivedir } = require('./directories');
function getJslFileName(jslid) {
const archiveMatch = jslid.match(/^archive:\/\/([^/]+)\/(.*)$/);
if (archiveMatch) {
return path.join(archivedir(), archiveMatch[1], `${archiveMatch[2]}.jsonl`);
}
return path.join(jsldir(), `${jslid}.jsonl`);
}
module.exports = getJslFileName;
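
The jslid is either a plain identifier (a session result set under the jsl directory) or an archive:// URL; both mappings in a sketch:

const getJslFileName = require('./getJslFileName');

getJslFileName('1b671a64-40d5-491e-99b0-da01ff1f3341'); // plain jslid -> <datadir>/jsl/<jslid>.jsonl
getJslFileName('archive://default/customers');          // -> <datadir>/archive/default/customers.jsonl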

View File

@@ -0,0 +1,18 @@
function goSplit(sql) {
if (!sql) return [];
const lines = sql.split('\n');
const res = [];
let buffer = '';
for (const line of lines) {
if (/^\s*go\s*$/i.test(line)) {
if (buffer.trim()) res.push(buffer);
buffer = '';
} else {
buffer += line + '\n';
}
}
if (buffer.trim()) res.push(buffer);
return res;
}
module.exports = goSplit;
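
The splitter treats any line containing only GO (case-insensitive, optional whitespace) as a T-SQL batch separator; handleExecuteQuery earlier in this diff uses it to open one stream per batch. For example:

const goSplit = require('./goSplit');

goSplit('select 1\nGO\nselect 2'); // -> ['select 1\n', 'select 2\n']
goSplit('select 1');               // -> ['select 1\n'] (no separator, single batch)
goSplit('');                       // -> []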

View File

@@ -0,0 +1,24 @@
const _ = require('lodash');
const requirePlugin = require('../shell/requirePlugin');
/** @returns {import('dbgate-types').EngineDriver} */
function requireEngineDriver(connection) {
let engine = null;
if (_.isString(connection)) {
engine = connection;
} else if (_.isPlainObject(connection)) {
engine = connection.engine;
}
if (!engine) {
throw new Error('Could not get driver from connection');
}
if (engine.includes('@')) {
const [shortName, packageName] = engine.split('@');
const plugin = requirePlugin(packageName);
return plugin.driver;
}
throw new Error(`Could not find engine driver ${engine}`);
}
module.exports = requireEngineDriver;
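
Engine identifiers now embed the providing plugin after '@'; the function loads that plugin and returns its driver export. A sketch with an illustrative plugin package name:

const requireEngineDriver = require('./requireEngineDriver');

// '<shortName>@<packageName>': the part after '@' names the plugin to load (package name illustrative)
const driver = requireEngineDriver({ engine: 'mssql@dbgate-plugin-mssql' });
// Plain engine names without an '@' part are no longer resolvable and throw.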

View File

@@ -8,11 +8,11 @@ module.exports = {
return socket;
},
emit(message, data) {
console.log('EMIT:', message, data);
// console.log('EMIT:', message, data);
socket.emit(message, data);
},
emitChanged(key) {
console.log('EMIT_CHANGED:', key);
// console.log('EMIT_CHANGED:', key);
socket.emit('clean-cache', key);
socket.emit(key);
},

View File

@@ -11,12 +11,13 @@ var config = {
output: {
path: path.resolve(__dirname, 'dist'),
filename: 'bundle.js',
libraryTarget: 'commonjs2',
},
optimization: {
minimize: false
},
// optimization: {
// minimize: false,
// },
// module: {
// rules: [
// {
@@ -28,7 +29,7 @@ var config = {
plugins: [
new webpack.IgnorePlugin({
checkResource(resource) {
const lazyImports = ['pg-native', 'uws'];
const lazyImports = ['uws'];
if (!lazyImports.includes(resource)) {
return false;
}

View File

@@ -1,6 +1,6 @@
{
"version": "0.1.0",
"name": "@dbgate/datalib",
"version": "1.0.0",
"name": "dbgate-datalib",
"main": "lib/index.js",
"typings": "lib/index.d.ts",
"scripts": {
@@ -12,11 +12,11 @@
"lib"
],
"dependencies": {
"@dbgate/sqltree": "^0.1.0",
"@dbgate/filterparser": "^0.1.0"
"dbgate-sqltree": "^1.0.0",
"dbgate-filterparser": "^1.0.0"
},
"devDependencies": {
"@dbgate/types": "^0.1.0",
"dbgate-types": "^1.0.0",
"@types/node": "^13.7.0",
"typescript": "^3.7.5"
}

View File

@@ -1,10 +1,10 @@
import _ from 'lodash';
import { Command, Insert, Update, Delete, UpdateField, Condition, AllowIdentityInsert } from '@dbgate/sqltree';
import { NamedObjectInfo, DatabaseInfo } from '@dbgate/types';
import { Command, Insert, Update, Delete, UpdateField, Condition, AllowIdentityInsert } from 'dbgate-sqltree';
import { NamedObjectInfo, DatabaseInfo } from 'dbgate-types';
export interface ChangeSetItem {
pureName: string;
schemaName: string;
schemaName?: string;
insertedRowIndex?: number;
condition?: { [column: string]: string };
fields?: { [column: string]: string };

View File

@@ -0,0 +1,44 @@
import _ from 'lodash';
import { EngineDriver, ViewInfo, ColumnInfo } from 'dbgate-types';
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
import { GridConfig, GridCache } from './GridConfig';
import { FreeTableModel } from './FreeTableModel';
export class FreeTableGridDisplay extends GridDisplay {
constructor(
public model: FreeTableModel,
config: GridConfig,
setConfig: ChangeConfigFunc,
cache: GridCache,
setCache: ChangeCacheFunc
) {
super(config, setConfig, cache, setCache);
this.columns = this.getDisplayColumns(model);
this.filterable = false;
this.sortable = false;
}
getDisplayColumns(model: FreeTableModel) {
return (
model?.structure?.columns
?.map((col) => this.getDisplayColumn(col))
?.map((col) => ({
...col,
isChecked: this.isColumnChecked(col),
})) || []
);
}
getDisplayColumn(col: ColumnInfo) {
const uniquePath = [col.columnName];
const uniqueName = uniquePath.join('.');
return {
...col,
pureName: 'data',
schemaName: '',
headerText: col.columnName,
uniqueName,
uniquePath,
};
}
}

View File

@@ -0,0 +1,27 @@
import { TableInfo } from 'dbgate-types';
export interface FreeTableModel {
structure: TableInfo;
rows: any[];
}
export function createFreeTableModel() {
return {
structure: {
columns: [
{
columnName: 'col1',
},
],
foreignKeys: [],
},
rows: [
{
col1: 'val1',
},
{
col1: 'val2',
},
],
};
}

View File

@@ -1,5 +1,5 @@
import { DisplayColumn } from './GridDisplay';
import { TableInfo } from '@dbgate/types';
import { TableInfo } from 'dbgate-types';
export interface GridConfigColumns {
hiddenColumns: string[];
@@ -16,6 +16,8 @@ export interface GridReferenceDefinition {
}[];
}
export type GroupFunc = 'GROUP' | 'MAX' | 'MIN' | 'SUM' | 'AVG' | 'COUNT' | 'COUNT DISTINCT' | 'NULL';
export interface GridConfig extends GridConfigColumns {
filters: { [uniqueName: string]: string };
focusedColumn?: string;
@@ -24,6 +26,9 @@ export interface GridConfig extends GridConfigColumns {
uniqueName: string;
order: 'ASC' | 'DESC';
}[];
grouping: { [uniqueName: string]: GroupFunc };
childConfig?: GridConfig;
reference?: GridReferenceDefinition;
}
export interface GridCache {
@@ -39,6 +44,7 @@ export function createGridConfig(): GridConfig {
columnWidths: {},
sort: [],
focusedColumn: null,
grouping: {},
};
}

View File

@@ -1,10 +1,11 @@
import _ from 'lodash';
import { GridConfig, GridCache, GridConfigColumns, createGridCache } from './GridConfig';
import { ForeignKeyInfo, TableInfo, ColumnInfo, DbType, EngineDriver, NamedObjectInfo, DatabaseInfo } from '@dbgate/types';
import { parseFilter, getFilterType } from '@dbgate/filterparser';
import { GridConfig, GridCache, GridConfigColumns, createGridCache, GroupFunc } from './GridConfig';
import { ForeignKeyInfo, TableInfo, ColumnInfo, EngineDriver, NamedObjectInfo, DatabaseInfo } from 'dbgate-types';
import { parseFilter, getFilterType } from 'dbgate-filterparser';
import { filterName } from './filterName';
import { ChangeSetFieldDefinition, ChangeSetRowDefinition } from './ChangeSet';
import { Expression, Select, treeToSql, dumpSqlSelect } from '@dbgate/sqltree';
import { Expression, Select, treeToSql, dumpSqlSelect, Condition } from 'dbgate-sqltree';
import { isTypeLogical } from 'dbgate-tools';
export interface DisplayColumn {
schemaName: string;
@@ -19,7 +20,7 @@ export interface DisplayColumn {
foreignKey?: ForeignKeyInfo;
isChecked?: boolean;
hintColumnName?: string;
commonType?: DbType;
dataType?: string;
}
export interface DisplayedColumnEx extends DisplayColumn {
@@ -57,6 +58,7 @@ export abstract class GridDisplay {
filterable = false;
editable = false;
isLoadedCorrectly = true;
supportsReload = false;
setColumnVisibility(uniquePath: string[], isVisible: boolean) {
const uniqueName = uniquePath.join('.');
@@ -75,6 +77,10 @@ export abstract class GridDisplay {
}));
}
get hasReferences() {
return false;
}
get focusedColumn() {
return this.config.focusedColumn;
}
@@ -143,7 +149,7 @@ export abstract class GridDisplay {
const column = displayedColumnInfo[uniqueName];
if (!column) continue;
try {
const condition = parseFilter(filter, getFilterType(column.commonType?.typeCode));
const condition = parseFilter(filter, getFilterType(column.dataType));
if (condition) {
conditions.push(
_.cloneDeepWith(condition, (expr: Expression) => {
@@ -174,16 +180,87 @@ export abstract class GridDisplay {
if (this.config.sort?.length > 0) {
select.orderBy = this.config.sort
.map((col) => ({ ...col, dispInfo: displayedColumnInfo[col.uniqueName] }))
.filter((col) => col.dispInfo)
.map((col) => ({ ...col, expr: select.columns.find((x) => x.alias == col.uniqueName) }))
.filter((col) => col.dispInfo && col.expr)
.map((col) => ({
exprType: 'column',
columnName: col.dispInfo.columnName,
...col.expr,
direction: col.order,
source: { alias: col.dispInfo.sourceAlias },
}));
}
}
get isGrouped() {
return !_.isEmpty(this.config.grouping);
}
get groupColumns() {
return this.isGrouped
? _.keys(_.pickBy(this.config.grouping, (v) => v == 'GROUP' || v.startsWith('GROUP:')))
: null;
}
applyGroupOnSelect(select: Select, displayedColumnInfo: DisplayedColumnInfo) {
const groupColumns = this.groupColumns;
if (groupColumns && groupColumns.length > 0) {
// @ts-ignore
select.groupBy = groupColumns.map((col) => {
const colExpr: Expression = {
exprType: 'column',
columnName: displayedColumnInfo[col].columnName,
source: { alias: displayedColumnInfo[col].sourceAlias },
};
const grouping = this.config.grouping[col];
if (grouping.startsWith('GROUP:')) {
return {
exprType: 'transform',
transform: grouping,
expr: colExpr,
};
} else {
return colExpr;
}
});
}
if (!_.isEmpty(this.config.grouping)) {
for (let i = 0; i < select.columns.length; i++) {
const uniqueName = select.columns[i].alias;
// if (groupColumns && groupColumns.includes(uniqueName)) continue;
const grouping = this.getGrouping(uniqueName);
if (grouping == 'GROUP') {
continue;
} else if (grouping == 'NULL') {
select.columns[i].alias = null;
} else if (grouping && grouping.startsWith('GROUP:')) {
select.columns[i] = {
exprType: 'transform',
transform: grouping as any,
expr: select.columns[i],
alias: select.columns[i].alias,
};
} else {
let func = 'MAX';
let argsPrefix = '';
if (grouping) {
if (grouping == 'COUNT DISTINCT') {
func = 'COUNT';
argsPrefix = 'DISTINCT ';
} else {
func = grouping;
}
}
select.columns[i] = {
alias: select.columns[i].alias,
exprType: 'call',
func,
argsPrefix,
args: [select.columns[i]],
};
}
}
select.columns = select.columns.filter((x) => x.alias);
}
}
getColumns(columnFilter) {
return this.columns.filter((col) => filterName(columnFilter, col.columnName));
}
@@ -215,6 +292,17 @@ export abstract class GridDisplay {
this.reload();
}
setFilters(dct) {
this.setConfig((cfg) => ({
...cfg,
filters: {
...cfg.filters,
...dct,
},
}));
this.reload();
}
setSort(uniqueName, order) {
this.setConfig((cfg) => ({
...cfg,
@@ -223,6 +311,38 @@ export abstract class GridDisplay {
this.reload();
}
setGrouping(uniqueName, groupFunc: GroupFunc) {
this.setConfig((cfg) => ({
...cfg,
grouping: groupFunc
? {
...cfg.grouping,
[uniqueName]: groupFunc,
}
: _.omitBy(cfg.grouping, (v, k) => k == uniqueName),
}));
this.reload();
}
getGrouping(uniqueName): GroupFunc {
if (this.isGrouped) {
if (this.config.grouping[uniqueName]) return this.config.grouping[uniqueName];
const column = this.baseTable.columns.find((x) => x.columnName == uniqueName);
if (isTypeLogical(column?.dataType)) return 'COUNT DISTINCT';
if (column?.autoIncrement) return 'COUNT';
return 'MAX';
}
return null;
}
clearGrouping() {
this.setConfig((cfg) => ({
...cfg,
grouping: {},
}));
this.reload();
}
getSortOrder(uniqueName) {
return this.config.sort.find((x) => x.uniqueName == uniqueName)?.order;
}
@@ -266,13 +386,13 @@ export abstract class GridDisplay {
};
}
createSelect(): Select {
createSelect(options = {}): Select {
return null;
}
processReferences(select: Select, displayedColumnInfo: DisplayedColumnInfo) {}
processReferences(select: Select, displayedColumnInfo: DisplayedColumnInfo, options) {}
createSelectBase(name: NamedObjectInfo, columns: ColumnInfo[]) {
createSelectBase(name: NamedObjectInfo, columns: ColumnInfo[], options) {
if (!columns) return null;
const orderColumnName = columns[0].columnName;
const select: Select = {
@@ -296,13 +416,15 @@ export abstract class GridDisplay {
this.columns.map((col) => ({ ...col, sourceAlias: 'basetbl' })),
'uniqueName'
);
this.processReferences(select, displayedColumnInfo);
this.processReferences(select, displayedColumnInfo, options);
this.applyFilterOnSelect(select, displayedColumnInfo);
this.applyGroupOnSelect(select, displayedColumnInfo);
this.applySortOnSelect(select, displayedColumnInfo);
return select;
}
getPageQuery(offset: number, count: number) {
if (!this.driver) return null;
const select = this.createSelect();
if (!select) return null;
if (this.driver.dialect.rangeSelect) select.range = { offset: offset, limit: count };
@@ -311,6 +433,13 @@ export abstract class GridDisplay {
return sql;
}
getExportQuery() {
const select = this.createSelect({ isExport: true });
if (!select) return null;
const sql = treeToSql(this.driver, select, dumpSqlSelect);
return sql;
}
resizeColumn(uniqueName: string, computedSize: number, diff: number) {
this.setConfig((cfg) => {
const columnWidths = {
@@ -329,16 +458,63 @@ export abstract class GridDisplay {
}
getCountQuery() {
const select = this.createSelect();
select.columns = [
{
exprType: 'raw',
sql: 'COUNT(*)',
alias: 'count',
},
];
let select = this.createSelect();
select.orderBy = null;
if (this.isGrouped) {
select = {
commandType: 'select',
from: {
subQuery: select,
alias: 'subq',
},
columns: [
{
exprType: 'raw',
sql: 'COUNT(*)',
alias: 'count',
},
],
};
} else {
select.columns = [
{
exprType: 'raw',
sql: 'COUNT(*)',
alias: 'count',
},
];
}
const sql = treeToSql(this.driver, select, dumpSqlSelect);
return sql;
}
compileFilters(): Condition {
const filters = this.config && this.config.filters;
if (!filters) return null;
const conditions = [];
for (const name in filters) {
const column = this.columns.find((x) => x.columnName == name);
if (!column) continue;
const filterType = getFilterType(column.dataType);
try {
const condition = parseFilter(filters[name], filterType);
const replaced = _.cloneDeepWith(condition, (expr: Expression) => {
if (expr.exprType == 'placeholder')
return {
exprType: 'column',
columnName: column.columnName,
};
});
conditions.push(replaced);
} catch (err) {
// filter parse error - ignore filter
}
}
if (conditions.length == 0) return null;
return {
conditionType: 'and',
conditions,
};
}
}

View File

@@ -1,5 +1,5 @@
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
import { QueryResultColumn } from '@dbgate/types';
import { QueryResultColumn } from 'dbgate-types';
import { GridConfig, GridCache } from './GridConfig';
export class JslGridDisplay extends GridDisplay {
@@ -13,6 +13,8 @@ export class JslGridDisplay extends GridDisplay {
) {
super(config, setConfig, cache, setCache, null);
this.filterable = true;
this.columns = columns
.map((col) => ({
columnName: col.columnName,

View File

@@ -0,0 +1,22 @@
import _ from 'lodash';
export interface MacroArgument {
type: 'text' | 'select';
label: string;
name: string;
}
export interface MacroDefinition {
title: string;
name: string;
group: string;
description?: string;
type: 'transformValue';
code: string;
args?: MacroArgument[];
}
export interface MacroSelectedCell {
column: string;
row: number;
}

View File

@@ -1,8 +1,8 @@
import _ from 'lodash';
import { GridDisplay, ChangeCacheFunc, DisplayColumn, DisplayedColumnInfo, ChangeConfigFunc } from './GridDisplay';
import { TableInfo, EngineDriver, ViewInfo, ColumnInfo, NamedObjectInfo, DatabaseInfo } from '@dbgate/types';
import { TableInfo, EngineDriver, ViewInfo, ColumnInfo, NamedObjectInfo, DatabaseInfo } from 'dbgate-types';
import { GridConfig, GridCache, createGridCache } from './GridConfig';
import { Expression, Select, treeToSql, dumpSqlSelect } from '@dbgate/sqltree';
import { Expression, Select, treeToSql, dumpSqlSelect } from 'dbgate-sqltree';
import { filterName } from './filterName';
export class TableGridDisplay extends GridDisplay {
@@ -32,6 +32,7 @@ export class TableGridDisplay extends GridDisplay {
this.filterable = true;
this.sortable = true;
this.editable = true;
this.supportsReload = true;
this.baseTable = this.table;
if (this.table && this.table.columns) {
this.changeSetKeyFields = this.table.primaryKey
@@ -40,7 +41,7 @@ export class TableGridDisplay extends GridDisplay {
}
}
findTable({ schemaName, pureName }) {
findTable({ schemaName = undefined, pureName }) {
return (
this.dbinfo &&
this.dbinfo.tables &&
@@ -70,8 +71,8 @@ export class TableGridDisplay extends GridDisplay {
this.addReferenceToSelect(select, parentAlias, column);
this.addJoinsFromExpandedColumns(select, subcolumns, childAlias, columnSources)
this.addAddedColumnsToSelect(select, subcolumns, childAlias, columnSources)
this.addJoinsFromExpandedColumns(select, subcolumns, childAlias, columnSources);
this.addAddedColumnsToSelect(select, subcolumns, childAlias, columnSources);
}
}
}
@@ -81,7 +82,7 @@ export class TableGridDisplay extends GridDisplay {
const childAlias = `${column.uniqueName}_ref`;
if ((select.from.relations || []).find((x) => x.alias == childAlias)) return;
const table = this.getFkTarget(column);
if (table) {
if (table && table.primaryKey) {
select.from.relations = [
...(select.from.relations || []),
{
@@ -111,10 +112,14 @@ export class TableGridDisplay extends GridDisplay {
addHintsToSelect(select: Select): boolean {
let res = false;
const groupColumns = this.groupColumns;
for (const column of this.getGridColumns()) {
if (column.foreignKey) {
if (groupColumns && !groupColumns.includes(column.uniqueName)) {
continue;
}
const table = this.getFkTarget(column);
if (table && table.columns && table.columns.length > 0) {
if (table && table.columns && table.columns.length > 0 && table.primaryKey) {
const hintColumn = table.columns.find((x) => x?.dataType?.toLowerCase()?.includes('char'));
if (hintColumn) {
const parentUniqueName = column.uniquePath.slice(0, -1).join('.');
@@ -158,14 +163,16 @@ export class TableGridDisplay extends GridDisplay {
return this.findTable({ schemaName, pureName });
}
processReferences(select: Select, displayedColumnInfo: DisplayedColumnInfo) {
processReferences(select: Select, displayedColumnInfo: DisplayedColumnInfo, options) {
this.addJoinsFromExpandedColumns(select, this.columns, 'basetbl', displayedColumnInfo);
this.addHintsToSelect(select);
if (!options.isExport) {
this.addHintsToSelect(select);
}
}
createSelect() {
createSelect(options = {}) {
if (!this.table) return null;
const select = this.createSelectBase(this.table, this.table.columns);
const select = this.createSelectBase(this.table, this.table.columns, options);
return select;
}
@@ -212,4 +219,11 @@ export class TableGridDisplay extends GridDisplay {
}
}
}
get hasReferences() {
if (!this.table) return false;
if (this.table.foreignKeys && this.table.foreignKeys.length > 0) return true;
if (this.table.dependencies && this.table.dependencies.length > 0) return true;
return false;
}
}

View File

@@ -1,6 +1,6 @@
import _ from 'lodash';
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
import { EngineDriver, ViewInfo, ColumnInfo } from '@dbgate/types';
import { EngineDriver, ViewInfo, ColumnInfo } from 'dbgate-types';
import { GridConfig, GridCache } from './GridConfig';
export class ViewGridDisplay extends GridDisplay {
@@ -16,7 +16,8 @@ export class ViewGridDisplay extends GridDisplay {
this.columns = this.getDisplayColumns(view);
this.filterable = true;
this.sortable = true;
this.editable = true;
this.editable = false;
this.supportsReload = true;
}
getDisplayColumns(view: ViewInfo) {
@@ -43,8 +44,8 @@ export class ViewGridDisplay extends GridDisplay {
};
}
createSelect() {
const select = this.createSelectBase(this.view, this.view.columns);
createSelect(options = {}) {
const select = this.createSelectBase(this.view, this.view.columns, options);
return select;
}
}

View File

@@ -5,4 +5,7 @@ export * from "./ViewGridDisplay";
export * from "./JslGridDisplay";
export * from "./ChangeSet";
export * from "./filterName";
export * from "./nameTools";
export * from "./FreeTableGridDisplay";
export * from "./FreeTableModel";
export * from "./MacroDefinition";
export * from "./runMacro";

View File

@@ -1,25 +0,0 @@
export function fullNameFromString(name) {
const m = name.match(/\[([^\]]+)\]\.\[([^\]]+)\]/);
if (m) {
return {
schemaName: m[1],
pureName: m[2],
};
}
return {
schemaName: null,
pureName: name,
};
}
export function fullNameToString({ schemaName, pureName }) {
if (schemaName) {
return `[${schemaName}].[${pureName}]`;
}
return pureName;
}
export function quoteFullName(dialect, { schemaName, pureName }) {
if (schemaName) return `${dialect.quoteIdentifier(schemaName)}.${dialect.quoteIdentifier(pureName)}`;
return `${dialect.quoteIdentifier(pureName)}`;
}

View File

@@ -0,0 +1,185 @@
import { FreeTableModel } from './FreeTableModel';
import _ from 'lodash';
import uuidv1 from 'uuid/v1';
import uuidv4 from 'uuid/v4';
import moment from 'moment';
import { MacroDefinition, MacroSelectedCell } from './MacroDefinition';
const getMacroFunction = {
transformValue: (code) => `
(value, args, modules, rowIndex, row, columnName) => {
${code}
}
`,
transformRows: (code) => `
(rows, args, modules, selectedCells, cols, columns) => {
${code}
}
`,
transformData: (code) => `
(rows, args, modules, selectedCells, cols, columns) => {
${code}
}
`,
};
const modules = {
lodash: _,
uuidv1,
uuidv4,
moment,
};
function runTransformValue(
func,
macroArgs: {},
data: FreeTableModel,
preview: boolean,
selectedCells: MacroSelectedCell[],
errors: string[] = []
) {
const selectedRows = _.groupBy(selectedCells, 'row');
const rows = data.rows.map((row, rowIndex) => {
const selectedRow = selectedRows[rowIndex];
if (selectedRow) {
const modifiedFields = [];
let res = null;
for (const cell of selectedRow) {
const { column } = cell;
const oldValue = row[column];
let newValue = oldValue;
try {
newValue = func(oldValue, macroArgs, modules, rowIndex, row, column);
} catch (err) {
errors.push(`Error processing column ${column} on row ${rowIndex}: ${err.message}`);
}
if (newValue != oldValue) {
if (res == null) {
res = { ...row };
}
res[column] = newValue;
if (preview) modifiedFields.push(column);
}
}
if (res) {
if (modifiedFields.length > 0) {
return {
...res,
__modifiedFields: new Set(modifiedFields),
};
}
return res;
}
return row;
} else {
return row;
}
});
return {
structure: data.structure,
rows,
};
}
function removePreviewRowFlags(rows) {
rows = rows.filter((row) => row.__rowStatus != 'deleted');
rows = rows.map((row) => {
if (row.__rowStatus || row.__modifiedFields || row.__insertedFields || row.__deletedFields)
return _.omit(row, ['__rowStatus', '__modifiedFields', '__insertedFields', '__deletedFields']);
return row;
});
return rows;
}
function runTransformRows(
func,
macroArgs: {},
data: FreeTableModel,
preview: boolean,
selectedCells: MacroSelectedCell[],
errors: string[] = []
) {
let rows = data.rows;
try {
rows = func(
data.rows,
macroArgs,
modules,
selectedCells,
data.structure.columns.map((x) => x.columnName),
data.structure.columns
);
if (!preview) {
rows = removePreviewRowFlags(rows);
}
} catch (err) {
errors.push(`Error processing rows: ${err.message}`);
}
return {
structure: data.structure,
rows,
};
}
function runTransformData(
func,
macroArgs: {},
data: FreeTableModel,
preview: boolean,
selectedCells: MacroSelectedCell[],
errors: string[] = []
) {
try {
let { rows, columns, cols } = func(
data.rows,
macroArgs,
modules,
selectedCells,
data.structure.columns.map((x) => x.columnName),
data.structure.columns
);
if (cols && !columns) {
columns = cols.map((columnName) => ({ columnName }));
}
columns = _.uniqBy(columns, 'columnName');
if (!preview) {
rows = removePreviewRowFlags(rows);
}
return {
structure: { columns },
rows,
};
} catch (err) {
errors.push(`Error processing data: ${err.message}`);
}
return data;
}
export function runMacro(
macro: MacroDefinition,
macroArgs: {},
data: FreeTableModel,
preview: boolean,
selectedCells: MacroSelectedCell[],
errors: string[] = []
): FreeTableModel {
let func;
try {
func = eval(getMacroFunction[macro.type](macro.code));
} catch (err) {
errors.push(`Error compiling macro ${macro.name}: ${err.message}`);
return data;
}
if (macro.type == 'transformValue') {
return runTransformValue(func, macroArgs, data, preview, selectedCells, errors);
}
if (macro.type == 'transformRows') {
return runTransformRows(func, macroArgs, data, preview, selectedCells, errors);
}
if (macro.type == 'transformData') {
// @ts-ignore
return runTransformData(func, macroArgs, data, preview, selectedCells, errors);
}
return data;
}
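
The macro's code string becomes the body of an eval-compiled arrow function; for transformValue it receives (value, args, modules, rowIndex, row, columnName) once per selected cell. A sketch using the default model from FreeTableModel above (macro metadata is illustrative):

import { runMacro } from './runMacro';
import { MacroDefinition } from './MacroDefinition';
import { createFreeTableModel } from './FreeTableModel';

const macro: MacroDefinition = {
  title: 'Upper case',
  name: 'upperCase', // used in compile-error messages
  group: 'Text',
  type: 'transformValue',
  code: `return typeof value == 'string' ? value.toUpperCase() : value;`,
};
const errors: string[] = [];
const result = runMacro(macro, {}, createFreeTableModel(), false, [{ column: 'col1', row: 0 }], errors);
// result.rows[0].col1 == 'VAL1'; row 1 is untouched; errors stays empty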

View File

@@ -1,123 +0,0 @@
const _ = require('lodash');
const fp = require('lodash/fp');
class DatabaseAnalyser {
/**
*
* @param {import('@dbgate/types').EngineDriver} driver
*/
constructor(pool, driver) {
this.pool = pool;
this.driver = driver;
// this.result = DatabaseAnalyser.createEmptyStructure();
/** @type {import('@dbgate/types').DatabaseInfo} */
this.structure = null;
/** import('@dbgate/types').DatabaseModification[]) */
this.modifications = null;
}
async _runAnalysis() {
return DatabaseAnalyser.createEmptyStructure();
}
/** @returns {Promise<import('@dbgate/types').DatabaseModification[]>} */
async getModifications() {
if (this.structure == null) throw new Error('DatabaseAnalyse.getModifications - structure must be filled');
return null;
}
async fullAnalysis() {
return this._runAnalysis();
}
async incrementalAnalysis(structure) {
this.structure = structure;
this.modifications = await this.getModifications();
if (this.modifications == null) {
// modifications not implemented, perform full analysis
this.structure = null;
return this._runAnalysis();
}
if (this.modifications.length == 0) return null;
console.log('DB modifications detected:', this.modifications);
return this._runAnalysis();
}
mergeAnalyseResult(newlyAnalysed) {
if (this.structure == null) {
return {
...DatabaseAnalyser.createEmptyStructure(),
...newlyAnalysed,
};
}
const res = {};
for (const field of ['tables', 'views', 'functions', 'procedures', 'triggers']) {
const removedIds = this.modifications
.filter((x) => x.action == 'remove' && x.objectTypeField == field)
.map((x) => x.objectId);
const newArray = newlyAnalysed[field] || [];
const addedChangedIds = newArray.map((x) => x.objectId);
const removeAllIds = [...removedIds, ...addedChangedIds];
res[field] = _.sortBy(
[...this.structure[field].filter((x) => !removeAllIds.includes(x.objectId)), ...newArray],
(x) => x.pureName
);
}
return res;
// const {tables,views, functions, procedures, triggers} = this.structure;
// return {
// tables:
// }
}
// findObjectById(id) {
// return this.structure.tables.find((x) => x.objectId == id);
// }
}
/** @returns {import('@dbgate/types').DatabaseInfo} */
DatabaseAnalyser.createEmptyStructure = () => ({
tables: [],
views: [],
functions: [],
procedures: [],
triggers: [],
});
DatabaseAnalyser.byTableFilter = (table) => (x) => x.pureName == table.pureName && x.schemaName == table.schemaName;
DatabaseAnalyser.extractPrimaryKeys = (table, pkColumns) => {
const filtered = pkColumns.filter(DatabaseAnalyser.byTableFilter(table));
if (filtered.length == 0) return undefined;
return {
..._.pick(filtered[0], ['constraintName', 'schemaName', 'pureName']),
constraintType: 'primaryKey',
columns: filtered.map(fp.pick('columnName')),
};
};
DatabaseAnalyser.extractForeignKeys = (table, fkColumns) => {
const grouped = _.groupBy(fkColumns.filter(DatabaseAnalyser.byTableFilter(table)), 'constraintName');
return _.keys(grouped).map((constraintName) => ({
constraintName,
constraintType: 'foreignKey',
..._.pick(grouped[constraintName][0], [
'constraintName',
'schemaName',
'pureName',
'refSchemaName',
'refTableName',
'updateAction',
'deleteAction',
]),
columns: grouped[constraintName].map(fp.pick(['columnName', 'refColumnName'])),
}));
};
module.exports = DatabaseAnalyser;

View File

@@ -1,7 +0,0 @@
import types from "@dbgate/types";
declare function getDriver(
connection: string | { engine: string }
): types.EngineDriver;
export = getDriver;

View File

@@ -1,24 +0,0 @@
const _ = require("lodash");
const mssql = require("./mssql");
const mysql = require("./mysql");
const postgres = require("./postgres");
const drivers = {
mssql,
mysql,
postgres
};
function getDriver(connection) {
if (_.isString(connection)) {
return drivers[connection];
}
if (_.isPlainObject(connection)) {
const { engine } = connection;
if (engine) {
return drivers[engine];
}
}
throw new Error(`Cannot extract engine from ${connection}`);
}
module.exports = getDriver;

View File

@@ -1,311 +0,0 @@
const fp = require('lodash/fp');
const _ = require('lodash');
const sql = require('./sql');
const DatabaseAnalyser = require('../default/DatabaseAnalyser');
function objectTypeToField(type) {
switch (type.trim()) {
case 'U':
return 'tables';
case 'V':
return 'views';
case 'P':
return 'procedures';
case 'IF':
case 'FN':
case 'TF':
return 'functions';
case 'TR':
return 'triggers';
default:
return null;
}
}
/** @returns {import('@dbgate/types').DbType} */
function detectType(col) {
switch (col.dataType) {
case 'binary':
return {
typeCode: 'string',
isBinary: true,
};
case 'image':
return {
typeCode: 'string',
isBinary: true,
isBlob: true,
};
case 'timestamp':
return {
typeCode: 'string',
};
case 'varbinary':
return {
typeCode: 'string',
length: col.maxLength,
isBinary: true,
isVarLength: true,
};
case 'bit':
return {
typeCode: 'logical',
};
case 'tinyint':
return {
typeCode: 'int',
bytes: 1,
};
case 'mediumint':
return {
typeCode: 'int',
bytes: 3,
};
case 'datetime':
return {
typeCode: 'datetime',
subType: 'datetime',
};
case 'time':
return {
typeCode: 'datetime',
subType: 'time',
};
case 'year':
return {
typeCode: 'datetime',
subType: 'year',
};
case 'date':
return {
typeCode: 'datetime',
subType: 'date',
};
case 'decimal':
case 'numeric':
return {
typeCode: 'numeric',
precision: col.precision,
scale: col.scale,
};
case 'float':
return { typeCode: 'float' };
case 'uniqueidentifier':
return { typeCode: 'string' };
case 'smallint':
return {
typeCode: 'int',
bytes: 2,
};
case 'int':
return {
typeCode: 'int',
bytes: 4,
};
case 'bigint':
return {
typeCode: 'int',
bytes: 8,
};
case 'real':
return { typeCode: 'float' };
case 'char':
return {
typeCode: 'string',
length: col.maxLength,
};
case 'nchar':
return { typeCode: 'string', length: col.maxLength, isUnicode: true };
case 'varchar':
return {
typeCode: 'string',
length: col.maxLength,
isVarLength: true,
};
case 'nvarchar':
return {
typeCode: 'string',
length: col.maxLength,
isVarLength: true,
isUnicode: true,
};
case 'text':
return {
typeCode: 'blob',
isText: true,
};
case 'ntext':
return {
typeCode: 'blob',
isText: true,
isUnicode: true,
};
case 'xml':
return {
typeCode: 'blob',
isXml: true,
};
}
return {
typeCode: 'generic',
sql: col.dataType,
};
}
class MsSqlAnalyser extends DatabaseAnalyser {
constructor(pool, driver) {
super(pool, driver);
}
createQuery(resFileName, filterIdObjects) {
let res = sql[resFileName];
if (!this.modifications || !filterIdObjects || this.modifications.length == 0) {
res = res.replace('=[OBJECT_ID_CONDITION]', ' is not null');
} else {
const filterIds = this.modifications
.filter((x) => filterIdObjects.includes(x.objectTypeField) && (x.action == 'add' || x.action == 'change'))
.map((x) => x.objectId);
if (filterIds.length == 0) {
res = res.replace('=[OBJECT_ID_CONDITION]', ' = 0');
} else {
res = res.replace('=[OBJECT_ID_CONDITION]', ` in (${filterIds.join(',')})`);
}
}
return res;
}
async _runAnalysis() {
const tablesRows = await this.driver.query(this.pool, this.createQuery('tables', ['tables']));
const columnsRows = await this.driver.query(this.pool, this.createQuery('columns', ['tables']));
const pkColumnsRows = await this.driver.query(this.pool, this.createQuery('primaryKeys', ['tables']));
const fkColumnsRows = await this.driver.query(this.pool, this.createQuery('foreignKeys', ['tables']));
const sqlCodeRows = await this.driver.query(
this.pool,
this.createQuery('loadSqlCode', ['views', 'procedures', 'functions', 'triggers'])
);
const getCreateSql = (row) =>
sqlCodeRows.rows
.filter((x) => x.pureName == row.pureName && x.schemaName == row.schemaName)
.map((x) => x.codeText)
.join('');
const viewsRows = await this.driver.query(this.pool, this.createQuery('views', ['views']));
const programmableRows = await this.driver.query(
this.pool,
this.createQuery('programmables', ['procedures', 'functions'])
);
const viewColumnRows = await this.driver.query(this.pool, this.createQuery('viewColumns', ['views']));
const tables = tablesRows.rows.map((row) => ({
...row,
columns: columnsRows.rows
.filter((col) => col.objectId == row.objectId)
.map(({ isNullable, isIdentity, ...col }) => ({
...col,
notNull: !isNullable,
autoIncrement: !!isIdentity,
commonType: detectType(col),
})),
primaryKey: DatabaseAnalyser.extractPrimaryKeys(row, pkColumnsRows.rows),
foreignKeys: DatabaseAnalyser.extractForeignKeys(row, fkColumnsRows.rows),
}));
const views = viewsRows.rows.map((row) => ({
...row,
createSql: getCreateSql(row),
columns: viewColumnRows.rows
.filter((col) => col.objectId == row.objectId)
.map(({ isNullable, isIdentity, ...col }) => ({
...col,
notNull: !isNullable,
autoIncrement: !!isIdentity,
commonType: detectType(col),
})),
}));
const procedures = programmableRows.rows
.filter((x) => x.sqlObjectType.trim() == 'P')
.map((row) => ({
...row,
createSql: getCreateSql(row),
}));
const functions = programmableRows.rows
.filter((x) => ['FN', 'IF', 'TF'].includes(x.sqlObjectType.trim()))
.map((row) => ({
...row,
createSql: getCreateSql(row),
}));
return this.mergeAnalyseResult({
tables,
views,
procedures,
functions,
});
}
getDeletedObjectsForField(idArray, objectTypeField) {
return this.structure[objectTypeField]
.filter((x) => !idArray.includes(x.objectId))
.map((x) => ({
oldName: _.pick(x, ['schemaName', 'pureName']),
objectId: x.objectId,
action: 'remove',
objectTypeField,
}));
}
getDeletedObjects(idArray) {
return [
...this.getDeletedObjectsForField(idArray, 'tables'),
...this.getDeletedObjectsForField(idArray, 'views'),
...this.getDeletedObjectsForField(idArray, 'procedures'),
...this.getDeletedObjectsForField(idArray, 'functions'),
...this.getDeletedObjectsForField(idArray, 'triggers'),
];
}
async getModifications() {
const modificationsQueryData = await this.driver.query(this.pool, this.createQuery('modifications'));
const modifications = modificationsQueryData.rows.map((x) => {
const { type, objectId, modifyDate, schemaName, pureName } = x;
const field = objectTypeToField(type);
if (!this.structure[field]) return null;
// @ts-ignore
const obj = this.structure[field].find((x) => x.objectId == objectId);
// object not modified
if (obj && Math.abs(new Date(modifyDate).getTime() - new Date(obj.modifyDate).getTime()) < 1000) return null;
/** @type {import('@dbgate/types').DatabaseModification} */
const action = obj
? {
newName: { schemaName, pureName },
oldName: _.pick(obj, ['schemaName', 'pureName']),
action: 'change',
objectTypeField: field,
objectId,
}
: {
newName: { schemaName, pureName },
action: 'add',
objectTypeField: field,
objectId,
};
return action;
});
return [..._.compact(modifications), ...this.getDeletedObjects(modificationsQueryData.rows.map((x) => x.objectId))];
}
}
module.exports = MsSqlAnalyser;
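The [OBJECT_ID_CONDITION] placeholder always sits directly after an = in the SQL templates, so createQuery swaps the whole =[OBJECT_ID_CONDITION] token for a complete predicate. A sketch of the three cases (template shortened, object ids invented):

const template = 'where o.object_id =[OBJECT_ID_CONDITION]';

// full analysis: match everything
template.replace('=[OBJECT_ID_CONDITION]', ' is not null');
// incremental analysis with added/changed ids: reload just those objects
template.replace('=[OBJECT_ID_CONDITION]', ' in (245575913,277576027)');
// incremental analysis with nothing to reload: match nothing
template.replace('=[OBJECT_ID_CONDITION]', ' = 0');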


@@ -1,20 +0,0 @@
const SqlDumper = require('../default/SqlDumper');
class MsSqlDumper extends SqlDumper {
autoIncrement() {
this.put(' ^identity');
}
putStringValue(value) {
if (/[^\u0000-\u00ff]/.test(value)) {
this.putRaw('N');
}
super.putStringValue(value);
}
allowIdentityInsert(table, allow) {
this.putCmd('^set ^identity_insert %f %k;&n', table, allow ? 'on' : 'off');
}
}
module.exports = MsSqlDumper;
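The N-prefix logic only fires for strings containing characters outside Latin-1; a tiny illustration of that regex (the actual quoting lives in the base SqlDumper, which is not part of this diff):

const needsUnicodePrefix = (value) => /[^\u0000-\u00ff]/.test(value);

needsUnicodePrefix('hello'); // false -> dumped as 'hello'
needsUnicodePrefix('žlutý'); // true  -> dumped as N'žlutý'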


@@ -1,201 +0,0 @@
const _ = require('lodash');
const MsSqlAnalyser = require('./MsSqlAnalyser');
const MsSqlDumper = require('./MsSqlDumper');
/** @type {import('@dbgate/types').SqlDialect} */
const dialect = {
limitSelect: true,
rangeSelect: true,
offsetFetchRangeSyntax: true,
stringEscapeChar: "'",
quoteIdentifier(s) {
return `[${s}]`;
},
};
function extractColumns(columns) {
const mapper = {};
const res = _.sortBy(_.values(columns), 'index').map((col) => ({
...col,
columnName: col.name,
notNull: !col.nullable,
autoIncrement: !!col.identity,
}));
const generateName = () => {
let index = 1;
while (res.find((x) => x.columnName == `col${index}`)) index += 1;
return `col${index}`;
};
for (const col of res) {
if (!col.columnName) {
const newName = generateName();
mapper[col.columnName] = newName;
col.columnName = newName;
}
}
return [res, mapper];
}
/** @type {import('@dbgate/types').EngineDriver} */
const driver = {
async connect(nativeModules, { server, port, user, password, database }) {
const pool = await nativeModules.mssql.connect({
server,
port,
user,
password,
database,
requestTimeout: 1000 * 3600,
options: {
enableArithAbort: true,
},
});
pool._nativeModules = nativeModules;
return pool;
},
// @ts-ignore
async query(pool, sql) {
const resp = await pool.request().query(sql);
const res = {};
if (resp.recordset) {
const [columns] = extractColumns(resp.recordset.columns);
res.columns = columns;
res.rows = resp.recordset;
}
if (resp.rowsAffected) {
res.rowsAffected = _.sum(resp.rowsAffected);
}
return res;
},
async stream(pool, sql, options) {
const request = await pool.request();
let currentMapper = null;
const handleInfo = (info) => {
const { message, lineNumber, procName } = info;
options.info({
message,
line: lineNumber,
procedure: procName,
time: new Date(),
severity: 'info',
});
};
const handleDone = (result) => {
options.done(result);
};
const handleRow = (row) => {
if (currentMapper) {
row = { ...row };
for (const colname of _.keys(currentMapper)) {
const newcolname = currentMapper[colname];
row[newcolname] = row[colname];
if (_.isArray(row[newcolname])) row[newcolname] = row[newcolname].join(',');
delete row[colname];
}
}
options.row(row);
};
const handleRecordset = (columns) => {
const [extractedColumns, mapper] = extractColumns(columns);
currentMapper = mapper;
options.recordset(extractedColumns);
};
const handleError = (error) => {
const { message, lineNumber, procName } = error;
options.info({
message,
line: lineNumber,
procedure: procName,
time: new Date(),
severity: 'error',
});
};
request.stream = true;
request.on('recordset', handleRecordset);
request.on('row', handleRow);
request.on('error', handleError);
request.on('done', handleDone);
request.on('info', handleInfo);
request.query(sql);
return request;
},
async readableStream(pool, sql) {
const request = await pool.request();
const { stream } = pool._nativeModules;
const pass = new stream.PassThrough({
objectMode: true,
highWaterMark: 100,
});
request.stream = true;
request.on('row', (row) => pass.write(row));
request.on('error', (err) => {
console.error(err);
pass.end();
});
request.on('done', () => pass.end());
request.query(sql);
return pass;
},
async getVersion(pool) {
const { version } = (await this.query(pool, 'SELECT @@VERSION AS version')).rows[0];
return { version };
},
async listDatabases(pool) {
const { rows } = await this.query(pool, 'SELECT name FROM sys.databases order by name');
return rows;
},
async analyseFull(pool) {
const analyser = new MsSqlAnalyser(pool, this);
return analyser.fullAnalysis();
},
async analyseIncremental(pool, structure) {
const analyser = new MsSqlAnalyser(pool, this);
return analyser.incrementalAnalysis(structure);
},
createDumper() {
return new MsSqlDumper(this);
},
dialect,
engine: 'mssql',
};
module.exports = driver;
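An end-to-end sketch of the driver contract; the require path and connection values are placeholders, and nativeModules is assumed to carry the mssql package plus Node's stream module, matching how connect and readableStream dereference it:

const driver = require('./mssql'); // path assumed
const nativeModules = { mssql: require('mssql'), stream: require('stream') };

async function main() {
  const pool = await driver.connect(nativeModules, {
    server: 'localhost',
    port: 1433,
    user: 'sa',
    password: 'secret', // placeholder
    database: 'master',
  });
  const { rows, columns } = await driver.query(pool, 'select name from sys.tables');
  console.log(columns.map((c) => c.columnName), rows.length);
}

main().catch(console.error);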


@@ -1,15 +0,0 @@
module.exports = `
select c.name as columnName, t.name as dataType, c.object_id as objectId, c.is_identity as isIdentity,
c.max_length as maxLength, c.precision, c.scale, c.is_nullable as isNullable,
d.definition as defaultValue, d.name as defaultConstraint,
m.definition as computedExpression, m.is_persisted as isPersisted, c.column_id as columnId,
-- TODO only if version >= 2008
c.is_sparse as isSparse
from sys.columns c
inner join sys.types t on c.system_type_id = t.system_type_id and c.user_type_id = t.user_type_id
inner join sys.objects o on c.object_id = o.object_id
left join sys.default_constraints d on c.default_object_id = d.object_id
left join sys.computed_columns m on m.object_id = c.object_id and m.column_id = c.column_id
where o.type = 'U' and o.object_id =[OBJECT_ID_CONDITION]
order by c.column_id
`;


@@ -1,40 +0,0 @@
module.exports = `
SELECT
schemaName = FK.TABLE_SCHEMA,
pureName = FK.TABLE_NAME,
columnName = CU.COLUMN_NAME,
refSchemaName = ISNULL(IXS.name, PK.TABLE_SCHEMA),
refTableName = ISNULL(IXT.name, PK.TABLE_NAME),
refColumnName = IXCC.name,
constraintName = C.CONSTRAINT_NAME,
updateAction = rc.UPDATE_RULE,
deleteAction = rc.DELETE_RULE,
objectId = o.object_id
FROM INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS C
INNER JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS FK ON C.CONSTRAINT_NAME = FK.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS PK ON C.UNIQUE_CONSTRAINT_NAME = PK.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE CU ON C.CONSTRAINT_NAME = CU.CONSTRAINT_NAME
INNER JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc ON FK.CONSTRAINT_NAME = rc.CONSTRAINT_NAME
LEFT JOIN sys.indexes IX ON IX.name = C.UNIQUE_CONSTRAINT_NAME
LEFT JOIN sys.objects IXT ON IXT.object_id = IX.object_id
LEFT JOIN sys.index_columns IXC ON IX.index_id = IXC.index_id and IX.object_id = IXC.object_id
LEFT JOIN sys.columns IXCC ON IXCC.column_id = IXC.column_id AND IXCC.object_id = IXC.object_id
LEFT JOIN sys.schemas IXS ON IXT.schema_id = IXS.schema_id
inner join sys.objects o on FK.TABLE_NAME = o.name
inner join sys.schemas s on o.schema_id = s.schema_id and FK.TABLE_SCHEMA = s.name
where o.object_id =[OBJECT_ID_CONDITION]
`;


@@ -1,21 +0,0 @@
const columns = require('./columns');
const foreignKeys = require('./foreignKeys');
const primaryKeys = require('./primaryKeys');
const tables = require('./tables');
const modifications = require('./modifications');
const loadSqlCode = require('./loadSqlCode');
const views = require('./views');
const programmables = require('./programmables');
const viewColumns = require('./viewColumns');
module.exports = {
columns,
tables,
foreignKeys,
primaryKeys,
modifications,
loadSqlCode,
views,
programmables,
viewColumns,
};


@@ -1,8 +0,0 @@
module.exports = `
select s.name as pureName, u.name as schemaName, c.text AS codeText
from sys.objects s
inner join sys.syscomments c on s.object_id = c.id
inner join sys.schemas u on u.schema_id = s.schema_id
where (s.object_id =[OBJECT_ID_CONDITION])
order by u.name, s.name, c.colid
`;


@@ -1,6 +0,0 @@
module.exports = `
select o.object_id as objectId, o.modify_date as modifyDate, o.type, o.name as pureName, s.name as schemaName
from sys.objects o
inner join sys.schemas s on o.schema_id = s.schema_id
where o.type in ('U', 'V', 'P', 'IF', 'FN', 'TF') -- , 'TR' - triggers disabled
`;


@@ -1,14 +0,0 @@
module.exports = `
select o.object_id, pureName = t.Table_Name, schemaName = t.Table_Schema, columnName = c.Column_Name, constraintName=t.constraint_name from
INFORMATION_SCHEMA.TABLE_CONSTRAINTS t,
sys.objects o,
sys.schemas s,
INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE c
where
c.Constraint_Name = t.Constraint_Name
and t.table_name = o.name
and o.schema_id = s.schema_id and t.Table_Schema = s.name
and c.Table_Name = t.Table_Name
and Constraint_Type = 'PRIMARY KEY'
and o.object_id =[OBJECT_ID_CONDITION]
`;


@@ -1,6 +0,0 @@
module.exports = `
select o.name as pureName, s.name as schemaName, o.object_id as objectId, o.create_date as createDate, o.modify_date as modifyDate, o.type as sqlObjectType
from sys.objects o
inner join sys.schemas s on o.schema_id = s.schema_id
where o.type in ('P', 'IF', 'FN', 'TF') and o.object_id =[OBJECT_ID_CONDITION]
`;


@@ -1,8 +0,0 @@
module.exports = `
select
o.name as pureName, s.name as schemaName, o.object_id as objectId,
o.create_date as createDate, o.modify_date as modifyDate
from sys.tables o
inner join sys.schemas s on o.schema_id = s.schema_id
where o.object_id =[OBJECT_ID_CONDITION]
`;


@@ -1,18 +0,0 @@
module.exports = `
select
o.object_id AS objectId,
col.TABLE_SCHEMA as schemaName,
col.TABLE_NAME as pureName,
col.COLUMN_NAME as columnName,
col.IS_NULLABLE as isNullable,
col.DATA_TYPE as dataType,
col.CHARACTER_MAXIMUM_LENGTH,
col.NUMERIC_PRECISION as precision,
col.NUMERIC_SCALE as scale,
col.COLUMN_DEFAULT
FROM sys.objects o
INNER JOIN sys.schemas u ON u.schema_id=o.schema_id
INNER JOIN INFORMATION_SCHEMA.COLUMNS col ON col.TABLE_NAME = o.name AND col.TABLE_SCHEMA = u.name
WHERE o.type in ('V') and o.object_id =[OBJECT_ID_CONDITION]
order by col.ORDINAL_POSITION
`;


@@ -1,10 +0,0 @@
module.exports = `
SELECT
o.name as pureName,
u.name as schemaName,
o.object_id as objectId,
o.create_date as createDate,
o.modify_date as modifyDate
FROM sys.objects o INNER JOIN sys.schemas u ON u.schema_id=o.schema_id
WHERE type in ('V') and o.object_id =[OBJECT_ID_CONDITION]
`;


@@ -1,41 +0,0 @@
const sql = require('./sql');
const DatabaseAnalyser = require('../default/DatabaseAnalyser');
class MySqlAnalyser extends DatabaseAnalyser {
constructor(pool, driver) {
super(pool, driver);
}
createQuery(resFileName) {
let res = sql[resFileName];
res = res.replace('=[OBJECT_NAME_CONDITION]', ' is not null');
res = res.replace('#DATABASE#', this.pool._database_name);
return res;
}
async _runAnalysis() {
const tables = await this.driver.query(this.pool, this.createQuery('tables'));
const columns = await this.driver.query(this.pool, this.createQuery('columns'));
const pkColumns = await this.driver.query(this.pool, this.createQuery('primaryKeys'));
const fkColumns = await this.driver.query(this.pool, this.createQuery('foreignKeys'));
return this.mergeAnalyseResult({
tables: tables.rows.map((table) => ({
...table,
columns: columns.rows
.filter((col) => col.pureName == table.pureName)
.map(({ isNullable, extra, ...col }) => ({
...col,
notNull: !isNullable,
autoIncrement: extra && extra.toLowerCase().includes('auto_increment'),
})),
primaryKey: DatabaseAnalyser.extractPrimaryKeys(table, pkColumns.rows),
foreignKeys: DatabaseAnalyser.extractForeignKeys(table, fkColumns.rows),
})),
});
}
}
module.exports = MySqlAnalyser;
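Column flags in the MySQL analyser come straight from INFORMATION_SCHEMA; the auto-increment test in particular just inspects the EXTRA column, as this small illustration shows:

const isAutoIncrement = (extra) => !!(extra && extra.toLowerCase().includes('auto_increment'));

isAutoIncrement('auto_increment');              // true
isAutoIncrement('on update CURRENT_TIMESTAMP'); // false
isAutoIncrement(null);                          // false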


@@ -1,5 +0,0 @@
const SqlDumper = require('../default/SqlDumper');
class MySqlDumper extends SqlDumper {}
module.exports = MySqlDumper;


@@ -1,115 +0,0 @@
const MySqlAnalyser = require('./MySqlAnalyser');
const MySqlDumper = require('./MySqlDumper');
/** @type {import('@dbgate/types').SqlDialect} */
const dialect = {
rangeSelect: true,
stringEscapeChar: '\\',
quoteIdentifier(s) {
return '`' + s + '`';
},
};
function extractColumns(fields) {
if (fields)
return fields.map((col) => ({
columnName: col.name,
}));
return null;
}
/** @type {import('@dbgate/types').EngineDriver} */
const driver = {
async connect(nativeModules, { server, port, user, password, database }) {
const connection = nativeModules.mysql.createConnection({
host: server,
port,
user,
password,
database,
});
connection._database_name = database;
connection._nativeModules = nativeModules;
return connection;
},
async query(connection, sql) {
return new Promise((resolve, reject) => {
connection.query(sql, function (error, results, fields) {
if (error) {
reject(error);
return; // don't resolve after a failed query
}
resolve({ rows: results, columns: extractColumns(fields) });
});
});
},
async stream(connection, sql, options) {
const query = connection.query(sql);
const handleEnd = (result) => {
options.done(result);
};
const handleRow = (row) => {
options.row(row);
};
const handleFields = (columns) => {
options.recordset(extractColumns(columns));
};
const handleError = (error) => {
console.error(error);
const { message, lineNumber, procName } = error;
options.info({
message,
line: lineNumber,
procedure: procName,
time: new Date(),
severity: 'error',
});
};
query.on('error', handleError).on('fields', handleFields).on('result', handleRow).on('end', handleEnd);
return query;
},
async readableStream(connection, sql) {
const query = connection.query(sql);
return query.stream({ highWaterMark: 100 });
},
async getVersion(connection) {
const { rows } = await this.query(connection, "show variables like 'version'");
const version = rows[0].Value;
return { version };
},
async analyseFull(pool) {
const analyser = new MySqlAnalyser(pool, this);
return analyser.fullAnalysis();
},
async analyseIncremental(pool, structure) {
const analyser = new MySqlAnalyser(pool, this);
return analyser.incrementalAnalysis(structure);
},
async listDatabases(connection) {
const { rows } = await this.query(connection, 'show databases');
return rows.map((x) => ({ name: x.Database }));
},
createDumper() {
return new MySqlDumper(this);
},
dialect,
engine: 'mysql',
};
module.exports = driver;
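Usage sketch for the MySQL driver; the require path and connection details are placeholders, and only the mysql package is needed in nativeModules for this path:

const driver = require('./mysql'); // path assumed
const nativeModules = { mysql: require('mysql') };

async function main() {
  const connection = await driver.connect(nativeModules, {
    server: 'localhost',
    port: 3306,
    user: 'root',
    password: 'secret', // placeholder
    database: 'test',
  });
  const { rows, columns } = await driver.query(connection, 'select 1 as answer');
  console.log(rows, columns); // rows ≈ [ { answer: 1 } ], columns = [ { columnName: 'answer' } ]
}

main().catch(console.error);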


@@ -1,15 +0,0 @@
module.exports = `
select
TABLE_NAME as pureName,
COLUMN_NAME as columnName,
IS_NULLABLE as isNullable,
DATA_TYPE as dataType,
CHARACTER_MAXIMUM_LENGTH,
NUMERIC_PRECISION,
NUMERIC_SCALE,
COLUMN_DEFAULT,
EXTRA as extra
from INFORMATION_SCHEMA.COLUMNS
where TABLE_SCHEMA = '#DATABASE#' and TABLE_NAME =[OBJECT_NAME_CONDITION]
order by ORDINAL_POSITION
`;


@@ -1,17 +0,0 @@
module.exports = `
select
REFERENTIAL_CONSTRAINTS.CONSTRAINT_NAME as constraintName,
REFERENTIAL_CONSTRAINTS.TABLE_NAME as pureName,
REFERENTIAL_CONSTRAINTS.UPDATE_RULE as updateAction,
REFERENTIAL_CONSTRAINTS.DELETE_RULE as deleteAction,
REFERENTIAL_CONSTRAINTS.REFERENCED_TABLE_NAME as refTableName,
KEY_COLUMN_USAGE.COLUMN_NAME as columnName,
KEY_COLUMN_USAGE.REFERENCED_COLUMN_NAME as refColumnName
from INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS
inner join INFORMATION_SCHEMA.KEY_COLUMN_USAGE
on REFERENTIAL_CONSTRAINTS.TABLE_NAME = KEY_COLUMN_USAGE.TABLE_NAME
and REFERENTIAL_CONSTRAINTS.CONSTRAINT_NAME = KEY_COLUMN_USAGE.CONSTRAINT_NAME
and REFERENTIAL_CONSTRAINTS.CONSTRAINT_SCHEMA = KEY_COLUMN_USAGE.CONSTRAINT_SCHEMA
where REFERENTIAL_CONSTRAINTS.CONSTRAINT_SCHEMA = '#DATABASE#' and REFERENTIAL_CONSTRAINTS.TABLE_NAME =[OBJECT_NAME_CONDITION]
order by KEY_COLUMN_USAGE.ORDINAL_POSITION
`;


@@ -1,13 +0,0 @@
const columns = require('./columns');
const tables = require('./tables');
const primaryKeys = require('./primaryKeys');
const foreignKeys = require('./foreignKeys');
const tableModifications = require('./tableModifications');
module.exports = {
columns,
tables,
primaryKeys,
foreignKeys,
tableModifications,
};


@@ -1,12 +0,0 @@
module.exports = `select
TABLE_CONSTRAINTS.CONSTRAINT_NAME as constraintName,
TABLE_CONSTRAINTS.TABLE_NAME as pureName,
KEY_COLUMN_USAGE.COLUMN_NAME as columnName
from INFORMATION_SCHEMA.TABLE_CONSTRAINTS
inner join INFORMATION_SCHEMA.KEY_COLUMN_USAGE
on TABLE_CONSTRAINTS.TABLE_NAME = KEY_COLUMN_USAGE.TABLE_NAME
and TABLE_CONSTRAINTS.CONSTRAINT_NAME = KEY_COLUMN_USAGE.CONSTRAINT_NAME
and TABLE_CONSTRAINTS.CONSTRAINT_SCHEMA = KEY_COLUMN_USAGE.CONSTRAINT_SCHEMA
where TABLE_CONSTRAINTS.CONSTRAINT_SCHEMA = '#DATABASE#' and TABLE_CONSTRAINTS.TABLE_NAME =[OBJECT_NAME_CONDITION] AND TABLE_CONSTRAINTS.CONSTRAINT_TYPE = 'PRIMARY KEY'
order by KEY_COLUMN_USAGE.ORDINAL_POSITION
`;


@@ -1,7 +0,0 @@
module.exports = `
select
TABLE_NAME,
case when ENGINE='InnoDB' then CREATE_TIME else coalesce(UPDATE_TIME, CREATE_TIME) end as ALTER_TIME
from information_schema.tables
where TABLE_SCHEMA = '#DATABASE#'
`;


@@ -1,7 +0,0 @@
module.exports = `
select
TABLE_NAME as pureName,
case when ENGINE='InnoDB' then CREATE_TIME else coalesce(UPDATE_TIME, CREATE_TIME) end as alterTime
from information_schema.tables
where TABLE_SCHEMA = '#DATABASE#' and TABLE_NAME =[OBJECT_NAME_CONDITION];
`;


@@ -1,16 +0,0 @@
{
"name": "@dbgate/engines",
"version": "0.1.0",
"private": true,
"main": "index.js",
"typings": "./index.d.ts",
"devDependencies": {
"@dbgate/types": "^0.1.0",
"@types/lodash": "^4.14.149",
"nodemon": "^2.0.2",
"typescript": "^3.7.5"
},
"dependencies": {
"lodash": "^4.17.15"
}
}


@@ -1,40 +0,0 @@
const sql = require('./sql');
const DatabaseAnalyser = require('../default/DatabaseAnalyser');
class PostgreAnalyser extends DatabaseAnalyser {
constructor(pool, driver) {
super(pool, driver);
}
createQuery(resFileName) {
let res = sql[resFileName];
res = res.replace('=[OBJECT_ID_CONDITION]', ' is not null');
return res;
}
async _runAnalysis() {
const tables = await this.driver.query(this.pool, this.createQuery('tableModifications'));
const columns = await this.driver.query(this.pool, this.createQuery('columns'));
const pkColumns = await this.driver.query(this.pool, this.createQuery('primaryKeys'));
const fkColumns = await this.driver.query(this.pool, this.createQuery('foreignKeys'));
return this.mergeAnalyseResult({
tables: tables.rows.map((table) => ({
...table,
columns: columns.rows
.filter((col) => col.pureName == table.pureName && col.schemaName == table.schemaName)
.map(({ isNullable, ...col }) => ({
...col,
notNull: !isNullable,
})),
primaryKey: DatabaseAnalyser.extractPrimaryKeys(table, pkColumns.rows),
foreignKeys: DatabaseAnalyser.extractForeignKeys(table, fkColumns.rows),
})),
});
}
}
module.exports = PostgreAnalyser;


@@ -1,5 +0,0 @@
const SqlDumper = require('../default/SqlDumper');
class PostgreDumper extends SqlDumper {}
module.exports = PostgreDumper;


@@ -1,117 +0,0 @@
const _ = require('lodash');
const PostgreAnalyser = require('./PostgreAnalyser');
const PostgreDumper = require('./PostgreDumper');
/** @type {import('@dbgate/types').SqlDialect} */
const dialect = {
rangeSelect: true,
stringEscapeChar: '\\',
quoteIdentifier(s) {
return '"' + s + '"';
},
};
/** @type {import('@dbgate/types').EngineDriver} */
const driver = {
async connect(nativeModules, { server, port, user, password, database }) {
const client = new nativeModules.pg.Client({
host: server,
port,
user,
password,
database: database || 'postgres',
});
await client.connect();
client._nativeModules = nativeModules;
return client;
},
async query(client, sql) {
const res = await client.query(sql);
return { rows: res.rows, columns: res.fields };
},
async stream(client, sql, options) {
const query = new client._nativeModules.pgQueryStream(sql);
const stream = client.query(query);
let wasHeader = false;
const handleEnd = (result) => {
options.done(result);
};
const handleReadable = () => {
let row = stream.read();
if (!wasHeader && row) {
options.recordset(_.keys(row).map((columnName) => ({ columnName })));
wasHeader = true;
}
while (row) {
options.row(row);
row = stream.read();
}
};
const handleError = (error) => {
console.error(error);
const { message, lineNumber, procName } = error;
options.info({
message,
line: lineNumber,
procedure: procName,
time: new Date(),
severity: 'error',
});
};
stream.on('error', handleError);
stream.on('readable', handleReadable);
stream.on('end', handleEnd);
return stream;
},
async getVersion(client) {
const { rows } = await this.query(client, 'SELECT version()');
const { version } = rows[0];
return { version };
},
async analyseFull(pool) {
const analyser = new PostgreAnalyser(pool, this);
return analyser.fullAnalysis();
},
async analyseIncremental(pool, structure) {
const analyser = new PostgreAnalyser(pool, this);
return analyser.incrementalAnalysis(structure);
},
createDumper() {
return new PostgreDumper(this);
},
async listDatabases(client) {
const { rows } = await this.query(client, 'SELECT datname AS name FROM pg_database WHERE datistemplate = false');
return rows;
},
dialect,
engine: 'postgres',
};
module.exports = driver;
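A sketch of the Postgres streaming path; the require path and connection values are placeholders, pgQueryStream is assumed to be the pg-query-stream package (stream() instantiates it with new), and the options object mirrors the callbacks the driver invokes:

const driver = require('./postgres'); // path assumed
const nativeModules = { pg: require('pg'), pgQueryStream: require('pg-query-stream') };

async function main() {
  const client = await driver.connect(nativeModules, {
    server: 'localhost',
    port: 5432,
    user: 'postgres',
    password: 'secret', // placeholder
    database: 'postgres',
  });
  await driver.stream(client, 'select datname from pg_database', {
    recordset: (cols) => console.log('columns:', cols),
    row: (row) => console.log('row:', row),
    info: (msg) => console.log('info:', msg),
    done: () => client.end(),
  });
}

main().catch(console.error);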


@@ -1,19 +0,0 @@
module.exports = `
select
table_schema as "schemaName",
table_name as "pureName",
column_name as "columnName",
is_nullable as "isNullable",
data_type as dataType,
character_maximum_length,
numeric_precision,
numeric_scale,
column_default
from information_schema.columns
where
table_schema <> 'information_schema'
and table_schema <> 'pg_catalog'
and table_schema !~ '^pg_toast'
and 'table:' || table_schema || '.' || table_name =[OBJECT_ID_CONDITION]
order by ordinal_position
`;


@@ -1,24 +0,0 @@
module.exports = `
select
fk.constraint_name as "constraintName",
fk.constraint_schema as "constraintSchema",
base.table_name as "pureName",
base.table_schema as "schemaName",
fk.update_rule as "updateAction",
fk.delete_rule as "deleteAction",
ref.table_name as "refTableName",
ref.table_schema as "refSchemaName",
basecol.column_name as "columnName",
refcol.column_name as "refColumnName"
from information_schema.referential_constraints fk
inner join information_schema.table_constraints base on fk.constraint_name = base.constraint_name and fk.constraint_schema = base.constraint_schema
inner join information_schema.table_constraints ref on fk.unique_constraint_name = ref.constraint_name and fk.unique_constraint_schema = ref.constraint_schema
inner join information_schema.key_column_usage basecol on base.table_name = basecol.table_name and base.constraint_name = basecol.constraint_name
inner join information_schema.key_column_usage refcol on ref.table_name = refcol.table_name and ref.constraint_name = refcol.constraint_name and basecol.ordinal_position = refcol.ordinal_position
where
base.table_schema <> 'information_schema'
and base.table_schema <> 'pg_catalog'
and base.table_schema !~ '^pg_toast'
and 'table:' || base.table_schema || '.' || base.table_name =[OBJECT_ID_CONDITION]
order by basecol.ordinal_position
`;


@@ -1,11 +0,0 @@
const columns = require('./columns');
const tableModifications = require('./tableModifications');
const primaryKeys = require('./primaryKeys');
const foreignKeys = require('./foreignKeys');
module.exports = {
columns,
tableModifications,
primaryKeys,
foreignKeys,
};


@@ -1,17 +0,0 @@
module.exports = `
select
table_constraints.constraint_schema as "constraintSchema",
table_constraints.constraint_name as "constraintName",
table_constraints.table_schema as "schemaName",
table_constraints.table_name as "pureName",
key_column_usage.column_name as "columnName"
from information_schema.table_constraints
inner join information_schema.key_column_usage on table_constraints.table_name = key_column_usage.table_name and table_constraints.constraint_name = key_column_usage.constraint_name
where
table_constraints.table_schema <> 'information_schema'
and table_constraints.table_schema <> 'pg_catalog'
and table_constraints.table_schema !~ '^pg_toast'
and table_constraints.constraint_type = 'PRIMARY KEY'
and 'table:' || table_constraints.table_schema || '.' || table_constraints.table_name =[OBJECT_ID_CONDITION]
order by key_column_usage.ordinal_position
`;


@@ -1,52 +0,0 @@
module.exports = `
with pkey as
(
select cc.conrelid, format(E'create constraint %I primary key(%s);\\n', cc.conname,
string_agg(a.attname, ', '
order by array_position(cc.conkey, a.attnum))) pkey
from pg_catalog.pg_constraint cc
join pg_catalog.pg_class c on c.oid = cc.conrelid
join pg_catalog.pg_attribute a on a.attrelid = cc.conrelid
and a.attnum = any(cc.conkey)
where cc.contype = 'p'
group by cc.conrelid, cc.conname
)
SELECT oid as "objectId", nspname as "schemaName", relname as "pureName",
md5('CREATE TABLE ' || nspname || '.' || relname || E'\\n(\\n' ||
array_to_string(
array_agg(
' ' || column_name || ' ' || type || ' '|| not_null
)
, E',\\n'
) || E'\\n);\\n' || (select pkey from pkey where pkey.conrelid = oid)) as "hash"
from
(
SELECT
c.relname, a.attname AS column_name, c.oid,
n.nspname,
pg_catalog.format_type(a.atttypid, a.atttypmod) as type,
case
when a.attnotnull
then 'NOT NULL'
else 'NULL'
END as not_null
FROM pg_class c,
pg_namespace n,
pg_attribute a,
pg_type t
WHERE c.relkind = 'r'
AND a.attnum > 0
AND a.attrelid = c.oid
AND a.atttypid = t.oid
AND n.oid = c.relnamespace
AND n.nspname <> 'pg_catalog'
AND n.nspname <> 'information_schema'
AND n.nspname !~ '^pg_toast'
ORDER BY a.attnum
) as tabledefinition
where 'table:' || nspname || '.' || relname =[OBJECT_ID_CONDITION]
group by relname, nspname, oid
`;
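This last query collapses each table definition into an md5 hash keyed by a composed object id ('table:schema.name'), so an incremental pass can presumably detect structural changes by comparing hashes across runs; that consumer is not part of this diff, but the comparison would look roughly like:

// rows: output of the query above; oldTables: previously cached structure
const changedTables = (oldTables, rows) =>
  rows.filter((row) => {
    const cached = oldTables.find((t) => t.objectId == row.objectId);
    return !cached || cached.hash != row.hash;
  });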

Some files were not shown because too many files have changed in this diff.