Compare commits

...

245 Commits

Author SHA1 Message Date
Jan Prochazka 9abb1ed19c v5.2.5 2023-03-17 13:33:32 +01:00
Jan Prochazka 58f4370bb6 v5.2.5-beta.17 2023-03-13 20:42:39 +01:00
Jan Prochazka 86c02a76d0 reverted xlsx 2023-03-13 20:42:27 +01:00
Jan Prochazka a2bc636396 convertor from forage 2023-03-13 20:33:51 +01:00
Jan Prochazka 00bf1e64a1 removed misleading WidgetColumnBarItrem.show attr 2023-03-13 20:18:13 +01:00
Jan Prochazka a45782098a fixes database widget collapsing problems 2023-03-13 20:03:52 +01:00
Jan Prochazka df4230ea1d pinned objects fixes 2023-03-13 19:50:06 +01:00
Jan Prochazka 886e0a059e v5.2.5-beta.16 2023-03-12 09:09:39 +01:00
Jan Prochazka f83c4ef799 Merge branch 'develop' 2023-03-12 09:08:40 +01:00
Jan Prochazka 66d1b4ca49 disable collapse last widget items 2023-03-12 09:06:11 +01:00
Jan Prochazka b2f55522a8 fixed Resizing window resets window contents #479 2023-03-12 08:45:28 +01:00
Jan Prochazka edc3a7409a tool strip wrappable, table data commands moved from statusbar to toolstrip 2023-03-12 08:36:22 +01:00
Jan Prochazka 09e584326f better mac icon 2023-03-11 11:55:23 +01:00
Jan Prochazka feed0cd8db postgres analyse index desc #514 2023-03-11 10:33:13 +01:00
Jan Prochazka 9d4105335f v5.2.5-beta.14 2023-03-06 19:11:59 +01:00
Jan Prochazka c15261227b upgraded ubuntu builder version 18.04=>22.04 2023-03-06 19:11:47 +01:00
Jan Prochazka de567bdd31 v5.2.5-beta.11 2023-03-06 18:49:58 +01:00
Jan Prochazka 6736e8d0cf allow collapse multitab group 2023-03-06 18:49:30 +01:00
Jan Prochazka 36ccba7988 fixed split margin 2023-03-06 18:49:26 +01:00
Jan Prochazka 75c5d30ad3 v5.2.5-beta.9 2023-03-05 20:28:22 +01:00
Jan Prochazka cd10095dc0 Merge branch 'master' into develop 2023-03-05 17:20:22 +01:00
Jan Prochazka a64e42f1c2 changelog 2023-03-05 17:19:03 +01:00
Jan Prochazka 3c3f8514da v5.2.4 2023-03-05 17:17:58 +01:00
Jan Prochazka 961d11b610 split group condition 2023-03-05 17:13:05 +01:00
Jan Prochazka c646a83608 close functions in tab group 2023-03-05 17:11:02 +01:00
Jan Prochazka d1bdebb4ed abilit to split whole group 2023-03-05 16:59:41 +01:00
Jan Prochazka aa4406942f tabs saves to forage instead of storage 2023-03-05 15:43:45 +01:00
Jan Prochazka ff044ebec8 tab drag&drop fix 2023-03-05 15:28:29 +01:00
Jan Prochazka f5d41c89e6 show selected tab in multitab 2023-03-05 12:20:07 +01:00
Jan Prochazka d283429f40 open new tab - corrent multiTabIndex 2023-03-05 12:14:43 +01:00
Jan Prochazka 15d005be13 drag & drop between mutlitabs 2023-03-05 11:47:13 +01:00
Jan Prochazka f404e9956e refactor - visibleSecondary not stored, computed in component 2023-03-05 11:34:42 +01:00
Jan Prochazka 2dadd1f437 vertical split tabs #394 2023-03-05 10:43:04 +01:00
Jan Prochazka 1061d2aba2 tabs container style refactor 2023-03-04 10:50:10 +01:00
Jan Prochazka ff36870763 tabs files refactor 2023-03-04 10:39:57 +01:00
Jan Prochazka 991176d433 v5.2.4-beta.2 2023-03-03 18:26:27 +01:00
Jan Prochazka 406e3c022c mac icon #494 2023-03-03 18:25:58 +01:00
Jan Prochazka 2688c31123 v5.2.4-alpha.1 2023-03-03 18:05:08 +01:00
Jan Prochazka 578282c419 fixed reference #508 2023-03-03 18:04:51 +01:00
Jan Prochazka 9505643a26 v5.2.3 2023-02-27 18:11:49 +01:00
Jan Prochazka 2169d1a288 changelog 2023-02-26 17:07:44 +01:00
Jan Prochazka 62ebe49ac0 v5.2.3-beta.9 2023-02-26 16:50:57 +01:00
Jan Prochazka a2043b237f multi column condition in perspectives 2023-02-26 16:48:32 +01:00
Jan Prochazka 7c03d31b84 mutli column condition for JSL data 2023-02-26 15:44:29 +01:00
Jan Prochazka b26be02203 multi column filter #491 2023-02-26 15:26:39 +01:00
Jan Prochazka a251e92598 filters refactor fix 2023-02-26 15:00:54 +01:00
Jan Prochazka 1a28922a62 refactor - simplified filters component 2023-02-26 14:42:54 +01:00
Jan Prochazka 65c3ff8ec9 fix 2023-02-26 11:42:19 +01:00
Jan Prochazka 56fe578884 removed free table refs 2023-02-26 11:39:30 +01:00
Jan Prochazka 4dbb3a72d4 fixed problem with closing queries in progress 2023-02-26 11:08:20 +01:00
Jan Prochazka 5fd7982f06 marked oracle support as experimantal 2023-02-26 10:15:18 +01:00
Jan Prochazka 0ca5114b71 Merge branch 'develop' 2023-02-26 10:14:42 +01:00
Jan Prochazka d1ae7fe6e9 oracle support marked as experimental 2023-02-26 10:08:41 +01:00
Jan Prochazka 1417f53c56 disable SSL tab for oracle 2023-02-26 10:03:25 +01:00
Jan Prochazka 7a606cf8ef oracle port config #496 2023-02-26 10:01:53 +01:00
Jan Prochazka 622773fccd optimalization of loading oracle structure 2023-02-26 09:40:12 +01:00
Jan Prochazka 64ceea3779 fiuxed dependency 2023-02-26 09:05:36 +01:00
Jan Prochazka a588d72b26 create default archive by default 2023-02-26 08:58:23 +01:00
Jan Prochazka 7ec23ecca4 fixed modify archive for windows 2023-02-26 08:41:30 +01:00
Jan Prochazka 0c62349802 fixed error reporting problems 2023-02-25 20:25:27 +01:00
Jan Prochazka c817bf5911 added import/export tab (not used) 2023-02-25 18:24:00 +01:00
Jan Prochazka 2d74b831c5 fixed sqlite data duplicator 2023-02-25 13:33:33 +01:00
Jan Prochazka 490efb065a fixes sqlite autoincrement column creation 2023-02-25 13:31:24 +01:00
Jan Prochazka 6ccaa05bec Merge pull request #505 from mhf-ir/master
fix: connection ssl require file path instread of file content
2023-02-25 12:26:55 +01:00
Jan Prochazka eb04f56662 fixed TS + code tidy 2023-02-25 11:57:30 +01:00
Jan Prochazka 4e97f54bd4 archive file - save as 2023-02-25 11:43:14 +01:00
Jan Prochazka 9fe689625e simplified tab register 2023-02-25 11:36:16 +01:00
Jan Prochazka fa24d47c03 fixed tab component 2023-02-25 11:34:55 +01:00
Jan Prochazka 1c73920dd5 save jsl data 2023-02-25 11:34:19 +01:00
Jan Prochazka a77492440e removed free table (data sheet) concept 2023-02-25 09:51:08 +01:00
Jan Prochazka 7c4a47c4c6 running row macros 2023-02-24 19:04:22 +01:00
Jan Prochazka a519c78301 quick export - current archive 2023-02-24 17:22:11 +01:00
Jan Prochazka d024b6f25c run macro on jsl data 2023-02-24 16:48:37 +01:00
Muhammad Hussein Fattahizadeh 0c6e113e3e fix: connection ssl require file path instread of file content 2023-02-22 18:53:41 +03:30
Jan Prochazka 6ff4acc50d removed marking archive as data sheet 2023-02-21 07:37:37 +01:00
Jan Prochazka fabf333664 v5.2.3-beta.8 2023-02-19 19:24:00 +01:00
Jan Prochazka 29eef5619d dynamic structure switch 2023-02-19 19:23:51 +01:00
Jan Prochazka eb098bb33a upgraded xlsx package 2023-02-17 14:13:25 +01:00
Jan Prochazka 36c792f44e excel import fix 2023-02-17 13:57:30 +01:00
Jan Prochazka c7aaf06506 v5.2.3-beta.7 2023-02-17 12:15:19 +01:00
Jan Prochazka 7b6a1543de duplicator UX 2023-02-17 12:14:58 +01:00
Jan Prochazka 67e287cfdf added links from duplicator 2023-02-17 10:41:01 +01:00
Jan Prochazka 7802cde14d duplicator fixes 2023-02-17 10:26:44 +01:00
Jan Prochazka 6b783027e5 data duplicator fix 2023-02-17 10:00:21 +01:00
Jan Prochazka 1ab58a491a data duplicator test 2023-02-17 09:27:16 +01:00
Jan Prochazka b6c5f26eb4 data duplicator test 2023-02-17 09:15:13 +01:00
Jan Prochazka 6a0feb235a fixed compilation error 2023-02-17 08:46:31 +01:00
Jan Prochazka 1365f2b47c duplicator options 2023-02-16 18:27:05 +01:00
Jan Prochazka 8109dd862e change theme fix 2023-02-16 18:00:04 +01:00
Jan Prochazka fb1c2c61fb duplicator improvements 2023-02-16 17:25:54 +01:00
Jan Prochazka b514f8ae35 using readline instead of line-reader-fixes freeze 2023-02-16 15:11:33 +01:00
Jan Prochazka 3114a05c3b save structure changes to jsonl file 2023-02-16 13:33:28 +01:00
Jan Prochazka edf0637a35 change structure generates data commands 2023-02-16 13:14:56 +01:00
Jan Prochazka cd1267b464 schema editing in dataset 2023-02-16 11:47:17 +01:00
Jan Prochazka 675ef6e593 v5.2.3-beta.6 2023-02-13 20:35:37 +01:00
Jan Prochazka 60bd3c157e fixed multi-db perspectives 2023-02-13 20:35:10 +01:00
Jan Prochazka aceffd5681 v5.2.3-beta.5 2023-02-12 20:25:07 +01:00
Jan Prochazka 83f01c52f2 data duplicator style 2023-02-12 19:52:53 +01:00
Jan Prochazka 5e207a6c16 build fix 2023-02-12 12:44:40 +01:00
Jan Prochazka 10d5667c83 pg fix 2023-02-12 12:31:38 +01:00
Jan Prochazka d1e1b2ce9c Merge branch 'develop' 2023-02-12 12:15:59 +01:00
Jan Prochazka bb2f1399ba data duplicator runs in transaction 2023-02-12 12:14:07 +01:00
Jan Prochazka 5b6f90abc5 data duplicator - logs 2023-02-12 12:09:20 +01:00
Jan Prochazka 1d24562ead duplicator 2023-02-12 11:43:13 +01:00
Jan Prochazka fb8174b3e9 delete cascade fix 2023-02-12 11:43:02 +01:00
Jan Prochazka 4e194539d9 fix 2023-02-11 10:24:52 +01:00
Jan Prochazka b5e37053b8 data duplicator works in simple case 2023-02-11 10:17:10 +01:00
Jan Prochazka f3dd187df7 useEditorData fix 2023-02-11 09:53:08 +01:00
Jan Prochazka b5f504f3b1 data duplicator tab - configurator 2023-02-10 16:50:27 +01:00
Jan Prochazka 8df2a8a6df more mirroe archive commands 2023-02-10 15:14:02 +01:00
Jan Prochazka dd46604069 correct saving jsonl data 2023-02-10 11:37:18 +01:00
Jan Prochazka cc9402dd84 save archive algorithm 2023-02-10 11:25:18 +01:00
Jan Prochazka be0f68fb7f editing changeset on archive file 2023-02-10 10:22:38 +01:00
Jan Prochazka a3db8e2903 html & xml autio select highlighter #485 2023-02-08 07:34:28 +01:00
Jan Prochazka 87c29faadd html & xsml syntax highlight #485 2023-02-06 20:39:50 +01:00
Jan Prochazka 9bf610707e v5.2.3-beta.4 2023-02-06 20:30:24 +01:00
Jan Prochazka 28a568901a fixed rimraf usage 2023-02-06 20:29:58 +01:00
Jan Prochazka 1ba43af48d v5.2.3-beta.3 2023-02-05 20:23:29 +01:00
Jan Prochazka 356b623eaf downgraded rimraf, so that it passes old build 2023-02-05 20:23:16 +01:00
Jan Prochazka 85c3d6fe6f v5.2.3-beta.2 2023-02-05 20:07:56 +01:00
Jan Prochazka d9eb0f0976 intelisense fix #484 2023-02-05 20:03:15 +01:00
Jan Prochazka d61a7c54ce table data edit - shows editing mark 2023-02-05 19:58:45 +01:00
Jan Prochazka cd000098f1 save table structure uses transaction 2023-02-05 19:24:22 +01:00
Jan Prochazka e9a01a1ffd used transaction for save table data 2023-02-05 19:17:46 +01:00
Jan Prochazka 722789ca01 fix 2023-02-05 18:51:34 +01:00
Jan Prochazka 83ba530112 explicit order criteria only on MSSQL #436 2023-02-04 15:58:45 +01:00
Jan Prochazka 57fa9335d4 sort JSONL data & query results 2023-02-04 15:27:55 +01:00
Jan Prochazka 3babe95944 v5.2.3-beta.1 2023-02-04 09:46:29 +01:00
Jan Prochazka aab1229220 fixed typo #481 2023-02-04 09:44:49 +01:00
Jan Prochazka 7b64587f6a fixed crash #452 2023-02-03 11:11:44 +01:00
Jan Prochazka 6a5157140e mysql default value #455 2023-02-03 11:06:59 +01:00
Jan Prochazka 47e0173f84 arm64 windows installer added to build #473 2023-02-03 10:01:11 +01:00
Jan Prochazka 8fe6cb1f71 fixed reading DB with mongo views #476 2023-02-03 09:59:53 +01:00
Jan Prochazka dc6eff7f9e fixed show DB 2023-02-03 09:33:38 +01:00
Jan Prochazka dad9e3ea48 changelog 2023-02-01 18:31:18 +01:00
Jan Prochazka 166c2254ec v5.2.2 2023-02-01 18:22:21 +01:00
Jan Prochazka 5ab4b9ee13 v5.2.2-alpha.13 2023-01-29 08:40:35 +01:00
Jan Prochazka 1c87b1b994 fixed dependency 2023-01-29 08:40:12 +01:00
Jan Prochazka 072c340d5f added missing dependency 2023-01-29 08:35:50 +01:00
Jan Prochazka 5bc7a8e763 v5.2.2-alpha.12 2023-01-29 08:30:09 +01:00
Jan Prochazka 655dec369f fix 2023-01-29 08:30:00 +01:00
Jan Prochazka 9356ef6667 dbmodel docs 2023-01-29 08:27:28 +01:00
Jan Prochazka b3308dc389 v5.2.2-alpha.11 2023-01-28 20:19:30 +01:00
Jan Prochazka 7cbcafb6f7 dbmodel added to build 2023-01-28 20:19:16 +01:00
Jan Prochazka adbb335062 v5.2.2-alpha.10 2023-01-28 20:15:05 +01:00
Jan Prochazka bc1c827225 dbmodel commandline tool 2023-01-28 20:14:44 +01:00
Jan Prochazka 258338cd2e dbmodel tool initial import 2023-01-28 18:48:52 +01:00
Jan Prochazka cf00af9e30 v5.2.2-beta.9 2023-01-28 16:33:08 +01:00
Jan Prochazka 0f515bb762 bigger timeout to yarn 2023-01-28 16:32:55 +01:00
Jan Prochazka 5ca3a66f17 remove call of snapcraft login 2023-01-28 16:30:43 +01:00
Jan Prochazka 4f857ab1f8 v5.2.2-beta.8 2023-01-28 13:45:54 +01:00
Jan Prochazka 5ed97079b1 fixed snapcraft login 2023-01-28 13:45:43 +01:00
Jan Prochazka 16408d85f8 support for binary values in filters #467 2023-01-28 12:57:17 +01:00
Jan Prochazka cc388362d6 close query sessions after timeout #468 2023-01-28 11:40:52 +01:00
Jan Prochazka 079cac6eda use pinomin package 2023-01-28 10:22:12 +01:00
Jan Prochazka a43522752c logger refactor 2023-01-28 09:17:57 +01:00
Jan Prochazka dbcc732688 appname sent to connection - tedious 2023-01-27 16:42:01 +01:00
Jan Prochazka 3f525cacc1 appname added to pg connection string 2023-01-27 16:31:20 +01:00
Jan Prochazka 2fee308185 pinomin time field 2023-01-27 16:31:06 +01:00
Jan Prochazka 331c303e8f v5.2.2-beta.7 2023-01-27 15:40:50 +01:00
Jan Prochazka 7c8d225868 added missing file 2023-01-27 15:40:20 +01:00
Jan Prochazka dd44798ff4 v5.2.2-beta.6 2023-01-27 15:38:36 +01:00
Jan Prochazka 2dd8749bc6 simplified logging 2023-01-27 15:37:16 +01:00
Jan Prochazka 174d7fde5c pinomin logger 2023-01-27 15:37:04 +01:00
Jan Prochazka af3d271361 v5.2.2-beta.5 2023-01-23 20:11:59 +01:00
Jan Prochazka 17e83c700e try remove console logging for electron 2023-01-23 20:11:47 +01:00
Jan Prochazka 513fe6184a v5.2.2-beta.4 2023-01-23 19:41:42 +01:00
Jan Prochazka b56f11156d try to fix electron errors after start 2023-01-23 19:41:31 +01:00
Jan Prochazka 80e8b210be handle errors when sending to subprocess #458 2023-01-23 19:28:05 +01:00
Jan Prochazka d60687485b v5.2.2-beta.3 2023-01-23 18:26:53 +01:00
Jan Prochazka 7a62ef0cc3 remove handle electron errors 2023-01-23 18:26:43 +01:00
Jan Prochazka 0e58e94153 v5.2.2-beta.2 2023-01-22 19:29:00 +01:00
Jan Prochazka 8926e3bc84 Merge branch 'develop' 2023-01-22 19:27:57 +01:00
Jan Prochazka ef62948b5a form view works for JSL data 2023-01-22 19:27:39 +01:00
Jan Prochazka f014a4e6b4 added loadingformview 2023-01-22 19:12:32 +01:00
Jan Prochazka e589a994fa form view cleanup 2023-01-22 18:31:00 +01:00
Jan Prochazka 6fdb9cc5c9 form works also for views 2023-01-22 18:26:49 +01:00
Jan Prochazka 11bb8faf91 form view - open reference 2023-01-22 18:22:18 +01:00
Jan Prochazka 98b26bb119 form view filters 2023-01-22 18:03:29 +01:00
Jan Prochazka 268c010a22 form view refactor - handle hiearchic columns 2023-01-22 17:27:13 +01:00
Jan Prochazka 6dd3945724 form view refactor - basically works 2023-01-22 16:26:48 +01:00
Jan Prochazka ba644a37b7 removed hostname from logs 2023-01-22 12:35:11 +01:00
Jan Prochazka e9322cc1ba fix 2023-01-22 12:27:10 +01:00
Jan Prochazka f266acb807 #455 column default value help text 2023-01-22 12:21:12 +01:00
Jan Prochazka 9f66c5e28a logger info 2023-01-22 12:12:56 +01:00
Jan Prochazka 61d93fb9d9 Merge branch 'develop' 2023-01-22 12:07:06 +01:00
Jan Prochazka c87e38fd17 log & report unhandled electron error 2023-01-22 11:56:09 +01:00
Jan Prochazka 7eb6357c8d #360 allow to set log level 2023-01-22 10:55:10 +01:00
Jan Prochazka 1cf02488b4 configuring logger for electron 2023-01-22 10:35:02 +01:00
Jan Prochazka 5249713a3c show logs from menu 2023-01-22 10:31:16 +01:00
Jan Prochazka 1bf8f38793 added process name to logger output 2023-01-22 10:12:46 +01:00
Jan Prochazka e1f92fef13 pipe logs from forks into pino logger 2023-01-22 10:00:01 +01:00
Jan Prochazka af01d95348 pino multistream - file logging 2023-01-22 09:50:35 +01:00
Jan Prochazka d4f0882054 fixed error logging 2023-01-21 18:00:59 +01:00
Jan Prochazka cc0f05168d defined logger caller 2023-01-21 17:49:16 +01:00
Jan Prochazka 4d93be61b5 PINO JSON logging 2023-01-21 17:32:28 +01:00
Jan Prochazka dd230b008f Merge branch 'master' of github.com:dbgate/dbgate 2023-01-21 13:57:12 +01:00
Jan Prochazka 16238f8f94 Merge branch 'develop' 2023-01-21 13:56:55 +01:00
Jan Prochazka 20570c1988 Merge pull request #460 from ProjectInfinity/fix-sql-formatter
Update sql-formatter, fixes #450
2023-01-21 13:15:43 +01:00
Jan Prochazka 44dadcd256 fixed sqlite analyser 2023-01-21 11:01:19 +01:00
Jan Prochazka cf07123f51 fixed msql analyser 2023-01-21 10:52:50 +01:00
Jan Prochazka b56134d308 #457 fixed ctrl+tab 2023-01-21 10:40:13 +01:00
Jan Prochazka f9f879272b analyser refactor + optimalization 2023-01-21 10:13:08 +01:00
Jan Prochazka 3dfae351a6 foreign key loading optimalization #451 2023-01-21 09:34:29 +01:00
Infinity 822482ab4e Update sql-formatter, fixes #450 2023-01-19 15:08:51 +01:00
Jan Prochazka 451f671426 v5.2.2-beta.1 2023-01-06 18:49:09 +01:00
Jan Prochazka b06d747399 #451 loading fks on postgres cleanup & fix 2023-01-06 18:40:47 +01:00
Jan Prochazka 37eeaf0cce v5.2.1 2023-01-06 18:03:55 +01:00
Jan Prochazka 5f0ee80306 changelog 2023-01-06 18:03:44 +01:00
Jan Prochazka d8f25c17f7 fix 2023-01-06 14:32:42 +01:00
Jan Prochazka f6173335da v5.2.1-beta.3 2023-01-06 09:07:51 +01:00
Jan Prochazka 9fdc15b8aa used persmissions fixed 2023-01-06 09:06:54 +01:00
Jan Prochazka 77300f2078 fix login page 2023-01-06 08:34:27 +01:00
Jan Prochazka 3ab887f8e9 v5.2.1-beta.2 2023-01-05 10:19:03 +01:00
Jan Prochazka 5684eab3e2 OAuth scope added #407 2023-01-05 10:18:53 +01:00
Jan Prochazka 9ce743a8d3 v5.2.1-beta.1 2023-01-05 09:23:49 +01:00
Jan Prochazka 680c0057b1 fixed client_id param in oauth #407 2023-01-05 09:23:31 +01:00
Jan Prochazka e9fffc063b changelog 2023-01-03 22:35:41 +01:00
Jan Prochazka a0bc6f314c v5.2.0 2023-01-03 22:35:17 +01:00
Jan Prochazka af1bb005e5 changelog 2023-01-02 19:53:22 +01:00
Jan Prochazka 34d891e935 changelog preparation 2023-01-02 19:52:45 +01:00
Jan Prochazka dcccfe11c8 v5.1.7-alpha.14 2023-01-02 18:48:58 +01:00
Jan Prochazka 8823cff3a1 oracle build fix 2023-01-02 18:48:28 +01:00
Jan Prochazka 18320352ff v5.1.7-alpha.13 2023-01-02 18:35:35 +01:00
Jan Prochazka d3292810f8 v5.1.7-beta.12 2023-01-01 19:55:59 +01:00
Jan Prochazka 7cd493e518 fixed(oracle) - removed incorrect query result row 2023-01-01 19:55:08 +01:00
Jan Prochazka 6c4b56a28b fixed loading materialized views in oracle 2023-01-01 19:50:19 +01:00
Jan Prochazka 0c795e33c3 commented out some console.log in oracle driver 2023-01-01 19:48:36 +01:00
Jan Prochazka fd2e1e0cae v5.1.7-beta.11 2023-01-01 12:25:13 +01:00
Jan Prochazka 13fd7a0aad memoize connection folder expand state #425 2023-01-01 12:24:42 +01:00
Jan Prochazka d5e240a701 rename, delete connection folder #425 2023-01-01 12:16:59 +01:00
Jan Prochazka 2151252032 fix 2023-01-01 10:29:54 +01:00
Jan Prochazka cd175973d9 fixed file filters #445 2022-12-31 14:33:58 +01:00
Jan Prochazka 10789a75a8 force text display 2022-12-31 14:17:47 +01:00
Jan Prochazka f775fbad29 force text display 2022-12-31 14:16:08 +01:00
Jan Prochazka dbdb50f796 fix 2022-12-31 13:50:51 +01:00
Jan Prochazka 61a2002627 deep refresh on datagrid 2022-12-31 13:39:07 +01:00
Jan Prochazka 4d8e0d44d1 ALTER VIEW, ALTER PROCEDURE scripts 2022-12-31 13:05:16 +01:00
Jan Prochazka e13808945c removed unused imports 2022-12-31 12:44:44 +01:00
Jan Prochazka 3aa7e6c022 map view refactor 2022-12-31 12:43:27 +01:00
Jan Prochazka cb0a9770d2 map cell view improved 2022-12-31 12:29:47 +01:00
Jan Prochazka 4a2b33276d clone mongto rows without _id #404 2022-12-31 11:18:18 +01:00
Jan Prochazka fb1cbc71f2 clear perspective cache reloads also patterns 2022-12-31 10:48:14 +01:00
Jan Prochazka b8fcbbbc93 drag & drop memory in designer 2022-12-31 10:37:25 +01:00
Jan Prochazka 6b5d2114bf designer - column filter 2022-12-31 10:05:09 +01:00
271 changed files with 6128 additions and 3454 deletions
+8 -11
@@ -12,7 +12,7 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [macOS-10.15, windows-2022, ubuntu-18.04]
os: [macOS-10.15, windows-2022, ubuntu-22.04]
# os: [macOS-10.15]
steps:
@@ -30,6 +30,9 @@ jobs:
- name: yarn adjustPackageJson
run: |
yarn adjustPackageJson
- name: yarn set timeout
run: |
yarn config set network-timeout 100000
- name: yarn install
run: |
yarn install
@@ -43,7 +46,7 @@ jobs:
run: |
yarn fillPackagedPlugins
- name: Install Snapcraft
if: matrix.os == 'ubuntu-18.04'
if: matrix.os == 'ubuntu-22.04'
uses: samuelmeuli/action-snapcraft@v1
- name: Publish
run: |
@@ -62,18 +65,12 @@ jobs:
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
- name: Save snap login
if: matrix.os == 'ubuntu-18.04'
run: 'echo "$SNAPCRAFT_LOGIN" > snapcraft.login'
shell: bash
env:
SNAPCRAFT_LOGIN: ${{secrets.SNAPCRAFT_LOGIN}}
- name: publishSnap
if: matrix.os == 'ubuntu-18.04'
if: matrix.os == 'ubuntu-22.04'
run: |
snapcraft login --with snapcraft.login
snapcraft upload --release=beta app/dist/*.snap
env:
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
- name: Copy artifacts
run: |
+10 -13
@@ -16,8 +16,8 @@ jobs:
strategy:
fail-fast: false
matrix:
# os: [ubuntu-18.04, windows-2016]
os: [macOS-10.15, windows-2022, ubuntu-18.04]
# os: [ubuntu-22.04, windows-2016]
os: [macOS-10.15, windows-2022, ubuntu-22.04]
steps:
- name: Context
@@ -34,6 +34,9 @@ jobs:
- name: yarn adjustPackageJson
run: |
yarn adjustPackageJson
- name: yarn set timeout
run: |
yarn config set network-timeout 100000
- name: yarn install
run: |
# yarn --version
@@ -49,7 +52,7 @@ jobs:
run: |
yarn fillPackagedPlugins
- name: Install Snapcraft
if: matrix.os == 'ubuntu-18.04'
if: matrix.os == 'ubuntu-22.04'
uses: samuelmeuli/action-snapcraft@v1
- name: Publish
run: |
@@ -72,18 +75,12 @@ jobs:
run: |
yarn generatePadFile
- name: Save snap login
if: matrix.os == 'ubuntu-18.04'
run: 'echo "$SNAPCRAFT_LOGIN" > snapcraft.login'
shell: bash
env:
SNAPCRAFT_LOGIN: ${{secrets.SNAPCRAFT_LOGIN}}
- name: publishSnap
if: matrix.os == 'ubuntu-18.04'
if: matrix.os == 'ubuntu-22.04'
run: |
snapcraft login --with snapcraft.login
snapcraft upload --release=stable app/dist/*.snap
env:
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
- name: Copy artifacts
run: |
@@ -137,7 +134,7 @@ jobs:
mv app/dist/dbgate-pad.xml artifacts/ || true
- name: Copy latest-linux.yml
if: matrix.os == 'ubuntu-18.04'
if: matrix.os == 'ubuntu-22.04'
run: |
mv app/dist/latest-linux.yml artifacts/latest-linux.yml || true
+1 -1
@@ -14,7 +14,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-18.04]
os: [ubuntu-22.04]
steps:
- name: Context
+11 -1
@@ -20,7 +20,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-18.04]
os: [ubuntu-22.04]
steps:
- name: Context
@@ -94,6 +94,11 @@ jobs:
run: |
npm publish
- name: Publish dbmodel
working-directory: packages/dbmodel
run: |
npm publish
- name: Publish dbgate-plugin-csv
working-directory: plugins/dbgate-plugin-csv
run: |
@@ -138,3 +143,8 @@ jobs:
working-directory: plugins/dbgate-plugin-redis
run: |
npm publish
- name: Publish dbgate-plugin-oracle
working-directory: plugins/dbgate-plugin-oracle
run: |
npm publish
+87
@@ -8,6 +8,93 @@ Builds:
- linux - application for linux
- win - application for Windows
### 5.2.4
- FIXED: npm version crash (#508)
### 5.2.3
- ADDED: Search entire table (multi column filter) #491
- ADDED: OracleDB - connection to ports other than the default #496
- CHANGED: OracleDB - status of support set to experimental
- FIXED: OracleDB database URL - fixes: Connect to default Oracle database #489
- ADDED: HTML, XML code highlighting for Edit cell value #485
- FIXED: Intellisense - incorrect alias after ORDER BY clause #484
- FIXED: Typo in SQL-Generator #481
- ADDED: Data duplicator #480
- FIXED: MongoDB - support for views #476
- FIXED: "SQL:CREATE TABLE" generated SQL default value syntax errors #455
- FIXED: Crash when right-clicking on tables #452
- FIXED: View sort #436
- ADDED: Arm64 version for Windows #473
- ADDED: Sortable query results and data archive
- CHANGED: Use transactions for saving table data
- CHANGED: Save table structure uses transactions
- ADDED: Table data editing - shows editing mark
- ADDED: Editing data archive files
- FIXED: Delete cascade options when using more than 2 tables
- ADDED: Save to current archive commands
- ADDED: Current archive mark is on status bar
- FIXED: Changed package used for parsing JSONL files when browsing - fixes backend freezing
- FIXED: SSL option for mongodb #504
- REMOVED: Data sheet editor
- FIXED: Creating SQLite autoincrement column
- FIXED: Better error reporting from export/import/duplicator
- CHANGED: Optimized OracleDB analysing algorithm
- ADDED: Multi column filter for perspectives
- FIXED: Fixed some scenarios using tables from different DBs
- FIXED: Sessions with long-running queries are not killed
### 5.2.2
- FIXED: Optimized loading of DB structure for PostgreSQL #451
- ADDED: Auto-closing query connections after a configurable inactivity interval (15 minutes by default) #468
- ADDED: Set application-name connection parameter (for PostgreSQL and MS SQL) for easier identification of DbGate connections
- ADDED: Filters support binary IDs #467
- FIXED: Ctrl+Tab works (switching tabs) #457
- FIXED: Format code supports non-standard letters #450
- ADDED: New logging system, log to file, ability to reduce logging #360 (using https://www.npmjs.com/package/pinomin)
- FIXED: Crash on Windows and Mac after the system goes into suspend mode #458
- ADDED: dbmodel standalone NPM package (https://www.npmjs.com/package/dbmodel) - deploy database via commandline tool
### 5.2.1
- FIXED: client_id param in OAuth
- ADDED: OAuth scope parameter
- FIXED: Login page - password was not sent when submitting by pressing ENTER
- FIXED: Used permissions fix
- FIXED: Export modal - fixed crash when selecting different database
### 5.2.0
- ADDED: Oracle database support #380
- ADDED: OAuth authentication #407
- ADDED: Active Directory (Windows) authentication #261
- ADDED: Ask for database credentials when logging in to a database
- ADDED: Login form instead of simple authorization (simple auth is possible with special configuration)
- FIXED: MongoDB - connection uri regression
- ADDED: MongoDB server summary tab
- FIXED: Broken versioned tables in MariaDB #433
- CHANGED: Improved editor margin #422
- ADDED: Implemented camel case search in all search boxes
- ADDED: MongoDB filter empty array, not empty array
- ADDED: Maximize button reflects window state
- ADDED: MongoDB - database profiler
- CHANGED: Short JSON values are shown directly in grid
- FIXED: Fixed filtering nested fields in NDJSON viewer
- CHANGED: Improved fuzzy search after Ctrl+P #246
- ADDED: MongoDB: Create collection backup
- ADDED: Single database mode
- ADDED: Perspective designer supports joins from MongoDB nested documents and arrays
- FIXED: Perspective designer joins on MongoDB ObjectId fields
- ADDED: Filtering columns in designer (query designer, diagram designer, perspective designer)
- FIXED: Clone MongoDB rows without _id attribute #404
- CHANGED: Improved cell view with GPS latitude, longitude fields
- ADDED: SQL: ALTER VIEW and SQL:ALTER PROCEDURE scripts
- ADDED: Ctrl+F5 refreshes data grid also with database structure #428
- ADDED: Perspective display modes: text, force text #439
- FIXED: Fixed file filters #445
- ADDED: Rename, remove connection folder, memoize opened state after app restart #425
- FIXED: Show SQLServer alter stored procedure #435
### 5.1.6
- ADDED: Connection folders support #274
- ADDED: Keyboard shortcut to hide result window and show/hide the side toolbar #406
+6 -2
@@ -22,7 +22,7 @@ DbGate is licensed under MIT license and is completely free.
* MySQL
* PostgreSQL
* SQL Server
* Oracle
* Oracle (experimental)
* MongoDB
* Redis
* SQLite
@@ -175,4 +175,8 @@ cd dbgate-plugin-my-new-plugin # this directory is created by wizard, edit, what
yarn plugin # this compiles plugin and copies it into existing DbGate installation
```
After restarting DbGate, you could use your new plugin from DbGate.
## Logging
DbGate uses the [pinomin logger](https://github.com/dbgate/pinomin), so by default it writes JSON log messages to the console and to log files. If you want to see formatted logs, use the [pino-pretty](https://github.com/pinojs/pino-pretty) log formatter.
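Many of the API changes in this compare replace `console.log` calls with this logger, obtained via `getLogger` from `dbgate-tools` (see the controller diffs below). A minimal usage sketch; the module name `example` and the logged fields are placeholders:

```js
const { getLogger } = require('dbgate-tools');

// each module creates a named logger; 'example' is a placeholder name
const logger = getLogger('example');

// pino-style calls: optional merge object first, message second - each call emits one JSON line
logger.info({ conid: 'con1', database: 'mydb' }, 'Processing query');

try {
  throw new Error('demo failure');
} catch (err) {
  logger.error({ err }, 'Error reading archive files');
}
```

During development the JSON stream can be made readable by piping it through `pino-pretty`, as the `start:api` and `start:app` scripts in the root `package.json` further down do (`yarn workspace dbgate-api start | pino-pretty`).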
BIN  (binary image file added, 89 KiB, not shown)
+10 -4
@@ -21,7 +21,7 @@
"afterSign": "electron-builder-notarize",
"mac": {
"category": "database",
"icon": "icon512.png",
"icon": "icon512-mac.png",
"hardenedRuntime": true,
"entitlements": "entitlements.mac.plist",
"entitlementsInherit": "entitlements.mac.plist",
@@ -71,7 +71,13 @@
},
"win": {
"target": [
"nsis",
{
"target": "nsis",
"arch": [
"x64",
"arm64"
]
},
{
"target": "zip",
"arch": [
@@ -113,7 +119,7 @@
},
"optionalDependencies": {
"better-sqlite3": "7.6.2",
"oracledb": "^5.5.0",
"msnodesqlv8": "^2.6.0"
"msnodesqlv8": "^2.6.0",
"oracledb": "^5.5.0"
}
}
+21
@@ -1,6 +1,8 @@
const electron = require('electron');
const os = require('os');
const fs = require('fs');
// const unhandled = require('electron-unhandled');
// const { openNewGitHubIssue, debugInfo } = require('electron-util');
const { Menu, ipcMain } = require('electron');
const { autoUpdater } = require('electron-updater');
const log = require('electron-log');
@@ -22,9 +24,25 @@ const configRootPath = path.join(app.getPath('userData'), 'config-root.json');
let initialConfig = {};
let apiLoaded = false;
let mainModule;
// let getLogger;
// let loadLogsContent;
const isMac = () => os.platform() == 'darwin';
// unhandled({
// showDialog: true,
// reportButton: error => {
// openNewGitHubIssue({
// user: 'dbgate',
// repo: 'dbgate',
// body: `PLEASE DELETE SENSITIVE INFO BEFORE POSTING ISSUE!!!\n\n\`\`\`\n${
// error.stack
// }\n\`\`\`\n\n---\n\n${debugInfo()}\n\n\`\`\`\n${loadLogsContent ? loadLogsContent(50) : ''}\n\`\`\``,
// });
// },
// logger: error => (getLogger ? getLogger('electron').fatal(error) : console.error(error)),
// });
try {
initialConfig = JSON.parse(fs.readFileSync(configRootPath, { encoding: 'utf-8' }));
} catch (err) {
@@ -333,9 +351,12 @@ function createWindow() {
// path.join(__dirname, process.env.DEVMODE ? '../../packages/api/src/index' : '../packages/api/dist/bundle.js')
// )
// );
api.configureLogger();
const main = api.getMainModule();
main.useAllControllers(null, electron);
mainModule = main;
// getLogger = api.getLogger;
// loadLogsContent = api.loadLogsContent;
apiLoaded = true;
}
mainModule.setElectronSender(mainWindow.webContents);
+3
@@ -86,6 +86,9 @@ module.exports = ({ editMenu }) => [
{ command: 'sql.generator', hideDisabled: true },
{ command: 'file.import', hideDisabled: true },
{ command: 'new.modelCompare', hideDisabled: true },
{ divider: true },
{ command: 'folder.showLogs', hideDisabled: true },
{ command: 'folder.showData', hideDisabled: true },
],
},
{
@@ -0,0 +1,94 @@
const engines = require('../engines');
const stream = require('stream');
const { testWrapper } = require('../tools');
const dataDuplicator = require('dbgate-api/src/shell/dataDuplicator');
const { runCommandOnDriver } = require('dbgate-tools');
describe('Data duplicator', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Insert simple data - %s',
testWrapper(async (conn, driver, engine) => {
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't1',
columns: [
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
})
);
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't2',
columns: [
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
{ columnName: 'valfk', dataType: 'int', notNull: true },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
})
);
const gett1 = () =>
stream.Readable.from([
{ __isStreamHeader: true, __isDynamicStructure: true },
{ id: 1, val: 'v1' },
{ id: 2, val: 'v2' },
{ id: 3, val: 'v3' },
]);
const gett2 = () =>
stream.Readable.from([
{ __isStreamHeader: true, __isDynamicStructure: true },
{ id: 1, val: 'v1', valfk: 1 },
{ id: 2, val: 'v2', valfk: 2 },
{ id: 3, val: 'v3', valfk: 3 },
]);
await dataDuplicator({
systemConnection: conn,
driver,
items: [
{
name: 't1',
operation: 'copy',
openStream: gett1,
},
{
name: 't2',
operation: 'copy',
openStream: gett2,
},
],
});
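// run the same copy a second time; the assertions below check that both tables then contain 6 rows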
await dataDuplicator({
systemConnection: conn,
driver,
items: [
{
name: 't1',
operation: 'copy',
openStream: gett1,
},
{
name: 't2',
operation: 'copy',
openStream: gett2,
},
],
});
const res1 = await driver.query(conn, `select count(*) as cnt from t1`);
expect(res1.rows[0].cnt.toString()).toEqual('6');
const res2 = await driver.query(conn, `select count(*) as cnt from t2`);
expect(res2.rows[0].cnt.toString()).toEqual('6');
})
);
});
+2 -2
@@ -136,8 +136,8 @@ const filterLocal = [
'-MySQL',
'-MariaDB',
'-PostgreSQL',
'SQL Server',
'-SQLite',
'-SQL Server',
'SQLite',
'-CockroachDB',
];
+2
@@ -13,6 +13,8 @@
"wait:ci": "cross-env DEVMODE=1 CITEST=1 node wait.js",
"test:local": "cross-env DEVMODE=1 LOCALTEST=1 jest",
"test:local:path": "cross-env DEVMODE=1 LOCALTEST=1 jest --runTestsByPath __tests__/data-duplicator.spec.js",
"test:ci": "cross-env DEVMODE=1 CITEST=1 jest --runInBand --json --outputFile=result.json --testLocationInResults",
"run:local": "docker-compose down && docker-compose up -d && yarn wait:local && yarn test:local"
+3
@@ -104,3 +104,6 @@ magick icon.png -resize 16x16! ../app/icons/16x16.png
magick icon.png -resize 192x192! ../packages/web/public/logo192.png
magick icon.png -resize 512x512! ../packages/web/public/logo512.png
magick icon.png -define icon:auto-resize="256,128,96,64,48,32,16" ../packages/web/public/favicon.ico
convert icon.png -resize 800x800 -background transparent -gravity center -extent 1000x1000 iconmac.png
magick composite iconmac.png macbg.png -resize 600x600! ../app/icon512-mac.png
BIN  (binary image file added, 211 KiB, not shown)
BIN  (binary image file added, 177 KiB, not shown)
+11 -8
@@ -1,6 +1,6 @@
{
"private": true,
"version": "5.1.7-beta.10",
"version": "5.2.5",
"name": "dbgate-all",
"workspaces": [
"packages/*",
@@ -8,16 +8,18 @@
"integration-tests"
],
"scripts": {
"start:api": "yarn workspace dbgate-api start",
"start:app": "cd app && yarn start",
"start:api": "yarn workspace dbgate-api start | pino-pretty",
"start:api:json": "yarn workspace dbgate-api start",
"start:app": "cd app && yarn start | pino-pretty",
"start:app:singledb": "CONNECTIONS=con1 SERVER_con1=localhost ENGINE_con1=mysql@dbgate-plugin-mysql USER_con1=root PASSWORD_con1=Pwd2020Db SINGLE_CONNECTION=con1 SINGLE_DATABASE=Chinook yarn start:app",
"start:api:debug": "cross-env DEBUG=* yarn workspace dbgate-api start",
"start:app:debug": "cd app && cross-env DEBUG=* yarn start",
"start:api:debug:ssh": "cross-env DEBUG=ssh yarn workspace dbgate-api start",
"start:app:debug:ssh": "cd app && cross-env DEBUG=ssh yarn start",
"start:api:portal": "yarn workspace dbgate-api start:portal",
"start:api:singledb": "yarn workspace dbgate-api start:singledb",
"start:api:auth": "yarn workspace dbgate-api start:auth",
"start:api:dblogin": "yarn workspace dbgate-api start:dblogin",
"start:api:portal": "yarn workspace dbgate-api start:portal | pino-pretty",
"start:api:singledb": "yarn workspace dbgate-api start:singledb | pino-pretty",
"start:api:auth": "yarn workspace dbgate-api start:auth | pino-pretty",
"start:api:dblogin": "yarn workspace dbgate-api start:dblogin | pino-pretty",
"start:web": "yarn workspace dbgate-web dev",
"start:sqltree": "yarn workspace dbgate-sqltree start",
"start:tools": "yarn workspace dbgate-tools start",
@@ -57,7 +59,8 @@
},
"dependencies": {
"concurrently": "^5.1.0",
"patch-package": "^6.2.1"
"patch-package": "^6.2.1",
"pino-pretty": "^9.1.1"
},
"devDependencies": {
"copyfiles": "^2.2.0",
+7 -2
@@ -29,6 +29,7 @@
"dbgate-query-splitter": "^4.9.3",
"dbgate-sqltree": "^5.0.0-alpha.1",
"dbgate-tools": "^5.0.0-alpha.1",
"dbgate-datalib": "^5.0.0-alpha.1",
"debug": "^4.3.4",
"diff": "^5.0.0",
"diff2html": "^3.4.13",
@@ -36,6 +37,7 @@
"express": "^4.17.1",
"express-basic-auth": "^1.2.0",
"express-fileupload": "^1.2.0",
"external-sorting": "^1.3.1",
"fs-extra": "^9.1.0",
"fs-reverse": "^0.0.3",
"get-port": "^5.1.1",
@@ -46,10 +48,13 @@
"jsonwebtoken": "^8.5.1",
"line-reader": "^0.4.0",
"lodash": "^4.17.21",
"moment": "^2.24.0",
"ncp": "^2.0.0",
"node-cron": "^2.0.3",
"on-finished": "^2.4.1",
"pinomin": "^1.0.1",
"portfinder": "^1.0.28",
"rimraf": "^3.0.0",
"simple-encryptor": "^4.0.0",
"ssh2": "^1.11.0",
"tar": "^6.0.5",
@@ -79,7 +84,7 @@
},
"optionalDependencies": {
"better-sqlite3": "7.6.2",
"oracledb": "^5.5.0",
"msnodesqlv8": "^2.6.0"
"msnodesqlv8": "^2.6.0",
"oracledb": "^5.5.0"
}
}
+74 -31
@@ -3,9 +3,15 @@ const readline = require('readline');
const path = require('path');
const { archivedir, clearArchiveLinksCache, resolveArchiveFolder } = require('../utility/directories');
const socket = require('../utility/socket');
const { saveFreeTableData } = require('../utility/freeTableStorage');
const loadFilesRecursive = require('../utility/loadFilesRecursive');
const getJslFileName = require('../utility/getJslFileName');
const { getLogger } = require('dbgate-tools');
const uuidv1 = require('uuid/v1');
const dbgateApi = require('../shell');
const jsldata = require('./jsldata');
const platformInfo = require('../utility/platformInfo');
const logger = getLogger('archive');
module.exports = {
folders_meta: true,
@@ -68,7 +74,7 @@ module.exports = {
...fileType('.matview.sql', 'matview.sql'),
];
} catch (err) {
console.log('Error reading archive files', err.message);
logger.error({ err }, 'Error reading archive files');
return [];
}
},
@@ -76,17 +82,20 @@ module.exports = {
refreshFiles_meta: true,
async refreshFiles({ folder }) {
socket.emitChanged('archive-files-changed', { folder });
return true;
},
refreshFolders_meta: true,
async refreshFolders() {
socket.emitChanged(`archive-folders-changed`);
return true;
},
deleteFile_meta: true,
async deleteFile({ folder, file, fileType }) {
await fs.unlink(path.join(resolveArchiveFolder(folder), `${file}.${fileType}`));
socket.emitChanged(`archive-files-changed`, { folder });
return true;
},
renameFile_meta: true,
@@ -96,6 +105,46 @@ module.exports = {
path.join(resolveArchiveFolder(folder), `${newFile}.${fileType}`)
);
socket.emitChanged(`archive-files-changed`, { folder });
return true;
},
modifyFile_meta: true,
async modifyFile({ folder, file, changeSet, mergedRows, mergeKey, mergeMode }) {
await jsldata.closeDataStore(`archive://${folder}/${file}`);
const changedFilePath = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
if (!fs.existsSync(changedFilePath)) {
if (!mergedRows) {
return false;
}
const fileStream = fs.createWriteStream(changedFilePath);
for (const row of mergedRows) {
await fileStream.write(JSON.stringify(row) + '\n');
}
await fileStream.close();
socket.emitChanged(`archive-files-changed`, { folder });
return true;
}
const tmpchangedFilePath = path.join(resolveArchiveFolder(folder), `${file}-${uuidv1()}.jsonl`);
const reader = await dbgateApi.modifyJsonLinesReader({
fileName: changedFilePath,
changeSet,
mergedRows,
mergeKey,
mergeMode,
});
const writer = await dbgateApi.jsonLinesWriter({ fileName: tmpchangedFilePath });
await dbgateApi.copyStream(reader, writer);
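// replace the original with the rewritten temp file: copy + delete on Windows, delete + rename elsewhere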
if (platformInfo.isWindows) {
await fs.copyFile(tmpchangedFilePath, changedFilePath);
await fs.unlink(tmpchangedFilePath);
} else {
await fs.unlink(changedFilePath);
await fs.rename(tmpchangedFilePath, changedFilePath);
}
return true;
},
renameFolder_meta: true,
@@ -103,6 +152,7 @@ module.exports = {
const uniqueName = await this.getNewArchiveFolder({ database: newFolder });
await fs.rename(path.join(archivedir(), folder), path.join(archivedir(), uniqueName));
socket.emitChanged(`archive-folders-changed`);
return true;
},
deleteFolder_meta: true,
@@ -114,36 +164,9 @@ module.exports = {
await fs.rmdir(path.join(archivedir(), folder), { recursive: true });
}
socket.emitChanged(`archive-folders-changed`);
},
saveFreeTable_meta: true,
async saveFreeTable({ folder, file, data }) {
await saveFreeTableData(path.join(resolveArchiveFolder(folder), `${file}.jsonl`), data);
socket.emitChanged(`archive-files-changed`, { folder });
return true;
},
loadFreeTable_meta: true,
async loadFreeTable({ folder, file }) {
return new Promise((resolve, reject) => {
const fileStream = fs.createReadStream(path.join(resolveArchiveFolder(folder), `${file}.jsonl`));
const liner = readline.createInterface({
input: fileStream,
});
let structure = null;
const rows = [];
liner.on('line', line => {
const data = JSON.parse(line);
if (structure) rows.push(data);
else structure = data;
});
liner.on('close', () => {
resolve({ structure, rows });
fileStream.close();
});
});
},
saveText_meta: true,
async saveText({ folder, file, text }) {
await fs.writeFile(path.join(resolveArchiveFolder(folder), `${file}.jsonl`), text);
@@ -152,10 +175,30 @@ module.exports = {
},
saveJslData_meta: true,
async saveJslData({ folder, file, jslid }) {
async saveJslData({ folder, file, jslid, changeSet }) {
const source = getJslFileName(jslid);
const target = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
await fs.copyFile(source, target);
if (changeSet) {
const reader = await dbgateApi.modifyJsonLinesReader({
fileName: source,
changeSet,
});
const writer = await dbgateApi.jsonLinesWriter({ fileName: target });
await dbgateApi.copyStream(reader, writer);
} else {
await fs.copyFile(source, target);
socket.emitChanged(`archive-files-changed`, { folder });
}
return true;
},
saveRows_meta: true,
async saveRows({ folder, file, rows }) {
const fileStream = fs.createWriteStream(path.join(resolveArchiveFolder(folder), `${file}.jsonl`));
for (const row of rows) {
await fileStream.write(JSON.stringify(row) + '\n');
}
await fileStream.close();
socket.emitChanged(`archive-files-changed`, { folder });
return true;
},
+14 -7
@@ -3,8 +3,11 @@ const jwt = require('jsonwebtoken');
const getExpressPath = require('../utility/getExpressPath');
const uuidv1 = require('uuid/v1');
const { getLogins } = require('../utility/hasPermission');
const { getLogger } = require('dbgate-tools');
const AD = require('activedirectory2').promiseWrapper;
const logger = getLogger('auth');
const tokenSecret = uuidv1();
function shouldAuthorizeApi() {
@@ -51,7 +54,7 @@ function authMiddleware(req, res, next) {
return next();
}
console.log('Sending invalid token error', err.message);
logger.error({ err }, 'Sending invalid token error');
return unauthorizedResponse(req, res, 'invalid token');
}
@@ -62,20 +65,24 @@ module.exports = {
async oauthToken(params) {
const { redirectUri, code } = params;
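// optional OAUTH_SCOPE env variable, appended to the token request when configured (#407)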
const scopeParam = process.env.OAUTH_SCOPE ? `&scope=${process.env.OAUTH_SCOPE}` : '';
const resp = await axios.default.post(
`${process.env.OAUTH_TOKEN}`,
`grant_type=authorization_code&code=${encodeURIComponent(code)}&redirect_uri=${encodeURIComponent(
redirectUri
)}&client_id=${process.env.OAUTH_CLIENT_ID}&client_secret=${process.env.OAUTH_CLIENT_SECRET}`
)}&client_id=${process.env.OAUTH_CLIENT_ID}&client_secret=${process.env.OAUTH_CLIENT_SECRET}${scopeParam}`
);
const { access_token, refresh_token } = resp.data;
const payload = jwt.decode(access_token);
console.log('User payload returned from OAUTH:', payload);
logger.info({ payload }, 'User payload returned from OAUTH');
const login = process.env.OAUTH_LOGIN_FIELD ? payload[process.env.OAUTH_LOGIN_FIELD] : 'oauth';
const login =
process.env.OAUTH_LOGIN_FIELD && payload && payload[process.env.OAUTH_LOGIN_FIELD]
? payload[process.env.OAUTH_LOGIN_FIELD]
: 'oauth';
if (
process.env.OAUTH_ALLOWED_LOGINS &&
@@ -113,12 +120,12 @@ module.exports = {
!process.env.AD_ALLOWED_LOGINS.split(',').find(x => x.toLowerCase().trim() == login.toLowerCase().trim())
) {
return { error: `Username ${login} not allowed to log in` };
}
}
return {
accessToken: jwt.sign({ login }, tokenSecret, { expiresIn: getTokenLifetime() }),
};
} catch (err) {
console.log('Failed active directory authentization', err.message);
logger.error({ err }, 'Failed active directory authentization');
return {
error: err.message,
};
@@ -129,7 +136,7 @@ module.exports = {
if (!logins) {
return { error: 'Logins not configured' };
}
const foundLogin = logins.find(x => x.login == login)
const foundLogin = logins.find(x => x.login == login);
if (foundLogin && foundLogin.password == password) {
return {
accessToken: jwt.sign({ login }, tokenSecret, { expiresIn: getTokenLifetime() }),
+8 -7
@@ -2,7 +2,7 @@ const fs = require('fs-extra');
const os = require('os');
const path = require('path');
const axios = require('axios');
const { datadir } = require('../utility/directories');
const { datadir, getLogsFilePath } = require('../utility/directories');
const { hasPermission, getLogins } = require('../utility/hasPermission');
const socket = require('../utility/socket');
const _ = require('lodash');
@@ -28,12 +28,9 @@ module.exports = {
get_meta: true,
async get(_params, req) {
const logins = getLogins();
const login =
req && req.user
? req.user.login
: logins
? logins.find(x => x.login == (req && req.auth && req.auth.user))
: null;
const loginName =
req && req.user && req.user.login ? req.user.login : req && req.auth && req.auth.user ? req.auth.user : null;
const login = logins && loginName ? logins.find(x => x.login == loginName) : null;
const permissions = login ? login.permissions : process.env.PERMISSIONS;
return {
@@ -47,8 +44,12 @@ module.exports = {
permissions,
login,
oauth: process.env.OAUTH_AUTH,
oauthClient: process.env.OAUTH_CLIENT_ID,
oauthScope: process.env.OAUTH_SCOPE,
oauthLogout: process.env.OAUTH_LOGOUT,
isLoginForm: !!process.env.AD_URL || (!!logins && !process.env.BASIC_AUTH),
logsFilePath: getLogsFilePath(),
connectionsFilePath: path.join(datadir(), 'connections.jsonl'),
...currentVersion,
};
},
+31 -13
@@ -12,9 +12,12 @@ const { pickSafeConnectionInfo } = require('../utility/crypting');
const JsonLinesDatabase = require('../utility/JsonLinesDatabase');
const processArgs = require('../utility/processArgs');
const { safeJsonParse } = require('dbgate-tools');
const { safeJsonParse, getLogger } = require('dbgate-tools');
const platformInfo = require('../utility/platformInfo');
const { connectionHasPermission, testConnectionPermission } = require('../utility/hasPermission');
const pipeForkLogs = require('../utility/pipeForkLogs');
const logger = getLogger('connections');
let volatileConnections = {};
@@ -86,13 +89,13 @@ function getPortalCollections() {
sslKeyFile: process.env[`SSL_KEY_FILE_${id}`],
sslRejectUnauthorized: process.env[`SSL_REJECT_UNAUTHORIZED_${id}`],
}));
console.log('Using connections from ENV variables:');
console.log(JSON.stringify(connections.map(pickSafeConnectionInfo), undefined, 2));
logger.info({ connections: connections.map(pickSafeConnectionInfo) }, 'Using connections from ENV variables');
const noengine = connections.filter(x => !x.engine);
if (noengine.length > 0) {
console.log(
'Warning: Invalid CONNECTIONS configutation, missing ENGINE for connection ID:',
noengine.map(x => x._id)
logger.warn(
{ connections: noengine.map(x => x._id) },
'Invalid CONNECTIONS configutation, missing ENGINE for connection ID'
);
}
return connections;
@@ -203,13 +206,20 @@ module.exports = {
test_meta: true,
test(connection) {
const subprocess = fork(global['API_PACKAGE'] || process.argv[1], [
'--is-forked-api',
'--start-process',
'connectProcess',
...processArgs.getPassArgs(),
// ...process.argv.slice(3),
]);
const subprocess = fork(
global['API_PACKAGE'] || process.argv[1],
[
'--is-forked-api',
'--start-process',
'connectProcess',
...processArgs.getPassArgs(),
// ...process.argv.slice(3),
],
{
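// pipe the fork's stdout/stderr so pipeForkLogs below can forward its log output to the main logger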
stdio: ['ignore', 'pipe', 'pipe', 'ipc'],
}
);
pipeForkLogs(subprocess);
subprocess.send(connection);
return new Promise(resolve => {
subprocess.on('message', resp => {
@@ -283,6 +293,14 @@ module.exports = {
return res;
},
batchChangeFolder_meta: true,
async batchChangeFolder({ folder, newFolder }, req) {
// const updated = await this.datastore.find(x => x.parent == folder);
const res = await this.datastore.updateAll(x => (x.parent == folder ? { ...x, parent: newFolder } : x));
socket.emitChanged('connection-list-changed');
return res;
},
updateDatabase_meta: true,
async updateDatabase({ conid, database, values }, req) {
if (portalConnections) return;
@@ -12,6 +12,7 @@ const {
matchPairedObjects,
extendDatabaseInfo,
modelCompareDbDiffOptions,
getLogger,
} = require('dbgate-tools');
const { html, parse } = require('diff2html');
const { handleProcessCommunication } = require('../utility/processComm');
@@ -28,6 +29,9 @@ const diff2htmlPage = require('../utility/diff2htmlPage');
const processArgs = require('../utility/processArgs');
const { testConnectionPermission } = require('../utility/hasPermission');
const { MissingCredentialsError } = require('../utility/exceptions');
const pipeForkLogs = require('../utility/pipeForkLogs');
const logger = getLogger('databaseConnections');
module.exports = {
/** @type {import('dbgate-types').OpenedDatabaseConnection[]} */
@@ -60,7 +64,7 @@ module.exports = {
handle_error(conid, database, props) {
const { error } = props;
console.log(`Error in database connection ${conid}, database ${database}: ${error}`);
logger.error(`Error in database connection ${conid}, database ${database}: ${error}`);
},
handle_response(conid, database, { msgid, ...response }) {
const [resolve, reject] = this.requests[msgid];
@@ -85,13 +89,20 @@ module.exports = {
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
}
const subprocess = fork(global['API_PACKAGE'] || process.argv[1], [
'--is-forked-api',
'--start-process',
'databaseConnectionProcess',
...processArgs.getPassArgs(),
// ...process.argv.slice(3),
]);
const subprocess = fork(
global['API_PACKAGE'] || process.argv[1],
[
'--is-forked-api',
'--start-process',
'databaseConnectionProcess',
...processArgs.getPassArgs(),
// ...process.argv.slice(3),
],
{
stdio: ['ignore', 'pipe', 'pipe', 'ipc'],
}
);
pipeForkLogs(subprocess);
const lastClosed = this.closed[`${conid}/${database}`];
const newOpened = {
conid,
@@ -129,7 +140,12 @@ module.exports = {
const msgid = uuidv1();
const promise = new Promise((resolve, reject) => {
this.requests[msgid] = [resolve, reject];
conn.subprocess.send({ msgid, ...message });
try {
conn.subprocess.send({ msgid, ...message });
} catch (err) {
logger.error({ err }, 'Error sending request do process');
this.close(conn.conid, conn.database);
}
});
return promise;
},
@@ -137,7 +153,7 @@ module.exports = {
queryData_meta: true,
async queryData({ conid, database, sql }, req) {
testConnectionPermission(conid, req);
console.log(`Processing query, conid=${conid}, database=${database}, sql=${sql}`);
logger.info({ conid, database, sql }, 'Processing query');
const opened = await this.ensureOpened(conid, database);
// if (opened && opened.status && opened.status.name == 'error') {
// return opened.status;
@@ -155,11 +171,11 @@ module.exports = {
},
runScript_meta: true,
async runScript({ conid, database, sql }, req) {
async runScript({ conid, database, sql, useTransaction }, req) {
testConnectionPermission(conid, req);
console.log(`Processing script, conid=${conid}, database=${database}, sql=${sql}`);
logger.info({ conid, database, sql }, 'Processing script');
const opened = await this.ensureOpened(conid, database);
const res = await this.sendRequest(opened, { msgtype: 'runScript', sql });
const res = await this.sendRequest(opened, { msgtype: 'runScript', sql, useTransaction });
return res;
},
@@ -278,6 +294,7 @@ module.exports = {
if (existing) {
existing.subprocess.send({ msgtype: 'ping' });
} else {
// @ts-ignore
existing = await this.ensureOpened(conid, database);
}
@@ -308,7 +325,13 @@ module.exports = {
const existing = this.opened.find(x => x.conid == conid && x.database == database);
if (existing) {
existing.disconnected = true;
if (kill) existing.subprocess.kill();
if (kill) {
try {
existing.subprocess.kill();
} catch (err) {
logger.error({ err }, 'Error killing subprocess');
}
}
this.opened = this.opened.filter(x => x.conid != conid || x.database != database);
this.closed[`${conid}/${database}`] = {
status: {
+26 -9
@@ -4,7 +4,6 @@ const lineReader = require('line-reader');
const _ = require('lodash');
const { __ } = require('lodash/fp');
const DatastoreProxy = require('../utility/DatastoreProxy');
const { saveFreeTableData } = require('../utility/freeTableStorage');
const getJslFileName = require('../utility/getJslFileName');
const JsonLinesDatastore = require('../utility/JsonLinesDatastore');
const requirePluginFunction = require('../utility/requirePluginFunction');
@@ -113,6 +112,14 @@ module.exports = {
return datastore;
},
async closeDataStore(jslid) {
const datastore = this.datastores[jslid];
if (datastore) {
await datastore._closeReader();
delete this.datastores[jslid];
}
},
getInfo_meta: true,
async getInfo({ jslid }) {
const file = getJslFileName(jslid);
@@ -135,9 +142,15 @@ module.exports = {
},
getRows_meta: true,
async getRows({ jslid, offset, limit, filters, formatterFunction }) {
async getRows({ jslid, offset, limit, filters, sort, formatterFunction }) {
const datastore = await this.ensureDatastore(jslid, formatterFunction);
return datastore.getRows(offset, limit, _.isEmpty(filters) ? null : filters);
return datastore.getRows(offset, limit, _.isEmpty(filters) ? null : filters, _.isEmpty(sort) ? null : sort);
},
exists_meta: true,
async exists({ jslid }) {
const fileName = getJslFileName(jslid);
return fs.existsSync(fileName);
},
getStats_meta: true,
@@ -181,18 +194,22 @@ module.exports = {
// }
},
saveFreeTable_meta: true,
async saveFreeTable({ jslid, data }) {
saveFreeTableData(getJslFileName(jslid), data);
return true;
},
saveText_meta: true,
async saveText({ jslid, text }) {
await fs.promises.writeFile(getJslFileName(jslid), text);
return true;
},
saveRows_meta: true,
async saveRows({ jslid, rows }) {
const fileStream = fs.createWriteStream(getJslFileName(jslid));
for (const row of rows) {
await fileStream.write(JSON.stringify(row) + '\n');
}
await fileStream.close();
return true;
},
extractTimelineChart_meta: true,
async extractTimelineChart({ jslid, timestampFunction, aggregateFunction, measures }) {
const timestamp = requirePluginFunction(timestampFunction);
+32 -18
@@ -6,10 +6,17 @@ const byline = require('byline');
const socket = require('../utility/socket');
const { fork } = require('child_process');
const { rundir, uploadsdir, pluginsdir, getPluginBackendPath, packagedPluginList } = require('../utility/directories');
const { extractShellApiPlugins, extractShellApiFunctionName, jsonScriptToJavascript } = require('dbgate-tools');
const {
extractShellApiPlugins,
extractShellApiFunctionName,
jsonScriptToJavascript,
getLogger,
safeJsonParse,
} = require('dbgate-tools');
const { handleProcessCommunication } = require('../utility/processComm');
const processArgs = require('../utility/processArgs');
const platformInfo = require('../utility/platformInfo');
const logger = getLogger('runners');
function extractPlugins(script) {
const requireRegex = /\s*\/\/\s*@require\s+([^\s]+)\s*\n/g;
@@ -29,13 +36,14 @@ const requirePluginsTemplate = (plugins, isExport) =>
const scriptTemplate = (script, isExport) => `
const dbgateApi = require(${isExport ? `'dbgate-api'` : 'process.env.DBGATE_API'});
const logger = dbgateApi.getLogger('script');
dbgateApi.initializeApiEnvironment();
${requirePluginsTemplate(extractPlugins(script), isExport)}
require=null;
async function run() {
${script}
await dbgateApi.finalizer.run();
console.log('Finished job script');
logger.info('Finished job script');
}
dbgateApi.runScript(run);
`;
@@ -59,20 +67,23 @@ module.exports = {
requests: {},
dispatchMessage(runid, message) {
if (message) console.log('...', message.message);
if (_.isString(message)) {
socket.emit(`runner-info-${runid}`, {
message,
if (message) {
const json = safeJsonParse(message.message);
if (json) logger.log(json);
else logger.info(message.message);
const toEmit = {
time: new Date(),
severity: 'info',
});
}
if (_.isPlainObject(message)) {
socket.emit(`runner-info-${runid}`, {
time: new Date(),
severity: 'info',
...message,
});
message: json ? json.msg : message.message,
};
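// pino-style numeric levels: 50 = error, 60 = fatal; anything at error level or above is emitted with error severity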
if (json && json.level >= 50) {
toEmit.severity = 'error';
}
socket.emit(`runner-info-${runid}`, toEmit);
}
},
@@ -98,13 +109,15 @@ module.exports = {
fs.writeFileSync(`${scriptFile}`, scriptText);
fs.mkdirSync(directory);
const pluginNames = _.union(fs.readdirSync(pluginsdir()), packagedPluginList);
console.log(`RUNNING SCRIPT ${scriptFile}`);
logger.info({ scriptFile }, 'Running script');
// const subprocess = fork(scriptFile, ['--checkParent', '--max-old-space-size=8192'], {
const subprocess = fork(
scriptFile,
[
'--checkParent', // ...process.argv.slice(3)
'--is-forked-api',
'--process-display-name',
'script',
...processArgs.getPassArgs(),
],
{
@@ -117,14 +130,15 @@ module.exports = {
},
}
);
const pipeDispatcher = severity => data =>
this.dispatchMessage(runid, { severity, message: data.toString().trim() });
const pipeDispatcher = severity => data => {
return this.dispatchMessage(runid, { severity, message: data.toString().trim() });
};
byline(subprocess.stdout).on('data', pipeDispatcher('info'));
byline(subprocess.stderr).on('data', pipeDispatcher('error'));
subprocess.on('exit', code => {
this.rejectRequest(runid, { message: 'No data returned, maybe the input data source is too big' });
console.log('... EXIT process', code);
logger.info({ code, pid: subprocess.pid }, 'Exited process');
socket.emit(`runner-done-${runid}`, code);
});
subprocess.on('error', error => {
+4 -1
@@ -4,6 +4,9 @@ const path = require('path');
const cron = require('node-cron');
const runners = require('./runners');
const { hasPermission } = require('../utility/hasPermission');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('scheduler');
const scheduleRegex = /\s*\/\/\s*@schedule\s+([^\n]+)\n/;
@@ -21,7 +24,7 @@ module.exports = {
if (!match) return;
const pattern = match[1];
if (!cron.validate(pattern)) return;
console.log(`Schedule script ${file} with pattern ${pattern}`);
logger.info(`Schedule script ${file} with pattern ${pattern}`);
const task = cron.schedule(pattern, () => runners.start({ script: text }));
this.tasks.push(task);
},
@@ -10,6 +10,10 @@ const config = require('./config');
const processArgs = require('../utility/processArgs');
const { testConnectionPermission } = require('../utility/hasPermission');
const { MissingCredentialsError } = require('../utility/exceptions');
const pipeForkLogs = require('../utility/pipeForkLogs');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('serverConnection');
module.exports = {
opened: [],
@@ -47,16 +51,26 @@ module.exports = {
const existing = this.opened.find(x => x.conid == conid);
if (existing) return existing;
const connection = await connections.getCore({ conid });
if (!connection) {
throw new Error(`Connection with conid="${conid}" not found`);
}
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
}
const subprocess = fork(global['API_PACKAGE'] || process.argv[1], [
'--is-forked-api',
'--start-process',
'serverConnectionProcess',
...processArgs.getPassArgs(),
// ...process.argv.slice(3),
]);
const subprocess = fork(
global['API_PACKAGE'] || process.argv[1],
[
'--is-forked-api',
'--start-process',
'serverConnectionProcess',
...processArgs.getPassArgs(),
// ...process.argv.slice(3),
],
{
stdio: ['ignore', 'pipe', 'pipe', 'ipc'],
}
);
pipeForkLogs(subprocess);
const newOpened = {
conid,
subprocess,
@@ -91,7 +105,13 @@ module.exports = {
const existing = this.opened.find(x => x.conid == conid);
if (existing) {
existing.disconnected = true;
if (kill) existing.subprocess.kill();
if (kill) {
try {
existing.subprocess.kill();
} catch (err) {
logger.error({ err }, 'Error killing subprocess');
}
}
this.opened = this.opened.filter(x => x.conid != conid);
this.closed[conid] = {
...existing.status,
@@ -110,6 +130,7 @@ module.exports = {
listDatabases_meta: true,
async listDatabases({ conid }, req) {
if (!conid) return [];
testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid);
return opened.databases;
@@ -140,7 +161,12 @@ module.exports = {
}
this.lastPinged[conid] = new Date().getTime();
const opened = await this.ensureOpened(conid);
opened.subprocess.send({ msgtype: 'ping' });
try {
opened.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error({ err }, 'Error calling ping');
this.close(conid);
}
})
);
return { status: 'ok' };
@@ -177,7 +203,12 @@ module.exports = {
const msgid = uuidv1();
const promise = new Promise((resolve, reject) => {
this.requests[msgid] = [resolve, reject];
conn.subprocess.send({ msgid, ...message });
try {
conn.subprocess.send({ msgid, ...message });
} catch (err) {
logger.error({ err }, 'Error sending request');
this.close(conn.conid);
}
});
return promise;
},
+27 -10
@@ -8,6 +8,11 @@ const path = require('path');
const { handleProcessCommunication } = require('../utility/processComm');
const processArgs = require('../utility/processArgs');
const { appdir } = require('../utility/directories');
const { getLogger } = require('dbgate-tools');
const pipeForkLogs = require('../utility/pipeForkLogs');
const config = require('./config');
const logger = getLogger('sessions');
module.exports = {
/** @type {import('dbgate-types').OpenedSession[]} */
@@ -82,13 +87,20 @@ module.exports = {
async create({ conid, database }) {
const sesid = uuidv1();
const connection = await connections.getCore({ conid });
const subprocess = fork(global['API_PACKAGE'] || process.argv[1], [
'--is-forked-api',
'--start-process',
'sessionProcess',
...processArgs.getPassArgs(),
// ...process.argv.slice(3),
]);
const subprocess = fork(
global['API_PACKAGE'] || process.argv[1],
[
'--is-forked-api',
'--start-process',
'sessionProcess',
...processArgs.getPassArgs(),
// ...process.argv.slice(3),
],
{
stdio: ['ignore', 'pipe', 'pipe', 'ipc'],
}
);
pipeForkLogs(subprocess);
const newOpened = {
conid,
database,
@@ -109,7 +121,12 @@ module.exports = {
socket.emit(`session-closed-${sesid}`);
});
subprocess.send({ msgtype: 'connect', ...connection, database });
subprocess.send({
msgtype: 'connect',
...connection,
database,
globalSettings: await config.getSettings(),
});
return _.pick(newOpened, ['conid', 'database', 'sesid']);
},
@@ -120,7 +137,7 @@ module.exports = {
throw new Error('Invalid session');
}
console.log(`Processing query, sesid=${sesid}, sql=${sql}`);
logger.info({ sesid, sql }, 'Processing query');
this.dispatchMessage(sesid, 'Query execution started');
session.subprocess.send({ msgtype: 'executeQuery', sql });
@@ -158,7 +175,7 @@ module.exports = {
throw new Error('Invalid session');
}
console.log(`Starting profiler, sesid=${sesid}`);
logger.info({ sesid }, 'Starting profiler');
session.loadingReader_jslid = jslid;
session.subprocess.send({ msgtype: 'startProfiler', jslid });
+3 -1
@@ -1,6 +1,8 @@
const path = require('path');
const { uploadsdir } = require('../utility/directories');
const uuidv1 = require('uuid/v1');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('uploads');
module.exports = {
upload_meta: {
@@ -15,7 +17,7 @@ module.exports = {
}
const uploadName = uuidv1();
const filePath = path.join(uploadsdir(), uploadName);
console.log(`Uploading file ${data.name}, size=${data.size}`);
logger.info(`Uploading file ${data.name}, size=${data.size}`);
data.mv(filePath, () => {
res.json({
+96 -2
@@ -1,5 +1,96 @@
const shell = require('./shell');
const { setLogger, getLogger, setLoggerName } = require('dbgate-tools');
const processArgs = require('./utility/processArgs');
const fs = require('fs');
const moment = require('moment');
const path = require('path');
const { logsdir, setLogsFilePath, getLogsFilePath } = require('./utility/directories');
const { createLogger } = require('pinomin');
if (processArgs.startProcess) {
setLoggerName(processArgs.startProcess.replace(/Process$/, ''));
}
if (processArgs.processDisplayName) {
setLoggerName(processArgs.processDisplayName);
}
// function loadLogsContent(maxLines) {
// const text = fs.readFileSync(getLogsFilePath(), { encoding: 'utf8' });
// if (maxLines) {
// const lines = text
// .split('\n')
// .map(x => x.trim())
// .filter(x => x);
// return lines.slice(-maxLines).join('\n');
// }
// return text;
// }
function configureLogger() {
const logsFilePath = path.join(logsdir(), `${moment().format('YYYY-MM-DD-HH-mm')}-${process.pid}.ndjson`);
setLogsFilePath(logsFilePath);
setLoggerName('main');
const logger = createLogger({
base: { pid: process.pid },
targets: [
{
type: 'console',
// @ts-ignore
level: process.env.CONSOLE_LOG_LEVEL || process.env.LOG_LEVEL || 'info',
},
{
type: 'stream',
// @ts-ignore
level: process.env.FILE_LOG_LEVEL || process.env.LOG_LEVEL || 'info',
stream: fs.createWriteStream(logsFilePath, { flags: 'a' }),
},
],
});
// const streams = [];
// if (!platformInfo.isElectron) {
// streams.push({
// stream: process.stdout,
// level: process.env.CONSOLE_LOG_LEVEL || process.env.LOG_LEVEL || 'info',
// });
// }
// streams.push({
// stream: fs.createWriteStream(logsFilePath),
// level: process.env.FILE_LOG_LEVEL || process.env.LOG_LEVEL || 'info',
// });
// let logger = pinoms({
// redact: { paths: ['hostname'], remove: true },
// streams,
// });
// // @ts-ignore
// let logger = pino({
// redact: { paths: ['hostname'], remove: true },
// transport: {
// targets: [
// {
// level: process.env.CONSOLE_LOG_LEVEL || process.env.LOG_LEVEL || 'info',
// target: 'pino/file',
// },
// {
// level: process.env.FILE_LOG_LEVEL || process.env.LOG_LEVEL || 'info',
// target: 'pino/file',
// options: { destination: logsFilePath },
// },
// ],
// },
// });
setLogger(logger);
}
if (processArgs.listenApi) {
configureLogger();
}
const shell = require('./shell');
const dbgateTools = require('dbgate-tools');
global['DBGATE_TOOLS'] = dbgateTools;
@@ -8,7 +99,7 @@ if (processArgs.startProcess) {
const proc = require('./proc');
const module = proc[processArgs.startProcess];
module.start();
}
}
if (processArgs.listenApi) {
const main = require('./main');
@@ -17,5 +108,8 @@ if (processArgs.listenApi) {
module.exports = {
...shell,
getLogger,
configureLogger,
// loadLogsContent,
getMainModule: () => require('./main'),
};
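Not part of the diff: a minimal sketch of consuming the NDJSON file that the 'stream' target in configureLogger appends to. The path pattern mirrors the moment().format call above; the record fields assume pino-style output.

const fs = require('fs');

// Reads every record from a per-process log file such as
// <logsdir>/2023-03-17-13-33-<pid>.ndjson (YYYY-MM-DD-HH-mm-<pid>).
function readLogRecords(logsFilePath) {
  return fs
    .readFileSync(logsFilePath, 'utf8')
    .split('\n')
    .filter(line => line.trim())
    .map(line => JSON.parse(line)); // one JSON record per line
}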
+12 -9
@@ -33,6 +33,9 @@ const platformInfo = require('./utility/platformInfo');
const getExpressPath = require('./utility/getExpressPath');
const { getLogins } = require('./utility/hasPermission');
const _ = require('lodash');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('main');
function start() {
// console.log('process.argv', process.argv);
@@ -60,8 +63,8 @@ function start() {
} else if (platformInfo.isNpmDist) {
app.use(getExpressPath('/'), express.static(path.join(__dirname, '../../dbgate-web/public')));
} else if (process.env.DEVWEB) {
console.log('__dirname', __dirname);
console.log(path.join(__dirname, '../../web/public/build'));
// console.log('__dirname', __dirname);
// console.log(path.join(__dirname, '../../web/public/build'));
app.use(getExpressPath('/'), express.static(path.join(__dirname, '../../web/public')));
} else {
app.get(getExpressPath('/'), (req, res) => {
@@ -109,7 +112,7 @@ function start() {
if (platformInfo.isDocker) {
const port = process.env.PORT || 3000;
console.log('DbGate API listening on port (docker build)', port);
logger.info(`DbGate API listening on port ${port} (docker build)`);
server.listen(port);
} else if (platformInfo.isNpmDist) {
getPort({
@@ -119,27 +122,27 @@ function start() {
),
}).then(port => {
server.listen(port, () => {
console.log(`DbGate API listening on port ${port} (NPM build)`);
logger.info(`DbGate API listening on port ${port} (NPM build)`);
});
});
} else if (process.env.DEVWEB) {
const port = process.env.PORT || 3000;
console.log('DbGate API & web listening on port (dev web build)', port);
logger.info(`DbGate API & web listening on port ${port} (dev web build)`);
server.listen(port);
} else {
const port = process.env.PORT || 3000;
console.log('DbGate API listening on port (dev API build)', port);
logger.info(`DbGate API listening on port ${port} (dev API build)`);
server.listen(port);
}
function shutdown() {
console.log('\nShutting down DbGate API server');
logger.info('Shutting down DbGate API server');
server.close(() => {
console.log('Server shut down, terminating');
logger.info('Server shut down, terminating');
process.exit(0);
});
setTimeout(() => {
console.log('Server close timeout, terminating');
logger.info('Server close timeout, terminating');
process.exit(0);
}, 1000);
}
@@ -1,7 +1,7 @@
const stableStringify = require('json-stable-stringify');
const { splitQuery } = require('dbgate-query-splitter');
const childProcessChecker = require('../utility/childProcessChecker');
const { extractBoolSettingsValue, extractIntSettingsValue } = require('dbgate-tools');
const { extractBoolSettingsValue, extractIntSettingsValue, getLogger } = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const { handleProcessCommunication } = require('../utility/processComm');
@@ -9,6 +9,8 @@ const { SqlGenerator } = require('dbgate-tools');
const generateDeploySql = require('../shell/generateDeploySql');
const { dumpSqlSelect } = require('dbgate-sqltree');
const logger = getLogger('dbconnProcess');
let systemConnection;
let storedConnection;
let afterConnectCallbacks = [];
@@ -156,12 +158,12 @@ function resolveAnalysedPromises() {
afterAnalyseCallbacks = [];
}
async function handleRunScript({ msgid, sql }, skipReadonlyCheck = false) {
async function handleRunScript({ msgid, sql, useTransaction }, skipReadonlyCheck = false) {
await waitConnected();
const driver = requireEngineDriver(storedConnection);
try {
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
await driver.script(systemConnection, sql);
await driver.script(systemConnection, sql, { useTransaction });
process.send({ msgtype: 'response', msgid });
} catch (err) {
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
@@ -269,7 +271,7 @@ async function handleSqlPreview({ msgid, objects, options }) {
process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated });
if (generator.isUnhandledException) {
setTimeout(() => {
console.log('Exiting because of unhandled exception');
logger.error('Exiting because of unhandled exception');
process.exit(0);
}, 500);
}
@@ -336,7 +338,7 @@ function start() {
setInterval(() => {
const time = new Date().getTime();
if (time - lastPing > 40 * 1000) {
console.log('Database connection not alive, exiting');
logger.info('Database connection not alive, exiting');
process.exit(0);
}
}, 10 * 1000);
@@ -345,9 +347,9 @@ function start() {
if (handleProcessCommunication(message)) return;
try {
await handleMessage(message);
} catch (e) {
console.error('Error in DB connection', e);
process.send({ msgtype: 'error', error: e.message });
} catch (err) {
logger.error({ err }, 'Error in DB connection');
process.send({ msgtype: 'error', error: err.message });
}
});
}
@@ -1,9 +1,10 @@
const stableStringify = require('json-stable-stringify');
const { extractBoolSettingsValue, extractIntSettingsValue } = require('dbgate-tools');
const { extractBoolSettingsValue, extractIntSettingsValue, getLogger } = require('dbgate-tools');
const childProcessChecker = require('../utility/childProcessChecker');
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const { handleProcessCommunication } = require('../utility/processComm');
const logger = getLogger('srvconnProcess');
let systemConnection;
let storedConnection;
@@ -101,7 +102,7 @@ async function handleDatabaseOp(op, { name }) {
} else {
const dmp = driver.createDumper();
dmp[op](name);
console.log(`RUNNING SCRIPT: ${dmp.s}`);
logger.info({ sql: dmp.s }, 'Running script');
await driver.query(systemConnection, dmp.s);
}
await handleRefresh();
@@ -146,7 +147,7 @@ function start() {
setInterval(() => {
const time = new Date().getTime();
if (time - lastPing > 40 * 1000) {
console.log('Server connection not alive, exiting');
logger.info('Server connection not alive, exiting');
process.exit(0);
}
}, 10 * 1000);
+50 -14
@@ -10,13 +10,18 @@ const requireEngineDriver = require('../utility/requireEngineDriver');
const { decryptConnection } = require('../utility/crypting');
const connectUtility = require('../utility/connectUtility');
const { handleProcessCommunication } = require('../utility/processComm');
const { getLogger, extractIntSettingsValue, extractBoolSettingsValue } = require('dbgate-tools');
const logger = getLogger('sessionProcess');
let systemConnection;
let storedConnection;
let afterConnectCallbacks = [];
// let currentHandlers = [];
let lastPing = null;
let lastActivity = null;
let currentProfiler = null;
let executingScripts = 0;
class TableWriter {
constructor() {
@@ -212,6 +217,8 @@ function waitConnected() {
}
async function handleStartProfiler({ jslid }) {
lastActivity = new Date().getTime();
await waitConnected();
const driver = requireEngineDriver(storedConnection);
@@ -230,6 +237,8 @@ async function handleStartProfiler({ jslid }) {
}
async function handleStopProfiler({ jslid }) {
lastActivity = new Date().getTime();
const driver = requireEngineDriver(storedConnection);
currentProfiler.writer.close();
driver.stopProfiler(systemConnection, currentProfiler);
@@ -237,6 +246,8 @@ async function handleStopProfiler({ jslid }) {
}
async function handleExecuteQuery({ sql }) {
lastActivity = new Date().getTime();
await waitConnected();
const driver = requireEngineDriver(storedConnection);
@@ -253,23 +264,30 @@ async function handleExecuteQuery({ sql }) {
//process.send({ msgtype: 'error', error: e.message });
}
const resultIndexHolder = {
value: 0,
};
for (const sqlItem of splitQuery(sql, {
...driver.getQuerySplitterOptions('stream'),
returnRichInfo: true,
})) {
await handleStream(driver, resultIndexHolder, sqlItem);
// const handler = new StreamHandler(resultIndex);
// const stream = await driver.stream(systemConnection, sqlItem, handler);
// handler.stream = stream;
// resultIndex = handler.resultIndex;
executingScripts++;
try {
const resultIndexHolder = {
value: 0,
};
for (const sqlItem of splitQuery(sql, {
...driver.getQuerySplitterOptions('stream'),
returnRichInfo: true,
})) {
await handleStream(driver, resultIndexHolder, sqlItem);
// const handler = new StreamHandler(resultIndex);
// const stream = await driver.stream(systemConnection, sqlItem, handler);
// handler.stream = stream;
// resultIndex = handler.resultIndex;
}
process.send({ msgtype: 'done' });
} finally {
executingScripts--;
}
process.send({ msgtype: 'done' });
}
async function handleExecuteReader({ jslid, sql, fileName }) {
lastActivity = new Date().getTime();
await waitConnected();
const driver = requireEngineDriver(storedConnection);
@@ -325,7 +343,25 @@ function start() {
setInterval(() => {
const time = new Date().getTime();
if (time - lastPing > 25 * 1000) {
console.log('Session not alive, exiting');
logger.info('Session not alive, exiting');
process.exit(0);
}
const useSessionTimeout =
storedConnection && storedConnection.globalSettings
? extractBoolSettingsValue(storedConnection.globalSettings, 'session.autoClose', true)
: false;
const sessionTimeout =
storedConnection && storedConnection.globalSettings
? extractIntSettingsValue(storedConnection.globalSettings, 'session.autoCloseTimeout', 15, 1, 120)
: 15;
if (
useSessionTimeout &&
time - lastActivity > sessionTimeout * 60 * 1000 &&
!currentProfiler &&
executingScripts == 0
) {
logger.info('Session not active, exiting');
process.exit(0);
}
}, 10 * 1000);
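Not part of the diff: the auto-close decision added above, restated as a standalone predicate with the same setting keys and defaults. The extract* helpers come from dbgate-tools exactly as in the hunk.

const { extractBoolSettingsValue, extractIntSettingsValue } = require('dbgate-tools');

function shouldAutoClose({ settings, now, lastActivity, currentProfiler, executingScripts }) {
  // 'session.autoClose' defaults to true, but only when settings are available at all
  const enabled = settings ? extractBoolSettingsValue(settings, 'session.autoClose', true) : false;
  // timeout in minutes, default 15, clamped to 1..120
  const timeoutMinutes = settings ? extractIntSettingsValue(settings, 'session.autoCloseTimeout', 15, 1, 120) : 15;
  return (
    enabled &&
    now - lastActivity > timeoutMinutes * 60 * 1000 &&
    !currentProfiler &&       // never close a session with a running profiler
    executingScripts == 0     // nor one with a query still executing
  );
}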
+5 -2
@@ -3,6 +3,9 @@ const platformInfo = require('../utility/platformInfo');
const childProcessChecker = require('../utility/childProcessChecker');
const { handleProcessCommunication } = require('../utility/processComm');
const { SSHConnection } = require('../utility/SSHConnection');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('sshProcess');
async function getSshConnection(connection) {
const sshConfig = {
@@ -35,8 +38,8 @@ async function handleStart({ connection, tunnelConfig }) {
tunnelConfig,
});
} catch (err) {
console.log('Error creating SSH tunnel connection:', err.message);
logger.error({ err }, 'Error creating SSH tunnel connection');
process.send({
msgtype: 'error',
connection,
+4 -1
@@ -3,11 +3,14 @@ const fs = require('fs');
const { archivedir, resolveArchiveFolder } = require('../utility/directories');
// const socket = require('../utility/socket');
const jsonLinesWriter = require('./jsonLinesWriter');
const { getLogger } = require('dbgate-tools');
const logger = getLogger();
function archiveWriter({ folderName, fileName }) {
const dir = resolveArchiveFolder(folderName);
if (!fs.existsSync(dir)) {
console.log(`Creating directory ${dir}`);
logger.info(`Creating directory ${dir}`);
fs.mkdirSync(dir);
}
const jsonlFile = path.join(dir, `${fileName}.jsonl`);
+50
@@ -0,0 +1,50 @@
const stream = require('stream');
const path = require('path');
const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const logger = getLogger('dataDuplicator');
const { DataDuplicator } = require('dbgate-datalib');
const copyStream = require('./copyStream');
const jsonLinesReader = require('./jsonLinesReader');
const { resolveArchiveFolder } = require('../utility/directories');
async function dataDuplicator({
connection,
archive,
items,
options,
analysedStructure = null,
driver,
systemConnection,
}) {
if (!driver) driver = requireEngineDriver(connection);
const pool = systemConnection || (await connectUtility(driver, connection, 'write'));
logger.info(`Connected.`);
if (!analysedStructure) {
analysedStructure = await driver.analyseFull(pool);
}
const dupl = new DataDuplicator(
pool,
driver,
analysedStructure,
items.map(item => ({
name: item.name,
operation: item.operation,
matchColumns: item.matchColumns,
openStream:
item.openStream ||
(() => jsonLinesReader({ fileName: path.join(resolveArchiveFolder(archive), `${item.name}.jsonl`) })),
})),
stream,
copyStream,
options
);
await dupl.run();
}
module.exports = dataDuplicator;
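Not part of the diff: a hedged usage sketch of the new dataDuplicator shell function through dbgate-api. The connection fields, archive name and the 'copy' operation value are illustrative only.

const dbgateApi = require('dbgate-api');

async function run() {
  await dbgateApi.dataDuplicator({
    connection: {
      engine: 'postgres@dbgate-plugin-postgres', // illustrative engine id
      server: 'localhost',
      user: 'postgres',
      password: 'secret',
      database: 'mydb',
    },
    archive: 'default', // <item.name>.jsonl is read from this archive folder by the default openStream
    items: [
      { name: 'customers', operation: 'copy', matchColumns: ['id'] }, // operation value is illustrative
    ],
    options: {},
  });
}

dbgateApi.runScript(run);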
+5 -2
@@ -1,5 +1,8 @@
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('dumpDb');
function doDump(dumper) {
return new Promise((resolve, reject) => {
@@ -21,11 +24,11 @@ async function dumpDatabase({
databaseName,
schemaName,
}) {
console.log(`Dumping database`);
logger.info(`Dumping database`);
if (!driver) driver = requireEngineDriver(connection);
const pool = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
console.log(`Connected.`);
logger.info(`Connected.`);
const dumper = await driver.createBackupDumper(pool, {
outputFile,
+5 -2
@@ -1,12 +1,15 @@
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('execQuery');
async function executeQuery({ connection = undefined, systemConnection = undefined, driver = undefined, sql }) {
console.log(`Execute query ${sql}`);
logger.info({ sql }, `Execute query`);
if (!driver) driver = requireEngineDriver(connection);
const pool = systemConnection || (await connectUtility(driver, connection, 'script'));
console.log(`Connected.`);
logger.info(`Connected.`);
await driver.script(pool, sql);
}
+17 -9
@@ -1,18 +1,26 @@
const stream = require('stream');
async function fakeObjectReader({ delay = 0 } = {}) {
async function fakeObjectReader({ delay = 0, dynamicData = null } = {}) {
const pass = new stream.PassThrough({
objectMode: true,
});
function doWrite() {
pass.write({ columns: [{ columnName: 'id' }, { columnName: 'country' }], __isStreamHeader: true });
pass.write({ id: 1, country: 'Czechia' });
pass.write({ id: 2, country: 'Austria' });
pass.write({ country: 'Germany', id: 3 });
pass.write({ country: 'Romania', id: 4 });
pass.write({ country: 'Great Britain', id: 5 });
pass.write({ country: 'Bosna, Hecegovina', id: 6 });
pass.end();
if (dynamicData) {
pass.write({ __isStreamHeader: true, __isDynamicStructure: true });
for (const item of dynamicData) {
pass.write(item);
}
pass.end();
} else {
pass.write({ columns: [{ columnName: 'id' }, { columnName: 'country' }], __isStreamHeader: true });
pass.write({ id: 1, country: 'Czechia' });
pass.write({ id: 2, country: 'Austria' });
pass.write({ country: 'Germany', id: 3 });
pass.write({ country: 'Romania', id: 4 });
pass.write({ country: 'Great Britain', id: 5 });
pass.write({ country: 'Bosna, Hecegovina', id: 6 });
pass.end();
}
}
if (delay) {
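Not part of the diff: a hedged sketch of the new dynamicData path, assuming fakeObjectReader returns the PassThrough stream like the other shell readers do.

const fakeObjectReader = require('./fakeObjectReader'); // path assumed, shell module shown above

async function demo() {
  const rows = [
    { id: 1, country: 'Czechia', note: 'extra fields are fine, the structure is dynamic' },
    { id: 2, country: 'Austria' },
  ];
  const reader = await fakeObjectReader({ dynamicData: rows });
  // the stream now starts with { __isStreamHeader: true, __isDynamicStructure: true }
  // followed by the rows above, instead of the fixed id/country sample data
  return reader;
}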
@@ -0,0 +1,30 @@
const requireEngineDriver = require('../utility/requireEngineDriver');
const {
extendDatabaseInfo,
databaseInfoFromYamlModel,
getAlterDatabaseScript,
DatabaseAnalyser,
} = require('dbgate-tools');
const importDbModel = require('../utility/importDbModel');
const fs = require('fs');
async function generateModelSql({ engine, driver, modelFolder, loadedDbModel, outputFile }) {
if (!driver) driver = requireEngineDriver(engine);
const dbInfo = extendDatabaseInfo(
loadedDbModel ? databaseInfoFromYamlModel(loadedDbModel) : await importDbModel(modelFolder)
);
const { sql } = getAlterDatabaseScript(
DatabaseAnalyser.createEmptyStructure(),
dbInfo,
{},
DatabaseAnalyser.createEmptyStructure(),
dbInfo,
driver
);
fs.writeFileSync(outputFile, sql);
}
module.exports = generateModelSql;
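Not part of the diff: a hedged usage sketch of generateModelSql as exposed on dbgate-api; the engine id and paths are illustrative.

const dbgateApi = require('dbgate-api');

async function run() {
  await dbgateApi.generateModelSql({
    engine: 'mysql@dbgate-plugin-mysql', // used to resolve the driver when none is passed in
    modelFolder: './db-model',           // YAML model folder read by importDbModel
    outputFile: './create-database.sql', // full CREATE script produced by getAlterDatabaseScript
  });
}

dbgateApi.runScript(run);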
+5 -2
@@ -4,6 +4,9 @@ const connectUtility = require('../utility/connectUtility');
const { splitQueryStream } = require('dbgate-query-splitter/lib/splitQueryStream');
const download = require('./download');
const stream = require('stream');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('importDb');
class ImportStream extends stream.Transform {
constructor(pool, driver) {
@@ -38,11 +41,11 @@ function awaitStreamEnd(stream) {
}
async function importDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, inputFile }) {
console.log(`Importing database`);
logger.info(`Importing database`);
if (!driver) driver = requireEngineDriver(connection);
const pool = systemConnection || (await connectUtility(driver, connection, 'write'));
console.log(`Connected.`);
logger.info(`Connected.`);
const downloadedFile = await download(inputFile);
+8
@@ -23,6 +23,10 @@ const deployDb = require('./deployDb');
const initializeApiEnvironment = require('./initializeApiEnvironment');
const dumpDatabase = require('./dumpDatabase');
const importDatabase = require('./importDatabase');
const loadDatabase = require('./loadDatabase');
const generateModelSql = require('./generateModelSql');
const modifyJsonLinesReader = require('./modifyJsonLinesReader');
const dataDuplicator = require('./dataDuplicator');
const dbgateApi = {
queryReader,
@@ -49,6 +53,10 @@ const dbgateApi = {
initializeApiEnvironment,
dumpDatabase,
importDatabase,
loadDatabase,
generateModelSql,
modifyJsonLinesReader,
dataDuplicator,
};
requirePlugin.initializeDbgateApi(dbgateApi);
+4 -1
@@ -1,6 +1,9 @@
const { getLogger } = require('dbgate-tools');
const fs = require('fs');
const stream = require('stream');
const logger = getLogger('jsonArrayWriter');
class StringifyStream extends stream.Transform {
constructor() {
super({ objectMode: true });
@@ -38,7 +41,7 @@ class StringifyStream extends stream.Transform {
}
async function jsonArrayWriter({ fileName, encoding = 'utf-8' }) {
console.log(`Writing file ${fileName}`);
logger.info(`Writing file ${fileName}`);
const stringify = new StringifyStream();
const fileStream = fs.createWriteStream(fileName, encoding);
stringify.pipe(fileStream);
+8 -2
@@ -1,6 +1,8 @@
const fs = require('fs');
const stream = require('stream');
const byline = require('byline');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('jsonLinesReader');
class ParseStream extends stream.Transform {
constructor({ limitRows }) {
@@ -31,9 +33,13 @@ class ParseStream extends stream.Transform {
}
async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) {
console.log(`Reading file ${fileName}`);
logger.info(`Reading file ${fileName}`);
const fileStream = fs.createReadStream(fileName, encoding);
const fileStream = fs.createReadStream(
fileName,
// @ts-ignore
encoding
);
const liner = byline(fileStream);
const parser = new ParseStream({ limitRows });
liner.pipe(parser);
+6 -2
@@ -1,5 +1,7 @@
const { getLogger } = require('dbgate-tools');
const fs = require('fs');
const stream = require('stream');
const logger = getLogger('jsonLinesWriter');
class StringifyStream extends stream.Transform {
constructor({ header }) {
@@ -10,7 +12,9 @@ class StringifyStream extends stream.Transform {
_transform(chunk, encoding, done) {
let skip = false;
if (!this.wasHeader) {
skip = (chunk.__isStreamHeader && !this.header) || (chunk.__isStreamHeader && chunk.__isDynamicStructure);
skip =
(chunk.__isStreamHeader && !this.header) ||
(chunk.__isStreamHeader && chunk.__isDynamicStructure && !chunk.__keepDynamicStreamHeader);
this.wasHeader = true;
}
if (!skip) {
@@ -21,7 +25,7 @@ class StringifyStream extends stream.Transform {
}
async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true }) {
console.log(`Writing file ${fileName}`);
logger.info(`Writing file ${fileName}`);
const stringify = new StringifyStream({ header });
const fileStream = fs.createWriteStream(fileName, encoding);
stringify.pipe(fileStream);
+21
@@ -0,0 +1,21 @@
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const { getLogger } = require('dbgate-tools');
const exportDbModel = require('../utility/exportDbModel');
const logger = getLogger('analyseDb');
async function loadDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, outputDir }) {
logger.info(`Analysing database`);
if (!driver) driver = requireEngineDriver(connection);
const pool = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
logger.info(`Connected.`);
const dbInfo = await driver.analyseFull(pool);
logger.info(`Analyse finished`);
await exportDbModel(dbInfo, outputDir);
}
module.exports = loadDatabase;
@@ -0,0 +1,145 @@
const fs = require('fs');
const _ = require('lodash');
const stream = require('stream');
const byline = require('byline');
const { getLogger, processJsonDataUpdateCommands, removeTablePairingId } = require('dbgate-tools');
const logger = getLogger('modifyJsonLinesReader');
const stableStringify = require('json-stable-stringify');
class ParseStream extends stream.Transform {
constructor({ limitRows, changeSet, mergedRows, mergeKey, mergeMode }) {
super({ objectMode: true });
this.limitRows = limitRows;
this.changeSet = changeSet;
this.wasHeader = false;
this.currentRowIndex = 0;
if (mergeMode == 'merge') {
if (mergedRows && mergeKey) {
this.mergedRowsDict = {};
for (const row of mergedRows) {
const key = stableStringify(_.pick(row, mergeKey));
this.mergedRowsDict[key] = row;
}
}
}
this.mergedRowsArray = mergedRows;
this.mergeKey = mergeKey;
this.mergeMode = mergeMode;
}
_transform(chunk, encoding, done) {
let obj = JSON.parse(chunk);
if (obj.__isStreamHeader) {
if (this.changeSet && this.changeSet.structure) {
this.push({
...removeTablePairingId(this.changeSet.structure),
__isStreamHeader: true,
});
} else {
this.push(obj);
}
this.wasHeader = true;
done();
return;
}
if (this.changeSet) {
if (!this.wasHeader && this.changeSet.structure) {
this.push({
...removeTablePairingId(this.changeSet.structure),
__isStreamHeader: true,
});
this.wasHeader = true;
}
if (!this.limitRows || this.currentRowIndex < this.limitRows) {
if (this.changeSet.deletes.find(x => x.existingRowIndex == this.currentRowIndex)) {
obj = null;
}
const update = this.changeSet.updates.find(x => x.existingRowIndex == this.currentRowIndex);
if (update) {
if (update.document) {
obj = update.document;
} else {
obj = {
...obj,
...update.fields,
};
}
}
if (obj) {
if (this.changeSet.dataUpdateCommands) {
obj = processJsonDataUpdateCommands(obj, this.changeSet.dataUpdateCommands);
}
this.push(obj);
}
this.currentRowIndex += 1;
}
} else if (this.mergedRowsArray && this.mergeKey && this.mergeMode) {
if (this.mergeMode == 'merge') {
const key = stableStringify(_.pick(obj, this.mergeKey));
if (this.mergedRowsDict[key]) {
this.push({ ...obj, ...this.mergedRowsDict[key] });
delete this.mergedRowsDict[key];
} else {
this.push(obj);
}
} else if (this.mergeMode == 'append') {
this.push(obj);
}
} else {
this.push(obj);
}
done();
}
_flush(done) {
if (this.changeSet) {
for (const insert of this.changeSet.inserts) {
this.push({
...insert.document,
...insert.fields,
});
}
} else if (this.mergedRowsArray && this.mergeKey) {
if (this.mergeMode == 'merge') {
for (const row of this.mergedRowsArray) {
const key = stableStringify(_.pick(row, this.mergeKey));
if (this.mergedRowsDict[key]) {
this.push(row);
}
}
} else {
for (const row of this.mergedRowsArray) {
this.push(row);
}
}
}
done();
}
}
async function modifyJsonLinesReader({
fileName,
encoding = 'utf-8',
limitRows = undefined,
changeSet = null,
mergedRows = null,
mergeKey = null,
mergeMode = 'merge',
}) {
logger.info(`Reading file ${fileName} with change set`);
const fileStream = fs.createReadStream(
fileName,
// @ts-ignore
encoding
);
const liner = byline(fileStream);
const parser = new ParseStream({ limitRows, changeSet, mergedRows, mergeKey, mergeMode });
liner.pipe(parser);
return parser;
}
module.exports = modifyJsonLinesReader;
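Not part of the diff: a hedged usage sketch of modifyJsonLinesReader in merge mode. File names and rows are illustrative, and copyStream/jsonLinesWriter are assumed to be exposed on dbgate-api like the other shell helpers.

const dbgateApi = require('dbgate-api');

async function run() {
  const reader = await dbgateApi.modifyJsonLinesReader({
    fileName: 'customers.jsonl', // illustrative path
    mergeKey: ['id'],            // rows are matched on these fields
    mergeMode: 'merge',          // 'append' would simply emit mergedRows after the file rows
    mergedRows: [
      { id: 1, email: 'new@example.com' },    // overwrites fields of the matching file row
      { id: 999, email: 'extra@example.com' } // no match in the file, emitted from _flush
    ],
  });
  const writer = await dbgateApi.jsonLinesWriter({ fileName: 'customers-merged.jsonl' });
  await dbgateApi.copyStream(reader, writer);
}

dbgateApi.runScript(run);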
+4 -2
@@ -1,5 +1,7 @@
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('queryReader');
async function queryReader({
connection,
@@ -14,12 +16,12 @@ async function queryReader({
// if (!sql && !json) {
// throw new Error('One of sql or json must be set');
// }
console.log(`Reading query ${query || sql}`);
logger.info({ sql: query || sql }, `Reading query`);
// else console.log(`Reading query ${JSON.stringify(json)}`);
const driver = requireEngineDriver(connection);
const pool = await connectUtility(driver, connection, queryType == 'json' ? 'read' : 'script');
console.log(`Connected.`);
logger.info(`Connected.`);
const reader =
queryType == 'json' ? await driver.readJsonQuery(pool, query) : await driver.readQuery(pool, query || sql);
return reader;
+3 -1
@@ -3,6 +3,8 @@ const fs = require('fs');
const { pluginsdir, packagedPluginsDir, getPluginBackendPath } = require('../utility/directories');
const nativeModules = require('../nativeModules');
const platformInfo = require('../utility/platformInfo');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('requirePlugin');
const loadedPlugins = {};
@@ -17,7 +19,7 @@ function requirePlugin(packageName, requiredPlugin = null) {
if (requiredPlugin == null) {
let module;
const modulePath = getPluginBackendPath(packageName);
console.log(`Loading module ${packageName} from ${modulePath}`);
logger.info(`Loading module ${packageName} from ${modulePath}`);
try {
// @ts-ignore
module = __non_webpack_require__(modulePath);
+3 -1
@@ -1,5 +1,7 @@
const { getLogger } = require('dbgate-tools');
const childProcessChecker = require('../utility/childProcessChecker');
const processArgs = require('../utility/processArgs');
const logger = getLogger();
async function runScript(func) {
if (processArgs.checkParent) {
@@ -9,7 +11,7 @@ async function runScript(func) {
await func();
process.exit(0);
} catch (err) {
console.log(err);
logger.error({ err }, `Error running script: ${err.message}`);
process.exit(1);
}
}
+3 -2
@@ -1,8 +1,9 @@
const fs = require('fs');
const stream = require('stream');
const path = require('path');
const { driverBase } = require('dbgate-tools');
const { driverBase, getLogger } = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
const logger = getLogger('sqlDataWriter');
class SqlizeStream extends stream.Transform {
constructor({ fileName, dataName }) {
@@ -40,7 +41,7 @@ class SqlizeStream extends stream.Transform {
}
async function sqlDataWriter({ fileName, dataName, driver, encoding = 'utf-8' }) {
console.log(`Writing file ${fileName}`);
logger.info(`Writing file ${fileName}`);
const stringify = new SqlizeStream({ fileName, dataName });
const fileStream = fs.createWriteStream(fileName, encoding);
stringify.pipe(fileStream);
+6 -5
@@ -1,17 +1,18 @@
const { quoteFullName, fullNameToString } = require('dbgate-tools');
const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const logger = getLogger('tableReader');
async function tableReader({ connection, pureName, schemaName }) {
const driver = requireEngineDriver(connection);
const pool = await connectUtility(driver, connection, 'read');
console.log(`Connected.`);
logger.info(`Connected.`);
const fullName = { pureName, schemaName };
if (driver.databaseEngineTypes.includes('document')) {
// @ts-ignore
console.log(`Reading collection ${fullNameToString(fullName)}`);
logger.info(`Reading collection ${fullNameToString(fullName)}`);
// @ts-ignore
return await driver.readQuery(pool, JSON.stringify(fullName));
}
@@ -20,14 +21,14 @@ async function tableReader({ connection, pureName, schemaName }) {
const query = `select * from ${quoteFullName(driver.dialect, fullName)}`;
if (table) {
// @ts-ignore
console.log(`Reading table ${fullNameToString(table)}`);
logger.info(`Reading table ${fullNameToString(table)}`);
// @ts-ignore
return await driver.readQuery(pool, query, table);
}
const view = await driver.analyseSingleObject(pool, fullName, 'views');
if (view) {
// @ts-ignore
console.log(`Reading view ${fullNameToString(view)}`);
logger.info(`Reading view ${fullNameToString(view)}`);
// @ts-ignore
return await driver.readQuery(pool, query, view);
}
+4 -3
@@ -1,16 +1,17 @@
const { fullNameToString } = require('dbgate-tools');
const { fullNameToString, getLogger } = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const logger = getLogger('tableWriter');
async function tableWriter({ connection, schemaName, pureName, driver, systemConnection, ...options }) {
console.log(`Writing table ${fullNameToString({ schemaName, pureName })}`);
logger.info(`Writing table ${fullNameToString({ schemaName, pureName })}`);
if (!driver) {
driver = requireEngineDriver(connection);
}
const pool = systemConnection || (await connectUtility(driver, connection, 'write'));
console.log(`Connected.`);
logger.info(`Connected.`);
return await driver.writeTable(pool, { schemaName, pureName }, options);
}
+29 -9
@@ -2,6 +2,9 @@ const { fork } = require('child_process');
const uuidv1 = require('uuid/v1');
const { handleProcessCommunication } = require('./processComm');
const processArgs = require('../utility/processArgs');
const pipeForkLogs = require('./pipeForkLogs');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('DatastoreProxy');
class DatastoreProxy {
constructor(file) {
@@ -30,13 +33,20 @@ class DatastoreProxy {
async ensureSubprocess() {
if (!this.subprocess) {
this.subprocess = fork(global['API_PACKAGE'] || process.argv[1], [
'--is-forked-api',
'--start-process',
'jslDatastoreProcess',
...processArgs.getPassArgs(),
// ...process.argv.slice(3),
]);
this.subprocess = fork(
global['API_PACKAGE'] || process.argv[1],
[
'--is-forked-api',
'--start-process',
'jslDatastoreProcess',
...processArgs.getPassArgs(),
// ...process.argv.slice(3),
],
{
stdio: ['ignore', 'pipe', 'pipe', 'ipc'],
}
);
pipeForkLogs(this.subprocess);
this.subprocess.on('message', message => {
// @ts-ignore
@@ -60,7 +70,12 @@ class DatastoreProxy {
const msgid = uuidv1();
const promise = new Promise((resolve, reject) => {
this.requests[msgid] = [resolve, reject];
this.subprocess.send({ msgtype: 'read', msgid, offset, limit });
try {
this.subprocess.send({ msgtype: 'read', msgid, offset, limit });
} catch (err) {
logger.error({ err }, 'Error getting rows');
this.subprocess = null;
}
});
return promise;
}
@@ -69,7 +84,12 @@ class DatastoreProxy {
const msgid = uuidv1();
const promise = new Promise((resolve, reject) => {
this.requests[msgid] = [resolve, reject];
this.subprocess.send({ msgtype: 'notify', msgid });
try {
this.subprocess.send({ msgtype: 'notify', msgid });
} catch (err) {
logger.error({ err }, 'Error notifying subprocess');
this.subprocess = null;
}
});
return promise;
}
@@ -90,6 +90,12 @@ class JsonLinesDatabase {
return obj;
}
async updateAll(mapFunction) {
await this._ensureLoaded();
this.data = this.data.map(mapFunction);
await this._save();
}
async patch(id, values) {
await this._ensureLoaded();
this.data = this.data.map(x => (x._id == id ? { ...x, ...values } : x));
+68 -33
@@ -1,26 +1,16 @@
const lineReader = require('line-reader');
const fs = require('fs');
const os = require('os');
const rimraf = require('rimraf');
const path = require('path');
const AsyncLock = require('async-lock');
const lock = new AsyncLock();
const stableStringify = require('json-stable-stringify');
const { evaluateCondition } = require('dbgate-sqltree');
const requirePluginFunction = require('./requirePluginFunction');
function fetchNextLineFromReader(reader) {
return new Promise((resolve, reject) => {
if (!reader.hasNextLine()) {
resolve(null);
return;
}
reader.nextLine((err, line) => {
if (err) {
reject(err);
} else {
resolve(line);
}
});
});
}
const esort = require('external-sorting');
const uuidv1 = require('uuid/v1');
const { jsldir } = require('./directories');
const LineReader = require('./LineReader');
class JsonLinesDatastore {
constructor(file, formatterFunction) {
@@ -32,10 +22,43 @@ class JsonLinesDatastore {
// this.firstRowToBeReturned = null;
this.notifyChangedCallback = null;
this.currentFilter = null;
this.currentSort = null;
this.rowFormatter = requirePluginFunction(formatterFunction);
this.sortedFiles = {};
}
_closeReader() {
static async sortFile(infile, outfile, sort) {
const tempDir = path.join(os.tmpdir(), uuidv1());
fs.mkdirSync(tempDir);
await esort
.default({
input: fs.createReadStream(infile),
output: fs.createWriteStream(outfile),
deserializer: JSON.parse,
serializer: JSON.stringify,
tempDir,
maxHeap: 100,
comparer: (a, b) => {
for (const item of sort) {
const { uniqueName, order } = item;
if (a[uniqueName] < b[uniqueName]) {
return order == 'ASC' ? -1 : 1;
}
if (a[uniqueName] > b[uniqueName]) {
return order == 'ASC' ? 1 : -1;
}
}
return 0;
},
})
.asc();
await new Promise(resolve => rimraf(tempDir, resolve));
}
async _closeReader() {
// console.log('CLOSING READER', this.reader);
if (!this.reader) return;
const reader = this.reader;
this.reader = null;
@@ -43,7 +66,8 @@ class JsonLinesDatastore {
this.readedSchemaRow = false;
// this.firstRowToBeReturned = null;
this.currentFilter = null;
reader.close(() => {});
this.currentSort = null;
await reader.close();
}
async notifyChanged(callback) {
@@ -56,13 +80,12 @@ class JsonLinesDatastore {
if (call) call();
}
async _openReader() {
return new Promise((resolve, reject) =>
lineReader.open(this.file, (err, reader) => {
if (err) reject(err);
resolve(reader);
})
);
async _openReader(fileName) {
// console.log('OPENING READER', fileName);
// console.log(fs.readFileSync(fileName, 'utf-8'));
const fileStream = fs.createReadStream(fileName);
return new LineReader(fileStream);
}
parseLine(line) {
@@ -77,7 +100,7 @@ class JsonLinesDatastore {
// return res;
// }
for (;;) {
const line = await fetchNextLineFromReader(this.reader);
const line = await this.reader.readLine();
if (!line) {
// EOF
return null;
@@ -140,14 +163,19 @@ class JsonLinesDatastore {
// });
}
async _ensureReader(offset, filter) {
if (this.readedDataRowCount > offset || stableStringify(filter) != stableStringify(this.currentFilter)) {
async _ensureReader(offset, filter, sort) {
if (
this.readedDataRowCount > offset ||
stableStringify(filter) != stableStringify(this.currentFilter) ||
stableStringify(sort) != stableStringify(this.currentSort)
) {
this._closeReader();
}
if (!this.reader) {
const reader = await this._openReader();
const reader = await this._openReader(sort ? this.sortedFiles[stableStringify(sort)] : this.file);
this.reader = reader;
this.currentFilter = filter;
this.currentSort = sort;
}
// if (!this.readedSchemaRow) {
// const line = await this._readLine(true); // skip structure
@@ -179,13 +207,20 @@ class JsonLinesDatastore {
});
}
async getRows(offset, limit, filter) {
async getRows(offset, limit, filter, sort) {
const res = [];
if (sort && !this.sortedFiles[stableStringify(sort)]) {
const jslid = uuidv1();
const sortedFile = path.join(jsldir(), `${jslid}.jsonl`);
await JsonLinesDatastore.sortFile(this.file, sortedFile, sort);
this.sortedFiles[stableStringify(sort)] = sortedFile;
}
await lock.acquire('reader', async () => {
await this._ensureReader(offset, filter);
await this._ensureReader(offset, filter, sort);
// console.log(JSON.stringify(this.currentFilter, undefined, 2));
for (let i = 0; i < limit; i += 1) {
const line = await this._readLine(true);
// console.log('READED LINE', i);
if (line == null) break;
res.push(line);
}
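Not part of the diff: a hedged sketch of driving the new sort support in JsonLinesDatastore. The require path and the omitted formatterFunction are assumptions; the sort descriptor shape matches the comparer in sortFile above.

const JsonLinesDatastore = require('./JsonLinesDatastore'); // path assumed

async function readSortedPage(file) {
  const datastore = new JsonLinesDatastore(file);
  const sort = [
    { uniqueName: 'country', order: 'ASC' },
    { uniqueName: 'id', order: 'DESC' },
  ];
  // the first call with this descriptor pays for the external sort;
  // later calls with an identical descriptor reuse the cached sorted file
  return datastore.getRows(0, 100, null, sort);
}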
+88
@@ -0,0 +1,88 @@
const readline = require('readline');
class Queue {
constructor() {
this.elements = {};
this.head = 0;
this.tail = 0;
}
enqueue(element) {
this.elements[this.tail] = element;
this.tail++;
}
dequeue() {
const item = this.elements[this.head];
delete this.elements[this.head];
this.head++;
return item;
}
peek() {
return this.elements[this.head];
}
getLength() {
return this.tail - this.head;
}
isEmpty() {
return this.getLength() === 0;
}
}
class LineReader {
constructor(input) {
this.input = input;
this.queue = new Queue();
this.resolve = null;
this.isEnded = false;
this.rl = readline.createInterface({
input,
});
this.input.pause();
this.rl.on('line', line => {
this.input.pause();
if (this.resolve) {
const resolve = this.resolve;
this.resolve = null;
resolve(line);
return;
}
this.queue.enqueue(line);
});
this.rl.on('close', () => {
if (this.resolve) {
const resolve = this.resolve;
this.resolve = null;
this.isEnded = true;
resolve(null);
return;
}
this.queue.enqueue(null);
});
}
readLine() {
if (this.isEnded) {
return Promise.resolve(null);
}
if (!this.queue.isEmpty()) {
const res = this.queue.dequeue();
if (res == null) this.isEnded = true;
return Promise.resolve(res);
}
this.input.resume();
return new Promise(resolve => {
this.resolve = resolve;
});
}
close() {
this.isEnded = true;
return new Promise(resolve => this.input.close(resolve));
}
}
module.exports = LineReader;
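Not part of the diff: a small usage sketch of the new LineReader utility; the require path is assumed to match the utility folder shown in the diff.

const fs = require('fs');
const LineReader = require('./LineReader'); // path assumed

async function printAllLines(fileName) {
  const reader = new LineReader(fs.createReadStream(fileName));
  for (;;) {
    const line = await reader.readLine(); // resolves null at end of input
    if (line == null) break;
    console.log(line);
  }
}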
@@ -1,14 +1,18 @@
const { getLogger } = require('dbgate-tools');
const logger = getLogger('childProcessChecked');
let counter = 0;
function childProcessChecker() {
setInterval(() => {
try {
process.send({ msgtype: 'ping', counter: counter++ });
} catch (ex) {
} catch (err) {
// This will come once parent dies.
// One way can be to check for error code ERR_IPC_CHANNEL_CLOSED
// and call process.exit()
console.log('parent died', ex.toString());
logger.error({ err }, 'parent died');
process.exit(1);
}
}, 1000);
+2 -2
@@ -2,7 +2,7 @@ const fs = require('fs-extra');
const path = require('path');
const ageSeconds = 3600;
async function cleanDirectory(directory) {
async function cleanDirectory(directory, age = undefined) {
const files = await fs.readdir(directory);
const now = new Date().getTime();
@@ -10,7 +10,7 @@ async function cleanDirectory(directory) {
const full = path.join(directory, file);
const stat = await fs.stat(full);
const mtime = stat.mtime.getTime();
const expirationTime = mtime + ageSeconds * 1000;
const expirationTime = mtime + (age || ageSeconds) * 1000;
if (now > expirationTime) {
if (stat.isDirectory()) {
await fs.rmdir(full, { recursive: true });
@@ -62,14 +62,17 @@ async function connectUtility(driver, storedConnection, connectionMode, addition
if (connection.sslCaFile) {
connection.ssl.ca = await fs.readFile(connection.sslCaFile);
connection.ssl.sslCaFile = connection.sslCaFile;
}
if (connection.sslCertFile) {
connection.ssl.cert = await fs.readFile(connection.sslCertFile);
connection.ssl.sslCertFile = connection.sslCertFile;
}
if (connection.sslKeyFile) {
connection.ssl.key = await fs.readFile(connection.sslKeyFile);
connection.ssl.sslKeyFile = connection.sslKeyFile;
}
if (connection.sslCertFilePassword) {
+32 -11
@@ -1,20 +1,24 @@
const os = require('os');
const path = require('path');
const fs = require('fs');
const _ = require('lodash');
const cleanDirectory = require('./cleanDirectory');
const platformInfo = require('./platformInfo');
const processArgs = require('./processArgs');
const consoleObjectWriter = require('../shell/consoleObjectWriter');
const { getLogger } = require('dbgate-tools');
let logsFilePath;
const createDirectories = {};
const ensureDirectory = (dir, clean) => {
if (!createDirectories[dir]) {
if (clean && fs.existsSync(dir) && !platformInfo.isForkedApi) {
console.log(`Cleaning directory ${dir}`);
cleanDirectory(dir);
getLogger('directories').info(`Cleaning directory ${dir}`);
cleanDirectory(dir, _.isNumber(clean) ? clean : null);
}
if (!fs.existsSync(dir)) {
console.log(`Creating directory ${dir}`);
getLogger('directories').info(`Creating directory ${dir}`);
fs.mkdirSync(dir);
}
createDirectories[dir] = true;
@@ -38,20 +42,26 @@ function datadir() {
return dir;
}
const dirFunc = (dirname, clean = false) => () => {
const dir = path.join(datadir(), dirname);
ensureDirectory(dir, clean);
const dirFunc =
(dirname, clean, subdirs = []) =>
() => {
const dir = path.join(datadir(), dirname);
ensureDirectory(dir, clean);
for (const subdir of subdirs) {
ensureDirectory(path.join(dir, subdir), false);
}
return dir;
};
return dir;
};
const jsldir = dirFunc('jsl', true);
const rundir = dirFunc('run', true);
const uploadsdir = dirFunc('uploads', true);
const pluginsdir = dirFunc('plugins');
const archivedir = dirFunc('archive');
const archivedir = dirFunc('archive', false, ['default']);
const appdir = dirFunc('apps');
const filesdir = dirFunc('files');
const logsdir = dirFunc('logs', 3600 * 24 * 7);
function packagedPluginsDir() {
// console.log('CALL DIR FROM', new Error('xxx').stack);
@@ -127,11 +137,19 @@ function migrateDataDir() {
if (fs.existsSync(oldDir) && !fs.existsSync(newDir)) {
fs.renameSync(oldDir, newDir);
}
} catch (e) {
console.log('Error migrating data dir:', e.message);
} catch (err) {
getLogger('directories').error({ err }, 'Error migrating data dir');
}
}
function setLogsFilePath(value) {
logsFilePath = value;
}
function getLogsFilePath() {
return logsFilePath;
}
migrateDataDir();
module.exports = {
@@ -144,9 +162,12 @@ module.exports = {
ensureDirectory,
pluginsdir,
filesdir,
logsdir,
packagedPluginsDir,
packagedPluginList,
getPluginBackendPath,
resolveArchiveFolder,
clearArchiveLinksCache,
getLogsFilePath,
setLogsFilePath,
};
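Not part of the diff: a short note on the retention change. A numeric clean value passed to dirFunc, as for the new logs directory above, reaches cleanDirectory as the age in seconds; a minimal sketch with an illustrative helper follows.

const cleanDirectory = require('./cleanDirectory'); // same helper the diff extends

// keep one week of files in a directory, mirroring logsdir = dirFunc('logs', 3600 * 24 * 7)
async function pruneLogs(directory) {
  await cleanDirectory(directory, 3600 * 24 * 7);
}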
@@ -1,15 +0,0 @@
const fs = require('fs-extra');
async function saveFreeTableData(file, data) {
const { structure, rows } = data;
const fileStream = fs.createWriteStream(file);
await fileStream.write(JSON.stringify({ __isStreamHeader: true, ...structure }) + '\n');
for (const row of rows) {
await fileStream.write(JSON.stringify(row) + '\n');
}
await fileStream.close();
}
module.exports = {
saveFreeTableData,
};
+19
@@ -0,0 +1,19 @@
const byline = require('byline');
const { safeJsonParse, getLogger } = require('dbgate-tools');
const logger = getLogger();
const logDispatcher = method => data => {
const json = safeJsonParse(data.toString());
if (json && json.level) {
logger.log(json);
} else {
logger[method](json || data.toString());
}
};
function pipeForkLogs(subprocess) {
byline(subprocess.stdout).on('data', logDispatcher('info'));
byline(subprocess.stderr).on('data', logDispatcher('error'));
}
module.exports = pipeForkLogs;
+2
@@ -11,6 +11,7 @@ const startProcess = getNamedArg('--start-process');
const isForkedApi = process.argv.includes('--is-forked-api');
const pluginsDir = getNamedArg('--plugins-dir');
const workspaceDir = getNamedArg('--workspace-dir');
const processDisplayName = getNamedArg('--process-display-name');
const listenApi = process.argv.includes('--listen-api');
const listenApiChild = process.argv.includes('--listen-api-child') || listenApi;
@@ -37,4 +38,5 @@ module.exports = {
workspaceDir,
listenApi,
listenApiChild,
processDisplayName,
};
+23 -14
@@ -5,6 +5,9 @@ const AsyncLock = require('async-lock');
const lock = new AsyncLock();
const { fork } = require('child_process');
const processArgs = require('../utility/processArgs');
const { getLogger } = require('dbgate-tools');
const pipeForkLogs = require('./pipeForkLogs');
const logger = getLogger('sshTunnel');
const sshTunnelCache = {};
@@ -21,18 +24,24 @@ const CONNECTION_FIELDS = [
const TUNNEL_FIELDS = [...CONNECTION_FIELDS, 'server', 'port'];
function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
let subprocess = fork(global['API_PACKAGE'] || process.argv[1], [
'--is-forked-api',
'--start-process',
'sshForwardProcess',
...processArgs.getPassArgs(),
]);
let subprocess = fork(
global['API_PACKAGE'] || process.argv[1],
['--is-forked-api', '--start-process', 'sshForwardProcess', ...processArgs.getPassArgs()],
{
stdio: ['ignore', 'pipe', 'pipe', 'ipc'],
}
);
pipeForkLogs(subprocess);
subprocess.send({
msgtype: 'connect',
connection,
tunnelConfig,
});
try {
subprocess.send({
msgtype: 'connect',
connection,
tunnelConfig,
});
} catch (err) {
logger.error({ err }, 'Error connecting SSH');
}
return new Promise((resolve, reject) => {
subprocess.on('message', resp => {
// @ts-ignore
@@ -45,7 +54,7 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
}
});
subprocess.on('exit', code => {
console.log('SSH forward process exited');
logger.info('SSH forward process exited');
delete sshTunnelCache[tunnelCacheKey];
});
});
@@ -65,13 +74,13 @@ async function getSshTunnel(connection) {
toHost: connection.server,
};
try {
console.log(
logger.info(
`Creating SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
);
const subprocess = await callForwardProcess(connection, tunnelConfig, tunnelCacheKey);
console.log(
logger.info(
`Created SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
);
+7 -1
@@ -1,11 +1,17 @@
const { getLogger } = require('dbgate-tools');
const uuidv1 = require('uuid/v1');
const { getSshTunnel } = require('./sshTunnel');
const logger = getLogger('sshTunnelProxy');
const dispatchedMessages = {};
async function handleGetSshTunnelRequest({ msgid, connection }, subprocess) {
const response = await getSshTunnel(connection);
subprocess.send({ msgtype: 'getsshtunnel-response', msgid, response });
try {
subprocess.send({ msgtype: 'getsshtunnel-response', msgid, response });
} catch (err) {
logger.error({ err }, 'Error sending to SSH tunnel');
}
}
function handleGetSshTunnelResponse({ msgid, response }, subprocess) {
+9 -7
@@ -2,7 +2,9 @@ const _ = require('lodash');
const express = require('express');
const getExpressPath = require('./getExpressPath');
const { MissingCredentialsError } = require('./exceptions');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('useController');
/**
* @param {string} route
*/
@@ -10,11 +12,11 @@ module.exports = function useController(app, electron, route, controller) {
const router = express.Router();
if (controller._init) {
console.log(`Calling init controller for controller ${route}`);
logger.info(`Calling _init for controller ${route}`);
try {
controller._init();
} catch (err) {
console.log(`Error initializing controller, exiting application`, err);
logger.error({ err }, `Error initializing controller, exiting application`);
process.exit(1);
}
}
@@ -75,16 +77,16 @@ module.exports = function useController(app, electron, route, controller) {
try {
const data = await controller[key]({ ...req.body, ...req.query }, req);
res.json(data);
} catch (e) {
console.log(e);
if (e instanceof MissingCredentialsError) {
} catch (err) {
logger.error({ err }, `Error when processing route ${route}/${key}`);
if (err instanceof MissingCredentialsError) {
res.json({
missingCredentials: true,
apiErrorMessage: 'Missing credentials',
detail: e.detail,
detail: err.detail,
});
} else {
res.status(500).json({ apiErrorMessage: e.message });
res.status(500).json({ apiErrorMessage: err.message });
}
}
});
+31 -8
@@ -9,23 +9,31 @@ import {
AllowIdentityInsert,
Expression,
} from 'dbgate-sqltree';
import type { NamedObjectInfo, DatabaseInfo } from 'dbgate-types';
import type { NamedObjectInfo, DatabaseInfo, TableInfo } from 'dbgate-types';
import { JsonDataObjectUpdateCommand } from 'dbgate-tools';
export interface ChangeSetItem {
pureName: string;
schemaName?: string;
insertedRowIndex?: number;
existingRowIndex?: number;
document?: any;
condition?: { [column: string]: string };
fields?: { [column: string]: string };
}
export interface ChangeSet {
export interface ChangeSetItemFields {
inserts: ChangeSetItem[];
updates: ChangeSetItem[];
deletes: ChangeSetItem[];
}
export interface ChangeSet extends ChangeSetItemFields {
structure?: TableInfo;
dataUpdateCommands?: JsonDataObjectUpdateCommand[];
setColumnMode?: 'fixed' | 'variable';
}
export function createChangeSet(): ChangeSet {
return {
inserts: [],
@@ -38,6 +46,7 @@ export interface ChangeSetRowDefinition {
pureName: string;
schemaName: string;
insertedRowIndex?: number;
existingRowIndex?: number;
condition?: { [column: string]: string };
}
@@ -49,7 +58,7 @@ export interface ChangeSetFieldDefinition extends ChangeSetRowDefinition {
export function findExistingChangeSetItem(
changeSet: ChangeSet,
definition: ChangeSetRowDefinition
): [keyof ChangeSet, ChangeSetItem] {
): [keyof ChangeSetItemFields, ChangeSetItem] {
if (!changeSet || !definition) return ['updates', null];
if (definition.insertedRowIndex != null) {
return [
@@ -66,7 +75,8 @@ export function findExistingChangeSetItem(
x =>
x.pureName == definition.pureName &&
x.schemaName == definition.schemaName &&
_.isEqual(x.condition, definition.condition)
((definition.existingRowIndex != null && x.existingRowIndex == definition.existingRowIndex) ||
(definition.existingRowIndex == null && _.isEqual(x.condition, definition.condition)))
);
if (inUpdates) return ['updates', inUpdates];
@@ -74,7 +84,8 @@ export function findExistingChangeSetItem(
x =>
x.pureName == definition.pureName &&
x.schemaName == definition.schemaName &&
_.isEqual(x.condition, definition.condition)
((definition.existingRowIndex != null && x.existingRowIndex == definition.existingRowIndex) ||
(definition.existingRowIndex == null && _.isEqual(x.condition, definition.condition)))
);
if (inDeletes) return ['deletes', inDeletes];
@@ -119,6 +130,7 @@ export function setChangeSetValue(
schemaName: definition.schemaName,
condition: definition.condition,
insertedRowIndex: definition.insertedRowIndex,
existingRowIndex: definition.existingRowIndex,
fields: {
[definition.uniqueName]: value,
},
@@ -162,6 +174,7 @@ export function setChangeSetRowData(
schemaName: definition.schemaName,
condition: definition.condition,
insertedRowIndex: definition.insertedRowIndex,
existingRowIndex: definition.existingRowIndex,
document,
},
],
@@ -395,6 +408,7 @@ export function deleteChangeSetRows(changeSet: ChangeSet, definition: ChangeSetR
pureName: definition.pureName,
schemaName: definition.schemaName,
condition: definition.condition,
existingRowIndex: definition.existingRowIndex,
},
],
};
@@ -402,9 +416,11 @@ export function deleteChangeSetRows(changeSet: ChangeSet, definition: ChangeSetR
}
export function getChangeSetInsertedRows(changeSet: ChangeSet, name?: NamedObjectInfo) {
if (!name) return [];
// if (!name) return [];
if (!changeSet) return [];
const rows = changeSet.inserts.filter(x => x.pureName == name.pureName && x.schemaName == name.schemaName);
const rows = changeSet.inserts.filter(
x => name == null || (x.pureName == name.pureName && x.schemaName == name.schemaName)
);
const maxIndex = _.maxBy(rows, x => x.insertedRowIndex)?.insertedRowIndex;
if (maxIndex == null) return [];
const res = Array(maxIndex + 1).fill({});
@@ -447,5 +463,12 @@ export function changeSetInsertDocuments(changeSet: ChangeSet, documents: any[],
export function changeSetContainsChanges(changeSet: ChangeSet) {
if (!changeSet) return false;
return changeSet.deletes.length > 0 || changeSet.updates.length > 0 || changeSet.inserts.length > 0;
return (
changeSet.deletes.length > 0 ||
changeSet.updates.length > 0 ||
changeSet.inserts.length > 0 ||
!!changeSet.structure ||
!!changeSet.setColumnMode ||
changeSet.dataUpdateCommands?.length > 0
);
}
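A minimal sketch of how the extended change-set API above fits together; the import path is assumed (these modules live in dbgate's datalib package) and the table, schema and column names are illustrative:

```ts
// assumed import path; only functions visible in the diff above are used
import { createChangeSet, setChangeSetValue, changeSetContainsChanges } from 'dbgate-datalib';

let changeSet = createChangeSet();

// update the Title column of the first loaded row of table Album,
// addressed by the new existingRowIndex instead of a key condition
changeSet = setChangeSetValue(
  changeSet,
  {
    pureName: 'Album',
    schemaName: 'public',
    existingRowIndex: 0,
    uniqueName: 'Title',
    columnName: 'Title',
  },
  'New title'
);

// updates, structure edits, setColumnMode and dataUpdateCommands all count as changes now
changeSetContainsChanges(changeSet); // true
```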
+270
View File
@@ -0,0 +1,270 @@
import { createAsyncWriteStream, getLogger, runCommandOnDriver, runQueryOnDriver } from 'dbgate-tools';
import { DatabaseInfo, EngineDriver, ForeignKeyInfo, TableInfo } from 'dbgate-types';
import _pick from 'lodash/pick';
import _omit from 'lodash/omit';
const logger = getLogger('dataDuplicator');
export interface DataDuplicatorItem {
openStream: () => Promise<ReadableStream>;
name: string;
operation: 'copy' | 'lookup' | 'insertMissing';
matchColumns: string[];
}
export interface DataDuplicatorOptions {
rollbackAfterFinish?: boolean;
skipRowsWithUnresolvedRefs?: boolean;
}
class DuplicatorReference {
constructor(
public base: DuplicatorItemHolder,
public ref: DuplicatorItemHolder,
public isMandatory: boolean,
public foreignKey: ForeignKeyInfo
) {}
get columnName() {
return this.foreignKey.columns[0].columnName;
}
}
class DuplicatorItemHolder {
references: DuplicatorReference[] = [];
backReferences: DuplicatorReference[] = [];
table: TableInfo;
isPlanned = false;
idMap = {};
autoColumn: string;
refByColumn: { [columnName: string]: DuplicatorReference } = {};
isReferenced: boolean;
get name() {
return this.item.name;
}
constructor(public item: DataDuplicatorItem, public duplicator: DataDuplicator) {
this.table = duplicator.db.tables.find(x => x.pureName.toUpperCase() == item.name.toUpperCase());
this.autoColumn = this.table.columns.find(x => x.autoIncrement)?.columnName;
if (
this.table.primaryKey?.columns?.length != 1 ||
this.table.primaryKey?.columns?.[0].columnName != this.autoColumn
) {
this.autoColumn = null;
}
}
initializeReferences() {
for (const fk of this.table.foreignKeys) {
if (fk.columns?.length != 1) continue;
const refHolder = this.duplicator.itemHolders.find(y => y.name.toUpperCase() == fk.refTableName.toUpperCase());
if (refHolder == null) continue;
const isMandatory = this.table.columns.find(x => x.columnName == fk.columns[0]?.columnName)?.notNull;
const newref = new DuplicatorReference(this, refHolder, isMandatory, fk);
this.references.push(newref);
this.refByColumn[newref.columnName] = newref;
refHolder.isReferenced = true;
}
}
createInsertObject(chunk) {
const res = _omit(
_pick(
chunk,
this.table.columns.map(x => x.columnName)
),
[this.autoColumn, ...this.backReferences.map(x => x.columnName)]
);
for (const key in res) {
const ref = this.refByColumn[key];
if (ref) {
// remap id
res[key] = ref.ref.idMap[res[key]];
if (ref.isMandatory && res[key] == null) {
// mandatory reference not matched
if (this.duplicator.options.skipRowsWithUnresolvedRefs) {
return null;
}
throw new Error(`Unresolved reference, base=${ref.base.name}, ref=${ref.ref.name}, ${key}=${chunk[key]}`);
}
}
}
return res;
}
async runImport() {
const readStream = await this.item.openStream();
const driver = this.duplicator.driver;
const pool = this.duplicator.pool;
let inserted = 0;
let mapped = 0;
let missing = 0;
let skipped = 0;
let lastLogged = new Date();
const writeStream = createAsyncWriteStream(this.duplicator.stream, {
processItem: async chunk => {
if (chunk.__isStreamHeader) {
return;
}
const doCopy = async () => {
// console.log('chunk', this.name, JSON.stringify(chunk));
const insertedObj = this.createInsertObject(chunk);
// console.log('insertedObj', this.name, JSON.stringify(insertedObj));
if (insertedObj == null) {
skipped += 1;
return;
}
let res = await runQueryOnDriver(pool, driver, dmp => {
dmp.put(
'^insert ^into %f (%,i) ^values (%,v)',
this.table,
Object.keys(insertedObj),
Object.values(insertedObj)
);
if (
this.autoColumn &&
this.isReferenced &&
!this.duplicator.driver.dialect.requireStandaloneSelectForScopeIdentity
) {
dmp.selectScopeIdentity(this.table);
}
});
inserted += 1;
if (this.autoColumn && this.isReferenced) {
if (this.duplicator.driver.dialect.requireStandaloneSelectForScopeIdentity) {
res = await runQueryOnDriver(pool, driver, dmp => dmp.selectScopeIdentity(this.table));
}
// console.log('IDRES', JSON.stringify(res));
const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
if (resId != null) {
this.idMap[chunk[this.autoColumn]] = resId;
}
}
};
switch (this.item.operation) {
case 'copy': {
await doCopy();
break;
}
case 'insertMissing':
case 'lookup': {
const res = await runQueryOnDriver(pool, driver, dmp =>
dmp.put(
'^select %i ^from %f ^where %i = %v',
this.autoColumn,
this.table,
this.item.matchColumns[0],
chunk[this.item.matchColumns[0]]
)
);
const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
if (resId != null) {
mapped += 1;
this.idMap[chunk[this.autoColumn]] = resId;
} else if (this.item.operation == 'insertMissing') {
await doCopy();
} else {
missing += 1;
}
break;
}
}
if (new Date().getTime() - lastLogged.getTime() > 5000) {
logger.info(
`Duplicating ${this.item.name} in progress, inserted ${inserted} rows, mapped ${mapped} rows, missing ${missing} rows, skipped ${skipped} rows`
);
lastLogged = new Date();
}
// this.idMap[oldId] = newId;
},
});
await this.duplicator.copyStream(readStream, writeStream);
// await this.duplicator.driver.writeQueryStream(this.duplicator.pool, {
// mapResultId: (oldId, newId) => {
// this.idMap[oldId] = newId;
// },
// });
return { inserted, mapped, missing, skipped };
}
}
export class DataDuplicator {
itemHolders: DuplicatorItemHolder[];
itemPlan: DuplicatorItemHolder[] = [];
constructor(
public pool: any,
public driver: EngineDriver,
public db: DatabaseInfo,
public items: DataDuplicatorItem[],
public stream,
public copyStream: (input, output) => Promise<void>,
public options: DataDuplicatorOptions = {}
) {
this.itemHolders = items.map(x => new DuplicatorItemHolder(x, this));
this.itemHolders.forEach(x => x.initializeReferences());
}
findItemToPlan(): DuplicatorItemHolder {
for (const item of this.itemHolders) {
if (item.isPlanned) continue;
if (item.references.every(x => x.ref.isPlanned)) {
return item;
}
}
for (const item of this.itemHolders) {
if (item.isPlanned) continue;
if (item.references.every(x => x.ref.isPlanned || !x.isMandatory)) {
const backReferences = item.references.filter(x => !x.ref.isPlanned);
item.backReferences = backReferences;
return item;
}
}
throw new Error('Cycle in mandatory references');
}
createPlan() {
while (this.itemPlan.length < this.itemHolders.length) {
const item = this.findItemToPlan();
item.isPlanned = true;
this.itemPlan.push(item);
}
}
async run() {
this.createPlan();
await runCommandOnDriver(this.pool, this.driver, dmp => dmp.beginTransaction());
try {
for (const item of this.itemPlan) {
const stats = await item.runImport();
logger.info(
`Duplicated ${item.name}, inserted ${stats.inserted} rows, mapped ${stats.mapped} rows, missing ${stats.missing} rows, skipped ${stats.skipped} rows`
);
}
} catch (err) {
logger.error({ err }, `Failed duplicator job, rolling back. ${err.message}`);
await runCommandOnDriver(this.pool, this.driver, dmp => dmp.rollbackTransaction());
return;
}
if (this.options.rollbackAfterFinish) {
logger.info('Rolling back transaction, nothing was changed');
await runCommandOnDriver(this.pool, this.driver, dmp => dmp.rollbackTransaction());
} else {
logger.info('Committing duplicator transaction');
await runCommandOnDriver(this.pool, this.driver, dmp => dmp.commitTransaction());
}
}
}
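A hedged wiring sketch for the duplicator, following the constructor and run() shown above; every `declare`d value is a placeholder the hosting application must supply, and the import path is assumed:

```ts
import { DataDuplicator, DataDuplicatorItem } from 'dbgate-datalib'; // assumed import path
import type { EngineDriver, DatabaseInfo } from 'dbgate-types';

// placeholders provided by the hosting application
declare const pool: any; // open connection to the target database
declare const driver: EngineDriver; // driver for the target database
declare const db: DatabaseInfo; // analysed structure of the target database
declare const stream: any; // stream module used by createAsyncWriteStream
declare function openArtistStream(): Promise<ReadableStream>;
declare function openAlbumStream(): Promise<ReadableStream>;
declare function pipeStreams(input: any, output: any): Promise<void>;

const items: DataDuplicatorItem[] = [
  // run() orders items by foreign-key references, so the order here does not matter
  { name: 'Artist', operation: 'lookup', matchColumns: ['Name'], openStream: openArtistStream },
  { name: 'Album', operation: 'copy', matchColumns: [], openStream: openAlbumStream },
];

const duplicator = new DataDuplicator(pool, driver, db, items, stream, pipeStreams, {
  rollbackAfterFinish: true, // dry run: the whole transaction is rolled back at the end
});

await duplicator.run(); // inside an async context
```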
-120
View File
@@ -1,120 +0,0 @@
import _ from 'lodash';
import { GridConfig, GridCache, GridConfigColumns, createGridCache, GroupFunc } from './GridConfig';
import type { TableInfo, EngineDriver, DatabaseInfo, SqlDialect } from 'dbgate-types';
import { getFilterValueExpression } from 'dbgate-filterparser';
import { ChangeCacheFunc, ChangeConfigFunc, DisplayColumn } from './GridDisplay';
export class FormViewDisplay {
isLoadedCorrectly = true;
columns: DisplayColumn[];
public baseTable: TableInfo;
dialect: SqlDialect;
constructor(
public config: GridConfig,
protected setConfig: ChangeConfigFunc,
public cache: GridCache,
protected setCache: ChangeCacheFunc,
public driver?: EngineDriver,
public dbinfo: DatabaseInfo = null,
public serverVersion = null
) {
this.dialect = (driver?.dialectByVersion && driver?.dialectByVersion(serverVersion)) || driver?.dialect;
}
addFilterColumn(column) {
if (!column) return;
this.setConfig(cfg => ({
...cfg,
formFilterColumns: [...(cfg.formFilterColumns || []), column.uniqueName],
}));
}
filterCellValue(column, rowData) {
if (!column || !rowData) return;
const value = rowData[column.uniqueName];
const expr = getFilterValueExpression(value, column.dataType);
if (expr) {
this.setConfig(cfg => ({
...cfg,
filters: {
...cfg.filters,
[column.uniqueName]: expr,
},
addedColumns: cfg.addedColumns.includes(column.uniqueName)
? cfg.addedColumns
: [...cfg.addedColumns, column.uniqueName],
}));
this.reload();
}
}
setFilter(uniqueName, value) {
this.setConfig(cfg => ({
...cfg,
filters: {
...cfg.filters,
[uniqueName]: value,
},
}));
this.reload();
}
removeFilter(uniqueName) {
const reloadRequired = !!this.config.filters[uniqueName];
this.setConfig(cfg => ({
...cfg,
formFilterColumns: (cfg.formFilterColumns || []).filter(x => x != uniqueName),
filters: _.omit(cfg.filters || [], uniqueName),
}));
if (reloadRequired) this.reload();
}
reload() {
this.setCache(cache => ({
// ...cache,
...createGridCache(),
refreshTime: new Date().getTime(),
}));
}
getKeyValue(columnName) {
const { formViewKey, formViewKeyRequested } = this.config;
if (formViewKeyRequested && formViewKeyRequested[columnName]) return formViewKeyRequested[columnName];
if (formViewKey && formViewKey[columnName]) return formViewKey[columnName];
return null;
}
requestKeyValue(columnName, value) {
if (this.getKeyValue(columnName) == value) return;
this.setConfig(cfg => ({
...cfg,
formViewKeyRequested: {
...cfg.formViewKey,
...cfg.formViewKeyRequested,
[columnName]: value,
},
}));
this.reload();
}
extractKey(row) {
if (!row || !this.baseTable || !this.baseTable.primaryKey) {
return null;
}
const formViewKey = _.pick(
row,
this.baseTable.primaryKey.columns.map(x => x.columnName)
);
return formViewKey;
}
cancelRequestKey(rowData) {
this.setConfig(cfg => ({
...cfg,
formViewKeyRequested: null,
formViewKey: rowData ? this.extractKey(rowData) : cfg.formViewKey,
}));
}
}
@@ -1,48 +0,0 @@
import _ from 'lodash';
import type { EngineDriver, ViewInfo, ColumnInfo } from 'dbgate-types';
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
import { GridConfig, GridCache } from './GridConfig';
import { FreeTableModel } from './FreeTableModel';
import { analyseCollectionDisplayColumns } from '.';
export class FreeTableGridDisplay extends GridDisplay {
constructor(
public model: FreeTableModel,
config: GridConfig,
setConfig: ChangeConfigFunc,
cache: GridCache,
setCache: ChangeCacheFunc
) {
super(config, setConfig, cache, setCache);
this.columns = model?.structure?.__isDynamicStructure
? analyseCollectionDisplayColumns(model?.rows, this)
: this.getDisplayColumns(model);
this.filterable = false;
this.sortable = false;
}
getDisplayColumns(model: FreeTableModel) {
return _.uniqBy(
model?.structure?.columns
?.map(col => this.getDisplayColumn(col))
?.map(col => ({
...col,
isChecked: this.isColumnChecked(col),
})) || [],
col => col.uniqueName
);
}
getDisplayColumn(col: ColumnInfo) {
const uniquePath = [col.columnName];
const uniqueName = uniquePath.join('.');
return {
...col,
pureName: 'data',
schemaName: '',
headerText: col.columnName,
uniqueName,
uniquePath,
};
}
}
-27
View File
@@ -1,27 +0,0 @@
import type { TableInfo } from 'dbgate-types';
export interface FreeTableModel {
structure: TableInfo;
rows: any[];
}
export function createFreeTableModel() {
return {
structure: {
columns: [
{
columnName: 'col1',
},
],
foreignKeys: [],
},
rows: [
{
col1: 'val1',
},
{
col1: 'val2',
},
],
};
}
+2 -2
View File
@@ -27,10 +27,10 @@ export interface GridConfig extends GridConfigColumns {
childConfig?: GridConfig;
reference?: GridReferenceDefinition;
isFormView?: boolean;
formViewKey?: { [uniqueName: string]: string };
formViewKeyRequested?: { [uniqueName: string]: string };
formViewRecordNumber?: number;
formFilterColumns: string[];
formColumnFilterText?: string;
multiColumnFilter?: string;
}
export interface GridCache {
+112 -32
View File
@@ -14,7 +14,7 @@ import type {
import { parseFilter, getFilterType } from 'dbgate-filterparser';
import { filterName } from 'dbgate-tools';
import { ChangeSetFieldDefinition, ChangeSetRowDefinition } from './ChangeSet';
import { Expression, Select, treeToSql, dumpSqlSelect, Condition } from 'dbgate-sqltree';
import { Expression, Select, treeToSql, dumpSqlSelect, Condition, CompoudCondition } from 'dbgate-sqltree';
import { isTypeLogical } from 'dbgate-tools';
export interface DisplayColumn {
@@ -70,6 +70,7 @@ export abstract class GridDisplay {
}
dialect: SqlDialect;
columns: DisplayColumn[];
formColumns: DisplayColumn[] = [];
baseTable?: TableInfo;
baseView?: ViewInfo;
baseCollection?: CollectionInfo;
@@ -83,6 +84,7 @@ export abstract class GridDisplay {
return this.baseTable || this.baseView;
}
changeSetKeyFields: string[] = null;
editableStructure: TableInfo = null;
sortable = false;
groupable = false;
filterable = false;
@@ -211,6 +213,32 @@ export abstract class GridDisplay {
}
}
if (this.baseTableOrView && this.config.multiColumnFilter) {
try {
const condition = parseFilter(this.config.multiColumnFilter, 'string');
if (condition) {
const orCondition: CompoudCondition = {
conditionType: 'or',
conditions: [],
};
for (const column of this.baseTableOrView.columns) {
orCondition.conditions.push(
_.cloneDeepWith(condition, (expr: Expression) => {
if (expr.exprType == 'placeholder') {
return this.createColumnExpression(column, { alias: 'basetbl' });
}
})
);
}
if (orCondition.conditions.length > 0) {
conditions.push(orCondition);
}
}
} catch (err) {
console.warn(err.message);
}
}
if (conditions.length > 0) {
select.where = {
conditionType: 'and',
@@ -329,6 +357,16 @@ export abstract class GridDisplay {
...cfg.filters,
[uniqueName]: value,
},
formViewRecordNumber: 0,
}));
this.reload();
}
setMutliColumnFilter(value) {
this.setConfig(cfg => ({
...cfg,
multiColumnFilter: value,
formViewRecordNumber: 0,
}));
this.reload();
}
@@ -351,6 +389,7 @@ export abstract class GridDisplay {
this.setConfig(cfg => ({
...cfg,
filters: _.omit(cfg.filters, [uniqueName]),
formFilterColumns: (cfg.formFilterColumns || []).filter(x => x != uniqueName),
}));
this.reload();
}
@@ -457,30 +496,39 @@ export abstract class GridDisplay {
return _.pick(row, this.changeSetKeyFields);
}
getChangeSetField(row, uniqueName, insertedRowIndex): ChangeSetFieldDefinition {
getChangeSetField(
row,
uniqueName,
insertedRowIndex,
existingRowIndex = null,
baseNameOmitable = false
): ChangeSetFieldDefinition {
const col = this.columns.find(x => x.uniqueName == uniqueName);
if (!col) return null;
const baseObj = this.baseTableOrSimilar;
if (!baseObj) return null;
if (baseObj.pureName != col.pureName || baseObj.schemaName != col.schemaName) {
return null;
if (!baseNameOmitable) {
if (!baseObj) return null;
if (baseObj.pureName != col.pureName || baseObj.schemaName != col.schemaName) {
return null;
}
}
return {
...this.getChangeSetRow(row, insertedRowIndex),
...this.getChangeSetRow(row, insertedRowIndex, existingRowIndex, baseNameOmitable),
uniqueName: uniqueName,
columnName: col.columnName,
};
}
getChangeSetRow(row, insertedRowIndex): ChangeSetRowDefinition {
getChangeSetRow(row, insertedRowIndex, existingRowIndex, baseNameOmitable = false): ChangeSetRowDefinition {
const baseObj = this.baseTableOrSimilar;
if (!baseObj) return null;
if (!baseNameOmitable && !baseObj) return null;
return {
pureName: baseObj.pureName,
schemaName: baseObj.schemaName,
pureName: baseObj?.pureName,
schemaName: baseObj?.schemaName,
insertedRowIndex,
condition: insertedRowIndex == null ? this.getChangeSetCondition(row) : null,
existingRowIndex,
condition: insertedRowIndex == null && existingRowIndex == null ? this.getChangeSetCondition(row) : null,
};
}
@@ -516,13 +564,15 @@ export abstract class GridDisplay {
alias: 'basetbl',
},
columns: columns.map(col => this.createColumnExpression(col, { alias: 'basetbl' })),
orderBy: [
{
exprType: 'column',
columnName: orderColumnName,
direction: 'ASC',
},
],
orderBy: this.driver?.requiresDefaultSortCriteria
? [
{
exprType: 'column',
columnName: orderColumnName,
direction: 'ASC',
},
]
: null,
};
const displayedColumnInfo = _.keyBy(
this.columns.map(col => ({ ...col, sourceAlias: 'basetbl' })),
@@ -688,7 +738,7 @@ export abstract class GridDisplay {
// return sql;
}
compileFilters(): Condition {
compileJslFilters(): Condition {
const filters = this.config && this.config.filters;
if (!filters) return null;
const conditions = [];
@@ -711,6 +761,17 @@ export abstract class GridDisplay {
// filter parse error - ignore filter
}
}
if (this.config.multiColumnFilter) {
const placeholderCondition = parseFilter(this.config.multiColumnFilter, 'string');
if (placeholderCondition) {
conditions.push({
conditionType: 'anyColumnPass',
placeholderCondition,
});
}
}
if (conditions.length == 0) return null;
return {
conditionType: 'and',
@@ -718,22 +779,11 @@ export abstract class GridDisplay {
};
}
switchToFormView(rowData) {
if (!this.baseTable) return;
const { primaryKey } = this.baseTable;
if (!primaryKey) return;
const { columns } = primaryKey;
switchToFormView(rowIndex) {
this.setConfig(cfg => ({
...cfg,
isFormView: true,
formViewKey: rowData
? _.pick(
rowData,
columns.map(x => x.columnName)
)
: null,
formViewKeyRequested: null,
formViewRecordNumber: rowIndex,
}));
}
@@ -743,6 +793,36 @@ export abstract class GridDisplay {
isJsonView: true,
}));
}
formViewNavigate(command, allRowCount) {
switch (command) {
case 'begin':
this.setConfig(cfg => ({
...cfg,
formViewRecordNumber: 0,
}));
break;
case 'previous':
this.setConfig(cfg => ({
...cfg,
formViewRecordNumber: Math.max((cfg.formViewRecordNumber || 0) - 1, 0),
}));
break;
case 'next':
this.setConfig(cfg => ({
...cfg,
formViewRecordNumber: Math.max((cfg.formViewRecordNumber || 0) + 1, 0),
}));
break;
case 'end':
this.setConfig(cfg => ({
...cfg,
formViewRecordNumber: Math.max(allRowCount - 1, 0),
}));
break;
}
this.reload();
}
}
export function reloadDataCacheFunc(cache: GridCache): GridCache {
+7 -1
View File
@@ -13,14 +13,18 @@ export class JslGridDisplay extends GridDisplay {
setCache: ChangeCacheFunc,
rows: any,
isDynamicStructure: boolean,
supportsReload: boolean
supportsReload: boolean,
editable: boolean = false
) {
super(config, setConfig, cache, setCache, null);
this.filterable = true;
this.sortable = true;
this.supportsReload = supportsReload;
this.isDynamicStructure = isDynamicStructure;
this.filterTypeOverride = 'eval';
this.editable = editable;
this.editableStructure = editable ? structure : null;
if (structure?.columns) {
this.columns = _.uniqBy(
@@ -48,5 +52,7 @@ export class JslGridDisplay extends GridDisplay {
}
if (!this.columns) this.columns = [];
this.formColumns = this.columns;
}
}
+1 -1
View File
@@ -11,7 +11,7 @@ export interface MacroDefinition {
name: string;
group: string;
description?: string;
type: 'transformValue';
type: 'transformValue' | 'transformRow';
code: string;
args?: MacroArgument[];
}
@@ -80,6 +80,8 @@ export interface PerspectiveNodeConfig {
isAutoGenerated?: true | undefined;
isNodeChecked?: boolean;
multiColumnFilter?: string;
position?: {
x: number;
y: number;
+91 -26
View File
@@ -35,7 +35,7 @@ import { PerspectiveDataLoadProps, PerspectiveDataProvider } from './Perspective
import stableStringify from 'json-stable-stringify';
import { getFilterType, parseFilter } from 'dbgate-filterparser';
import { FilterType } from 'dbgate-filterparser/lib/types';
import { Condition, Expression, Select } from 'dbgate-sqltree';
import { CompoudCondition, Condition, Expression, Select } from 'dbgate-sqltree';
// import { getPerspectiveDefaultColumns } from './getPerspectiveDefaultColumns';
import uuidv1 from 'uuid/v1';
import { PerspectiveDataPatternColumn } from './PerspectiveDataPattern';
@@ -78,7 +78,7 @@ export abstract class PerspectiveTreeNode {
public setConfig: ChangePerspectiveConfigFunc,
public parentNode: PerspectiveTreeNode,
public dataProvider: PerspectiveDataProvider,
public databaseConfig: PerspectiveDatabaseConfig,
public defaultDatabaseConfig: PerspectiveDatabaseConfig,
public designerId: string
) {
this.nodeConfig = config.nodes.find(x => x.designerId == designerId);
@@ -126,6 +126,12 @@ export abstract class PerspectiveTreeNode {
get engineType(): PerspectiveDatabaseEngineType {
return null;
}
get databaseConfig(): PerspectiveDatabaseConfig {
const res = { ...this.defaultDatabaseConfig };
if (this.nodeConfig?.conid) res.conid = this.nodeConfig?.conid;
if (this.nodeConfig?.database) res.database = this.nodeConfig?.database;
return res;
}
abstract getNodeLoadProps(parentRows: any[]): PerspectiveDataLoadProps;
get isRoot() {
return this.parentNode == null;
@@ -335,10 +341,66 @@ export abstract class PerspectiveTreeNode {
);
}
getMutliColumnSqlCondition(source): Condition {
if (!this.nodeConfig?.multiColumnFilter) return null;
const base = this.getBaseTableFromThis() as TableInfo | ViewInfo;
if (!base) return null;
try {
const condition = parseFilter(this.nodeConfig?.multiColumnFilter, 'string');
if (condition) {
const orCondition: CompoudCondition = {
conditionType: 'or',
conditions: [],
};
for (const column of base.columns || []) {
orCondition.conditions.push(
_cloneDeepWith(condition, (expr: Expression) => {
if (expr.exprType == 'placeholder') {
return {
exprType: 'column',
alias: source,
columnName: column.columnName,
};
}
})
);
}
if (orCondition.conditions.length > 0) {
return orCondition;
}
}
} catch (err) {
console.warn(err.message);
}
return null;
}
getMutliColumnMongoCondition(): {} {
if (!this.nodeConfig?.multiColumnFilter) return null;
const pattern = this.dataProvider?.dataPatterns?.[this.designerId];
if (!pattern) return null;
const condition = parseFilter(this.nodeConfig?.multiColumnFilter, 'mongo');
if (!condition) return null;
const res = pattern.columns.map(col => {
return _cloneDeepWith(condition, expr => {
if (expr.__placeholder__) {
return {
[col.name]: expr.__placeholder__,
};
}
});
});
return {
$or: res,
};
}
getChildrenSqlCondition(source = null): Condition {
const conditions = _compact([
...this.childNodes.map(x => x.parseFilterCondition(source)),
...this.buildParentFilterConditions(),
this.getMutliColumnSqlCondition(source),
]);
if (conditions.length == 0) {
return null;
@@ -353,7 +415,10 @@ export abstract class PerspectiveTreeNode {
}
getChildrenMongoCondition(source = null): {} {
const conditions = _compact([...this.childNodes.map(x => x.parseFilterCondition(source))]);
const conditions = _compact([
...this.childNodes.map(x => x.parseFilterCondition(source)),
this.getMutliColumnMongoCondition(),
]);
if (conditions.length == 0) {
return null;
}
@@ -396,7 +461,7 @@ export abstract class PerspectiveTreeNode {
}
return res;
}
getBaseTableFromThis() {
getBaseTableFromThis(): TableInfo | ViewInfo | CollectionInfo {
return null;
}
@@ -534,11 +599,11 @@ export class PerspectiveTableColumnNode extends PerspectiveTreeNode {
config: PerspectiveConfig,
setConfig: ChangePerspectiveConfigFunc,
dataProvider: PerspectiveDataProvider,
databaseConfig: PerspectiveDatabaseConfig,
defaultDatabaseConfig: PerspectiveDatabaseConfig,
parentNode: PerspectiveTreeNode,
designerId: string
) {
super(dbs, config, setConfig, parentNode, dataProvider, databaseConfig, designerId);
super(dbs, config, setConfig, parentNode, dataProvider, defaultDatabaseConfig, designerId);
this.isTable = !!this.db?.tables?.find(x => x.schemaName == table.schemaName && x.pureName == table.pureName);
this.isView = !!this.db?.views?.find(x => x.schemaName == table.schemaName && x.pureName == table.pureName);
@@ -690,7 +755,7 @@ export class PerspectiveTableColumnNode extends PerspectiveTreeNode {
this.config,
this.setConfig,
this.dataProvider,
this.databaseConfig,
this.defaultDatabaseConfig,
this
);
}
@@ -768,11 +833,11 @@ export class PerspectivePatternColumnNode extends PerspectiveTreeNode {
config: PerspectiveConfig,
setConfig: ChangePerspectiveConfigFunc,
dataProvider: PerspectiveDataProvider,
databaseConfig: PerspectiveDatabaseConfig,
defaultDatabaseConfig: PerspectiveDatabaseConfig,
parentNode: PerspectiveTreeNode,
designerId: string
) {
super(dbs, config, setConfig, parentNode, dataProvider, databaseConfig, designerId);
super(dbs, config, setConfig, parentNode, dataProvider, defaultDatabaseConfig, designerId);
this.parentNodeConfig = this.tableNodeOrParent?.nodeConfig;
// console.log('PATTERN COLUMN', column);
}
@@ -904,7 +969,7 @@ export class PerspectivePatternColumnNode extends PerspectiveTreeNode {
this.config,
this.setConfig,
this.dataProvider,
this.databaseConfig,
this.defaultDatabaseConfig,
this,
null
)
@@ -951,7 +1016,7 @@ export class PerspectivePatternColumnNode extends PerspectiveTreeNode {
}
// console.log('CP2');
const newConfig = { ...this.databaseConfig };
const newConfig: PerspectiveDatabaseConfig = { ...this.defaultDatabaseConfig };
if (node.conid) newConfig.conid = node.conid;
if (node.database) newConfig.database = node.database;
const db = this.dbs?.[newConfig.conid]?.[newConfig.database];
@@ -983,7 +1048,7 @@ export class PerspectivePatternColumnNode extends PerspectiveTreeNode {
this.config,
this.setConfig,
this.dataProvider,
newConfig,
this.defaultDatabaseConfig,
this,
node.designerId
)
@@ -1073,11 +1138,11 @@ export class PerspectiveTableNode extends PerspectiveTreeNode {
config: PerspectiveConfig,
setConfig: ChangePerspectiveConfigFunc,
public dataProvider: PerspectiveDataProvider,
databaseConfig: PerspectiveDatabaseConfig,
defaultDatabaseConfig: PerspectiveDatabaseConfig,
parentNode: PerspectiveTreeNode,
designerId: string
) {
super(dbs, config, setConfig, parentNode, dataProvider, databaseConfig, designerId);
super(dbs, config, setConfig, parentNode, dataProvider, defaultDatabaseConfig, designerId);
}
get engineType(): PerspectiveDatabaseEngineType {
@@ -1118,7 +1183,7 @@ export class PerspectiveTableNode extends PerspectiveTreeNode {
this.config,
this.setConfig,
this.dataProvider,
this.databaseConfig,
this.defaultDatabaseConfig,
this
);
}
@@ -1160,12 +1225,12 @@ export class PerspectiveTableReferenceNode extends PerspectiveTableNode {
config: PerspectiveConfig,
setConfig: ChangePerspectiveConfigFunc,
public dataProvider: PerspectiveDataProvider,
databaseConfig: PerspectiveDatabaseConfig,
defaultDatabaseConfig: PerspectiveDatabaseConfig,
public isMultiple: boolean,
parentNode: PerspectiveTreeNode,
designerId: string
) {
super(table, dbs, config, setConfig, dataProvider, databaseConfig, parentNode, designerId);
super(table, dbs, config, setConfig, dataProvider, defaultDatabaseConfig, parentNode, designerId);
}
matchChildRow(parentRow: any, childRow: any): boolean {
@@ -1264,11 +1329,11 @@ export class PerspectiveCustomJoinTreeNode extends PerspectiveTableNode {
config: PerspectiveConfig,
setConfig: ChangePerspectiveConfigFunc,
public dataProvider: PerspectiveDataProvider,
databaseConfig: PerspectiveDatabaseConfig,
defaultDatabaseConfig: PerspectiveDatabaseConfig,
parentNode: PerspectiveTreeNode,
designerId: string
) {
super(table, dbs, config, setConfig, dataProvider, databaseConfig, parentNode, designerId);
super(table, dbs, config, setConfig, dataProvider, defaultDatabaseConfig, parentNode, designerId);
}
matchChildRow(parentRow: any, childRow: any): boolean {
@@ -1474,7 +1539,7 @@ export function getTableChildPerspectiveNodes(
config: PerspectiveConfig,
setConfig: ChangePerspectiveConfigFunc,
dataProvider: PerspectiveDataProvider,
databaseConfig: PerspectiveDatabaseConfig,
defaultDatabaseConfig: PerspectiveDatabaseConfig,
parentNode: PerspectiveTreeNode
) {
if (!table) return [];
@@ -1496,7 +1561,7 @@ export function getTableChildPerspectiveNodes(
config,
setConfig,
dataProvider,
databaseConfig,
defaultDatabaseConfig,
parentNode,
designerId
)
@@ -1507,7 +1572,7 @@ export function getTableChildPerspectiveNodes(
config,
setConfig,
dataProvider,
databaseConfig,
defaultDatabaseConfig,
parentNode,
designerId
)
@@ -1526,7 +1591,7 @@ export function getTableChildPerspectiveNodes(
config,
setConfig,
dataProvider,
databaseConfig,
defaultDatabaseConfig,
parentNode,
designerId
)
@@ -1565,7 +1630,7 @@ export function getTableChildPerspectiveNodes(
config,
setConfig,
dataProvider,
databaseConfig,
defaultDatabaseConfig,
isMultiple,
parentNode,
designerId
@@ -1591,7 +1656,7 @@ export function getTableChildPerspectiveNodes(
if (ref.columns.find(x => x.source.includes('::') || x.target.includes('::'))) {
continue;
}
const newConfig = { ...databaseConfig };
const newConfig: PerspectiveDatabaseConfig = { ...defaultDatabaseConfig };
if (node.conid) newConfig.conid = node.conid;
if (node.database) newConfig.database = node.database;
const db = dbs?.[newConfig.conid]?.[newConfig.database];
@@ -1623,7 +1688,7 @@ export function getTableChildPerspectiveNodes(
config,
setConfig,
dataProvider,
newConfig,
defaultDatabaseConfig,
parentNode,
node.designerId
)
@@ -1,272 +0,0 @@
import { FormViewDisplay } from './FormViewDisplay';
import _ from 'lodash';
import { ChangeCacheFunc, DisplayColumn, ChangeConfigFunc } from './GridDisplay';
import type { EngineDriver, NamedObjectInfo, DatabaseInfo } from 'dbgate-types';
import { GridConfig, GridCache } from './GridConfig';
import { mergeConditions, Condition, OrderByExpression } from 'dbgate-sqltree';
import { TableGridDisplay } from './TableGridDisplay';
import stableStringify from 'json-stable-stringify';
import { ChangeSetFieldDefinition, ChangeSetRowDefinition } from './ChangeSet';
import { DictionaryDescriptionFunc } from '.';
export class TableFormViewDisplay extends FormViewDisplay {
// use utility functions from GridDisplay and publish result in FromViewDisplay interface
private gridDisplay: TableGridDisplay;
constructor(
public tableName: NamedObjectInfo,
driver: EngineDriver,
config: GridConfig,
setConfig: ChangeConfigFunc,
cache: GridCache,
setCache: ChangeCacheFunc,
dbinfo: DatabaseInfo,
displayOptions,
serverVersion,
getDictionaryDescription: DictionaryDescriptionFunc = null,
isReadOnly = false
) {
super(config, setConfig, cache, setCache, driver, dbinfo, serverVersion);
this.gridDisplay = new TableGridDisplay(
tableName,
driver,
config,
setConfig,
cache,
setCache,
dbinfo,
displayOptions,
serverVersion,
getDictionaryDescription,
isReadOnly
);
this.gridDisplay.addAllExpandedColumnsToSelected = true;
this.isLoadedCorrectly = this.gridDisplay.isLoadedCorrectly && !!this.driver;
this.columns = [];
this.addDisplayColumns(this.gridDisplay.columns);
this.baseTable = this.gridDisplay.baseTable;
this.gridDisplay.hintBaseColumns = this.columns;
}
addDisplayColumns(columns: DisplayColumn[]) {
for (const col of columns) {
this.columns.push(col);
if (this.gridDisplay.isExpandedColumn(col.uniqueName)) {
const table = this.gridDisplay.getFkTarget(col);
if (table) {
const subcolumns = this.gridDisplay.getDisplayColumns(table, col.uniquePath);
this.addDisplayColumns(subcolumns);
}
}
}
}
getPrimaryKeyEqualCondition(row = null): Condition {
if (!row) row = this.config.formViewKeyRequested || this.config.formViewKey;
if (!row) return null;
const { primaryKey } = this.gridDisplay.baseTable;
if (!primaryKey) return null;
return {
conditionType: 'and',
conditions: primaryKey.columns.map(({ columnName }) => ({
conditionType: 'binary',
operator: '=',
left: {
exprType: 'column',
columnName,
source: {
alias: 'basetbl',
},
},
right: {
exprType: 'value',
value: row[columnName],
},
})),
};
}
getPrimaryKeyOperatorCondition(operator): Condition {
if (!this.config.formViewKey) return null;
const conditions = [];
const { primaryKey } = this.gridDisplay.baseTable;
if (!primaryKey) return null;
for (let index = 0; index < primaryKey.columns.length; index++) {
conditions.push({
conditionType: 'and',
conditions: [
...primaryKey.columns.slice(0, index).map(({ columnName }) => ({
conditionType: 'binary',
operator: '=',
left: {
exprType: 'column',
columnName,
source: {
alias: 'basetbl',
},
},
right: {
exprType: 'value',
value: this.config.formViewKey[columnName],
},
})),
...primaryKey.columns.slice(index).map(({ columnName }) => ({
conditionType: 'binary',
operator: operator,
left: {
exprType: 'column',
columnName,
source: {
alias: 'basetbl',
},
},
right: {
exprType: 'value',
value: this.config.formViewKey[columnName],
},
})),
],
});
}
if (conditions.length == 1) {
return conditions[0];
}
return {
conditionType: 'or',
conditions,
};
}
getSelect() {
if (!this.driver) return null;
const select = this.gridDisplay.createSelect();
if (!select) return null;
select.topRecords = 1;
return select;
}
getCurrentRowQuery() {
const select = this.getSelect();
if (!select) return null;
select.where = mergeConditions(select.where, this.getPrimaryKeyEqualCondition());
return select;
}
getCountSelect() {
const select = this.getSelect();
if (!select) return null;
select.orderBy = null;
select.columns = [
{
exprType: 'raw',
sql: 'COUNT(*)',
alias: 'count',
},
];
select.topRecords = null;
return select;
}
getCountQuery() {
if (!this.driver) return null;
const select = this.getCountSelect();
if (!select) return null;
return select;
}
getBeforeCountQuery() {
if (!this.driver) return null;
const select = this.getCountSelect();
if (!select) return null;
select.where = mergeConditions(select.where, this.getPrimaryKeyOperatorCondition('<'));
return select;
}
navigate(row) {
const formViewKey = this.extractKey(row);
this.setConfig(cfg => ({
...cfg,
formViewKey,
}));
}
isLoadedCurrentRow(row) {
if (!row) return false;
const formViewKey = this.extractKey(row);
return stableStringify(formViewKey) == stableStringify(this.config.formViewKey);
}
navigateRowQuery(commmand: 'begin' | 'previous' | 'next' | 'end') {
if (!this.driver) return null;
const select = this.gridDisplay.createSelect();
if (!select) return null;
const { primaryKey } = this.gridDisplay.baseTable;
function getOrderBy(direction): OrderByExpression[] {
return primaryKey.columns.map(({ columnName }) => ({
exprType: 'column',
columnName,
direction,
}));
}
select.topRecords = 1;
switch (commmand) {
case 'begin':
select.orderBy = getOrderBy('ASC');
break;
case 'end':
select.orderBy = getOrderBy('DESC');
break;
case 'previous':
select.orderBy = getOrderBy('DESC');
select.where = mergeConditions(select.where, this.getPrimaryKeyOperatorCondition('<'));
break;
case 'next':
select.orderBy = getOrderBy('ASC');
select.where = mergeConditions(select.where, this.getPrimaryKeyOperatorCondition('>'));
break;
}
return select;
}
getChangeSetRow(row): ChangeSetRowDefinition {
if (!this.baseTable) return null;
return {
pureName: this.baseTable.pureName,
schemaName: this.baseTable.schemaName,
condition: this.extractKey(row),
};
}
getChangeSetField(row, uniqueName): ChangeSetFieldDefinition {
const col = this.columns.find(x => x.uniqueName == uniqueName);
if (!col) return null;
if (!this.baseTable) return null;
if (this.baseTable.pureName != col.pureName || this.baseTable.schemaName != col.schemaName) return null;
return {
...this.getChangeSetRow(row),
uniqueName: uniqueName,
columnName: col.columnName,
};
}
toggleExpandedColumn(uniqueName: string, value?: boolean) {
this.gridDisplay.toggleExpandedColumn(uniqueName, value);
this.gridDisplay.reload();
}
isExpandedColumn(uniqueName: string) {
return this.gridDisplay.isExpandedColumn(uniqueName);
}
get editable() {
return this.gridDisplay.editable;
}
}
+19
View File
@@ -51,6 +51,7 @@ export class TableGridDisplay extends GridDisplay {
}
this.columns = this.getDisplayColumns(this.table, []);
this.addFormDisplayColumns(this.getDisplayColumns(this.table, []));
this.filterable = true;
this.sortable = true;
this.groupable = true;
@@ -62,6 +63,24 @@ export class TableGridDisplay extends GridDisplay {
? this.table.primaryKey.columns.map(x => x.columnName)
: this.table.columns.map(x => x.columnName);
}
if (this.config.isFormView) {
this.addAllExpandedColumnsToSelected = true;
this.hintBaseColumns = this.formColumns;
}
}
addFormDisplayColumns(columns) {
for (const col of columns) {
this.formColumns.push(col);
if (this.isExpandedColumn(col.uniqueName)) {
const table = this.getFkTarget(col);
if (table) {
const subcolumns = this.getDisplayColumns(table, col.uniquePath);
this.addFormDisplayColumns(subcolumns);
}
}
}
}
findTable({ schemaName = undefined, pureName }) {
+1
View File
@@ -15,6 +15,7 @@ export class ViewGridDisplay extends GridDisplay {
) {
super(config, setConfig, cache, setCache, driver, serverVersion);
this.columns = this.getDisplayColumns(view);
this.formColumns = this.columns;
this.filterable = true;
this.sortable = true;
this.groupable = false;
+11 -2
View File
@@ -55,7 +55,7 @@ function processDependencies(
schemaName: fk.schemaName,
},
alias: 't0',
relations: subFkPath.map((fkItem, fkIndex) => ({
relations: [...subFkPath].reverse().map((fkItem, fkIndex) => ({
joinType: 'INNER JOIN',
alias: `t${fkIndex + 1}`,
name: {
@@ -123,7 +123,16 @@ export function getDeleteCascades(changeSet: ChangeSet, dbinfo: DatabaseInfo): C
const table = dbinfo.tables.find(x => x.pureName == baseCmd.pureName && x.schemaName == baseCmd.schemaName);
if (!table.primaryKey) continue;
processDependencies(changeSet, result, allForeignKeys, [], table, baseCmd, dbinfo, [table.pureName]);
const itemResult: ChangeSetDeleteCascade[] = [];
processDependencies(changeSet, itemResult, allForeignKeys, [], table, baseCmd, dbinfo, [table.pureName]);
for (const item of itemResult) {
const existing = result.find(x => x.title == item.title);
if (existing) {
existing.commands.push(...item.commands);
} else {
result.push(item);
}
}
// let resItem = result.find(x => x.title == baseCmd.pureName);
// if (!resItem) {
+1 -4
View File
@@ -6,12 +6,8 @@ export * from './TableGridDisplay';
export * from './ViewGridDisplay';
export * from './JslGridDisplay';
export * from './ChangeSet';
export * from './FreeTableGridDisplay';
export * from './FreeTableModel';
export * from './MacroDefinition';
export * from './runMacro';
export * from './FormViewDisplay';
export * from './TableFormViewDisplay';
export * from './CollectionGridDisplay';
export * from './deleteCascade';
export * from './PerspectiveDisplay';
@@ -22,3 +18,4 @@ export * from './processPerspectiveDefaultColunns';
export * from './PerspectiveDataPattern';
export * from './PerspectiveDataLoader';
export * from './perspectiveTools';
export * from './DataDuplicator';
+51 -180
View File
@@ -1,10 +1,9 @@
import { FreeTableModel } from './FreeTableModel';
import _ from 'lodash';
import uuidv1 from 'uuid/v1';
import uuidv4 from 'uuid/v4';
import moment from 'moment';
import { MacroDefinition, MacroSelectedCell } from './MacroDefinition';
import { ChangeSet, setChangeSetValue } from './ChangeSet';
import { ChangeSet, setChangeSetValue, setChangeSetRowData } from './ChangeSet';
import { GridDisplay } from './GridDisplay';
const getMacroFunction = {
@@ -13,13 +12,8 @@ const getMacroFunction = {
${code}
}
`,
transformRows: code => `
(rows, args, modules, selectedCells, cols, columns) => {
${code}
}
`,
transformData: code => `
(rows, args, modules, selectedCells, cols, columns) => {
transformRow: code => `
(row, args, modules, rowIndex, columns) => {
${code}
}
`,
@@ -32,160 +26,6 @@ const modules = {
moment,
};
function runTramsformValue(
func,
macroArgs: {},
data: FreeTableModel,
preview: boolean,
selectedCells: MacroSelectedCell[],
errors: string[] = []
) {
const selectedRows = _.groupBy(selectedCells, 'row');
const rows = data.rows.map((row, rowIndex) => {
const selectedRow = selectedRows[rowIndex];
if (selectedRow) {
const modifiedFields = [];
let res = null;
for (const cell of selectedRow) {
const { column } = cell;
const oldValue = row[column];
let newValue = oldValue;
try {
newValue = func(oldValue, macroArgs, modules, rowIndex, row, column);
} catch (err) {
errors.push(`Error processing column ${column} on row ${rowIndex}: ${err.message}`);
}
if (newValue != oldValue) {
if (res == null) {
res = { ...row };
}
res[column] = newValue;
if (preview) modifiedFields.push(column);
}
}
if (res) {
if (modifiedFields.length > 0) {
return {
...res,
__modifiedFields: new Set(modifiedFields),
};
}
return res;
}
return row;
} else {
return row;
}
});
return {
structure: data.structure,
rows,
};
}
function removePreviewRowFlags(rows) {
rows = rows.filter(row => row.__rowStatus != 'deleted');
rows = rows.map(row => {
if (row.__rowStatus || row.__modifiedFields || row.__insertedFields || row.__deletedFields)
return _.omit(row, ['__rowStatus', '__modifiedFields', '__insertedFields', '__deletedFields']);
return row;
});
return rows;
}
function runTramsformRows(
func,
macroArgs: {},
data: FreeTableModel,
preview: boolean,
selectedCells: MacroSelectedCell[],
errors: string[] = []
) {
let rows = data.rows;
try {
rows = func(
data.rows,
macroArgs,
modules,
selectedCells,
data.structure.columns.map(x => x.columnName),
data.structure.columns
);
if (!preview) {
rows = removePreviewRowFlags(rows);
}
} catch (err) {
errors.push(`Error processing rows: ${err.message}`);
}
return {
structure: data.structure,
rows,
};
}
function runTramsformData(
func,
macroArgs: {},
data: FreeTableModel,
preview: boolean,
selectedCells: MacroSelectedCell[],
errors: string[] = []
) {
try {
let { rows, columns, cols } = func(
data.rows,
macroArgs,
modules,
selectedCells,
data.structure.columns.map(x => x.columnName),
data.structure.columns
);
if (cols && !columns) {
columns = cols.map(columnName => ({ columnName }));
}
columns = _.uniqBy(columns, 'columnName');
if (!preview) {
rows = removePreviewRowFlags(rows);
}
return {
structure: { columns },
rows,
};
} catch (err) {
errors.push(`Error processing data: ${err.message}`);
}
return data;
}
export function runMacro(
macro: MacroDefinition,
macroArgs: {},
data: FreeTableModel,
preview: boolean,
selectedCells: MacroSelectedCell[],
errors: string[] = []
): FreeTableModel {
let func;
try {
func = eval(getMacroFunction[macro.type](macro.code));
} catch (err) {
errors.push(`Error compiling macro ${macro.name}: ${err.message}`);
return data;
}
if (macro.type == 'transformValue') {
return runTramsformValue(func, macroArgs, data, preview, selectedCells, errors);
}
if (macro.type == 'transformRows') {
return runTramsformRows(func, macroArgs, data, preview, selectedCells, errors);
}
if (macro.type == 'transformData') {
// @ts-ignore
return runTramsformData(func, macroArgs, data, preview, selectedCells, errors);
}
return data;
}
export function compileMacroFunction(macro: MacroDefinition, errors = []) {
if (!macro) return null;
let func;
@@ -198,7 +38,7 @@ export function compileMacroFunction(macro: MacroDefinition, errors = []) {
}
}
export function runMacroOnValue(compiledFunc, macroArgs, value, rowIndex, row, column, errors = []) {
export function runMacroOnValue(compiledFunc, macroArgs, value, rowIndex: number, row, column: string, errors = []) {
if (!compiledFunc) return value;
try {
const res = compiledFunc(value, macroArgs, modules, rowIndex, row, column);
@@ -209,31 +49,62 @@ export function runMacroOnValue(compiledFunc, macroArgs, value, rowIndex, row, c
}
}
export function runMacroOnRow(compiledFunc, macroArgs, rowIndex: number, row: any, columns: string[], errors = []) {
if (!compiledFunc) return row;
try {
const res = compiledFunc(row, macroArgs, modules, rowIndex, columns);
return res;
} catch (err) {
errors.push(`Error processing row ${rowIndex}: ${err.message}`);
return row;
}
}
export function runMacroOnChangeSet(
macro: MacroDefinition,
macroArgs: {},
selectedCells: MacroSelectedCell[],
changeSet: ChangeSet,
display: GridDisplay
display: GridDisplay,
useRowIndexInsteaOfCondition: boolean
): ChangeSet {
const errors = [];
const compiledMacroFunc = compileMacroFunction(macro, errors);
if (!compiledMacroFunc) return null;
let res = changeSet;
for (const cell of selectedCells) {
const definition = display.getChangeSetField(cell.rowData, cell.column, undefined);
const macroResult = runMacroOnValue(
compiledMacroFunc,
macroArgs,
cell.value,
cell.row,
cell.rowData,
cell.column,
errors
);
res = setChangeSetValue(res, definition, macroResult);
if (macro.type == 'transformValue') {
let res = changeSet;
for (const cell of selectedCells) {
const definition = display.getChangeSetField(
cell.rowData,
cell.column,
undefined,
useRowIndexInsteaOfCondition ? cell.row : undefined,
useRowIndexInsteaOfCondition
);
const macroResult = runMacroOnValue(
compiledMacroFunc,
macroArgs,
cell.value,
cell.row,
cell.rowData,
cell.column,
errors
);
res = setChangeSetValue(res, definition, macroResult);
}
return res;
}
if (macro.type == 'transformRow') {
let res = changeSet;
const rowIndexes = _.uniq(selectedCells.map(x => x.row));
for (const index of rowIndexes) {
const rowData = selectedCells.find(x => x.row == index)?.rowData;
const columns = _.uniq(selectedCells.map(x => x.column));
const definition = display.getChangeSetRow(rowData, null, index, true);
const newRow = runMacroOnRow(compiledMacroFunc, macroArgs, index, rowData, columns);
res = setChangeSetRowData(res, definition, newRow);
}
return res;
}
return res;
}
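For illustration, a transformRow macro matching the new getMacroFunction template above; the macro itself is hypothetical and only the fields visible in this diff are filled in:

```ts
// the code string becomes the body of the generated
// (row, args, modules, rowIndex, columns) => { ... } wrapper shown above
const upperCaseRowMacro = {
  name: 'upperCaseSelectedColumns', // hypothetical macro, not part of dbgate
  group: 'Text',
  type: 'transformRow' as const,
  code: `
    const res = { ...row };
    for (const col of columns) {
      if (typeof res[col] == 'string') res[col] = res[col].toUpperCase();
    }
    return res;
  `,
};

// rows are then rewritten through setChangeSetRowData, addressed by row index:
// runMacroOnChangeSet(upperCaseRowMacro, {}, selectedCells, changeSet, display, true);
```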
+1
View File
@@ -0,0 +1 @@
testdata
+21
View File
@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2019 dbshell
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+102
View File
@@ -0,0 +1,102 @@
# dbmodel
Deploy, load, or build a script from a model of an SQL database. Can be used as a command-line tool. Uses [DbGate](https://dbgate.org) tooling and plugins for connecting to many different databases.
If you want to use this tool from a JavaScript interface, please use the [dbgate-api](https://www.npmjs.com/package/dbgate-api) package.
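For illustration, a deploy roughly equivalent to the CLI below might be invoked like this (a sketch based only on the `dbgateApi.deployDb` call made by the bundled `dbmodel` binary; connection values and the model folder are placeholders):

```js
const dbgateApi = require('dbgate-api');

async function deployModel() {
  // same option shape as the bundled dbmodel CLI passes internally
  await dbgateApi.deployDb({
    connection: {
      engine: 'postgres@dbgate-plugin-postgres',
      server: 'localhost',
      user: 'postgres',
      password: 'secret',
      database: 'mydb',
    },
    modelFolder: './model',
  });
}

deployModel();
```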
The model is stored as a collection of files:
* tables - stored as YAML files, describing:
  * columns
  * indexes
  * primary keys
  * foreign keys
* views - stored as SQL files with the extension **.view.sql**
* stored procedures - stored as SQL files with the extension **.proc.sql**
* functions - stored as SQL files with the extension **.func.sql**
## Installation - as global tool

```sh
npm install --global dbmodel
```

## Installation - as regular package

```sh
npm install --save dbmodel
```
## Available commands
* **load** - loads the structure of a database and saves it to a local directory (called a *project*). It can also download data of selected tables (use the --load-data-condition option).
* **deploy** - deploys the structure from a local directory (a *project*) to a database. *Deploy does not perform any actions leading to data loss; such changes must be made manually.*
  * creates missing tables
  * creates missing columns in existing tables
  * checks column NULL/NOT NULL flags, alters columns
  * reports tables that exist in the database but not in the project
  * reports columns that exist in the database but not in the project
  * checks indexes and their definitions; indexes are created or recreated if necessary (*but not deleted*)
  * checks and creates foreign keys
  * checks and creates new or changes existing views, stored procedures and functions
  * updates and creates static data (included in table YAML files)
* **build** - builds a SQL script from the project folder. This operation is completely offline; no database connection is needed. The built script performs a subset of the deploy command: it can be executed on an empty database, but it can also convert an existing database to the current structure (using only the operations below).
  * creates missing tables
  * creates missing columns in existing tables
  * creates missing indexes (checked only by name)
  * creates missing foreign keys
  * creates new or changes existing views, stored procedures and functions
  * updates and creates static data (included in table YAML files)
## Command line interface
```sh
# load from existing database
dbmodel load -s localhost -u USERNAME -p PASSWORD -d DATABASE -e mssql@dbgate-plugin-mssql OUTPUT_FOLDER
# deploy project to database
dbmodel deploy -s localhost -u USERNAME -p PASSWORD -d DATABASE -e mssql@dbgate-plugin-mssql PROJECT_FOLDER
# build SQL script from project
dbmodel build -e mssql@dbgate-plugin-mssql PROJECT_FOLDER OUTPUT_FILE.sql
```
The -e (or --engine) parameter specifies the database dialect and the connection driver to be used.
Supported databases:
- MySQL - `-e mysql@dbgate-plugin-mysql`
- MS SQL Server - `-e mssql@dbgate-plugin-mssql`
- PostgreSQL - `-e postgres@dbgate-plugin-postgres`
- SQLite - `-e sqlite@dbgate-plugin-sqlite`
- Oracle - `-e oracle@dbgate-plugin-oracle`
- MariaDB - `-e mariadb@dbgate-plugin-mysql`
- CockroachDB - `-e cockroach@dbgate-plugin-postgres`
- Amazon Redshift - `-e redshift@dbgate-plugin-postgres`
## Table YAML file documentation
```yaml
name: Album # table name
columns:
- name: AlbumId # column name
type: int # data type, used directly in the target SQL engine
autoIncrement: true # column is autoincrement
notNull: true # column is not nullable (default: is nullable)
- name: Title
type: nvarchar
length: 160 # maximum character length
notNull: true
- name: ArtistId
type: int
references: Artist # name of table. Is used for creating foreign key
- name: isDeleted
type: bit
notNull: true
default: 0 # default value
primaryKey:
- AlbumId # list of primary key column names
indexes:
- name: UQ_AlbumTitleArtistId # index name
unique: true # whether index is unique. default=false
columns: # list of index columns
- Title
- ArtistId
filter: isDeleted=0 # if defined, filtered index (with WHERE condition) is created
continueOnError: true # if true and creating this index fails, continue (suitable for unique indexes added later)
data: # static data (only for list tables)
- AlbumId: -1 # values for all columns that should be filled
Title: Predefined static album
```
+118
View File
@@ -0,0 +1,118 @@
#!/usr/bin/env node
const path = require('path');
require('dotenv').config();
global.API_PACKAGE = path.dirname(path.dirname(require.resolve('dbgate-api')));
global.PLUGINS_DIR = process.env.DEVMODE
? path.join(path.dirname(path.dirname(global.API_PACKAGE)), 'plugins')
: path.dirname(global.API_PACKAGE);
global.IS_NPM_DIST = true;
const program = require('commander');
const dbgateApi = require('dbgate-api');
const { createLogger } = require('pinomin');
const logger = createLogger('dbmodel');
async function runAndExit(promise) {
try {
await promise;
logger.info('Success');
process.exit();
} catch (err) {
logger.error({ err }, 'Processing failed');
process.exit(1);
}
}
program
.option('-s, --server <server>', 'server host')
.option('-u, --user <user>', 'user name')
.option('-p, --password <password>', 'password')
.option('-d, --database <database>', 'database name')
.option('--auto-index-foreign-keys', 'automatically adds indexes to all foreign keys')
.option(
'--load-data-condition <condition>',
'regex selecting tables whose data will be loaded and stored in the model (used by the load command)'
)
.requiredOption('-e, --engine <engine>', 'engine name, e.g. mysql@dbgate-plugin-mysql');
program
.command('deploy <modelFolder>')
.description('Deploys model to database')
.action(modelFolder => {
const { engine, server, user, password, database } = program.opts();
// const hooks = [];
// if (program.autoIndexForeignKeys) hooks.push(dbmodel.hooks.autoIndexForeignKeys);
runAndExit(
dbgateApi.deployDb({
connection: {
engine,
server,
user,
password,
database,
},
modelFolder,
})
);
});
// runAndExit(
// dbmodel.deploy({
// connection: {
// engine,
// server,
// user,
// password,
// database,
// },
// hooks,
// projectDir,
// })
// );
program
.command('load <outputDir>')
.description('Loads model from database')
.action(outputDir => {
const { engine, server, user, password, database } = program.opts();
// const loadDataCondition = program.loadDataCondition
// ? table => table.name.match(new RegExp(program.loadDataCondition, 'i'))
// : null;
// const hooks = [];
runAndExit(
dbgateApi.loadDatabase({
connection: {
engine,
server,
user,
password,
database,
},
outputDir,
})
);
});
program
.command('build <modelFolder> <outputFile>')
.description('Builds single SQL script from project')
.action((modelFolder, outputFile) => {
const { engine } = program.opts();
// const hooks = [];
runAndExit(
dbgateApi.generateModelSql({
// client,
// hooks,
modelFolder,
outputFile,
engine,
})
);
});
program.parse(process.argv);
+46
View File
@@ -0,0 +1,46 @@
{
"name": "dbmodel",
"version": "5.0.0-alpha.1",
"homepage": "https://dbgate.org/",
"repository": {
"type": "git",
"url": "https://github.com/dbgate/dbgate.git"
},
"description": "Deploy, load or build script from model of SQL database",
"author": "Jan Prochazka",
"license": "MIT",
"bin": {
"dbmodel": "./bin/dbmodel.js"
},
"keywords": [
"sql",
"dbgate",
"web"
],
"scripts": {
"dbmodel": "node ./bin/dbmodel.js",
"dbmodel:load": "cross-env DEVMODE=1 node ./bin/dbmodel.js load testdata/db -e postgres@dbgate-plugin-postgres -s localhost -u postgres -p Pwd2020Db -d zradlo",
"dbmodel:deploy": "cross-env DEVMODE=1 node ./bin/dbmodel.js deploy testdata/db -e postgres@dbgate-plugin-postgres -s localhost -u postgres -p Pwd2020Db -d deployed",
"dbmodel:build": "cross-env DEVMODE=1 node ./bin/dbmodel.js build testdata/db testdata/db.sql -e postgres@dbgate-plugin-postgres "
},
"files": [
"LICENSE",
"README.md",
"bin"
],
"dependencies": {
"commander": "^10.0.0",
"dbgate-api": "^5.0.0-alpha.1",
"dbgate-plugin-csv": "^5.0.0-alpha.1",
"dbgate-plugin-excel": "^5.0.0-alpha.1",
"dbgate-plugin-mongo": "^5.0.0-alpha.1",
"dbgate-plugin-mssql": "^5.0.0-alpha.1",
"dbgate-plugin-mysql": "^5.0.0-alpha.1",
"dbgate-plugin-postgres": "^5.0.0-alpha.1",
"dbgate-plugin-xml": "^5.0.0-alpha.1",
"dbgate-plugin-oracle": "^5.0.0-alpha.1",
"dbgate-web": "^5.0.0-alpha.1",
"dotenv": "^16.0.0",
"pinomin": "^1.0.1"
}
}
+76
@@ -0,0 +1,76 @@
BEGIN TRANSACTION;
CREATE TABLE "language" (
"language_id" CHARACTER(31) NOT NULL,
"name" VARCHAR(255) NULL,
PRIMARY KEY ("language_id")
);
CREATE TABLE "preparation_step" (
"preparation_step_id" SERIAL,
"recipe_id" INTEGER NULL,
"description" VARCHAR NULL,
"duration" INTERVAL NULL,
"step_order" SMALLINT NULL,
"language_id" CHARACTER(31) NULL,
PRIMARY KEY ("preparation_step_id")
);
CREATE TABLE "recipe" (
"recipe_id" SERIAL,
"name" VARCHAR(1000) NOT NULL,
"is_public" BOOLEAN NULL,
"user_id" INTEGER NULL,
"source_url" VARCHAR(1000) NULL,
"cook_duration" INTERVAL NULL,
"servings" SMALLINT NULL,
"language_id" CHARACTER(31) NULL,
"deleted_date" TIMESTAMP NULL,
PRIMARY KEY ("recipe_id")
);
CREATE TABLE "recipe_has_ingredient" (
"recipe_id" INTEGER NOT NULL,
"recipe_ingredient_id" INTEGER NOT NULL,
"unit_id" INTEGER NULL,
"unit" VARCHAR(100) NULL,
"name" VARCHAR(1000) NULL,
"amount" REAL NULL,
PRIMARY KEY ("recipe_id", "recipe_ingredient_id")
);
CREATE TABLE "recipe_photo" (
"recipe_photo_id" SERIAL,
"path" VARCHAR NULL,
"name" VARCHAR(255) NULL,
"recipe_id" INTEGER NULL,
"user_id" INTEGER NULL,
"created_on" TIMESTAMP NULL,
PRIMARY KEY ("recipe_photo_id")
);
CREATE TABLE "shop" (
"shop_id" SERIAL,
"name" VARCHAR(255) NULL,
"url" VARCHAR NULL,
"logo_path" VARCHAR NULL,
PRIMARY KEY ("shop_id")
);
CREATE TABLE "unit" (
"unit_id" SERIAL,
"name" VARCHAR(255) NOT NULL,
"user_id" INTEGER NULL,
PRIMARY KEY ("unit_id")
);
CREATE TABLE "user" (
"user_id" SERIAL,
"name" VARCHAR(255) NULL,
"email" VARCHAR(255) NULL,
"password_hash" VARCHAR NULL,
"last_logged_on" TIMESTAMP NULL,
"user_settings_id" INTEGER NULL,
PRIMARY KEY ("user_id")
);
ALTER TABLE "preparation_step" ADD FOREIGN KEY ("recipe_id") REFERENCES "recipe" ("recipe_id");
ALTER TABLE "preparation_step" ADD FOREIGN KEY ("language_id") REFERENCES "language" ("language_id");
ALTER TABLE "recipe" ADD FOREIGN KEY ("user_id") REFERENCES "user" ("user_id");
ALTER TABLE "recipe" ADD FOREIGN KEY ("language_id") REFERENCES "language" ("language_id");
ALTER TABLE "recipe_has_ingredient" ADD FOREIGN KEY ("recipe_id") REFERENCES "recipe" ("recipe_id");
ALTER TABLE "recipe_has_ingredient" ADD FOREIGN KEY ("unit_id") REFERENCES "unit" ("unit_id");
ALTER TABLE "recipe_photo" ADD FOREIGN KEY ("recipe_id") REFERENCES "recipe" ("recipe_id");
ALTER TABLE "recipe_photo" ADD FOREIGN KEY ("user_id") REFERENCES "user" ("user_id");
COMMIT;
+5 -1
@@ -1,4 +1,4 @@
import { isTypeDateTime } from 'dbgate-tools';
import { arrayToHexString, isTypeDateTime } from 'dbgate-tools';
import moment from 'moment';
export type FilterMultipleValuesMode = 'is' | 'is_not' | 'contains' | 'begins' | 'ends';
@@ -9,6 +9,10 @@ export function getFilterValueExpression(value, dataType?) {
if (value === true) return 'TRUE';
if (value === false) return 'FALSE';
if (value.$oid) return `ObjectId("${value.$oid}")`;
if (value.type == 'Buffer' && Array.isArray(value.data)) {
return '0x' + arrayToHexString(value.data);
}
return `="${value}"`;
}
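The added branch lets "filter by value" work on binary cells: a Buffer that arrived JSON-serialized as `{ type: 'Buffer', data: [...] }` is rendered as a `0x…` hex literal instead of falling through to the generic `="…"` case. A minimal sketch of the expected behaviour; the import path and the exact hex casing are assumptions, not taken from this diff:

```ts
// Sketch only: the package exporting getFilterValueExpression is assumed here.
import { getFilterValueExpression } from 'dbgate-filterparser';

// JSON-serialized Node.js Buffer, as it typically arrives from the data grid
const binaryCell = { type: 'Buffer', data: [0xde, 0xad, 0xbe, 0xef] };

getFilterValueExpression(binaryCell); // => '0xdeadbeef' (casing depends on arrayToHexString)
getFilterValueExpression(true);       // => 'TRUE'
getFilterValueExpression({ $oid: '507f1f77bcf86cd799439011' }); // => 'ObjectId("507f1f77bcf86cd799439011")'
```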
+11 -1
@@ -5,6 +5,7 @@ import { Condition } from 'dbgate-sqltree';
import { interpretEscapes, token, word, whitespace } from './common';
import { mongoParser } from './mongoParser';
import { datetimeParser } from './datetimeParser';
import { hexStringToArray } from 'dbgate-tools';
const binaryCondition = operator => value => ({
conditionType: 'binary',
@@ -104,6 +105,14 @@ const createParser = (filterType: FilterType) => {
.map(Number)
.desc('number'),
hexstring: () =>
token(P.regexp(/0x(([0-9a-fA-F][0-9a-fA-F])+)/, 1))
.map(x => ({
type: 'Buffer',
data: hexStringToArray(x),
}))
.desc('hex string'),
noQuotedString: () => P.regexp(/[^\s^,^'^"]+/).desc('string unquoted'),
sql: () =>
@@ -113,6 +122,7 @@ const createParser = (filterType: FilterType) => {
value: r => P.alt(...allowedValues.map(x => r[x])),
valueTestEq: r => r.value.map(binaryCondition('=')),
hexTestEq: r => r.hexstring.map(binaryCondition('=')),
valueTestStr: r => r.value.map(likeCondition('like', '%#VALUE#%')),
comma: () => word(','),
@@ -158,7 +168,7 @@ const createParser = (filterType: FilterType) => {
allowedElements.push('le', 'ge', 'lt', 'gt');
}
if (filterType == 'string') {
allowedElements.push('empty', 'notEmpty');
allowedElements.push('empty', 'notEmpty', 'hexTestEq');
}
if (filterType == 'eval' || filterType == 'string') {
allowedElements.push('startsWith', 'endsWith', 'contains', 'startsWithNot', 'endsWithNot', 'containsNot');
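On the parsing side, the new `hexstring` token and the `hexTestEq` rule mean a `0x…` literal typed into a string-type filter is parsed back into a binary equality test rather than a LIKE. A rough sketch; the exported entry point and its signature are assumptions:

```ts
// Sketch only: parseFilter is assumed to be the public entry point of dbgate-filterparser.
import { parseFilter } from 'dbgate-filterparser';

// '0xDEADBEEF' matches the hexstring token, so the result should be an '=' binary condition
// whose value is { type: 'Buffer', data: hexStringToArray('DEADBEEF') },
// i.e. the inverse of what getFilterValueExpression produced above.
const condition = parseFilter('0xDEADBEEF', 'string');
```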
+15 -3
@@ -1,5 +1,6 @@
import _ from 'lodash';
import { Condition, BinaryCondition } from './types';
import _cloneDeepWith from 'lodash/cloneDeepWith';
import _escapeRegExp from 'lodash/escapeRegExp';
import { Condition, Expression } from './types';
import { evaluateExpression } from './evaluateExpression';
function isEmpty(value) {
@@ -10,7 +11,7 @@ function isEmpty(value) {
function isLike(value, test) {
if (!value) return false;
if (!test) return false;
const regex = new RegExp(`^${_.escapeRegExp(test).replace(/%/g, '.*')}$`, 'i');
const regex = new RegExp(`^${_escapeRegExp(test).replace(/%/g, '.*')}$`, 'i');
const res = !!value.toString().match(regex);
return res;
}
@@ -55,5 +56,16 @@ export function evaluateCondition(condition: Condition, values) {
return !isLike(evaluateExpression(condition.left, values), evaluateExpression(condition.right, values));
case 'not':
return !evaluateCondition(condition.condition, values);
case 'anyColumnPass':
return Object.keys(values).some(columnName => {
const replaced = _cloneDeepWith(condition.placeholderCondition, (expr: Expression) => {
if (expr.exprType == 'placeholder')
return {
exprType: 'column',
columnName,
};
});
return evaluateCondition(replaced, values);
});
}
}
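The new `anyColumnPass` branch clones the placeholder condition once per column of the row, substitutes the placeholder expression with that column, and succeeds if any substitution matches. A sketch of how it could be driven; the condition and expression field shapes are inferred from this diff rather than taken from dbgate-sqltree's definitions:

```ts
import { evaluateCondition } from 'dbgate-sqltree'; // export location assumed

// "any column contains 'john'": the placeholder is replaced by each column in turn
const anyColumnContainsJohn: any = {
  conditionType: 'anyColumnPass',
  placeholderCondition: {
    conditionType: 'like', // condition/expression shapes assumed
    left: { exprType: 'placeholder' },
    right: { exprType: 'value', value: '%john%' },
  },
};

evaluateCondition(anyColumnContainsJohn, { name: 'John Doe', city: 'Prague' }); // => true
evaluateCondition(anyColumnContainsJohn, { name: 'Alice', city: 'Brno' });      // => false
```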
+7 -1
@@ -111,6 +111,11 @@ export interface RawTemplateCondition {
expr: Expression;
}
export interface AnyColumnPassEvalOnlyCondition {
conditionType: 'anyColumnPass';
placeholderCondition: Condition;
}
export type Condition =
| BinaryCondition
| NotCondition
@@ -121,7 +126,8 @@ export type Condition =
| NotExistsCondition
| BetweenCondition
| InCondition
| RawTemplateCondition;
| RawTemplateCondition
| AnyColumnPassEvalOnlyCondition;
export interface Source {
name?: NamedObjectInfo;
+1
@@ -36,6 +36,7 @@
"debug": "^4.3.4",
"json-stable-stringify": "^1.0.1",
"lodash": "^4.17.21",
"pinomin": "^1.0.1",
"uuid": "^3.4.0"
}
}
+21 -8
@@ -3,6 +3,9 @@ import _sortBy from 'lodash/sortBy';
import _groupBy from 'lodash/groupBy';
import _pick from 'lodash/pick';
import _compact from 'lodash/compact';
import { getLogger } from './getLogger';
const logger = getLogger('dbAnalyser');
const STRUCTURE_FIELDS = ['tables', 'collections', 'views', 'matviews', 'functions', 'procedures', 'triggers'];
@@ -107,7 +110,7 @@ export class DatabaseAnalyser {
this.modifications = structureModifications;
if (structureWithRowCounts) this.structure = structureWithRowCounts;
console.log('DB modifications detected:', this.modifications);
logger.info({ modifications: this.modifications }, 'DB modifications detected:');
return this.addEngineField(this.mergeAnalyseResult(await this._runAnalysis()));
}
@@ -170,9 +173,9 @@ export class DatabaseAnalyser {
// return this.structure.tables.find((x) => x.objectId == id);
// }
containsObjectIdCondition(typeFields) {
return this.createQueryCore('=OBJECT_ID_CONDITION', typeFields) != ' is not null';
}
// containsObjectIdCondition(typeFields) {
// return this.createQueryCore('=OBJECT_ID_CONDITION', typeFields) != ' is not null';
// }
createQuery(template, typeFields) {
return this.createQueryCore(template, typeFields);
@@ -197,7 +200,7 @@ export class DatabaseAnalyser {
.filter(x => typeFields.includes(x.objectTypeField) && (x.action == 'add' || x.action == 'change'))
.map(x => x.objectId);
if (filterIds.length == 0) {
return template.replace(/=OBJECT_ID_CONDITION/g, " = '0'");
return null;
}
return template.replace(/=OBJECT_ID_CONDITION/g, ` in (${filterIds.map(x => `'${x}'`).join(',')})`);
}
@@ -232,7 +235,10 @@ export class DatabaseAnalyser {
if (this.pool.feedback) {
this.pool.feedback(obj);
}
}
if (obj && obj.analysingMessage) {
logger.debug(obj.analysingMessage);
}
}
async getModifications() {
const snapshot = await this._getFastSnapshot();
@@ -293,11 +299,18 @@ export class DatabaseAnalyser {
return [..._compact(res), ...this.getDeletedObjects(snapshot)];
}
async safeQuery(sql) {
async analyserQuery(template, typeFields) {
const sql = this.createQuery(template, typeFields);
if (!sql) {
return {
rows: [],
};
}
try {
return await this.driver.query(this.pool, sql);
} catch (err) {
console.log('Error running analyser query', err.message);
logger.error({ err }, 'Error running analyser query');
return {
rows: [],
};
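Replacing `safeQuery` with `analyserQuery` moves the null-query check into the base class: when the incremental-analysis filter leaves no object ids to refresh, `createQuery` now returns null and `analyserQuery` answers `{ rows: [] }` without touching the database. A hypothetical driver-side call site; the SQL template and field names are illustrative only:

```ts
import { DatabaseAnalyser } from 'dbgate-tools';

class ExampleAnalyser extends DatabaseAnalyser {
  async _runAnalysis() {
    // =OBJECT_ID_CONDITION is expanded to "in ('id1','id2',…)" for changed objects;
    // when nothing changed, analyserQuery short-circuits and returns { rows: [] }
    const tables = await this.analyserQuery(
      "select table_name from information_schema.tables where table_name =OBJECT_ID_CONDITION",
      ['tables']
    );
    return { tables: tables.rows.map(row => ({ pureName: row.table_name })) };
  }
}
```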
+14
@@ -57,6 +57,10 @@ export class ScriptWriter {
this._put(`await dbgateApi.importDatabase(${JSON.stringify(options)});`);
}
dataDuplicator(options) {
this._put(`await dbgateApi.dataDuplicator(${JSON.stringify(options)});`);
}
comment(s) {
this._put(`// ${s}`);
}
@@ -143,6 +147,13 @@ export class ScriptWriterJson {
});
}
dataDuplicator(options) {
this.commands.push({
type: 'dataDuplicator',
options,
});
}
getScript(schedule = null) {
return {
type: 'json',
@@ -186,6 +197,9 @@ export function jsonScriptToJavascript(json) {
case 'importDatabase':
script.importDatabase(cmd.options);
break;
case 'dataDuplicator':
script.dataDuplicator(cmd.options);
break;
}
}
+20 -10
@@ -197,6 +197,8 @@ export class SqlDumper implements AlterProcessor {
specialColumnOptions(column) {}
selectScopeIdentity(table: TableInfo) {}
columnDefinition(column: ColumnInfo, { includeDefault = true, includeNullable = true, includeCollate = true } = {}) {
if (column.computedExpression) {
this.put('^as %s', column.computedExpression);
@@ -242,16 +244,7 @@ export class SqlDumper implements AlterProcessor {
this.put('%i ', col.columnName);
this.columnDefinition(col);
});
if (table.primaryKey) {
this.put(',&n');
if (table.primaryKey.constraintName) {
this.put('^constraint %i', table.primaryKey.constraintName);
}
this.put(
' ^primary ^key (%,i)',
table.primaryKey.columns.map(x => x.columnName)
);
}
this.createTablePrimaryKeyCore(table);
(table.foreignKeys || []).forEach(fk => {
this.put(',&n');
@@ -273,6 +266,19 @@ export class SqlDumper implements AlterProcessor {
});
}
createTablePrimaryKeyCore(table: TableInfo) {
if (table.primaryKey) {
this.put(',&n');
if (table.primaryKey.constraintName) {
this.put('^constraint %i', table.primaryKey.constraintName);
}
this.put(
' ^primary ^key (%,i)',
table.primaryKey.columns.map(x => x.columnName)
);
}
}
createForeignKeyFore(fk: ForeignKeyInfo) {
if (fk.constraintName != null) this.put('^constraint %i ', fk.constraintName);
this.put(
@@ -533,6 +539,10 @@ export class SqlDumper implements AlterProcessor {
this.putCmd('^commit');
}
rollbackTransaction() {
this.putCmd('^rollback');
}
alterProlog() {}
alterEpilog() {}
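Pulling the PRIMARY KEY clause out into `createTablePrimaryKeyCore` lets a dialect-specific dumper reposition or suppress it without duplicating the whole createTable routine. A sketch of such an override; the subclass and the skip rule are hypothetical:

```ts
import { SqlDumper } from 'dbgate-tools'; // export location assumed
import type { TableInfo } from 'dbgate-types';

class ExampleDialectDumper extends SqlDumper {
  createTablePrimaryKeyCore(table: TableInfo) {
    // hypothetical rule: skip the separate PK clause when the key is a single
    // auto-increment column that columnDefinition() already declared inline
    const pkColumns = table.primaryKey?.columns || [];
    const firstPk = table.columns?.find(c => c.columnName == pkColumns[0]?.columnName);
    if (pkColumns.length == 1 && firstPk?.autoIncrement) return;
    super.createTablePrimaryKeyCore(table);
  }
}
```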
+5 -2
@@ -8,10 +8,13 @@ import type {
ViewInfo,
} from 'dbgate-types';
import _flatten from 'lodash/flatten';
import _uniqBy from 'lodash/uniqBy'
import _uniqBy from 'lodash/uniqBy';
import { getLogger } from './getLogger';
import { SqlDumper } from './SqlDumper';
import { extendDatabaseInfo } from './structureTools';
const logger = getLogger('sqlGenerator');
interface SqlGeneratorOptions {
dropTables: boolean;
checkIfTableExists: boolean;
@@ -82,7 +85,7 @@ export class SqlGenerator {
}
private handleException = error => {
console.log('Unhandled error', error);
logger.error({ error }, 'Unhandled error');
this.isUnhandledException = true;
};

Some files were not shown because too many files have changed in this diff.