Compare commits

..

363 Commits

Author SHA1 Message Date
Stela Augustinova c51dad39e0 Improve saveRows method to handle backpressure and errors in file writing 2026-04-14 09:49:14 +02:00
Stela Augustinova 1d350a3a29 Add validation to assertSafeArchiveName to prevent resolving to archive root 2026-04-13 14:55:29 +02:00
Stela Augustinova 81e3cce070 Add validation for linkedFolder in createLink method 2026-04-13 13:54:48 +02:00
Stela Augustinova f9de2d77b5 Moved functionName validation 2026-04-13 13:44:45 +02:00
Stela Augustinova 3956eaf389 Improve error handling in unzipDirectory by adding readStream error listener and immediate abort on file extraction failure 2026-04-13 13:20:28 +02:00
Stela Augustinova d13e2c2d87 Make fs.writeFile call awaitable in createLink method for proper async handling 2026-04-13 12:53:54 +02:00
Stela Augustinova ebf2371da9 Enhance unzipDirectory error handling and manage active streams for better resource cleanup 2026-04-13 12:43:45 +02:00
Stela Augustinova fa4b12448d Refactor unzipDirectory to improve error handling and prevent multiple rejections 2026-04-13 10:48:01 +02:00
Stela Augustinova 5fe6dfa551 Refactor loaderScriptTemplate to streamline plugin extraction and improve readability 2026-04-13 10:41:12 +02:00
Stela Augustinova 6061c8b0a5 Update JS reserved words 2026-04-13 10:31:12 +02:00
Stela Augustinova 1ac0aa8a3e Add path traversal and null byte checks for archive names and ZIP entries 2026-04-13 09:19:03 +02:00
Stela Augustinova 5d04d7f01f Enhance JavaScript identifier validation and update variable storage method in ScriptWriterEval 2026-04-10 16:15:45 +02:00
Stela Augustinova 9c97e347c5 Add validation for JavaScript identifiers and shell API function names 2026-04-10 13:22:54 +02:00
Stela Augustinova 22967d123d Add 7.1.8 entry to CHANGELOG with fixed NPM packages build 2026-04-09 16:05:50 +02:00
Stela Augustinova 3fed650254 Add postgresql optimization to 7.1.7 changelog 2026-04-09 16:03:24 +02:00
Stela Augustinova b57b2083d3 v7.1.8 2026-04-09 15:35:13 +02:00
Stela Augustinova 1f47e8c62e v7.1.8-alpha.7 2026-04-09 15:26:28 +02:00
CI workflows d7ce653d74 chore: auto-update github workflows 2026-04-09 13:25:14 +00:00
Stela Augustinova 07c803efee Update npm publishing steps and remove unnecessary access flag 2026-04-09 15:24:48 +02:00
Stela Augustinova 26b6d9133e v7.1.8-alpha.6 2026-04-09 15:15:37 +02:00
CI workflows 146084bdb3 chore: auto-update github workflows 2026-04-09 13:14:54 +00:00
Stela Augustinova fa82b4630b Specify npm version to 11.5.1 for consistency 2026-04-09 15:14:24 +02:00
Stela Augustinova d00841030f v7.1.8-alpha.5 2026-04-09 15:01:14 +02:00
CI workflows c517bb0be6 chore: auto-update github workflows 2026-04-09 13:00:00 +00:00
Stela Augustinova e585d8be8f Add public access to npm publish commands in build workflow 2026-04-09 14:59:25 +02:00
Stela Augustinova 8be76832a5 v7.1.8-alpha.4 2026-04-09 14:54:23 +02:00
Stela Augustinova 99df266a3e v7.1.8-alpha.4 2026-04-09 14:50:56 +02:00
CI workflows 5660874992 chore: auto-update github workflows 2026-04-09 12:50:31 +00:00
Stela Augustinova b0dade9da3 Configure NPM token in build workflow 2026-04-09 14:50:11 +02:00
Stela Augustinova a533858804 v7.1.8-alpha.3 2026-04-09 14:20:39 +02:00
CI workflows d3bcc984e7 chore: auto-update github workflows 2026-04-09 12:18:26 +00:00
Stela Augustinova 99e8307a80 Enable NPM token configuration in build workflow 2026-04-09 14:17:59 +02:00
Stela Augustinova 73926ea392 v7.1.8-alpha.2 2026-04-09 14:12:17 +02:00
CI workflows 5ff24526b7 chore: auto-update github workflows 2026-04-09 12:11:15 +00:00
Stela Augustinova 32ed1c57bd Update Node.js setup to use yarn caching and remove npm install step 2026-04-09 14:10:50 +02:00
Stela Augustinova f4c3a95348 v7.1.8-alpha.1 2026-04-09 14:02:38 +02:00
CI workflows b1a908343a chore: auto-update github workflows 2026-04-09 11:58:25 +00:00
Stela Augustinova 7f9d7eb36e Update Node.js setup action and enable npm caching 2026-04-09 13:57:51 +02:00
Stela Augustinova 30820e29fc Update CHANGELOG for version 7.1.7 2026-04-09 13:23:07 +02:00
Stela Augustinova a85ea2e0f7 v7.1.7 2026-04-09 12:56:57 +02:00
Stela Augustinova 993e713955 v7.1.7-premium-beta.5 2026-04-09 12:11:02 +02:00
Stela Augustinova 3151e30db1 SYNC: Update translations 2026-04-09 08:59:26 +00:00
Jan Prochazka eb5219dd68 Merge pull request #1422 from dbgate/feature/duplicate-translation-keys
Remove duplicate translation keys
2026-04-09 10:49:30 +02:00
Stela Augustinova bb44783369 Refactor translation keys to eliminate duplicates in QueryTab component 2026-04-09 10:33:33 +02:00
CI workflows 33b46c4db3 chore: auto-update github workflows 2026-04-09 08:24:34 +00:00
Jan Prochazka 3730aae62a Merge pull request #1419 from dbgate/feature/map-referer
Added referer
2026-04-09 10:24:25 +02:00
CI workflows 065062d58a Update pro ref 2026-04-09 08:24:16 +00:00
Jan Prochazka 7b2f58e68e SYNC: Merge pull request #92 from dbgate/feature/ai-toggle 2026-04-09 08:24:02 +00:00
Stela Augustinova e2fc23fcf8 Remove duplicate translation keys 2026-04-09 10:12:39 +02:00
SPRINX0\prochazka 6f56ef284d v7.1.7-premium-beta.4 2026-04-08 16:14:19 +02:00
SPRINX0\prochazka 08a644ba39 v7.1.7-premium-beta.4 2026-04-08 16:07:40 +02:00
CI workflows 6ae19ac4a6 chore: auto-update github workflows 2026-04-08 14:06:22 +00:00
CI workflows 7761cbe81d Update pro ref 2026-04-08 14:05:57 +00:00
Jan Prochazka f981d88150 SYNC: Merge pull request #91 from dbgate/feature/query-history-per-user 2026-04-08 14:05:40 +00:00
CI workflows e2a23eaa0d chore: auto-update github workflows 2026-04-08 12:57:03 +00:00
CI workflows 9d510b3c08 Update pro ref 2026-04-08 12:56:40 +00:00
SPRINX0\prochazka a98f5ac45e reverted yarn.lock 2026-04-08 14:03:13 +02:00
SPRINX0\prochazka b989e964c0 v7.1.7-premium-beta.3 2026-04-08 13:34:11 +02:00
CI workflows 3ff6eefa06 chore: auto-update github workflows 2026-04-08 11:29:47 +00:00
CI workflows 67fde9be3c Update pro ref 2026-04-08 11:29:28 +00:00
SPRINX0\prochazka df7ac89723 SYNC: v7.1.7-premium-beta.2 2026-04-08 11:29:18 +00:00
SPRINX0\prochazka 358df9f53b SYNC: try to fix ms entra login 2026-04-08 11:29:15 +00:00
Stela Augustinova 02e3bfaa8a Added referer 2026-04-08 12:05:42 +02:00
Jan Prochazka dde74fa73b Merge pull request #1407 from dbgate/feature/postgres-optimalization
Feature/postgres optimization
2026-04-08 11:46:42 +02:00
SPRINX0\prochazka 100e3fe75f deleted sast workflows 2026-04-08 10:59:29 +02:00
SPRINX0\prochazka af7930cea2 Enhance aggregation functions in SQL queries for improved PostgreSQL compatibility 2026-04-08 10:55:24 +02:00
SPRINX0\prochazka 6b4f6b909c Merge branch 'feature/postgres-optimalization' of https://github.com/dbgate/dbgate into feature/postgres-optimalization 2026-04-08 10:26:35 +02:00
Jan Prochazka 9a6e5cd7cc Update plugins/dbgate-plugin-postgres/src/backend/sql/views.js
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-04-08 10:21:22 +02:00
SPRINX0\prochazka 9f64b6ec7a Merge branch 'master' into feature/postgres-optimalization 2026-04-08 10:20:28 +02:00
Stela Augustinova 77f720e34c Refactor connection handling in extractShellConnection to improve volatile ID management and ensure secure credential handling 2026-04-08 10:20:09 +02:00
Stela Augustinova 168dcb7824 Enhance error handling for connection requests in subprocesses and validate connection ID format 2026-04-08 10:20:09 +02:00
Stela Augustinova 759186a212 Improve error handling for volatile connection responses in subprocess communication 2026-04-08 10:20:09 +02:00
Stela Augustinova 71ed7a76ea Handle errors in volatile connection resolution and remove unused registration function 2026-04-08 10:20:09 +02:00
Stela Augustinova bd939b22c7 Fix volatile connection resolution to prevent multiple resolves 2026-04-08 10:20:09 +02:00
Stela Augustinova c327f77294 Refactor volatile connections handling in connections and runners modules 2026-04-08 10:20:09 +02:00
Stela Augustinova d907d79beb Streamline volatile connections handling and remove unused registration module 2026-04-08 10:20:09 +02:00
Stela Augustinova 93b879927c Implement volatile connections handling in runners and shell modules 2026-04-08 10:20:09 +02:00
Stela Augustinova 0c545d4cf9 Enhance clipboard formatters to skip empty rows, improving data handling in clipboard operations 2026-04-08 10:20:09 +02:00
Stela Augustinova 95c90c1517 Improve clipboard formatters to omit undefined values, enhancing data integrity in exports 2026-04-08 10:20:09 +02:00
CI workflows cb731fa858 chore: auto-update github workflows 2026-04-08 10:20:09 +02:00
Stela Augustinova 9bb3b09ecf SYNC: Add SAST workflow for security scanning using Semgrep 2026-04-08 10:20:09 +02:00
SPRINX0\prochazka 7c8f541d3e deleted sast workflow 2026-04-08 10:18:37 +02:00
Jan Prochazka ce41687382 Merge pull request #1417 from dbgate/feature/auth-error
Implement volatile connections handling in runners and shell modules
2026-04-08 10:14:02 +02:00
Stela Augustinova 4b083dea5c Refactor connection handling in extractShellConnection to improve volatile ID management and ensure secure credential handling 2026-04-07 14:56:29 +02:00
Stela Augustinova c84473c1eb Enhance error handling for connection requests in subprocesses and validate connection ID format 2026-04-07 14:26:58 +02:00
Stela Augustinova 7fc078f3e6 Improve error handling for volatile connection responses in subprocess communication 2026-04-07 14:15:18 +02:00
Stela Augustinova cbbd538248 Handle errors in volatile connection resolution and remove unused registration function 2026-04-07 14:01:13 +02:00
Stela Augustinova 825f6e562b Fix volatile connection resolution to prevent multiple resolves 2026-04-07 13:46:34 +02:00
Stela Augustinova a278afb260 Refactor volatile connections handling in connections and runners modules 2026-04-07 13:42:11 +02:00
Stela Augustinova 2fbeea717c Streamline volatile connections handling and remove unused registration module 2026-04-07 13:26:16 +02:00
Jan Prochazka c7259e4663 Merge pull request #1412 from dbgate/feature/copy-sql
Improve clipboard formatters to omit undefined values, enhancing data…
2026-04-07 13:11:49 +02:00
Stela Augustinova 69a2669342 Implement volatile connections handling in runners and shell modules 2026-04-07 13:06:04 +02:00
CI workflows 42d1ca8fd4 chore: auto-update github workflows 2026-04-07 10:27:40 +00:00
Stela Augustinova 1cf52d8b39 SYNC: Add SAST workflow for security scanning using Semgrep 2026-04-07 10:27:24 +00:00
Jan Prochazka 6e482afab2 v7.1.7-premium-beta.1 2026-04-02 16:39:06 +02:00
SPRINX0\prochazka ddf3295e6d Merge branch 'master' into feature/postgres-optimalization 2026-04-02 16:33:25 +02:00
SPRINX0\prochazka 79e087abd3 Optimize PostgreSQL analysis queries and add support for Info Schema routines 2026-04-02 16:32:36 +02:00
CI workflows a7cf51bdf7 chore: auto-update github workflows 2026-04-02 13:55:33 +00:00
Jan Prochazka dfdb31e2f8 Merge pull request #1413 from dbgate/feature/integration-test-pro
Update test workflow to include directory changes for integration tests
2026-04-02 15:55:14 +02:00
Stela Augustinova 3508ddc3ca Update test workflow to include directory changes for integration tests 2026-04-02 11:02:36 +02:00
Stela Augustinova 137fc6b928 Enhance clipboard formatters to skip empty rows, improving data handling in clipboard operations 2026-04-02 10:29:02 +02:00
Jan Prochazka e6f5295420 Merge pull request #1410 from dbgate/feature/large-fields
Enhance binary size handling in grid cell display
2026-04-01 16:01:23 +02:00
CI workflows 2bb08921c3 chore: auto-update github workflows 2026-04-01 13:55:00 +00:00
Stela Augustinova ee2d0e4c30 Remove unnecessary restart policy for DynamoDB service 2026-04-01 15:54:35 +02:00
Jan Prochazka c43a838572 Merge pull request #1411 from dbgate/feature/unreadable-dropdown
Correct class binding and update style variables in SelectField compo…
2026-04-01 15:53:23 +02:00
CI workflows 17ff6a8013 chore: auto-update github workflows 2026-04-01 13:53:13 +00:00
Stela Augustinova 62ad6a0d08 Remove unnecessary restart policy for MongoDB service 2026-04-01 15:52:48 +02:00
CI workflows 5c049fa867 chore: auto-update github workflows 2026-04-01 13:51:09 +00:00
CI workflows 619f17114a Update pro ref 2026-04-01 13:50:58 +00:00
Stela Augustinova 1c1431014c SYNC: Merge pull request #87 from dbgate/feature/collection-test 2026-04-01 13:50:46 +00:00
Stela Augustinova 9d1d7b7e34 Improve clipboard formatters to omit undefined values, enhancing data integrity in exports 2026-04-01 15:49:35 +02:00
Stela Augustinova f68ca1e786 Correct class binding and update style variables in SelectField component 2026-04-01 13:24:34 +02:00
Stela Augustinova 8d16a30064 Fix message formatting for large binary fields in stringifyCellValue function 2026-04-01 10:55:47 +02:00
Stela Augustinova cf601c33c0 Enhance binary size handling in grid cell display 2026-04-01 10:25:40 +02:00
Jan Prochazka 588cd39d7c Merge pull request #1404 from dbgate/feature/fetch-all-button
Add fetch all button
2026-04-01 09:44:04 +02:00
Stela Augustinova 79ebfa9b7a Add fetchAll command to dataGrid menu 2026-03-31 13:37:06 +02:00
Stela Augustinova 0c6b2746d1 Fix file stream reference in jsldata and remove redundant buffer assignment in LoadingDataGridCore 2026-03-31 08:59:33 +02:00
Stela Augustinova 978972c55c Enhance file path validation in streamRows to include symlink resolution and case normalization, improving security and error handling 2026-03-31 08:31:43 +02:00
Stela Augustinova 37854fc577 Refactor fetchAll to trim lines before parsing, improving error handling for malformed data 2026-03-31 06:54:37 +02:00
Stela Augustinova 5537e193a6 Improve fetchAll error handling and cleanup process during streaming and paginated reads 2026-03-31 06:21:06 +02:00
Stela Augustinova 0d42b2b133 Refactor fetchAll cancel function to improve cleanup process and prevent errors 2026-03-30 15:48:35 +02:00
Stela Augustinova 44bd7972d4 Enhance fetchAll functionality with improved error handling and state management 2026-03-30 14:34:57 +02:00
Stela Augustinova 5143eb39f7 Implement fetchAll functionality with streaming support and error handling 2026-03-30 13:30:12 +02:00
Stela Augustinova cf51883b3e Add checkbox to skip confirmation when fetching all rows 2026-03-26 15:24:25 +01:00
Stela Augustinova 484ca0c78a Reset loaded time reference in reload function 2026-03-26 15:11:11 +01:00
Stela Augustinova 8f5cad0e2c Prevent loading next data when fetching all rows is in progress 2026-03-26 15:03:54 +01:00
Stela Augustinova 988512a571 Update warning message in FetchAllConfirmModal to simplify language 2026-03-26 14:50:09 +01:00
Stela Augustinova f8bd380051 Optimize fetchAllRows by using a local buffer to reduce array copies and improve performance 2026-03-26 14:19:11 +01:00
Stela Augustinova 281131dbba Enhance fetchAll functionality by adding loading state check 2026-03-26 14:07:12 +01:00
Stela Augustinova ea3a61077a v7.1.6 2026-03-26 12:47:09 +01:00
Stela Augustinova d1a898b40d SYNC: Add translations for cloudUnavailable message in multiple languages 2026-03-26 11:11:07 +00:00
Stela Augustinova a521a81ef0 v7.1.6-premium-beta.1 2026-03-26 11:25:13 +01:00
Stela Augustinova 2505c61975 Add fetch all button 2026-03-26 11:24:05 +01:00
Stela Augustinova ab5a54dbb6 SYNC: Merge pull request #89 from dbgate/feature/cloud-error 2026-03-26 10:12:05 +00:00
Stela Augustinova 44ad8fa60a Update CHANGELOG for version 7.1.5 2026-03-25 16:59:13 +01:00
Stela Augustinova 5b27a241d7 v7.1.5 2026-03-25 16:21:59 +01:00
Stela Augustinova 084019ca65 v7.1.5-premium-beta.3 2026-03-25 15:21:43 +01:00
Stela Augustinova ba147af8fe SYNC: v7.1.5-premium-beta.2 2026-03-25 14:08:24 +00:00
Stela Augustinova 1b3f4db07d SYNC: Merge pull request #88 from dbgate/feature/cloud-error 2026-03-25 13:39:00 +00:00
Jan Prochazka c36705d458 Merge pull request #1395 from dbgate/feature/display-uuid
Feature/display UUID
2026-03-25 10:04:58 +01:00
Stela Augustinova 0e126cb8cf Enhance BinData subType handling to support hexadecimal strings and improve validation 2026-03-25 08:32:03 +01:00
Stela Augustinova c48183a539 Enhance base64 to UUID conversion with error handling and regex improvements 2026-03-25 08:23:15 +01:00
Stela Augustinova 50f380dbbe Enhance uuidToBase64 function with validation and improve UUID parsing in parseCellValue 2026-03-24 17:15:32 +01:00
Stela Augustinova 66023a9a68 Validate base64 UUID conversion and enhance handling in stringifyCellValue 2026-03-24 17:06:52 +01:00
Stela Augustinova c3fbc3354c Validate BinData subType to ensure it is an integer between 0 and 255 2026-03-24 16:32:16 +01:00
Jan Prochazka a7d2ed11f3 SYNC: Merge pull request #86 from dbgate/feature/icon-vulnerability 2026-03-23 12:50:27 +00:00
SPRINX0\prochazka 899aec2658 v7.1.5-premium-beta.1 2026-03-20 14:24:11 +01:00
SPRINX0\prochazka 74e47587e2 Merge branch 'master' into feature/postgres-optimalization 2026-03-20 14:23:40 +01:00
Stela Augustinova 6a3dc92572 Add uuid to base64 conversion and enhance cell value parsing for UUIDs 2026-03-20 12:46:50 +01:00
Stela Augustinova e3a4667422 feat: add base64 to UUID conversion and integrate into cell value parsing 2026-03-19 14:50:08 +01:00
Stela Augustinova c4dd99bba9 Changelog 7.1.4 2026-03-19 13:07:44 +01:00
SPRINX0\prochazka cb70f3c318 postgres loading optimization 2026-03-19 12:17:29 +01:00
Stela Augustinova 588b6f9882 v7.1.4 2026-03-19 12:13:37 +01:00
Stela Augustinova 375f69ca1e v7.1.4-alpha.2 2026-03-19 11:13:29 +01:00
Stela Augustinova a32e5cc139 v7.1.4-alpha.1 2026-03-19 10:56:16 +01:00
CI workflows 8e00137751 chore: auto-update github workflows 2026-03-19 09:33:56 +00:00
Stela Augustinova 003db50833 SYNC: Add missing publish step for rest 2026-03-19 09:33:36 +00:00
Stela Augustinova bc519c2c20 Changelog 7.1.3 2026-03-18 16:06:01 +01:00
Stela Augustinova 3b41fa8cfa v7.1.3 2026-03-18 15:31:26 +01:00
Stela Augustinova 39ed0f6d2d v7.1.3-premium-beta.7 2026-03-18 14:27:27 +01:00
CI workflows 710f796832 chore: auto-update github workflows 2026-03-18 13:15:43 +00:00
CI workflows 9ec5fb7263 Update pro ref 2026-03-18 13:15:24 +00:00
Stela Augustinova 407db457d5 SYNC: Added new translations and error codes 2026-03-18 13:15:12 +00:00
Jan Prochazka 0c5d2cfcd1 Merge pull request #1393 from dbgate/feature/script-filter
Add cloud content list integration for connection label resolution
2026-03-18 13:55:40 +01:00
CI workflows 87ace375bb chore: auto-update github workflows 2026-03-18 12:54:58 +00:00
CI workflows d010020f3b Update pro ref 2026-03-18 12:54:34 +00:00
Jan Prochazka c60227a98f SYNC: Merge pull request #85 from dbgate/feature/proxy-configuration 2026-03-18 12:54:21 +00:00
Stela Augustinova 2824681bff Refactor cloudIdToLabel assignment to use lodash's fromPairs for improved readability 2026-03-18 13:47:45 +01:00
Stela Augustinova 073a3e3946 Add cloud content list integration for connection label resolution 2026-03-18 11:23:31 +01:00
CI workflows 93e91127a0 chore: auto-update github workflows 2026-03-18 08:03:38 +00:00
CI workflows b60a6cff56 Update pro ref 2026-03-18 08:03:23 +00:00
Jan Prochazka 1f3b1963d9 SYNC: errors assign 2026-03-18 08:03:13 +00:00
SPRINX0\prochazka 4915f57abb v7.1.3-premium-beta.6 2026-03-17 15:35:35 +01:00
Jan Prochazka 97c6fc97d5 Merge pull request #1392 from dbgate/feature/duckdb-integration-test
Synchronize client and instance disconnection methods
2026-03-17 15:34:51 +01:00
Stela Augustinova b68421bbc3 Synchronize client and instance disconnection methods 2026-03-17 14:45:57 +01:00
SPRINX0\prochazka 2d10559754 v7.1.3-premium-beta.5 2026-03-17 13:38:35 +01:00
CI workflows b398a7b546 chore: auto-update github workflows 2026-03-17 11:58:40 +00:00
CI workflows 1711d2102d Update pro ref 2026-03-17 11:58:24 +00:00
Jan Prochazka 97cea230f3 SYNC: Merge pull request #83 from dbgate/feature/transaction-isolation 2026-03-17 11:58:10 +00:00
CI workflows b6a0fe9465 chore: auto-update github workflows 2026-03-17 11:46:56 +00:00
CI workflows 06c50659bb Update pro ref 2026-03-17 11:46:39 +00:00
Jan Prochazka 244b47f548 SYNC: Merge pull request #84 from dbgate/feature/proxy-configuration 2026-03-17 11:46:28 +00:00
Jan Prochazka b72a244d93 Merge pull request #1389 from dbgate/feature/duckdb-query-result
Fix getColumnsInfo loop to iterate from start to end
2026-03-17 09:55:59 +01:00
Jan Prochazka c1e069d4dc Merge pull request #1391 from dbgate/feature/script-filter
Refactor connection selection to use a dropdown instead of a button f…
2026-03-17 09:50:01 +01:00
Stela Augustinova f99994085a Refactor connection selection to use a dropdown instead of a button for improved usability 2026-03-17 09:22:18 +01:00
Stela Augustinova 32fd0dd78c Update @duckdb/node-api dependency to version 1.5.0-r.1 2026-03-16 15:52:01 +01:00
Jan Prochazka a557b6b2b4 Merge pull request #1388 from dbgate/feature/script-filter
Feature/script filter
2026-03-16 15:27:49 +01:00
Stela Augustinova e84583c776 Fix getColumnsInfo loop to iterate from start to end 2026-03-16 15:09:31 +01:00
Stela Augustinova a548b0d543 Refactor connection label assignment to use logical OR for fallback 2026-03-16 15:05:45 +01:00
Stela Augustinova de94f15383 Fix file reading to correctly handle bytes read from file 2026-03-16 14:41:38 +01:00
Stela Augustinova 7045d986ef Fix file handle management to ensure proper closure in file reading process 2026-03-16 14:31:43 +01:00
Stela Augustinova de7ae9cf09 Refactor connection filter options 2026-03-16 14:17:06 +01:00
Stela Augustinova ab3d6888dc Enhance file reading and connection filtering in SavedFilesList component 2026-03-16 14:08:19 +01:00
Stela Augustinova 98a70891f3 Refactor file reading 2026-03-16 08:12:35 +01:00
Stela Augustinova 52e7326a2c Enhance file listing to support front matter parsing and connection filtering 2026-03-16 08:02:03 +01:00
Jan Prochazka bfd2e3b07a Merge pull request #1382 from dbgate/feature/add-files-button
Enhance drag-and-drop functionality to support Electron file paths
2026-03-12 12:41:31 +01:00
Stela Augustinova 799f5e30d3 Enhance drag-and-drop functionality to support Electron file paths 2026-03-12 10:14:47 +01:00
SPRINX0\prochazka d3e544c3c0 v7.1.3-premium-beta.4 2026-03-11 08:55:53 +01:00
CI workflows 866fd55834 chore: auto-update github workflows 2026-03-10 10:17:13 +00:00
CI workflows 74ce1fba32 Update pro ref 2026-03-10 10:16:57 +00:00
Jan Prochazka a11b93b4cc SYNC: Merge pull request #80 from dbgate/feature/loading-fix 2026-03-10 10:16:46 +00:00
CI workflows 066f2baa03 chore: auto-update github workflows 2026-03-10 09:50:36 +00:00
Stela Augustinova e02396280f SYNC: Add port mappings for DynamoDB and fix formatting in e2e-pro.yaml 2026-03-10 09:50:18 +00:00
CI workflows a654c80746 chore: auto-update github workflows 2026-03-10 09:32:53 +00:00
CI workflows 3b50f4bd7c Update pro ref 2026-03-10 09:32:34 +00:00
CI workflows cc1f77f5bc chore: auto-update github workflows 2026-03-10 08:23:51 +00:00
CI workflows 381fce4a82 Update pro ref 2026-03-10 08:23:35 +00:00
Jan Prochazka bc3be97cee SYNC: Merge pull request #81 from dbgate/feature/dynamo-e2e 2026-03-10 08:22:32 +00:00
Jan Prochazka 1c389208a7 Merge pull request #1378 from dbgate/feature/add-files-button
Import getElectron in ElectronFilesInput component
2026-03-10 09:19:34 +01:00
SPRINX0\prochazka cbeed2d3d0 v7.1.3-alpha.3 2026-03-09 10:20:49 +01:00
SPRINX0\prochazka 3d974ad144 v7.1.3-alpha.2 2026-03-09 10:01:50 +01:00
SPRINX0\prochazka 749042a05d set version 2026-03-09 09:59:53 +01:00
SPRINX0\prochazka 52413b82ee v7.1.3-alpha.1 2026-03-09 09:22:26 +01:00
SPRINX0\prochazka 212a7ec083 used exact version 2026-03-09 09:21:57 +01:00
SPRINX0\prochazka cee94fe113 added missing package 2026-03-09 09:20:48 +01:00
Stela Augustinova e1ead2519a Import getElectron in ElectronFilesInput component 2026-03-09 07:35:34 +01:00
Jan Prochazka 80330a25ac Merge pull request #1372 from dbgate/feature/export-diagram
Add diagram export to png
2026-03-05 10:32:35 +01:00
Stela Augustinova 508470e970 Added import 2026-03-05 10:02:57 +01:00
Stela Augustinova bc64b4b5c7 Update ToolStripDropDownButton label to use translation for export 2026-03-04 15:36:40 +01:00
Jan Prochazka 48d8494ead SYNC: added CLAUDE.md 2026-03-04 07:42:30 +00:00
SPRINX0\prochazka 2a51d2ed96 SYNC: fix: enhance date handling in zipDataRow function 2026-03-03 16:13:49 +00:00
Stela Augustinova cfabcc7bf6 Fix import name for ToolStripDropDownButton in DiagramTab.svelte 2026-03-03 17:08:13 +01:00
Stela Augustinova 90fc8fd0fc Add diagram export to png 2026-03-03 16:54:46 +01:00
SPRINX0\prochazka ff54533e33 v7.1.2 2026-03-02 15:53:28 +01:00
SPRINX0\prochazka 2072f0b5ba SYNC: don't use random data in testing REST service 2026-03-02 14:10:12 +00:00
Jan Prochazka 6efc720a45 SYNC: Merge pull request #78 from dbgate/feature/aitest 2026-03-02 13:28:57 +00:00
SPRINX0\prochazka c7cb1efe9c v7.1.2-premium-beta.2 2026-03-02 12:59:39 +01:00
SPRINX0\prochazka e193531246 changelog 2026-03-02 12:58:03 +01:00
CI workflows 2aa53f414e chore: auto-update github workflows 2026-03-02 11:57:43 +00:00
CI workflows 843c15d754 Update pro ref 2026-03-02 11:57:27 +00:00
SPRINX0\prochazka fb19582088 v7.1.2-premium-beta.1 2026-03-02 10:34:53 +01:00
SPRINX0\prochazka 8040466cbe text 2026-03-02 10:34:16 +01:00
CI workflows 302b4d7acd chore: auto-update github workflows 2026-03-02 09:33:33 +00:00
CI workflows a8ccc24d46 Update pro ref 2026-03-02 09:33:16 +00:00
Jan Prochazka b2fb071a7b SYNC: Merge pull request #73 from dbgate/feature/openai-upgrade 2026-03-02 09:33:04 +00:00
SPRINX0\prochazka 204d7b97d5 chore: update CHANGELOG for version 7.1.1 enhancements and fixes 2026-02-27 16:08:33 +01:00
SPRINX0\prochazka f3da709aac v7.1.1 2026-02-27 15:34:12 +01:00
SPRINX0\prochazka 0ab8afb838 v7.1.1-packer-beta.3 2026-02-27 13:36:37 +01:00
SPRINX0\prochazka d50999547f v7.1.1-premium-beta.2 2026-02-27 13:36:14 +01:00
CI workflows 04741b0eba chore: auto-update github workflows 2026-02-27 12:35:44 +00:00
SPRINX0\prochazka ba86fe32e7 comment out azure build 2026-02-27 13:35:24 +01:00
CI workflows 9deb7d7fdc chore: auto-update github workflows 2026-02-27 12:34:08 +00:00
CI workflows 55eb64e5ca Update pro ref 2026-02-27 12:33:52 +00:00
Jan Prochazka a5f50f3f2b SYNC: Merge pull request #68 from dbgate/feature/dynamodb-plugin 2026-02-27 12:33:39 +00:00
Jan Prochazka 47214eb5b3 SYNC: Merge pull request #72 from dbgate/feature-firebird-fixes 2026-02-27 12:24:38 +00:00
CI workflows 599509d417 chore: auto-update github workflows 2026-02-27 08:20:08 +00:00
CI workflows 9d366fc359 Update pro ref 2026-02-27 08:19:53 +00:00
SPRINX0\prochazka 0e1ed0bde6 SYNC: upgraded dbgate-query-splitter 2026-02-27 08:19:41 +00:00
CI workflows 6ad7824bf2 chore: auto-update github workflows 2026-02-27 08:06:47 +00:00
CI workflows 1174f51c07 Update pro ref 2026-02-27 08:06:31 +00:00
Jan Prochazka 1950dda1ab SYNC: Merge pull request #70 from dbgate/feature/new-gql-query 2026-02-27 08:06:19 +00:00
Jan Prochazka 8231b6d5be SYNC: Merge pull request #71 from dbgate/feature/reset-virtual-scroll 2026-02-27 08:03:36 +00:00
Jan Prochazka 0feacbe6eb Merge pull request #1368 from dbgate/feature/driver-selection
Set default selected item to 'general' in SettingsTab and WidgetIconP…
2026-02-27 08:21:16 +01:00
Jan Prochazka 80b5f5adca SYNC: Merge pull request #65 from dbgate/feature/filter-bigint 2026-02-26 08:48:23 +00:00
CI workflows 13650f36e6 chore: auto-update github workflows 2026-02-26 08:47:23 +00:00
CI workflows 3f58d99069 Update pro ref 2026-02-26 08:47:03 +00:00
CI workflows 0c8a025cf6 chore: auto-update github workflows 2026-02-26 08:33:59 +00:00
CI workflows 5014df4859 Update pro ref 2026-02-26 08:33:42 +00:00
SPRINX0\prochazka 34a491e2ef v7.1.1-premium-beta.1 2026-02-25 14:07:03 +01:00
Jan Prochazka 884e4ca88e SYNC: Merge pull request #67 from dbgate/feature/connfix 2026-02-25 13:01:09 +00:00
CI workflows a670c5e86c chore: auto-update github workflows 2026-02-25 12:54:58 +00:00
CI workflows af1fba79be Update pro ref 2026-02-25 12:54:40 +00:00
Jan Prochazka ac44de0bf4 SYNC: Merge pull request #66 from dbgate/team-premium-permis-fix-2 2026-02-25 12:54:28 +00:00
Stela Augustinova f013a241ce Merge pull request #1367 from dbgate/feature/disable-cell-data-view
Add setting to disable automatic Cell Data View opening
2026-02-25 10:31:42 +01:00
Stela Augustinova 0e29a7206d Prevent unnecessary updates in handleUserChange when the selected item remains unchanged 2026-02-25 09:27:50 +01:00
Stela Augustinova 689b3f299c Prevent unnecessary updates in handleUserChange when the selected item remains unchanged 2026-02-25 09:20:08 +01:00
Stela Augustinova 02ccb990bd Remove default selected item from SettingsTab in stdCommands and WidgetIconPanel 2026-02-25 09:18:51 +01:00
Stela Augustinova 61fe4f0d57 Set default selected item to 'general' in SettingsTab and WidgetIconPanel 2026-02-25 09:09:17 +01:00
Stela Augustinova 0a920195d5 Add setting to disable automatic Cell Data View opening 2026-02-25 07:14:31 +01:00
SPRINX0\prochazka 18896bf56d v7.1.0 2026-02-24 15:18:24 +01:00
SPRINX0\prochazka 098c9041a0 changelog 2026-02-24 15:15:05 +01:00
CI workflows 61a41d8eb2 chore: auto-update github workflows 2026-02-24 13:40:14 +00:00
CI workflows e76073d5c8 Update pro ref 2026-02-24 13:39:55 +00:00
Jan Prochazka 8c34added7 SYNC: Merge pull request #63 from dbgate/feature/test-api-e2e 2026-02-24 13:39:42 +00:00
SPRINX0\prochazka 66fc6b93ae v7.0.7-premium-beta.13 2026-02-24 13:17:35 +01:00
SPRINX0\prochazka 881d5a8008 v7.0.7-beta.12 2026-02-24 12:51:25 +01:00
Jan Prochazka 5d263de954 SYNC: Merge pull request #62 from dbgate/feature/refactor-rolldown 2026-02-24 11:50:42 +00:00
SPRINX0\prochazka c8d0494000 v7.0.7-beta.11 2026-02-24 12:29:12 +01:00
SPRINX0\prochazka a9b48b5aa5 v7.0.7-premium-beta.10 2026-02-24 12:25:37 +01:00
SPRINX0\prochazka f08a951eef SYNC: Refactor DriverSettings and stores to manage hidden database engines 2026-02-24 11:23:14 +00:00
SPRINX0\prochazka 8758a4bc86 SYNC: filter extensions in active drivers 2026-02-24 10:13:49 +00:00
Jan Prochazka aae328f8c8 Merge pull request #1365 from dbgate/feature/driver-selection
Feature/driver selection
2026-02-24 11:07:43 +01:00
Stela Augustinova 1953578a33 Fix driver reference checks in DriverSettings and stores for improved stability 2026-02-24 11:02:38 +01:00
Stela Augustinova 543bdd79d9 Fix filter syntax in ConnectionDriverFields to improve driver selection logic 2026-02-24 10:50:11 +01:00
Stela Augustinova e0e1a3c8e4 Enhance DriverSettings component to handle undefined drivers and improve check-all functionality 2026-02-24 10:33:30 +01:00
Stela Augustinova f1d84f448e Refactor FormConnectionTypeSelector to improve layout and integrate FontIcon in driver settings button 2026-02-24 10:14:32 +01:00
Jan Prochazka 7c5c21f15d SYNC: Merge pull request #56 from dbgate/feature/flipping-tabs-crash 2026-02-24 09:05:43 +00:00
Jan Prochazka 41ffaeebe3 Merge pull request #1362 from david-pivonka/fix/clickhouse-cte-results
fix(clickhouse): show query results for CTE (WITH) queries
2026-02-24 09:59:48 +01:00
SPRINX0\prochazka 5d9b44b647 SYNC: removed experimental flags 2026-02-24 08:26:44 +00:00
CI workflows a18d2c5650 chore: auto-update github workflows 2026-02-24 08:21:49 +00:00
CI workflows e0379bcf12 Update pro ref 2026-02-24 08:21:35 +00:00
SPRINX0\prochazka e91242d5a2 SYNC: use string instead of datatime in password_reset_token (for compatibility) 2026-02-24 08:21:23 +00:00
SPRINX0\prochazka 8177187b3a v7.0.7-premium-beta.9 2026-02-24 08:57:50 +01:00
CI workflows 6b3e1144bc chore: auto-update github workflows 2026-02-24 07:56:50 +00:00
CI workflows dfec88f52d Update pro ref 2026-02-24 07:56:34 +00:00
SPRINX0\prochazka b8df67659a v7.0.7-premium-beta.8 2026-02-24 08:25:26 +01:00
SPRINX0\prochazka 861da64581 fix 2026-02-24 08:24:14 +01:00
CI workflows ab147a2cc9 chore: auto-update github workflows 2026-02-24 07:20:26 +00:00
CI workflows e13191e894 Update pro ref 2026-02-24 07:20:09 +00:00
SPRINX0\prochazka 7f69ea8dc0 SYNC: fixed links (dbgate.org => dbgate.io) 2026-02-24 07:19:58 +00:00
SPRINX0\prochazka ef2140696b publish NPM plugins 2026-02-24 08:13:05 +01:00
SPRINX0\prochazka 4607900c3b SYNC: yarn.lock 2026-02-24 07:05:31 +00:00
CI workflows 3258d55796 chore: auto-update github workflows 2026-02-24 06:58:55 +00:00
CI workflows 35e6966c39 Update pro ref 2026-02-24 06:58:38 +00:00
SPRINX0\prochazka 885756b259 removed ADD plugin 2026-02-24 07:57:10 +01:00
Jan Prochazka 5fbc1b937c SYNC: Merge pull request #55 from dbgate/feature/dynamodb-plugin 2026-02-24 06:51:31 +00:00
CI workflows 7e444e9fc2 chore: auto-update github workflows 2026-02-24 06:35:37 +00:00
CI workflows c051237914 Update pro ref 2026-02-24 06:35:20 +00:00
Jan Prochazka 3855b0dd28 SYNC: Merge pull request #61 from dbgate/feature/gql-variables-mutation 2026-02-24 06:35:09 +00:00
Stela Augustinova afcc9e096a Add FormConnectionTypeSelector component and integrate into ConnectionDriverFields 2026-02-23 15:48:46 +01:00
Stela Augustinova f4df1fbff4 Add separator line in DriverSettings for improved UI clarity 2026-02-23 15:33:12 +01:00
Jan Prochazka 45b3a5af91 SYNC: Merge pull request #60 from dbgate/feature/redis-key-loading 2026-02-23 12:43:04 +00:00
Stela Augustinova f54b18e652 Refactor DriverSettings component to enhance check-all functionality and improve UI layout 2026-02-23 07:52:42 +01:00
Stela Augustinova b1210d19ad Add DriverSettings component and integrate into SettingsTab 2026-02-20 18:13:56 +01:00
CI workflows 21cbcc79c6 chore: auto-update github workflows 2026-02-20 14:33:44 +00:00
CI workflows a7d0c8fb0f Update pro ref 2026-02-20 14:33:26 +00:00
SPRINX0\prochazka 1e3dc54d81 v7.0.7-premium-beta.7 2026-02-20 13:25:12 +01:00
CI workflows 48f294fd83 chore: auto-update github workflows 2026-02-20 12:22:39 +00:00
CI workflows 298ad0de4b Update pro ref 2026-02-20 12:22:17 +00:00
Jan Prochazka c7953f9231 SYNC: Merge pull request #59 from dbgate/feature/graphql-connection-view 2026-02-20 12:22:05 +00:00
SPRINX0\prochazka afd97eae7d v7.0.7-premium-beta.6 2026-02-19 11:00:36 +01:00
Jan Prochazka f4e558b7e8 SYNC: Merge pull request #58 from dbgate/feature/array-grid-improvements 2026-02-19 09:58:30 +00:00
SPRINX0\prochazka 12c99c646e v7.0.7-premium-beta.5 2026-02-18 19:11:40 +01:00
CI workflows 6c1a2eedbe chore: auto-update github workflows 2026-02-18 17:52:44 +00:00
CI workflows 8a73216035 Update pro ref 2026-02-18 17:52:25 +00:00
Jan Prochazka c6a93f12f7 SYNC: Merge pull request #57 from dbgate/feature/improve-api-capabilities 2026-02-18 17:52:13 +00:00
CI workflows 09f44d94b3 chore: auto-update github workflows 2026-02-18 15:32:46 +00:00
CI workflows c26748154a Update pro ref 2026-02-18 15:32:30 +00:00
Jan Prochazka 2474f915d4 SYNC: Merge pull request #54 from dbgate/feature/odata-api 2026-02-18 15:32:17 +00:00
Jan Prochazka 53f940cd23 SYNC: Merge pull request #52 from dbgate/feature/group-by-timestamp 2026-02-18 14:45:25 +00:00
CI workflows 991b648854 chore: auto-update github workflows 2026-02-18 07:23:25 +00:00
CI workflows 663f057a9a Update pro ref 2026-02-18 07:23:08 +00:00
Jan Prochazka 61963fb824 SYNC: Merge pull request #53 from dbgate/feature/graphql-connection-display 2026-02-18 07:22:55 +00:00
SPRINX0\prochazka bdf3cf5b36 SYNC: Enhance GraphQL query parsing to include argument values and update related components to handle new structure 2026-02-17 13:46:14 +00:00
CI workflows 5cc459594b chore: auto-update github workflows 2026-02-17 13:22:07 +00:00
CI workflows 8d315e52df Update pro ref 2026-02-17 13:21:50 +00:00
SPRINX0\prochazka 48a24a8704 SYNC: Add support for GraphQL connection queries and enhance API type handling 2026-02-17 13:21:38 +00:00
SPRINX0\prochazka cdce52f0e5 v7.0.7-premium-beta.4 2026-02-17 10:46:42 +01:00
SPRINX0\prochazka d12ccbeac4 SYNC: Reduce default maximum depth for GraphQL explorer options from 6 to 2 2026-02-17 09:46:01 +00:00
David Pivoňka 0b1620105a fix(clickhouse): show query results for CTE (WITH) queries
The stream() method used a regex that only matched queries starting
with SELECT. Queries using CTEs (WITH ... SELECT) were incorrectly
sent through client.command() which discards results, causing the
query console to show "Query execution finished" with no data grid.

Update the regex to also match queries starting with WITH so they
flow through client.query() and display results correctly.

Fixes #1138

Co-authored-by: Cursor <cursoragent@cursor.com>
2026-02-17 08:37:32 +01:00
CI workflows 2ae9c98acb chore: auto-update github workflows 2026-02-17 07:04:41 +00:00
CI workflows ed00848a1e Update pro ref 2026-02-17 07:04:24 +00:00
SPRINX0\prochazka 06f7741dbf SYNC: Refactor REST authentication handling and improve connection utilities 2026-02-17 07:04:12 +00:00
CI workflows 8d3b7cace8 chore: auto-update github workflows 2026-02-16 16:03:32 +00:00
CI workflows 8f0775e337 Update pro ref 2026-02-16 16:03:16 +00:00
SPRINX0\prochazka 444cb6aa0c SYNC: errors assign 2026-02-16 16:03:05 +00:00
SPRINX0\prochazka b4acc19ea2 v7.0.7-premium-beta.3 2026-02-16 16:56:20 +01:00
SPRINX0\prochazka 1ef17cd861 SYNC: Masking connections improved #1357 2026-02-16 15:48:21 +00:00
SPRINX0\prochazka e564e930e5 env config 2026-02-16 16:23:07 +01:00
SPRINX0\prochazka a30badbbe0 enhance connection masking #1357 2026-02-16 16:23:02 +01:00
SPRINX0\prochazka b33d21fdb3 v7.0.7-beta.2 2026-02-16 16:11:56 +01:00
SPRINX0\prochazka 78da83f7db fix: optimize query string handling in executeRestApiEndpoint 2026-02-16 15:09:30 +01:00
SPRINX0\prochazka 8f6313d4ec fix: update build process to include dbgate-rest and adjust dependencies 2026-02-16 14:55:46 +01:00
Jan Prochazka 14962a5622 Merge pull request #1360 from dbgate/feature/numeric-sum
Numeric handling in DataGridCore
2026-02-16 14:32:41 +01:00
Jan Prochazka b8048e7592 Merge pull request #1361 from dbgate/feature/mssql-fk-duplicate
Feature/mssql fk duplicate
2026-02-16 14:25:33 +01:00
SPRINX0\prochazka cf9823e123 v7.0.7-beta.1 2026-02-16 14:17:16 +01:00
SPRINX0\prochazka 1667dbfde0 added REST fake methods/files 2026-02-16 14:16:41 +01:00
CI workflows 416436a612 chore: auto-update github workflows 2026-02-16 13:01:33 +00:00
CI workflows dc1b724d8d Update pro ref 2026-02-16 13:01:17 +00:00
Jan Prochazka 080dc44175 SYNC: Merge pull request #51 from dbgate/feature/rest-poc 2026-02-16 13:00:56 +00:00
Stela Augustinova 3921f50feb Removed unnecessary endCommand call in tableOptions method 2026-02-16 07:45:20 +01:00
Stela Augustinova 6fc63be56a fixed duplicate FK 2026-02-16 07:39:18 +01:00
00adrn 721fdf09b3 Added option to toggle database formats on and off in Settings->Connection menu. Now, when creating a new connection, only enabled database formats will appear. 2026-02-15 21:01:23 -05:00
321 changed files with 21419 additions and 7331 deletions
+1 -1
View File
@@ -47,7 +47,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 3ef5290af3820e8376e2304051b439611f789a07
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
+1 -1
View File
@@ -47,7 +47,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 3ef5290af3820e8376e2304051b439611f789a07
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
+1 -19
View File
@@ -39,7 +39,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 3ef5290af3820e8376e2304051b439611f789a07
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -90,14 +90,6 @@ jobs:
prerelease: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Run `packer init` for Azure
run: |
cd ../dbgate-merged/packer
packer init ./azure-ubuntu.pkr.hcl
- name: Run `packer build` for Azure
run: |
cd ../dbgate-merged/packer
packer build ./azure-ubuntu.pkr.hcl
- name: Run `packer init` for AWS
run: |
cd ../dbgate-merged/packer
@@ -114,16 +106,6 @@ jobs:
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}}
- name: Delete old Azure VMs
run: |
cd ../dbgate-merged/packer
chmod +x delete-old-azure-images.sh
./delete-old-azure-images.sh
env:
AZURE_CLIENT_ID: ${{secrets.AZURE_CLIENT_ID}}
AZURE_CLIENT_SECRET: ${{secrets.AZURE_CLIENT_SECRET}}
AZURE_TENANT_ID: ${{secrets.AZURE_TENANT_ID}}
AZURE_SUBSCRIPTION_ID: ${{secrets.AZURE_SUBSCRIPTION_ID}}
- name: Delete old AMIs (AWS)
run: |
cd ../dbgate-merged/packer
+1 -1
View File
@@ -44,7 +44,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 3ef5290af3820e8376e2304051b439611f789a07
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
+2 -2
View File
@@ -35,7 +35,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 3ef5290af3820e8376e2304051b439611f789a07
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -53,7 +53,7 @@ jobs:
cd dbgate-merged
node adjustNpmPackageJsonPremium
- name: Update npm
run: npm install -g npm@latest
run: npm install -g npm@11.5.1
- name: Remove dbmodel - should be not published
run: |
cd ..
+5 -2
View File
@@ -30,7 +30,7 @@ jobs:
with:
node-version: 22.x
- name: Update npm
run: npm install -g npm@latest
run: npm install -g npm@11.5.1
- name: yarn install
run: |
yarn install
@@ -56,7 +56,10 @@ jobs:
working-directory: packages/sqltree
run: |
npm publish --tag "$NPM_TAG"
- name: Publish rest
working-directory: packages/rest
run: |
npm publish --tag "$NPM_TAG"
- name: Publish api
working-directory: packages/api
run: |
+5 -1
View File
@@ -30,7 +30,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 3ef5290af3820e8376e2304051b439611f789a07
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -132,6 +132,10 @@ jobs:
image: redis
ports:
- '16011:6379'
dynamodb:
image: amazon/dynamodb-local
ports:
- '16015:8000'
mssql:
image: mcr.microsoft.com/mssql/server
ports:
+34
View File
@@ -23,26 +23,49 @@ jobs:
- uses: actions/checkout@v3
with:
fetch-depth: 1
- name: Checkout dbgate/dbgate-pro
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
mv dbgate-pro/* ../dbgate-pro/
cd ..
mkdir dbgate-merged
cd dbgate-pro
cd sync
yarn
node sync.js --nowatch
cd ..
- name: yarn install
run: |
cd ../dbgate-merged
yarn install
- name: Integration tests
run: |
cd ../dbgate-merged
cd integration-tests
yarn test:ci
- name: Filter parser tests
if: always()
run: |
cd ../dbgate-merged
cd packages/filterparser
yarn test:ci
- name: Datalib (perspective) tests
if: always()
run: |
cd ../dbgate-merged
cd packages/datalib
yarn test:ci
- name: Tools tests
if: always()
run: |
cd ../dbgate-merged
cd packages/tools
yarn test:ci
services:
@@ -98,3 +121,14 @@ jobs:
FIREBIRD_USE_LEGACY_AUTH: true
ports:
- '3050:3050'
mongodb:
image: mongo:4.0.12
ports:
- '27017:27017'
volumes:
- mongo-data:/data/db
- mongo-config:/data/configdb
dynamodb:
image: amazon/dynamodb-local
ports:
- '8000:8000'
+6 -1
View File
@@ -2,5 +2,10 @@
"jestrunner.jestCommand": "node_modules/.bin/cross-env DEVMODE=1 LOCALTEST=1 node_modules/.bin/jest",
"cSpell.words": [
"dbgate"
]
],
"chat.tools.terminal.autoApprove": {
"yarn workspace": true,
"yarn --cwd packages/rest": true,
"yarn --cwd packages/web": true
}
}
+9
View File
@@ -0,0 +1,9 @@
# AGENTS
## Rules
- In newly added code, always use `DBGM-00000` for message/error codes; do not introduce new numbered DBGM codes such as `DBGM-00316`.
- GUI uses Svelte4 (packages/web)
- GUI is tested with E2E tests in `e2e-tests` folder, using Cypress. Use data-testid attribute in components to make them easier to test.
- data-testid format: ComponentName_identifier. Use reasonable identifiers
- don't change content of storageModel.js - this is generated from table YAMLs with "yarn storage-json" command
+242 -39
View File
File diff suppressed because it is too large Load Diff
+119
View File
@@ -0,0 +1,119 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## Project Overview
DbGate is a cross-platform (no)SQL database manager supporting MySQL, PostgreSQL, SQL Server, Oracle, MongoDB, Redis, SQLite, and more. It runs as a web app (Docker/NPM), an Electron desktop app, or in a browser. The monorepo uses Yarn workspaces.
## Development Commands
```sh
yarn # install all packages (also builds TS libraries and plugins)
yarn start # run API (port 3000) + web (port 5001) concurrently
```
For more control, run these 3 commands in separate terminals:
```sh
yarn start:api # Express API on port 3000
yarn start:web # Svelte frontend on port 5001
yarn lib # watch-compile TS libraries and plugins
```
For Electron development:
```sh
yarn start:web # web on port 5001
yarn lib # watch TS libs/plugins
yarn start:app # Electron app
```
### Building
```sh
yarn build:lib # build all TS libraries (sqltree, tools, filterparser, datalib, rest)
yarn build:api # build API
yarn build:web # build web frontend
yarn ts # TypeScript type-check API and web
yarn prettier # format all source files
```
### Testing
Unit tests (in packages like `dbgate-tools`):
```sh
yarn workspace dbgate-tools test
```
Integration tests (requires Docker for database containers):
```sh
cd integration-tests
yarn test:local # run all tests
yarn test:local:path __tests__/alter-database.spec.js # run a single test file
```
E2E tests (Cypress):
```sh
yarn cy:open # open Cypress UI
cd e2e-tests && yarn cy:run:browse-data # run a specific spec headlessly
```
## Architecture
### Monorepo Structure
| Path | Package | Purpose |
|---|---|---|
| `packages/api` | `dbgate-api` | Express.js backend server |
| `packages/web` | `dbgate-web` | Svelte 4 frontend (built with Rolldown) |
| `packages/tools` | `dbgate-tools` | Shared TS utilities: SQL dumping, schema analysis, diffing, driver base classes |
| `packages/datalib` | `dbgate-datalib` | Grid display logic, changeset management, perspectives, chart definitions |
| `packages/sqltree` | `dbgate-sqltree` | SQL AST representation and dumping |
| `packages/filterparser` | `dbgate-filterparser` | Parses filter strings into SQL/Mongo conditions |
| `packages/rest` | `dbgate-rest` | REST connection support |
| `packages/types` | `dbgate-types` | TypeScript type definitions (`.d.ts` only) |
| `packages/aigwmock` | `dbgate-aigwmock` | Mock AI gateway server for E2E testing |
| `plugins/dbgate-plugin-*` | — | Database drivers and file format handlers |
| `app/` | — | Electron shell |
| `integration-tests/` | — | Jest-based DB integration tests (Docker) |
| `e2e-tests/` | — | Cypress E2E tests |
### API Backend (`packages/api`)
- Express.js server with controllers in `src/controllers/` — each file exposes REST endpoints via the `useController` utility
- Database connections run in child processes (`src/proc/`) to isolate crashes and long-running operations
- `src/shell/` contains stream-based data pipeline primitives (readers, writers, transforms) used for import/export and replication
- Plugin drivers are loaded dynamically via `requireEngineDriver`; each plugin in `plugins/` exports a driver conforming to `DriverBase` from `dbgate-tools`
### Frontend (`packages/web`)
- Svelte 4 components; builds with Rolldown (not Vite/Webpack)
- Global state in `src/stores.ts` using Svelte writable stores, with `writableWithStorage` / `writableWithForage` helpers for persistence
- API calls go through `src/utility/api.ts` (`apiCall`, `apiOff`, etc.) which handles auth, error display, and cache invalidation
- Tab system: each open editor/viewer is a "tab" tracked in `openedTabs` store; tab components live in `src/tabs/`
- Left-panel tree items are "AppObjects" in `src/appobj/`
- Metadata (table lists, column info) is loaded reactively via hooks in `src/utility/metadataLoaders.ts`
- Commands/keybindings are registered in `src/commands/`
### Plugin Architecture
Each `plugins/dbgate-plugin-*` package provides:
- **Frontend build** (`build:frontend`): bundled JS loaded by the web UI for query formatting, data rendering
- **Backend build** (`build:backend`): Node.js driver code loaded by the API for actual DB connections
Plugins are copied to `plugins/dist/` via `plugins:copydist` before building the app or Docker image.
### Key Conventions
- Error/message codes use `DBGM-00000` as placeholder — do not introduce new numbered `DBGM-NNNNN` codes
- Frontend uses **Svelte 4** (not Svelte 5)
- E2E test selectors use `data-testid` attribute with format `ComponentName_identifier`
- Prettier config: single quotes, 2-space indent, 120-char line width, trailing commas ES5
- Logging via `pinomin`; pipe through `pino-pretty` for human-readable output
### Translation System
```sh
yarn translations:extract # extract new strings
yarn translations:add-missing # add missing translations
yarn translations:check # check for issues
```
+2 -2
View File
@@ -13,9 +13,9 @@
<p>DbGate is cross-platform database manager. It's designed to be simple to use and effective, when working with more databases simultaneously. But there are also many advanced features like schema compare, visual query designer, chart visualisation or batch export and import.</p>
</description>
<url type="homepage">https://dbgate.org/</url>
<url type="homepage">https://www.dbgate.io/</url>
<url type="vcs-browser">https://github.com/dbgate/dbgate</url>
<url type="contact">https://dbgate.org/about/</url>
<url type="contact">https://www.dbgate.io/contact/</url>
<url type="donation">https://github.com/sponsors/dbgate</url>
<url type="bugtracker">https://github.com/dbgate/dbgate/issues</url>
+8
View File
@@ -400,6 +400,14 @@ function createWindow() {
},
});
mainWindow.webContents.session.webRequest.onBeforeSendHeaders(
{ urls: ['https://*.tile.openstreetmap.org/*'] },
(details, callback) => {
details.requestHeaders['Referer'] = 'https://www.dbgate.io';
callback({ requestHeaders: details.requestHeaders });
}
);
if (initialConfig['winIsMaximized']) {
mainWindow.maximize();
}
+2 -1
View File
@@ -4,5 +4,6 @@ module.exports = {
mssql: true,
oracle: true,
sqlite: true,
mongo: true
mongo: true,
dynamo: true,
};
+50
View File
@@ -3,8 +3,58 @@ const os = require('os');
const fs = require('fs');
const baseDir = path.join(os.homedir(), '.dbgate');
const testApiPidFile = path.join(__dirname, 'tmpdata', 'test-api.pid');
const aigwmockPidFile = path.join(__dirname, 'tmpdata', 'aigwmock.pid');
function readProcessStartTime(pid) {
if (process.platform === 'linux') {
try {
const stat = fs.readFileSync(`/proc/${pid}/stat`, 'utf-8');
return stat.split(' ')[21] || null;
} catch (err) {
return null;
}
}
return null;
}
function isPidStillOurs(meta) {
if (!meta || !(meta.pid > 0)) return false;
if (process.platform === 'linux' && meta.startTime) {
const current = readProcessStartTime(meta.pid);
return current === meta.startTime;
}
return true;
}
function stopProcessByPidFile(pidFile) {
if (!fs.existsSync(pidFile)) return;
try {
const content = fs.readFileSync(pidFile, 'utf-8').trim();
let meta;
try {
meta = JSON.parse(content);
} catch (_) {
const pid = Number(content);
meta = Number.isInteger(pid) && pid > 0 ? { pid } : null;
}
if (isPidStillOurs(meta)) {
process.kill(meta.pid);
}
} catch (err) {
// ignore stale PID files and dead processes
}
try {
fs.unlinkSync(pidFile);
} catch (err) {
// ignore cleanup errors
}
}
function clearTestingData() {
stopProcessByPidFile(testApiPidFile);
stopProcessByPidFile(aigwmockPidFile);
if (fs.existsSync(path.join(baseDir, 'connections-e2etests.jsonl'))) {
fs.unlinkSync(path.join(baseDir, 'connections-e2etests.jsonl'));
}
+6
View File
@@ -37,6 +37,9 @@ module.exports = defineConfig({
case 'browse-data':
serverProcess = exec('yarn start:browse-data');
break;
case 'rest':
serverProcess = exec('yarn start:rest');
break;
case 'team':
serverProcess = exec('yarn start:team');
break;
@@ -52,6 +55,9 @@ module.exports = defineConfig({
case 'redis':
serverProcess = exec('yarn start:redis');
break;
case 'ai-chat':
serverProcess = exec('yarn start:ai-chat');
break;
}
await waitOn({ resources: ['http://localhost:3000'] });
+105
View File
@@ -0,0 +1,105 @@
Cypress.on('uncaught:exception', err => {
if (err.message.includes("Failed to execute 'importScripts' on 'WorkerGlobalScope'")) {
return false;
}
});
beforeEach(() => {
cy.visit('http://localhost:3000');
cy.viewport(1250, 900);
});
describe('Database Chat (MySQL)', () => {
it('Database chat - chart of popular genres', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_databaseChat').click();
cy.wait(1000);
cy.get('body').realType('show me chart of most popular genres');
cy.get('body').realPress('Enter');
cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.testid('chart-canvas', { timeout: 30000 }).should($c =>
expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/)
);
cy.themeshot('database-chat-chart');
});
it('Database chat - find most popular artist', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_databaseChat').click();
cy.wait(1000);
cy.get('body').realType('find most popular artist');
cy.get('body').realPress('Enter');
cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.contains('Iron Maiden', { timeout: 30000 });
cy.themeshot('database-chat-popular-artist');
});
});
describe('GraphQL Chat', () => {
it('GraphQL chat - list users', () => {
cy.contains('REST GraphQL').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_graphqlChat').click();
cy.wait(1000);
cy.get('body').realType('list all users');
cy.get('body').realPress('Enter');
cy.testid('GraphQlChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.contains('users', { timeout: 30000 });
cy.themeshot('graphql-chat-list-users');
});
it('GraphQL chat - product categories chart', () => {
cy.contains('REST GraphQL').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_graphqlChat').click();
cy.wait(1000);
cy.get('body').realType('show me a chart of product categories');
cy.get('body').realPress('Enter');
cy.testid('GraphQlChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.testid('chart-canvas', { timeout: 30000 }).should($c =>
expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/)
);
cy.themeshot('graphql-chat-categories-chart');
});
it('GraphQL chat - find most expensive product', () => {
cy.contains('REST GraphQL').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_graphqlChat').click();
cy.wait(1000);
cy.get('body').realType('find the most expensive product');
cy.get('body').realPress('Enter');
cy.testid('GraphQlChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.contains('products', { timeout: 30000 });
cy.themeshot('graphql-chat-expensive-product');
});
it('GraphQL chat - show all categories', () => {
cy.contains('REST GraphQL').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_graphqlChat').click();
cy.wait(1000);
cy.get('body').realType('show all categories');
cy.get('body').realPress('Enter');
cy.testid('GraphQlChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.contains('categories', { timeout: 30000 });
cy.themeshot('graphql-chat-all-categories');
});
it('Explain query error', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_query').click();
cy.wait(1000);
cy.get('body').realType('select * from Invoice2');
cy.contains('Execute').click();
cy.testid('MessageViewRow-explainErrorButton-1').click();
cy.testid('ChatCodeRenderer_useSqlButton', { timeout: 30000 });
cy.themeshot('explain-query-error');
});
});
+39
View File
@@ -512,4 +512,43 @@ describe('Data browser data', () => {
cy.testid('DataFilterControl_input_ArtistId.Name').type('mich{enter}');
cy.themeshot('data-browser-filter-by-expanded');
});
it('DynamoDB', () => {
cy.contains('Dynamo-connection').click();
cy.contains('us-east-1').click();
cy.contains('Album').click();
cy.contains('Pearl Jam').click();
cy.themeshot('dynamodb-table-data');
cy.contains('Switch to JSON').click();
cy.themeshot('dynamodb-json-view');
cy.contains('Customer').click();
cy.testid('DataFilterControl_input_CustomerId').type('<=10{enter}');
cy.contains('Rows: 10');
cy.wait(1000);
cy.contains('Helena').click().rightclick();
cy.contains('Show cell data').click();
cy.contains('City: "Prague"');
cy.themeshot('dynamodb-query-json-view');
cy.contains('Switch to JSON').click();
cy.contains('Leonie').rightclick();
cy.contains('Edit document').click();
Array.from({ length: 11 }).forEach(() => cy.realPress('ArrowDown'));
Array.from({ length: 14 }).forEach(() => cy.realPress('ArrowRight'));
Array.from({ length: 7 }).forEach(() => cy.realPress('Delete'));
cy.realType('Italy');
cy.testid('EditJsonModal_saveButton').click();
cy.contains('Helena').rightclick();
cy.contains('Delete document').click();
cy.contains('Save').click();
cy.themeshot('dynamodb-save-changes');
cy.testid('SqlObjectList_addButton').click();
cy.contains('New collection/container').click();
cy.themeshot('dynamodb-new-collection');
});
});
-49
View File
@@ -110,55 +110,6 @@ describe('Charts', () => {
cy.themeshot('new-object-window');
});
it.skip('Database chat - charts', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_databaseChat').click();
cy.wait(1000);
cy.get('body').realType('show me chart of most popular genres');
cy.get('body').realPress('{enter}');
cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.testid('chart-canvas', { timeout: 30000 }).should($c =>
expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/)
);
cy.themeshot('database-chat-chart');
});
it.skip('Database chat', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_databaseChat').click();
cy.wait(1000);
cy.get('body').realType('find most popular artist');
cy.get('body').realPress('{enter}');
cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.wait(30000);
// cy.contains('Iron Maiden');
cy.themeshot('database-chat');
// cy.testid('DatabaseChatTab_promptInput').click();
// cy.get('body').realType('I need top 10 songs with the biggest income');
// cy.get('body').realPress('{enter}');
// cy.contains('Hot Girl', { timeout: 20000 });
// cy.wait(1000);
// cy.themeshot('database-chat');
});
it.skip('Explain query error', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_query').click();
cy.wait(1000);
cy.get('body').realType('select * from Invoice2');
cy.contains('Execute').click();
cy.testid('MessageViewRow-explainErrorButton-1').click();
cy.testid('ChatCodeRenderer_useSqlButton', { timeout: 30000 });
cy.themeshot('explain-query-error');
});
it('Switch language', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
+3
View File
@@ -52,6 +52,9 @@ function multiTest(testProps, testDefinition) {
if (localconfig.mongo && !testProps.skipMongo) {
it('MongoDB', () => testDefinition('Mongo-connection', 'my_guitar_shop', 'mongo@dbgate-plugin-mongo'));
}
if (localconfig.dynamo && !testProps.skipMongo) {
it('DynamoDB', () => testDefinition('Dynamo-connection', null, 'dynamodb@dbgate-plugin-dynamodb'));
}
}
describe('Transactions', () => {
+39
View File
@@ -0,0 +1,39 @@
Cypress.on('uncaught:exception', err => {
if (err.message.includes("Failed to execute 'importScripts' on 'WorkerGlobalScope'")) {
return false;
}
});
beforeEach(() => {
cy.visit('http://localhost:3000');
cy.viewport(1250, 900);
});
describe('REST API connections', () => {
it('GraphQL test', () => {
cy.contains('REST GraphQL').click();
cy.contains('products').click();
cy.testid('GraphQlExplorerNode_toggle_products').click();
cy.testid('GraphQlExplorerNode_checkbox_products.name').click();
cy.testid('GraphQlExplorerNode_checkbox_products.price').click();
cy.testid('GraphQlExplorerNode_checkbox_products.description').click();
cy.testid('GraphQlExplorerNode_checkbox_products.category').click();
cy.testid('GraphQlQueryTab_execute').click();
cy.contains('Electronics');
cy.themeshot('rest-graphql-query');
});
it('REST OpenAPI test', () => {
cy.contains('REST OpenAPI').click();
cy.contains('/api/categories').click();
cy.testid('RestApiEndpointTab_execute').click();
cy.contains('Electronics');
cy.themeshot('rest-openapi-query');
});
it('REST OData test', () => {
cy.contains('REST OData').click();
cy.contains('/Users').click();
cy.testid('ODataEndpointTab_execute').click();
cy.contains('Henry');
cy.themeshot('rest-odata-query');
});
});
+12 -7
View File
@@ -5,14 +5,14 @@ services:
restart: always
environment:
POSTGRES_PASSWORD: Pwd2020Db
ports:
ports:
- 16000:5432
mariadb:
image: mariadb
command: --default-authentication-plugin=mysql_native_password
restart: always
ports:
ports:
- 16004:3306
environment:
- MYSQL_ROOT_PASSWORD=Pwd2020Db
@@ -20,21 +20,21 @@ services:
mysql-ssh-login:
build: containers/mysql-ssh-login
restart: always
ports:
ports:
- 16017:3306
- "16012:22"
- '16012:22'
mysql-ssh-keyfile:
build: containers/mysql-ssh-keyfile
restart: always
ports:
ports:
- 16007:3306
- "16008:22"
- '16008:22'
dex:
build: containers/dex
ports:
- "16009:5556"
- '16009:5556'
mongo:
image: mongo:4.4.29
@@ -50,6 +50,11 @@ services:
ports:
- 16011:6379
dynamodb:
image: amazon/dynamodb-local
ports:
- 16015:8000
mssql:
image: mcr.microsoft.com/mssql/server
restart: always
+14
View File
@@ -0,0 +1,14 @@
CONNECTIONS=mysql,graphql
LOCAL_AI_GATEWAY=true
LABEL_mysql=MySql-connection
SERVER_mysql=localhost
USER_mysql=root
PASSWORD_mysql=Pwd2020Db
PORT_mysql=16004
ENGINE_mysql=mysql@dbgate-plugin-mysql
LABEL_graphql=REST GraphQL
ENGINE_graphql=graphql@rest
APISERVERURL1_graphql=http://localhost:4444/graphql/noauth
+7 -1
View File
@@ -1,4 +1,4 @@
CONNECTIONS=mysql,postgres,mongo
CONNECTIONS=mysql,postgres,mongo,dynamo
LABEL_mysql=MySql-connection
SERVER_mysql=localhost
@@ -22,3 +22,9 @@ USER_mongo=root
PASSWORD_mongo=Pwd2020Db
PORT_mongo=16010
ENGINE_mongo=mongo@dbgate-plugin-mongo
LABEL_dynamo=Dynamo-connection
SERVER_dynamo=localhost
PORT_dynamo=16015
AUTH_TYPE_dynamo=onpremise
ENGINE_dynamo=dynamodb@dbgate-plugin-dynamodb
+8 -1
View File
@@ -1,4 +1,4 @@
CONNECTIONS=mysql,postgres,mssql,oracle,sqlite,mongo
CONNECTIONS=mysql,postgres,mssql,oracle,sqlite,mongo,dynamo
LOG_CONNECTION_SENSITIVE_VALUES=true
LABEL_mysql=MySql-connection
@@ -43,3 +43,10 @@ PASSWORD_mongo=Pwd2020Db
PORT_mongo=16010
ENGINE_mongo=mongo@dbgate-plugin-mongo
LABEL_dynamo=Dynamo-connection
SERVER_dynamo=localhost
PORT_dynamo=16015
AUTH_TYPE_dynamo=onpremise
DATABASE_dynamo=localhost
ENGINE_dynamo=dynamodb@dbgate-plugin-dynamodb
+14
View File
@@ -0,0 +1,14 @@
CONNECTIONS=odata,openapi,graphql
LABEL_odata=REST OData
ENGINE_odata=odata@rest
APISERVERURL1_odata=http://localhost:4444/odata/noauth
LABEL_openapi=REST OpenAPI
ENGINE_openapi=openapi@rest
APISERVERURL1_openapi=http://localhost:4444/openapi.json
APISERVERURL2_openapi=http://localhost:4444/openapi/noauth
LABEL_graphql=REST GraphQL
ENGINE_graphql=graphql@rest
APISERVERURL1_graphql=http://localhost:4444/graphql/noauth
+168
View File
@@ -0,0 +1,168 @@
const fs = require('fs');
const path = require('path');
const { spawn, spawnSync } = require('child_process');
const rootDir = path.resolve(__dirname, '..', '..');
const testApiDir = path.join(rootDir, 'test-api');
const aigwmockDir = path.join(rootDir, 'packages', 'aigwmock');
const tmpDataDir = path.resolve(__dirname, '..', 'tmpdata');
const testApiPidFile = path.join(tmpDataDir, 'test-api.pid');
const aigwmockPidFile = path.join(tmpDataDir, 'aigwmock.pid');
const isWindows = process.platform === 'win32';
const dbgateApi = require('dbgate-api');
dbgateApi.initializeApiEnvironment();
const dbgatePluginMysql = require('dbgate-plugin-mysql');
dbgateApi.registerPlugins(dbgatePluginMysql);
/**
 * Resolve after approximately `ms` milliseconds.
 */
function delay(ms) {
  return new Promise(done => {
    setTimeout(done, ms);
  });
}
// --- MySQL setup (same as charts init) ---
/**
 * Drop, recreate and populate a MySQL database used by the ai-chat e2e tests.
 *
 * @param {string} dbname - database name; must be a plain identifier because it
 *   is interpolated directly into DROP/CREATE DATABASE statements.
 * @param {string} inputFile - path to the SQL dump imported into the new database.
 */
async function initMySqlDatabase(dbname, inputFile) {
  // Guard against SQL injection / malformed identifiers: dbname is spliced
  // into raw SQL below, so only allow safe identifier characters.
  if (!/^[A-Za-z0-9_]+$/.test(dbname)) {
    throw new Error(`Invalid database name: ${dbname}`);
  }
  // Connection parameters come from the env file loaded by env-cmd.
  const connection = {
    server: process.env.SERVER_mysql,
    user: process.env.USER_mysql,
    password: process.env.PASSWORD_mysql,
    port: process.env.PORT_mysql,
    engine: 'mysql@dbgate-plugin-mysql',
  };
  // Start from a clean slate on every run.
  await dbgateApi.executeQuery({
    connection,
    sql: `DROP DATABASE IF EXISTS ${dbname}`,
  });
  await dbgateApi.executeQuery({
    connection,
    sql: `CREATE DATABASE ${dbname}`,
  });
  await dbgateApi.importDatabase({
    connection: { ...connection, database: dbname },
    inputFile,
  });
}
// --- Process management helpers ---
// Return the Linux process start-time token (field 22 of /proc/<pid>/stat),
// or null when it cannot be determined (non-Linux platform, vanished pid).
function readProcessStartTime(pid) {
  if (process.platform !== 'linux') return null;
  try {
    const fields = fs.readFileSync(`/proc/${pid}/stat`, 'utf-8').split(' ');
    return fields[21] || null;
  } catch (_err) {
    return null;
  }
}
// Decide whether the pid recorded in a pid-file still refers to the process
// we started. On Linux, the recorded start time guards against pid reuse.
function isPidStillOurs(meta) {
  if (!meta) return false;
  if (!(meta.pid > 0)) return false;
  if (process.platform === 'linux' && meta.startTime) {
    return readProcessStartTime(meta.pid) === meta.startTime;
  }
  return true;
}
// Best-effort shutdown of a previously started background process.
// The pid-file may hold either a JSON blob { pid, startTime } written by
// startBackgroundProcess, or (legacy format) a plain numeric pid.
// The pid-file is removed in any case; all failures are swallowed.
function stopProcess(pidFile) {
  if (!fs.existsSync(pidFile)) return;
  try {
    const content = fs.readFileSync(pidFile, 'utf-8').trim();
    let meta;
    try {
      meta = JSON.parse(content);
    } catch (_) {
      // Legacy pid-file: a bare pid number instead of JSON metadata.
      const pid = Number(content);
      meta = Number.isInteger(pid) && pid > 0 ? { pid } : null;
    }
    // Only kill if we are confident the pid still belongs to our process.
    if (isPidStillOurs(meta)) {
      process.kill(meta.pid);
    }
  } catch (err) {
    // ignore stale pid or already terminated
  }
  try {
    fs.unlinkSync(pidFile);
  } catch (err) {
    // ignore
  }
}
// Run `yarn install` in `dir` unless `checkFile` (a marker file inside
// node_modules) already exists, i.e. dependencies were installed earlier.
function ensureDependencies(dir, checkFile) {
  if (fs.existsSync(checkFile)) return;
  let command;
  let args;
  if (isWindows) {
    command = 'cmd.exe';
    args = ['/c', 'yarn install --silent'];
  } else {
    command = 'yarn';
    args = ['install', '--silent'];
  }
  const result = spawnSync(command, args, {
    cwd: dir,
    stdio: 'inherit',
    env: process.env,
  });
  if (result.status !== 0) {
    throw new Error(`DBGM-00297 Failed to install dependencies in ${dir}`);
  }
}
// Launch `yarn start` in `dir` as a detached background server listening on
// `port`, and record its pid (plus, on Linux, its /proc start time) in
// `pidFile` so a later run can safely kill it via stopProcess.
function startBackgroundProcess(dir, pidFile, port) {
  const command = isWindows ? 'cmd.exe' : 'yarn';
  const args = isWindows ? ['/c', 'yarn start'] : ['start'];
  const child = spawn(command, args, {
    cwd: dir,
    env: { ...process.env, PORT: String(port) },
    detached: true,
    stdio: 'ignore',
  });
  // Fully detach so this init script can exit while the server keeps running.
  child.unref();
  fs.mkdirSync(path.dirname(pidFile), { recursive: true });
  const meta = { pid: child.pid };
  // Store the start time when available to guard against pid reuse later.
  const startTime = readProcessStartTime(child.pid);
  if (startTime) meta.startTime = startTime;
  fs.writeFileSync(pidFile, JSON.stringify(meta));
}
// Poll `url` every 500 ms until it responds with a 2xx status, or fail after
// `timeoutMs` milliseconds.
async function waitForReady(url, timeoutMs = 30000) {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      const response = await fetch(url);
      if (response.ok) return;
    } catch (err) {
      // server not reachable yet - keep polling
    }
    await delay(500);
  }
  throw new Error(`DBGM-00305 Server at ${url} did not start in time`);
}
// --- Main ---
// Orchestrates the ai-chat e2e environment:
// 1. recreate the MyChinook MySQL database,
// 2. (re)start the REST/GraphQL test-api on port 4444,
// 3. (re)start the AI gateway mock on port 3110.
async function run() {
  // 1. Set up MyChinook MySQL database
  console.log('[ai-chat init] Setting up MyChinook database...');
  await initMySqlDatabase('MyChinook', path.resolve(path.join(__dirname, '../data/chinook-mysql.sql')));
  // 2. Start test-api (GraphQL/REST server on port 4444)
  console.log('[ai-chat init] Starting test-api on port 4444...');
  stopProcess(testApiPidFile);
  // swagger-jsdoc acts as an install marker: its presence means yarn install already ran.
  ensureDependencies(testApiDir, path.join(testApiDir, 'node_modules', 'swagger-jsdoc', 'package.json'));
  startBackgroundProcess(testApiDir, testApiPidFile, 4444);
  await waitForReady('http://localhost:4444/openapi.json');
  console.log('[ai-chat init] test-api is ready');
  // 3. Start aigwmock (AI Gateway mock on port 3110)
  console.log('[ai-chat init] Starting aigwmock on port 3110...');
  stopProcess(aigwmockPidFile);
  ensureDependencies(aigwmockDir, path.join(aigwmockDir, 'node_modules', 'express', 'package.json'));
  startBackgroundProcess(aigwmockDir, aigwmockPidFile, 3110);
  await waitForReady('http://localhost:3110/openrouter/v1/models');
  console.log('[ai-chat init] aigwmock is ready');
}
// Entry point: exit non-zero so CI marks the init step as failed.
run().catch(err => {
  console.error(err);
  process.exit(1);
});
+32
View File
@@ -8,6 +8,8 @@ const dbgatePluginMysql = require('dbgate-plugin-mysql');
dbgateApi.registerPlugins(dbgatePluginMysql);
const dbgatePluginPostgres = require('dbgate-plugin-postgres');
dbgateApi.registerPlugins(dbgatePluginPostgres);
const dbgatePluginDynamodb = require('dbgate-plugin-dynamodb');
dbgateApi.registerPlugins(dbgatePluginDynamodb);
async function initMySqlDatabase(dbname, inputFile) {
await dbgateApi.executeQuery({
@@ -125,6 +127,34 @@ async function initMongoDatabase(dbname, inputDirectory) {
// });
}
async function initDynamoDatabase(inputDirectory) {
const dynamodbConnection = {
server: process.env.SERVER_dynamo,
port: process.env.PORT_dynamo,
authType: 'onpremise',
engine: 'dynamodb@dbgate-plugin-dynamodb',
};
const driver = dbgatePluginDynamodb.drivers.find(d => d.engine === 'dynamodb@dbgate-plugin-dynamodb');
const pool = await driver.connect(dynamodbConnection);
const collections = await driver.listCollections(pool);
for (const collection of collections) {
await driver.dropTable(pool, collection);
}
await driver.disconnect(pool);
for (const file of fs.readdirSync(inputDirectory)) {
const pureName = path.parse(file).name;
const src = await dbgateApi.jsonLinesReader({ fileName: path.join(inputDirectory, file) });
const dst = await dbgateApi.tableWriter({
connection: dynamodbConnection,
pureName,
createIfNotExists: true,
});
await dbgateApi.copyStream(src, dst);
}
}
const baseDir = path.join(os.homedir(), '.dbgate');
async function copyFolder(source, target) {
@@ -148,6 +178,8 @@ async function run() {
await initMongoDatabase('MgChinook', path.resolve(path.join(__dirname, '../data/chinook-jsonl')));
await initMongoDatabase('MgRivers', path.resolve(path.join(__dirname, '../data/rivers-jsonl')));
await initDynamoDatabase(path.resolve(path.join(__dirname, '../data/chinook-jsonl')));
await copyFolder(
path.resolve(path.join(__dirname, '../data/chinook-jsonl')),
path.join(baseDir, 'archive-e2etests', 'default')
+24
View File
@@ -7,6 +7,8 @@ const dbgatePluginMysql = require('dbgate-plugin-mysql');
dbgateApi.registerPlugins(dbgatePluginMysql);
const dbgatePluginPostgres = require('dbgate-plugin-postgres');
dbgateApi.registerPlugins(dbgatePluginPostgres);
const dbgatePluginDynamodb = require('dbgate-plugin-dynamodb');
dbgateApi.registerPlugins(dbgatePluginDynamodb);
async function createDb(connection, dropDbSql, createDbSql, database = 'my_guitar_shop', { dropDatabaseName } = {}) {
if (dropDbSql) {
@@ -125,6 +127,28 @@ async function run() {
{ dropDatabaseName: 'my_guitar_shop' }
);
}
if (localconfig.dynamo) {
const dynamodbConnection = {
server: process.env.SERVER_dynamo,
port: process.env.PORT_dynamo,
authType: 'onpremise',
engine: 'dynamodb@dbgate-plugin-dynamodb',
};
const driver = dbgatePluginDynamodb.drivers.find(d => d.engine === 'dynamodb@dbgate-plugin-dynamodb');
const pool = await driver.connect(dynamodbConnection);
const collections = await driver.listCollections(pool);
for (const collection of collections) {
await driver.dropTable(pool, collection);
}
await driver.disconnect(pool);
await dbgateApi.importDbFromFolder({
connection: dynamodbConnection,
folder: path.resolve(path.join(__dirname, '../data/my-guitar-shop')),
});
}
}
dbgateApi.runScript(run);
+133
View File
@@ -0,0 +1,133 @@
const fs = require('fs');
const path = require('path');
const { spawn, spawnSync } = require('child_process');
const rootDir = path.resolve(__dirname, '..', '..');
const testApiDir = path.join(rootDir, 'test-api');
const pidFile = path.resolve(__dirname, '..', 'tmpdata', 'test-api.pid');
const isWindows = process.platform === 'win32';
// Promise-based sleep helper.
function delay(ms) {
  return new Promise(resolve => {
    setTimeout(() => resolve(), ms);
  });
}
// Poll the test-api's OpenAPI endpoint every 500 ms until it answers with a
// 2xx status, or fail after `timeoutMs` milliseconds.
async function waitForApiReady(timeoutMs = 30000) {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      const response = await fetch('http://localhost:4444/openapi.json');
      if (response.ok) {
        return;
      }
    } catch (err) {
      // server not up yet - keep polling
    }
    await delay(500);
  }
  throw new Error('DBGM-00306 test-api did not start on port 4444 in time');
}
// Read the process start-time token (22nd field of /proc/<pid>/stat) on
// Linux; returns null on other platforms or when the pid has vanished.
function readProcessStartTime(pid) {
  if (process.platform !== 'linux') {
    return null;
  }
  let stat;
  try {
    stat = fs.readFileSync(`/proc/${pid}/stat`, 'utf-8');
  } catch (_err) {
    return null;
  }
  return stat.split(' ')[21] || null;
}
// True when the recorded pid still plausibly refers to the process we
// started; on Linux the stored start time defends against pid reuse.
function isPidStillOurs(meta) {
  const hasValidPid = Boolean(meta) && meta.pid > 0;
  if (!hasValidPid) return false;
  if (process.platform !== 'linux' || !meta.startTime) return true;
  return readProcessStartTime(meta.pid) === meta.startTime;
}
// Kill the test-api instance recorded in the pid-file by a previous run (if
// the pid is still ours), then remove the pid-file. Everything here is
// best-effort: stale files and already-dead processes are ignored.
function stopPreviousTestApi() {
  if (!fs.existsSync(pidFile)) {
    return;
  }
  try {
    const content = fs.readFileSync(pidFile, 'utf-8').trim();
    let meta;
    try {
      meta = JSON.parse(content);
    } catch (_) {
      // Legacy pid-file format: a bare pid number instead of JSON metadata.
      const pid = Number(content);
      meta = Number.isInteger(pid) && pid > 0 ? { pid } : null;
    }
    if (isPidStillOurs(meta)) {
      process.kill(meta.pid);
    }
  } catch (err) {
    // ignore stale pid file or already terminated process
  }
  try {
    fs.unlinkSync(pidFile);
  } catch (err) {
    // ignore
  }
}
// Launch the test-api via `yarn start` as a detached background process on
// port 4444 and persist its pid (plus, on Linux, its /proc start time) to
// the pid-file so stopPreviousTestApi can shut it down on the next run.
function startTestApi() {
  const command = isWindows ? 'cmd.exe' : 'yarn';
  const args = isWindows ? ['/c', 'yarn start'] : ['start'];
  const child = spawn(command, args, {
    cwd: testApiDir,
    env: {
      ...process.env,
      PORT: '4444',
    },
    detached: true,
    stdio: 'ignore',
  });
  // Detach fully so this script can exit while the server keeps running.
  child.unref();
  fs.mkdirSync(path.dirname(pidFile), { recursive: true });
  const meta = { pid: child.pid };
  // Record start time when available to guard against pid reuse.
  const startTime = readProcessStartTime(child.pid);
  if (startTime) meta.startTime = startTime;
  fs.writeFileSync(pidFile, JSON.stringify(meta));
}
// Install test-api's node_modules via yarn unless the swagger-jsdoc marker
// package already exists (meaning installation already happened).
function ensureTestApiDependencies() {
  const dependencyCheckFile = path.join(testApiDir, 'node_modules', 'swagger-jsdoc', 'package.json');
  if (fs.existsSync(dependencyCheckFile)) {
    return;
  }
  const installCommand = isWindows ? 'cmd.exe' : 'yarn';
  const installArgs = isWindows
    ? ['/c', 'yarn install --silent']
    : ['install', '--silent'];
  const result = spawnSync(installCommand, installArgs, {
    cwd: testApiDir,
    stdio: 'inherit',
    env: process.env,
  });
  if (result.status !== 0) {
    throw new Error('DBGM-00307 Failed to install test-api dependencies');
  }
}
// Restart the shared REST/GraphQL test-api and wait until it answers.
async function run() {
  stopPreviousTestApi();
  ensureTestApiDependencies();
  startTestApi();
  await waitForApiReady();
}
// Entry point: a failed startup must exit non-zero for CI.
run().catch(err => {
  console.error(err);
  process.exit(1);
});
+7 -1
View File
@@ -19,30 +19,36 @@
"cy:run:portal": "cypress run --spec cypress/e2e/portal.cy.js",
"cy:run:oauth": "cypress run --spec cypress/e2e/oauth.cy.js",
"cy:run:browse-data": "cypress run --spec cypress/e2e/browse-data.cy.js",
"cy:run:rest": "cypress run --spec cypress/e2e/rest.cy.js",
"cy:run:team": "cypress run --spec cypress/e2e/team.cy.js",
"cy:run:multi-sql": "cypress run --spec cypress/e2e/multi-sql.cy.js",
"cy:run:cloud": "cypress run --spec cypress/e2e/cloud.cy.js",
"cy:run:charts": "cypress run --spec cypress/e2e/charts.cy.js",
"cy:run:redis": "cypress run --spec cypress/e2e/redis.cy.js",
"cy:run:ai-chat": "cypress run --spec cypress/e2e/ai-chat.cy.js",
"start:add-connection": "node clearTestingData && cd .. && node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:portal": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/portal/.env node e2e-tests/init/portal.js && env-cmd -f e2e-tests/env/portal/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:oauth": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/oauth/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:browse-data": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/browse-data/.env node e2e-tests/init/browse-data.js && env-cmd -f e2e-tests/env/browse-data/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:rest": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/rest/.env node e2e-tests/init/rest.js && env-cmd -f e2e-tests/env/rest/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:team": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/team/.env node e2e-tests/init/team.js && env-cmd -f e2e-tests/env/team/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:multi-sql": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/multi-sql/.env node e2e-tests/init/multi-sql.js && env-cmd -f e2e-tests/env/multi-sql/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:cloud": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/cloud/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:charts": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/charts/.env node e2e-tests/init/charts.js && env-cmd -f e2e-tests/env/charts/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:redis": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/redis/.env node e2e-tests/init/redis.js && env-cmd -f e2e-tests/env/redis/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:ai-chat": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/ai-chat/.env node e2e-tests/init/ai-chat.js && env-cmd -f e2e-tests/env/ai-chat/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"test:add-connection": "start-server-and-test start:add-connection http://localhost:3000 cy:run:add-connection",
"test:portal": "start-server-and-test start:portal http://localhost:3000 cy:run:portal",
"test:oauth": "start-server-and-test start:oauth http://localhost:3000 cy:run:oauth",
"test:browse-data": "start-server-and-test start:browse-data http://localhost:3000 cy:run:browse-data",
"test:rest": "start-server-and-test start:rest http://localhost:3000 cy:run:rest",
"test:team": "start-server-and-test start:team http://localhost:3000 cy:run:team",
"test:multi-sql": "start-server-and-test start:multi-sql http://localhost:3000 cy:run:multi-sql",
"test:cloud": "start-server-and-test start:cloud http://localhost:3000 cy:run:cloud",
"test:charts": "start-server-and-test start:charts http://localhost:3000 cy:run:charts",
"test:redis": "start-server-and-test start:redis http://localhost:3000 cy:run:redis",
"test": "yarn test:add-connection && yarn test:portal && yarn test:oauth && yarn test:browse-data && yarn test:team && yarn test:multi-sql && yarn test:cloud && yarn test:charts && yarn test:redis",
"test:ai-chat": "start-server-and-test start:ai-chat http://localhost:3000 cy:run:ai-chat",
"test": "yarn test:add-connection && yarn test:portal && yarn test:oauth && yarn test:browse-data && yarn test:rest && yarn test:team && yarn test:multi-sql && yarn test:cloud && yarn test:charts && yarn test:redis && yarn test:ai-chat",
"test:ci": "yarn test"
},
"dependencies": {}
+2
View File
@@ -0,0 +1,2 @@
test-api.pid
aigwmock.pid
@@ -0,0 +1,536 @@
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
const crypto = require('crypto');
const stream = require('stream');
const { mongoDbEngine, dynamoDbEngine } = require('../engines');
const tableWriter = require('dbgate-api/src/shell/tableWriter');
const tableReader = require('dbgate-api/src/shell/tableReader');
const copyStream = require('dbgate-api/src/shell/copyStream');
// Generate a unique collection name so concurrent/repeated test runs never
// collide (prefix "test_" + 12 hex chars).
function randomCollectionName() {
  const suffix = crypto.randomBytes(6).toString('hex');
  return `test_${suffix}`;
}
// Document-store engines under test; each describe.each suite below runs once
// per entry. Connection details come from the engine definitions in ../engines.
const documentEngines = [
  { label: 'MongoDB', engine: mongoDbEngine },
  { label: 'DynamoDB', engine: dynamoDbEngine },
];
// Resolve the engine's driver and open a connection to it.
async function connectEngine(engine) {
  const driver = requireEngineDriver(engine.connection);
  const conn = await driver.connect(engine.connection);
  return { conn, driver };
}
// Create a collection/table. DynamoDB additionally requires an explicit
// partition key, so it gets `_id` as a string ("S") partition key.
async function createCollection(driver, conn, collectionName, engine) {
  const isDynamo = engine.connection.engine.startsWith('dynamodb');
  const collection = isDynamo
    ? {
        name: collectionName,
        partitionKey: '_id',
        partitionKeyType: 'S',
      }
    : { name: collectionName };
  await driver.operation(conn, {
    type: 'createCollection',
    collection,
  });
}
// Drop a collection, tolerating failures (it may not exist anymore).
async function dropCollection(driver, conn, collectionName) {
  try {
    await driver.operation(conn, {
      type: 'dropCollection',
      collection: collectionName,
    });
  } catch (err) {
    // Dropping a missing collection is fine - ignore the error.
  }
}
// Insert a single document via the driver's change-set API.
async function insertDocument(driver, conn, collectionName, doc) {
  const changeSet = {
    inserts: [{ pureName: collectionName, document: {}, fields: doc }],
    updates: [],
    deletes: [],
  };
  return driver.updateCollection(conn, changeSet);
}
// Read up to 1000 documents from a collection.
async function readAll(driver, conn, collectionName) {
  const options = { pureName: collectionName, limit: 1000 };
  return driver.readCollection(conn, options);
}
// Update documents matching `condition`, setting only the given `fields`.
async function updateDocument(driver, conn, collectionName, condition, fields) {
  const changeSet = {
    inserts: [],
    updates: [{ pureName: collectionName, condition, fields }],
    deletes: [],
  };
  return driver.updateCollection(conn, changeSet);
}
// Delete documents matching `condition`.
async function deleteDocument(driver, conn, collectionName, condition) {
  const changeSet = {
    inserts: [],
    updates: [],
    deletes: [{ pureName: collectionName, condition }],
  };
  return driver.updateCollection(conn, changeSet);
}
// CRUD test matrix: every case below runs once per document engine
// (MongoDB, DynamoDB) via describe.each. A fresh, randomly-named collection
// is created before each test and dropped afterwards, so cases stay isolated.
describe('Collection CRUD', () => {
  describe.each(documentEngines.map(e => [e.label, e.engine]))('%s', (label, engine) => {
    let driver;
    let conn;
    let collectionName;
    beforeAll(async () => {
      // One connection per engine, shared by the whole suite.
      const result = await connectEngine(engine);
      driver = result.driver;
      conn = result.conn;
    });
    afterAll(async () => {
      if (conn) {
        await driver.close(conn);
      }
    });
    beforeEach(async () => {
      collectionName = randomCollectionName();
      await createCollection(driver, conn, collectionName, engine);
    });
    afterEach(async () => {
      await dropCollection(driver, conn, collectionName);
    });
    // ---- INSERT ----
    test('insert a single document', async () => {
      const res = await insertDocument(driver, conn, collectionName, {
        _id: 'doc1',
        name: 'Alice',
        age: 30,
      });
      expect(res.inserted.length).toBe(1);
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Alice');
      expect(all.rows[0].age).toBe(30);
    });
    test('insert multiple documents', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'a1', name: 'Alice' });
      await insertDocument(driver, conn, collectionName, { _id: 'a2', name: 'Bob' });
      await insertDocument(driver, conn, collectionName, { _id: 'a3', name: 'Charlie' });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(3);
      // Sort because document stores give no ordering guarantee.
      const names = all.rows.map(r => r.name).sort();
      expect(names).toEqual(['Alice', 'Bob', 'Charlie']);
    });
    test('insert document with nested object', async () => {
      await insertDocument(driver, conn, collectionName, {
        _id: 'nested1',
        name: 'Alice',
        address: { city: 'Prague', zip: '11000' },
      });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].address.city).toBe('Prague');
      expect(all.rows[0].address.zip).toBe('11000');
    });
    // ---- READ ----
    test('read from empty collection returns no rows', async () => {
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(0);
    });
    test('read with limit', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'l1', name: 'A' });
      await insertDocument(driver, conn, collectionName, { _id: 'l2', name: 'B' });
      await insertDocument(driver, conn, collectionName, { _id: 'l3', name: 'C' });
      const limited = await driver.readCollection(conn, {
        pureName: collectionName,
        limit: 2,
      });
      expect(limited.rows.length).toBe(2);
    });
    test('count documents', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'c1', name: 'A' });
      await insertDocument(driver, conn, collectionName, { _id: 'c2', name: 'B' });
      const result = await driver.readCollection(conn, {
        pureName: collectionName,
        countDocuments: true,
      });
      expect(result.count).toBe(2);
    });
    test('count documents on empty collection returns zero', async () => {
      const result = await driver.readCollection(conn, {
        pureName: collectionName,
        countDocuments: true,
      });
      expect(result.count).toBe(0);
    });
    // ---- UPDATE ----
    test('update an existing document', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'u1', name: 'Alice', age: 25 });
      const res = await updateDocument(driver, conn, collectionName, { _id: 'u1' }, { name: 'Alice Updated' });
      expect(res.errorMessage).toBeUndefined();
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Alice Updated');
    });
    test('update does not create new document', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'u2', name: 'Bob' });
      await updateDocument(driver, conn, collectionName, { _id: 'nonexistent' }, { name: 'Ghost' });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Bob');
    });
    test('update only specified fields', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'u3', name: 'Carol', age: 40, city: 'London' });
      await updateDocument(driver, conn, collectionName, { _id: 'u3' }, { age: 41 });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Carol');
      expect(all.rows[0].age).toBe(41);
      expect(all.rows[0].city).toBe('London');
    });
    // ---- DELETE ----
    test('delete an existing document', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'd1', name: 'Alice' });
      await insertDocument(driver, conn, collectionName, { _id: 'd2', name: 'Bob' });
      const res = await deleteDocument(driver, conn, collectionName, { _id: 'd1' });
      expect(res.errorMessage).toBeUndefined();
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Bob');
    });
    test('delete non-existing document does not affect collection', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'dx1', name: 'Alice' });
      await deleteDocument(driver, conn, collectionName, { _id: 'nonexistent' });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Alice');
    });
    test('delete all documents leaves empty collection', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'da1', name: 'A' });
      await insertDocument(driver, conn, collectionName, { _id: 'da2', name: 'B' });
      await deleteDocument(driver, conn, collectionName, { _id: 'da1' });
      await deleteDocument(driver, conn, collectionName, { _id: 'da2' });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(0);
    });
    // ---- EDGE CASES ----
    test('insert and read document with empty string field', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'e1', name: '', value: 'test' });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('');
      expect(all.rows[0].value).toBe('test');
    });
    test('insert and read document with numeric values', async () => {
      await insertDocument(driver, conn, collectionName, {
        _id: 'n1',
        intVal: 42,
        floatVal: 3.14,
        zero: 0,
        negative: -10,
      });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].intVal).toBe(42);
      // toBeCloseTo: floats may round-trip through the engine inexactly.
      expect(all.rows[0].floatVal).toBeCloseTo(3.14);
      expect(all.rows[0].zero).toBe(0);
      expect(all.rows[0].negative).toBe(-10);
    });
    test('insert and read document with boolean values', async () => {
      await insertDocument(driver, conn, collectionName, {
        _id: 'b1',
        active: true,
        deleted: false,
      });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].active).toBe(true);
      expect(all.rows[0].deleted).toBe(false);
    });
    test('reading non-existing collection returns error or empty', async () => {
      const result = await driver.readCollection(conn, {
        pureName: 'nonexistent_collection_' + crypto.randomBytes(4).toString('hex'),
        limit: 10,
      });
      // Depending on the driver, this may return an error or empty rows
      if (result.errorMessage) {
        expect(typeof result.errorMessage).toBe('string');
      } else {
        expect(result.rows.length).toBe(0);
      }
    });
    test('replace full document via update with document field', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'r1', name: 'Original', extra: 'data' });
      // Passing `document` (with empty `fields`) replaces the whole document.
      await driver.updateCollection(conn, {
        inserts: [],
        updates: [
          {
            pureName: collectionName,
            condition: { _id: 'r1' },
            document: { _id: 'r1', name: 'Replaced' },
            fields: {},
          },
        ],
        deletes: [],
      });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Replaced');
    });
    test('insert then update then delete lifecycle', async () => {
      // Insert
      await insertDocument(driver, conn, collectionName, { _id: 'life1', name: 'Lifecycle', status: 'created' });
      let all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].status).toBe('created');
      // Update
      await updateDocument(driver, conn, collectionName, { _id: 'life1' }, { status: 'updated' });
      all = await readAll(driver, conn, collectionName);
      expect(all.rows[0].status).toBe('updated');
      // Delete
      await deleteDocument(driver, conn, collectionName, { _id: 'life1' });
      all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(0);
    });
  });
});
function createDocumentImportStream(documents) {
const pass = new stream.PassThrough({ objectMode: true });
pass.write({ __isStreamHeader: true, __isDynamicStructure: true });
for (const doc of documents) {
pass.write(doc);
}
pass.end();
return pass;
}
function createExportStream() {
const writable = new stream.Writable({ objectMode: true });
writable.resultArray = [];
writable._write = (chunk, encoding, callback) => {
writable.resultArray.push(chunk);
callback();
};
return writable;
}
// Import/export streaming tests: verify that tableWriter/tableReader
// round-trip documents through each document engine using in-memory
// object-mode streams. Runs once per engine via describe.each, with a fresh
// collection per test.
describe('Collection Import/Export', () => {
  describe.each(documentEngines.map(e => [e.label, e.engine]))('%s', (label, engine) => {
    let driver;
    let conn;
    let collectionName;
    beforeAll(async () => {
      const result = await connectEngine(engine);
      driver = result.driver;
      conn = result.conn;
    });
    afterAll(async () => {
      if (conn) {
        await driver.close(conn);
      }
    });
    beforeEach(async () => {
      collectionName = randomCollectionName();
      await createCollection(driver, conn, collectionName, engine);
    });
    afterEach(async () => {
      await dropCollection(driver, conn, collectionName);
    });
    test('import documents via stream', async () => {
      const documents = [
        { _id: 'imp1', name: 'Alice', age: 30 },
        { _id: 'imp2', name: 'Bob', age: 25 },
        { _id: 'imp3', name: 'Charlie', age: 35 },
      ];
      const reader = createDocumentImportStream(documents);
      const writer = await tableWriter({
        systemConnection: conn,
        driver,
        pureName: collectionName,
        createIfNotExists: true,
      });
      await copyStream(reader, writer);
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(3);
      // Sort because document stores give no ordering guarantee.
      const names = all.rows.map(r => r.name).sort();
      expect(names).toEqual(['Alice', 'Bob', 'Charlie']);
    });
    test('export documents via stream', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'exp1', name: 'Alice', city: 'Prague' });
      await insertDocument(driver, conn, collectionName, { _id: 'exp2', name: 'Bob', city: 'Vienna' });
      await insertDocument(driver, conn, collectionName, { _id: 'exp3', name: 'Charlie', city: 'Berlin' });
      const reader = await tableReader({
        systemConnection: conn,
        driver,
        pureName: collectionName,
      });
      const writer = createExportStream();
      await copyStream(reader, writer);
      // Drop the stream-header chunk; only data rows are compared.
      const rows = writer.resultArray.filter(x => !x.__isStreamHeader);
      expect(rows.length).toBe(3);
      const names = rows.map(r => r.name).sort();
      expect(names).toEqual(['Alice', 'Bob', 'Charlie']);
    });
    test('import then export round-trip', async () => {
      const documents = [
        { _id: 'rt1', name: 'Alice', value: 100 },
        { _id: 'rt2', name: 'Bob', value: 200 },
        { _id: 'rt3', name: 'Charlie', value: 300 },
        { _id: 'rt4', name: 'Diana', value: 400 },
      ];
      // Import
      const importReader = createDocumentImportStream(documents);
      const importWriter = await tableWriter({
        systemConnection: conn,
        driver,
        pureName: collectionName,
        createIfNotExists: true,
      });
      await copyStream(importReader, importWriter);
      // Export
      const exportReader = await tableReader({
        systemConnection: conn,
        driver,
        pureName: collectionName,
      });
      const exportWriter = createExportStream();
      await copyStream(exportReader, exportWriter);
      const rows = exportWriter.resultArray.filter(x => !x.__isStreamHeader);
      expect(rows.length).toBe(4);
      const sortedRows = rows.sort((a, b) => a._id.localeCompare(b._id));
      for (const doc of documents) {
        const found = sortedRows.find(r => r._id === doc._id);
        expect(found).toBeDefined();
        expect(found.name).toBe(doc.name);
        expect(found.value).toBe(doc.value);
      }
    });
    test('import documents with nested objects', async () => {
      const documents = [
        { _id: 'nest1', name: 'Alice', address: { city: 'Prague', zip: '11000' } },
        { _id: 'nest2', name: 'Bob', address: { city: 'Vienna', zip: '1010' } },
      ];
      const reader = createDocumentImportStream(documents);
      const writer = await tableWriter({
        systemConnection: conn,
        driver,
        pureName: collectionName,
        createIfNotExists: true,
      });
      await copyStream(reader, writer);
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(2);
      const alice = all.rows.find(r => r.name === 'Alice');
      expect(alice.address.city).toBe('Prague');
      expect(alice.address.zip).toBe('11000');
    });
    test('import many documents', async () => {
      // 150 documents exercises any internal batching in the writer.
      const documents = [];
      for (let i = 0; i < 150; i++) {
        documents.push({ _id: `many${i}`, name: `Name${i}`, index: i });
      }
      const reader = createDocumentImportStream(documents);
      const writer = await tableWriter({
        systemConnection: conn,
        driver,
        pureName: collectionName,
        createIfNotExists: true,
      });
      await copyStream(reader, writer);
      const result = await driver.readCollection(conn, {
        pureName: collectionName,
        countDocuments: true,
      });
      expect(result.count).toBe(150);
    });
    test('export empty collection returns no data rows', async () => {
      const reader = await tableReader({
        systemConnection: conn,
        driver,
        pureName: collectionName,
      });
      const writer = createExportStream();
      await copyStream(reader, writer);
      const rows = writer.resultArray.filter(x => !x.__isStreamHeader);
      expect(rows.length).toBe(0);
    });
  });
});
+17
View File
@@ -123,5 +123,22 @@ services:
retries: 3
start_period: 40s
mongodb:
image: mongo:4.0.12
restart: always
volumes:
- mongo-data:/data/db
- mongo-config:/data/configdb
ports:
- 27017:27017
dynamodb:
image: amazon/dynamodb-local
restart: always
ports:
- 8000:8000
volumes:
firebird-data:
mongo-data:
mongo-config:
+23
View File
@@ -738,6 +738,27 @@ const firebirdEngine = {
skipDropReferences: true,
};
/** @type {import('dbgate-types').TestEngineInfo} */
const mongoDbEngine = {
label: 'MongoDB',
connection: {
engine: 'mongo@dbgate-plugin-mongo',
server: 'localhost',
port: 27017,
},
};
/** @type {import('dbgate-types').TestEngineInfo} */
const dynamoDbEngine = {
label: 'DynamoDB',
connection: {
engine: 'dynamodb@dbgate-plugin-dynamodb',
server: 'localhost',
port: 8000,
authType: 'onpremise',
},
};
const enginesOnCi = [
// all engines, which would be run on GitHub actions
mysqlEngine,
@@ -788,3 +809,5 @@ module.exports.libsqlFileEngine = libsqlFileEngine;
module.exports.libsqlWsEngine = libsqlWsEngine;
module.exports.duckdbEngine = duckdbEngine;
module.exports.firebirdEngine = firebirdEngine;
module.exports.mongoDbEngine = mongoDbEngine;
module.exports.dynamoDbEngine = dynamoDbEngine;
+1 -1
View File
@@ -1,7 +1,7 @@
{
"name": "dbgate-integration-tests",
"version": "7.0.0-alpha.1",
"homepage": "https://dbgate.org/",
"homepage": "https://www.dbgate.io/",
"repository": {
"type": "git",
"url": "https://github.com/dbgate/dbgate.git"
+4 -2
View File
@@ -1,5 +1,6 @@
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
const engines = require('./engines');
const { mongoDbEngine, dynamoDbEngine } = require('./engines');
global.DBGATE_PACKAGES = {
'dbgate-tools': require('dbgate-tools'),
'dbgate-sqltree': require('dbgate-sqltree'),
@@ -9,7 +10,7 @@ global.DBGATE_PACKAGES = {
async function connectEngine(engine) {
const { connection } = engine;
const driver = requireEngineDriver(connection);
for (;;) {
for (; ;) {
try {
const conn = await driver.connect(connection);
await driver.getVersion(conn);
@@ -26,7 +27,8 @@ async function connectEngine(engine) {
async function run() {
await new Promise(resolve => setTimeout(resolve, 10000));
await Promise.all(engines.map(engine => connectEngine(engine)));
const documentEngines = [mongoDbEngine, dynamoDbEngine];
await Promise.all([...engines, ...documentEngines].map(engine => connectEngine(engine)));
}
run();
+5 -3
View File
@@ -1,6 +1,6 @@
{
"private": true,
"version": "7.0.6",
"version": "7.1.8",
"name": "dbgate-all",
"workspaces": [
"packages/*",
@@ -30,13 +30,15 @@
"start:web": "yarn workspace dbgate-web dev",
"start:sqltree": "yarn workspace dbgate-sqltree start",
"start:tools": "yarn workspace dbgate-tools start",
"start:rest": "yarn workspace dbgate-rest start",
"start:datalib": "yarn workspace dbgate-datalib start",
"start:filterparser": "yarn workspace dbgate-filterparser start",
"build:sqltree": "yarn workspace dbgate-sqltree build",
"build:datalib": "yarn workspace dbgate-datalib build",
"build:filterparser": "yarn workspace dbgate-filterparser build",
"build:tools": "yarn workspace dbgate-tools build",
"build:lib": "yarn build:sqltree && yarn build:tools && yarn build:filterparser && yarn build:datalib",
"build:rest": "yarn workspace dbgate-rest build",
"build:lib": "yarn build:sqltree && yarn build:tools && yarn build:filterparser && yarn build:datalib && yarn build:rest",
"build:app": "yarn plugins:copydist && cd app && yarn install && yarn build",
"build:api": "yarn workspace dbgate-api build",
"build:api:doc": "yarn workspace dbgate-api build:doc",
@@ -63,7 +65,7 @@
"prepare:packer": "yarn plugins:copydist && yarn build:web && yarn build:api && yarn copy:packer:build",
"build:e2e": "yarn build:lib && yarn prepare:packer",
"start": "concurrently --kill-others-on-fail \"yarn start:api\" \"yarn start:web\"",
"lib": "concurrently --kill-others-on-fail \"yarn start:sqltree\" \"yarn start:filterparser\" \"yarn start:datalib\" \"yarn start:tools\" \"yarn build:plugins:frontend:watch\"",
"lib": "concurrently --kill-others-on-fail \"yarn start:sqltree\" \"yarn start:filterparser\" \"yarn start:datalib\" \"yarn start:tools\" \"yarn start:rest\" \"yarn build:plugins:frontend:watch\"",
"ts:api": "yarn workspace dbgate-api ts",
"ts:web": "yarn workspace dbgate-web ts",
"ts": "yarn ts:api && yarn ts:web",
+14
View File
@@ -0,0 +1,14 @@
{
"name": "dbgate-aigwmock",
"version": "1.0.0",
"description": "Mock AI Gateway server for E2E testing",
"main": "src/index.js",
"scripts": {
"start": "node src/index.js"
},
"license": "GPL-3.0",
"dependencies": {
"cors": "^2.8.6",
"express": "^5.2.1"
}
}
+202
View File
@@ -0,0 +1,202 @@
// Mock AI Gateway server (OpenRouter-compatible subset) used by E2E tests.
const express = require('express');
const cors = require('cors');
const fs = require('fs');
const path = require('path');
const app = express();
app.use(cors());
// Generous body limit: chat requests may embed sizeable schema/context payloads.
app.use(express.json({ limit: '50mb' }));
// Canned scenarios, loaded once at startup from the adjacent mockResponses.json.
const responses = JSON.parse(fs.readFileSync(path.join(__dirname, 'mockResponses.json'), 'utf-8'));
// Monotonic counter used to mint unique tool-call ids across all requests.
let callCounter = 0;
// GET /openrouter/v1/models
// Model listing endpoint: always advertises a single mock model.
app.get('/openrouter/v1/models', (req, res) => {
res.json({
data: [{ id: 'mock-model', name: 'Mock Model' }],
preferredModel: 'mock-model',
});
});
// POST /openrouter/v1/chat/completions
app.post('/openrouter/v1/chat/completions', (req, res) => {
const messages = req.body.messages || [];
// Find the first user message (skip system messages)
const userMessage = messages.find(m => m.role === 'user');
if (!userMessage) {
return streamTextResponse(res, "I don't have enough context to help. Please ask a question.");
}
// Count assistant messages to determine the current step
const assistantCount = messages.filter(m => m.role === 'assistant').length;
// Find matching scenario by regex
const scenario = responses.scenarios.find(s => {
const regex = new RegExp(s.match, 'i');
return regex.test(userMessage.content);
});
if (!scenario) {
console.log(`[aigwmock] No scenario matched for: "${userMessage.content}"`);
return streamTextResponse(res, "I'm a mock AI assistant. I don't have a prepared response for that question.");
}
const step = scenario.steps[assistantCount];
if (!step) {
console.log(`[aigwmock] No more steps for scenario (step ${assistantCount})`);
return streamTextResponse(res, "I've completed my analysis of this topic.");
}
console.log(`[aigwmock] Scenario matched: "${scenario.match}", step ${assistantCount}, type: ${step.type}`);
if (step.type === 'tool_calls') {
return streamToolCallResponse(res, step.tool_calls);
} else {
return streamTextResponse(res, step.content);
}
});
function streamTextResponse(res, content) {
res.writeHead(200, {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
Connection: 'keep-alive',
});
const id = `chatcmpl-mock-${Date.now()}`;
const created = Math.floor(Date.now() / 1000);
// Split content into chunks for realistic streaming
const chunkSize = 20;
const chunks = [];
for (let i = 0; i < content.length; i += chunkSize) {
chunks.push(content.substring(i, i + chunkSize));
}
// Send initial role chunk
writeSSE(res, {
id,
object: 'chat.completion.chunk',
created,
model: 'mock-model',
choices: [{ index: 0, delta: { role: 'assistant', content: '' }, finish_reason: null }],
});
// Send content chunks
for (const chunk of chunks) {
writeSSE(res, {
id,
object: 'chat.completion.chunk',
created,
model: 'mock-model',
choices: [{ index: 0, delta: { content: chunk }, finish_reason: null }],
});
}
// Send finish
writeSSE(res, {
id,
object: 'chat.completion.chunk',
created,
model: 'mock-model',
choices: [{ index: 0, delta: {}, finish_reason: 'stop' }],
});
res.write('data: [DONE]\n\n');
res.end();
}
function streamToolCallResponse(res, toolCalls) {
res.writeHead(200, {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
Connection: 'keep-alive',
});
const id = `chatcmpl-mock-${Date.now()}`;
const created = Math.floor(Date.now() / 1000);
for (let i = 0; i < toolCalls.length; i++) {
const tc = toolCalls[i];
const callId = `call_mock_${++callCounter}`;
const args = JSON.stringify(tc.arguments);
if (i === 0) {
// First tool call: include role
writeSSE(res, {
id,
object: 'chat.completion.chunk',
created,
model: 'mock-model',
choices: [
{
index: 0,
delta: {
role: 'assistant',
content: null,
tool_calls: [{ index: i, id: callId, type: 'function', function: { name: tc.name, arguments: '' } }],
},
finish_reason: null,
},
],
});
} else {
// Additional tool calls
writeSSE(res, {
id,
object: 'chat.completion.chunk',
created,
model: 'mock-model',
choices: [
{
index: 0,
delta: {
tool_calls: [{ index: i, id: callId, type: 'function', function: { name: tc.name, arguments: '' } }],
},
finish_reason: null,
},
],
});
}
// Stream the arguments
writeSSE(res, {
id,
object: 'chat.completion.chunk',
created,
model: 'mock-model',
choices: [
{
index: 0,
delta: {
tool_calls: [{ index: i, function: { arguments: args } }],
},
finish_reason: null,
},
],
});
}
// Send finish with tool_calls reason
writeSSE(res, {
id,
object: 'chat.completion.chunk',
created,
model: 'mock-model',
choices: [{ index: 0, delta: {}, finish_reason: 'tool_calls' }],
});
res.write('data: [DONE]\n\n');
res.end();
}
function writeSSE(res, data) {
res.write(`data: ${JSON.stringify(data)}\n\n`);
}
// Default port 3110; override with the PORT environment variable (e.g. in CI).
const port = process.env.PORT || 3110;
app.listen(port, () => {
console.log(`[aigwmock] AI Gateway mock server listening on port ${port}`);
});
+193
View File
@@ -0,0 +1,193 @@
{
"scenarios": [
{
"match": "chart.*popular.*genre|popular.*genre.*chart|most popular genre",
"steps": [
{
"type": "tool_calls",
"tool_calls": [
{ "name": "get_table_schema", "arguments": { "table": "Genre" } }
]
},
{
"type": "tool_calls",
"tool_calls": [
{ "name": "get_table_schema", "arguments": { "table": "Track" } }
]
},
{
"type": "tool_calls",
"tool_calls": [
{
"name": "execute_sql_select",
"arguments": {
"sql": "SELECT g.Name AS genre, COUNT(t.TrackId) AS track_count FROM Genre g JOIN Track t ON g.GenreId = t.GenreId GROUP BY g.Name ORDER BY track_count DESC LIMIT 10"
}
}
]
},
{
"type": "text",
"content": "Here is a chart showing the most popular genres by track count:\n\n```chart\n{\"type\":\"bar\",\"data\":{\"labels\":[\"Rock\",\"Latin\",\"Metal\",\"Alternative & Punk\",\"Jazz\",\"Blues\",\"Classical\",\"R&B/Soul\",\"Reggae\",\"Pop\"],\"datasets\":[{\"label\":\"Track Count\",\"data\":[1297,579,374,332,130,81,74,61,58,48]}]},\"options\":{\"plugins\":{\"title\":{\"display\":true,\"text\":\"Most Popular Genres by Track Count\"}}}}\n```"
}
]
},
{
"match": "most popular artist|popular artist|top artist",
"steps": [
{
"type": "tool_calls",
"tool_calls": [
{ "name": "get_table_schema", "arguments": { "table": "Artist" } }
]
},
{
"type": "tool_calls",
"tool_calls": [
{ "name": "get_table_schema", "arguments": { "table": "Album" } }
]
},
{
"type": "tool_calls",
"tool_calls": [
{ "name": "get_table_schema", "arguments": { "table": "Track" } }
]
},
{
"type": "tool_calls",
"tool_calls": [
{
"name": "execute_sql_select",
"arguments": {
"sql": "SELECT ar.Name AS artist, COUNT(t.TrackId) AS track_count FROM Artist ar JOIN Album al ON ar.ArtistId = al.ArtistId JOIN Track t ON al.AlbumId = t.AlbumId GROUP BY ar.Name ORDER BY track_count DESC LIMIT 10"
}
}
]
},
{
"type": "text",
"content": "The most popular artist by number of tracks is **Iron Maiden** with 213 tracks, followed by **U2** with 135 tracks and **Led Zeppelin** with 114 tracks."
}
]
},
{
"match": "list.*user|show.*user|get.*user",
"steps": [
{
"type": "tool_calls",
"tool_calls": [
{ "name": "graphql_introspect_schema", "arguments": {} }
]
},
{
"type": "tool_calls",
"tool_calls": [
{
"name": "execute_graphql_query",
"arguments": {
"query": "{ users { id firstName lastName email } }"
}
}
]
},
{
"type": "text",
"content": "Here are the users from the GraphQL API. The system contains multiple registered users with their names and email addresses."
}
]
},
{
"match": "chart.*product.*categor|product.*categor.*chart|chart.*categor",
"steps": [
{
"type": "tool_calls",
"tool_calls": [
{ "name": "graphql_introspect_schema", "arguments": {} }
]
},
{
"type": "tool_calls",
"tool_calls": [
{
"name": "execute_graphql_query",
"arguments": {
"query": "{ products { category } }"
}
}
]
},
{
"type": "text",
"content": "Here is a bar chart showing the distribution of products across categories:\n\n```chart\n{\"type\":\"bar\",\"data\":{\"labels\":[\"Electronics\",\"Clothing\",\"Books\",\"Home & Garden\",\"Sports\",\"Toys\"],\"datasets\":[{\"label\":\"Number of Products\",\"data\":[35,30,33,38,32,32]}]},\"options\":{\"plugins\":{\"title\":{\"display\":true,\"text\":\"Products by Category\"}}}}\n```"
}
]
},
{
"match": "most expensive product|expensive.*product|highest price",
"steps": [
{
"type": "tool_calls",
"tool_calls": [
{ "name": "graphql_introspect_schema", "arguments": {} }
]
},
{
"type": "tool_calls",
"tool_calls": [
{
"name": "execute_graphql_query",
"arguments": {
"query": "{ products { id name price category } }"
}
}
]
},
{
"type": "text",
"content": "Based on the query results, I found the most expensive product in the system. The product details are shown in the query results above."
}
]
},
{
"match": "show.*categor|list.*categor|all.*categor",
"steps": [
{
"type": "tool_calls",
"tool_calls": [
{ "name": "graphql_introspect_schema", "arguments": {} }
]
},
{
"type": "tool_calls",
"tool_calls": [
{
"name": "execute_graphql_query",
"arguments": {
"query": "{ categories { id name description active } }"
}
}
]
},
{
"type": "text",
"content": "Here are all the categories available in the system. Each category has a name, description, and active status indicating whether it is currently in use."
}
]
},
{
"match": "Explain the following error|doesn't exist|does not exist",
"steps": [
{
"type": "tool_calls",
"tool_calls": [
{ "name": "get_table_schema", "arguments": { "table": "Invoice" } }
]
},
{
"type": "text",
"content": "The error occurs because the table `Invoice2` does not exist in the `MyChinook` database. The correct table name is `Invoice`. Here is the corrected query:\n\n```sql\nSELECT * FROM Invoice\n```\n\nThe table name had a typo — `Invoice2` instead of `Invoice`. The `Invoice` table contains columns like `InvoiceId`, `CustomerId`, `InvoiceDate`, `Total`, and billing address fields."
}
]
}
]
}
+6 -1
View File
@@ -1,6 +1,7 @@
DEVMODE=1
DEVWEB=1
CONNECTIONS=mysql,postgres,mongo,redis,mssql,oracle
CONNECTIONS=mysql,postgres,mongo,redis,mssql,oracle,mongourl
LABEL_mysql=MySql
SERVER_mysql=dbgatedckstage1.sprinx.cz
@@ -43,6 +44,10 @@ PORT_oracle=1521
ENGINE_oracle=oracle@dbgate-plugin-oracle
SERVICE_NAME_oracle=xe
LABEL_mongourl=Mongo URL
URL_mongourl=mongodb://root:Pwd2020Db@dbgatedckstage1.sprinx.cz:27017
ENGINE_mongourl=mongo@dbgate-plugin-mongo
# SETTINGS_dataGrid.showHintColumns=1
# docker run -p 3000:3000 -e CONNECTIONS=mongo -e URL_mongo=mongodb://localhost:27017 -e ENGINE_mongo=mongo@dbgate-plugin-mongo -e LABEL_mongo=mongo dbgate/dbgate:beta
+3 -2
View File
@@ -2,7 +2,7 @@
"name": "dbgate-api",
"main": "src/index.js",
"version": "7.0.0-alpha.1",
"homepage": "https://dbgate.org/",
"homepage": "https://www.dbgate.io/",
"repository": {
"type": "git",
"url": "https://github.com/dbgate/dbgate.git"
@@ -31,7 +31,8 @@
"cors": "^2.8.5",
"cross-env": "^6.0.3",
"dbgate-datalib": "^7.0.0-alpha.1",
"dbgate-query-splitter": "^4.11.9",
"dbgate-query-splitter": "^4.12.0",
"dbgate-rest": "^7.0.0-alpha.1",
"dbgate-sqltree": "^7.0.0-alpha.1",
"dbgate-tools": "^7.0.0-alpha.1",
"debug": "^4.3.4",
+64 -5
View File
@@ -19,6 +19,26 @@ const unzipDirectory = require('../shell/unzipDirectory');
const logger = getLogger('archive');
/**
* Rejects any archive name (folder or file) that contains path-traversal
* sequences, directory separators, or null bytes. These values are used
* directly in path.join() calls; allowing traversal would let callers read
* or write arbitrary files outside the archive directory.
*/
function assertSafeArchiveName(name, label) {
if (typeof name !== 'string' || name.length === 0) {
throw new Error(`DBGM-00000 Invalid ${label}: must be a non-empty string`);
}
if (name.includes('\0') || name.includes('..') || name.includes('/') || name.includes('\\')) {
throw new Error(`DBGM-00000 Invalid ${label}: path traversal not allowed`);
}
// Reject names that resolve to the archive root itself (e.g. '.')
const resolved = path.resolve(archivedir(), name);
if (resolved === path.resolve(archivedir())) {
throw new Error(`DBGM-00000 Invalid ${label}: must not resolve to the archive root`);
}
}
module.exports = {
folders_meta: true,
async folders() {
@@ -39,6 +59,7 @@ module.exports = {
createFolder_meta: true,
async createFolder({ folder }) {
assertSafeArchiveName(folder, 'folder');
await fs.mkdir(path.join(archivedir(), folder));
socket.emitChanged('archive-folders-changed');
return true;
@@ -46,8 +67,12 @@ module.exports = {
createLink_meta: true,
async createLink({ linkedFolder }) {
if ( typeof linkedFolder !== 'string' || linkedFolder.length === 0) {
throw new Error(`DBGM-00000 Invalid linkedFolder: must be a non-empty string`);
}
assertSafeArchiveName(path.parse(linkedFolder).name, 'linkedFolder');
const folder = await this.getNewArchiveFolder({ database: path.parse(linkedFolder).name + '.link' });
fs.writeFile(path.join(archivedir(), folder), linkedFolder);
await fs.writeFile(path.join(archivedir(), folder), linkedFolder);
clearArchiveLinksCache();
socket.emitChanged('archive-folders-changed');
return folder;
@@ -71,6 +96,7 @@ module.exports = {
files_meta: true,
async files({ folder }) {
assertSafeArchiveName(folder, 'folder');
try {
if (folder.endsWith('.zip')) {
if (await fs.exists(path.join(archivedir(), folder))) {
@@ -121,6 +147,9 @@ module.exports = {
createFile_meta: true,
async createFile({ folder, file, fileType, tableInfo }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
assertSafeArchiveName(fileType, 'fileType');
await fs.writeFile(
path.join(resolveArchiveFolder(folder), `${file}.${fileType}`),
tableInfo ? JSON.stringify({ __isStreamHeader: true, tableInfo }) : ''
@@ -131,6 +160,9 @@ module.exports = {
deleteFile_meta: true,
async deleteFile({ folder, file, fileType }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
assertSafeArchiveName(fileType, 'fileType');
await fs.unlink(path.join(resolveArchiveFolder(folder), `${file}.${fileType}`));
socket.emitChanged(`archive-files-changed`, { folder });
return true;
@@ -138,6 +170,10 @@ module.exports = {
renameFile_meta: true,
async renameFile({ folder, file, newFile, fileType }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
assertSafeArchiveName(newFile, 'newFile');
assertSafeArchiveName(fileType, 'fileType');
await fs.rename(
path.join(resolveArchiveFolder(folder), `${file}.${fileType}`),
path.join(resolveArchiveFolder(folder), `${newFile}.${fileType}`)
@@ -148,6 +184,8 @@ module.exports = {
modifyFile_meta: true,
async modifyFile({ folder, file, changeSet, mergedRows, mergeKey, mergeMode }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
await jsldata.closeDataStore(`archive://${folder}/${file}`);
const changedFilePath = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
@@ -187,6 +225,8 @@ module.exports = {
renameFolder_meta: true,
async renameFolder({ folder, newFolder }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(newFolder, 'newFolder');
const uniqueName = await this.getNewArchiveFolder({ database: newFolder });
await fs.rename(path.join(archivedir(), folder), path.join(archivedir(), uniqueName));
socket.emitChanged(`archive-folders-changed`);
@@ -196,6 +236,7 @@ module.exports = {
deleteFolder_meta: true,
async deleteFolder({ folder }) {
if (!folder) throw new Error('Missing folder parameter');
assertSafeArchiveName(folder, 'folder');
if (folder.endsWith('.link') || folder.endsWith('.zip')) {
await fs.unlink(path.join(archivedir(), folder));
} else {
@@ -207,6 +248,8 @@ module.exports = {
saveText_meta: true,
async saveText({ folder, file, text }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
await fs.writeFile(path.join(resolveArchiveFolder(folder), `${file}.jsonl`), text);
socket.emitChanged(`archive-files-changed`, { folder });
return true;
@@ -214,6 +257,8 @@ module.exports = {
saveJslData_meta: true,
async saveJslData({ folder, file, jslid, changeSet }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
const source = getJslFileName(jslid);
const target = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
if (changeSet) {
@@ -232,11 +277,20 @@ module.exports = {
saveRows_meta: true,
async saveRows({ folder, file, rows }) {
const fileStream = fs.createWriteStream(path.join(resolveArchiveFolder(folder), `${file}.jsonl`));
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
const filePath = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
const fileStream = fs.createWriteStream(filePath);
for (const row of rows) {
await fileStream.write(JSON.stringify(row) + '\n');
const ok = fileStream.write(JSON.stringify(row) + '\n');
if (!ok) {
await new Promise(resolve => fileStream.once('drain', resolve));
}
}
await fileStream.close();
await new Promise((resolve, reject) => {
fileStream.end(() => resolve());
fileStream.on('error', reject);
});
socket.emitChanged(`archive-files-changed`, { folder });
return true;
},
@@ -256,6 +310,8 @@ module.exports = {
getArchiveData_meta: true,
async getArchiveData({ folder, file }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
let rows;
if (folder.endsWith('.zip')) {
rows = await unzipJsonLinesFile(path.join(archivedir(), folder), `${file}.jsonl`);
@@ -270,7 +326,7 @@ module.exports = {
if (!fileName?.endsWith('.zip')) {
throw new Error(`${fileName} is not a ZIP file`);
}
assertSafeArchiveName(fileName.slice(0, -4), 'fileName');
const folder = await this.getNewArchiveFolder({ database: fileName });
await fs.copyFile(filePath, path.join(archivedir(), folder));
socket.emitChanged(`archive-folders-changed`);
@@ -280,6 +336,7 @@ module.exports = {
zip_meta: true,
async zip({ folder }) {
assertSafeArchiveName(folder, 'folder');
const newFolder = await this.getNewArchiveFolder({ database: folder + '.zip' });
await zipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
socket.emitChanged(`archive-folders-changed`);
@@ -289,6 +346,7 @@ module.exports = {
unzip_meta: true,
async unzip({ folder }) {
assertSafeArchiveName(folder, 'folder');
const newFolder = await this.getNewArchiveFolder({ database: folder.slice(0, -4) });
await unzipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
socket.emitChanged(`archive-folders-changed`);
@@ -298,6 +356,7 @@ module.exports = {
getZippedPath_meta: true,
async getZippedPath({ folder }) {
assertSafeArchiveName(folder, 'folder');
if (folder.endsWith('.zip')) {
return { filePath: path.join(archivedir(), folder) };
}
+60 -3
View File
@@ -202,7 +202,7 @@ module.exports = {
const storageConnections = await storage.connections(req);
if (storageConnections) {
return storageConnections;
return storageConnections.map(maskConnection);
}
if (portalConnections) {
if (platformInfo.allowShellConnection) return portalConnections.map(x => encryptConnection(x));
@@ -484,7 +484,7 @@ module.exports = {
const storageConnection = await storage.getConnection({ conid });
if (storageConnection) {
return storageConnection;
return mask ? maskConnection(storageConnection) : storageConnection;
}
if (portalConnections) {
@@ -492,7 +492,61 @@ module.exports = {
return mask && !platformInfo.allowShellConnection ? maskConnection(res) : encryptConnection(res);
}
const res = await this.datastore.get(conid);
return res || null;
if (res) return res;
// In a forked runner-script child process, ask the parent for connections that may be
// volatile (in-memory only, e.g. ask-for-password). We only do this when
// there really is a parent (process.send exists) to avoid an infinite loop
// when the parent's own getCore falls through here.
// The check is intentionally narrow: only runner scripts pass
// --process-display-name script, so connect/session/ssh-forward subprocesses
// are not affected and continue to return null immediately.
if (process.send && processArgs.processDisplayName === 'script') {
const conn = await new Promise(resolve => {
let resolved = false;
const cleanup = () => {
process.removeListener('message', handler);
process.removeListener('disconnect', onDisconnect);
clearTimeout(timeout);
};
const settle = value => {
if (!resolved) {
resolved = true;
cleanup();
resolve(value);
}
};
const handler = message => {
if (message?.msgtype === 'volatile-connection-response' && message.conid === conid) {
settle(message.conn || null);
}
};
const onDisconnect = () => settle(null);
const timeout = setTimeout(() => settle(null), 5000);
// Don't let the timer alone keep the process alive if all other work is done
timeout.unref();
process.on('message', handler);
process.once('disconnect', onDisconnect);
try {
process.send({ msgtype: 'get-volatile-connection', conid });
} catch {
settle(null);
}
});
if (conn) {
volatileConnections[conn._id] = conn; // cache for subsequent calls
return conn;
}
}
return null;
},
get_meta: true,
@@ -502,6 +556,9 @@ module.exports = {
_id: '__model',
};
}
if (!conid) {
return null;
}
await testConnectionPermission(conid, req);
return this.getCore({ conid, mask: true });
},
@@ -15,6 +15,7 @@ const {
getLogger,
extractErrorLogData,
filterStructureBySchema,
serializeJsTypesForJsonStringify,
} = require('dbgate-tools');
const { html, parse } = require('diff2html');
const { handleProcessCommunication } = require('../utility/processComm');
@@ -94,10 +95,12 @@ module.exports = {
}
},
handle_response(conid, database, { msgid, ...response }) {
const [resolve, reject, additionalData] = this.requests[msgid];
resolve(response);
if (additionalData?.auditLogger) {
additionalData?.auditLogger(response);
const [resolve, reject, additionalData] = this.requests[msgid] || [];
if (resolve) {
resolve(response);
if (additionalData?.auditLogger) {
additionalData?.auditLogger(response);
}
}
delete this.requests[msgid];
},
@@ -165,6 +168,11 @@ module.exports = {
if (!connection) {
throw new Error(`databaseConnections: Connection with conid="${conid}" not found`);
}
if (connection.engine?.endsWith('@rest')) {
return { isApiConnection: true };
}
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
}
@@ -219,12 +227,13 @@ module.exports = {
this.close(conid, database, false);
});
subprocess.send({
const connectMessage = serializeJsTypesForJsonStringify({
msgtype: 'connect',
connection: { ...connection, database },
structure: lastClosed ? lastClosed.structure : null,
globalSettings: await config.getSettings(),
});
subprocess.send(connectMessage);
return newOpened;
},
@@ -232,9 +241,10 @@ module.exports = {
sendRequest(conn, message, additionalData = {}) {
const msgid = crypto.randomUUID();
const promise = new Promise((resolve, reject) => {
this.requests[msgid] = [resolve, reject, additionalData];
this.requests[msgid] = [resolve, reject, additionalData, conn.conid, conn.database];
try {
conn.subprocess.send({ msgid, ...message });
const serializedMessage = serializeJsTypesForJsonStringify({ msgid, ...message });
conn.subprocess.send(serializedMessage);
} catch (err) {
logger.error(extractErrorLogData(err), 'DBGM-00115 Error sending request do process');
this.close(conn.conid, conn.database);
@@ -256,12 +266,12 @@ module.exports = {
},
sqlSelect_meta: true,
async sqlSelect({ conid, database, select, auditLogSessionGroup }, req) {
async sqlSelect({ conid, database, select, commandTimeout, auditLogSessionGroup }, req) {
await testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid, database);
const res = await this.sendRequest(
opened,
{ msgtype: 'sqlSelect', select },
{ msgtype: 'sqlSelect', select, commandTimeout },
{
auditLogger:
auditLogSessionGroup && select?.from?.name?.pureName
@@ -336,9 +346,12 @@ module.exports = {
},
collectionData_meta: true,
async collectionData({ conid, database, options, auditLogSessionGroup }, req) {
async collectionData({ conid, database, options, commandTimeout, auditLogSessionGroup }, req) {
await testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid, database);
if (commandTimeout && options) {
options.commandTimeout = commandTimeout;
}
const res = await this.sendRequest(
opened,
{ msgtype: 'collectionData', options },
@@ -468,6 +481,7 @@ module.exports = {
const databasePermissions = await loadDatabasePermissionsFromRequest(req);
const tablePermissions = await loadTablePermissionsFromRequest(req);
const databasePermissionRole = getDatabasePermissionRole(conid, database, databasePermissions);
const fieldsAndRoles = [
[changeSet.inserts, 'create_update_delete'],
[changeSet.deletes, 'create_update_delete'],
@@ -482,7 +496,7 @@ module.exports = {
operation.schemaName,
operation.pureName,
tablePermissions,
databasePermissions
databasePermissionRole
);
if (getTablePermissionRoleLevelIndex(role) < getTablePermissionRoleLevelIndex(requiredRole)) {
throw new Error('DBGM-00262 Permission not granted');
@@ -571,6 +585,24 @@ module.exports = {
};
},
pingDatabases_meta: true,
async pingDatabases({ databases }, req) {
if (!databases || !Array.isArray(databases)) return { status: 'ok' };
for (const { conid, database } of databases) {
if (!conid || !database) continue;
const existing = this.opened.find(x => x.conid == conid && x.database == database);
if (existing) {
try {
existing.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error(extractErrorLogData(err), 'DBGM-00308 Error pinging DB connection');
this.close(conid, database);
}
}
}
return { status: 'ok' };
},
refresh_meta: true,
async refresh({ conid, database, keepOpen }, req) {
await testConnectionPermission(conid, req);
@@ -613,6 +645,15 @@ module.exports = {
structure: existing.structure,
};
socket.emitChanged(`database-status-changed`, { conid, database });
// Reject all pending requests for this connection
for (const [msgid, entry] of Object.entries(this.requests)) {
const [resolve, reject, additionalData, reqConid, reqDatabase] = entry;
if (reqConid === conid && reqDatabase === database) {
reject('DBGM-00309 Database connection closed');
delete this.requests[msgid];
}
}
}
},
+45 -4
View File
@@ -15,7 +15,8 @@ const getDiagramExport = require('../utility/getDiagramExport');
const apps = require('./apps');
const getMapExport = require('../utility/getMapExport');
const dbgateApi = require('../shell');
const { getLogger } = require('dbgate-tools');
const { getLogger, getSqlFrontMatter } = require('dbgate-tools');
const yaml = require('js-yaml');
const platformInfo = require('../utility/platformInfo');
const { checkSecureFilePathsWithoutDirectory, checkSecureDirectories } = require('../utility/security');
const { copyAppLogsIntoFile, getRecentAppLogRecords } = require('../utility/appLogStore');
@@ -35,13 +36,46 @@ function deserialize(format, text) {
module.exports = {
list_meta: true,
async list({ folder }, req) {
async list({ folder, parseFrontMatter }, req) {
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!hasPermission(`files/${folder}/read`, loadedPermissions)) return [];
const dir = path.join(filesdir(), folder);
if (!(await fs.exists(dir))) return [];
const files = (await fs.readdir(dir)).map(file => ({ folder, file }));
return files;
const fileNames = await fs.readdir(dir);
if (!parseFrontMatter) {
return fileNames.map(file => ({ folder, file }));
}
const result = [];
for (const file of fileNames) {
const item = { folder, file };
let fh;
try {
fh = await require('fs').promises.open(path.join(dir, file), 'r');
const buf = new Uint8Array(512);
const { bytesRead } = await fh.read(buf, 0, 512, 0);
let text = Buffer.from(buf.buffer, 0, bytesRead).toString('utf-8');
if (text.includes('-- >>>') && !text.includes('-- <<<')) {
const stat = await fh.stat();
const fullSize = Math.min(stat.size, 4096);
if (fullSize > 512) {
const fullBuf = new Uint8Array(fullSize);
const { bytesRead: fullBytesRead } = await fh.read(fullBuf, 0, fullSize, 0);
text = Buffer.from(fullBuf.buffer, 0, fullBytesRead).toString('utf-8');
}
}
const fm = getSqlFrontMatter(text, yaml);
if (fm?.connectionId) item.connectionId = fm.connectionId;
if (fm?.databaseName) item.databaseName = fm.databaseName;
} catch (e) {
// ignore read errors for individual files
} finally {
if (fh) await fh.close().catch(() => {});
}
result.push(item);
}
return result;
},
listAll_meta: true,
@@ -257,6 +291,13 @@ module.exports = {
return true;
},
exportDiagramPng_meta: true,
async exportDiagramPng({ filePath, pngBase64 }) {
const base64 = pngBase64.replace(/^data:image\/png;base64,/, '');
await fs.writeFile(filePath, Buffer.from(base64, 'base64'));
return true;
},
getFileRealPath_meta: true,
async getFileRealPath({ folder, file }, req) {
const loadedPermissions = await loadPermissionsFromRequest(req);
+74 -1
View File
@@ -1,5 +1,8 @@
const { filterName } = require('dbgate-tools');
const { filterName, getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('jsldata');
const { jsldir, archivedir } = require('../utility/directories');
const fs = require('fs');
const path = require('path');
const lineReader = require('line-reader');
const _ = require('lodash');
const { __ } = require('lodash/fp');
@@ -149,6 +152,10 @@ module.exports = {
getRows_meta: true,
async getRows({ jslid, offset, limit, filters, sort, formatterFunction }) {
const fileName = getJslFileName(jslid);
if (!fs.existsSync(fileName)) {
return [];
}
const datastore = await this.ensureDatastore(jslid, formatterFunction);
return datastore.getRows(offset, limit, _.isEmpty(filters) ? null : filters, _.isEmpty(sort) ? null : sort);
},
@@ -159,6 +166,72 @@ module.exports = {
return fs.existsSync(fileName);
},
streamRows_meta: {
method: 'get',
raw: true,
},
streamRows(req, res) {
const { jslid } = req.query;
if (!jslid) {
res.status(400).json({ apiErrorMessage: 'Missing jslid' });
return;
}
// Reject file:// jslids — they resolve to arbitrary server-side paths
if (jslid.startsWith('file://')) {
res.status(403).json({ apiErrorMessage: 'Forbidden jslid scheme' });
return;
}
const fileName = getJslFileName(jslid);
if (!fs.existsSync(fileName)) {
res.status(404).json({ apiErrorMessage: 'File not found' });
return;
}
// Dereference symlinks and normalize case (Windows) before the allow-list check.
// realpathSync is safe here because existsSync confirmed the file is present.
// path.resolve() alone cannot dereference symlinks, so a symlink inside an allowed
// root could otherwise point to an arbitrary external path.
const normalize = p => (process.platform === 'win32' ? p.toLowerCase() : p);
const resolveRoot = r => { try { return fs.realpathSync(r); } catch { return path.resolve(r); } };
let realFile;
try {
realFile = fs.realpathSync(fileName);
} catch {
res.status(403).json({ apiErrorMessage: 'Forbidden path' });
return;
}
const allowedRoots = [jsldir(), archivedir()].map(r => normalize(resolveRoot(r)) + path.sep);
const isAllowed = allowedRoots.some(root => normalize(realFile).startsWith(root));
if (!isAllowed) {
logger.warn({ jslid, realFile }, 'DBGM-00000 streamRows rejected path outside allowed roots');
res.status(403).json({ apiErrorMessage: 'Forbidden path' });
return;
}
res.setHeader('Content-Type', 'application/x-ndjson');
res.setHeader('Cache-Control', 'no-cache');
const stream = fs.createReadStream(realFile, 'utf-8');
req.on('close', () => {
stream.destroy();
});
stream.on('error', err => {
logger.error(extractErrorLogData(err), 'DBGM-00000 Error streaming JSONL file');
if (!res.headersSent) {
res.status(500).json({ apiErrorMessage: 'Stream error' });
} else {
res.end();
}
});
stream.pipe(res);
},
getStats_meta: true,
getStats({ jslid }) {
const file = `${getJslFileName(jslid)}.stats`;
+22 -6
View File
@@ -33,19 +33,35 @@ function readCore(reader, skip, limit, filter) {
});
}
module.exports = {
read_meta: true,
async read({ skip, limit, filter }) {
function readJsonl({ skip, limit, filter }) {
return new Promise(async (resolve, reject) => {
const fileName = path.join(datadir(), 'query-history.jsonl');
// @ts-ignore
if (!(await fs.exists(fileName))) return [];
if (!(await fs.exists(fileName))) return resolve([]);
const reader = fsReverse(fileName);
const res = await readCore(reader, skip, limit, filter);
return res;
resolve(res);
});
}
module.exports = {
read_meta: true,
async read({ skip, limit, filter }, req) {
const storage = require('./storage');
const storageResult = await storage.readQueryHistory({ skip, limit, filter }, req);
if (storageResult) return storageResult;
return readJsonl({ skip, limit, filter });
},
write_meta: true,
async write({ data }) {
async write({ data }, req) {
const storage = require('./storage');
const written = await storage.writeQueryHistory({ data }, req);
if (written) {
socket.emit('query-history-changed');
return 'OK';
}
const fileName = path.join(datadir(), 'query-history.jsonl');
await fs.appendFile(fileName, JSON.stringify(data) + '\n');
socket.emit('query-history-changed');
@@ -0,0 +1,41 @@
module.exports = {
disconnect_meta: true,
async disconnect({ conid }, req) {
return null;
},
getApiInfo_meta: true,
async getApiInfo({ conid }, req) {
return null;
},
restStatus_meta: true,
async restStatus() {
return {};
},
ping_meta: true,
async ping({ conidArray, strmid }) {
return null;
},
refresh_meta: true,
async refresh({ conid, keepOpen }, req) {
return null;
},
testConnection_meta: true,
async testConnection({ conid }, req) {
return null;
},
execute_meta: true,
async execute({ conid, method, endpoint, parameters, server }, req) {
return null;
},
apiQuery_meta: true,
async apiQuery({ conid, server, query, variables }, req) {
return null;
},
};
+41 -17
View File
@@ -10,6 +10,7 @@ const {
extractShellApiPlugins,
compileShellApiFunctionName,
jsonScriptToJavascript,
assertValidShellApiFunctionName,
getLogger,
safeJsonParse,
pinoLogRecordToMessageRecord,
@@ -54,19 +55,23 @@ logger.info('DBGM-00014 Finished job script');
dbgateApi.runScript(run);
`;
const loaderScriptTemplate = (prefix, functionName, props, runid) => `
const loaderScriptTemplate = (functionName, props, runid) => {
const plugins = extractShellApiPlugins(functionName, props);
const prefix = plugins.map(packageName => `// @require ${packageName}\n`).join('');
return `
${prefix}
const dbgateApi = require(process.env.DBGATE_API);
dbgateApi.initializeApiEnvironment();
${requirePluginsTemplate(extractShellApiPlugins(functionName, props))}
${requirePluginsTemplate(plugins)}
require=null;
async function run() {
const reader=await ${compileShellApiFunctionName(functionName)}(${JSON.stringify(props)});
const writer=await dbgateApi.collectorWriter({runid: '${runid}'});
const writer=await dbgateApi.collectorWriter({runid: ${JSON.stringify(runid)}});
await dbgateApi.copyStream(reader, writer);
}
dbgateApi.runScript(run);
`;
};
module.exports = {
/** @type {import('dbgate-types').OpenedRunner[]} */
@@ -172,7 +177,7 @@ module.exports = {
byline(subprocess.stderr).on('data', pipeDispatcher('error'));
subprocess.on('exit', code => {
// console.log('... EXITED', code);
this.rejectRequest(runid, { message: 'No data returned, maybe input data source is too big' });
this.rejectRequest(runid, { message: 'DBGM-00281 No data returned, maybe input data source is too big' });
logger.info({ code, pid: subprocess.pid }, 'DBGM-00016 Exited process');
socket.emit(`runner-done-${runid}`, code);
this.opened = this.opened.filter(x => x.runid != runid);
@@ -196,6 +201,27 @@ module.exports = {
// @ts-ignore
const { msgtype } = message;
if (handleProcessCommunication(message, subprocess)) return;
if (msgtype === 'get-volatile-connection') {
const connections = require('./connections');
// @ts-ignore
const conid = message.conid;
if (!conid || typeof conid !== 'string') return;
const trySend = payload => {
if (!subprocess.connected) return;
try {
subprocess.send(payload);
} catch {
// child disconnected between the check and the send — ignore
}
};
connections.getCore({ conid }).then(conn => {
trySend({ msgtype: 'volatile-connection-response', conid, conn: conn?.unsaved ? conn : null });
}).catch(err => {
logger.error({ ...extractErrorLogData(err), conid }, 'DBGM-00000 Error resolving volatile connection for child process');
trySend({ msgtype: 'volatile-connection-response', conid, conn: null });
});
return;
}
this[`handle_${msgtype}`](runid, message);
});
return _.pick(newOpened, ['runid']);
@@ -225,7 +251,7 @@ module.exports = {
subprocess.on('exit', code => {
console.log('... EXITED', code);
logger.info({ code, pid: subprocess.pid }, 'DBGM-00017 Exited process');
this.dispatchMessage(runid, `Finished external process with code ${code}`);
this.dispatchMessage(runid, `DBGM-00282 Finished external process with code ${code}`);
socket.emit(`runner-done-${runid}`, code);
if (onFinished) {
onFinished();
@@ -233,7 +259,7 @@ module.exports = {
this.opened = this.opened.filter(x => x.runid != runid);
});
subprocess.on('spawn', () => {
this.dispatchMessage(runid, `Started external process ${command}`);
this.dispatchMessage(runid, `DBGM-00283 Started external process ${command}`);
});
subprocess.on('error', error => {
console.log('... ERROR subprocess', error);
@@ -279,7 +305,7 @@ module.exports = {
if (script.type == 'json') {
if (!platformInfo.isElectron) {
if (!checkSecureDirectoriesInScript(script)) {
return { errorMessage: 'Unallowed directories in script' };
return { errorMessage: 'DBGM-00284 Unallowed directories in script' };
}
}
@@ -299,10 +325,10 @@ module.exports = {
action: 'script',
severity: 'warn',
detail: script,
message: 'Scripts are not allowed',
message: 'DBGM-00285 Scripts are not allowed',
});
return { errorMessage: 'Shell scripting is not allowed' };
return { errorMessage: 'DBGM-00286 Shell scripting is not allowed' };
}
sendToAuditLog(req, {
@@ -312,7 +338,7 @@ module.exports = {
action: 'script',
severity: 'info',
detail: script,
message: 'Running JS script',
message: 'DBGM-00287 Running JS script',
});
return this.startCore(runid, scriptTemplate(script, false));
@@ -327,7 +353,7 @@ module.exports = {
async cancel({ runid }) {
const runner = this.opened.find(x => x.runid == runid);
if (!runner) {
throw new Error('Invalid runner');
throw new Error('DBGM-00288 Invalid runner');
}
runner.subprocess.kill();
return { state: 'ok' };
@@ -353,17 +379,15 @@ module.exports = {
async loadReader({ functionName, props }) {
if (!platformInfo.isElectron) {
if (props?.fileName && !checkSecureDirectories(props.fileName)) {
return { errorMessage: 'Unallowed file' };
return { errorMessage: 'DBGM-00289 Unallowed file' };
}
}
const prefix = extractShellApiPlugins(functionName)
.map(packageName => `// @require ${packageName}\n`)
.join('');
const promise = new Promise((resolve, reject) => {
assertValidShellApiFunctionName(functionName);
const runid = crypto.randomUUID();
this.requests[runid] = { resolve, reject, exitOnStreamError: true };
this.startCore(runid, loaderScriptTemplate(prefix, functionName, props, runid));
this.startCore(runid, loaderScriptTemplate(functionName, props, runid));
});
return promise;
},
@@ -371,7 +395,7 @@ module.exports = {
scriptResult_meta: true,
async scriptResult({ script }) {
if (script.type != 'json') {
return { errorMessage: 'Only JSON scripts are allowed' };
return { errorMessage: 'DBGM-00290 Only JSON scripts are allowed' };
}
const promise = new Promise(async (resolve, reject) => {
@@ -171,7 +171,7 @@ module.exports = {
const databasePermissions = await loadDatabasePermissionsFromRequest(req);
const res = [];
for (const db of opened?.databases ?? []) {
const databasePermissionRole = getDatabasePermissionRole(db.id, db.name, databasePermissions);
const databasePermissionRole = getDatabasePermissionRole(conid, db.name, databasePermissions);
if (databasePermissionRole != 'deny') {
res.push({
...db,
+13
View File
@@ -228,6 +228,19 @@ module.exports = {
return { state: 'ok' };
},
setIsolationLevel_meta: true,
async setIsolationLevel({ sesid, level }) {
const session = this.opened.find(x => x.sesid == sesid);
if (!session) {
throw new Error('Invalid session');
}
logger.info({ sesid, level }, 'DBGM-00315 Setting transaction isolation level');
session.subprocess.send({ msgtype: 'setIsolationLevel', level });
return { state: 'ok' };
},
executeReader_meta: true,
async executeReader({ conid, database, sql, queryName, appFolder }) {
const { sesid } = await this.create({ conid, database });
+2
View File
@@ -14,6 +14,7 @@ const socket = require('./utility/socket');
const connections = require('./controllers/connections');
const serverConnections = require('./controllers/serverConnections');
const databaseConnections = require('./controllers/databaseConnections');
const restConnections = require('./controllers/restConnections');
const metadata = require('./controllers/metadata');
const sessions = require('./controllers/sessions');
const runners = require('./controllers/runners');
@@ -267,6 +268,7 @@ function useAllControllers(app, electron) {
useController(app, electron, '/auth', auth);
useController(app, electron, '/cloud', cloud);
useController(app, electron, '/team-files', teamFiles);
useController(app, electron, '/rest-connections', restConnections);
}
function setElectronSender(electronSender) {
+4 -1
View File
@@ -1,6 +1,6 @@
const childProcessChecker = require('../utility/childProcessChecker');
const requireEngineDriver = require('../utility/requireEngineDriver');
const { connectUtility } = require('../utility/connectUtility');
const { connectUtility, getRestAuthFromConnection } = require('../utility/connectUtility');
const { handleProcessCommunication } = require('../utility/processComm');
const { pickSafeConnectionInfo } = require('../utility/crypting');
const _ = require('lodash');
@@ -29,6 +29,9 @@ function start() {
try {
const driver = requireEngineDriver(connection);
const connectionChanged = driver?.beforeConnectionSave ? driver.beforeConnectionSave(connection) : connection;
if (driver?.databaseEngineTypes?.includes('rest')) {
connectionChanged.restAuth = getRestAuthFromConnection(connection);
}
if (!connection.isVolatileResolved) {
if (connectionChanged.useRedirectDbLogin) {
@@ -234,12 +234,12 @@ async function handleRunOperation({ msgid, operation, useTransaction }, skipRead
}
}
async function handleQueryData({ msgid, sql, range }, skipReadonlyCheck = false) {
async function handleQueryData({ msgid, sql, range, commandTimeout }, skipReadonlyCheck = false) {
await waitConnected();
const driver = requireEngineDriver(storedConnection);
try {
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
const res = await driver.query(dbhan, sql, { range });
const res = await driver.query(dbhan, sql, { range, commandTimeout });
process.send({ msgtype: 'response', msgid, ...serializeJsTypesForJsonStringify(res) });
} catch (err) {
process.send({
@@ -250,11 +250,11 @@ async function handleQueryData({ msgid, sql, range }, skipReadonlyCheck = false)
}
}
async function handleSqlSelect({ msgid, select }) {
async function handleSqlSelect({ msgid, select, commandTimeout }) {
const driver = requireEngineDriver(storedConnection);
const dmp = driver.createDumper();
dumpSqlSelect(dmp, select);
return handleQueryData({ msgid, sql: dmp.s, range: select.range }, true);
return handleQueryData({ msgid, sql: dmp.s, range: select.range, commandTimeout }, true);
}
async function handleDriverDataCore(msgid, callMethod, { logName }) {
+2
View File
@@ -1,6 +1,7 @@
const connectProcess = require('./connectProcess');
const databaseConnectionProcess = require('./databaseConnectionProcess');
const serverConnectionProcess = require('./serverConnectionProcess');
const restConnectionProcess = require('./restConnectionProcess');
const sessionProcess = require('./sessionProcess');
const jslDatastoreProcess = require('./jslDatastoreProcess');
const sshForwardProcess = require('./sshForwardProcess');
@@ -9,6 +10,7 @@ module.exports = {
connectProcess,
databaseConnectionProcess,
serverConnectionProcess,
restConnectionProcess,
sessionProcess,
jslDatastoreProcess,
sshForwardProcess,
@@ -0,0 +1,7 @@
const childProcessChecker = require('../utility/childProcessChecker');
function start() {
childProcessChecker();
}
module.exports = { start };
+33
View File
@@ -77,6 +77,38 @@ async function handleStopProfiler({ jslid }) {
currentProfiler = null;
}
async function handleSetIsolationLevel({ level }) {
lastActivity = new Date().getTime();
await waitConnected();
const driver = requireEngineDriver(storedConnection);
if (!driver.setTransactionIsolationLevel) {
process.send({ msgtype: 'done', skipFinishedMessage: true });
return;
}
if (driver.isolationLevels && level && !driver.isolationLevels.includes(level)) {
process.send({
msgtype: 'info',
info: {
message: `Isolation level "${level}" is not supported by this driver. Supported levels: ${driver.isolationLevels.join(', ')}`,
severity: 'error',
},
});
process.send({ msgtype: 'done', skipFinishedMessage: true });
return;
}
executingScripts++;
try {
await driver.setTransactionIsolationLevel(dbhan, level);
process.send({ msgtype: 'done', controlCommand: 'setIsolationLevel' });
} finally {
executingScripts--;
}
}
async function handleExecuteControlCommand({ command }) {
lastActivity = new Date().getTime();
@@ -210,6 +242,7 @@ const messageHandlers = {
connect: handleConnect,
executeQuery: handleExecuteQuery,
executeControlCommand: handleExecuteControlCommand,
setIsolationLevel: handleSetIsolationLevel,
executeReader: handleExecuteReader,
startProfiler: handleStartProfiler,
stopProfiler: handleStopProfiler,
+9 -3
View File
@@ -4,7 +4,8 @@ const { pluginsdir, packagedPluginsDir, getPluginBackendPath } = require('../uti
const platformInfo = require('../utility/platformInfo');
const authProxy = require('../utility/authProxy');
const { getLogger } = require('dbgate-tools');
//
const { openApiDriver, graphQlDriver, oDataDriver } = require('dbgate-rest');
//
const logger = getLogger('requirePlugin');
const loadedPlugins = {};
@@ -13,16 +14,21 @@ const dbgateEnv = {
dbgateApi: null,
platformInfo,
authProxy,
isProApp: () =>{
isProApp: () => {
const { isProApp } = require('../utility/checkLicense');
return isProApp();
}
},
};
function requirePlugin(packageName, requiredPlugin = null) {
if (!packageName) throw new Error('Missing packageName in plugin');
if (loadedPlugins[packageName]) return loadedPlugins[packageName];
if (requiredPlugin == null) {
if (packageName.endsWith('@rest') || packageName === 'rest') {
return {
drivers: [openApiDriver, graphQlDriver, oDataDriver],
};
}
let module;
const modulePath = getPluginBackendPath(packageName);
logger.info(`DBGM-00062 Loading module ${packageName} from ${modulePath}`);
+1
View File
@@ -7,6 +7,7 @@ async function runScript(func) {
if (processArgs.checkParent) {
childProcessChecker();
}
try {
await func();
process.exit(0);
+56 -7
View File
@@ -16,23 +16,53 @@ function unzipDirectory(zipPath, outputDirectory) {
return new Promise((resolve, reject) => {
yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
if (err) return reject(err);
let settled = false;
/** Track active streams so we can destroy them on early abort */
const activeStreams = new Set();
const safeReject = rejectErr => {
if (settled) return;
settled = true;
for (const s of activeStreams) {
s.destroy();
}
activeStreams.clear();
zipFile.close();
reject(rejectErr);
};
/** Pending per-file extractions we resolve the main promise after theyre all done */
const pending = [];
// Resolved output boundary used for zip-slip checks on every entry
const resolvedOutputDir = path.resolve(outputDirectory);
// kick things off
zipFile.readEntry();
zipFile.on('entry', entry => {
// Null-byte poison check
if (entry.fileName.includes('\0')) {
return safeReject(new Error(`DBGM-00000 ZIP entry with null byte in filename rejected`));
}
const destPath = path.join(outputDirectory, entry.fileName);
const resolvedDest = path.resolve(destPath);
// Zip-slip protection: every extracted path must stay inside outputDirectory
if (resolvedDest !== resolvedOutputDir && !resolvedDest.startsWith(resolvedOutputDir + path.sep)) {
return safeReject(
new Error(`DBGM-00000 ZIP slip detected: entry "${entry.fileName}" would escape output directory`)
);
}
// Handle directories (their names always end with “/” in ZIPs)
if (/\/$/.test(entry.fileName)) {
// Ensure directory exists, then continue to next entry
fs.promises
.mkdir(destPath, { recursive: true })
.then(() => zipFile.readEntry())
.catch(reject);
.then(() => {
if (!settled) zipFile.readEntry();
})
.catch(safeReject);
return;
}
@@ -46,17 +76,29 @@ function unzipDirectory(zipPath, outputDirectory) {
if (err) return rej(err);
const writeStream = fs.createWriteStream(destPath);
activeStreams.add(readStream);
activeStreams.add(writeStream);
readStream.pipe(writeStream);
// proceed to next entry once weve consumed *this* one
readStream.on('end', () => zipFile.readEntry());
// proceed to next entry once we've consumed *this* one
readStream.on('end', () => {
activeStreams.delete(readStream);
if (!settled) zipFile.readEntry();
});
readStream.on('error', readErr => {
activeStreams.delete(readStream);
rej(readErr);
});
writeStream.on('finish', () => {
activeStreams.delete(writeStream);
logger.info(`DBGM-00068 Extracted "${entry.fileName}" → "${destPath}".`);
res();
});
writeStream.on('error', writeErr => {
activeStreams.delete(writeStream);
logger.error(
extractErrorLogData(writeErr),
`DBGM-00069 Error extracting "${entry.fileName}" from "${zipPath}".`
@@ -67,22 +109,29 @@ function unzipDirectory(zipPath, outputDirectory) {
})
);
// Immediately abort the whole unzip if this file fails; otherwise the
// zip would never emit 'end' (lazyEntries won't advance without readEntry).
filePromise.catch(safeReject);
pending.push(filePromise);
});
// Entire archive enumerated; wait for all streams to finish
zipFile.on('end', () => {
if (settled) return;
Promise.all(pending)
.then(() => {
if (settled) return;
settled = true;
zipFile.close();
logger.info(`DBGM-00070 Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
resolve(true);
})
.catch(reject);
.catch(safeReject);
});
zipFile.on('error', err => {
logger.error(extractErrorLogData(err), `DBGM-00071 ZIP file error in ${zipPath}.`);
reject(err);
safeReject(err);
});
});
});
+158 -26
View File
@@ -698,6 +698,30 @@ module.exports = {
"columnName": "id_original",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "httpProxyUrl",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "httpProxyUser",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "httpProxyPassword",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "defaultIsolationLevel",
"dataType": "varchar(250)",
"notNull": false
}
],
"foreignKeys": [
@@ -851,76 +875,106 @@ module.exports = {
]
},
{
"pureName": "password_reset_tokens",
"pureName": "query_history",
"columns": [
{
"pureName": "password_reset_tokens",
"pureName": "query_history",
"columnName": "id",
"dataType": "int",
"autoIncrement": true,
"notNull": true
},
{
"pureName": "password_reset_tokens",
"pureName": "query_history",
"columnName": "created",
"dataType": "bigint",
"notNull": true
},
{
"pureName": "query_history",
"columnName": "user_id",
"dataType": "int",
"notNull": true
"notNull": false
},
{
"pureName": "password_reset_tokens",
"columnName": "token",
"dataType": "varchar(500)",
"notNull": true
"pureName": "query_history",
"columnName": "role_id",
"dataType": "int",
"notNull": false
},
{
"pureName": "password_reset_tokens",
"columnName": "created_at",
"dataType": "datetime",
"notNull": true
"pureName": "query_history",
"columnName": "sql",
"dataType": "text",
"notNull": false
},
{
"pureName": "password_reset_tokens",
"columnName": "expires_at",
"dataType": "datetime",
"notNull": true
"pureName": "query_history",
"columnName": "conid",
"dataType": "varchar(100)",
"notNull": false
},
{
"pureName": "password_reset_tokens",
"columnName": "used_at",
"dataType": "datetime",
"pureName": "query_history",
"columnName": "database",
"dataType": "varchar(200)",
"notNull": false
}
],
"foreignKeys": [
{
"constraintType": "foreignKey",
"constraintName": "FK_password_reset_tokens_user_id",
"pureName": "password_reset_tokens",
"constraintName": "FK_query_history_user_id",
"pureName": "query_history",
"refTableName": "users",
"deleteAction": "CASCADE",
"columns": [
{
"columnName": "user_id",
"refColumnName": "id"
}
]
},
{
"constraintType": "foreignKey",
"constraintName": "FK_query_history_role_id",
"pureName": "query_history",
"refTableName": "roles",
"deleteAction": "CASCADE",
"columns": [
{
"columnName": "role_id",
"refColumnName": "id"
}
]
}
],
"indexes": [
{
"constraintName": "idx_token",
"pureName": "password_reset_tokens",
"constraintName": "idx_query_history_user_id",
"pureName": "query_history",
"constraintType": "index",
"columns": [
{
"columnName": "token"
"columnName": "user_id"
}
]
},
{
"constraintName": "idx_query_history_role_id",
"pureName": "query_history",
"constraintType": "index",
"columns": [
{
"columnName": "role_id"
}
]
}
],
"primaryKey": {
"pureName": "password_reset_tokens",
"pureName": "query_history",
"constraintType": "primaryKey",
"constraintName": "PK_password_reset_tokens",
"constraintName": "PK_query_history",
"columns": [
{
"columnName": "id"
@@ -2252,6 +2306,84 @@ module.exports = {
]
}
},
{
"pureName": "user_password_reset_tokens",
"columns": [
{
"pureName": "user_password_reset_tokens",
"columnName": "id",
"dataType": "int",
"autoIncrement": true,
"notNull": true
},
{
"pureName": "user_password_reset_tokens",
"columnName": "user_id",
"dataType": "int",
"notNull": true
},
{
"pureName": "user_password_reset_tokens",
"columnName": "token",
"dataType": "varchar(500)",
"notNull": true
},
{
"pureName": "user_password_reset_tokens",
"columnName": "created_at",
"dataType": "varchar(32)",
"notNull": true
},
{
"pureName": "user_password_reset_tokens",
"columnName": "expires_at",
"dataType": "varchar(32)",
"notNull": true
},
{
"pureName": "user_password_reset_tokens",
"columnName": "used_at",
"dataType": "varchar(32)",
"notNull": false
}
],
"foreignKeys": [
{
"constraintType": "foreignKey",
"constraintName": "FK_user_password_reset_tokens_user_id",
"pureName": "user_password_reset_tokens",
"refTableName": "users",
"columns": [
{
"columnName": "user_id",
"refColumnName": "id"
}
]
}
],
"indexes": [
{
"constraintName": "idx_token",
"pureName": "user_password_reset_tokens",
"constraintType": "index",
"columns": [
{
"columnName": "token"
}
]
}
],
"primaryKey": {
"pureName": "user_password_reset_tokens",
"constraintType": "primaryKey",
"constraintName": "PK_user_password_reset_tokens",
"columns": [
{
"columnName": "id"
}
]
}
},
{
"pureName": "user_permissions",
"columns": [
+57 -1
View File
@@ -1,9 +1,10 @@
const fs = require('fs-extra');
const { decryptConnection } = require('./crypting');
const { decryptConnection, decryptPasswordString } = require('./crypting');
const { getSshTunnelProxy } = require('./sshTunnelProxy');
const platformInfo = require('../utility/platformInfo');
const connections = require('../controllers/connections');
const _ = require('lodash');
const axios = require('axios');
async function loadConnection(driver, storedConnection, connectionMode) {
const { allowShellConnection, allowConnectionFromEnvVariables } = platformInfo;
@@ -132,11 +133,66 @@ async function connectUtility(driver, storedConnection, connectionMode, addition
connection.ssl = await extractConnectionSslParams(connection);
const proxyUrl = String(connection.httpProxyUrl ?? '').trim();
const proxyUser = String(connection.httpProxyUser ?? '').trim();
const proxyPassword = String(connection.httpProxyPassword ?? '').trim();
if (!proxyUrl && (proxyUser || proxyPassword)) {
throw new Error('DBGM-00329 Proxy user or password is set but proxy URL is missing');
}
if (proxyUrl) {
let parsedProxy;
try {
const parsed = new URL(proxyUrl.includes('://') ? proxyUrl : `http://${proxyUrl}`);
parsedProxy = {
protocol: parsed.protocol.replace(':', ''),
host: parsed.hostname,
port: parsed.port ? parseInt(parsed.port, 10) : (parsed.protocol === 'https:' ? 443 : 80),
};
const username = connection.httpProxyUser ?? parsed.username;
const rawPassword = connection.httpProxyPassword ?? parsed.password;
const password = decryptPasswordString(rawPassword);
if (username) {
parsedProxy.auth = { username, password: password ?? '' };
}
} catch (err) {
throw new Error(`DBGM-00334 Invalid proxy URL "${proxyUrl}": ${err && err.message ? err.message : err}`);
}
connection.axios = axios.default.create({ proxy: parsedProxy });
} else {
connection.axios = axios.default;
}
const conn = await driver.connect({ conid: connectionLoaded?._id, ...connection, ...additionalOptions });
return conn;
}
function getRestAuthFromConnection(connection) {
if (!connection) return null;
if (connection.authType == 'basic') {
return {
type: 'basic',
user: connection.user,
password: decryptPasswordString(connection.password),
};
}
if (connection.authType == 'bearer') {
return {
type: 'bearer',
token: connection.authToken,
};
}
if (connection.authType == 'apikey') {
return {
type: 'apikey',
header: connection.apiKeyHeader,
value: connection.apiKeyValue,
};
}
return null;
}
module.exports = {
extractConnectionSslParams,
connectUtility,
getRestAuthFromConnection,
};
+22 -2
View File
@@ -101,7 +101,27 @@ function decryptObjectPasswordField(obj, field, encryptor = null) {
return obj;
}
const fieldsToEncrypt = ['password', 'sshPassword', 'sshKeyfilePassword', 'connectionDefinition'];
const fieldsToEncrypt = ['password', 'sshPassword', 'sshKeyfilePassword', 'connectionDefinition', 'httpProxyPassword'];
const additionalFieldsToMask = [
'databaseUrl',
'server',
'port',
'user',
'sshBastionHost',
'sshHost',
'sshKeyFile',
'sshLogin',
'sshMode',
'sshPort',
'sslCaFile',
'sslCertFilePassword',
'sslKeyFile',
'sslRejectUnauthorized',
'secretAccessKey',
'accessKeyId',
'endpoint',
'endpointKey',
];
function encryptConnection(connection, encryptor = null) {
if (connection.passwordMode != 'saveRaw') {
@@ -114,7 +134,7 @@ function encryptConnection(connection, encryptor = null) {
function maskConnection(connection) {
if (!connection) return connection;
return _.omit(connection, fieldsToEncrypt);
return _.omit(connection, [...fieldsToEncrypt, ...additionalFieldsToMask]);
}
function decryptConnection(connection) {
+13 -2
View File
@@ -25,8 +25,14 @@ function extractConnectionsFromEnv(env) {
socketPath: env[`SOCKET_PATH_${id}`],
serviceName: env[`SERVICE_NAME_${id}`],
authType: env[`AUTH_TYPE_${id}`] || (env[`SOCKET_PATH_${id}`] ? 'socket' : undefined),
defaultDatabase: env[`DATABASE_${id}`] || (env[`FILE_${id}`] ? getDatabaseFileLabel(env[`FILE_${id}`]) : null),
singleDatabase: !!env[`DATABASE_${id}`] || !!env[`FILE_${id}`],
defaultDatabase:
env[`DATABASE_${id}`] ||
(env[`FILE_${id}`]
? getDatabaseFileLabel(env[`FILE_${id}`])
: env[`APISERVERURL1_${id}`]
? '_api_database_'
: null),
singleDatabase: !!env[`DATABASE_${id}`] || !!env[`FILE_${id}`] || !!env[`APISERVERURL1_${id}`],
displayName: env[`LABEL_${id}`],
isReadOnly: env[`READONLY_${id}`],
databases: env[`DBCONFIG_${id}`] ? safeJsonParse(env[`DBCONFIG_${id}`]) : null,
@@ -54,6 +60,11 @@ function extractConnectionsFromEnv(env) {
sslKeyFile: env[`SSL_KEY_FILE_${id}`],
sslRejectUnauthorized: env[`SSL_REJECT_UNAUTHORIZED_${id}`],
trustServerCertificate: env[`SSL_TRUST_CERTIFICATE_${id}`],
apiServerUrl1: env[`APISERVERURL1_${id}`],
apiServerUrl2: env[`APISERVERURL2_${id}`],
apiKeyHeader: env[`APIKEYHEADER_${id}`],
apiKeyValue: env[`APIKEYVALUE_${id}`],
}));
return connections;
+3 -2
View File
@@ -96,8 +96,9 @@ async function loadFilePermissionsFromRequest(req) {
}
function matchDatabasePermissionRow(conid, database, permissionRow) {
if (permissionRow.connection_id) {
if (conid != permissionRow.connection_id) {
const connectionIdentifier = permissionRow.connection_conid ?? permissionRow.connection_id;
if (connectionIdentifier) {
if (conid != connectionIdentifier) {
return false;
}
}
@@ -84,8 +84,12 @@ export function analyseCollectionDisplayColumns(rows, display) {
if (res.find(x => x.uniqueName == added)) continue;
res.push(getDisplayColumn([], added, display));
}
// Use driver-specific column sorting if available
const sortedColumns = display?.driver?.sortCollectionDisplayColumns ? display.driver.sortCollectionDisplayColumns(res) : res;
return (
res.map(col => ({
sortedColumns.map(col => ({
...col,
isChecked: display.isColumnChecked(col),
})) || []
+5 -2
View File
@@ -1,5 +1,6 @@
import _ from 'lodash';
import type { EngineDriver, ViewInfo, ColumnInfo } from 'dbgate-types';
import { evalFilterBehaviour } from 'dbgate-tools';
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
import { GridConfig, GridCache } from './GridConfig';
import { FreeTableModel } from './FreeTableModel';
@@ -11,13 +12,15 @@ export class FreeTableGridDisplay extends GridDisplay {
config: GridConfig,
setConfig: ChangeConfigFunc,
cache: GridCache,
setCache: ChangeCacheFunc
setCache: ChangeCacheFunc,
options: { filterable?: boolean } = {}
) {
super(config, setConfig, cache, setCache);
this.columns = model?.structure?.__isDynamicStructure
? analyseCollectionDisplayColumns(model?.rows, this)
: this.getDisplayColumns(model);
this.filterable = false;
this.filterable = options.filterable ?? false;
this.filterBehaviourOverride = evalFilterBehaviour;
this.sortable = false;
}
+1 -1
View File
@@ -1,5 +1,5 @@
# dbmodel
Deploy, load or build script from model of SQL database. Can be used as command-line tool. Uses [DbGate](https://dbgate.org) tooling and plugins for connecting many different databases.
Deploy, load or build script from model of SQL database. Can be used as command-line tool. Uses [DbGate](https://www.dbgate.io) tooling and plugins for connecting many different databases.
If you want to use this tool from JavaScript interface, please use [dbgate-api](https://www.npmjs.com/package/dbgate-api) package.
+1 -1
View File
@@ -1,7 +1,7 @@
{
"name": "dbmodel",
"version": "7.0.0-alpha.1",
"homepage": "https://dbgate.org/",
"homepage": "https://www.dbgate.io/",
"repository": {
"type": "git",
"url": "https://github.com/dbgate/dbgate.git"
+40 -1
View File
@@ -16,7 +16,46 @@ function getDateStringWithoutTimeZone(dateString) {
export function getFilterValueExpression(value, dataType?) {
if (value == null) return 'NULL';
if (isTypeDateTime(dataType)) return format(toDate(getDateStringWithoutTimeZone(value)), 'yyyy-MM-dd HH:mm:ss');
if (isTypeDateTime(dataType)) {
// Check for year as number (GROUP:YEAR)
if (typeof value === 'number' && Number.isInteger(value) && value >= 1000 && value <= 9999) {
return value.toString();
}
if (_isString(value)) {
// Year only
if (/^\d{4}$/.test(value)) {
return value;
}
// Year-month: validate month is in range 01-12
const yearMonthMatch = value.match(/^(\d{4})-(\d{1,2})$/);
if (yearMonthMatch) {
const month = parseInt(yearMonthMatch[2], 10);
if (month >= 1 && month <= 12) {
return value;
}
}
// Year-month-day: validate month and day
const yearMonthDayMatch = value.match(/^(\d{4})-(\d{1,2})-(\d{1,2})$/);
if (yearMonthDayMatch) {
const month = parseInt(yearMonthDayMatch[2], 10);
const day = parseInt(yearMonthDayMatch[3], 10);
// Quick validation: month 1-12, day 1-31
if (month >= 1 && month <= 12 && day >= 1 && day <= 31) {
// Construct a date to verify it's actually valid (e.g., reject 2024-02-30)
const dateStr = `${yearMonthDayMatch[1]}-${String(month).padStart(2, '0')}-${String(day).padStart(2, '0')}`;
const date = toDate(dateStr);
if (!isNaN(date.getTime())) {
return value;
}
}
}
}
return format(toDate(getDateStringWithoutTimeZone(value)), 'yyyy-MM-dd HH:mm:ss');
}
if (value === true) return 'TRUE';
if (value === false) return 'FALSE';
if (value.$oid) return `ObjectId("${value.$oid}")`;
+1
View File
@@ -0,0 +1 @@
lib
+7
View File
@@ -0,0 +1,7 @@
# dbgate-rest
REST API support for DbGate
## Installation
yarn add dbgate-rest
+6
View File
@@ -0,0 +1,6 @@
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
moduleFileExtensions: ['ts', 'js'],
reporters: ['default', 'github-actions'],
};
+42
View File
@@ -0,0 +1,42 @@
{
"version": "7.0.0-alpha.1",
"name": "dbgate-rest",
"main": "lib/index.js",
"typings": "lib/index.d.ts",
"homepage": "https://www.dbgate.io/",
"repository": {
"type": "git",
"url": "https://github.com/dbgate/dbgate.git"
},
"author": "Jan Prochazka",
"license": "GPL-3.0",
"keywords": [
"sql",
"dbgate"
],
"scripts": {
"build": "tsc",
"start": "tsc --watch",
"prepublishOnly": "yarn build",
"test": "jest",
"test:ci": "jest --json --outputFile=result.json --testLocationInResults"
},
"files": [
"lib"
],
"devDependencies": {
"@types/node": "^13.7.0",
"dbgate-types": "^7.0.0-alpha.1",
"jest": "^28.1.3",
"ts-jest": "^28.0.7",
"typescript": "^4.4.3"
},
"dependencies": {
"dbgate-tools": "^7.0.0-alpha.1",
"lodash": "^4.17.21",
"openapi-types": "^12.1.3",
"pinomin": "^1.0.5",
"uuid": "^3.4.0",
"js-yaml": "^4.1.0"
}
}
+90
View File
@@ -0,0 +1,90 @@
type FlatObject = Record<string, any>;

/** True for non-null objects that are not arrays. */
function isPlainObject(value: any): value is Record<string, any> {
  return !!value && typeof value === 'object' && !Array.isArray(value);
}

/**
 * Collapses a leaf value for flat display: arrays of primitives become a
 * comma-joined string, mixed/object arrays become JSON text, everything
 * else passes through untouched.
 */
function flattenValue(value: any) {
  if (!Array.isArray(value)) return value;
  const onlyPrimitives = !value.some(item => item != null && typeof item === 'object');
  return onlyPrimitives ? value.join(', ') : JSON.stringify(value);
}

/**
 * Flattens a nested object into dotted keys ("a.b.c"). Cycles are cut via
 * the shared WeakSet so re-visited objects contribute nothing further.
 */
function flattenObject(obj: Record<string, any>, prefix = '', out: FlatObject = {}, visited = new WeakSet()): FlatObject {
  if (visited.has(obj)) return out;
  visited.add(obj);
  Object.entries(obj).forEach(([key, value]) => {
    const flatKey = prefix ? `${prefix}.${key}` : key;
    if (isPlainObject(value)) {
      flattenObject(value, flatKey, out, visited);
    } else {
      out[flatKey] = flattenValue(value);
    }
  });
  return out;
}

/** Unwraps GraphQL-relay style `{ node: {...} }` wrappers; others pass through. */
function unwrapArrayItem(item: any) {
  const wrapsNode = isPlainObject(item) && isPlainObject(item.node);
  return wrapsNode ? item.node : item;
}

/**
 * Recursively gathers candidate row arrays from a response payload.
 * Well-known collection keys (edges/nodes/items) are collected eagerly;
 * recursion is capped at depth 10 and guarded against cycles.
 */
function collectArrayCandidates(
  value: any,
  set: Set<any[]>,
  visited = new WeakSet(),
  depth = 0
): void {
  if (depth > 10) return;
  if (Array.isArray(value)) {
    set.add(value);
    return;
  }
  if (!isPlainObject(value) || visited.has(value)) return;
  visited.add(value);
  for (const knownKey of ['edges', 'nodes', 'items']) {
    if (Array.isArray(value[knownKey])) set.add(value[knownKey]);
  }
  for (const nested of Object.values(value)) {
    collectArrayCandidates(nested, set, visited, depth + 1);
  }
}

/** Returns the single unambiguous row array in the payload, or null. */
function findUniqueArrayCandidate(value: any): any[] | null {
  if (Array.isArray(value)) return value;
  const candidates = new Set<any[]>();
  collectArrayCandidates(value, candidates);
  if (candidates.size !== 1) return null;
  const [onlyCandidate] = candidates;
  return onlyCandidate ?? null;
}

/**
 * Converts an arbitrary API payload into an array of flat row objects, or
 * undefined when no single row array can be identified. Non-object rows are
 * wrapped as `{ value }`.
 */
export function arrayifyToFlatObjects(input: any): FlatObject[] | undefined {
  const rows = findUniqueArrayCandidate(input);
  if (!rows) return undefined;
  return rows.map(raw => {
    const item = unwrapArrayItem(raw);
    return isPlainObject(item) ? flattenObject(item) : { value: item };
  });
}
+65
View File
@@ -0,0 +1,65 @@
import type { EngineDriver } from 'dbgate-types';
import { fetchGraphQLSchema, GraphQLIntrospectionResult } from './graphqlIntrospection';
import { apiDriverBase } from './restDriverBase';
import { buildRestAuthHeaders } from './restAuthTools';
// Fetches and validates the GraphQL introspection schema for a connection
// handle. Throws (with DBGM error codes) when the endpoint URL is missing or
// the introspection result is not a usable object.
async function loadGraphQlSchema(dbhan: any): Promise<GraphQLIntrospectionResult> {
  // apiServerUrl1 carries the GraphQL endpoint URL for this connection
  if (!dbhan?.connection?.apiServerUrl1) {
    throw new Error('DBGM-00310 GraphQL endpoint URL is not configured');
  }
  const introspectionResult = await fetchGraphQLSchema(
    dbhan.connection.apiServerUrl1,
    buildRestAuthHeaders(dbhan.connection.restAuth),
    dbhan.axios
  );
  // Guard against empty/HTML/error responses that are not schema objects
  if (!introspectionResult || typeof introspectionResult !== 'object') {
    throw new Error('DBGM-00311 GraphQL schema is empty or could not be loaded');
  }
  return introspectionResult;
}
// @ts-ignore
export const graphQlDriver: EngineDriver = {
...apiDriverBase,
engine: 'graphql@rest',
title: 'GraphQL',
databaseEngineTypes: ['rest', 'graphql'],
icon: '<svg version="1.1" id="GraphQL_Logo" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 400 400" enable-background="new 0 0 400 400" xml:space="preserve"><g><g><g><rect x="122" y="-0.4" transform="matrix(-0.866 -0.5 0.5 -0.866 163.3196 363.3136)" fill="#E535AB" width="16.6" height="320.3"/></g></g><g><g><rect x="39.8" y="272.2" fill="#E535AB" width="320.3" height="16.6"/></g></g><g><g><rect x="37.9" y="312.2" transform="matrix(-0.866 -0.5 0.5 -0.866 83.0693 663.3409)" fill="#E535AB" width="185" height="16.6"/></g></g><g><g><rect x="177.1" y="71.1" transform="matrix(-0.866 -0.5 0.5 -0.866 463.3409 283.0693)" fill="#E535AB" width="185" height="16.6"/></g></g><g><g><rect x="122.1" y="-13" transform="matrix(-0.5 -0.866 0.866 -0.5 126.7903 232.1221)" fill="#E535AB" width="16.6" height="185"/></g></g><g><g><rect x="109.6" y="151.6" transform="matrix(-0.5 -0.866 0.866 -0.5 266.0828 473.3766)" fill="#E535AB" width="320.3" height="16.6"/></g></g><g><g><rect x="52.5" y="107.5" fill="#E535AB" width="16.6" height="185"/></g></g><g><g><rect x="330.9" y="107.5" fill="#E535AB" width="16.6" height="185"/></g></g><g><g><rect x="262.4" y="240.1" transform="matrix(-0.5 -0.866 0.866 -0.5 126.7953 714.2875)" fill="#E535AB" width="14.5" height="160.9"/></g></g><path fill="#E535AB" d="M369.5,297.9c-9.6,16.7-31,22.4-47.7,12.8c-16.7-9.6-22.4-31-12.8-47.7c9.6-16.7,31-22.4,47.7-12.8 C373.5,259.9,379.2,281.2,369.5,297.9"/><path fill="#E535AB" d="M90.9,137c-9.6,16.7-31,22.4-47.7,12.8c-16.7-9.6-22.4-31-12.8-47.7c9.6-16.7,31-22.4,47.7-12.8 C94.8,99,100.5,120.3,90.9,137"/><path fill="#E535AB" d="M30.5,297.9c-9.6-16.7-3.9-38,12.8-47.7c16.7-9.6,38-3.9,47.7,12.8c9.6,16.7,3.9,38-12.8,47.7 C61.4,320.3,40.1,314.6,30.5,297.9"/><path fill="#E535AB" d="M309.1,137c-9.6-16.7-3.9-38,12.8-47.7c16.7-9.6,38-3.9,47.7,12.8c9.6-16.7,3.9-38-12.8,47.7 C340.1,159.4,318.7,153.7,309.1,137"/><path fill="#E535AB" 
d="M200,395.8c-19.3,0-34.9-15.6-34.9-34.9c0-19.3,15.6-34.9,34.9-34.9c19.3,0,34.9,15.6,34.9,34.9 C234.9,380.1,219.3,395.8,200,395.8"/><path fill="#E535AB" d="M200,74c-19.3,0-34.9-15.6-34.9-34.9c0-19.3,15.6-34.9,34.9-34.9c19.3,0,34.9,15.6,34.9,34.9 C234.9,58.4,219.3,74,200,74"/></g></svg>',
showConnectionField: (field, values) => {
if (apiDriverBase.showConnectionField(field, values)) return true;
if (field === 'apiServerUrl1') return true;
return false;
},
apiServerUrl1Label: 'GraphQL Endpoint URL',
beforeConnectionSave: connection => ({
...connection,
singleDatabase: true,
defaultDatabase: '_api_database_',
}),
async connect(connection: any) {
return {
connection,
client: null,
database: '_api_database_',
axios: connection.axios,
};
},
async getVersion(dbhan: any) {
const introspectionResult = await loadGraphQlSchema(dbhan);
const schema = introspectionResult.__schema;
// const version = 'GraphQL';
return {
version: `GraphQL, ${schema.types?.length || 0} types`,
};
},
};
+235
View File
@@ -0,0 +1,235 @@
/**
 * Tokenizes GraphQL query text and extracts the selected field paths,
 * argument paths and argument values (all dotted by nesting level).
 * Returns empty collections for blank text or text without a selection set.
 */
export function parseGraphQlSelectionPaths(text: string): {
  fieldPaths: string[];
  argumentPaths: string[];
  argumentValues: Record<string, Record<string, string>>;
} {
  if (!text) return { fieldPaths: [], argumentPaths: [], argumentValues: {} };
  // Strip line comments first; the tokenizer below does not understand them.
  const withoutComments = text.replace(/#[^\n]*/g, '');
  // Tokens: spread, string literal, identifier, variable, number, punctuation.
  const tokenPattern =
    /\.\.\.|"(?:[^"\\]|\\.)*"|[A-Za-z_][A-Za-z0-9_]*|\$[A-Za-z_][A-Za-z0-9_]*|-?\d+(?:\.\d+)?(?:[eE][+-]?\d+)?|[@{}()\[\],!:$]/g;
  const tokens: string[] = withoutComments.match(tokenPattern) || [];
  const firstBrace = tokens.indexOf('{');
  if (firstBrace === -1) return { fieldPaths: [], argumentPaths: [], argumentValues: {} };
  const parsed = parseSelectionSet(tokens, firstBrace, []);
  return {
    fieldPaths: parsed.fieldPaths.map(segments => segments.join('.')),
    argumentPaths: parsed.argumentPaths.map(segments => segments.join('.')),
    argumentValues: parsed.argumentValues,
  };
}
/**
 * Collects the tokens making up one argument value, starting at startIndex,
 * until a top-level ')' , ']' , '}' or ',' terminates it. Nested parentheses,
 * brackets and braces are consumed as part of the value.
 * Returns the concatenated value text and the index of the terminator.
 */
function parseArgumentValue(tokens: string[], startIndex: number): { value: string; endIndex: number } {
  const collected: string[] = [];
  let cursor = startIndex;
  let parenDepth = 0;
  let squareDepth = 0;
  let curlyDepth = 0;
  while (cursor < tokens.length) {
    const token = tokens[cursor];
    const atTopLevel = parenDepth === 0 && squareDepth === 0 && curlyDepth === 0;
    // Terminators: only when not nested inside the corresponding bracket kind.
    if (token === ')' && atTopLevel) break;
    if (token === ']' && squareDepth === 0) break;
    if (token === '}' && curlyDepth === 0) break;
    if (token === ',' && atTopLevel) break;
    // Depth bookkeeping for brackets that are part of the value itself.
    if (token === '(') parenDepth += 1;
    else if (token === '[') squareDepth += 1;
    else if (token === '{') curlyDepth += 1;
    else if (token === ')') parenDepth -= 1;
    else if (token === ']') squareDepth -= 1;
    else if (token === '}') curlyDepth -= 1;
    collected.push(token);
    cursor += 1;
  }
  return {
    value: collected.join(''),
    endIndex: cursor,
  };
}
// Parses the optional argument list "(a: 1, b: "x")" that can follow a field
// name. startIndex must point at the token right after the field name; when
// that token is not '(', the field has no arguments and startIndex is
// returned unchanged. Returns each argument's name and raw value text plus
// the index just past the closing ')'.
function parseArgumentsFromField(
  tokens: string[],
  startIndex: number
): { arguments: { name: string; value: string }[]; endIndex: number } {
  const args: { name: string; value: string }[] = [];
  let index = startIndex;
  if (tokens[index] !== '(') {
    return { arguments: args, endIndex: index };
  }
  let depth = 1;
  index += 1;
  // depth is updated BEFORE the argument check so a closing ')' at depth 1
  // drops depth to 0 and terminates the loop without being treated as a name.
  while (index < tokens.length && depth > 0) {
    if (tokens[index] === '(') depth += 1;
    if (tokens[index] === ')') depth -= 1;
    // Look for argument names (identifier followed by colon) and their values
    if (depth > 0 && /^[A-Za-z_]/.test(tokens[index]) && tokens[index + 1] === ':') {
      const argumentName = tokens[index];
      // parseArgumentValue consumes tokens up to the next top-level ',' or ')'
      const { value, endIndex } = parseArgumentValue(tokens, index + 2);
      args.push({ name: argumentName, value });
      index = endIndex;
      // Skip the separating comma between arguments, if present
      if (tokens[index] === ',') {
        index += 1;
      }
    } else {
      index += 1;
    }
  }
  return { arguments: args, endIndex: index };
}
// Recursive descent over the token stream for one selection set ("{ ... }").
// startIndex points at the opening '{'. prefix is the dotted field path of
// the enclosing selection. Returns all field paths (leaf-level), argument
// paths, argument values keyed by dotted field path, and the index just past
// the matching '}'.
function parseSelectionSet(
  tokens: string[],
  startIndex: number,
  prefix: string[]
): {
  fieldPaths: string[][];
  argumentPaths: string[][];
  argumentValues: Record<string, Record<string, string>>;
  index: number;
} {
  const fieldPaths: string[][] = [];
  const argumentPaths: string[][] = [];
  const argumentValues: Record<string, Record<string, string>> = {};
  let index = startIndex + 1;
  while (index < tokens.length) {
    const token = tokens[index];
    if (token === '}') {
      // End of this selection set; report position after the brace
      return { fieldPaths, argumentPaths, argumentValues, index: index + 1 };
    }
    if (token === '...') {
      // Fragment spread or inline fragment: skip "on TypeName" and anything
      // up to the fragment's own selection set, then merge its results at the
      // SAME prefix (fragments do not add a path segment).
      index += 1;
      if (tokens[index] === 'on') {
        index += 2;
      }
      while (index < tokens.length && tokens[index] !== '{' && tokens[index] !== '}') {
        index += 1;
      }
      if (tokens[index] === '{') {
        const frag = parseSelectionSet(tokens, index, prefix);
        fieldPaths.push(...frag.fieldPaths);
        argumentPaths.push(...frag.argumentPaths);
        for (const [fieldPath, values] of Object.entries(frag.argumentValues)) {
          argumentValues[fieldPath] = {
            ...(argumentValues[fieldPath] || {}),
            ...values,
          };
        }
        index = frag.index;
        continue;
      }
      continue;
    }
    if (/^[A-Za-z_]/.test(token)) {
      // Field, possibly aliased: "alias: realField" — the real field name is
      // what goes into the path.
      let fieldName = token;
      if (tokens[index + 1] === ':' && /^[A-Za-z_]/.test(tokens[index + 2] || '')) {
        fieldName = tokens[index + 2];
        index += 3;
      } else {
        index += 1;
      }
      // Parse arguments if present
      const { arguments: args, endIndex: argsEndIndex } = parseArgumentsFromField(tokens, index);
      index = argsEndIndex;
      // Add argument paths for this field
      const currentFieldPath = [...prefix, fieldName].join('.');
      for (const arg of args) {
        argumentPaths.push([...prefix, fieldName, arg.name]);
        if (!argumentValues[currentFieldPath]) {
          argumentValues[currentFieldPath] = {};
        }
        argumentValues[currentFieldPath][arg.name] = arg.value;
      }
      // Skip directives like "@include(if: $flag)" — name plus optional args
      while (tokens[index] === '@') {
        index += 2;
        if (tokens[index] === '(') {
          let depth = 1;
          index += 1;
          while (index < tokens.length && depth > 0) {
            if (tokens[index] === '(') depth += 1;
            if (tokens[index] === ')') depth -= 1;
            index += 1;
          }
        }
      }
      if (tokens[index] === '{') {
        // Composite field: recurse; only leaf paths are kept, so the parent
        // path is recorded only when the nested set yielded no fields.
        const nested = parseSelectionSet(tokens, index, [...prefix, fieldName]);
        if (nested.fieldPaths.length > 0) {
          fieldPaths.push(...nested.fieldPaths);
        } else {
          fieldPaths.push([...prefix, fieldName]);
        }
        argumentPaths.push(...nested.argumentPaths);
        for (const [fieldPath, values] of Object.entries(nested.argumentValues)) {
          argumentValues[fieldPath] = {
            ...(argumentValues[fieldPath] || {}),
            ...values,
          };
        }
        index = nested.index;
      } else {
        // Scalar leaf field
        fieldPaths.push([...prefix, fieldName]);
      }
      continue;
    }
    // Unrecognized token (punctuation, literals outside arguments): skip
    index += 1;
  }
  // Unbalanced input: ran off the end without a closing '}'
  return { fieldPaths, argumentPaths, argumentValues, index };
}
+127
View File
@@ -0,0 +1,127 @@
export type GraphQlVariableDefinition = {
  name: string;
  type: string;
};

/**
 * Scans GraphQL text for variable declarations ("$name: Type") and returns
 * each unique variable with its raw type text (defaults after '=' excluded).
 * The first occurrence wins on duplicate names.
 */
export function extractGraphQlVariableDefinitions(text: string): GraphQlVariableDefinition[] {
  if (!text) return [];
  const withoutComments = text.replace(/#[^\n]*/g, '');
  // Group 1: variable name; group 2: type text up to '=', ',', ')' or newline.
  const declarationRegex = /\$([A-Za-z_][A-Za-z0-9_]*)\s*:\s*([^=,)\n]+)/g;
  const seenNames = new Set<string>();
  const definitions: GraphQlVariableDefinition[] = [];
  for (let found = declarationRegex.exec(withoutComments); found; found = declarationRegex.exec(withoutComments)) {
    const [, varName, rawType] = found;
    if (seenNames.has(varName)) continue;
    seenNames.add(varName);
    definitions.push({ name: varName, type: rawType.trim() });
  }
  return definitions;
}
// Strips trailing NON_NULL markers ("!") from a GraphQL type literal.
function unwrapNonNull(typeText: string): string {
  let text = (typeText || '').trim();
  while (text.endsWith('!')) {
    text = text.slice(0, -1).trim();
  }
  return text;
}

// A type is a list when, ignoring NON_NULL wrappers, it is bracketed: "[T]".
function isListType(typeText: string): boolean {
  const bare = unwrapNonNull(typeText);
  return bare.startsWith('[') && bare.endsWith(']');
}

// Returns the element type of a list type; non-list types pass through.
function getInnerListType(typeText: string): string {
  const bare = unwrapNonNull(typeText);
  const bracketed = bare.startsWith('[') && bare.endsWith(']');
  return bracketed ? bare.slice(1, -1).trim() : bare;
}

// Peels ALL list and non-null wrappers, yielding the innermost named type.
function getBaseType(typeText: string): string {
  let bare = unwrapNonNull(typeText);
  while (bare.startsWith('[') && bare.endsWith(']')) {
    bare = unwrapNonNull(bare.slice(1, -1).trim());
  }
  return bare;
}
// Tries JSON.parse; on failure returns the raw string unchanged.
// Blank/empty input yields null.
function parseJsonIfPossible(raw: string): any {
  const text = (raw || '').trim();
  if (!text) return null;
  try {
    return JSON.parse(text);
  } catch {
    return raw;
  }
}

// Parses an integer (truncating fractions); null for blank or non-numeric input.
function toInt(raw: string): number | null {
  const text = (raw || '').trim();
  if (!text) return null;
  const parsed = Number(text);
  return Number.isFinite(parsed) ? Math.trunc(parsed) : null;
}

// Parses a float; null for blank or non-numeric input.
function toFloat(raw: string): number | null {
  const text = (raw || '').trim();
  if (!text) return null;
  const parsed = Number(text);
  return Number.isFinite(parsed) ? parsed : null;
}

// Maps common truthy/falsy words to booleans; null when blank or unrecognized.
function toBoolean(raw: string): boolean | null {
  const word = (raw || '').trim().toLowerCase();
  if (!word) return null;
  const truthyWords = ['true', '1', 'yes', 'y', 'on'];
  const falsyWords = ['false', '0', 'no', 'n', 'off'];
  if (truthyWords.includes(word)) return true;
  if (falsyWords.includes(word)) return false;
  return null;
}
/**
 * Coerces one raw value to its declared GraphQL type (Int, Float, Boolean,
 * String/ID, or lists thereof). Unknown scalar types fall back to JSON
 * parsing of string input; null/undefined input stays null.
 */
function convertByGraphQlTypeValue(raw: any, graphQlType: string): any {
  if (raw == null) return null;
  if (isListType(graphQlType)) {
    const elementType = getInnerListType(graphQlType);
    // A string may encode the whole list as JSON; non-arrays become singletons.
    const maybeParsed = typeof raw === 'string' ? parseJsonIfPossible(raw) : raw;
    const items = Array.isArray(maybeParsed) ? maybeParsed : [maybeParsed];
    return items.map(element => convertByGraphQlTypeValue(element, elementType));
  }
  const baseType = getBaseType(graphQlType);
  const asString = typeof raw === 'string' ? raw : JSON.stringify(raw);
  switch (baseType) {
    case 'Int':
      return toInt(asString);
    case 'Float':
      return toFloat(asString);
    case 'Boolean':
      return toBoolean(asString);
    case 'String':
    case 'ID':
      return String(raw);
    default:
      return typeof raw === 'string' ? parseJsonIfPossible(raw) : raw;
  }
}

/**
 * Builds the "variables" object for a GraphQL request: every variable
 * declared in the query text gets its raw string value converted according
 * to its declared type. Undeclared raw values are ignored.
 */
export function convertGraphQlVariablesForRequest(
  queryText: string,
  rawVariables: Record<string, string> = {}
): Record<string, any> {
  const converted: Record<string, any> = {};
  for (const { name, type } of extractGraphQlVariableDefinitions(queryText || '')) {
    converted[name] = convertByGraphQlTypeValue(rawVariables?.[name] ?? '', type);
  }
  return converted;
}
+175
View File
@@ -0,0 +1,175 @@
import type { GraphQLField, GraphQLInputValue, GraphQLIntrospectionResult, GraphQLType, GraphQLTypeRef } from './graphqlIntrospection';
export type GraphQLExplorerOperationType = 'query' | 'mutation' | 'subscription';
export interface GraphQLExplorerFieldNode {
name: string;
description?: string;
typeName: string;
typeDisplay: string;
isLeaf: boolean;
isArgument?: boolean;
arguments?: GraphQLExplorerFieldNode[];
children?: GraphQLExplorerFieldNode[];
}
export interface GraphQLExplorerOperation {
operationType: GraphQLExplorerOperationType;
rootTypeName: string;
fields: GraphQLExplorerFieldNode[];
}
interface GraphQLExplorerOptions {
maxDepth?: number;
}
const DEFAULT_MAX_DEPTH = 2;
// Renders a type-ref tree as SDL notation, e.g. NON_NULL(LIST(Int)) → "[Int]!".
function getTypeDisplay(typeRef: GraphQLTypeRef | null | undefined): string {
  if (!typeRef) return 'Unknown';
  switch (typeRef.kind) {
    case 'NON_NULL':
      return `${getTypeDisplay(typeRef.ofType)}!`;
    case 'LIST':
      return `[${getTypeDisplay(typeRef.ofType)}]`;
    default:
      return typeRef.name || 'Unknown';
  }
}

// Drills through NON_NULL/LIST wrappers to the underlying named ref, or null.
function unwrapNamedType(typeRef: GraphQLTypeRef | null | undefined): GraphQLTypeRef | null {
  let current: GraphQLTypeRef | null = typeRef ?? null;
  while (current && (current.kind === 'NON_NULL' || current.kind === 'LIST')) {
    current = current.ofType ?? null;
  }
  return current;
}

// Indexes types by name for O(1) lookup.
function buildTypeMap(types: GraphQLType[]): Map<string, GraphQLType> {
  const map = new Map<string, GraphQLType>();
  for (const type of types) {
    map.set(type.name, type);
  }
  return map;
}

// Only OBJECT and INTERFACE types have selectable sub-fields.
function isCompositeType(type: GraphQLType | undefined): boolean {
  return type?.kind === 'OBJECT' || type?.kind === 'INTERFACE';
}
// Builds one explorer tree node for a field, expanding composite (OBJECT /
// INTERFACE) return types into children up to maxDepth levels deep.
// visitedTypes holds type names already expanded on this path and is used to
// stop cyclic type references.
function buildFieldNode(
  field: GraphQLField,
  typeMap: Map<string, GraphQLType>,
  depth: number,
  maxDepth: number,
  visitedTypes: Set<string>
): GraphQLExplorerFieldNode {
  const namedType = unwrapNamedType(field.type);
  const typeDef = namedType?.name ? typeMap.get(namedType.name) : undefined;
  const composite = isCompositeType(typeDef);
  // Children get a copy of the visited set, so sibling branches are independent
  const nextVisited = new Set(visitedTypes);
  if (typeDef?.name) {
    nextVisited.add(typeDef.name);
  }
  let children: GraphQLExplorerFieldNode[] | undefined;
  // NOTE(review): the cycle guard tests the INCOMING set while the type is
  // added to the copy, so a self-referential type expands one extra level
  // before recursion stops — confirm this is intended.
  if (composite && depth < maxDepth && typeDef?.fields && !visitedTypes.has(typeDef.name)) {
    children = typeDef.fields.map(childField =>
      buildFieldNode(childField, typeMap, depth + 1, maxDepth, nextVisited)
    );
  }
  return {
    name: field.name,
    description: field.description,
    typeName: namedType?.name || 'Unknown',
    typeDisplay: getTypeDisplay(field.type),
    // Leaf = not expandable, or expansion produced nothing
    isLeaf: !composite || !children || children.length === 0,
    children,
  };
}
function buildOperationFields(
rootTypeName: string,
types: GraphQLType[],
maxDepth: number
): GraphQLExplorerFieldNode[] {
const typeMap = buildTypeMap(types);
const rootType = typeMap.get(rootTypeName);
if (!rootType?.fields) return [];
return rootType.fields.map(field => buildFieldNode(field, typeMap, 1, maxDepth, new Set([rootTypeName])));
}
export function buildGraphQlExplorerOperations(
introspectionResult: GraphQLIntrospectionResult,
options: GraphQLExplorerOptions = {}
): GraphQLExplorerOperation[] {
const { __schema } = introspectionResult || {};
if (!__schema?.types) return [];
const maxDepth = options.maxDepth ?? DEFAULT_MAX_DEPTH;
const operations: GraphQLExplorerOperation[] = [];
if (__schema.queryType?.name) {
operations.push({
operationType: 'query',
rootTypeName: __schema.queryType.name,
fields: buildOperationFields(__schema.queryType.name, __schema.types, maxDepth),
});
}
if (__schema.mutationType?.name) {
operations.push({
operationType: 'mutation',
rootTypeName: __schema.mutationType.name,
fields: buildOperationFields(__schema.mutationType.name, __schema.types, maxDepth),
});
}
if (__schema.subscriptionType?.name) {
operations.push({
operationType: 'subscription',
rootTypeName: __schema.subscriptionType.name,
fields: buildOperationFields(__schema.subscriptionType.name, __schema.types, maxDepth),
});
}
return operations;
}
/**
 * Renders a GraphQL operation from dotted selection paths, e.g.
 * ['user.id', 'user.name'] → "query {\n  user {\n    id\n    name\n  }\n}".
 * Blank paths are skipped; duplicate prefixes are merged into one subtree.
 */
export function buildGraphQlQueryText(
  operationType: GraphQLExplorerOperationType,
  selectionPaths: string[],
  options: { operationName?: string; indent?: string } = {}
): string {
  const indent = options.indent ?? ' ';
  const opName = options.operationName?.trim();
  // Merge all paths into a nested Map tree keyed by field name.
  const root = new Map<string, Map<string, any>>();
  for (const rawPath of selectionPaths) {
    if (!rawPath) continue;
    let cursor: Map<string, any> = root;
    for (const segment of rawPath.split('.').filter(Boolean)) {
      let child = cursor.get(segment);
      if (!child) {
        child = new Map();
        cursor.set(segment, child);
      }
      cursor = child;
    }
  }
  // Emit one line per field; composites get a braced block of their children.
  const render = (node: Map<string, any>, level: number): string[] =>
    [...node.entries()].flatMap(([fieldName, children]) =>
      children.size === 0
        ? [`${indent.repeat(level)}${fieldName}`]
        : [
            `${indent.repeat(level)}${fieldName} {`,
            ...render(children, level + 1),
            `${indent.repeat(level)}}`,
          ]
    );
  const header = opName ? `${operationType} ${opName}` : operationType;
  const lines = [`${header} {`];
  if (root.size > 0) {
    lines.push(...render(root, 1));
  }
  lines.push('}');
  return lines.join('\n');
}
+495
View File
@@ -0,0 +1,495 @@
import type { RestApiDefinition } from './restApiDef';
import type { AxiosInstance } from 'axios';
const DEFAULT_INTROSPECTION_DEPTH = 6;
// Recursively builds the nested "ofType" selection used by the introspection
// query. depth limits nesting: at 0 only kind/name are selected (no ofType),
// which bounds how deep wrapped types (NON_NULL/LIST chains) are resolved.
function buildTypeRefSelection(depth: number): string {
  if (depth <= 0) {
    return `
kind
name
`;
  }
  return `
kind
name
ofType {
${buildTypeRefSelection(depth - 1)}
}
`;
}
// Assembles the full introspection query: all types with their fields,
// field arguments and input fields, plus the three root operation type
// names. maxDepth bounds the nested type-ref resolution (see
// buildTypeRefSelection).
function buildIntrospectionQuery(maxDepth: number): string {
  const typeRefSelection = buildTypeRefSelection(maxDepth);
  return `
query IntrospectionQuery {
__schema {
types {
kind
name
description
fields {
name
description
type {
${typeRefSelection}
}
args {
name
description
type {
${typeRefSelection}
}
defaultValue
}
}
inputFields {
name
description
type {
${typeRefSelection}
}
}
}
queryType {
name
}
mutationType {
name
}
subscriptionType {
name
}
}
}
`;
}
export interface GraphQLTypeRef {
kind: string;
name?: string;
ofType?: GraphQLTypeRef | null;
}
export interface GraphQLInputValue {
name: string;
description?: string;
type: GraphQLTypeRef;
defaultValue?: string;
}
export interface GraphQLField {
name: string;
description?: string;
type: GraphQLTypeRef;
args?: GraphQLInputValue[];
}
export interface GraphQLType {
kind: string;
name: string;
description?: string;
fields?: GraphQLField[];
inputFields?: GraphQLField[];
possibleTypes?: GraphQLTypeRef[];
}
export interface GraphQLIntrospectionResult {
__schema: {
types: GraphQLType[];
queryType?: { name: string };
mutationType?: { name: string };
subscriptionType?: { name: string };
};
}
// Renders a type-ref tree as SDL text, e.g. NON_NULL(LIST(Int)) → "[Int]!".
function getTypeString(type: GraphQLTypeRef | null | undefined): string {
  if (!type) return 'Unknown';
  if (type.kind === 'NON_NULL') return `${getTypeString(type.ofType)}!`;
  if (type.kind === 'LIST') return `[${getTypeString(type.ofType)}]`;
  return type.name || 'Unknown';
}

// Linear lookup of a type by exact name.
function findType(types: GraphQLType[], name: string): GraphQLType | undefined {
  return types.find(candidate => candidate.name === name);
}

// Peels NON_NULL and LIST wrappers down to the named type ref, or null.
function unwrapNamedTypeRef(typeRef: GraphQLTypeRef | null | undefined): GraphQLTypeRef | null {
  let current: GraphQLTypeRef | null = typeRef ?? null;
  while (current && (current.kind === 'NON_NULL' || current.kind === 'LIST')) {
    current = current.ofType ?? null;
  }
  return current;
}

// Returns the element type ref when typeRef is a (possibly non-null) list;
// null for anything that is not a list.
function unwrapListTypeRef(typeRef: GraphQLTypeRef | null | undefined): GraphQLTypeRef | null {
  let current: GraphQLTypeRef | null = typeRef ?? null;
  while (current && current.kind === 'NON_NULL') {
    current = current.ofType ?? null;
  }
  if (current?.kind === 'LIST') return unwrapNamedTypeRef(current.ofType);
  return null;
}

// Indexes types by name; tolerates a missing array.
function buildTypeMap(types: GraphQLType[]): Map<string, GraphQLType> {
  const map = new Map<string, GraphQLType>();
  for (const type of types || []) {
    map.set(type.name, type);
  }
  return map;
}

// SCALAR and ENUM fields are leaf values. When the named type is not in the
// map, the decision falls back to the ref's own kind.
function isScalarLikeField(field: GraphQLField, typeMap: Map<string, GraphQLType>): boolean {
  const namedType = unwrapNamedTypeRef(field.type);
  if (!namedType?.name) return false;
  const resolved = typeMap.get(namedType.name);
  if (!resolved) return namedType.kind === 'SCALAR' || namedType.kind === 'ENUM';
  return resolved.kind === 'SCALAR' || resolved.kind === 'ENUM';
}
/**
 * Heuristic ranking for node attributes: well-known identifying fields first
 * (id, name, title, ...), then pattern matches, then a flat baseline of 100.
 * Matching is case-insensitive.
 */
export function scoreFieldName(name: string): number {
  const lowered = (name || '').toLowerCase();
  // Exact matches, best first: score 500 down to 490.
  const preferredNames = [
    'id',
    'name',
    'title',
    'email',
    'username',
    'status',
    'createdat',
    'updatedat',
    'type',
    'code',
    'key',
  ];
  const rank = preferredNames.indexOf(lowered);
  if (rank !== -1) {
    return 500 - rank;
  }
  // Substring/suffix fallbacks, strongest signal first.
  if (lowered.endsWith('id')) return 300;
  if (lowered.includes('name')) return 280;
  if (lowered.includes('title')) return 260;
  if (lowered.includes('email')) return 240;
  if (lowered.includes('status')) return 220;
  if (lowered.includes('date') || lowered.endsWith('at')) return 200;
  return 100;
}
/**
 * Picks up to 10 scalar/enum attributes of a node type to project in a
 * preview query, best-scoring first with original order as tiebreak.
 * Falls back to ['__typename'] when no usable scalar field exists, so the
 * generated query stays valid.
 */
export function chooseUsefulNodeAttributes(nodeType: GraphQLType | undefined, typeMap: Map<string, GraphQLType>): string[] {
  const fields = nodeType?.fields ?? [];
  const scalarFields = fields.filter(field => isScalarLikeField(field, typeMap));
  if (scalarFields.length === 0) return ['__typename'];
  return scalarFields
    .map((field, position) => ({
      name: field.name,
      score: scoreFieldName(field.name),
      position,
    }))
    // Score descending; ties keep declaration order (stable by position).
    .sort((left, right) => right.score - left.score || left.position - right.position)
    .slice(0, 10)
    .map(entry => entry.name);
}
/**
 * Serializes an argument value as GraphQL source text, guided by the
 * argument's declared type. Int/Float values are emitted bare; everything
 * else (and anything unparseable) becomes a quoted JSON string literal,
 * which is always syntactically valid.
 */
function stringifyArgumentValue(argumentTypeRef: GraphQLTypeRef | null | undefined, value: number | string): string {
  const namedType = unwrapNamedTypeRef(argumentTypeRef);
  // Unknown type: quote defensively.
  if (!namedType?.name) {
    return JSON.stringify(String(value));
  }
  const loweredTypeName = namedType.name.toLowerCase();
  if (loweredTypeName === 'int' || loweredTypeName === 'float') {
    const numeric = typeof value === 'number' ? value : Number(value);
    // Degrade unparseable numbers to a quoted string rather than emitting NaN.
    return Number.isFinite(numeric) ? String(numeric) : JSON.stringify(String(value));
  }
  return JSON.stringify(String(value));
}
/**
 * Builds the argument-list text "(first: N, filter: ...)" for a connection
 * field. Picks the first argument whose name looks like a page-size
 * parameter and, optionally, one filter argument the field actually
 * declares. Returns '' when no usable argument exists.
 *
 * @param field field whose declared args are inspected
 * @param filterParamName name of a filter argument to include, if declared
 * @param filterValue value for the filter argument; both must be truthy
 * @param pageSize page-size value for the pagination argument; defaults to
 *   10, preserving the historical behavior this helper is named after
 */
export function buildFirstTenArgs(
  field: GraphQLField,
  filterParamName?: string | null,
  filterValue?: string,
  pageSize: number = 10
): string {
  const args = field.args || [];
  if (args.length === 0) return '';
  const argPairs: string[] = [];
  // Pagination argument: first declared arg matching a common page-size name
  const candidates = ['first', 'limit', 'pagesize', 'perpage', 'take', 'size', 'count', 'maxresults'];
  const paginationArg = args.find(item => candidates.includes((item.name || '').toLowerCase()));
  if (paginationArg) {
    argPairs.push(`${paginationArg.name}: ${stringifyArgumentValue(paginationArg.type, pageSize)}`);
  }
  // Filter argument: only emitted when the field actually declares it
  if (filterParamName && filterValue) {
    const filterArg = args.find(item => item.name === filterParamName);
    if (filterArg) {
      argPairs.push(`${filterParamName}: ${stringifyArgumentValue(filterArg.type, filterValue)}`);
    }
  }
  if (argPairs.length === 0) return '';
  return `(${argPairs.join(', ')})`;
}
export type GraphQLConnectionProjection =
| {
kind: 'edges';
nodeTypeName: string;
hasPageInfo: boolean;
}
| {
kind: 'listField';
listFieldName: string;
nodeTypeName: string;
};
// Inspects a field's return type and decides how to project its rows:
// - 'edges': Relay-style connection (edges { node } [+ pageInfo])
// - 'listField': plain wrapper object with a nodes/items/results/data list
// Returns null when the return type does not look like a connection payload.
export function detectConnectionProjection(
  field: GraphQLField,
  typeMap: Map<string, GraphQLType>
): GraphQLConnectionProjection | null {
  const fieldTypeRef = unwrapNamedTypeRef(field.type);
  if (!fieldTypeRef?.name) return null;
  const returnType = typeMap.get(fieldTypeRef.name);
  // Only OBJECT return types with fields can be connection payloads
  if (!returnType || returnType.kind !== 'OBJECT' || !returnType.fields?.length) return null;
  // Relay convention takes precedence: edges -> [Edge] -> Edge.node
  const edgesField = returnType.fields.find(item => item.name === 'edges');
  if (edgesField) {
    const edgeTypeRef = unwrapListTypeRef(edgesField.type);
    if (edgeTypeRef?.name) {
      const edgeType = typeMap.get(edgeTypeRef.name);
      const nodeField = edgeType?.fields?.find(item => item.name === 'node');
      const nodeTypeRef = unwrapNamedTypeRef(nodeField?.type);
      if (nodeTypeRef?.name) {
        // pageInfo presence decides whether pagination info is queried
        const hasPageInfo = !!returnType.fields.find(item => item.name === 'pageInfo');
        return {
          kind: 'edges',
          nodeTypeName: nodeTypeRef.name,
          hasPageInfo,
        };
      }
    }
  }
  // Fallback: first matching well-known list field wins, in this order
  const listFieldNames = ['nodes', 'items', 'results', 'data'];
  for (const listFieldName of listFieldNames) {
    const listField = returnType.fields.find(item => item.name === listFieldName);
    if (!listField) continue;
    const listItemTypeRef = unwrapListTypeRef(listField.type);
    if (!listItemTypeRef?.name) continue;
    return {
      kind: 'listField',
      listFieldName,
      nodeTypeName: listItemTypeRef.name,
    };
  }
  return null;
}
// Renders a ready-to-run preview query for a connection-like field, using
// the detected projection shape (edges/node vs. plain list field), the
// scored node attributes and a default pagination argument.
// Returns null when the field is not connection-like.
function buildConnectionQuery(field: GraphQLField, typeMap: Map<string, GraphQLType>): string | null {
  const projection = detectConnectionProjection(field, typeMap);
  if (!projection) return null;
  const nodeType = typeMap.get(projection.nodeTypeName);
  const selectedAttributes = chooseUsefulNodeAttributes(nodeType, typeMap);
  // buildFirstTenArgs emits e.g. "(first: 10)" when a pagination arg exists
  const argsString = buildFirstTenArgs(field);
  const attributeBlock = selectedAttributes.map(attr => ` ${attr}`).join('\n');
  if (projection.kind === 'edges') {
    // Only query pageInfo when the payload type actually declares it
    const pageInfoBlock = projection.hasPageInfo
      ? `
pageInfo {
hasNextPage
endCursor
}`
      : '';
    return `query {
${field.name}${argsString} {
edges {
node {
${attributeBlock}
}
}${pageInfoBlock}
}
}`;
  }
  // listField projection: nodes/items/results/data wrapper
  return `query {
${field.name}${argsString} {
${projection.listFieldName} {
${attributeBlock}
}
}
}`;
}
/**
 * Collects the root query fields that return connection-like payloads and
 * pairs each with a generated preview query. Fields without a recognizable
 * connection shape are skipped. Returns [] when the root type is missing.
 */
function buildConnectionEndpoints(
  types: GraphQLType[],
  rootTypeName?: string
): Array<{
  name: string;
  description?: string;
  fields?: string;
  connectionQuery?: string;
}> {
  if (!rootTypeName) return [];
  const rootType = findType(types, rootTypeName);
  if (!rootType?.fields?.length) return [];
  const typeMap = buildTypeMap(types);
  const endpoints = [];
  for (const field of rootType.fields) {
    const connectionQuery = buildConnectionQuery(field, typeMap);
    // Non-connection fields are handled by the plain Queries category instead.
    if (!connectionQuery) continue;
    endpoints.push({
      name: field.name,
      description: field.description || '',
      fields: field.description,
      connectionQuery,
    });
  }
  return endpoints;
}
/**
 * Lists all fields of the given root operation type as endpoint descriptors.
 * NOTE: the 'operationType' parameter is currently unused by the body; it is
 * kept for interface compatibility with existing callers.
 */
function buildOperationEndpoints(
  types: GraphQLType[],
  operationType: 'OBJECT',
  rootTypeName?: string
): Array<{ name: string; description?: string; fields?: string }> {
  if (!rootTypeName) return [];
  const rootFields = findType(types, rootTypeName)?.fields;
  if (!rootFields) return [];
  const endpoints: Array<{ name: string; description?: string; fields?: string }> = [];
  for (const field of rootFields) {
    endpoints.push({
      name: field.name,
      description: field.description || '',
      fields: field.description,
    });
  }
  return endpoints;
}
export function extractRestApiDefinitionFromGraphQlIntrospectionResult(
introspectionResult: GraphQLIntrospectionResult
): RestApiDefinition {
const { __schema } = introspectionResult;
const categories: any[] = [];
// Connections (query fields returning connection-like payloads)
if (__schema.queryType?.name) {
const connectionEndpoints = buildConnectionEndpoints(__schema.types, __schema.queryType.name);
if (connectionEndpoints.length > 0) {
categories.push({
name: 'Connections',
endpoints: connectionEndpoints.map(connection => ({
method: 'POST',
path: connection.name,
summary: connection.description,
description: connection.fields,
parameters: [],
connectionQuery: connection.connectionQuery,
})),
});
}
}
// Queries
if (__schema.queryType?.name) {
const queryEndpoints = buildOperationEndpoints(__schema.types, 'OBJECT', __schema.queryType.name);
if (queryEndpoints.length > 0) {
categories.push({
name: 'Queries',
endpoints: queryEndpoints.map(q => ({
method: 'POST',
path: q.name,
summary: q.description,
description: q.fields,
parameters: [],
})),
});
}
}
// Mutations
if (__schema.mutationType?.name) {
const mutationEndpoints = buildOperationEndpoints(__schema.types, 'OBJECT', __schema.mutationType.name);
if (mutationEndpoints.length > 0) {
categories.push({
name: 'Mutations',
endpoints: mutationEndpoints.map(m => ({
method: 'POST',
path: m.name,
summary: m.description,
description: m.fields,
parameters: [],
})),
});
}
}
// Subscriptions
if (__schema.subscriptionType?.name) {
const subscriptionEndpoints = buildOperationEndpoints(__schema.types, 'OBJECT', __schema.subscriptionType.name);
if (subscriptionEndpoints.length > 0) {
categories.push({
name: 'Subscriptions',
endpoints: subscriptionEndpoints.map(s => ({
method: 'POST',
path: s.name,
summary: s.description,
description: s.fields,
parameters: [],
})),
});
}
}
return {
categories,
servers: [],
};
}
/**
 * Fetches the GraphQL schema of the given endpoint via an introspection query.
 *
 * @param url - GraphQL endpoint URL
 * @param headers - extra HTTP headers (merged over Content-Type)
 * @param axios - axios instance used for the request
 * @param maxDepth - maximum type-recursion depth of the introspection query
 * @throws Error with a DBGM-00312 prefix on any network, HTTP or GraphQL failure
 */
export async function fetchGraphQLSchema(
  url: string,
  headers: Record<string, string>,
  axios: AxiosInstance,
  maxDepth: number = DEFAULT_INTROSPECTION_DEPTH
): Promise<GraphQLIntrospectionResult> {
  try {
    const query = buildIntrospectionQuery(maxDepth);
    const response = await axios.post(
      url,
      { query },
      {
        timeout: 10000,
        headers: {
          'Content-Type': 'application/json',
          ...headers,
        },
      }
    );
    // GraphQL transports errors in-band; surface them explicitly.
    if (response.data.errors) {
      throw new Error(`GraphQL introspection error: ${JSON.stringify(response.data.errors)}`);
    }
    if (!response.data.data) {
      throw new Error('Invalid introspection response: no data field');
    }
    return response.data.data as GraphQLIntrospectionResult;
  } catch (err: unknown) {
    // Narrow instead of assuming 'any' — non-Error throws get stringified.
    const message = err instanceof Error ? err.message : String(err);
    throw new Error(`DBGM-00312 Could not fetch GraphQL schema: ${message}`);
  }
}
+13
View File
@@ -0,0 +1,13 @@
export * from './openApiDriver';
export * from './oDataDriver';
export * from './graphQlDriver';
export * from './openApiAdapter';
export * from './oDataAdapter';
export * from './oDataMetadataParser';
export * from './restApiExecutor';
export * from './arrayify';
export * from './graphqlIntrospection';
export * from './graphqlExplorer';
export * from './graphQlQueryParser';
export * from './graphQlVariables';
export * from './restAuthTools';
+70
View File
@@ -0,0 +1,70 @@
const { analyseODataDefinition } = require('./oDataAdapter');
// Returns the first endpoint matching path + HTTP method (default GET),
// scanning categories in declaration order; undefined when nothing matches.
function findEndpoint(apiInfo, path, method = 'GET') {
  for (const category of apiInfo.categories) {
    const matched = category.endpoints.find(
      endpoint => endpoint.path === path && endpoint.method === method
    );
    if (matched) return matched;
  }
  return undefined;
}
// Integration-style test: parses NAV-like OData $metadata and verifies that
// contained navigation targets (customers/items) receive a mandatory
// 'company' query parameter with lookup info pointing at the 'companies' set.
test('deduces mandatory company parameter for customers and items from ContainsTarget metadata', () => {
  const serviceDocument = {
    '@odata.context': 'https://example/odata/$metadata',
    value: [
      { name: 'companies', kind: 'EntitySet', url: 'companies' },
      { name: 'customers', kind: 'EntitySet', url: 'customers' },
      { name: 'items', kind: 'EntitySet', url: 'items' },
    ],
  };
  // Minimal EDMX: 'company' contains customers/items via ContainsTarget="true".
  const metadataXml = `<?xml version="1.0" encoding="utf-8"?>
<edmx:Edmx Version="4.0" xmlns:edmx="http://docs.oasis-open.org/odata/ns/edmx">
<edmx:DataServices>
<Schema Namespace="Microsoft.NAV" Alias="NAV" xmlns="http://docs.oasis-open.org/odata/ns/edm">
<EntityType Name="company">
<Key><PropertyRef Name="id"/></Key>
<Property Name="id" Type="Edm.Guid"/>
<Property Name="displayName" Type="Edm.String"/>
<NavigationProperty Name="customers" Type="Collection(NAV.customer)" ContainsTarget="true" />
<NavigationProperty Name="items" Type="Collection(NAV.item)" ContainsTarget="true" />
</EntityType>
<EntityType Name="customer">
<Property Name="id" Type="Edm.Guid"/>
</EntityType>
<EntityType Name="item">
<Property Name="id" Type="Edm.Guid"/>
</EntityType>
<EntityContainer Name="default">
<EntitySet Name="companies" EntityType="NAV.company">
<NavigationPropertyBinding Path="customers" Target="customers"/>
<NavigationPropertyBinding Path="items" Target="items"/>
</EntitySet>
<EntitySet Name="customers" EntityType="NAV.customer"/>
<EntitySet Name="items" EntityType="NAV.item"/>
</EntityContainer>
</Schema>
</edmx:DataServices>
</edmx:Edmx>`;
  const apiInfo = analyseODataDefinition(serviceDocument, 'https://example/odata', metadataXml);
  const customersGet = findEndpoint(apiInfo, '/customers', 'GET');
  const itemsGet = findEndpoint(apiInfo, '/items', 'GET');
  expect(customersGet).toBeDefined();
  expect(itemsGet).toBeDefined();
  // Both contained sets must require a 'company' lookup parameter.
  const customersCompany = customersGet.parameters.find(param => param.name === 'company');
  const itemsCompany = itemsGet.parameters.find(param => param.name === 'company');
  expect(customersCompany).toBeDefined();
  expect(customersCompany.required).toBe(true);
  expect(customersCompany.in).toBe('query');
  expect(customersCompany.odataLookupEntitySet).toBe('companies');
  expect(customersCompany.odataLookupPath).toBe('/companies');
  expect(itemsCompany).toBeDefined();
  expect(itemsCompany.required).toBe(true);
  expect(itemsCompany.in).toBe('query');
  expect(itemsCompany.odataLookupEntitySet).toBe('companies');
  expect(itemsCompany.odataLookupPath).toBe('/companies');
});
+458
View File
@@ -0,0 +1,458 @@
import { RestApiDefinition, RestApiEndpoint, RestApiParameter, RestApiServer } from './restApiDef';
import { parseODataMetadataDocument } from './oDataMetadataParser';
// One resource entry from an OData service document ('value' array item).
export type ODataServiceResource = {
  name?: string;
  kind?: string;
  url?: string;
};
// Shape of the OData service document returned by the service root URL.
export type ODataServiceDocument = {
  '@odata.context'?: string;
  value?: ODataServiceResource[];
};
// Navigation property parsed from $metadata (EntityType/NavigationProperty).
export interface ODataMetadataNavigationProperty {
  name: string;
  // Raw EDM type reference, e.g. 'Collection(NAV.customer)'.
  type?: string;
  // True when the navigation target is contained in the parent entity.
  containsTarget: boolean;
  nullable: boolean;
}
// Entity type parsed from $metadata.
export interface ODataMetadataEntityType {
  typeName: string;
  // Namespace-qualified name, e.g. 'Microsoft.NAV.company'.
  fullTypeName: string;
  keyProperties: string[];
  // Names of Edm.String properties (used to pick a display/label field).
  stringProperties: string[];
  navigationProperties: ODataMetadataNavigationProperty[];
}
// Entity set parsed from $metadata, with its navigation-property bindings.
export interface ODataMetadataEntitySet {
  name: string;
  entityType: string;
  // Maps navigation-property path -> target entity set name.
  navigationBindings: Record<string, string>;
}
// Aggregated parse result of an OData $metadata document.
export interface ODataMetadataDocument {
  entityTypes: Record<string, ODataMetadataEntityType>;
  entitySets: Record<string, ODataMetadataEntitySet>;
}
/**
 * Derives the OData service root (always with a trailing slash) from the
 * '@odata.context' URL when available, otherwise from the configured endpoint.
 */
function normalizeServiceRoot(contextUrl: string | undefined, fallbackUrl: string): string {
  const safeFallback = String(fallbackUrl ?? '').trim();
  if (typeof contextUrl === 'string' && contextUrl.trim()) {
    try {
      const resolved = new URL(contextUrl.trim(), safeFallback || undefined);
      resolved.hash = '';
      resolved.search = '';
      // BUGFIX: '$' must be escaped in the regex — /\/$metadata$/ contains an
      // end-of-input assertion before 'metadata' and could never match, so the
      // '/$metadata' suffix was silently left in the service root.
      resolved.pathname = resolved.pathname.replace(/\/\$metadata$/i, '');
      const url = resolved.toString();
      return url.endsWith('/') ? url : `${url}/`;
    } catch {
      // malformed context URL — fall back to the configured endpoint below
    }
  }
  return safeFallback.endsWith('/') ? safeFallback : `${safeFallback}/`;
}
// Normalizes a service-document resource URL into a leading-slash path
// (query string preserved); returns null for empty input.
function normalizeEndpointPath(valueUrl: string | undefined): string | null {
  const input = String(valueUrl ?? '').trim();
  if (!input) return null;
  let pathWithQuery: string;
  try {
    // Resolve against a dummy origin so relative values like 'companies' parse.
    const parsed = new URL(input, 'http://odata.local');
    pathWithQuery = `${parsed.pathname}${parsed.search}`;
  } catch {
    pathWithQuery = input;
  }
  return pathWithQuery.startsWith('/') ? pathWithQuery : `/${pathWithQuery}`;
}
// Maps an OData resource kind to the HTTP methods exposed for it.
// EntitySets support listing and creation; action imports are POST-only.
function inferMethods(kind: string | undefined): RestApiEndpoint['method'][] {
  switch (String(kind ?? '').toLowerCase()) {
    case 'actionimport':
      return ['POST'];
    case 'entityset':
      return ['GET', 'POST'];
    default:
      return ['GET'];
  }
}
// Lower-cases only the first character of the trimmed input; '' for blank.
function toLowerCamelCase(value: string | undefined): string {
  const text = String(value ?? '').trim();
  return text ? `${text.charAt(0).toLowerCase()}${text.slice(1)}` : '';
}
// Heuristic English singularization used for matching entity names:
// 'companies' -> 'company', 'addresses' stays, trailing 's' dropped otherwise.
function normalizeSingularName(value: string | undefined): string {
  const text = String(value ?? '').trim();
  if (!text) return '';
  if (/ies$/i.test(text)) return `${text.slice(0, -3)}y`;
  // '-sses' words (addresses, classes) are left untouched.
  if (/sses$/i.test(text)) return text;
  return /s$/i.test(text) && text.length > 1 ? text.slice(0, -1) : text;
}
// Heuristic English pluralization used for matching entity-set names:
// 'company' -> 'companies', words already ending in 's' stay, else append 's'.
function normalizePluralName(value: string | undefined): string {
  const text = String(value ?? '').trim();
  if (!text) return '';
  if (/y$/i.test(text)) return `${text.slice(0, -1)}ies`;
  return /s$/i.test(text) ? text : `${text}s`;
}
// Unwraps 'Collection(...)' wrappers and drops any path-like '/' prefix,
// leaving the (possibly namespace-qualified) entity type name.
function normalizeEntityTypeName(typeName: string | undefined): string {
  const text = String(typeName ?? '').trim();
  if (!text) return '';
  const collectionMatch = /^Collection\((.+)\)$/i.exec(text);
  const unwrapped = collectionMatch ? collectionMatch[1] : text;
  const lastSegment = unwrapped.split('/').pop() || unwrapped;
  return lastSegment.trim();
}
// Produces the lower-cased lookup keys for a type reference: the full
// (possibly namespaced) name first, then the namespace-stripped tail.
function buildTypeReferenceKeys(typeReference: string | undefined): string[] {
  const normalizedReference = normalizeEntityTypeName(typeReference);
  if (!normalizedReference) return [];
  const tail = normalizedReference.includes('.')
    ? normalizedReference.split('.').pop() || normalizedReference
    : normalizedReference;
  // Set preserves insertion order and dedupes when both keys coincide.
  return Array.from(new Set([normalizedReference.toLowerCase(), tail.toLowerCase()]));
}
// Indexes entity types under every alias key (record key, full name, short
// name); the first entity type registered for a given alias wins.
function buildEntityTypeLookup(entityTypes: Record<string, ODataMetadataEntityType>): Map<string, ODataMetadataEntityType> {
  const lookup = new Map<string, ODataMetadataEntityType>();
  Object.entries(entityTypes || {}).forEach(([entityTypeKey, entityType]) => {
    const aliasKeys = new Set<string>([
      ...buildTypeReferenceKeys(entityTypeKey),
      ...buildTypeReferenceKeys(entityType.fullTypeName),
      ...buildTypeReferenceKeys(entityType.typeName),
    ]);
    aliasKeys.forEach(aliasKey => {
      if (!lookup.has(aliasKey)) lookup.set(aliasKey, entityType);
    });
  });
  return lookup;
}
// Resolves a type reference through the alias lookup; null when no alias
// key of the reference is present.
function resolveEntityType(
  entityTypeLookup: Map<string, ODataMetadataEntityType>,
  typeReference: string | undefined
): ODataMetadataEntityType | null {
  for (const candidateKey of buildTypeReferenceKeys(typeReference)) {
    const entityType = entityTypeLookup.get(candidateKey);
    if (entityType) return entityType;
  }
  return null;
}
// Resolves the lookup path for an entity set: the service-document URL when
// present (slash-prefixed), otherwise '/<entitySetName>'.
function resolveLookupPath(entitySetName: string, serviceResourceMap: Map<string, ODataServiceResource>): string {
  const resourceUrl = String(serviceResourceMap.get(entitySetName)?.url ?? '').trim();
  if (!resourceUrl) return `/${entitySetName}`;
  return resourceUrl.startsWith('/') ? resourceUrl : `/${resourceUrl}`;
}
// Maps lower-cased resource name -> original casing; first occurrence wins.
function buildServiceResourceNameLookup(resources: ODataServiceResource[]): Map<string, string> {
  const lookup = new Map<string, string>();
  (resources || []).forEach(resource => {
    const resourceName = String(resource?.name ?? '').trim();
    if (!resourceName) return;
    const key = resourceName.toLowerCase();
    if (!lookup.has(key)) lookup.set(key, resourceName);
  });
  return lookup;
}
/**
 * Tries to find the service-document resource name corresponding to an entity
 * type, by matching the type's (singular/plural) name variants against the
 * lower-cased resource-name lookup. Returns null when nothing matches.
 */
function resolveServiceResourceNameForEntityType(
  entityType: ODataMetadataEntityType,
  serviceResourceNameLookup: Map<string, string>
): string | null {
  // Seed candidates with the raw and normalized type names.
  const baseNames = [
    String(entityType?.typeName ?? '').trim(),
    normalizeSingularName(entityType?.typeName),
    normalizeEntityTypeName(entityType?.fullTypeName),
    normalizeSingularName(normalizeEntityTypeName(entityType?.fullTypeName)),
  ].filter(Boolean);
  // Expand every base name into singular and plural variants.
  const candidates = new Set<string>();
  for (const baseName of baseNames) {
    candidates.add(baseName);
    candidates.add(normalizeSingularName(baseName));
    candidates.add(normalizePluralName(baseName));
  }
  // First candidate that matches a known resource name (case-insensitive) wins.
  for (const candidate of candidates) {
    const matched = serviceResourceNameLookup.get(String(candidate).toLowerCase());
    if (matched) return matched;
  }
  return null;
}
// One mandatory parent parameter deduced for a contained target entity set,
// including where to look up allowed values (entity set / path / fields).
type MandatoryNavigationTargetParameter = {
  name: string;
  lookupEntitySet: string;
  lookupPath: string;
  lookupValueField?: string;
  lookupLabelField?: string;
};
// Target entity set name -> mandatory parameters required to query it.
type MandatoryNavigationByTarget = Record<string, MandatoryNavigationTargetParameter[]>;
// A parent entity set (declared or synthesized from the service document)
// whose ContainsTarget navigations are scanned for mandatory parameters.
type ParentNavigationContext = {
  parentEntitySetName: string;
  parentType: ODataMetadataEntityType;
  navigationBindings: Record<string, string>;
};
/**
 * Scans parsed $metadata for ContainsTarget navigation properties and deduces,
 * per target entity set, the parent parameters that must be supplied (e.g. a
 * mandatory 'company' parameter for NAV-style contained customers/items sets).
 * Returns an empty object when no metadata document is available.
 */
function deduceMandatoryNavigationByTarget(
  metadataDocument: ODataMetadataDocument | null,
  resources: ODataServiceResource[]
): MandatoryNavigationByTarget {
  if (!metadataDocument) return {};
  const entityTypeLookup = buildEntityTypeLookup(metadataDocument.entityTypes || {});
  // Service-document resources by exact name (used for lookup-path resolution).
  const serviceResourceMap = new Map<string, ODataServiceResource>();
  for (const resource of resources) {
    const resourceName = String(resource?.name ?? '').trim();
    if (resourceName) {
      serviceResourceMap.set(resourceName, resource);
    }
  }
  const serviceResourceNameLookup = buildServiceResourceNameLookup(resources);
  // Index entity sets by every alias key of their entity type.
  const entitySetsByEntityType = new Map<string, string[]>();
  for (const [entitySetName, entitySet] of Object.entries(metadataDocument.entitySets || {})) {
    const typeKeys = buildTypeReferenceKeys(entitySet?.entityType);
    if (typeKeys.length === 0) continue;
    for (const typeKey of typeKeys) {
      const list = entitySetsByEntityType.get(typeKey) || [];
      if (!list.includes(entitySetName)) {
        list.push(entitySetName);
        entitySetsByEntityType.set(typeKey, list);
      }
    }
  }
  const mandatoryByTarget: MandatoryNavigationByTarget = {};
  // Collect parent contexts: first from declared entity sets...
  const parentContexts: ParentNavigationContext[] = [];
  const parentTypeKeysCovered = new Set<string>();
  for (const [parentEntitySetName, parentEntitySet] of Object.entries(metadataDocument.entitySets || {})) {
    const parentType = resolveEntityType(entityTypeLookup, parentEntitySet.entityType);
    if (!parentType) continue;
    parentContexts.push({
      parentEntitySetName,
      parentType,
      navigationBindings: parentEntitySet.navigationBindings || {},
    });
    for (const typeKey of buildTypeReferenceKeys(parentEntitySet.entityType)) {
      parentTypeKeysCovered.add(typeKey);
    }
  }
  // ...then from entity types that have navigations but no declared entity
  // set, when a service-document resource can be matched to the type name.
  for (const entityType of Object.values(metadataDocument.entityTypes || {})) {
    const typeKeys = [
      ...buildTypeReferenceKeys(entityType.fullTypeName),
      ...buildTypeReferenceKeys(entityType.typeName),
    ];
    const alreadyCovered = typeKeys.some(typeKey => parentTypeKeysCovered.has(typeKey));
    if (alreadyCovered) continue;
    if (!Array.isArray(entityType.navigationProperties) || entityType.navigationProperties.length === 0) {
      continue;
    }
    const parentEntitySetName = resolveServiceResourceNameForEntityType(entityType, serviceResourceNameLookup);
    if (!parentEntitySetName) continue;
    parentContexts.push({
      parentEntitySetName,
      parentType: entityType,
      navigationBindings: {},
    });
    for (const typeKey of typeKeys) {
      parentTypeKeysCovered.add(typeKey);
    }
  }
  // For every contained navigation, register a mandatory parameter (named
  // after the parent type) on each resolvable target entity set.
  for (const { parentEntitySetName, parentType, navigationBindings } of parentContexts) {
    const parentParamName =
      toLowerCamelCase(parentType.typeName) ||
      toLowerCamelCase(normalizeSingularName(parentEntitySetName)) ||
      toLowerCamelCase(parentEntitySetName);
    if (!parentParamName) continue;
    for (const navProperty of parentType.navigationProperties || []) {
      if (!navProperty.containsTarget) continue;
      // Targets come both from explicit NavigationPropertyBinding entries and
      // from entity sets whose entity type matches the navigation's type.
      const targetNames = new Set<string>();
      const directBoundTarget = navigationBindings?.[navProperty.name];
      if (directBoundTarget) {
        targetNames.add(directBoundTarget);
      }
      const navTypeKeys = buildTypeReferenceKeys(navProperty.type);
      if (navTypeKeys.length > 0) {
        const typeTargets = navTypeKeys.flatMap(typeKey => entitySetsByEntityType.get(typeKey) || []);
        for (const targetName of typeTargets) {
          targetNames.add(targetName);
        }
      }
      for (const targetEntitySetName of targetNames) {
        const targetList = mandatoryByTarget[targetEntitySetName] || [];
        // Deduplicate by case-insensitive parameter name per target set.
        const exists = targetList.some(item => item.name.toLowerCase() === parentParamName.toLowerCase());
        if (exists) continue;
        targetList.push({
          name: parentParamName,
          lookupEntitySet: parentEntitySetName,
          lookupPath: resolveLookupPath(parentEntitySetName, serviceResourceMap),
          lookupValueField: parentType.keyProperties?.[0],
          // Prefer a string property containing 'name' as the display label.
          lookupLabelField: parentType.stringProperties?.find(prop => /name/i.test(prop)) || parentType.stringProperties?.[0],
        });
        mandatoryByTarget[targetEntitySetName] = targetList;
      }
    }
  }
  return mandatoryByTarget;
}
// Converts the deduced mandatory targets for one resource into required
// query parameters, deduplicated by case-insensitive parameter name.
function buildMandatoryNavigationParameters(
  resource: ODataServiceResource,
  mandatoryByTarget: MandatoryNavigationByTarget
): RestApiParameter[] {
  const resourceName = String(resource?.name ?? '').trim();
  if (!resourceName) return [];
  const parameters: RestApiParameter[] = [];
  const seenNames = new Set<string>();
  for (const target of mandatoryByTarget[resourceName] || []) {
    const normalizedName = target.name.toLowerCase();
    if (seenNames.has(normalizedName)) continue;
    seenNames.add(normalizedName);
    parameters.push({
      name: target.name,
      in: 'query',
      dataType: 'string',
      required: true,
      description: target.lookupEntitySet
        ? `Required navigation parameter deduced from OData metadata (lookup: ${target.lookupEntitySet})`
        : 'Required navigation parameter deduced from OData metadata',
      odataLookupPath: target.lookupPath,
      odataLookupEntitySet: target.lookupEntitySet,
      odataLookupValueField: target.lookupValueField,
      odataLookupLabelField: target.lookupLabelField,
    });
  }
  return parameters;
}
/**
 * Builds one endpoint per inferred HTTP method for a service-document
 * resource; POST endpoints get a JSON body parameter and every method gets
 * the deduced mandatory navigation parameters. Empty array for blank URLs.
 */
function createODataResourceEndpoints(
  resource: ODataServiceResource,
  mandatoryByTarget: MandatoryNavigationByTarget
): RestApiEndpoint[] {
  const path = normalizeEndpointPath(resource.url);
  if (!path) return [];
  const summary = resource.name || resource.url || path;
  const descriptionKind = String(resource.kind ?? '').trim();
  const methods = inferMethods(resource.kind);
  const mandatoryNavigationParameters = buildMandatoryNavigationParameters(resource, mandatoryByTarget);
  return methods.map(method => {
    // Copy per method so pushing the body parameter does not leak across endpoints.
    const parameters: RestApiParameter[] = [...mandatoryNavigationParameters];
    if (method === 'POST') {
      parameters.push({
        name: 'body',
        in: 'body',
        dataType: 'object',
        contentType: 'application/json',
      });
    }
    return {
      method,
      path,
      summary,
      description: descriptionKind ? `OData ${descriptionKind}` : 'OData resource',
      parameters,
    };
  });
}
/**
 * Converts an OData service document (plus optional $metadata XML) into the
 * simplified REST API definition: endpoints grouped by resource kind, an
 * always-present Metadata category, and the resolved service root as server.
 *
 * @param doc - service document fetched from the service root
 * @param endpointUrl - configured endpoint URL (fallback for the service root)
 * @param metadataDocumentXml - raw $metadata XML used to deduce mandatory parameters
 */
export function analyseODataDefinition(
  doc: ODataServiceDocument,
  endpointUrl: string,
  metadataDocumentXml?: string | null
): RestApiDefinition {
  const resources = Array.isArray(doc?.value) ? doc.value : [];
  const categoriesByName = new Map<string, RestApiEndpoint[]>();
  const metadataDocument = metadataDocumentXml ? parseODataMetadataDocument(metadataDocumentXml) : null;
  const mandatoryByTarget = deduceMandatoryNavigationByTarget(metadataDocument, resources);
  // Group endpoints by resource kind (EntitySet, Singleton, ...).
  for (const resource of resources) {
    const endpoints = createODataResourceEndpoints(resource, mandatoryByTarget);
    if (endpoints.length === 0) continue;
    const categoryName = String(resource.kind ?? 'Resources').trim() || 'Resources';
    const existingEndpoints = categoriesByName.get(categoryName) || [];
    existingEndpoints.push(...endpoints);
    categoriesByName.set(categoryName, existingEndpoints);
  }
  // $metadata is always exposed, regardless of the service document contents.
  const metadataEndpoint: RestApiEndpoint = {
    method: 'GET',
    path: '/$metadata',
    summary: '$metadata',
    description: 'OData service metadata',
    parameters: [],
  };
  const metadataCategory = categoriesByName.get('Metadata') || [];
  metadataCategory.push(metadataEndpoint);
  categoriesByName.set('Metadata', metadataCategory);
  const serviceRoot = normalizeServiceRoot(doc?.['@odata.context'], endpointUrl);
  const servers: RestApiServer[] = serviceRoot ? [{ url: serviceRoot }] : [];
  return {
    categories: Array.from(categoriesByName.entries()).map(([name, endpoints]) => ({
      name,
      endpoints,
    })),
    servers,
  };
}
+93
View File
@@ -0,0 +1,93 @@
import type { EngineDriver } from 'dbgate-types';
import { buildRestAuthHeaders } from './restAuthTools';
import { apiDriverBase } from './restDriverBase';
/**
 * Derives the OData service root (always with a trailing slash) from the
 * '@odata.context' URL when available, otherwise from the configured endpoint.
 */
function resolveServiceRoot(contextUrl: string | undefined, fallbackUrl: string): string {
  const safeFallback = String(fallbackUrl ?? '').trim();
  if (typeof contextUrl === 'string' && contextUrl.trim()) {
    try {
      const resolved = new URL(contextUrl.trim(), safeFallback || undefined);
      resolved.hash = '';
      resolved.search = '';
      // BUGFIX: '$' must be escaped in the regex — /\/$metadata$/ contains an
      // end-of-input assertion before 'metadata' and could never match, so the
      // '/$metadata' suffix was silently left in the service root.
      resolved.pathname = resolved.pathname.replace(/\/\$metadata$/i, '');
      const url = resolved.toString();
      return url.endsWith('/') ? url : `${url}/`;
    } catch {
      // malformed context URL — fall back to the configured endpoint below
    }
  }
  return safeFallback.endsWith('/') ? safeFallback : `${safeFallback}/`;
}
/**
 * Fetches and validates the OData service document from the configured URL.
 *
 * @param dbhan - driver database handle (carries connection config and axios)
 * @returns the parsed service document
 * @throws Error DBGM-00330 when no URL is configured,
 *         DBGM-00331 when the response body is missing/non-object,
 *         DBGM-00332 when '@odata.context' is absent (not an OData service doc)
 */
async function loadODataServiceDocument(dbhan: any) {
  if (!dbhan?.connection?.apiServerUrl1) {
    throw new Error('DBGM-00330 OData endpoint URL is not configured');
  }
  const response = await dbhan.axios.get(dbhan.connection.apiServerUrl1, {
    headers: buildRestAuthHeaders(dbhan.connection.restAuth),
  });
  const document = response?.data;
  if (!document || typeof document !== 'object') {
    throw new Error('DBGM-00331 OData service document is empty or invalid');
  }
  if (!document['@odata.context']) {
    throw new Error('DBGM-00332 OData service document does not contain @odata.context');
  }
  return document;
}
/**
 * Extracts the service version ('4.0', '2.0', ...) from a '/vX.Y/$metadata'
 * segment of the '@odata.context' URL; returns '' when no version is present.
 */
function getODataVersion(document: any): string {
  const contextUrl = String(document?.['@odata.context'] ?? '').trim();
  // BUGFIX: the '$' before 'metadata' must be escaped — the original
  // /\/v(\d+(?:\.\d+)*)\/$metadata$/ could never match any input.
  const versionMatch = contextUrl.match(/\/v(\d+(?:\.\d+)*)\/\$metadata$/i);
  if (versionMatch?.[1]) return versionMatch[1];
  return '';
}
// @ts-ignore
/**
 * OData REST driver definition. Extends the shared REST driver base with an
 * OData-specific connection field (service URL) and a version probe based on
 * the service document.
 */
export const oDataDriver: EngineDriver = {
  ...apiDriverBase,
  engine: 'odata@rest',
  title: 'OData - REST',
  databaseEngineTypes: ['rest', 'odata'],
  icon: '<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128"><rect width="128" height="128" fill="#f9a000"/><rect x="12" y="12" width="47" height="12" fill="#ffffff"/><rect x="69" y="12" width="47" height="12" fill="#ffffff"/><rect x="12" y="37" width="47" height="12" fill="#ffffff"/><rect x="69" y="37" width="47" height="12" fill="#ffffff"/><rect x="12" y="62" width="47" height="12" fill="#ffffff"/><rect x="69" y="62" width="47" height="12" fill="#ffffff"/><rect x="69" y="87" width="47" height="12" fill="#ffffff"/><circle cx="35" cy="102" r="20" fill="#e6e6e6"/></svg>',
  apiServerUrl1Label: 'OData Service URL',
  // Show the base driver's fields plus the OData service URL field.
  showConnectionField: (field, values) => {
    if (apiDriverBase.showConnectionField(field, values)) return true;
    if (field === 'apiServerUrl1') return true;
    return false;
  },
  // OData connections always map to a single pseudo-database.
  beforeConnectionSave: connection => ({
    ...connection,
    singleDatabase: true,
    defaultDatabase: '_api_database_',
  }),
  // No persistent client — requests go through axios per call.
  async connect(connection: any) {
    return {
      connection,
      client: null,
      database: '_api_database_',
      axios: connection.axios,
    };
  },
  // Probes the service document to report version and resource count.
  async getVersion(dbhan: any) {
    const document = await loadODataServiceDocument(dbhan);
    const resourcesCount = Array.isArray(document?.value) ? document.value.length : 0;
    const odataVersion = getODataVersion(document);
    return {
      version: odataVersion || 'OData',
      versionText: `OData${odataVersion ? ` ${odataVersion}` : ''}, ${resourcesCount} resources`,
    };
  },
};
+161
View File
@@ -0,0 +1,161 @@
import type { ODataMetadataDocument, ODataMetadataEntitySet, ODataMetadataEntityType, ODataMetadataNavigationProperty } from './oDataAdapter';
// Decodes the five predefined XML entities. '&amp;' is decoded LAST so that
// a double-escaped sequence like '&amp;lt;' yields the literal '&lt;'.
function decodeXmlEntities(value: string): string {
  const replacements: Array<[RegExp, string]> = [
    [/&quot;/g, '"'],
    [/&apos;/g, "'"],
    [/&lt;/g, '<'],
    [/&gt;/g, '>'],
    [/&amp;/g, '&'],
  ];
  let result = String(value ?? '');
  for (const [pattern, replacement] of replacements) {
    result = result.replace(pattern, replacement);
  }
  return result;
}
// Parses name="value" / name='value' pairs from a tag's attribute text.
// Each value is entity-decoded and stored under both the raw (possibly
// namespace-prefixed) name and the local name.
function parseXmlAttributes(attributesText: string): Record<string, string> {
  const attributes: Record<string, string> = {};
  const attributeRegex = /([A-Za-z_][A-Za-z0-9_.:-]*)\s*=\s*("([^"]*)"|'([^']*)')/g;
  for (const match of (attributesText || '').matchAll(attributeRegex)) {
    const rawName = match[1];
    const localName = rawName.includes(':') ? rawName.split(':').pop() || rawName : rawName;
    const decoded = decodeXmlEntities(match[3] ?? match[4] ?? '');
    attributes[rawName] = decoded;
    attributes[localName] = decoded;
  }
  return attributes;
}
// Regex-based scan for all elements with the given local name (any namespace
// prefix): matched open/close pairs first, then self-closing tags.
// NOTE(review): non-greedy inner match — presumably same-name nesting is not
// expected in the consumed $metadata documents; confirm if that changes.
function extractXmlElements(xml: string, elementName: string): Array<{ attributes: Record<string, string>; innerXml: string }> {
  const source = xml || '';
  const nsPrefix = '(?:[A-Za-z_][A-Za-z0-9_.-]*:)?';
  const fullTagRegex = new RegExp(
    `<${nsPrefix}${elementName}\\b([^>]*)>([\\s\\S]*?)<\\/${nsPrefix}${elementName}>`,
    'gi'
  );
  const selfClosingRegex = new RegExp(`<${nsPrefix}${elementName}\\b([^>]*)\\/>`, 'gi');
  const elements: Array<{ attributes: Record<string, string>; innerXml: string }> = [];
  for (const match of source.matchAll(fullTagRegex)) {
    elements.push({
      attributes: parseXmlAttributes(match[1] || ''),
      innerXml: match[2] || '',
    });
  }
  for (const match of source.matchAll(selfClosingRegex)) {
    elements.push({
      attributes: parseXmlAttributes(match[1] || ''),
      innerXml: '',
    });
  }
  return elements;
}
// XML boolean attribute: only the (trimmed, case-insensitive) word 'true'
// counts as true; everything else, including undefined, is false.
function toBoolAttribute(value: string | undefined): boolean {
  const normalized = String(value ?? '').trim().toLowerCase();
  return normalized === 'true';
}
// Strips container-path ('Container/Sets') and namespace ('NS.Sets') prefixes
// from an entity set reference, leaving the bare set name.
function normalizeEntitySetName(value: string | undefined): string {
  const input = String(value ?? '').trim();
  if (!input) return '';
  const afterSlash = input.includes('/') ? input.split('/').pop() || '' : input;
  const afterDot = afterSlash.includes('.') ? afterSlash.split('.').pop() || afterSlash : afterSlash;
  return afterDot;
}
/**
 * Parses an OData $metadata (EDMX) document into entity types and entity sets.
 * Uses a lightweight regex-based XML scan (no XML parser dependency); schemas,
 * entity types, keys, string properties, navigation properties and entity-set
 * navigation bindings are extracted.
 */
export function parseODataMetadataDocument(metadataXml: string): ODataMetadataDocument {
  const schemas = extractXmlElements(metadataXml || '', 'Schema');
  const entityTypes: Record<string, ODataMetadataEntityType> = {};
  const entitySets: Record<string, ODataMetadataEntitySet> = {};
  for (const schema of schemas) {
    const namespace = String(schema.attributes.Namespace || '').trim();
    for (const entityTypeNode of extractXmlElements(schema.innerXml, 'EntityType')) {
      const typeName = String(entityTypeNode.attributes.Name || '').trim();
      if (!typeName) continue;
      // Entity types are keyed by their namespace-qualified name.
      const fullTypeName = namespace ? `${namespace}.${typeName}` : typeName;
      const keyProperties: string[] = [];
      const stringProperties: string[] = [];
      const navigationProperties: ODataMetadataNavigationProperty[] = [];
      // Key property references (<Key><PropertyRef Name=.../></Key>).
      for (const keyNode of extractXmlElements(entityTypeNode.innerXml, 'Key')) {
        for (const propRef of extractXmlElements(keyNode.innerXml, 'PropertyRef')) {
          const keyName = String(propRef.attributes.Name || '').trim();
          if (keyName && !keyProperties.includes(keyName)) {
            keyProperties.push(keyName);
          }
        }
      }
      // Only Edm.String properties are collected (label-field candidates).
      for (const propertyNode of extractXmlElements(entityTypeNode.innerXml, 'Property')) {
        const propName = String(propertyNode.attributes.Name || '').trim();
        const propType = String(propertyNode.attributes.Type || '').trim();
        if (propName && /^Edm\.String$/i.test(propType)) {
          stringProperties.push(propName);
        }
      }
      for (const navNode of extractXmlElements(entityTypeNode.innerXml, 'NavigationProperty')) {
        const navName = String(navNode.attributes.Name || '').trim();
        if (!navName) continue;
        navigationProperties.push({
          name: navName,
          type: String(navNode.attributes.Type || '').trim(),
          containsTarget: toBoolAttribute(navNode.attributes.ContainsTarget),
          // Nullable defaults to true when the attribute is absent (OData spec default).
          nullable: navNode.attributes.Nullable === undefined ? true : toBoolAttribute(navNode.attributes.Nullable),
        });
      }
      entityTypes[fullTypeName] = {
        typeName,
        fullTypeName,
        keyProperties,
        stringProperties,
        navigationProperties,
      };
    }
    for (const entitySetNode of extractXmlElements(schema.innerXml, 'EntitySet')) {
      const setName = String(entitySetNode.attributes.Name || '').trim();
      const entityType = String(entitySetNode.attributes.EntityType || '').trim();
      if (!setName || !entityType) continue;
      const navigationBindings: Record<string, string> = {};
      for (const bindingNode of extractXmlElements(entitySetNode.innerXml, 'NavigationPropertyBinding')) {
        const path = String(bindingNode.attributes.Path || '').trim();
        const target = normalizeEntitySetName(bindingNode.attributes.Target);
        if (!path || !target) continue;
        navigationBindings[path] = target;
        // Also register the last path segment, so nested binding paths
        // ('a/b/customers') resolve by plain navigation name ('customers').
        const pathLastSegment = path.split('/').pop();
        if (pathLastSegment && !navigationBindings[pathLastSegment]) {
          navigationBindings[pathLastSegment] = target;
        }
      }
      entitySets[setName] = {
        name: setName,
        entityType,
        navigationBindings,
      };
    }
  }
  return {
    entityTypes,
    entitySets,
  };
}
+285
View File
@@ -0,0 +1,285 @@
import type { OpenAPIV3_1 } from 'openapi-types';
import { RestApiDefinition, RestApiCategory, RestApiEndpoint, RestApiParameter, RestApiServer } from './restApiDef';
/**
* Converts an OpenAPI v3.1 document into a simplified REST API definition
* Organizes endpoints by tags into categories
*/
export function analyseOpenApiDefinition(doc: OpenAPIV3_1.Document): RestApiDefinition {
  // Endpoints accumulate per tag; untagged operations fall into 'Other'.
  const categories = new Map<string, RestApiEndpoint[]>();
  // Process all paths and methods
  if (doc.paths) {
    for (const [path, pathItem] of Object.entries(doc.paths)) {
      if (!pathItem) continue;
      // Process each HTTP method in the path
      const methods = ['get', 'post', 'put', 'patch', 'delete', 'options', 'head', 'trace'] as const;
      for (const method of methods) {
        const operation = (pathItem as any)[method] as OpenAPIV3_1.OperationObject | undefined;
        if (!operation) continue;
        const endpoint: RestApiEndpoint = {
          method: method.toUpperCase() as any,
          path,
          summary: operation.summary,
          description: operation.description,
          // Merges path-item-level and operation-level parameters plus request body.
          parameters: extractParameters(operation, pathItem as any),
        };
        // Use tags to organize into categories; an endpoint with multiple
        // tags appears in every matching category.
        const tags = operation.tags || ['Other'];
        for (const tag of tags) {
          if (!categories.has(tag)) {
            categories.set(tag, []);
          }
          categories.get(tag)!.push(endpoint);
        }
      }
    }
  }
  // Convert Map to RestApiCategory array
  const categoryArray: RestApiCategory[] = Array.from(categories.entries()).map(([name, endpoints]) => ({
    name,
    endpoints,
  }));
  const servers: RestApiServer[] = (doc.servers || []).map(server => ({
    url: server.url,
    description: server.description,
  }));
  return {
    categories: categoryArray,
    servers,
  };
}
/**
* Extract parameters from operation and path item
*/
function extractParameters(
  operation: OpenAPIV3_1.OperationObject,
  pathItem: OpenAPIV3_1.PathItemObject
): RestApiParameter[] {
  const parameters: RestApiParameter[] = [];
  // Path item level parameters (apply to all methods)
  // NOTE(review): '$ref' parameters are skipped, not resolved — references to
  // components.parameters are silently dropped; confirm this is acceptable.
  if (pathItem.parameters) {
    for (const param of pathItem.parameters) {
      if (!('$ref' in param)) {
        parameters.push(convertParameter(param as OpenAPIV3_1.ParameterObject));
      }
    }
  }
  // Operation level parameters
  if (operation.parameters) {
    for (const param of operation.parameters) {
      if (!('$ref' in param)) {
        parameters.push(convertParameter(param as OpenAPIV3_1.ParameterObject));
      }
    }
  }
  // The request body (if any) is appended as a synthetic 'body' parameter.
  const bodyParameter = convertRequestBodyParameter(operation.requestBody);
  if (bodyParameter) {
    parameters.push(bodyParameter);
  }
  return parameters;
}
// Type guard: a reference object is identified solely by its '$ref' key,
// so anything non-null without '$ref' is treated as an inline schema.
function isSchemaObject(schema: OpenAPIV3_1.SchemaObject | OpenAPIV3_1.ReferenceObject | undefined): schema is OpenAPIV3_1.SchemaObject {
  if (!schema) return false;
  return !('$ref' in schema);
}
// Type guard: inline example objects are anything non-null without '$ref'.
function isExampleObject(example: OpenAPIV3_1.ExampleObject | OpenAPIV3_1.ReferenceObject | undefined): example is OpenAPIV3_1.ExampleObject {
  if (!example) return false;
  return !('$ref' in example);
}
// Nullish values and primitives pass through unchanged; objects get a JSON
// round-trip deep copy, falling back to the original when not serializable
// (cycles, BigInt, ...).
function cloneValue(value: any) {
  if (value == null || typeof value !== 'object') return value;
  try {
    return JSON.parse(JSON.stringify(value));
  } catch {
    return value;
  }
}
/**
 * Pick an example value from a media type object: the explicit `example`
 * wins, otherwise the first entry of the `examples` map is used when it is an
 * inline example with a value. Returns undefined when nothing usable exists.
 */
function extractMediaTypeExample(mediaType: OpenAPIV3_1.MediaTypeObject | undefined): any {
  if (!mediaType) return undefined;
  if (mediaType.example !== undefined) {
    return cloneValue(mediaType.example);
  }
  const namedExamples = mediaType.examples ? Object.values(mediaType.examples) : [];
  const [first] = namedExamples;
  if (isExampleObject(first) && first.value !== undefined) {
    return cloneValue(first.value);
  }
  return undefined;
}
/**
 * Generate a representative example value for a JSON schema.
 * Preference order: explicit `example` -> `default` -> first oneOf/anyOf
 * variant -> merged allOf members -> first enum value -> a per-type
 * placeholder. Recursion depth is capped to guard against cyclic schemas.
 */
function buildSchemaExample(
  schema: OpenAPIV3_1.SchemaObject | undefined,
  recursionDepth = 0
): any {
  if (!schema || recursionDepth > 6) return undefined;
  if (schema.example !== undefined) return cloneValue(schema.example);
  if (schema.default !== undefined) return cloneValue(schema.default);
  const nextDepth = recursionDepth + 1;
  if (schema.oneOf?.length) {
    const first = schema.oneOf[0];
    if (!isSchemaObject(first)) return undefined;
    return buildSchemaExample(first, nextDepth);
  }
  if (schema.anyOf?.length) {
    const first = schema.anyOf[0];
    if (!isSchemaObject(first)) return undefined;
    return buildSchemaExample(first, nextDepth);
  }
  if (schema.allOf?.length) {
    // Merge object-shaped examples from every inline allOf member.
    const merged: Record<string, any> = {};
    let mergedAnything = false;
    for (const member of schema.allOf) {
      if (!isSchemaObject(member)) continue;
      const memberExample = buildSchemaExample(member, nextDepth);
      const isPlainObject =
        memberExample && typeof memberExample === 'object' && !Array.isArray(memberExample);
      if (isPlainObject) {
        Object.assign(merged, memberExample);
        mergedAnything = true;
      }
    }
    return mergedAnything ? merged : undefined;
  }
  if (schema.enum?.length) return cloneValue(schema.enum[0]);
  const looksLikeObject = schema.type === 'object' || schema.properties || schema.additionalProperties;
  if (looksLikeObject) {
    const exampleObject: Record<string, any> = {};
    for (const [name, propSchema] of Object.entries(schema.properties ?? {})) {
      if (!isSchemaObject(propSchema)) continue;
      const propExample = buildSchemaExample(propSchema, nextDepth);
      if (propExample !== undefined) {
        exampleObject[name] = propExample;
      }
    }
    const extra = schema.additionalProperties;
    if (extra === true) {
      exampleObject.additionalProp1 = 'string';
    } else if (extra && isSchemaObject(extra)) {
      exampleObject.additionalProp1 = buildSchemaExample(extra, nextDepth) ?? 'string';
    }
    // An object schema always yields an object, even when it has no keys.
    return exampleObject;
  }
  if (schema.type === 'array') {
    if (!isSchemaObject(schema.items)) return [];
    const itemExample = buildSchemaExample(schema.items, nextDepth);
    return itemExample === undefined ? [] : [itemExample];
  }
  if (schema.type === 'number' || schema.type === 'integer') return 0;
  if (schema.type === 'boolean') return true;
  if (schema.type === 'null') return null;
  return 'string';
}
/**
 * Derive a human-readable type label from a schema: arrays become
 * "array<item-type>" when the item schema is inline, union types are joined
 * with " | ", and a schema with only `properties` reports "object".
 */
function getSchemaType(schema: OpenAPIV3_1.SchemaObject | undefined): string | undefined {
  if (!schema) return undefined;
  if (schema.type === 'array') {
    return isSchemaObject(schema.items) ? `array<${schema.items.type || 'any'}>` : 'array';
  }
  if (Array.isArray(schema.type)) return schema.type.join(' | ');
  if (schema.type) return schema.type;
  return schema.properties ? 'object' : undefined;
}
/** True when the schema describes an array whose items are plain strings. */
function isStringListSchema(schema: OpenAPIV3_1.SchemaObject | undefined): boolean {
  if (schema?.type !== 'array') return false;
  if (!isSchemaObject(schema.items)) return false;
  return schema.items.type === 'string';
}
function convertRequestBodyParameter(
requestBody: OpenAPIV3_1.RequestBodyObject | OpenAPIV3_1.ReferenceObject | undefined
): RestApiParameter | null {
if (!requestBody || '$ref' in requestBody || !requestBody.content) return null;
const preferredContentTypes = [
'application/json',
'application/x-www-form-urlencoded',
'multipart/form-data',
'text/plain',
];
const availableContentTypes = Object.keys(requestBody.content);
if (availableContentTypes.length === 0) return null;
const selectedContentType =
preferredContentTypes.find(contentType => requestBody.content?.[contentType]) || availableContentTypes[0];
const mediaType = requestBody.content[selectedContentType];
if (!mediaType || !isSchemaObject(mediaType.schema)) {
return {
name: 'body',
in: 'body',
contentType: selectedContentType,
description: requestBody.description,
required: requestBody.required,
};
}
const schema = mediaType.schema;
const mediaTypeExample = extractMediaTypeExample(mediaType);
const generatedExample = buildSchemaExample(schema);
return {
name: 'body',
in: 'body',
dataType: getSchemaType(schema),
contentType: selectedContentType,
isStringList: isStringListSchema(schema),
description: requestBody.description,
required: requestBody.required,
defaultValue: mediaTypeExample ?? generatedExample,
};
}
/**
 * Convert an OpenAPI parameter object into the internal REST API parameter
 * shape. Only inline schemas are inspected; `$ref` schemas yield no type
 * info or default value.
 */
function convertParameter(param: OpenAPIV3_1.ParameterObject): RestApiParameter {
  const inlineSchema = isSchemaObject(param.schema) ? param.schema : undefined;
  const converted: RestApiParameter = {
    name: param.name,
    in: param.in as any,
    dataType: getSchemaType(inlineSchema),
    isStringList: isStringListSchema(inlineSchema),
    description: param.description,
    required: param.required,
    defaultValue: inlineSchema?.default,
  };
  return converted;
}
+94
View File
@@ -0,0 +1,94 @@
import type { EngineDriver } from 'dbgate-types';
import yaml from 'js-yaml';
import { apiDriverBase } from './restDriverBase';
/**
 * Download and parse the OpenAPI document configured for this connection.
 * Accepts JSON or YAML payloads; already-parsed objects pass through as-is.
 * @throws DBGM-00313 when no definition URL is configured.
 * @throws DBGM-00314 when the downloaded content is not a parseable document.
 */
async function loadOpenApiDefinition(dbhan: any) {
  const definitionUrl = dbhan?.connection?.apiServerUrl1;
  if (!definitionUrl) {
    throw new Error('DBGM-00313 REST connection URL is not configured');
  }
  const response = await dbhan.axios.get(definitionUrl);
  const payload = response?.data;
  let parsed: any = payload;
  if (typeof payload === 'string') {
    // Try JSON first, then fall back to YAML.
    try {
      parsed = JSON.parse(payload);
    } catch {
      parsed = yaml.load(payload);
    }
  }
  if (!parsed || typeof parsed !== 'object') {
    throw new Error('DBGM-00314 API documentation is empty or could not be parsed');
  }
  return parsed;
}
/**
 * OpenAPI/Swagger REST driver.
 * Treats a REST API described by an OpenAPI document as a "database":
 * the definition is downloaded from `apiServerUrl1`, and the server URLs
 * declared in it are listed as selectable databases.
 */
// @ts-ignore
export const openApiDriver: EngineDriver = {
  ...apiDriverBase,
  engine: 'openapi@rest',
  title: 'OpenAPI - REST',
  databaseEngineTypes: ['rest', 'openapi'],
  // Swagger/OpenAPI brand logo as an inline SVG.
  icon: '<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128"><path fill="#85ea2d" d="M63.999 124.945c-33.607 0-60.95-27.34-60.95-60.949C3.05 30.388 30.392 3.048 64 3.048s60.95 27.342 60.95 60.95c0 33.607-27.343 60.946-60.95 60.946z"/><path fill="#173647" d="M40.3 43.311c-.198 2.19.072 4.454-.073 6.668-.173 2.217-.444 4.407-.888 6.596-.615 3.126-2.56 5.489-5.24 7.458 5.218 3.396 5.807 8.662 6.152 14.003.172 2.88.098 5.785.394 8.638.221 2.215 1.082 2.782 3.372 2.854.935.025 1.894 0 2.978 0v6.842c-6.768 1.156-12.354-.762-13.734-6.496a39.329 39.329 0 0 1-.836-6.4c-.148-2.287.097-4.577-.074-6.864-.492-6.277-1.305-8.393-7.308-8.689v-7.8c.441-.1.86-.174 1.302-.223 3.298-.172 4.701-1.182 5.414-4.43a37.512 37.512 0 0 0 .616-5.536c.247-3.569.148-7.21.763-10.754.86-5.094 4.01-7.556 9.254-7.852 1.476-.074 2.978 0 4.676 0v6.99c-.714.05-1.33.147-1.969.147-4.258-.148-4.48 1.304-4.8 4.848zm8.195 16.193h-.099c-2.462-.123-4.578 1.796-4.702 4.258-.122 2.485 1.797 4.603 4.259 4.724h.295c2.436.148 4.527-1.724 4.676-4.16v-.245c.05-2.486-1.944-4.527-4.43-4.577zm15.43 0c-2.386-.074-4.38 1.796-4.454 4.159 0 .149 0 .271.024.418 0 2.684 1.821 4.406 4.578 4.406 2.707 0 4.406-1.772 4.406-4.553-.025-2.682-1.823-4.455-4.554-4.43Zm15.801 0a4.596 4.596 0 0 0-4.676 4.454 4.515 4.515 0 0 0 4.528 4.528h.05c2.264.394 4.553-1.796 4.701-4.429.122-2.437-2.092-4.553-4.604-4.553Zm21.682.369c-2.855-.123-4.284-1.083-4.996-3.79a27.444 27.444 0 0 1-.811-5.292c-.198-3.298-.174-6.62-.395-9.918-.516-7.826-6.177-10.557-14.397-9.205v6.792c1.304 0 2.313 0 3.322.025 1.748.024 3.077.69 3.249 2.634.172 1.772.172 3.568.344 5.365.346 3.57.542 7.187 1.157 10.706.542 2.904 2.536 5.07 5.02 6.841-4.355 2.929-5.636 7.113-5.857 11.814-.122 3.223-.196 6.472-.368 9.721-.148 2.953-1.181 3.913-4.16 3.987-.835.024-1.648.098-2.583.148v6.964c1.748 0 3.347.1 4.946 0 4.971-.295 7.974-2.706 8.96-7.531.417-2.658.662-5.34.737-8.023.171-2.46.148-4.946.394-7.382.369-3.815 2.116-5.389 5.93-5.636a5.161 5.161 0 0 0 1.06-.245v-7.801c-.64-.074-1.084-.148-1.552-.173zM64 6.1c31.977 0 57.9 25.92 57.9 57.898 0 31.977-25.923 57.899-57.9 57.899-31.976 0-57.898-25.922-57.898-57.9C6.102 32.023 32.024 6.101 64 6.101m0-6.1C28.71 0 0 28.71 0 64c0 35.288 28.71 63.998 64 63.998 35.289 0 64-28.71 64-64S99.289.002 64 .002Z"/></svg>',
  // Labels/placeholders for the two URL fields on the connection form.
  apiServerUrl1Label: 'API Definition URL',
  apiServerUrl2Label: 'API Server URL',
  apiServerUrl2Placeholder: '(optional - if not set, the first server URL from the API definition will be used)',
  loadApiServerUrl2Options: true,
  // Show both URL fields in addition to the base driver's auth/proxy fields.
  showConnectionField: (field, values) => {
    if (apiDriverBase.showConnectionField(field, values)) return true;
    if (field === 'apiServerUrl1') return true;
    if (field === 'apiServerUrl2') return true;
    return false;
  },
  // A REST connection always exposes exactly one pseudo-database.
  beforeConnectionSave: connection => ({
    ...connection,
    singleDatabase: true,
    defaultDatabase: '_api_database_',
  }),
  // No persistent client - just carry the connection and its axios instance.
  async connect(connection: any) {
    return {
      connection,
      client: null,
      database: '_api_database_',
      axios: connection.axios
    };
  },
  // "Databases" are the non-empty server URLs declared in the OpenAPI document.
  async listDatabases(dbhan: any) {
    const openApiDefinition = await loadOpenApiDefinition(dbhan);
    const servers = Array.isArray(openApiDefinition.servers) ? openApiDefinition.servers : [];
    return servers
      .map(server => String(server?.url ?? '').trim())
      .filter(Boolean)
      .map(url => ({
        name: url,
      }));
  },
  // Report the API's info.version and/or the OpenAPI/Swagger spec version.
  async getVersion(dbhan: any) {
    const openApiDefinition = await loadOpenApiDefinition(dbhan);
    const specVersion = String(openApiDefinition.openapi ?? openApiDefinition.swagger ?? '').trim();
    const apiVersion = String(openApiDefinition.info?.version ?? '').trim();
    // Prefer the API's own version; fall back to the spec version.
    const version = apiVersion || specVersion || 'Unknown';
    const versionText = [
      apiVersion ? `API ${apiVersion}` : null,
      specVersion ? `OpenAPI ${specVersion}` : null,
    ]
      .filter(Boolean)
      .join(', ');
    return {
      version,
      ...(versionText ? { versionText } : {}),
    };
  },
};
+65
View File
@@ -0,0 +1,65 @@
/**
 * One input of a REST endpoint. `in: 'body'` is a synthetic location used
 * for the request payload (not a standard OpenAPI parameter location).
 */
export interface RestApiParameter {
  name: string;
  in: 'query' | 'header' | 'path' | 'cookie' | 'body';
  // Human-readable type label, e.g. "string", "array<string>", "object".
  dataType?: string;
  // MIME type for body parameters, e.g. "application/json".
  contentType?: string;
  // True when the value is a list of strings (rendered as a multi-value input).
  isStringList?: boolean;
  description?: string;
  required?: boolean;
  defaultValue?: any;
  // Optional fixed choice list for the value.
  options?: Array<{ label: string; value: string }>;
  // NOTE(review): OData lookup metadata - presumably used to resolve value
  // pickers from another entity set; confirm against the consuming UI code.
  odataLookupPath?: string;
  odataLookupEntitySet?: string;
  odataLookupValueField?: string;
  odataLookupLabelField?: string;
}
/** A single callable endpoint: HTTP method + path template + its parameters. */
export interface RestApiEndpoint {
  method: 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE' | 'OPTIONS' | 'HEAD';
  path: string;
  summary?: string;
  description?: string;
  parameters: RestApiParameter[];
}
/** A named group of endpoints (e.g. derived from OpenAPI tags). */
export interface RestApiCategory {
  name: string;
  endpoints: RestApiEndpoint[];
}
/** A server base URL declared by the API definition. */
export interface RestApiServer {
  url: string;
  description?: string;
}
/** The whole parsed API surface: endpoint categories plus declared servers. */
export interface RestApiDefinition {
  categories: RestApiCategory[];
  servers?: RestApiServer[];
}
/** No authorization is sent. */
export interface RestApiAuthorization_None {
  type: 'none';
}
/** HTTP Basic authorization (user:password, base64-encoded). */
export interface RestApiAuthorization_Basic {
  type: 'basic';
  user: string;
  password: string;
}
/** Bearer token authorization. */
export interface RestApiAuthorization_Bearer {
  type: 'bearer';
  token: string;
}
/** API key sent in a custom header. */
export interface RestApiAuthorization_ApiKey {
  type: 'apikey';
  header: string;
  value: string;
}
/** Discriminated union of all supported authorization schemes (tag: `type`). */
export type RestApiAuthorization =
  | RestApiAuthorization_None
  | RestApiAuthorization_Basic
  | RestApiAuthorization_Bearer
  | RestApiAuthorization_ApiKey;
+134
View File
@@ -0,0 +1,134 @@
const { executeODataApiEndpoint } = require('./restApiExecutor');
/**
 * Fixture: a minimal REST API definition with one entity-set endpoint
 * (requiring a `company` query parameter) and the OData $metadata endpoint.
 */
function createDefinition() {
  const customersEndpoint = {
    method: 'GET',
    path: '/customers',
    parameters: [
      {
        name: 'company',
        in: 'query',
        dataType: 'string',
        required: true,
      },
    ],
  };
  const metadataEndpoint = {
    method: 'GET',
    path: '/$metadata',
    parameters: [],
  };
  return {
    categories: [
      {
        name: 'EntitySet',
        endpoints: [customersEndpoint, metadataEndpoint],
      },
    ],
  };
}
// Every supported $-prefixed system query option must be forwarded verbatim
// (arrays comma-joined, numbers/booleans stringified) next to regular params.
test('adds OData system query options from parameterValues', async () => {
  const recordedRequests = [];
  const fakeAxios = async requestArgs => {
    recordedRequests.push(requestArgs);
    return { status: 200, data: {} };
  };
  const parameterValues = {
    company: '123',
    '$top': 50,
    '$skip': '10',
    '$count': true,
    '$select': ['id', 'displayName'],
    '$orderby': 'displayName asc',
    '$filter': 'displayName ne null',
    '$search': 'dino',
    '$expand': 'addresses',
    '$format': 'application/json',
  };
  await executeODataApiEndpoint(
    createDefinition(),
    '/customers',
    'GET',
    parameterValues,
    'https://example.test/odata',
    null,
    fakeAxios
  );
  expect(recordedRequests).toHaveLength(1);
  const requestedUrl = new URL(String(recordedRequests[0].url));
  expect(requestedUrl.pathname).toBe('/odata/customers');
  const expectedQuery = {
    company: '123',
    '$top': '50',
    '$skip': '10',
    '$count': 'true',
    '$select': 'id,displayName',
    '$orderby': 'displayName asc',
    '$filter': 'displayName ne null',
    '$search': 'dino',
    '$expand': 'addresses',
    '$format': 'application/json',
  };
  for (const [key, value] of Object.entries(expectedQuery)) {
    expect(requestedUrl.searchParams.get(key)).toBe(value);
  }
});
// Aliases without the '$' prefix are normalized; values that are invalid for
// numeric/boolean options must be dropped rather than forwarded.
test('accepts non-dollar aliases and ignores invalid system option values', async () => {
  const recordedRequests = [];
  const fakeAxios = async requestArgs => {
    recordedRequests.push(requestArgs);
    return { status: 200, data: {} };
  };
  await executeODataApiEndpoint(
    createDefinition(),
    '/customers',
    'GET',
    {
      company: '123',
      top: 'abc',
      skip: -1,
      count: 'yes',
      select: ['id'],
      filter: 'id ne null',
    },
    'https://example.test/odata',
    null,
    fakeAxios
  );
  expect(recordedRequests).toHaveLength(1);
  const requestedUrl = new URL(String(recordedRequests[0].url));
  // Invalid values for numeric/boolean options are dropped entirely...
  for (const droppedOption of ['$top', '$skip', '$count']) {
    expect(requestedUrl.searchParams.get(droppedOption)).toBeNull();
  }
  // ...while valid aliased options are normalized to their $-prefixed form.
  expect(requestedUrl.searchParams.get('$select')).toBe('id');
  expect(requestedUrl.searchParams.get('$filter')).toBe('id ne null');
});
// The $metadata document must be requested with no query string at all,
// even when system query options are supplied.
test('does not add OData system query options to $metadata endpoint', async () => {
  const recordedRequests = [];
  const fakeAxios = async requestArgs => {
    recordedRequests.push(requestArgs);
    return { status: 200, data: {} };
  };
  await executeODataApiEndpoint(
    createDefinition(),
    '/$metadata',
    'GET',
    {
      '$top': 10,
      '$count': true,
    },
    'https://example.test/odata',
    null,
    fakeAxios
  );
  expect(recordedRequests).toHaveLength(1);
  const requestedUrl = new URL(String(recordedRequests[0].url));
  expect(requestedUrl.pathname).toBe('/odata/$metadata');
  expect(requestedUrl.search).toBe('');
});
+329
View File
@@ -0,0 +1,329 @@
import type { AxiosInstance } from 'axios';
import { RestApiAuthorization, RestApiDefinition, RestApiParameter } from './restApiDef';
/**
 * True when a parameter value should be sent: rejects null/undefined,
 * blank/whitespace-only strings and empty arrays; everything else (including
 * 0 and false) counts as a value.
 */
function hasValue(value: any) {
  if (value == null) return false;
  if (typeof value === 'string' && value.trim() === '') return false;
  if (Array.isArray(value) && value.length === 0) return false;
  return true;
}
/**
 * Prepare a raw user-entered value for the HTTP request:
 * - empty values become undefined (the parameter is skipped),
 * - string-list parameters are coerced to arrays of non-blank strings,
 * - JSON-ish string bodies are parsed, falling back to the raw string.
 */
function normalizeValueForRequest(value: any, parameter: RestApiParameter): any {
  if (!hasValue(value)) return undefined;
  if (parameter.isStringList) {
    if (!Array.isArray(value)) return [String(value)];
    return value.filter(item => item != null && String(item).trim() !== '');
  }
  const isStringBody = parameter.in === 'body' && typeof value === 'string';
  if (isStringBody) {
    const trimmed = value.trim();
    if (!trimmed) return undefined;
    const expectsJson = (parameter.contentType || '').includes('json') || parameter.dataType === 'object';
    if (expectsJson) {
      try {
        return JSON.parse(trimmed);
      } catch {
        // Not valid JSON - send the raw string unchanged.
        return value;
      }
    }
  }
  return value;
}
/**
 * Split an endpoint path at the first '?' into the path part and the query
 * string (returned without the leading '?'); no '?' yields an empty query.
 */
function splitPathAndQuery(path: string) {
  const raw = String(path || '');
  const questionMarkIndex = raw.indexOf('?');
  if (questionMarkIndex === -1) {
    return { pathOnly: raw, queryString: '' };
  }
  return {
    pathOnly: raw.substring(0, questionMarkIndex),
    queryString: raw.substring(questionMarkIndex + 1),
  };
}
/**
 * Mutate `headers` in place with the authorization scheme:
 * basic -> base64 Authorization, bearer -> token Authorization,
 * apikey -> custom header. Null (or 'none') adds nothing.
 */
function addAuthHeaders(headers: Record<string, string>, auth: RestApiAuthorization | null) {
  if (!auth) return;
  switch (auth.type) {
    case 'basic': {
      const credentials = Buffer.from(`${auth.user}:${auth.password}`).toString('base64');
      headers['Authorization'] = `Basic ${credentials}`;
      break;
    }
    case 'bearer':
      headers['Authorization'] = `Bearer ${auth.token}`;
      break;
    case 'apikey':
      headers[auth.header] = auth.value;
      break;
  }
}
/** Look up an endpoint by exact path + HTTP method across all categories. */
function findEndpointDefinition(
  definition: RestApiDefinition,
  endpoint: string,
  method: string
) {
  for (const category of definition.categories) {
    const match = category.endpoints.find(ep => ep.method === method && ep.path === endpoint);
    if (match) return match;
  }
  return undefined;
}
/**
 * Join a server base URL with an endpoint path. The server URL is treated as
 * a directory (a trailing slash is ensured) so its own path prefix is kept.
 * Falls back to naive concatenation when URL parsing fails; an empty server
 * returns the trimmed path as-is.
 */
function buildRequestUrl(server: string, pathOnly: string) {
  const serverPart = String(server || '').trim();
  const pathPart = String(pathOnly || '').trim();
  if (!serverPart) return pathPart;
  try {
    const base = serverPart.endsWith('/') ? serverPart : serverPart + '/';
    return new URL(pathPart.replace(/^\//, ''), base).toString();
  } catch {
    return serverPart + pathPart;
  }
}
/**
 * Append serialized query parameters to the URL and, when cookie pairs were
 * collected, set the Cookie header (mutating `headers`). Returns the final URL.
 */
function appendQueryAndCookies(
  url: string,
  query: URLSearchParams,
  cookies: string[],
  headers: Record<string, string>
) {
  let finalUrl = url;
  const serializedQuery = query.toString();
  if (serializedQuery) {
    finalUrl += (finalUrl.includes('?') ? '&' : '?') + serializedQuery;
  }
  if (cookies.length) {
    headers['Cookie'] = cookies.join('; ');
  }
  return finalUrl;
}
// All OData v4 system query options we recognize (lower-cased, $-prefixed).
const ODATA_SYSTEM_QUERY_OPTIONS = new Set([
  '$filter',
  '$select',
  '$expand',
  '$orderby',
  '$top',
  '$skip',
  '$count',
  '$search',
  '$format',
]);
// Convenience aliases: the same options without the '$' prefix.
const ODATA_SYSTEM_QUERY_ALIASES: Record<string, string> = {
  filter: '$filter',
  select: '$select',
  expand: '$expand',
  orderby: '$orderby',
  top: '$top',
  skip: '$skip',
  count: '$count',
  search: '$search',
  format: '$format',
};
/**
 * Map a raw parameter key to its canonical $-prefixed OData system query
 * option ('$Top' -> '$top', 'top' -> '$top'), or null when it is not one.
 */
function resolveODataQueryOptionKey(rawKey: string): string | null {
  const trimmedKey = String(rawKey || '').trim();
  if (!trimmedKey) return null;
  const lowerKey = trimmedKey.toLowerCase();
  if (ODATA_SYSTEM_QUERY_OPTIONS.has(lowerKey)) return lowerKey;
  return ODATA_SYSTEM_QUERY_ALIASES[lowerKey] ?? null;
}
/**
 * Normalize a raw value for an OData system query option:
 * arrays are comma-joined; $count must be a boolean (or 'true'/'false');
 * $top/$skip must be non-negative finite numbers (truncated to integers);
 * everything else is trimmed to a string. Returns null for empty/invalid
 * values so the caller can skip the option.
 */
function normalizeODataQueryOptionValue(optionKey: string, value: any): string | null {
  if (!hasValue(value)) return null;
  if (Array.isArray(value)) {
    const joinedItems = value
      .filter(item => hasValue(item))
      .map(item => String(item).trim())
      .filter(Boolean);
    return joinedItems.length ? joinedItems.join(',') : null;
  }
  switch (optionKey) {
    case '$count': {
      if (typeof value === 'boolean') return String(value);
      const normalized = String(value).trim().toLowerCase();
      return normalized === 'true' || normalized === 'false' ? normalized : null;
    }
    case '$top':
    case '$skip': {
      const parsed = Number(value);
      return Number.isFinite(parsed) && parsed >= 0 ? String(Math.trunc(parsed)) : null;
    }
    default:
      return String(value).trim();
  }
}
/**
 * Copy recognized OData system query options from the raw parameter values
 * into the query (overwriting duplicates); unrecognized keys and invalid
 * values are skipped silently.
 */
function applyODataSystemQueryOptions(query: URLSearchParams, parameterValues: Record<string, any>) {
  const entries = Object.entries(parameterValues || {});
  for (const [rawKey, rawValue] of entries) {
    const optionKey = resolveODataQueryOptionKey(rawKey);
    if (optionKey == null) continue;
    const normalizedValue = normalizeODataQueryOptionValue(optionKey, rawValue);
    if (!hasValue(normalizedValue)) continue;
    query.set(optionKey, String(normalizedValue));
  }
}
export async function executeRestApiEndpointOpenApi(
definition: RestApiDefinition,
endpoint: string,
method: string,
parameterValues: Record<string, any>,
server: string,
auth: RestApiAuthorization | null,
axios: AxiosInstance
): Promise<any> {
const endpointDef = findEndpointDefinition(definition, endpoint, method);
if (!endpointDef) {
throw new Error(`Endpoint ${method} ${endpoint} not found in definition.`);
}
const { pathOnly, queryString } = splitPathAndQuery(endpointDef.path);
let url = buildRequestUrl(server, pathOnly);
const headers: Record<string, string> = {};
const query = new URLSearchParams(queryString);
const cookies: string[] = [];
let body: any = undefined;
for (const param of endpointDef.parameters) {
const value = normalizeValueForRequest(parameterValues[param.name], param);
if (!hasValue(value) && param.in !== 'path') {
continue;
}
if (param.in === 'path') {
url = url.replace(`{${param.name}}`, encodeURIComponent(value));
} else if (param.in === 'query') {
if (Array.isArray(value)) {
for (const item of value) {
query.append(param.name, String(item));
}
} else {
query.append(param.name, String(value));
}
} else if (param.in === 'header') {
headers[param.name] = Array.isArray(value) ? value.map(item => String(item)).join(',') : String(value);
} else if (param.in === 'cookie') {
if (Array.isArray(value)) {
for (const item of value) {
cookies.push(`${encodeURIComponent(param.name)}=${encodeURIComponent(String(item))}`);
}
} else {
cookies.push(`${encodeURIComponent(param.name)}=${encodeURIComponent(String(value))}`);
}
} else if (param.in === 'body') {
body = value;
if (param.contentType && !headers['Content-Type']) {
headers['Content-Type'] = param.contentType;
}
}
}
url = appendQueryAndCookies(url, query, cookies, headers);
addAuthHeaders(headers, auth);
const resp = await axios({
method,
url,
headers,
data: body,
});
return resp;
}
export async function executeODataApiEndpoint(
definition: RestApiDefinition,
endpoint: string,
method: string,
parameterValues: Record<string, any>,
server: string,
auth: RestApiAuthorization | null,
axios: AxiosInstance
): Promise<any> {
const endpointDef = findEndpointDefinition(definition, endpoint, method);
if (!endpointDef) {
throw new Error(`Endpoint ${method} ${endpoint} not found in definition.`);
}
const { pathOnly, queryString } = splitPathAndQuery(endpointDef.path);
const metadataPath = pathOnly.replace(/\/+$/, '') === '/$metadata';
let url = buildRequestUrl(server, pathOnly);
const headers: Record<string, string> = {
Accept: 'application/json',
'OData-Version': '4.0',
};
const query = metadataPath ? new URLSearchParams() : new URLSearchParams(queryString);
const cookies: string[] = [];
let body: any = undefined;
for (const param of endpointDef.parameters) {
const value = normalizeValueForRequest(parameterValues[param.name], param);
if (!hasValue(value) && param.in !== 'path') {
continue;
}
if (param.in === 'path') {
url = url.replace(`{${param.name}}`, encodeURIComponent(value));
} else if (param.in === 'query') {
if (metadataPath) continue;
if (Array.isArray(value)) {
for (const item of value) {
query.append(param.name, String(item));
}
} else {
query.append(param.name, String(value));
}
} else if (param.in === 'header') {
headers[param.name] = Array.isArray(value) ? value.map(item => String(item)).join(',') : String(value);
} else if (param.in === 'cookie') {
if (Array.isArray(value)) {
for (const item of value) {
cookies.push(`${encodeURIComponent(param.name)}=${encodeURIComponent(String(item))}`);
}
} else {
cookies.push(`${encodeURIComponent(param.name)}=${encodeURIComponent(String(value))}`);
}
} else if (param.in === 'body') {
body = value;
if (param.contentType && !headers['Content-Type']) {
headers['Content-Type'] = param.contentType;
}
}
}
if (!metadataPath) {
applyODataSystemQueryOptions(query, parameterValues);
}
url = appendQueryAndCookies(url, query, cookies, headers);
addAuthHeaders(headers, auth);
const resp = await axios({
method,
url,
headers,
data: body,
});
return resp;
}
+15
View File
@@ -0,0 +1,15 @@
import { RestApiAuthorization } from './restApiDef';
/**
 * Build an HTTP header object for the given REST authorization scheme.
 * - basic  -> `Authorization: Basic <base64(user:password)>`
 * - bearer -> `Authorization: Bearer <token>`
 * - apikey -> custom header with the configured name/value
 * - none / null -> empty object
 */
export function buildRestAuthHeaders(auth: RestApiAuthorization | null) {
  // Typed index signature: the bare `{}` literal rejects property assignment
  // under strict TypeScript (TS7053).
  const headers: Record<string, string> = {};
  if (!auth) return headers;
  if (auth.type === 'basic') {
    const basicAuth = Buffer.from(`${auth.user}:${auth.password}`).toString('base64');
    headers['Authorization'] = `Basic ${basicAuth}`;
  } else if (auth.type === 'bearer') {
    headers['Authorization'] = `Bearer ${auth.token}`;
  } else if (auth.type === 'apikey') {
    headers[auth.header] = auth.value;
  }
  return headers;
}
+50
View File
@@ -0,0 +1,50 @@
import { driverBase } from 'dbgate-tools';
/**
 * Shared base for HTTP/REST "database" drivers: disables SQL execution,
 * declares the supported authorization schemes and decides which connection
 * form fields are visible for a given auth type / proxy configuration.
 */
export const apiDriverBase = {
  ...driverBase,
  supportExecuteQuery: false,
  getAuthTypes() {
    return [
      { title: 'No Authentication', name: 'none' },
      { title: 'Basic Authentication', name: 'basic' },
      { title: 'Bearer Token Authentication', name: 'bearer' },
      { title: 'API Key Authentication', name: 'apikey' },
    ];
  },
  // Auth-type selector is always shown; credential fields depend on the type.
  showAuthConnectionField: (field, values) => {
    if (field === 'authType') return true;
    switch (values?.authType) {
      case 'basic':
        return field === 'user' || field === 'password';
      case 'bearer':
        return field === 'authToken';
      case 'apikey':
        return field === 'apiKeyHeader' || field === 'apiKeyValue';
      default:
        return false;
    }
  },
  // Auth fields plus the HTTP proxy settings.
  showConnectionField: (field, values) => {
    if (apiDriverBase.showAuthConnectionField(field, values)) return true;
    const proxyFields = ['httpProxyUrl', 'httpProxyUser', 'httpProxyPassword'];
    return proxyFields.includes(field);
  },
};
+14
View File
@@ -0,0 +1,14 @@
{
"compilerOptions": {
"target": "ES2018",
"module": "commonjs",
"declaration": true,
"skipLibCheck": true,
"outDir": "lib",
"preserveWatchOutput": true,
"esModuleInterop": true
},
"include": [
"src/**/*"
]
}
+2 -2
View File
@@ -41,7 +41,7 @@ STORAGE_DATABASE=dbname
STORAGE_ENGINE=mysql@dbgate-plugin-mysql
```
You could find more about environment variable configuration on [DbGate docs](https://dbgate.org/docs/env-variables/) page.
You could find more about environment variable configuration on [DbGate docs](https://docs.dbgate.io/env-variables/) page.
After installing, you can run DbGate with the following command:
```sh
@@ -65,7 +65,7 @@ dbgate-serve
Then open http://localhost:3000 in your browser
## Download desktop app
You can also download binary packages for desktop app from https://dbgate.org . Or run from source code, as described on [github](https://github.com/dbgate/dbgate)
You can also download binary packages for desktop app from https://www.dbgate.io . Or run from source code, as described on [github](https://github.com/dbgate/dbgate)
## Use Oracle with Instant client (thick mode)
If you are an Oracle database user and would like to use the Oracle Instant Client (thick mode) instead of thin mode (a pure JS NPM package), please do the following:
+1 -1
View File
@@ -1,7 +1,7 @@
{
"name": "dbgate-serve",
"version": "7.0.0-alpha.1",
"homepage": "https://dbgate.org/",
"homepage": "https://www.dbgate.io/",
"repository": {
"type": "git",
"url": "https://github.com/dbgate/dbgate.git"
+1 -1
View File
@@ -3,7 +3,7 @@
"name": "dbgate-sqltree",
"main": "lib/index.js",
"typings": "lib/index.d.ts",
"homepage": "https://dbgate.org/",
"homepage": "https://www.dbgate.io/",
"repository": {
"type": "git",
"url": "https://github.com/dbgate/dbgate.git"
+55 -4
View File
@@ -1,8 +1,55 @@
import type { SqlDumper } from 'dbgate-types';
import { Condition, BinaryCondition } from './types';
import { Condition, BinaryCondition, LikeCondition } from './types';
import { dumpSqlExpression } from './dumpSqlExpression';
import { dumpSqlSelect } from './dumpSqlCommand';
/**
 * Render a LIKE condition for dialects where LIKE is expressed as a function
 * (DynamoDB): contains() only works on string attributes, so a purely numeric
 * search value is emitted as `(col = n OR contains(col, 'n'))` to match both
 * number- and string-typed attributes; otherwise the '%' wildcards are
 * stripped and a plain contains(col, value) is produced.
 */
function dumpLikeAsFunctionCondition(dmp: SqlDumper, condition: LikeCondition) {
  const searchExpr = condition.right;
  let treatAsNumber = false;
  let numericText = '';
  if (searchExpr.exprType === 'value' && typeof searchExpr.value === 'string') {
    const stripped = (searchExpr.value || '').replace(/%/g, '').trim();
    // Only plain decimal literals qualify (rejects Infinity, NaN, '1e5', ...).
    treatAsNumber = /^-?\d+(\.\d+)?$/.test(stripped);
    numericText = stripped;
  } else if (searchExpr.exprType === 'value' && typeof searchExpr.value === 'number') {
    treatAsNumber = Number.isFinite(searchExpr.value);
    numericText = String(searchExpr.value);
  }
  if (treatAsNumber) {
    // (column = value OR contains(column, 'value'))
    dmp.putRaw('(');
    dumpSqlExpression(dmp, condition.left);
    dmp.putRaw(' = ');
    dmp.put('%s', numericText);
    dmp.putRaw(' OR contains(');
    dumpSqlExpression(dmp, condition.left);
    dmp.putRaw(', ');
    dmp.put('%v', numericText);
    dmp.putRaw('))');
    return;
  }
  // contains(column, value) with '%' wildcards removed from string literals
  dmp.putRaw('contains(');
  dumpSqlExpression(dmp, condition.left);
  dmp.putRaw(', ');
  if (searchExpr.exprType === 'value') {
    const literal = searchExpr.value;
    dmp.put('%v', typeof literal === 'string' ? literal.replace(/%/g, '') : literal);
  } else {
    dumpSqlExpression(dmp, searchExpr);
  }
  dmp.putRaw(')');
}
export function dumpSqlCondition(dmp: SqlDumper, condition: Condition) {
switch (condition.conditionType) {
case 'binary':
@@ -51,9 +98,13 @@ export function dumpSqlCondition(dmp: SqlDumper, condition: Condition) {
});
break;
case 'like':
dumpSqlExpression(dmp, condition.left);
dmp.put(dmp.dialect.ilike ? ' ^ilike ' : ' ^like ');
dumpSqlExpression(dmp, condition.right);
if (dmp.dialect.likeAsFunction) {
dumpLikeAsFunctionCondition(dmp, condition);
} else {
dumpSqlExpression(dmp, condition.left);
dmp.put(dmp.dialect.ilike ? ' ^ilike ' : ' ^like ');
dumpSqlExpression(dmp, condition.right);
}
break;
case 'notLike':
dumpSqlExpression(dmp, condition.left);

Some files were not shown because too many files have changed in this diff Show More