Compare commits
236 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 137fc6b928 | |||
| 9d1d7b7e34 | |||
| 588cd39d7c | |||
| 79ebfa9b7a | |||
| 0c6b2746d1 | |||
| 978972c55c | |||
| 37854fc577 | |||
| 5537e193a6 | |||
| 0d42b2b133 | |||
| 44bd7972d4 | |||
| 5143eb39f7 | |||
| cf51883b3e | |||
| 484ca0c78a | |||
| 8f5cad0e2c | |||
| 988512a571 | |||
| f8bd380051 | |||
| 281131dbba | |||
| ea3a61077a | |||
| d1a898b40d | |||
| a521a81ef0 | |||
| 2505c61975 | |||
| ab5a54dbb6 | |||
| 44ad8fa60a | |||
| 5b27a241d7 | |||
| 084019ca65 | |||
| ba147af8fe | |||
| 1b3f4db07d | |||
| c36705d458 | |||
| 0e126cb8cf | |||
| c48183a539 | |||
| 50f380dbbe | |||
| 66023a9a68 | |||
| c3fbc3354c | |||
| a7d2ed11f3 | |||
| 6a3dc92572 | |||
| e3a4667422 | |||
| c4dd99bba9 | |||
| 588b6f9882 | |||
| 375f69ca1e | |||
| a32e5cc139 | |||
| 8e00137751 | |||
| 003db50833 | |||
| bc519c2c20 | |||
| 3b41fa8cfa | |||
| 39ed0f6d2d | |||
| 710f796832 | |||
| 9ec5fb7263 | |||
| 407db457d5 | |||
| 0c5d2cfcd1 | |||
| 87ace375bb | |||
| d010020f3b | |||
| c60227a98f | |||
| 2824681bff | |||
| 073a3e3946 | |||
| 93e91127a0 | |||
| b60a6cff56 | |||
| 1f3b1963d9 | |||
| 4915f57abb | |||
| 97c6fc97d5 | |||
| b68421bbc3 | |||
| 2d10559754 | |||
| b398a7b546 | |||
| 1711d2102d | |||
| 97cea230f3 | |||
| b6a0fe9465 | |||
| 06c50659bb | |||
| 244b47f548 | |||
| b72a244d93 | |||
| c1e069d4dc | |||
| f99994085a | |||
| 32fd0dd78c | |||
| a557b6b2b4 | |||
| e84583c776 | |||
| a548b0d543 | |||
| de94f15383 | |||
| 7045d986ef | |||
| de7ae9cf09 | |||
| ab3d6888dc | |||
| 98a70891f3 | |||
| 52e7326a2c | |||
| bfd2e3b07a | |||
| 799f5e30d3 | |||
| d3e544c3c0 | |||
| 866fd55834 | |||
| 74ce1fba32 | |||
| a11b93b4cc | |||
| 066f2baa03 | |||
| e02396280f | |||
| a654c80746 | |||
| 3b50f4bd7c | |||
| cc1f77f5bc | |||
| 381fce4a82 | |||
| bc3be97cee | |||
| 1c389208a7 | |||
| cbeed2d3d0 | |||
| 3d974ad144 | |||
| 749042a05d | |||
| 52413b82ee | |||
| 212a7ec083 | |||
| cee94fe113 | |||
| e1ead2519a | |||
| 80330a25ac | |||
| 508470e970 | |||
| bc64b4b5c7 | |||
| 48d8494ead | |||
| 2a51d2ed96 | |||
| cfabcc7bf6 | |||
| 90fc8fd0fc | |||
| ff54533e33 | |||
| 2072f0b5ba | |||
| 6efc720a45 | |||
| c7cb1efe9c | |||
| e193531246 | |||
| 2aa53f414e | |||
| 843c15d754 | |||
| fb19582088 | |||
| 8040466cbe | |||
| 302b4d7acd | |||
| a8ccc24d46 | |||
| b2fb071a7b | |||
| 204d7b97d5 | |||
| f3da709aac | |||
| 0ab8afb838 | |||
| d50999547f | |||
| 04741b0eba | |||
| ba86fe32e7 | |||
| 9deb7d7fdc | |||
| 55eb64e5ca | |||
| a5f50f3f2b | |||
| 47214eb5b3 | |||
| 599509d417 | |||
| 9d366fc359 | |||
| 0e1ed0bde6 | |||
| 6ad7824bf2 | |||
| 1174f51c07 | |||
| 1950dda1ab | |||
| 8231b6d5be | |||
| 0feacbe6eb | |||
| 80b5f5adca | |||
| 13650f36e6 | |||
| 3f58d99069 | |||
| 0c8a025cf6 | |||
| 5014df4859 | |||
| 34a491e2ef | |||
| 884e4ca88e | |||
| a670c5e86c | |||
| af1fba79be | |||
| ac44de0bf4 | |||
| f013a241ce | |||
| 0e29a7206d | |||
| 689b3f299c | |||
| 02ccb990bd | |||
| 61fe4f0d57 | |||
| 0a920195d5 | |||
| 18896bf56d | |||
| 098c9041a0 | |||
| 61a41d8eb2 | |||
| e76073d5c8 | |||
| 8c34added7 | |||
| 66fc6b93ae | |||
| 881d5a8008 | |||
| 5d263de954 | |||
| c8d0494000 | |||
| a9b48b5aa5 | |||
| f08a951eef | |||
| 8758a4bc86 | |||
| aae328f8c8 | |||
| 1953578a33 | |||
| 543bdd79d9 | |||
| e0e1a3c8e4 | |||
| f1d84f448e | |||
| 7c5c21f15d | |||
| 41ffaeebe3 | |||
| 5d9b44b647 | |||
| a18d2c5650 | |||
| e0379bcf12 | |||
| e91242d5a2 | |||
| 8177187b3a | |||
| 6b3e1144bc | |||
| dfec88f52d | |||
| b8df67659a | |||
| 861da64581 | |||
| ab147a2cc9 | |||
| e13191e894 | |||
| 7f69ea8dc0 | |||
| ef2140696b | |||
| 4607900c3b | |||
| 3258d55796 | |||
| 35e6966c39 | |||
| 885756b259 | |||
| 5fbc1b937c | |||
| 7e444e9fc2 | |||
| c051237914 | |||
| 3855b0dd28 | |||
| afcc9e096a | |||
| f4df1fbff4 | |||
| 45b3a5af91 | |||
| f54b18e652 | |||
| b1210d19ad | |||
| 21cbcc79c6 | |||
| a7d0c8fb0f | |||
| 1e3dc54d81 | |||
| 48f294fd83 | |||
| 298ad0de4b | |||
| c7953f9231 | |||
| afd97eae7d | |||
| f4e558b7e8 | |||
| 12c99c646e | |||
| 6c1a2eedbe | |||
| 8a73216035 | |||
| c6a93f12f7 | |||
| 09f44d94b3 | |||
| c26748154a | |||
| 2474f915d4 | |||
| 53f940cd23 | |||
| 991b648854 | |||
| 663f057a9a | |||
| 61963fb824 | |||
| bdf3cf5b36 | |||
| 5cc459594b | |||
| 8d315e52df | |||
| 48a24a8704 | |||
| cdce52f0e5 | |||
| d12ccbeac4 | |||
| 0b1620105a | |||
| 2ae9c98acb | |||
| ed00848a1e | |||
| 06f7741dbf | |||
| 8d3b7cace8 | |||
| 8f0775e337 | |||
| 444cb6aa0c | |||
| b4acc19ea2 | |||
| 1ef17cd861 | |||
| e564e930e5 | |||
| a30badbbe0 | |||
| 721fdf09b3 |
@@ -47,7 +47,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 6a2cf34e2dc4bc51de51b7440c3b18164ffbf46c
|
||||
ref: 5baa88d0ad253537298e911868579bae0835888d
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
@@ -47,7 +47,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 6a2cf34e2dc4bc51de51b7440c3b18164ffbf46c
|
||||
ref: 5baa88d0ad253537298e911868579bae0835888d
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
@@ -39,7 +39,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 6a2cf34e2dc4bc51de51b7440c3b18164ffbf46c
|
||||
ref: 5baa88d0ad253537298e911868579bae0835888d
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
@@ -90,14 +90,6 @@ jobs:
|
||||
prerelease: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Run `packer init` for Azure
|
||||
run: |
|
||||
cd ../dbgate-merged/packer
|
||||
packer init ./azure-ubuntu.pkr.hcl
|
||||
- name: Run `packer build` for Azure
|
||||
run: |
|
||||
cd ../dbgate-merged/packer
|
||||
packer build ./azure-ubuntu.pkr.hcl
|
||||
- name: Run `packer init` for AWS
|
||||
run: |
|
||||
cd ../dbgate-merged/packer
|
||||
@@ -114,16 +106,6 @@ jobs:
|
||||
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
|
||||
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}}
|
||||
- name: Delete old Azure VMs
|
||||
run: |
|
||||
cd ../dbgate-merged/packer
|
||||
chmod +x delete-old-azure-images.sh
|
||||
./delete-old-azure-images.sh
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{secrets.AZURE_CLIENT_ID}}
|
||||
AZURE_CLIENT_SECRET: ${{secrets.AZURE_CLIENT_SECRET}}
|
||||
AZURE_TENANT_ID: ${{secrets.AZURE_TENANT_ID}}
|
||||
AZURE_SUBSCRIPTION_ID: ${{secrets.AZURE_SUBSCRIPTION_ID}}
|
||||
- name: Delete old AMIs (AWS)
|
||||
run: |
|
||||
cd ../dbgate-merged/packer
|
||||
|
||||
@@ -44,7 +44,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 6a2cf34e2dc4bc51de51b7440c3b18164ffbf46c
|
||||
ref: 5baa88d0ad253537298e911868579bae0835888d
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
@@ -35,7 +35,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 6a2cf34e2dc4bc51de51b7440c3b18164ffbf46c
|
||||
ref: 5baa88d0ad253537298e911868579bae0835888d
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
@@ -56,7 +56,10 @@ jobs:
|
||||
working-directory: packages/sqltree
|
||||
run: |
|
||||
npm publish --tag "$NPM_TAG"
|
||||
|
||||
- name: Publish rest
|
||||
working-directory: packages/rest
|
||||
run: |
|
||||
npm publish --tag "$NPM_TAG"
|
||||
- name: Publish api
|
||||
working-directory: packages/api
|
||||
run: |
|
||||
|
||||
@@ -30,7 +30,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 6a2cf34e2dc4bc51de51b7440c3b18164ffbf46c
|
||||
ref: 5baa88d0ad253537298e911868579bae0835888d
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
@@ -132,6 +132,10 @@ jobs:
|
||||
image: redis
|
||||
ports:
|
||||
- '16011:6379'
|
||||
dynamodb:
|
||||
image: amazon/dynamodb-local
|
||||
ports:
|
||||
- '16015:8000'
|
||||
mssql:
|
||||
image: mcr.microsoft.com/mssql/server
|
||||
ports:
|
||||
|
||||
Vendored
+6
-1
@@ -2,5 +2,10 @@
|
||||
"jestrunner.jestCommand": "node_modules/.bin/cross-env DEVMODE=1 LOCALTEST=1 node_modules/.bin/jest",
|
||||
"cSpell.words": [
|
||||
"dbgate"
|
||||
]
|
||||
],
|
||||
"chat.tools.terminal.autoApprove": {
|
||||
"yarn workspace": true,
|
||||
"yarn --cwd packages/rest": true,
|
||||
"yarn --cwd packages/web": true
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
# AGENTS
|
||||
|
||||
## Rules
|
||||
|
||||
- In newly added code, always use `DBGM-00000` for message/error codes; do not introduce new numbered DBGM codes such as `DBGM-00316`.
|
||||
- GUI uses Svelte4 (packages/web)
|
||||
- GUI is tested with E2E tests in `e2e-tests` folder, using Cypress. Use data-testid attribute in components to make them easier to test.
|
||||
- data-testid format: ComponentName_identifier. Use reasonable identifiers
|
||||
+222
-39
@@ -1,18 +1,73 @@
|
||||
# ChangeLog
|
||||
|
||||
Builds:
|
||||
- docker - build
|
||||
- npm - npm package dbgate-serve
|
||||
- app - classic electron app
|
||||
- mac - application for macOS
|
||||
- linux - application for linux
|
||||
- win - application for Windows
|
||||
|
||||
- docker - build
|
||||
- npm - npm package dbgate-serve
|
||||
- app - classic electron app
|
||||
- mac - application for macOS
|
||||
- linux - application for linux
|
||||
- win - application for Windows
|
||||
|
||||
## 7.1.5
|
||||
|
||||
- FIXED: Issues with cloud and file loading
|
||||
- ADDED: Support for displaying MongoDB UUID #1394
|
||||
- ADDED: SVG icon sanitization
|
||||
|
||||
## 7.1.4
|
||||
|
||||
- FIXED: NPM installation failure #1375
|
||||
|
||||
## 7.1.3
|
||||
|
||||
- FIXED: "Add files" button in import/export #1373
|
||||
- FIXED: Importing XLSX files #1379
|
||||
- ADDED: Ability to set default transaction isolation level for connections #1376
|
||||
- ADDED: Option to set transaction isolation level directly in Query Tab #1376
|
||||
- ADDED: Filtering of SQL Scripts by connection and database name #1377
|
||||
- ADDED: Proxy configuration support for OData, OpenAPI and GraphQL (Premium) #1381
|
||||
- CHANGED: Updated DuckDB version to 1.5.0 #1386
|
||||
- FIXED: DuckDB column order in query result #1385
|
||||
- FIXED: Administration panel not displayed for authorized users (Team Premium) #1374
|
||||
|
||||
## 7.1.2
|
||||
|
||||
- ADDED: GraphQL chat - AI chat with GraphQL endpoint (Premium)
|
||||
- FIXED: Error "400 Provider returned error" in Database Chat (Premium)
|
||||
- CHANGED: Upgraded AI components to latest versions, improved stability and performance of AI features (Premium)
|
||||
- ADDED: New LLM models available (GPT-5.1 Codex Mini - now default), Claude Haiku 4.5
|
||||
- CHANGED: Upgraded some internal building components (svelte-preprocess, typescript)
|
||||
|
||||
## 7.1.1
|
||||
|
||||
- CHANGED: Fixed some DynamoDB issues, improved filtering performance
|
||||
- FIXED: Afilter filter scroll issue #1370
|
||||
- FIXED: Team Premium - filtering by connection in database and table permissions
|
||||
- FIXED: Team Premium - Creating role and user in PostgreSQL - settings is remembered without reopening new role/user
|
||||
- FIXED: Team Premium - don't show errors "Connection permission not granted" when no connection is selected
|
||||
- FIXED: Firebird - improved connectivity & table loading #1324
|
||||
- ADDED: New GraphQL query option, changed GraphQL query icon (Premium)
|
||||
|
||||
## 7.1.0
|
||||
|
||||
- ADDED: Support for Amazon DynamoDB (Premium)
|
||||
- ADDED: Connect to API endpoints - OpenAPI (Swagger), GraphQL and oData (Premium)
|
||||
- FIXED: Redis key list infinite loading when first key hierarchy segment is numeric (e.g. "0:profile:1234") #1363
|
||||
- FIXED: Sum of PostgreSQL numeric values always 0 #1354
|
||||
- FIXED: SQL SERVER Table structure key duplication #1351
|
||||
- FIXED: SQL Server - Incorrect SQL generated for 'Group by Year/Month/Day' #1350
|
||||
- ADDED: Choose drivers available in connection dialog
|
||||
- FIXED: Show query results for CTE (WITH) queries
|
||||
- CHANGED: Used rolldown bundler instead of legacy rollup
|
||||
|
||||
## 7.0.6
|
||||
|
||||
- ADDED: Reset password for Team Premium edition
|
||||
- ADDED: Encrypting passwords sent to frontend when using SHELL_CONNECTION=1 in Docker Community edition #1357
|
||||
|
||||
## 7.0.4
|
||||
|
||||
- FIXED: MS SQL server export to CSV does not convert bit FALSE to 0 #1276
|
||||
- ADDED: MySQL FULLTEXT support #1305
|
||||
- FIXED: Error messages in Chinese will display garbled characters(MS SQL over ODBC) #1321
|
||||
@@ -25,6 +80,7 @@ Builds:
|
||||
- CHANGED: Improved custom connection color palette
|
||||
|
||||
## 7.0.3
|
||||
|
||||
- FIXED: Optimalized loading MySQL primary keys #1261
|
||||
- FIXED: Test connection now works for MS Entra authentication #1315
|
||||
- FIXED: SQL Server - Unable to use 'Is Empty or Null' or 'Has Not Empty Value' filters on a field with data type TEXT #1338
|
||||
@@ -36,6 +92,7 @@ Builds:
|
||||
- CHANGED: Upgraded node for DbGate AWS distribution
|
||||
|
||||
## 7.0.1
|
||||
|
||||
- FIXED: Foreign key actions not detected on PostgreSQL #1323
|
||||
- FIXED: Vulnerabilities in bundled dependencies: axios, cross-spawn, glob #1322
|
||||
- FIXED: The JsonB field in the cell data view always displays as null. #1320
|
||||
@@ -46,6 +103,7 @@ Builds:
|
||||
- ADDED: Default editor theme is part of application theme now
|
||||
|
||||
## 7.0.0
|
||||
|
||||
- CHANGED: New design of application, new theme system
|
||||
- ADDED: Theme AI assistant - create custom themes using AI (Premium)
|
||||
- CHANGED: Themes are now defined in JSON files, custom themes could be shared via DbGate Cloud
|
||||
@@ -61,12 +119,15 @@ Builds:
|
||||
- ADDED: Widget for currently opened tabs
|
||||
|
||||
## 6.8.2
|
||||
|
||||
- FIXED: Initialize storage database from envoronment variables failed with PostgreSQL
|
||||
|
||||
## 6.8.1
|
||||
|
||||
- FIXED: Won't navigate to the relevant field on click of a field in columns #1303
|
||||
|
||||
## 6.8.0
|
||||
|
||||
- ADDED: Form cell view for detailed data inspection and editing in data grids, with multi-row bulk editing support
|
||||
- CHANGED: Cell data sidebar moved to right side, now is part of data grid
|
||||
- FIXED: Improved widget resizing algorithm
|
||||
@@ -85,11 +146,13 @@ Builds:
|
||||
- ADDED: Import connections from environment variables (Team Premium)
|
||||
|
||||
## 6.7.3
|
||||
|
||||
- FIXED: Fixed problem in analyser core - in PostgreSQL, after dropping table, dropped table still appeared in structure
|
||||
- FIXED: PostgreSQL numeric columns do not align right #1254
|
||||
- ADDED: Custom thousands separator #1213
|
||||
|
||||
## 6.7.2
|
||||
|
||||
- CHANGED: Settings modal redesign - now is settings opened in tab instead of modal, similarily as in VSCode
|
||||
- FIXED: Fixed search in table shortcuts #1273
|
||||
- CHANGED: Improved foreign key editor UX
|
||||
@@ -99,6 +162,7 @@ Builds:
|
||||
- CHANGED: Improved storage of settings, especially for Team Premium edition
|
||||
|
||||
## 6.7.1
|
||||
|
||||
- ADDED: LANGUAGE environment variable for the web version. #1266
|
||||
- ADDED: New localizations (Italian, Portugese (Brazil), Japanese)
|
||||
- ADDED: Option to detect language from browser settings in web version
|
||||
@@ -112,6 +176,7 @@ Builds:
|
||||
- ADDED: Other files types supported in Team Premium edition (diagrams, query design, perspectives, import/export jobs, shell scripts, database compare jobs)
|
||||
|
||||
## 6.7.0
|
||||
|
||||
- ADDED: Added localization support, now you can use DbGate in multiple languages (French, Spanish, German, Czech, Slovak, Simplified Chinese) #347 #705 #939 #1079
|
||||
- CHANGED: Solved many issues with binary fields, huge performance improvements in binary fields processing
|
||||
- FIXED: Export to CSV produces empty file #1247
|
||||
@@ -125,13 +190,16 @@ Builds:
|
||||
- FIXED: Horizontal scrolling on macOS trackpad/Magic Mouse #1250
|
||||
|
||||
## 6.6.12
|
||||
|
||||
- FIXED: Cannot paste license key on Mac (special commands like copy/paste were disabled on license screen)
|
||||
|
||||
## 6.6.11
|
||||
|
||||
- FIXED: Fixed theming on application startup
|
||||
- CHANGED: Improved licensing page
|
||||
|
||||
## 6.6.10
|
||||
|
||||
- FIXED: License from environment variable is not refreshed #1245
|
||||
- FIXED: connection closing / reconnecting #1237
|
||||
- ADDED: retain history across multiple queries #1236
|
||||
@@ -139,19 +207,22 @@ Builds:
|
||||
- FIXED: Not possible to scroll the data view horizontally by pressing shift and scroll mouse middle button on Mac #453
|
||||
- FIXED: Expired trial workflow (Premium)
|
||||
- ADDED: Column name collision resolving #1234 (MySQL)
|
||||
|
||||
|
||||
## 6.6.8
|
||||
|
||||
- CHANGED: Windows executable now uses Azure trusted signing certificate
|
||||
- CHANGED: NPM packages now use GitHub OIDC provenance signing for better security
|
||||
- CHANGED: Some features moved to Premium edition (master/detail views, FK lookups, column expansion, split view, advanced export/import, data archives, grouping, macros)
|
||||
|
||||
## 6.6.6
|
||||
|
||||
- ADDED: Allow disable/re-enable filter #1174
|
||||
- ADDED: Close right side tabs #1219
|
||||
- ADDED: Ability disable execute current line in query editor #1209
|
||||
- ADDED: Support for Redis Cluster #1204 (Premium)
|
||||
|
||||
## 6.6.5
|
||||
|
||||
- ADDED: SQL AI assistant - powered by database chat, could help you to write SQL queries (Premium)
|
||||
- ADDED: Explain SQL error (powered by AI) (Premium)
|
||||
- ADDED: Database chat (and SQL AI Assistant) now supports showing charts (Premium)
|
||||
@@ -160,6 +231,7 @@ Builds:
|
||||
- FIXED: Cannot open up large JSON file #1215
|
||||
|
||||
## 6.6.4
|
||||
|
||||
- ADDED: AI Database chat now supports much more LLM models. (Premium)
|
||||
- ADDED: Possibility to use your own API key with OPENAI-compatible providers (OpenRouter, Antropic...)
|
||||
- ADDED: Possibility to use self-hosted own LLM (eg. Llama)
|
||||
@@ -173,11 +245,13 @@ Builds:
|
||||
- CHANGED: Community edition now supports shared folders in read-only mode
|
||||
|
||||
## 6.6.3
|
||||
|
||||
- FIXED: Error “db.getCollection(…).renameCollection is not a function” when renaming collection in dbGate #1198
|
||||
- FIXED: Can't list databases from Azure SQL SERVER #1197
|
||||
- ADDED: Save zoom level in electron apps
|
||||
|
||||
## 6.6.2
|
||||
|
||||
- ADDED: List of processes, ability to kill process (Server summary) #1178
|
||||
- ADDED: Database and table permissions (Team Premium edition)
|
||||
- ADDED: Redis search box - Scan all #1191
|
||||
@@ -187,6 +261,7 @@ Builds:
|
||||
- FIXED: Executing queries for SQLite crash #1195
|
||||
|
||||
## 6.6.1
|
||||
|
||||
- ADDED: Support for Mongo shell (Premium) - #1114
|
||||
- FIXED: Support for BLOB in Oracle #1181
|
||||
- ADDED: Connect to named SQL Server instance #340
|
||||
@@ -196,12 +271,14 @@ Builds:
|
||||
- CHANGED: Improved logging system, added related database and connection to logs metadata
|
||||
|
||||
## 6.6.0
|
||||
|
||||
- ADDED: Database chat - AI powered chatbot, which knows your database (Premium)
|
||||
- ADDED: Firestore support (Premium)
|
||||
- REMOVED: Query AI assistant (replaced by Database Chat) (Premium)
|
||||
- FIXED: Chart permissions were ignored (Premium)
|
||||
- REMOVED: Query AI assistant (replaced by Database Chat) (Premium)
|
||||
- FIXED: Chart permissions were ignored (Premium)
|
||||
|
||||
## 6.5.6
|
||||
|
||||
- ADDED: New object window - quick access to most common functions
|
||||
- ADDED: Possibility to disable split query by empty line #1162
|
||||
- ADDED: Possibility to opt out authentication #1152
|
||||
@@ -210,6 +287,7 @@ Builds:
|
||||
- FIXED: Fixed some minor problems of charts
|
||||
|
||||
## 6.5.5
|
||||
|
||||
- ADDED: Administer cloud folder window
|
||||
- CHANGED: Cloud menu redesign
|
||||
- ADDED: Audit log (for Team Premium edition)
|
||||
@@ -219,25 +297,29 @@ Builds:
|
||||
- ADDED: Added chart data type detection
|
||||
- FIXED: Fixed chart displaying problems
|
||||
- FIXED: Fixed exporting chart to HTML
|
||||
- CHANGED: Choose COUNT measure without selecting underlying ID field (use virtual __count)
|
||||
- CHANGED: Choose COUNT measure without selecting underlying ID field (use virtual \_\_count)
|
||||
- FIXED: Problems with authentification administration, especially for Postgres storage
|
||||
- CHANGED: Anonymous autentification (in Team Premium) is now by default disabled
|
||||
- CHANGED: Anonymous autentification (in Team Premium) is now by default disabled
|
||||
|
||||
## 6.5.3
|
||||
|
||||
- CHANGED: Improved DbGate Cloud sign-in workflow
|
||||
- FIXED: Some fixes and error handling in new charts engine
|
||||
- ADDED: Charts - ability to choose aggregate function
|
||||
- CHANGED: Improved About window
|
||||
|
||||
## 6.5.2
|
||||
|
||||
- CHANGED: Autodetecting charts is disabled by default #1145
|
||||
- CHANGED: Improved chart displaying workflow
|
||||
- ADDED: Ability to close chart
|
||||
|
||||
## 6.5.1
|
||||
|
||||
- FIXED: DbGate Cloud e-mail sign-in method for desktop clients
|
||||
|
||||
## 6.5.0
|
||||
|
||||
- ADDED: DbGate cloud - online storage for connections, SQL scripts and other objects
|
||||
- ADDED: Public knowledge base - common SQL scripts for specific DB engines (table sizes, index stats etc.)
|
||||
- ADDED: Query results could be visualised in charts (Premium)
|
||||
@@ -258,7 +340,7 @@ Builds:
|
||||
|
||||
## 6.4.2
|
||||
|
||||
- ADDED: Source label to docker container #1105
|
||||
- ADDED: Source label to docker container #1105
|
||||
- FIXED: DbGate restart needed to take effect after trigger is created/deleted on mariadb #1112
|
||||
- ADDED: View PostgreSQL query console output #1108
|
||||
- FIXED: Single quote generete MySql error #1107
|
||||
@@ -268,6 +350,7 @@ Builds:
|
||||
- FIXED: Fixed loading Redis keys with :: in key name
|
||||
|
||||
## 6.4.0
|
||||
|
||||
- ADDED: DuckDB support
|
||||
- ADDED: Data deployer (Premium)
|
||||
- ADDED: Compare data between JSON lines file in archive and database table
|
||||
@@ -289,6 +372,7 @@ Builds:
|
||||
- CHANGED: Amazon and Azure instalations are not auto-upgraded by default
|
||||
|
||||
## 6.3.3
|
||||
|
||||
- CHANGED: New administration UI, redesigned administration of users, connections and roles
|
||||
- ADDED: Encrypting passwords in team-premium edition
|
||||
- ADDED: Show scale bar on map #1090
|
||||
@@ -298,6 +382,7 @@ Builds:
|
||||
- ADDED: Line Wrap for JSON viewer #768
|
||||
|
||||
### 6.3.2
|
||||
|
||||
- ADDED: "Use system theme" switch, use changed system theme without restart #1084
|
||||
- ADDED: "Skip SETNAME instruction" option for Redis #1077
|
||||
- FIXED: Clickhouse views are now available even for user with limited permissions #1076
|
||||
@@ -310,6 +395,7 @@ Builds:
|
||||
- FIXED: Correctly end connection process after succesful/unsuccesful connect
|
||||
|
||||
### 6.3.0
|
||||
|
||||
- ADDED: Support for libSQL and Turso (Premium)
|
||||
- ADDED: Native backup and restore database for MySQL and PostgreSQL (Premium)
|
||||
- REMOVED: DbGate internal dump export for MySQL (replaced with call of mysqldump)
|
||||
@@ -321,6 +407,7 @@ Builds:
|
||||
- FIXED: Linux Appimage crash => A JavaScript error occurred in the main process #1065 , #1067
|
||||
|
||||
### 6.2.1
|
||||
|
||||
- ADDED: Commit/rollback and autocommit in scripts #1039
|
||||
- FIXED: Doesn't import all the records from MongoDB #1044
|
||||
- ADDED: Show server name alongside database name in title of the tab group #1041
|
||||
@@ -333,6 +420,7 @@ Builds:
|
||||
- CHANGED: Upgraded SQLite engine version
|
||||
|
||||
### 6.2.0
|
||||
|
||||
- ADDED: Query AI Assistant (Premium)
|
||||
- ADDED: Cassandra database support
|
||||
- ADDED: XML cell data view
|
||||
@@ -345,13 +433,16 @@ Builds:
|
||||
- CHANGED: Open real executed query, when datagrid shows loading error
|
||||
|
||||
### 6.1.6
|
||||
|
||||
- FIXED: Hotfix build process for premium edition
|
||||
|
||||
### 6.1.5
|
||||
|
||||
- FIXED: Serious security hotfix (for Docker and NPM, when using LOGIN and PASSWORD environment variables or LOGIN_PASSWORD_xxx)
|
||||
- no changes for desktop app and for Team premium edition, when using storage DB
|
||||
|
||||
### 6.1.4
|
||||
|
||||
- CHANGED: Show Data/Structure button in one place #1015
|
||||
- ADDED: Data view coloring (every second row) #1014
|
||||
- ADDED: Pin icon for tab in preview mode (#1013)
|
||||
@@ -366,11 +457,12 @@ Builds:
|
||||
- ADDED: Redis JSON format for String values #852
|
||||
|
||||
### 6.1.3
|
||||
|
||||
- FIXED: Fulltext search now shows correctly columns and SQL code lines
|
||||
- ADDED: Configuration of SSH tunnel local host (IPv4 vs IPv6). Should fix majority of SSH tunnel problems
|
||||
- FIXED: Handled SSH tunnel connection error, now it shows error instead of connecting forever
|
||||
- ADDED: Support of triggers (SQLite)
|
||||
- ADDED: Create, drop trigger
|
||||
- ADDED: Create, drop trigger
|
||||
- ADDED: Support for MySQL scheduled events
|
||||
- FIXED: Cannot connect to DB using askUser/askPassword mode #995
|
||||
- FIXED: Filtering in Oracle #992
|
||||
@@ -378,6 +470,7 @@ Builds:
|
||||
- ADDED: Introduced E2E Cypress tests, test refactor
|
||||
|
||||
### 6.1.1
|
||||
|
||||
- ADDED: Trigger support (SQL Server, PostgreSQL, MySQL, Oracle)
|
||||
- FIXED: PostgreSQL and Oracle export #970
|
||||
- FIXED: Cursor Becomes Stuck When Escaping "Case" #954
|
||||
@@ -385,6 +478,7 @@ Builds:
|
||||
- FIXED: Search in packed list
|
||||
|
||||
### 6.1.0
|
||||
|
||||
- ADDED: Fulltext search in DB model and connections, highlight searched names
|
||||
- ADDED: Tab preview mode configuration #963
|
||||
- CHANGED: Single-click to open server connection/database + ability to configure this #959
|
||||
@@ -401,6 +495,7 @@ Builds:
|
||||
- ADDED: Display comment into tables and column list #755
|
||||
|
||||
### 6.0.0
|
||||
|
||||
- ADDED: Order or filter the indexes for huge tables #922
|
||||
- ADDED: Empty string filters
|
||||
- CHANGED: (Premium) Workflow for new installation (used in Docker and AWS distribution)
|
||||
@@ -433,6 +528,7 @@ Builds:
|
||||
- ADDED: Show SQL quick view
|
||||
|
||||
### 5.5.6
|
||||
|
||||
- FIXED: DbGate process consumes 100% after UI closed - Mac, Linux (#917, #915)
|
||||
- FIXED: Correctly closing connection behind SSH tunnel (#920)
|
||||
- FIXED: Updating MongoDB documents on MongoDB 4 (#916)
|
||||
@@ -440,6 +536,7 @@ Builds:
|
||||
- FIXED: (Premium) Better handling of connection storage errors
|
||||
|
||||
### 5.5.5
|
||||
|
||||
- ADDED: AWS IAM authentication for MySQL, MariaDB, PostgreSQL (Premium)
|
||||
- FIXED: Datitme filtering #912
|
||||
- FIXED: Load redis keys
|
||||
@@ -450,6 +547,7 @@ Builds:
|
||||
- FIXED: Save connection params in administration for MS SQL and Postgres storages (Team Premium)
|
||||
|
||||
### 5.5.4
|
||||
|
||||
- FIXED: correct handling when use LOGIN and PASSWORD env variables #903
|
||||
- FIXED: fixed problems in dbmodel commandline tool
|
||||
- ADDED: dbmodel - allow connection defined in environment variables
|
||||
@@ -461,6 +559,7 @@ Builds:
|
||||
- ADDED: (Premium) Show purchase button after trial license is expired
|
||||
|
||||
### 5.5.3
|
||||
|
||||
- FIXED: Separate schema mode #894 - for databases with many schemas
|
||||
- FIXED: Sort by UUID column in POstgreSQL #895
|
||||
- ADDED: Load pg_dump outputs #893
|
||||
@@ -470,9 +569,11 @@ Builds:
|
||||
- FIXED: MS Entra authentication for Azure SQL
|
||||
|
||||
### 5.5.2
|
||||
|
||||
- FIXED: MySQL, PostgreSQL readonly conections #900
|
||||
|
||||
### 5.5.1
|
||||
|
||||
- ADDED: Clickhouse support (#532)
|
||||
- ADDED: MySQL - specify table engine, show table engine in table list
|
||||
- FIXED: Hidden primary key name in PK editor for DB engines with anonymous PK (MySQL)
|
||||
@@ -500,6 +601,7 @@ Builds:
|
||||
- ADDED: (Premium) MS Entra authentization for Azure SQL databases
|
||||
|
||||
### 5.4.4
|
||||
|
||||
- CHANGED: Improved autoupdate, notification is now in app
|
||||
- CHANGED: Default behaviour of autoupdate, new version is downloaded after click of "Download" button
|
||||
- ADDED: Ability to configure autoupdate (check only, check+download, don't check)
|
||||
@@ -508,14 +610,17 @@ Builds:
|
||||
- FIXED: Fixes following issues: #886, #865, #782, #375
|
||||
|
||||
### 5.4.2
|
||||
|
||||
- FIXED: DbGate now works correctly with Oracle 10g
|
||||
- FIXED: Fixed update channel for premium edition
|
||||
|
||||
### 5.4.1
|
||||
|
||||
- FIXED: Broken older plugins #881
|
||||
- ADDED: Premium edition - "Start trial" button
|
||||
|
||||
### 5.4.0
|
||||
|
||||
- ADDED: Support for CosmosDB (Premium only)
|
||||
- ADDED: Administration UI (Premium only)
|
||||
- ADDED: New application icon
|
||||
@@ -532,10 +637,12 @@ Builds:
|
||||
- FIXED: Script with escaped backslash causes erro #880
|
||||
|
||||
### 5.3.4
|
||||
|
||||
- FIXED: On blank system does not start (window does not appear) #862
|
||||
- FIXED: Missing Execute, Export bar #861
|
||||
|
||||
### 5.3.3
|
||||
|
||||
- FIXED: The application Window is not visible when openning after changing monitor configuration. #856
|
||||
- FIXED: Multi column filter is broken for Postgresql #855
|
||||
- ADDED: Do not display internal timescaledb objects in postgres databases #839
|
||||
@@ -543,12 +650,14 @@ Builds:
|
||||
- FIXED: Cannot filter by uuid field in psql #538
|
||||
|
||||
### 5.3.1
|
||||
|
||||
- FIXED: Column sorting on query tab not working #819
|
||||
- FIXED: Postgres Connection stays in "Loading database structure" until reloading the page #826
|
||||
- FIXED: Cannot read properties of undefined (reading 'length') on Tables #824
|
||||
- FIXED: Redshift doesn't show tables when connected #816
|
||||
|
||||
### 5.3.0
|
||||
|
||||
- CHANGED: New Oracle driver, much better Oracle support. Works now also in docker distribution
|
||||
- FIXED: Connection to oracle with service name #809
|
||||
- ADDED: Connect to redis using a custom username #807
|
||||
@@ -557,18 +666,20 @@ Builds:
|
||||
- ADDED: Switch connection for opened file #814
|
||||
|
||||
### 5.2.9
|
||||
|
||||
- FIXED: PostgresSQL doesn't show tables when connected #793 #805
|
||||
- FIXED: MongoDB write operations fail #798 #802
|
||||
- FIXED: Elecrron app logging losed most of log messages
|
||||
- FIXED: Connection error with SSH tunnel
|
||||
- FIXED: Connection error with SSH tunnel
|
||||
- ADDED: option to disable autoupgrades (with --disable-auto-upgrade)
|
||||
- ADDED: Send error context to github gist
|
||||
|
||||
### 5.2.8
|
||||
|
||||
- FIXED: file menu save and save as not working
|
||||
- FIXED: query editor on import/export screen overlaps with selector
|
||||
- FIXED: Fixed inconsistencies in max/unmaximize window buttons
|
||||
- FIXED: shortcut for select all
|
||||
- FIXED: shortcut for select all
|
||||
- FIXED: download with auth header
|
||||
- CHANGED: Upgraded database drivers for mysql, postgres, sqlite, mssql, mongo, redis
|
||||
- CHANGED: Upgraded electron version (now using v30)
|
||||
@@ -585,8 +696,8 @@ Builds:
|
||||
- ADDED: Button for discard/reset changes (#759)
|
||||
- FIXED: Don't show error dialog when subprocess fails, as DbGate handles this correctly (#751, #746, #542, #272)
|
||||
|
||||
|
||||
### 5.2.7
|
||||
|
||||
- FIXED: fix body overflow when context menu height great than viewport #592
|
||||
- FIXED: Pass signals in entrypoint.sh #596
|
||||
- FIXED: Remove missing links to jenasoft #625
|
||||
@@ -597,6 +708,7 @@ Builds:
|
||||
- CHANGED: Improved stability of electron client on Windows and Mac (fewer EPIPE errors)
|
||||
|
||||
### 5.2.6
|
||||
|
||||
- FIXED: DbGate creates a lot of .tmp.node files in the temp directory #561
|
||||
- FIXED: Typo in datetimeoffset dataType #556
|
||||
- FIXED: SQL export is using the wrong hour formatting #537
|
||||
@@ -604,6 +716,7 @@ Builds:
|
||||
- FIXED: MongoDB password could contain special characters #560
|
||||
|
||||
### 5.2.5
|
||||
|
||||
- ADDED: Split Windows #394
|
||||
- FIXED: Postgres index asc/desc #514
|
||||
- FIXED: Excel export not working since 5.2.3 #511
|
||||
@@ -612,9 +725,11 @@ Builds:
|
||||
- FIXED: Solved some minor problems with widget collapsing
|
||||
|
||||
### 5.2.4
|
||||
|
||||
- FIXED: npm version crash (#508)
|
||||
|
||||
### 5.2.3
|
||||
|
||||
- ADDED: Search entire table (multi column filter) #491
|
||||
- ADDED: OracleDB - connection to other than default ports #496
|
||||
- CHANGED: OracleDB - status of support set to experimental
|
||||
@@ -646,8 +761,8 @@ Builds:
|
||||
- FIXED: Fixed some scenarios using tables from different DBs
|
||||
- FIXED: Sessions with long-running queries are not killed
|
||||
|
||||
|
||||
### 5.2.2
|
||||
|
||||
- FIXED: Optimalized load DB structure for PostgreSQL #451
|
||||
- ADDED: Auto-closing query connections after configurable (15 minutes default) no-activity interval #468
|
||||
- ADDED: Set application-name connection parameter (for PostgreSQL and MS SQL) for easier identifying of DbGate connections
|
||||
@@ -658,8 +773,8 @@ Builds:
|
||||
- FIXED: crash on Windows and Mac after system goes in suspend mode #458
|
||||
- ADDED: dbmodel standalone NPM package (https://www.npmjs.com/package/dbmodel) - deploy database via commandline tool
|
||||
|
||||
|
||||
### 5.2.1
|
||||
|
||||
- FIXED: client_id param in OAuth
|
||||
- ADDED: OAuth scope parameter
|
||||
- FIXED: login page - password was not sent, when submitting by pressing ENTER
|
||||
@@ -667,6 +782,7 @@ Builds:
|
||||
- FIXED: Export modal - fixed crash when selecting different database
|
||||
|
||||
### 5.2.0
|
||||
|
||||
- ADDED: Oracle database support #380
|
||||
- ADDED: OAuth authentification #407
|
||||
- ADDED: Active directory (Windows) authentification #261
|
||||
@@ -688,7 +804,7 @@ Builds:
|
||||
- ADDED: Perspective designer supports joins from MongoDB nested documents and arrays
|
||||
- FIXED: Perspective designer joins on MongoDB ObjectId fields
|
||||
- ADDED: Filtering columns in designer (query designer, diagram designer, perspective designer)
|
||||
- FIXED: Clone MongoDB rows without _id attribute #404
|
||||
- FIXED: Clone MongoDB rows without \_id attribute #404
|
||||
- CHANGED: Improved cell view with GPS latitude, longitude fields
|
||||
- ADDED: SQL: ALTER VIEW and SQL:ALTER PROCEDURE scripts
|
||||
- ADDED: Ctrl+F5 refreshes data grid also with database structure #428
|
||||
@@ -697,8 +813,8 @@ Builds:
|
||||
- ADDED: Rename, remove connection folder, memoize opened state after app restart #425
|
||||
- FIXED: Show SQLServer alter store procedure #435
|
||||
|
||||
|
||||
### 5.1.6
|
||||
|
||||
- ADDED: Connection folders support #274
|
||||
- ADDED: Keyboard shortcut to hide result window and show/hide the side toolbar #406
|
||||
- ADDED: Ability to show/hide query results #406
|
||||
@@ -710,6 +826,7 @@ Builds:
|
||||
- CHANGED: More strict timeouts to kill database and server connections (reduces resource consumption)
|
||||
|
||||
### 5.1.5
|
||||
|
||||
- ADDED: Support perspectives for MongoDB - MongoDB query designer
|
||||
- ADDED: Show JSON content directly in the overview #395
|
||||
- CHANGED: OSX Command H shortcut for hiding window #390
|
||||
@@ -720,6 +837,7 @@ Builds:
|
||||
- ADDED: connect via socket - configurable via environment variables #358
|
||||
|
||||
### 5.1.4
|
||||
|
||||
- ADDED: Drop database commands #384
|
||||
- ADDED: Customizable Redis key separator #379
|
||||
- ADDED: ARM support for docker images
|
||||
@@ -728,6 +846,7 @@ Builds:
|
||||
- ADDED: Unsaved marker for SQL files
|
||||
|
||||
### 5.1.3
|
||||
|
||||
- ADDED: Editing multiline cell values #378 #371 #359
|
||||
- ADDED: Truncate table #333
|
||||
- ADDED: Perspectives - show row count
|
||||
@@ -736,6 +855,7 @@ Builds:
|
||||
- FIXED: Correct error line numbers returned from queries
|
||||
|
||||
### 5.1.2
|
||||
|
||||
- FIXED: MongoDb any export function does not work. #373
|
||||
- ADDED: Query Designer short order more flexibility #372
|
||||
- ADDED: Form View move between records #370
|
||||
@@ -749,6 +869,7 @@ Builds:
|
||||
- ADDED: Perspectives - cells without joined data are gray
|
||||
|
||||
### 5.1.1
|
||||
|
||||
- ADDED: Perspective designer
|
||||
- FIXED: NULL,NOT NULL filter datatime columns #356
|
||||
- FIXED: Recognize computed columns on SQL server #354
|
||||
@@ -758,32 +879,35 @@ Builds:
|
||||
- ADDED: Custom editor font size #345
|
||||
- ADDED: Ability to open perspective files
|
||||
|
||||
|
||||
### 5.1.0
|
||||
|
||||
- ADDED: Perspectives (docs: https://dbgate.org/docs/perspectives.html )
|
||||
- CHANGED: Upgraded SQLite engine version (driver better-sqlite3: 7.6.2)
|
||||
- CHANGED: Upgraded ElectronJS version (from version 13 to version 17)
|
||||
- CHANGED: Upgraded all dependencies with current available minor version updates
|
||||
- CHANGED: By default, connect on click #332
|
||||
- CHANGED: Improved keyboard navigation, when editing table data #331
|
||||
- ADDED: Option to skip Save changes dialog #329
|
||||
- ADDED: Option to skip Save changes dialog #329
|
||||
- FIXED: Unsigned column doesn't work correctly. #324
|
||||
- FIXED: Connect to MS SQL with domain user now works also under Linux and Mac #305
|
||||
|
||||
### 5.0.9
|
||||
|
||||
- FIXED: Fixed problem with SSE events on web version
|
||||
- ADDED: Added menu command "New query designer"
|
||||
- ADDED: Added menu command "New ER diagram"
|
||||
|
||||
### 5.0.8
|
||||
|
||||
- ADDED: SQL Server - support using domain logins under Linux and Mac #305
|
||||
- ADDED: Permissions for connections #318
|
||||
- ADDED: Ability to change editor front #308
|
||||
- ADDED: Custom expression in query designer #306
|
||||
- ADDED: OR conditions in query designer #321
|
||||
- ADDED: Ability to configure settings view environment variables #304
|
||||
|
||||
|
||||
### 5.0.7
|
||||
|
||||
- FIXED: Fixed some problems with SSH tunnel (upgraded SSH client) #315
|
||||
- FIXED: Fixed MongoDB executing find query #312
|
||||
- ADDED: Interval filters for date/time columns #311
|
||||
@@ -791,8 +915,9 @@ Builds:
|
||||
- ADDED: connecting option Trust server certificate for SQL Server #305
|
||||
- ADDED: Autorefresh, reload table every x second #303
|
||||
- FIXED(app): Changing editor theme and font size in Editor Themes #300
|
||||
|
||||
|
||||
### 5.0.6
|
||||
|
||||
- ADDED: Search in columns
|
||||
- CHANGED: Upgraded mongodb driver
|
||||
- ADDED: Ability to reset view, when data load fails
|
||||
@@ -800,6 +925,7 @@ Builds:
|
||||
- FIXED: Fixed some NPM package problems
|
||||
|
||||
### 5.0.5
|
||||
|
||||
- ADDED: Visualisation geographics objects on map #288
|
||||
- ADDED: Support for native SQL as default value inside yaml files #296
|
||||
- FIXED: Postgres boolean columns don't filter correctly #298
|
||||
@@ -807,10 +933,11 @@ Builds:
|
||||
- FIXED: Handle error when reading deleted archive
|
||||
|
||||
### 5.0.3
|
||||
|
||||
- CHANGED: Optimalization of loading DB structure for PostgreSQL, MySQL #273
|
||||
- CHANGED: Upgraded mysql driver #293
|
||||
- CHANGED: Better UX when defining SSH port #291
|
||||
- ADDED: Database object menu from tab
|
||||
- ADDED: Database object menu from tab
|
||||
- CHANGED: Ability to close file uploader
|
||||
- FIXED: Correct handling of NUL values in update keys
|
||||
- CHANGED: Upgraded MS SQL tedious driver
|
||||
@@ -820,13 +947,17 @@ Builds:
|
||||
- ADDED: Configurable object actions #255
|
||||
- ADDED: Multiple sort criteria #235
|
||||
- ADDED(app): Open JSON file
|
||||
|
||||
### 5.0.2
|
||||
|
||||
- FIXED: Cannot use SSH Tunnel after update #291
|
||||
|
||||
### 5.0.1
|
||||
|
||||
- FIXED(app): Can't Click Sidebar Menu Item #287
|
||||
|
||||
### 5.0.0
|
||||
|
||||
- CHANGED: Connection workflow, connections are opened on tabs instead of modals
|
||||
- ADDED: Possibility to connect to DB without saving connection
|
||||
- ADDED(mac): Support for SQLite on Mac M1
|
||||
@@ -839,6 +970,7 @@ Builds:
|
||||
- FIXED: Removed SSL tab on Redis connection (SSL is not supported for Redis)
|
||||
|
||||
### 4.8.8
|
||||
|
||||
- CHANGED: New app icon
|
||||
- ADDED: SQL dump, SQL import - also from/to saved queries
|
||||
- FIXED(mac): Fixed crash when reopening main window
|
||||
@@ -847,6 +979,7 @@ Builds:
|
||||
- ADDED(app): Browse tabs in reverse order with Ctrl+Shift+Tab #245
|
||||
|
||||
### 4.8.7
|
||||
|
||||
- ADDED: MySQL dump/backup database
|
||||
- ADDED: Import SQL dump from file or from URL
|
||||
- FIXED(mac): Fixed Cmd+C, Cmd+V, Cmd+X - shortcuts for copy/cut/paste #270
|
||||
@@ -855,6 +988,7 @@ Builds:
|
||||
- ADDED: Support for dockerhost network name under docker #271
|
||||
|
||||
### 4.8.4
|
||||
|
||||
- FIXED(mac): Fixed build for macOS arm64 #259
|
||||
- FIXED(mac): Fixed opening SQLite files on macOS #243
|
||||
- FIXED(mac): Fixed opening PEM certificates on macOS #206
|
||||
@@ -866,6 +1000,7 @@ Builds:
|
||||
- ADDED: Added menu command "Tools/Change to recent database"
|
||||
|
||||
### 4.8.3
|
||||
|
||||
- FIXED: filters in query result and NDJSON/archive viewer
|
||||
- ADDED: Added select values from query result and NDJSON/archive viewer
|
||||
- ADDED: tab navigation in datagrid #254
|
||||
@@ -875,19 +1010,24 @@ Builds:
|
||||
- ADDED: Data type + reference link in column manager
|
||||
- FIXED(win,linux,mac): Unable to change theme after installing plugin #244
|
||||
|
||||
### 4.8.2
|
||||
- ADDED: implemented missing redis search key logic
|
||||
### 4.8.2
|
||||
|
||||
### 4.8.1
|
||||
- FIXED: fixed crash after disconnecting from all DBs
|
||||
- ADDED: implemented missing redis search key logic
|
||||
|
||||
### 4.8.1
|
||||
|
||||
- FIXED: fixed crash after disconnecting from all DBs
|
||||
|
||||
### 4.8.0
|
||||
|
||||
- ADDED: Redis support (support stream type), removed experimental status
|
||||
- ADDED: Redis readonly support
|
||||
- ADDED: Explicit NDJSON support, when opening NDJSON/JSON lines file, table data are immediately shown, without necessity to import
|
||||
- ADDED(win,linux,mac): Opening developer tools when crashing without reload app
|
||||
|
||||
### 4.7.4
|
||||
- ADDED: Experimental Redis support (full support is planned to version 4.8.0)
|
||||
|
||||
- ADDED: Experimental Redis support (full support is planned to version 4.8.0)
|
||||
- ADDED: Read-only connections
|
||||
- FIXED: MongoDB filters
|
||||
- ADDED: MongoDB column value selection
|
||||
@@ -895,13 +1035,14 @@ Builds:
|
||||
- ADDED: Fuzzy search #246
|
||||
- ADDED(docker, npm): New permissions
|
||||
- FIXED(npm): NPM build no longer allocates additional ports
|
||||
- CHANGED(npm): renamed NPM package dbgate => dbgate-serve
|
||||
- CHANGED(npm): renamed NPM package dbgate => dbgate-serve
|
||||
- CHANGED(docker): custom JavaScripts and connections defined in scripts are now prohibited by default, use SHELL_CONNECTION and SHELL_SCRIPTING environment variables for allowing this
|
||||
- ADDED(docker, npm): Better documentation of environment variables configuration, https://dbgate.org/docs/env-variables.html
|
||||
- ADDED(docker): support for multiple users with different permissions
|
||||
- ADDED(docker): logout operation
|
||||
|
||||
### 4.7.3
|
||||
|
||||
- CHANGED: Export menu redesign, quick export menu merged with old export menu
|
||||
- REMOVED: Quick export menu
|
||||
- ADDED: Export column mapping
|
||||
@@ -916,6 +1057,7 @@ Builds:
|
||||
- ADDED: NPM dist accepts .env configuration
|
||||
|
||||
### 4.7.2
|
||||
|
||||
- CHANGED: documentation URL - https://dbgate.org/docs/
|
||||
- CHANGED: Close button available for all tab groups - #238
|
||||
- ADDED: Search function for the Keyboard Shortcuts overview - #239
|
||||
@@ -924,7 +1066,8 @@ Builds:
|
||||
- FIXED: bug in cache subsystem
|
||||
|
||||
### 4.7.1
|
||||
- FIXED: Fixed connecting to MS SQL server running in docker container from DbGate running in docker container #236
|
||||
|
||||
- FIXED: Fixed connecting to MS SQL server running in docker container from DbGate running in docker container #236
|
||||
- FIXED: Fixed export MongoDB collections into Excel and CSV #240
|
||||
- ADDED: Added support for docker volumes to persist connections, when not using configuration via env variables #232
|
||||
- ADDED: DbGate in Docker can run in subdirectory #228
|
||||
@@ -934,7 +1077,9 @@ Builds:
|
||||
- ADDED: Improved fullscreen state, title bar with menu is hidden, menu is in hamburger menu, like in web version
|
||||
- ADDED: Theme choose dialog (added as tab in settings)
|
||||
- FIXED: Fixed crash when clicking on application layers #231
|
||||
|
||||
### 4.7.0
|
||||
|
||||
- CHANGED: Changed main menu style, menu and title bar is in one line (+ability to switch to system menu)
|
||||
- REMOVED: Removed main toolbar, use main menu or tab related bottom tool instead
|
||||
- ADDED: Added tab related context bottom toolbar
|
||||
@@ -953,11 +1098,13 @@ Builds:
|
||||
- ADDED: Better work with JSON lines file, added JSONL editor with preview
|
||||
|
||||
### 4.6.3
|
||||
|
||||
- FIXED: Fixed Windows build
|
||||
- FIXED: Fixed crash, when there is invalid value in browser local storage
|
||||
- FIXED: Fixed plugin description display, where author name or description is not correctly filled
|
||||
|
||||
### 4.6.2
|
||||
|
||||
- FIXED: Fixed issues of XML import plugin
|
||||
- ADDED: Split columns macro (available in data sheet editor)
|
||||
- CHANGED: Accepting non standard plugins names (which doesn't start with dbgate-plugin-)
|
||||
@@ -969,6 +1116,7 @@ Builds:
|
||||
- FIXED: Fixed configuring connection to SQLite with environment variables #215
|
||||
|
||||
### 4.6.1
|
||||
|
||||
- ADDED: Ability to configure SSH tunnel over environment variables #210 (for docker container)
|
||||
- ADDED: XML export and import
|
||||
- ADDED: Archive file - show and edit source text file
|
||||
@@ -984,20 +1132,23 @@ Builds:
|
||||
- CHANGED: UX improvements of table editor
|
||||
|
||||
### 4.6.0
|
||||
|
||||
- ADDED: ER diagrams #118
|
||||
- Generate diagram from table or for database
|
||||
- Automatic layout
|
||||
- Diagram styles - colors, select columns to display, optional displaying data type or nullability
|
||||
- Export diagram to HTML file
|
||||
- Generate diagram from table or for database
|
||||
- Automatic layout
|
||||
- Diagram styles - colors, select columns to display, optional displaying data type or nullability
|
||||
- Export diagram to HTML file
|
||||
- FIXED: Mac latest build link #204
|
||||
|
||||
### 4.5.1
|
||||
|
||||
- FIXED: MongoId detection
|
||||
- FIXED: #203 disabled spellchecker
|
||||
- FIXED: Prevented display filters in form view twice
|
||||
- FIXED: Query designer fixes
|
||||
|
||||
### 4.5.0
|
||||
|
||||
- ADDED: #220 functions, materialized views and stored procedures in code completion
|
||||
- ADDED: Query result in statusbar
|
||||
- ADDED: Highlight and execute current query
|
||||
@@ -1015,6 +1166,7 @@ Builds:
|
||||
- FIXED: Fixed delete dependency cycle detection (delete didn't work for some tables)
|
||||
|
||||
### 4.4.4
|
||||
|
||||
- FIXED: Database colors
|
||||
- CHANGED: Precise work with MongoDB ObjectId
|
||||
- FIXED: Run macro works on MongoDB collection data editor
|
||||
@@ -1029,6 +1181,7 @@ Builds:
|
||||
- ADDED: Show change log after app upgrade
|
||||
|
||||
### 4.4.3
|
||||
|
||||
- ADDED: Connection and database colors
|
||||
- ADDED: Ability to pin connection or table
|
||||
- ADDED: MongoDb: create, drop collection from menu
|
||||
@@ -1046,6 +1199,7 @@ Builds:
|
||||
- CHANGED: Save widget visibility and size
|
||||
|
||||
### 4.4.2
|
||||
|
||||
- ADDED: Open SQL script from SQL confirm
|
||||
- CHANGED: Better looking statusbar
|
||||
- ADDED: Create table from database popup menu
|
||||
@@ -1055,6 +1209,7 @@ Builds:
|
||||
- ADDED: Support for Command key on Mac (#199)
|
||||
|
||||
### 4.4.1
|
||||
|
||||
- FIXED: #188 Fixed problem with datetime values in PostgreSQL and mysql
|
||||
- ADDED: #194 Close tabs by DB
|
||||
- FIXED: Improved form view width calculations
|
||||
@@ -1068,6 +1223,7 @@ Builds:
|
||||
- ADDED: Row count information moved into status bar, when only one grid on tab is used (typical case)
|
||||
|
||||
### 4.4.0
|
||||
|
||||
- ADDED: Database structure compare, export report to HTML
|
||||
- ADDED: Experimental: Deploy DB structure changes between databases
|
||||
- ADDED: Lookup dialog, available in table view on columns with foreign key
|
||||
@@ -1084,21 +1240,25 @@ Builds:
|
||||
- FIXED: Fixed import into SQLite and PostgreSQL databases, added integration test for this
|
||||
|
||||
### 4.3.4
|
||||
|
||||
- FIXED: Delete row with binary ID in MySQL (#182)
|
||||
- ADDED: Using 'ODBC Driver 17 for SQL Server' or 'SQL Server Native Client 11.0', when connecting to MS SQL using windows auth #183
|
||||
|
||||
### 4.3.3
|
||||
|
||||
- ADDED: Generate SQL from data (#176 - Copy row as INSERT/UPDATE statement)
|
||||
- ADDED: Datagrid keyboard column operations (Ctrl+F - find column, Ctrl+H - hide column) #180
|
||||
- FIXED: Make window remember that it was maximized
|
||||
- FIXED: Fixed lost focus after copy to clipboard and after inserting SQL join
|
||||
|
||||
### 4.3.2
|
||||
|
||||
- FIXED: Sorted database list in PostgreSQL (#178)
|
||||
- FIXED: Loading structure of PostgreSQL database, when it contains indexes on expressions (#175)
|
||||
- ADDED: Hotkey Shift+Alt+F for formatting SQL code
|
||||
|
||||
### 4.3.1
|
||||
|
||||
- FIXED: #173 Using key phrase for SSH key file connection
|
||||
- ADDED: #172 Ability to quick search within database names
|
||||
- ADDED: Database search added to command palette (Ctrl+P)
|
||||
@@ -1106,24 +1266,28 @@ Builds:
|
||||
- ADDED: DELETE cascade option - ability to delete all referenced rows, when deleting rows
|
||||
|
||||
### 4.3.0
|
||||
|
||||
- ADDED: Table structure editor
|
||||
- ADDED: Index support
|
||||
- ADDED: Unique constraint support
|
||||
- ADDED: Context menu for drop/rename table/columns and for drop view/procedure/function
|
||||
- ADDED: Added support for Windows arm64 platform
|
||||
- FIXED: Search by _id in MongoDB
|
||||
- FIXED: Search by \_id in MongoDB
|
||||
|
||||
### 4.2.6
|
||||
|
||||
- FIXED: Fixed MongoDB import
|
||||
- ADDED: Configurable thousands separator #136
|
||||
- ADDED: Using case insensitive text search in postgres
|
||||
|
||||
### 4.2.5
|
||||
|
||||
- FIXED: Fixed crash when using large model on some installations
|
||||
- FIXED: Postgre SQL CREATE function
|
||||
- FIXED: Postgre SQL CREATE function
|
||||
- FIXED: Analysing of MySQL when modifyDate is not known
|
||||
|
||||
### 4.2.4
|
||||
|
||||
- ADDED: Query history
|
||||
- ADDED: One-click exports in desktop app
|
||||
- ADDED: JSON array export
|
||||
@@ -1135,23 +1299,27 @@ Builds:
|
||||
- CHANGED: Introduced package dbgate-query-splitter, instead of sql-query-identifier and @verycrazydog/mysql-parse
|
||||
|
||||
### 4.2.3
|
||||
|
||||
- ADDED: ARM builds for MacOS and Linux
|
||||
- ADDED: Filter by columns in form view
|
||||
|
||||
### 4.2.2
|
||||
|
||||
- CHANGED: Further startup optimalization (approx. 2 times quicker start of electron app)
|
||||
|
||||
### 4.2.1
|
||||
|
||||
- FIXED: Fixed+optimalized app startup (esp. on Windows)
|
||||
|
||||
### 4.2.0
|
||||
|
||||
- ADDED: Support of SQLite database
|
||||
- ADDED: Support of Amazon Redshift database
|
||||
- ADDED: Support of CockroachDB
|
||||
- CHANGED: DB Model is not auto-refreshed by default, refresh could be invoked from statusbar
|
||||
- FIXED: Fixed race conditions on startup
|
||||
- FIXED: Fixed broken style in data grid under strange circumstances
|
||||
- ADDED: Configure connections with commandline arguments #108
|
||||
- ADDED: Configure connections with commandline arguments #108
|
||||
- CHANGED: Optimalized algorithm of incremental DB model updates
|
||||
- CHANGED: Loading queries from PostgreSQL doesn't need cursors, using streamed query instead
|
||||
- ADDED: Disconnect command
|
||||
@@ -1160,9 +1328,11 @@ Builds:
|
||||
- ADDED: Cosmetic improvements of MariaDB support
|
||||
|
||||
### 4.1.11
|
||||
|
||||
- FIX: Fixed crash of API process when using SSH tunnel connection (race condition)
|
||||
|
||||
### 4.1.11
|
||||
|
||||
- FIX: fixed processing postgre query containing $$
|
||||
- FIX: fixed postgre analysing procedures & functions
|
||||
- FIX: patched svelte crash #105
|
||||
@@ -1175,6 +1345,7 @@ Builds:
|
||||
- CHANGED: Toolbar design - current tab related commands are delimited
|
||||
|
||||
### 4.1.10
|
||||
|
||||
- ADDED: Default database option in connection settings #96 #92
|
||||
- FIX: Bundle size optimalization for Windows #97
|
||||
- FIX: Popup menu placement on smaller displays #94
|
||||
@@ -1185,22 +1356,32 @@ Builds:
|
||||
- ADDED: Show database server version in status bar
|
||||
- ADDED: Show detailed info about error, when connect to database fails
|
||||
- ADDED: Portable ZIP distribution for Windows #84
|
||||
|
||||
### 4.1.9
|
||||
|
||||
- FIX: Incorrect row count info in query result #83
|
||||
|
||||
### 4.1.1
|
||||
|
||||
- CHANGED: Default plugins are now part of installation
|
||||
|
||||
### 4.1.0
|
||||
|
||||
- ADDED: MongoDB support
|
||||
- ADDED: Configurable keyboard shortcuts
|
||||
- ADDED: JSON row cell data view
|
||||
- FIX: Fixed some problems from migration to Svelte
|
||||
|
||||
### 4.0.3
|
||||
|
||||
- FIX: fixes for FireFox (mainly incorrect handling of bind:clientHeight, replaced with resizeobserver)
|
||||
|
||||
### 4.0.2
|
||||
|
||||
- FIX: fixed docker and NPM build
|
||||
|
||||
### 4.0.0
|
||||
|
||||
- CHANGED: Exchanged React for Svelte. Changed theme colors. Huge speed and memory optimalization
|
||||
- ADDED: SQL Generator (CREATE, INSERT, DROP)
|
||||
- ADDED: Command palette (F1). Introduced commands, extended some context menus
|
||||
@@ -1212,6 +1393,7 @@ Builds:
|
||||
- FIX: Solved reconnecting expired connection
|
||||
|
||||
### 3.9.6
|
||||
|
||||
- ADDED: Connect using SSH Tunnel
|
||||
- ADDED: Connect using SSL
|
||||
- ADDED: Database connection dialog redesigned
|
||||
@@ -1221,4 +1403,5 @@ Builds:
|
||||
- FIX: #62 - import, export executed from SNAP installs didn't work
|
||||
|
||||
### 3.9.5
|
||||
|
||||
- Start point of changelog
|
||||
|
||||
@@ -0,0 +1,119 @@
|
||||
# CLAUDE.md
|
||||
|
||||
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||
|
||||
## Project Overview
|
||||
|
||||
DbGate is a cross-platform (no)SQL database manager supporting MySQL, PostgreSQL, SQL Server, Oracle, MongoDB, Redis, SQLite, and more. It runs as a web app (Docker/NPM), an Electron desktop app, or in a browser. The monorepo uses Yarn workspaces.
|
||||
|
||||
## Development Commands
|
||||
|
||||
```sh
|
||||
yarn # install all packages (also builds TS libraries and plugins)
|
||||
yarn start # run API (port 3000) + web (port 5001) concurrently
|
||||
```
|
||||
|
||||
For more control, run these 3 commands in separate terminals:
|
||||
```sh
|
||||
yarn start:api # Express API on port 3000
|
||||
yarn start:web # Svelte frontend on port 5001
|
||||
yarn lib # watch-compile TS libraries and plugins
|
||||
```
|
||||
|
||||
For Electron development:
|
||||
```sh
|
||||
yarn start:web # web on port 5001
|
||||
yarn lib # watch TS libs/plugins
|
||||
yarn start:app # Electron app
|
||||
```
|
||||
|
||||
### Building
|
||||
|
||||
```sh
|
||||
yarn build:lib # build all TS libraries (sqltree, tools, filterparser, datalib, rest)
|
||||
yarn build:api # build API
|
||||
yarn build:web # build web frontend
|
||||
yarn ts # TypeScript type-check API and web
|
||||
yarn prettier # format all source files
|
||||
```
|
||||
|
||||
### Testing
|
||||
|
||||
Unit tests (in packages like `dbgate-tools`):
|
||||
```sh
|
||||
yarn workspace dbgate-tools test
|
||||
```
|
||||
|
||||
Integration tests (requires Docker for database containers):
|
||||
```sh
|
||||
cd integration-tests
|
||||
yarn test:local # run all tests
|
||||
yarn test:local:path __tests__/alter-database.spec.js # run a single test file
|
||||
```
|
||||
|
||||
E2E tests (Cypress):
|
||||
```sh
|
||||
yarn cy:open # open Cypress UI
|
||||
cd e2e-tests && yarn cy:run:browse-data # run a specific spec headlessly
|
||||
```
|
||||
|
||||
## Architecture
|
||||
|
||||
### Monorepo Structure
|
||||
|
||||
| Path | Package | Purpose |
|
||||
|---|---|---|
|
||||
| `packages/api` | `dbgate-api` | Express.js backend server |
|
||||
| `packages/web` | `dbgate-web` | Svelte 4 frontend (built with Rolldown) |
|
||||
| `packages/tools` | `dbgate-tools` | Shared TS utilities: SQL dumping, schema analysis, diffing, driver base classes |
|
||||
| `packages/datalib` | `dbgate-datalib` | Grid display logic, changeset management, perspectives, chart definitions |
|
||||
| `packages/sqltree` | `dbgate-sqltree` | SQL AST representation and dumping |
|
||||
| `packages/filterparser` | `dbgate-filterparser` | Parses filter strings into SQL/Mongo conditions |
|
||||
| `packages/rest` | `dbgate-rest` | REST connection support |
|
||||
| `packages/types` | `dbgate-types` | TypeScript type definitions (`.d.ts` only) |
|
||||
| `packages/aigwmock` | `dbgate-aigwmock` | Mock AI gateway server for E2E testing |
|
||||
| `plugins/dbgate-plugin-*` | — | Database drivers and file format handlers |
|
||||
| `app/` | — | Electron shell |
|
||||
| `integration-tests/` | — | Jest-based DB integration tests (Docker) |
|
||||
| `e2e-tests/` | — | Cypress E2E tests |
|
||||
|
||||
### API Backend (`packages/api`)
|
||||
|
||||
- Express.js server with controllers in `src/controllers/` — each file exposes REST endpoints via the `useController` utility
|
||||
- Database connections run in child processes (`src/proc/`) to isolate crashes and long-running operations
|
||||
- `src/shell/` contains stream-based data pipeline primitives (readers, writers, transforms) used for import/export and replication
|
||||
- Plugin drivers are loaded dynamically via `requireEngineDriver`; each plugin in `plugins/` exports a driver conforming to `DriverBase` from `dbgate-tools`
|
||||
|
||||
### Frontend (`packages/web`)
|
||||
|
||||
- Svelte 4 components; builds with Rolldown (not Vite/Webpack)
|
||||
- Global state in `src/stores.ts` using Svelte writable stores, with `writableWithStorage` / `writableWithForage` helpers for persistence
|
||||
- API calls go through `src/utility/api.ts` (`apiCall`, `apiOff`, etc.) which handles auth, error display, and cache invalidation
|
||||
- Tab system: each open editor/viewer is a "tab" tracked in `openedTabs` store; tab components live in `src/tabs/`
|
||||
- Left-panel tree items are "AppObjects" in `src/appobj/`
|
||||
- Metadata (table lists, column info) is loaded reactively via hooks in `src/utility/metadataLoaders.ts`
|
||||
- Commands/keybindings are registered in `src/commands/`
|
||||
|
||||
### Plugin Architecture
|
||||
|
||||
Each `plugins/dbgate-plugin-*` package provides:
|
||||
- **Frontend build** (`build:frontend`): bundled JS loaded by the web UI for query formatting, data rendering
|
||||
- **Backend build** (`build:backend`): Node.js driver code loaded by the API for actual DB connections
|
||||
|
||||
Plugins are copied to `plugins/dist/` via `plugins:copydist` before building the app or Docker image.
|
||||
|
||||
### Key Conventions
|
||||
|
||||
- Error/message codes use `DBGM-00000` as placeholder — do not introduce new numbered `DBGM-NNNNN` codes
|
||||
- Frontend uses **Svelte 4** (not Svelte 5)
|
||||
- E2E test selectors use `data-testid` attribute with format `ComponentName_identifier`
|
||||
- Prettier config: single quotes, 2-space indent, 120-char line width, trailing commas ES5
|
||||
- Logging via `pinomin`; pipe through `pino-pretty` for human-readable output
|
||||
|
||||
### Translation System
|
||||
|
||||
```sh
|
||||
yarn translations:extract # extract new strings
|
||||
yarn translations:add-missing # add missing translations
|
||||
yarn translations:check # check for issues
|
||||
```
|
||||
@@ -13,9 +13,9 @@
|
||||
<p>DbGate is cross-platform database manager. It's designed to be simple to use and effective, when working with more databases simultaneously. But there are also many advanced features like schema compare, visual query designer, chart visualisation or batch export and import.</p>
|
||||
</description>
|
||||
|
||||
<url type="homepage">https://dbgate.org/</url>
|
||||
<url type="homepage">https://www.dbgate.io/</url>
|
||||
<url type="vcs-browser">https://github.com/dbgate/dbgate</url>
|
||||
<url type="contact">https://dbgate.org/about/</url>
|
||||
<url type="contact">https://www.dbgate.io/contact/</url>
|
||||
<url type="donation">https://github.com/sponsors/dbgate</url>
|
||||
<url type="bugtracker">https://github.com/dbgate/dbgate/issues</url>
|
||||
|
||||
|
||||
@@ -4,5 +4,6 @@ module.exports = {
|
||||
mssql: true,
|
||||
oracle: true,
|
||||
sqlite: true,
|
||||
mongo: true
|
||||
mongo: true,
|
||||
dynamo: true,
|
||||
};
|
||||
|
||||
@@ -3,8 +3,58 @@ const os = require('os');
|
||||
const fs = require('fs');
|
||||
|
||||
// DbGate's per-user data directory; clearTestingData removes test
// connection files from here between runs.
const baseDir = path.join(os.homedir(), '.dbgate');
// PID files written by the e2e harness so servers started by a previous
// run can be stopped before the next one begins.
const testApiPidFile = path.join(__dirname, 'tmpdata', 'test-api.pid');
const aigwmockPidFile = path.join(__dirname, 'tmpdata', 'aigwmock.pid');
|
||||
|
||||
/**
 * Read the kernel start time of a process from /proc/<pid>/stat (Linux only).
 *
 * Used to detect PID reuse: a stored PID is only "ours" if the start time
 * recorded alongside it still matches the live process.
 *
 * @param {number} pid - process id to inspect
 * @returns {string|null} the starttime field (field 22 of /proc/<pid>/stat),
 *   or null on non-Linux platforms, missing/dead processes, or parse failure
 */
function readProcessStartTime(pid) {
  if (process.platform !== 'linux') {
    return null;
  }
  try {
    const stat = fs.readFileSync(`/proc/${pid}/stat`, 'utf-8');
    // Field 2 (comm) is wrapped in parentheses and may itself contain spaces
    // or ')', so a naive split(' ')[21] miscounts for such processes.
    // Split only the text after the LAST ')': the remainder starts at
    // field 3 (state), so starttime (field 22) is at index 19.
    const closeParen = stat.lastIndexOf(')');
    if (closeParen < 0) {
      return null;
    }
    const fields = stat.slice(closeParen + 2).split(' ');
    return fields[19] || null;
  } catch (err) {
    // Process already gone or /proc unreadable - treat as unknown.
    return null;
  }
}
|
||||
|
||||
/**
 * Decide whether a PID recorded in a pid-file metadata object still refers
 * to the process we originally started.
 *
 * @param {{pid: number, startTime?: string}|null} meta - parsed pid-file content
 * @returns {boolean} true when the PID looks safe to signal
 */
function isPidStillOurs(meta) {
  // No metadata, or a non-positive / missing PID: definitely not ours.
  const pid = meta?.pid;
  if (!(pid > 0)) {
    return false;
  }
  // On Linux, guard against PID reuse by comparing recorded vs. current
  // process start times when a start time was stored.
  if (process.platform === 'linux' && meta.startTime) {
    return readProcessStartTime(pid) === meta.startTime;
  }
  // No way to verify elsewhere - assume the PID is still ours.
  return true;
}
|
||||
|
||||
/**
 * Stop the process recorded in a PID file, then delete the file.
 *
 * The file may contain either JSON metadata ({pid, startTime}) or, from
 * older runs, a bare numeric PID. The process is only signalled when
 * isPidStillOurs confirms the PID has not been reused. All failures
 * (stale files, dead processes, cleanup errors) are intentionally ignored.
 *
 * @param {string} pidFile - path to the PID file
 */
function stopProcessByPidFile(pidFile) {
  // Nothing to do when no PID file was ever written.
  if (!fs.existsSync(pidFile)) {
    return;
  }

  try {
    const raw = fs.readFileSync(pidFile, 'utf-8').trim();

    // Prefer JSON metadata; fall back to a legacy bare-number PID.
    let meta;
    try {
      meta = JSON.parse(raw);
    } catch (_) {
      const parsed = Number(raw);
      meta = Number.isInteger(parsed) && parsed > 0 ? { pid: parsed } : null;
    }

    // Only signal when we are confident the PID still belongs to us.
    if (isPidStillOurs(meta)) {
      process.kill(meta.pid);
    }
  } catch (err) {
    // Stale PID files and already-dead processes are expected; ignore.
  }

  // Always attempt to remove the PID file, regardless of the kill outcome.
  try {
    fs.unlinkSync(pidFile);
  } catch (err) {
    // Cleanup failures are non-fatal.
  }
}
|
||||
|
||||
function clearTestingData() {
|
||||
stopProcessByPidFile(testApiPidFile);
|
||||
stopProcessByPidFile(aigwmockPidFile);
|
||||
|
||||
if (fs.existsSync(path.join(baseDir, 'connections-e2etests.jsonl'))) {
|
||||
fs.unlinkSync(path.join(baseDir, 'connections-e2etests.jsonl'));
|
||||
}
|
||||
|
||||
@@ -37,6 +37,9 @@ module.exports = defineConfig({
|
||||
case 'browse-data':
|
||||
serverProcess = exec('yarn start:browse-data');
|
||||
break;
|
||||
case 'rest':
|
||||
serverProcess = exec('yarn start:rest');
|
||||
break;
|
||||
case 'team':
|
||||
serverProcess = exec('yarn start:team');
|
||||
break;
|
||||
@@ -52,6 +55,9 @@ module.exports = defineConfig({
|
||||
case 'redis':
|
||||
serverProcess = exec('yarn start:redis');
|
||||
break;
|
||||
case 'ai-chat':
|
||||
serverProcess = exec('yarn start:ai-chat');
|
||||
break;
|
||||
}
|
||||
|
||||
await waitOn({ resources: ['http://localhost:3000'] });
|
||||
|
||||
@@ -0,0 +1,105 @@
|
||||
Cypress.on('uncaught:exception', err => {
|
||||
if (err.message.includes("Failed to execute 'importScripts' on 'WorkerGlobalScope'")) {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
cy.visit('http://localhost:3000');
|
||||
cy.viewport(1250, 900);
|
||||
});
|
||||
|
||||
describe('Database Chat (MySQL)', () => {
|
||||
it('Database chat - chart of popular genres', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').click();
|
||||
cy.testid('TabsPanel_buttonNewObject').click();
|
||||
cy.testid('NewObjectModal_databaseChat').click();
|
||||
cy.wait(1000);
|
||||
cy.get('body').realType('show me chart of most popular genres');
|
||||
cy.get('body').realPress('Enter');
|
||||
cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 30000 }).click();
|
||||
cy.testid('chart-canvas', { timeout: 30000 }).should($c =>
|
||||
expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/)
|
||||
);
|
||||
cy.themeshot('database-chat-chart');
|
||||
});
|
||||
|
||||
it('Database chat - find most popular artist', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').click();
|
||||
cy.testid('TabsPanel_buttonNewObject').click();
|
||||
cy.testid('NewObjectModal_databaseChat').click();
|
||||
cy.wait(1000);
|
||||
cy.get('body').realType('find most popular artist');
|
||||
cy.get('body').realPress('Enter');
|
||||
cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 30000 }).click();
|
||||
cy.contains('Iron Maiden', { timeout: 30000 });
|
||||
cy.themeshot('database-chat-popular-artist');
|
||||
});
|
||||
});
|
||||
|
||||
describe('GraphQL Chat', () => {
|
||||
it('GraphQL chat - list users', () => {
|
||||
cy.contains('REST GraphQL').click();
|
||||
cy.testid('TabsPanel_buttonNewObject').click();
|
||||
cy.testid('NewObjectModal_graphqlChat').click();
|
||||
cy.wait(1000);
|
||||
cy.get('body').realType('list all users');
|
||||
cy.get('body').realPress('Enter');
|
||||
cy.testid('GraphQlChatTab_executeAllQueries', { timeout: 30000 }).click();
|
||||
cy.contains('users', { timeout: 30000 });
|
||||
cy.themeshot('graphql-chat-list-users');
|
||||
});
|
||||
|
||||
it('GraphQL chat - product categories chart', () => {
|
||||
cy.contains('REST GraphQL').click();
|
||||
cy.testid('TabsPanel_buttonNewObject').click();
|
||||
cy.testid('NewObjectModal_graphqlChat').click();
|
||||
cy.wait(1000);
|
||||
cy.get('body').realType('show me a chart of product categories');
|
||||
cy.get('body').realPress('Enter');
|
||||
cy.testid('GraphQlChatTab_executeAllQueries', { timeout: 30000 }).click();
|
||||
cy.testid('chart-canvas', { timeout: 30000 }).should($c =>
|
||||
expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/)
|
||||
);
|
||||
cy.themeshot('graphql-chat-categories-chart');
|
||||
});
|
||||
|
||||
it('GraphQL chat - find most expensive product', () => {
|
||||
cy.contains('REST GraphQL').click();
|
||||
cy.testid('TabsPanel_buttonNewObject').click();
|
||||
cy.testid('NewObjectModal_graphqlChat').click();
|
||||
cy.wait(1000);
|
||||
cy.get('body').realType('find the most expensive product');
|
||||
cy.get('body').realPress('Enter');
|
||||
cy.testid('GraphQlChatTab_executeAllQueries', { timeout: 30000 }).click();
|
||||
cy.contains('products', { timeout: 30000 });
|
||||
cy.themeshot('graphql-chat-expensive-product');
|
||||
});
|
||||
|
||||
it('GraphQL chat - show all categories', () => {
|
||||
cy.contains('REST GraphQL').click();
|
||||
cy.testid('TabsPanel_buttonNewObject').click();
|
||||
cy.testid('NewObjectModal_graphqlChat').click();
|
||||
cy.wait(1000);
|
||||
cy.get('body').realType('show all categories');
|
||||
cy.get('body').realPress('Enter');
|
||||
cy.testid('GraphQlChatTab_executeAllQueries', { timeout: 30000 }).click();
|
||||
cy.contains('categories', { timeout: 30000 });
|
||||
cy.themeshot('graphql-chat-all-categories');
|
||||
});
|
||||
|
||||
it('Explain query error', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').click();
|
||||
cy.testid('TabsPanel_buttonNewObject').click();
|
||||
cy.testid('NewObjectModal_query').click();
|
||||
cy.wait(1000);
|
||||
cy.get('body').realType('select * from Invoice2');
|
||||
cy.contains('Execute').click();
|
||||
cy.testid('MessageViewRow-explainErrorButton-1').click();
|
||||
cy.testid('ChatCodeRenderer_useSqlButton', { timeout: 30000 });
|
||||
cy.themeshot('explain-query-error');
|
||||
});
|
||||
});
|
||||
@@ -512,4 +512,43 @@ describe('Data browser data', () => {
|
||||
cy.testid('DataFilterControl_input_ArtistId.Name').type('mich{enter}');
|
||||
cy.themeshot('data-browser-filter-by-expanded');
|
||||
});
|
||||
|
||||
it('DynamoDB', () => {
|
||||
cy.contains('Dynamo-connection').click();
|
||||
cy.contains('us-east-1').click();
|
||||
|
||||
cy.contains('Album').click();
|
||||
cy.contains('Pearl Jam').click();
|
||||
cy.themeshot('dynamodb-table-data');
|
||||
cy.contains('Switch to JSON').click();
|
||||
cy.themeshot('dynamodb-json-view');
|
||||
|
||||
cy.contains('Customer').click();
|
||||
cy.testid('DataFilterControl_input_CustomerId').type('<=10{enter}');
|
||||
cy.contains('Rows: 10');
|
||||
cy.wait(1000);
|
||||
cy.contains('Helena').click().rightclick();
|
||||
cy.contains('Show cell data').click();
|
||||
cy.contains('City: "Prague"');
|
||||
cy.themeshot('dynamodb-query-json-view');
|
||||
|
||||
cy.contains('Switch to JSON').click();
|
||||
cy.contains('Leonie').rightclick();
|
||||
cy.contains('Edit document').click();
|
||||
|
||||
Array.from({ length: 11 }).forEach(() => cy.realPress('ArrowDown'));
|
||||
Array.from({ length: 14 }).forEach(() => cy.realPress('ArrowRight'));
|
||||
Array.from({ length: 7 }).forEach(() => cy.realPress('Delete'));
|
||||
cy.realType('Italy');
|
||||
cy.testid('EditJsonModal_saveButton').click();
|
||||
|
||||
cy.contains('Helena').rightclick();
|
||||
cy.contains('Delete document').click();
|
||||
cy.contains('Save').click();
|
||||
cy.themeshot('dynamodb-save-changes');
|
||||
|
||||
cy.testid('SqlObjectList_addButton').click();
|
||||
cy.contains('New collection/container').click();
|
||||
cy.themeshot('dynamodb-new-collection');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -110,55 +110,6 @@ describe('Charts', () => {
|
||||
cy.themeshot('new-object-window');
|
||||
});
|
||||
|
||||
it.skip('Database chat - charts', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').click();
|
||||
cy.testid('TabsPanel_buttonNewObject').click();
|
||||
cy.testid('NewObjectModal_databaseChat').click();
|
||||
cy.wait(1000);
|
||||
cy.get('body').realType('show me chart of most popular genres');
|
||||
cy.get('body').realPress('{enter}');
|
||||
cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 30000 }).click();
|
||||
cy.testid('chart-canvas', { timeout: 30000 }).should($c =>
|
||||
expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/)
|
||||
);
|
||||
cy.themeshot('database-chat-chart');
|
||||
});
|
||||
|
||||
it.skip('Database chat', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').click();
|
||||
cy.testid('TabsPanel_buttonNewObject').click();
|
||||
cy.testid('NewObjectModal_databaseChat').click();
|
||||
cy.wait(1000);
|
||||
cy.get('body').realType('find most popular artist');
|
||||
cy.get('body').realPress('{enter}');
|
||||
cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 30000 }).click();
|
||||
cy.wait(30000);
|
||||
// cy.contains('Iron Maiden');
|
||||
cy.themeshot('database-chat');
|
||||
|
||||
// cy.testid('DatabaseChatTab_promptInput').click();
|
||||
// cy.get('body').realType('I need top 10 songs with the biggest income');
|
||||
// cy.get('body').realPress('{enter}');
|
||||
// cy.contains('Hot Girl', { timeout: 20000 });
|
||||
// cy.wait(1000);
|
||||
// cy.themeshot('database-chat');
|
||||
});
|
||||
|
||||
it.skip('Explain query error', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').click();
|
||||
cy.testid('TabsPanel_buttonNewObject').click();
|
||||
cy.testid('NewObjectModal_query').click();
|
||||
cy.wait(1000);
|
||||
cy.get('body').realType('select * from Invoice2');
|
||||
cy.contains('Execute').click();
|
||||
cy.testid('MessageViewRow-explainErrorButton-1').click();
|
||||
cy.testid('ChatCodeRenderer_useSqlButton', { timeout: 30000 });
|
||||
cy.themeshot('explain-query-error');
|
||||
});
|
||||
|
||||
it('Switch language', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').click();
|
||||
|
||||
@@ -52,6 +52,9 @@ function multiTest(testProps, testDefinition) {
|
||||
if (localconfig.mongo && !testProps.skipMongo) {
|
||||
it('MongoDB', () => testDefinition('Mongo-connection', 'my_guitar_shop', 'mongo@dbgate-plugin-mongo'));
|
||||
}
|
||||
if (localconfig.dynamo && !testProps.skipMongo) {
|
||||
it('DynamoDB', () => testDefinition('Dynamo-connection', null, 'dynamodb@dbgate-plugin-dynamodb'));
|
||||
}
|
||||
}
|
||||
|
||||
describe('Transactions', () => {
|
||||
|
||||
@@ -0,0 +1,39 @@
|
||||
Cypress.on('uncaught:exception', err => {
|
||||
if (err.message.includes("Failed to execute 'importScripts' on 'WorkerGlobalScope'")) {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
cy.visit('http://localhost:3000');
|
||||
cy.viewport(1250, 900);
|
||||
});
|
||||
|
||||
describe('REST API connections', () => {
|
||||
it('GraphQL test', () => {
|
||||
cy.contains('REST GraphQL').click();
|
||||
cy.contains('products').click();
|
||||
cy.testid('GraphQlExplorerNode_toggle_products').click();
|
||||
cy.testid('GraphQlExplorerNode_checkbox_products.name').click();
|
||||
cy.testid('GraphQlExplorerNode_checkbox_products.price').click();
|
||||
cy.testid('GraphQlExplorerNode_checkbox_products.description').click();
|
||||
cy.testid('GraphQlExplorerNode_checkbox_products.category').click();
|
||||
cy.testid('GraphQlQueryTab_execute').click();
|
||||
cy.contains('Electronics');
|
||||
cy.themeshot('rest-graphql-query');
|
||||
});
|
||||
it('REST OpenAPI test', () => {
|
||||
cy.contains('REST OpenAPI').click();
|
||||
cy.contains('/api/categories').click();
|
||||
cy.testid('RestApiEndpointTab_execute').click();
|
||||
cy.contains('Electronics');
|
||||
cy.themeshot('rest-openapi-query');
|
||||
});
|
||||
it('REST OData test', () => {
|
||||
cy.contains('REST OData').click();
|
||||
cy.contains('/Users').click();
|
||||
cy.testid('ODataEndpointTab_execute').click();
|
||||
cy.contains('Henry');
|
||||
cy.themeshot('rest-odata-query');
|
||||
});
|
||||
});
|
||||
@@ -5,14 +5,14 @@ services:
|
||||
restart: always
|
||||
environment:
|
||||
POSTGRES_PASSWORD: Pwd2020Db
|
||||
ports:
|
||||
ports:
|
||||
- 16000:5432
|
||||
|
||||
mariadb:
|
||||
image: mariadb
|
||||
command: --default-authentication-plugin=mysql_native_password
|
||||
restart: always
|
||||
ports:
|
||||
ports:
|
||||
- 16004:3306
|
||||
environment:
|
||||
- MYSQL_ROOT_PASSWORD=Pwd2020Db
|
||||
@@ -20,21 +20,21 @@ services:
|
||||
mysql-ssh-login:
|
||||
build: containers/mysql-ssh-login
|
||||
restart: always
|
||||
ports:
|
||||
ports:
|
||||
- 16017:3306
|
||||
- "16012:22"
|
||||
- '16012:22'
|
||||
|
||||
mysql-ssh-keyfile:
|
||||
build: containers/mysql-ssh-keyfile
|
||||
restart: always
|
||||
ports:
|
||||
ports:
|
||||
- 16007:3306
|
||||
- "16008:22"
|
||||
- '16008:22'
|
||||
|
||||
dex:
|
||||
build: containers/dex
|
||||
ports:
|
||||
- "16009:5556"
|
||||
- '16009:5556'
|
||||
|
||||
mongo:
|
||||
image: mongo:4.4.29
|
||||
@@ -50,6 +50,11 @@ services:
|
||||
ports:
|
||||
- 16011:6379
|
||||
|
||||
dynamodb:
|
||||
image: amazon/dynamodb-local
|
||||
ports:
|
||||
- 16015:8000
|
||||
|
||||
mssql:
|
||||
image: mcr.microsoft.com/mssql/server
|
||||
restart: always
|
||||
|
||||
Vendored
+14
@@ -0,0 +1,14 @@
|
||||
CONNECTIONS=mysql,graphql
|
||||
|
||||
LOCAL_AI_GATEWAY=true
|
||||
|
||||
LABEL_mysql=MySql-connection
|
||||
SERVER_mysql=localhost
|
||||
USER_mysql=root
|
||||
PASSWORD_mysql=Pwd2020Db
|
||||
PORT_mysql=16004
|
||||
ENGINE_mysql=mysql@dbgate-plugin-mysql
|
||||
|
||||
LABEL_graphql=REST GraphQL
|
||||
ENGINE_graphql=graphql@rest
|
||||
APISERVERURL1_graphql=http://localhost:4444/graphql/noauth
|
||||
Vendored
+7
-1
@@ -1,4 +1,4 @@
|
||||
CONNECTIONS=mysql,postgres,mongo
|
||||
CONNECTIONS=mysql,postgres,mongo,dynamo
|
||||
|
||||
LABEL_mysql=MySql-connection
|
||||
SERVER_mysql=localhost
|
||||
@@ -22,3 +22,9 @@ USER_mongo=root
|
||||
PASSWORD_mongo=Pwd2020Db
|
||||
PORT_mongo=16010
|
||||
ENGINE_mongo=mongo@dbgate-plugin-mongo
|
||||
|
||||
LABEL_dynamo=Dynamo-connection
|
||||
SERVER_dynamo=localhost
|
||||
PORT_dynamo=16015
|
||||
AUTH_TYPE_dynamo=onpremise
|
||||
ENGINE_dynamo=dynamodb@dbgate-plugin-dynamodb
|
||||
|
||||
Vendored
+8
-1
@@ -1,4 +1,4 @@
|
||||
CONNECTIONS=mysql,postgres,mssql,oracle,sqlite,mongo
|
||||
CONNECTIONS=mysql,postgres,mssql,oracle,sqlite,mongo,dynamo
|
||||
LOG_CONNECTION_SENSITIVE_VALUES=true
|
||||
|
||||
LABEL_mysql=MySql-connection
|
||||
@@ -43,3 +43,10 @@ PASSWORD_mongo=Pwd2020Db
|
||||
PORT_mongo=16010
|
||||
ENGINE_mongo=mongo@dbgate-plugin-mongo
|
||||
|
||||
LABEL_dynamo=Dynamo-connection
|
||||
SERVER_dynamo=localhost
|
||||
PORT_dynamo=16015
|
||||
AUTH_TYPE_dynamo=onpremise
|
||||
DATABASE_dynamo=localhost
|
||||
ENGINE_dynamo=dynamodb@dbgate-plugin-dynamodb
|
||||
|
||||
|
||||
Vendored
+14
@@ -0,0 +1,14 @@
|
||||
CONNECTIONS=odata,openapi,graphql
|
||||
|
||||
LABEL_odata=REST OData
|
||||
ENGINE_odata=odata@rest
|
||||
APISERVERURL1_odata=http://localhost:4444/odata/noauth
|
||||
|
||||
LABEL_openapi=REST OpenAPI
|
||||
ENGINE_openapi=openapi@rest
|
||||
APISERVERURL1_openapi=http://localhost:4444/openapi.json
|
||||
APISERVERURL2_openapi=http://localhost:4444/openapi/noauth
|
||||
|
||||
LABEL_graphql=REST GraphQL
|
||||
ENGINE_graphql=graphql@rest
|
||||
APISERVERURL1_graphql=http://localhost:4444/graphql/noauth
|
||||
@@ -0,0 +1,168 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { spawn, spawnSync } = require('child_process');
|
||||
|
||||
const rootDir = path.resolve(__dirname, '..', '..');
|
||||
const testApiDir = path.join(rootDir, 'test-api');
|
||||
const aigwmockDir = path.join(rootDir, 'packages', 'aigwmock');
|
||||
const tmpDataDir = path.resolve(__dirname, '..', 'tmpdata');
|
||||
const testApiPidFile = path.join(tmpDataDir, 'test-api.pid');
|
||||
const aigwmockPidFile = path.join(tmpDataDir, 'aigwmock.pid');
|
||||
const isWindows = process.platform === 'win32';
|
||||
|
||||
const dbgateApi = require('dbgate-api');
|
||||
dbgateApi.initializeApiEnvironment();
|
||||
const dbgatePluginMysql = require('dbgate-plugin-mysql');
|
||||
dbgateApi.registerPlugins(dbgatePluginMysql);
|
||||
|
||||
function delay(ms) {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
// --- MySQL setup (same as charts init) ---
|
||||
|
||||
async function initMySqlDatabase(dbname, inputFile) {
|
||||
const connection = {
|
||||
server: process.env.SERVER_mysql,
|
||||
user: process.env.USER_mysql,
|
||||
password: process.env.PASSWORD_mysql,
|
||||
port: process.env.PORT_mysql,
|
||||
engine: 'mysql@dbgate-plugin-mysql',
|
||||
};
|
||||
|
||||
await dbgateApi.executeQuery({
|
||||
connection,
|
||||
sql: `DROP DATABASE IF EXISTS ${dbname}`,
|
||||
});
|
||||
|
||||
await dbgateApi.executeQuery({
|
||||
connection,
|
||||
sql: `CREATE DATABASE ${dbname}`,
|
||||
});
|
||||
|
||||
await dbgateApi.importDatabase({
|
||||
connection: { ...connection, database: dbname },
|
||||
inputFile,
|
||||
});
|
||||
}
|
||||
|
||||
// --- Process management helpers ---
|
||||
|
||||
function readProcessStartTime(pid) {
|
||||
if (process.platform === 'linux') {
|
||||
try {
|
||||
const stat = fs.readFileSync(`/proc/${pid}/stat`, 'utf-8');
|
||||
return stat.split(' ')[21] || null;
|
||||
} catch (err) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function isPidStillOurs(meta) {
|
||||
if (!meta || !(meta.pid > 0)) return false;
|
||||
if (process.platform === 'linux' && meta.startTime) {
|
||||
const current = readProcessStartTime(meta.pid);
|
||||
return current === meta.startTime;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function stopProcess(pidFile) {
|
||||
if (!fs.existsSync(pidFile)) return;
|
||||
try {
|
||||
const content = fs.readFileSync(pidFile, 'utf-8').trim();
|
||||
let meta;
|
||||
try {
|
||||
meta = JSON.parse(content);
|
||||
} catch (_) {
|
||||
const pid = Number(content);
|
||||
meta = Number.isInteger(pid) && pid > 0 ? { pid } : null;
|
||||
}
|
||||
if (isPidStillOurs(meta)) {
|
||||
process.kill(meta.pid);
|
||||
}
|
||||
} catch (err) {
|
||||
// ignore stale pid or already terminated
|
||||
}
|
||||
try {
|
||||
fs.unlinkSync(pidFile);
|
||||
} catch (err) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
function ensureDependencies(dir, checkFile) {
|
||||
if (fs.existsSync(checkFile)) return;
|
||||
const command = isWindows ? 'cmd.exe' : 'yarn';
|
||||
const args = isWindows ? ['/c', 'yarn install --silent'] : ['install', '--silent'];
|
||||
const result = spawnSync(command, args, {
|
||||
cwd: dir,
|
||||
stdio: 'inherit',
|
||||
env: process.env,
|
||||
});
|
||||
if (result.status !== 0) {
|
||||
throw new Error(`DBGM-00297 Failed to install dependencies in ${dir}`);
|
||||
}
|
||||
}
|
||||
|
||||
function startBackgroundProcess(dir, pidFile, port) {
|
||||
const command = isWindows ? 'cmd.exe' : 'yarn';
|
||||
const args = isWindows ? ['/c', 'yarn start'] : ['start'];
|
||||
const child = spawn(command, args, {
|
||||
cwd: dir,
|
||||
env: { ...process.env, PORT: String(port) },
|
||||
detached: true,
|
||||
stdio: 'ignore',
|
||||
});
|
||||
child.unref();
|
||||
fs.mkdirSync(path.dirname(pidFile), { recursive: true });
|
||||
const meta = { pid: child.pid };
|
||||
const startTime = readProcessStartTime(child.pid);
|
||||
if (startTime) meta.startTime = startTime;
|
||||
fs.writeFileSync(pidFile, JSON.stringify(meta));
|
||||
}
|
||||
|
||||
async function waitForReady(url, timeoutMs = 30000) {
|
||||
const startedAt = Date.now();
|
||||
while (Date.now() - startedAt < timeoutMs) {
|
||||
try {
|
||||
const response = await fetch(url);
|
||||
if (response.ok) return;
|
||||
} catch (err) {
|
||||
// continue waiting
|
||||
}
|
||||
await delay(500);
|
||||
}
|
||||
throw new Error(`DBGM-00305 Server at ${url} did not start in time`);
|
||||
}
|
||||
|
||||
// --- Main ---
|
||||
|
||||
async function run() {
|
||||
// 1. Set up MyChinook MySQL database
|
||||
console.log('[ai-chat init] Setting up MyChinook database...');
|
||||
await initMySqlDatabase('MyChinook', path.resolve(path.join(__dirname, '../data/chinook-mysql.sql')));
|
||||
|
||||
// 2. Start test-api (GraphQL/REST server on port 4444)
|
||||
console.log('[ai-chat init] Starting test-api on port 4444...');
|
||||
stopProcess(testApiPidFile);
|
||||
ensureDependencies(testApiDir, path.join(testApiDir, 'node_modules', 'swagger-jsdoc', 'package.json'));
|
||||
startBackgroundProcess(testApiDir, testApiPidFile, 4444);
|
||||
await waitForReady('http://localhost:4444/openapi.json');
|
||||
console.log('[ai-chat init] test-api is ready');
|
||||
|
||||
// 3. Start aigwmock (AI Gateway mock on port 3110)
|
||||
console.log('[ai-chat init] Starting aigwmock on port 3110...');
|
||||
stopProcess(aigwmockPidFile);
|
||||
ensureDependencies(aigwmockDir, path.join(aigwmockDir, 'node_modules', 'express', 'package.json'));
|
||||
startBackgroundProcess(aigwmockDir, aigwmockPidFile, 3110);
|
||||
await waitForReady('http://localhost:3110/openrouter/v1/models');
|
||||
console.log('[ai-chat init] aigwmock is ready');
|
||||
}
|
||||
|
||||
run().catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -8,6 +8,8 @@ const dbgatePluginMysql = require('dbgate-plugin-mysql');
|
||||
dbgateApi.registerPlugins(dbgatePluginMysql);
|
||||
const dbgatePluginPostgres = require('dbgate-plugin-postgres');
|
||||
dbgateApi.registerPlugins(dbgatePluginPostgres);
|
||||
const dbgatePluginDynamodb = require('dbgate-plugin-dynamodb');
|
||||
dbgateApi.registerPlugins(dbgatePluginDynamodb);
|
||||
|
||||
async function initMySqlDatabase(dbname, inputFile) {
|
||||
await dbgateApi.executeQuery({
|
||||
@@ -125,6 +127,34 @@ async function initMongoDatabase(dbname, inputDirectory) {
|
||||
// });
|
||||
}
|
||||
|
||||
async function initDynamoDatabase(inputDirectory) {
|
||||
const dynamodbConnection = {
|
||||
server: process.env.SERVER_dynamo,
|
||||
port: process.env.PORT_dynamo,
|
||||
authType: 'onpremise',
|
||||
engine: 'dynamodb@dbgate-plugin-dynamodb',
|
||||
};
|
||||
|
||||
const driver = dbgatePluginDynamodb.drivers.find(d => d.engine === 'dynamodb@dbgate-plugin-dynamodb');
|
||||
const pool = await driver.connect(dynamodbConnection);
|
||||
const collections = await driver.listCollections(pool);
|
||||
for (const collection of collections) {
|
||||
await driver.dropTable(pool, collection);
|
||||
}
|
||||
await driver.disconnect(pool);
|
||||
|
||||
for (const file of fs.readdirSync(inputDirectory)) {
|
||||
const pureName = path.parse(file).name;
|
||||
const src = await dbgateApi.jsonLinesReader({ fileName: path.join(inputDirectory, file) });
|
||||
const dst = await dbgateApi.tableWriter({
|
||||
connection: dynamodbConnection,
|
||||
pureName,
|
||||
createIfNotExists: true,
|
||||
});
|
||||
await dbgateApi.copyStream(src, dst);
|
||||
}
|
||||
}
|
||||
|
||||
const baseDir = path.join(os.homedir(), '.dbgate');
|
||||
|
||||
async function copyFolder(source, target) {
|
||||
@@ -148,6 +178,8 @@ async function run() {
|
||||
await initMongoDatabase('MgChinook', path.resolve(path.join(__dirname, '../data/chinook-jsonl')));
|
||||
await initMongoDatabase('MgRivers', path.resolve(path.join(__dirname, '../data/rivers-jsonl')));
|
||||
|
||||
await initDynamoDatabase(path.resolve(path.join(__dirname, '../data/chinook-jsonl')));
|
||||
|
||||
await copyFolder(
|
||||
path.resolve(path.join(__dirname, '../data/chinook-jsonl')),
|
||||
path.join(baseDir, 'archive-e2etests', 'default')
|
||||
|
||||
@@ -7,6 +7,8 @@ const dbgatePluginMysql = require('dbgate-plugin-mysql');
|
||||
dbgateApi.registerPlugins(dbgatePluginMysql);
|
||||
const dbgatePluginPostgres = require('dbgate-plugin-postgres');
|
||||
dbgateApi.registerPlugins(dbgatePluginPostgres);
|
||||
const dbgatePluginDynamodb = require('dbgate-plugin-dynamodb');
|
||||
dbgateApi.registerPlugins(dbgatePluginDynamodb);
|
||||
|
||||
async function createDb(connection, dropDbSql, createDbSql, database = 'my_guitar_shop', { dropDatabaseName } = {}) {
|
||||
if (dropDbSql) {
|
||||
@@ -125,6 +127,28 @@ async function run() {
|
||||
{ dropDatabaseName: 'my_guitar_shop' }
|
||||
);
|
||||
}
|
||||
|
||||
if (localconfig.dynamo) {
|
||||
const dynamodbConnection = {
|
||||
server: process.env.SERVER_dynamo,
|
||||
port: process.env.PORT_dynamo,
|
||||
authType: 'onpremise',
|
||||
engine: 'dynamodb@dbgate-plugin-dynamodb',
|
||||
};
|
||||
|
||||
const driver = dbgatePluginDynamodb.drivers.find(d => d.engine === 'dynamodb@dbgate-plugin-dynamodb');
|
||||
const pool = await driver.connect(dynamodbConnection);
|
||||
const collections = await driver.listCollections(pool);
|
||||
for (const collection of collections) {
|
||||
await driver.dropTable(pool, collection);
|
||||
}
|
||||
await driver.disconnect(pool);
|
||||
|
||||
await dbgateApi.importDbFromFolder({
|
||||
connection: dynamodbConnection,
|
||||
folder: path.resolve(path.join(__dirname, '../data/my-guitar-shop')),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
dbgateApi.runScript(run);
|
||||
|
||||
@@ -0,0 +1,133 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { spawn, spawnSync } = require('child_process');
|
||||
|
||||
const rootDir = path.resolve(__dirname, '..', '..');
|
||||
const testApiDir = path.join(rootDir, 'test-api');
|
||||
const pidFile = path.resolve(__dirname, '..', 'tmpdata', 'test-api.pid');
|
||||
const isWindows = process.platform === 'win32';
|
||||
|
||||
function delay(ms) {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
async function waitForApiReady(timeoutMs = 30000) {
|
||||
const startedAt = Date.now();
|
||||
|
||||
while (Date.now() - startedAt < timeoutMs) {
|
||||
try {
|
||||
const response = await fetch('http://localhost:4444/openapi.json');
|
||||
if (response.ok) {
|
||||
return;
|
||||
}
|
||||
} catch (err) {
|
||||
// continue waiting
|
||||
}
|
||||
|
||||
await delay(500);
|
||||
}
|
||||
|
||||
throw new Error('DBGM-00306 test-api did not start on port 4444 in time');
|
||||
}
|
||||
|
||||
function readProcessStartTime(pid) {
|
||||
if (process.platform === 'linux') {
|
||||
try {
|
||||
const stat = fs.readFileSync(`/proc/${pid}/stat`, 'utf-8');
|
||||
return stat.split(' ')[21] || null;
|
||||
} catch (err) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function isPidStillOurs(meta) {
|
||||
if (!meta || !(meta.pid > 0)) return false;
|
||||
if (process.platform === 'linux' && meta.startTime) {
|
||||
const current = readProcessStartTime(meta.pid);
|
||||
return current === meta.startTime;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function stopPreviousTestApi() {
|
||||
if (!fs.existsSync(pidFile)) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const content = fs.readFileSync(pidFile, 'utf-8').trim();
|
||||
let meta;
|
||||
try {
|
||||
meta = JSON.parse(content);
|
||||
} catch (_) {
|
||||
const pid = Number(content);
|
||||
meta = Number.isInteger(pid) && pid > 0 ? { pid } : null;
|
||||
}
|
||||
if (isPidStillOurs(meta)) {
|
||||
process.kill(meta.pid);
|
||||
}
|
||||
} catch (err) {
|
||||
// ignore stale pid file or already terminated process
|
||||
}
|
||||
|
||||
try {
|
||||
fs.unlinkSync(pidFile);
|
||||
} catch (err) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
function startTestApi() {
|
||||
const command = isWindows ? 'cmd.exe' : 'yarn';
|
||||
const args = isWindows ? ['/c', 'yarn start'] : ['start'];
|
||||
|
||||
const child = spawn(command, args, {
|
||||
cwd: testApiDir,
|
||||
env: {
|
||||
...process.env,
|
||||
PORT: '4444',
|
||||
},
|
||||
detached: true,
|
||||
stdio: 'ignore',
|
||||
});
|
||||
|
||||
child.unref();
|
||||
fs.mkdirSync(path.dirname(pidFile), { recursive: true });
|
||||
const meta = { pid: child.pid };
|
||||
const startTime = readProcessStartTime(child.pid);
|
||||
if (startTime) meta.startTime = startTime;
|
||||
fs.writeFileSync(pidFile, JSON.stringify(meta));
|
||||
}
|
||||
|
||||
function ensureTestApiDependencies() {
|
||||
const dependencyCheckFile = path.join(testApiDir, 'node_modules', 'swagger-jsdoc', 'package.json');
|
||||
if (fs.existsSync(dependencyCheckFile)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const installCommand = isWindows ? 'cmd.exe' : 'yarn';
|
||||
const installArgs = isWindows ? ['/c', 'yarn install --silent'] : ['install', '--silent'];
|
||||
const result = spawnSync(installCommand, installArgs, {
|
||||
cwd: testApiDir,
|
||||
stdio: 'inherit',
|
||||
env: process.env,
|
||||
});
|
||||
|
||||
if (result.status !== 0) {
|
||||
throw new Error('DBGM-00307 Failed to install test-api dependencies');
|
||||
}
|
||||
}
|
||||
|
||||
async function run() {
|
||||
stopPreviousTestApi();
|
||||
ensureTestApiDependencies();
|
||||
startTestApi();
|
||||
await waitForApiReady();
|
||||
}
|
||||
|
||||
run().catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -19,30 +19,36 @@
|
||||
"cy:run:portal": "cypress run --spec cypress/e2e/portal.cy.js",
|
||||
"cy:run:oauth": "cypress run --spec cypress/e2e/oauth.cy.js",
|
||||
"cy:run:browse-data": "cypress run --spec cypress/e2e/browse-data.cy.js",
|
||||
"cy:run:rest": "cypress run --spec cypress/e2e/rest.cy.js",
|
||||
"cy:run:team": "cypress run --spec cypress/e2e/team.cy.js",
|
||||
"cy:run:multi-sql": "cypress run --spec cypress/e2e/multi-sql.cy.js",
|
||||
"cy:run:cloud": "cypress run --spec cypress/e2e/cloud.cy.js",
|
||||
"cy:run:charts": "cypress run --spec cypress/e2e/charts.cy.js",
|
||||
"cy:run:redis": "cypress run --spec cypress/e2e/redis.cy.js",
|
||||
"cy:run:ai-chat": "cypress run --spec cypress/e2e/ai-chat.cy.js",
|
||||
"start:add-connection": "node clearTestingData && cd .. && node packer/build/bundle.js --listen-api --run-e2e-tests",
|
||||
"start:portal": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/portal/.env node e2e-tests/init/portal.js && env-cmd -f e2e-tests/env/portal/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
|
||||
"start:oauth": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/oauth/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
|
||||
"start:browse-data": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/browse-data/.env node e2e-tests/init/browse-data.js && env-cmd -f e2e-tests/env/browse-data/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
|
||||
"start:rest": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/rest/.env node e2e-tests/init/rest.js && env-cmd -f e2e-tests/env/rest/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
|
||||
"start:team": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/team/.env node e2e-tests/init/team.js && env-cmd -f e2e-tests/env/team/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
|
||||
"start:multi-sql": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/multi-sql/.env node e2e-tests/init/multi-sql.js && env-cmd -f e2e-tests/env/multi-sql/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
|
||||
"start:cloud": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/cloud/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
|
||||
"start:charts": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/charts/.env node e2e-tests/init/charts.js && env-cmd -f e2e-tests/env/charts/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
|
||||
"start:redis": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/redis/.env node e2e-tests/init/redis.js && env-cmd -f e2e-tests/env/redis/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
|
||||
"start:ai-chat": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/ai-chat/.env node e2e-tests/init/ai-chat.js && env-cmd -f e2e-tests/env/ai-chat/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
|
||||
"test:add-connection": "start-server-and-test start:add-connection http://localhost:3000 cy:run:add-connection",
|
||||
"test:portal": "start-server-and-test start:portal http://localhost:3000 cy:run:portal",
|
||||
"test:oauth": "start-server-and-test start:oauth http://localhost:3000 cy:run:oauth",
|
||||
"test:browse-data": "start-server-and-test start:browse-data http://localhost:3000 cy:run:browse-data",
|
||||
"test:rest": "start-server-and-test start:rest http://localhost:3000 cy:run:rest",
|
||||
"test:team": "start-server-and-test start:team http://localhost:3000 cy:run:team",
|
||||
"test:multi-sql": "start-server-and-test start:multi-sql http://localhost:3000 cy:run:multi-sql",
|
||||
"test:cloud": "start-server-and-test start:cloud http://localhost:3000 cy:run:cloud",
|
||||
"test:charts": "start-server-and-test start:charts http://localhost:3000 cy:run:charts",
|
||||
"test:redis": "start-server-and-test start:redis http://localhost:3000 cy:run:redis",
|
||||
"test": "yarn test:add-connection && yarn test:portal && yarn test:oauth && yarn test:browse-data && yarn test:team && yarn test:multi-sql && yarn test:cloud && yarn test:charts && yarn test:redis",
|
||||
"test:ai-chat": "start-server-and-test start:ai-chat http://localhost:3000 cy:run:ai-chat",
|
||||
"test": "yarn test:add-connection && yarn test:portal && yarn test:oauth && yarn test:browse-data && yarn test:rest && yarn test:team && yarn test:multi-sql && yarn test:cloud && yarn test:charts && yarn test:redis && yarn test:ai-chat",
|
||||
"test:ci": "yarn test"
|
||||
},
|
||||
"dependencies": {}
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
test-api.pid
|
||||
aigwmock.pid
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "dbgate-integration-tests",
|
||||
"version": "7.0.0-alpha.1",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"homepage": "https://www.dbgate.io/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
|
||||
+1
-1
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"private": true,
|
||||
"version": "7.0.7-beta.2",
|
||||
"version": "7.1.6",
|
||||
"name": "dbgate-all",
|
||||
"workspaces": [
|
||||
"packages/*",
|
||||
|
||||
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"name": "dbgate-aigwmock",
|
||||
"version": "1.0.0",
|
||||
"description": "Mock AI Gateway server for E2E testing",
|
||||
"main": "src/index.js",
|
||||
"scripts": {
|
||||
"start": "node src/index.js"
|
||||
},
|
||||
"license": "GPL-3.0",
|
||||
"dependencies": {
|
||||
"cors": "^2.8.6",
|
||||
"express": "^5.2.1"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,202 @@
|
||||
const express = require('express');
|
||||
const cors = require('cors');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
// Express app serving OpenAI-compatible mock endpoints for E2E tests.
const app = express();
app.use(cors());
// Generous body limit: chat requests may embed whole table schemas / query results.
app.use(express.json({ limit: '50mb' }));

// Canned scenarios; see mockResponses.json for the match-regex / steps format.
const mockResponsesFile = path.join(__dirname, 'mockResponses.json');
const responses = JSON.parse(fs.readFileSync(mockResponsesFile, 'utf-8'));

// Monotonic counter used to mint unique tool-call ids across all requests.
let callCounter = 0;
|
||||
|
||||
// GET /openrouter/v1/models
//
// Advertises a single fake model so the client's model picker works offline.
app.get('/openrouter/v1/models', (req, res) => {
  const mockModel = { id: 'mock-model', name: 'Mock Model' };
  res.json({
    data: [mockModel],
    preferredModel: 'mock-model',
  });
});
||||
|
||||
// POST /openrouter/v1/chat/completions
//
// Replays a scripted conversation: the first user message selects a scenario
// (case-insensitive regex match from mockResponses.json), and the number of
// assistant turns already in the history selects which step of that scenario
// to stream back next.
app.post('/openrouter/v1/chat/completions', (req, res) => {
  const history = req.body.messages || [];

  // The scenario is keyed off the first user message; system messages are skipped.
  const firstUser = history.find(msg => msg.role === 'user');
  if (!firstUser) {
    return streamTextResponse(res, "I don't have enough context to help. Please ask a question.");
  }

  // Each assistant reply already present advances the scenario by one step.
  const stepIndex = history.filter(msg => msg.role === 'assistant').length;

  const scenario = responses.scenarios.find(candidate =>
    new RegExp(candidate.match, 'i').test(firstUser.content)
  );

  if (!scenario) {
    console.log(`[aigwmock] No scenario matched for: "${firstUser.content}"`);
    return streamTextResponse(res, "I'm a mock AI assistant. I don't have a prepared response for that question.");
  }

  const step = scenario.steps[stepIndex];
  if (!step) {
    console.log(`[aigwmock] No more steps for scenario (step ${stepIndex})`);
    return streamTextResponse(res, "I've completed my analysis of this topic.");
  }

  console.log(`[aigwmock] Scenario matched: "${scenario.match}", step ${stepIndex}, type: ${step.type}`);

  return step.type === 'tool_calls'
    ? streamToolCallResponse(res, step.tool_calls)
    : streamTextResponse(res, step.content);
});
||||
|
||||
/**
 * Streams `content` back as an OpenAI-style SSE chat completion.
 *
 * Emits: one role-announcement chunk, the text in 20-character slices
 * (to mimic real token streaming), a finish chunk with reason 'stop',
 * and the terminating `data: [DONE]` frame, then ends the response.
 *
 * @param {object} res - Express response (used for writeHead/write/end).
 * @param {string} content - Full assistant message text to stream.
 */
function streamTextResponse(res, content) {
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    Connection: 'keep-alive',
  });

  const id = `chatcmpl-mock-${Date.now()}`;
  const created = Math.floor(Date.now() / 1000);

  // All chunks share the same envelope; only delta/finish_reason vary.
  const makeChunk = (delta, finish_reason) => ({
    id,
    object: 'chat.completion.chunk',
    created,
    model: 'mock-model',
    choices: [{ index: 0, delta, finish_reason }],
  });

  // Initial chunk carries the assistant role with empty content.
  writeSSE(res, makeChunk({ role: 'assistant', content: '' }, null));

  // Stream the text in fixed-size slices for realistic chunking.
  const SLICE = 20;
  for (let offset = 0; offset < content.length; offset += SLICE) {
    writeSSE(res, makeChunk({ content: content.substring(offset, offset + SLICE) }, null));
  }

  // Final chunk signals normal completion.
  writeSSE(res, makeChunk({}, 'stop'));

  res.write('data: [DONE]\n\n');
  res.end();
}
|
||||
|
||||
/**
 * Streams a set of tool calls back as OpenAI-style SSE chat completion chunks.
 *
 * For each tool call: a header chunk announcing a freshly minted call id and
 * the function name (with empty arguments), followed by a chunk carrying the
 * JSON-serialized arguments. Finishes with reason 'tool_calls' and the
 * terminating `data: [DONE]` frame.
 *
 * @param {object} res - Express response (used for writeHead/write/end).
 * @param {Array<{name: string, arguments: object}>} toolCalls - Calls to emit.
 */
function streamToolCallResponse(res, toolCalls) {
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    Connection: 'keep-alive',
  });

  const id = `chatcmpl-mock-${Date.now()}`;
  const created = Math.floor(Date.now() / 1000);

  // Shared envelope for every chunk of this response.
  const emit = (delta, finish_reason) =>
    writeSSE(res, {
      id,
      object: 'chat.completion.chunk',
      created,
      model: 'mock-model',
      choices: [{ index: 0, delta, finish_reason }],
    });

  toolCalls.forEach((call, position) => {
    // Unique id per call, minted from the module-level counter.
    const callId = `call_mock_${++callCounter}`;

    // Header chunk: announces the call id/name with empty arguments.
    // Only the very first tool call carries the assistant role.
    const header = {
      tool_calls: [{ index: position, id: callId, type: 'function', function: { name: call.name, arguments: '' } }],
    };
    if (position === 0) {
      emit({ role: 'assistant', content: null, ...header }, null);
    } else {
      emit(header, null);
    }

    // Argument chunk: the serialized arguments delivered in one piece.
    emit({ tool_calls: [{ index: position, function: { arguments: JSON.stringify(call.arguments) } }] }, null);
  });

  // Final chunk signals that the model stopped to request tool execution.
  emit({}, 'tool_calls');

  res.write('data: [DONE]\n\n');
  res.end();
}
|
||||
|
||||
/**
 * Writes a single Server-Sent-Events frame: `data: <json>` plus a blank line.
 *
 * @param {object} res - Express response to write to.
 * @param {object} data - Payload; serialized with JSON.stringify.
 */
function writeSSE(res, data) {
  const payload = JSON.stringify(data);
  res.write('data: ' + payload + '\n\n');
}
|
||||
|
||||
// Default port 3110 matches the e2e env configuration; override with PORT.
const port = process.env.PORT || 3110;
const onListening = () => {
  console.log(`[aigwmock] AI Gateway mock server listening on port ${port}`);
};
app.listen(port, onListening);
|
||||
@@ -0,0 +1,193 @@
|
||||
{
|
||||
"scenarios": [
|
||||
{
|
||||
"match": "chart.*popular.*genre|popular.*genre.*chart|most popular genre",
|
||||
"steps": [
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "get_table_schema", "arguments": { "table": "Genre" } }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "get_table_schema", "arguments": { "table": "Track" } }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{
|
||||
"name": "execute_sql_select",
|
||||
"arguments": {
|
||||
"sql": "SELECT g.Name AS genre, COUNT(t.TrackId) AS track_count FROM Genre g JOIN Track t ON g.GenreId = t.GenreId GROUP BY g.Name ORDER BY track_count DESC LIMIT 10"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": "Here is a chart showing the most popular genres by track count:\n\n```chart\n{\"type\":\"bar\",\"data\":{\"labels\":[\"Rock\",\"Latin\",\"Metal\",\"Alternative & Punk\",\"Jazz\",\"Blues\",\"Classical\",\"R&B/Soul\",\"Reggae\",\"Pop\"],\"datasets\":[{\"label\":\"Track Count\",\"data\":[1297,579,374,332,130,81,74,61,58,48]}]},\"options\":{\"plugins\":{\"title\":{\"display\":true,\"text\":\"Most Popular Genres by Track Count\"}}}}\n```"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"match": "most popular artist|popular artist|top artist",
|
||||
"steps": [
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "get_table_schema", "arguments": { "table": "Artist" } }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "get_table_schema", "arguments": { "table": "Album" } }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "get_table_schema", "arguments": { "table": "Track" } }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{
|
||||
"name": "execute_sql_select",
|
||||
"arguments": {
|
||||
"sql": "SELECT ar.Name AS artist, COUNT(t.TrackId) AS track_count FROM Artist ar JOIN Album al ON ar.ArtistId = al.ArtistId JOIN Track t ON al.AlbumId = t.AlbumId GROUP BY ar.Name ORDER BY track_count DESC LIMIT 10"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": "The most popular artist by number of tracks is **Iron Maiden** with 213 tracks, followed by **U2** with 135 tracks and **Led Zeppelin** with 114 tracks."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"match": "list.*user|show.*user|get.*user",
|
||||
"steps": [
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "graphql_introspect_schema", "arguments": {} }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{
|
||||
"name": "execute_graphql_query",
|
||||
"arguments": {
|
||||
"query": "{ users { id firstName lastName email } }"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": "Here are the users from the GraphQL API. The system contains multiple registered users with their names and email addresses."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"match": "chart.*product.*categor|product.*categor.*chart|chart.*categor",
|
||||
"steps": [
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "graphql_introspect_schema", "arguments": {} }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{
|
||||
"name": "execute_graphql_query",
|
||||
"arguments": {
|
||||
"query": "{ products { category } }"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": "Here is a bar chart showing the distribution of products across categories:\n\n```chart\n{\"type\":\"bar\",\"data\":{\"labels\":[\"Electronics\",\"Clothing\",\"Books\",\"Home & Garden\",\"Sports\",\"Toys\"],\"datasets\":[{\"label\":\"Number of Products\",\"data\":[35,30,33,38,32,32]}]},\"options\":{\"plugins\":{\"title\":{\"display\":true,\"text\":\"Products by Category\"}}}}\n```"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"match": "most expensive product|expensive.*product|highest price",
|
||||
"steps": [
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "graphql_introspect_schema", "arguments": {} }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{
|
||||
"name": "execute_graphql_query",
|
||||
"arguments": {
|
||||
"query": "{ products { id name price category } }"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": "Based on the query results, I found the most expensive product in the system. The product details are shown in the query results above."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"match": "show.*categor|list.*categor|all.*categor",
|
||||
"steps": [
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "graphql_introspect_schema", "arguments": {} }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{
|
||||
"name": "execute_graphql_query",
|
||||
"arguments": {
|
||||
"query": "{ categories { id name description active } }"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": "Here are all the categories available in the system. Each category has a name, description, and active status indicating whether it is currently in use."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"match": "Explain the following error|doesn't exist|does not exist",
|
||||
"steps": [
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "get_table_schema", "arguments": { "table": "Invoice" } }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": "The error occurs because the table `Invoice2` does not exist in the `MyChinook` database. The correct table name is `Invoice`. Here is the corrected query:\n\n```sql\nSELECT * FROM Invoice\n```\n\nThe table name had a typo — `Invoice2` instead of `Invoice`. The `Invoice` table contains columns like `InvoiceId`, `CustomerId`, `InvoiceDate`, `Total`, and billing address fields."
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
Vendored
+6
-1
@@ -1,6 +1,7 @@
|
||||
DEVMODE=1
|
||||
DEVWEB=1
|
||||
|
||||
CONNECTIONS=mysql,postgres,mongo,redis,mssql,oracle
|
||||
CONNECTIONS=mysql,postgres,mongo,redis,mssql,oracle,mongourl
|
||||
|
||||
LABEL_mysql=MySql
|
||||
SERVER_mysql=dbgatedckstage1.sprinx.cz
|
||||
@@ -43,6 +44,10 @@ PORT_oracle=1521
|
||||
ENGINE_oracle=oracle@dbgate-plugin-oracle
|
||||
SERVICE_NAME_oracle=xe
|
||||
|
||||
LABEL_mongourl=Mongo URL
|
||||
URL_mongourl=mongodb://root:Pwd2020Db@dbgatedckstage1.sprinx.cz:27017
|
||||
ENGINE_mongourl=mongo@dbgate-plugin-mongo
|
||||
|
||||
# SETTINGS_dataGrid.showHintColumns=1
|
||||
|
||||
# docker run -p 3000:3000 -e CONNECTIONS=mongo -e URL_mongo=mongodb://localhost:27017 -e ENGINE_mongo=mongo@dbgate-plugin-mongo -e LABEL_mongo=mongo dbgate/dbgate:beta
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"name": "dbgate-api",
|
||||
"main": "src/index.js",
|
||||
"version": "7.0.0-alpha.1",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"homepage": "https://www.dbgate.io/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
@@ -31,7 +31,7 @@
|
||||
"cors": "^2.8.5",
|
||||
"cross-env": "^6.0.3",
|
||||
"dbgate-datalib": "^7.0.0-alpha.1",
|
||||
"dbgate-query-splitter": "^4.11.9",
|
||||
"dbgate-query-splitter": "^4.12.0",
|
||||
"dbgate-rest": "^7.0.0-alpha.1",
|
||||
"dbgate-sqltree": "^7.0.0-alpha.1",
|
||||
"dbgate-tools": "^7.0.0-alpha.1",
|
||||
|
||||
@@ -202,7 +202,7 @@ module.exports = {
|
||||
|
||||
const storageConnections = await storage.connections(req);
|
||||
if (storageConnections) {
|
||||
return storageConnections;
|
||||
return storageConnections.map(maskConnection);
|
||||
}
|
||||
if (portalConnections) {
|
||||
if (platformInfo.allowShellConnection) return portalConnections.map(x => encryptConnection(x));
|
||||
@@ -484,7 +484,7 @@ module.exports = {
|
||||
|
||||
const storageConnection = await storage.getConnection({ conid });
|
||||
if (storageConnection) {
|
||||
return storageConnection;
|
||||
return mask ? maskConnection(storageConnection) : storageConnection;
|
||||
}
|
||||
|
||||
if (portalConnections) {
|
||||
@@ -502,6 +502,9 @@ module.exports = {
|
||||
_id: '__model',
|
||||
};
|
||||
}
|
||||
if (!conid) {
|
||||
return null;
|
||||
}
|
||||
await testConnectionPermission(conid, req);
|
||||
return this.getCore({ conid, mask: true });
|
||||
},
|
||||
|
||||
@@ -15,6 +15,7 @@ const {
|
||||
getLogger,
|
||||
extractErrorLogData,
|
||||
filterStructureBySchema,
|
||||
serializeJsTypesForJsonStringify,
|
||||
} = require('dbgate-tools');
|
||||
const { html, parse } = require('diff2html');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
@@ -94,10 +95,12 @@ module.exports = {
|
||||
}
|
||||
},
|
||||
handle_response(conid, database, { msgid, ...response }) {
|
||||
const [resolve, reject, additionalData] = this.requests[msgid];
|
||||
resolve(response);
|
||||
if (additionalData?.auditLogger) {
|
||||
additionalData?.auditLogger(response);
|
||||
const [resolve, reject, additionalData] = this.requests[msgid] || [];
|
||||
if (resolve) {
|
||||
resolve(response);
|
||||
if (additionalData?.auditLogger) {
|
||||
additionalData?.auditLogger(response);
|
||||
}
|
||||
}
|
||||
delete this.requests[msgid];
|
||||
},
|
||||
@@ -224,12 +227,13 @@ module.exports = {
|
||||
this.close(conid, database, false);
|
||||
});
|
||||
|
||||
subprocess.send({
|
||||
const connectMessage = serializeJsTypesForJsonStringify({
|
||||
msgtype: 'connect',
|
||||
connection: { ...connection, database },
|
||||
structure: lastClosed ? lastClosed.structure : null,
|
||||
globalSettings: await config.getSettings(),
|
||||
});
|
||||
subprocess.send(connectMessage);
|
||||
return newOpened;
|
||||
},
|
||||
|
||||
@@ -237,9 +241,10 @@ module.exports = {
|
||||
sendRequest(conn, message, additionalData = {}) {
|
||||
const msgid = crypto.randomUUID();
|
||||
const promise = new Promise((resolve, reject) => {
|
||||
this.requests[msgid] = [resolve, reject, additionalData];
|
||||
this.requests[msgid] = [resolve, reject, additionalData, conn.conid, conn.database];
|
||||
try {
|
||||
conn.subprocess.send({ msgid, ...message });
|
||||
const serializedMessage = serializeJsTypesForJsonStringify({ msgid, ...message });
|
||||
conn.subprocess.send(serializedMessage);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00115 Error sending request do process');
|
||||
this.close(conn.conid, conn.database);
|
||||
@@ -261,12 +266,12 @@ module.exports = {
|
||||
},
|
||||
|
||||
sqlSelect_meta: true,
|
||||
async sqlSelect({ conid, database, select, auditLogSessionGroup }, req) {
|
||||
async sqlSelect({ conid, database, select, commandTimeout, auditLogSessionGroup }, req) {
|
||||
await testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
const res = await this.sendRequest(
|
||||
opened,
|
||||
{ msgtype: 'sqlSelect', select },
|
||||
{ msgtype: 'sqlSelect', select, commandTimeout },
|
||||
{
|
||||
auditLogger:
|
||||
auditLogSessionGroup && select?.from?.name?.pureName
|
||||
@@ -341,9 +346,12 @@ module.exports = {
|
||||
},
|
||||
|
||||
collectionData_meta: true,
|
||||
async collectionData({ conid, database, options, auditLogSessionGroup }, req) {
|
||||
async collectionData({ conid, database, options, commandTimeout, auditLogSessionGroup }, req) {
|
||||
await testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
if (commandTimeout && options) {
|
||||
options.commandTimeout = commandTimeout;
|
||||
}
|
||||
const res = await this.sendRequest(
|
||||
opened,
|
||||
{ msgtype: 'collectionData', options },
|
||||
@@ -473,6 +481,7 @@ module.exports = {
|
||||
|
||||
const databasePermissions = await loadDatabasePermissionsFromRequest(req);
|
||||
const tablePermissions = await loadTablePermissionsFromRequest(req);
|
||||
const databasePermissionRole = getDatabasePermissionRole(conid, database, databasePermissions);
|
||||
const fieldsAndRoles = [
|
||||
[changeSet.inserts, 'create_update_delete'],
|
||||
[changeSet.deletes, 'create_update_delete'],
|
||||
@@ -487,7 +496,7 @@ module.exports = {
|
||||
operation.schemaName,
|
||||
operation.pureName,
|
||||
tablePermissions,
|
||||
databasePermissions
|
||||
databasePermissionRole
|
||||
);
|
||||
if (getTablePermissionRoleLevelIndex(role) < getTablePermissionRoleLevelIndex(requiredRole)) {
|
||||
throw new Error('DBGM-00262 Permission not granted');
|
||||
@@ -576,6 +585,24 @@ module.exports = {
|
||||
};
|
||||
},
|
||||
|
||||
pingDatabases_meta: true,
|
||||
async pingDatabases({ databases }, req) {
|
||||
if (!databases || !Array.isArray(databases)) return { status: 'ok' };
|
||||
for (const { conid, database } of databases) {
|
||||
if (!conid || !database) continue;
|
||||
const existing = this.opened.find(x => x.conid == conid && x.database == database);
|
||||
if (existing) {
|
||||
try {
|
||||
existing.subprocess.send({ msgtype: 'ping' });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00308 Error pinging DB connection');
|
||||
this.close(conid, database);
|
||||
}
|
||||
}
|
||||
}
|
||||
return { status: 'ok' };
|
||||
},
|
||||
|
||||
refresh_meta: true,
|
||||
async refresh({ conid, database, keepOpen }, req) {
|
||||
await testConnectionPermission(conid, req);
|
||||
@@ -618,6 +645,15 @@ module.exports = {
|
||||
structure: existing.structure,
|
||||
};
|
||||
socket.emitChanged(`database-status-changed`, { conid, database });
|
||||
|
||||
// Reject all pending requests for this connection
|
||||
for (const [msgid, entry] of Object.entries(this.requests)) {
|
||||
const [resolve, reject, additionalData, reqConid, reqDatabase] = entry;
|
||||
if (reqConid === conid && reqDatabase === database) {
|
||||
reject('DBGM-00309 Database connection closed');
|
||||
delete this.requests[msgid];
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
@@ -15,7 +15,8 @@ const getDiagramExport = require('../utility/getDiagramExport');
|
||||
const apps = require('./apps');
|
||||
const getMapExport = require('../utility/getMapExport');
|
||||
const dbgateApi = require('../shell');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const { getLogger, getSqlFrontMatter } = require('dbgate-tools');
|
||||
const yaml = require('js-yaml');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const { checkSecureFilePathsWithoutDirectory, checkSecureDirectories } = require('../utility/security');
|
||||
const { copyAppLogsIntoFile, getRecentAppLogRecords } = require('../utility/appLogStore');
|
||||
@@ -35,13 +36,46 @@ function deserialize(format, text) {
|
||||
|
||||
module.exports = {
|
||||
list_meta: true,
|
||||
async list({ folder }, req) {
|
||||
async list({ folder, parseFrontMatter }, req) {
|
||||
const loadedPermissions = await loadPermissionsFromRequest(req);
|
||||
if (!hasPermission(`files/${folder}/read`, loadedPermissions)) return [];
|
||||
const dir = path.join(filesdir(), folder);
|
||||
if (!(await fs.exists(dir))) return [];
|
||||
const files = (await fs.readdir(dir)).map(file => ({ folder, file }));
|
||||
return files;
|
||||
const fileNames = await fs.readdir(dir);
|
||||
if (!parseFrontMatter) {
|
||||
return fileNames.map(file => ({ folder, file }));
|
||||
}
|
||||
const result = [];
|
||||
for (const file of fileNames) {
|
||||
const item = { folder, file };
|
||||
let fh;
|
||||
try {
|
||||
fh = await require('fs').promises.open(path.join(dir, file), 'r');
|
||||
const buf = new Uint8Array(512);
|
||||
const { bytesRead } = await fh.read(buf, 0, 512, 0);
|
||||
let text = Buffer.from(buf.buffer, 0, bytesRead).toString('utf-8');
|
||||
|
||||
if (text.includes('-- >>>') && !text.includes('-- <<<')) {
|
||||
const stat = await fh.stat();
|
||||
const fullSize = Math.min(stat.size, 4096);
|
||||
if (fullSize > 512) {
|
||||
const fullBuf = new Uint8Array(fullSize);
|
||||
const { bytesRead: fullBytesRead } = await fh.read(fullBuf, 0, fullSize, 0);
|
||||
text = Buffer.from(fullBuf.buffer, 0, fullBytesRead).toString('utf-8');
|
||||
}
|
||||
}
|
||||
|
||||
const fm = getSqlFrontMatter(text, yaml);
|
||||
if (fm?.connectionId) item.connectionId = fm.connectionId;
|
||||
if (fm?.databaseName) item.databaseName = fm.databaseName;
|
||||
} catch (e) {
|
||||
// ignore read errors for individual files
|
||||
} finally {
|
||||
if (fh) await fh.close().catch(() => {});
|
||||
}
|
||||
result.push(item);
|
||||
}
|
||||
return result;
|
||||
},
|
||||
|
||||
listAll_meta: true,
|
||||
@@ -257,6 +291,13 @@ module.exports = {
|
||||
return true;
|
||||
},
|
||||
|
||||
exportDiagramPng_meta: true,
|
||||
async exportDiagramPng({ filePath, pngBase64 }) {
|
||||
const base64 = pngBase64.replace(/^data:image\/png;base64,/, '');
|
||||
await fs.writeFile(filePath, Buffer.from(base64, 'base64'));
|
||||
return true;
|
||||
},
|
||||
|
||||
getFileRealPath_meta: true,
|
||||
async getFileRealPath({ folder, file }, req) {
|
||||
const loadedPermissions = await loadPermissionsFromRequest(req);
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
const { filterName } = require('dbgate-tools');
|
||||
const { filterName, getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const logger = getLogger('jsldata');
|
||||
const { jsldir, archivedir } = require('../utility/directories');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const lineReader = require('line-reader');
|
||||
const _ = require('lodash');
|
||||
const { __ } = require('lodash/fp');
|
||||
@@ -149,6 +152,10 @@ module.exports = {
|
||||
|
||||
getRows_meta: true,
|
||||
async getRows({ jslid, offset, limit, filters, sort, formatterFunction }) {
|
||||
const fileName = getJslFileName(jslid);
|
||||
if (!fs.existsSync(fileName)) {
|
||||
return [];
|
||||
}
|
||||
const datastore = await this.ensureDatastore(jslid, formatterFunction);
|
||||
return datastore.getRows(offset, limit, _.isEmpty(filters) ? null : filters, _.isEmpty(sort) ? null : sort);
|
||||
},
|
||||
@@ -159,6 +166,72 @@ module.exports = {
|
||||
return fs.existsSync(fileName);
|
||||
},
|
||||
|
||||
streamRows_meta: {
|
||||
method: 'get',
|
||||
raw: true,
|
||||
},
|
||||
streamRows(req, res) {
|
||||
const { jslid } = req.query;
|
||||
if (!jslid) {
|
||||
res.status(400).json({ apiErrorMessage: 'Missing jslid' });
|
||||
return;
|
||||
}
|
||||
|
||||
// Reject file:// jslids — they resolve to arbitrary server-side paths
|
||||
if (jslid.startsWith('file://')) {
|
||||
res.status(403).json({ apiErrorMessage: 'Forbidden jslid scheme' });
|
||||
return;
|
||||
}
|
||||
|
||||
const fileName = getJslFileName(jslid);
|
||||
|
||||
if (!fs.existsSync(fileName)) {
|
||||
res.status(404).json({ apiErrorMessage: 'File not found' });
|
||||
return;
|
||||
}
|
||||
|
||||
// Dereference symlinks and normalize case (Windows) before the allow-list check.
|
||||
// realpathSync is safe here because existsSync confirmed the file is present.
|
||||
// path.resolve() alone cannot dereference symlinks, so a symlink inside an allowed
|
||||
// root could otherwise point to an arbitrary external path.
|
||||
const normalize = p => (process.platform === 'win32' ? p.toLowerCase() : p);
|
||||
const resolveRoot = r => { try { return fs.realpathSync(r); } catch { return path.resolve(r); } };
|
||||
|
||||
let realFile;
|
||||
try {
|
||||
realFile = fs.realpathSync(fileName);
|
||||
} catch {
|
||||
res.status(403).json({ apiErrorMessage: 'Forbidden path' });
|
||||
return;
|
||||
}
|
||||
|
||||
const allowedRoots = [jsldir(), archivedir()].map(r => normalize(resolveRoot(r)) + path.sep);
|
||||
const isAllowed = allowedRoots.some(root => normalize(realFile).startsWith(root));
|
||||
if (!isAllowed) {
|
||||
logger.warn({ jslid, realFile }, 'DBGM-00000 streamRows rejected path outside allowed roots');
|
||||
res.status(403).json({ apiErrorMessage: 'Forbidden path' });
|
||||
return;
|
||||
}
|
||||
res.setHeader('Content-Type', 'application/x-ndjson');
|
||||
res.setHeader('Cache-Control', 'no-cache');
|
||||
const stream = fs.createReadStream(realFile, 'utf-8');
|
||||
|
||||
req.on('close', () => {
|
||||
stream.destroy();
|
||||
});
|
||||
|
||||
stream.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00000 Error streaming JSONL file');
|
||||
if (!res.headersSent) {
|
||||
res.status(500).json({ apiErrorMessage: 'Stream error' });
|
||||
} else {
|
||||
res.end();
|
||||
}
|
||||
});
|
||||
|
||||
stream.pipe(res);
|
||||
},
|
||||
|
||||
getStats_meta: true,
|
||||
getStats({ jslid }) {
|
||||
const file = `${getJslFileName(jslid)}.stats`;
|
||||
|
||||
@@ -172,7 +172,7 @@ module.exports = {
|
||||
byline(subprocess.stderr).on('data', pipeDispatcher('error'));
|
||||
subprocess.on('exit', code => {
|
||||
// console.log('... EXITED', code);
|
||||
this.rejectRequest(runid, { message: 'DBGM-00000 No data returned, maybe input data source is too big' });
|
||||
this.rejectRequest(runid, { message: 'DBGM-00281 No data returned, maybe input data source is too big' });
|
||||
logger.info({ code, pid: subprocess.pid }, 'DBGM-00016 Exited process');
|
||||
socket.emit(`runner-done-${runid}`, code);
|
||||
this.opened = this.opened.filter(x => x.runid != runid);
|
||||
@@ -225,7 +225,7 @@ module.exports = {
|
||||
subprocess.on('exit', code => {
|
||||
console.log('... EXITED', code);
|
||||
logger.info({ code, pid: subprocess.pid }, 'DBGM-00017 Exited process');
|
||||
this.dispatchMessage(runid, `DBGM-00000 Finished external process with code ${code}`);
|
||||
this.dispatchMessage(runid, `DBGM-00282 Finished external process with code ${code}`);
|
||||
socket.emit(`runner-done-${runid}`, code);
|
||||
if (onFinished) {
|
||||
onFinished();
|
||||
@@ -233,7 +233,7 @@ module.exports = {
|
||||
this.opened = this.opened.filter(x => x.runid != runid);
|
||||
});
|
||||
subprocess.on('spawn', () => {
|
||||
this.dispatchMessage(runid, `DBGM-00000 Started external process ${command}`);
|
||||
this.dispatchMessage(runid, `DBGM-00283 Started external process ${command}`);
|
||||
});
|
||||
subprocess.on('error', error => {
|
||||
console.log('... ERROR subprocess', error);
|
||||
@@ -279,7 +279,7 @@ module.exports = {
|
||||
if (script.type == 'json') {
|
||||
if (!platformInfo.isElectron) {
|
||||
if (!checkSecureDirectoriesInScript(script)) {
|
||||
return { errorMessage: 'DBGM-00000 Unallowed directories in script' };
|
||||
return { errorMessage: 'DBGM-00284 Unallowed directories in script' };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -299,10 +299,10 @@ module.exports = {
|
||||
action: 'script',
|
||||
severity: 'warn',
|
||||
detail: script,
|
||||
message: 'DBGM-00000 Scripts are not allowed',
|
||||
message: 'DBGM-00285 Scripts are not allowed',
|
||||
});
|
||||
|
||||
return { errorMessage: 'DBGM-00000 Shell scripting is not allowed' };
|
||||
return { errorMessage: 'DBGM-00286 Shell scripting is not allowed' };
|
||||
}
|
||||
|
||||
sendToAuditLog(req, {
|
||||
@@ -312,7 +312,7 @@ module.exports = {
|
||||
action: 'script',
|
||||
severity: 'info',
|
||||
detail: script,
|
||||
message: 'DBGM-00000 Running JS script',
|
||||
message: 'DBGM-00287 Running JS script',
|
||||
});
|
||||
|
||||
return this.startCore(runid, scriptTemplate(script, false));
|
||||
@@ -327,7 +327,7 @@ module.exports = {
|
||||
async cancel({ runid }) {
|
||||
const runner = this.opened.find(x => x.runid == runid);
|
||||
if (!runner) {
|
||||
throw new Error('DBGM-00000 Invalid runner');
|
||||
throw new Error('DBGM-00288 Invalid runner');
|
||||
}
|
||||
runner.subprocess.kill();
|
||||
return { state: 'ok' };
|
||||
@@ -353,7 +353,7 @@ module.exports = {
|
||||
async loadReader({ functionName, props }) {
|
||||
if (!platformInfo.isElectron) {
|
||||
if (props?.fileName && !checkSecureDirectories(props.fileName)) {
|
||||
return { errorMessage: 'DBGM-00000 Unallowed file' };
|
||||
return { errorMessage: 'DBGM-00289 Unallowed file' };
|
||||
}
|
||||
}
|
||||
const prefix = extractShellApiPlugins(functionName)
|
||||
@@ -371,7 +371,7 @@ module.exports = {
|
||||
scriptResult_meta: true,
|
||||
async scriptResult({ script }) {
|
||||
if (script.type != 'json') {
|
||||
return { errorMessage: 'DBGM-00000 Only JSON scripts are allowed' };
|
||||
return { errorMessage: 'DBGM-00290 Only JSON scripts are allowed' };
|
||||
}
|
||||
|
||||
const promise = new Promise(async (resolve, reject) => {
|
||||
|
||||
@@ -171,7 +171,7 @@ module.exports = {
|
||||
const databasePermissions = await loadDatabasePermissionsFromRequest(req);
|
||||
const res = [];
|
||||
for (const db of opened?.databases ?? []) {
|
||||
const databasePermissionRole = getDatabasePermissionRole(db.id, db.name, databasePermissions);
|
||||
const databasePermissionRole = getDatabasePermissionRole(conid, db.name, databasePermissions);
|
||||
if (databasePermissionRole != 'deny') {
|
||||
res.push({
|
||||
...db,
|
||||
|
||||
@@ -228,6 +228,19 @@ module.exports = {
|
||||
return { state: 'ok' };
|
||||
},
|
||||
|
||||
setIsolationLevel_meta: true,
|
||||
async setIsolationLevel({ sesid, level }) {
|
||||
const session = this.opened.find(x => x.sesid == sesid);
|
||||
if (!session) {
|
||||
throw new Error('Invalid session');
|
||||
}
|
||||
|
||||
logger.info({ sesid, level }, 'DBGM-00315 Setting transaction isolation level');
|
||||
session.subprocess.send({ msgtype: 'setIsolationLevel', level });
|
||||
|
||||
return { state: 'ok' };
|
||||
},
|
||||
|
||||
executeReader_meta: true,
|
||||
async executeReader({ conid, database, sql, queryName, appFolder }) {
|
||||
const { sesid } = await this.create({ conid, database });
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
const childProcessChecker = require('../utility/childProcessChecker');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const { connectUtility } = require('../utility/connectUtility');
|
||||
const { connectUtility, getRestAuthFromConnection } = require('../utility/connectUtility');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
const { pickSafeConnectionInfo } = require('../utility/crypting');
|
||||
const _ = require('lodash');
|
||||
@@ -29,6 +29,9 @@ function start() {
|
||||
try {
|
||||
const driver = requireEngineDriver(connection);
|
||||
const connectionChanged = driver?.beforeConnectionSave ? driver.beforeConnectionSave(connection) : connection;
|
||||
if (driver?.databaseEngineTypes?.includes('rest')) {
|
||||
connectionChanged.restAuth = getRestAuthFromConnection(connection);
|
||||
}
|
||||
|
||||
if (!connection.isVolatileResolved) {
|
||||
if (connectionChanged.useRedirectDbLogin) {
|
||||
|
||||
@@ -234,12 +234,12 @@ async function handleRunOperation({ msgid, operation, useTransaction }, skipRead
|
||||
}
|
||||
}
|
||||
|
||||
async function handleQueryData({ msgid, sql, range }, skipReadonlyCheck = false) {
|
||||
async function handleQueryData({ msgid, sql, range, commandTimeout }, skipReadonlyCheck = false) {
|
||||
await waitConnected();
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
|
||||
const res = await driver.query(dbhan, sql, { range });
|
||||
const res = await driver.query(dbhan, sql, { range, commandTimeout });
|
||||
process.send({ msgtype: 'response', msgid, ...serializeJsTypesForJsonStringify(res) });
|
||||
} catch (err) {
|
||||
process.send({
|
||||
@@ -250,11 +250,11 @@ async function handleQueryData({ msgid, sql, range }, skipReadonlyCheck = false)
|
||||
}
|
||||
}
|
||||
|
||||
async function handleSqlSelect({ msgid, select }) {
|
||||
async function handleSqlSelect({ msgid, select, commandTimeout }) {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
const dmp = driver.createDumper();
|
||||
dumpSqlSelect(dmp, select);
|
||||
return handleQueryData({ msgid, sql: dmp.s, range: select.range }, true);
|
||||
return handleQueryData({ msgid, sql: dmp.s, range: select.range, commandTimeout }, true);
|
||||
}
|
||||
|
||||
async function handleDriverDataCore(msgid, callMethod, { logName }) {
|
||||
|
||||
@@ -77,6 +77,38 @@ async function handleStopProfiler({ jslid }) {
|
||||
currentProfiler = null;
|
||||
}
|
||||
|
||||
async function handleSetIsolationLevel({ level }) {
|
||||
lastActivity = new Date().getTime();
|
||||
|
||||
await waitConnected();
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
|
||||
if (!driver.setTransactionIsolationLevel) {
|
||||
process.send({ msgtype: 'done', skipFinishedMessage: true });
|
||||
return;
|
||||
}
|
||||
|
||||
if (driver.isolationLevels && level && !driver.isolationLevels.includes(level)) {
|
||||
process.send({
|
||||
msgtype: 'info',
|
||||
info: {
|
||||
message: `Isolation level "${level}" is not supported by this driver. Supported levels: ${driver.isolationLevels.join(', ')}`,
|
||||
severity: 'error',
|
||||
},
|
||||
});
|
||||
process.send({ msgtype: 'done', skipFinishedMessage: true });
|
||||
return;
|
||||
}
|
||||
|
||||
executingScripts++;
|
||||
try {
|
||||
await driver.setTransactionIsolationLevel(dbhan, level);
|
||||
process.send({ msgtype: 'done', controlCommand: 'setIsolationLevel' });
|
||||
} finally {
|
||||
executingScripts--;
|
||||
}
|
||||
}
|
||||
|
||||
async function handleExecuteControlCommand({ command }) {
|
||||
lastActivity = new Date().getTime();
|
||||
|
||||
@@ -210,6 +242,7 @@ const messageHandlers = {
|
||||
connect: handleConnect,
|
||||
executeQuery: handleExecuteQuery,
|
||||
executeControlCommand: handleExecuteControlCommand,
|
||||
setIsolationLevel: handleSetIsolationLevel,
|
||||
executeReader: handleExecuteReader,
|
||||
startProfiler: handleStartProfiler,
|
||||
stopProfiler: handleStopProfiler,
|
||||
|
||||
@@ -4,7 +4,7 @@ const { pluginsdir, packagedPluginsDir, getPluginBackendPath } = require('../uti
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const authProxy = require('../utility/authProxy');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const { openApiDriver, graphQlDriver } = require('dbgate-rest');
|
||||
const { openApiDriver, graphQlDriver, oDataDriver } = require('dbgate-rest');
|
||||
//
|
||||
const logger = getLogger('requirePlugin');
|
||||
|
||||
@@ -26,7 +26,7 @@ function requirePlugin(packageName, requiredPlugin = null) {
|
||||
if (requiredPlugin == null) {
|
||||
if (packageName.endsWith('@rest') || packageName === 'rest') {
|
||||
return {
|
||||
drivers: [openApiDriver, graphQlDriver],
|
||||
drivers: [openApiDriver, graphQlDriver, oDataDriver],
|
||||
};
|
||||
}
|
||||
let module;
|
||||
|
||||
@@ -698,6 +698,30 @@ module.exports = {
|
||||
"columnName": "id_original",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "httpProxyUrl",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "httpProxyUser",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "httpProxyPassword",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "defaultIsolationLevel",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
@@ -850,84 +874,6 @@ module.exports = {
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"pureName": "password_reset_tokens",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "password_reset_tokens",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "password_reset_tokens",
|
||||
"columnName": "user_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "password_reset_tokens",
|
||||
"columnName": "token",
|
||||
"dataType": "varchar(500)",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "password_reset_tokens",
|
||||
"columnName": "created_at",
|
||||
"dataType": "datetime",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "password_reset_tokens",
|
||||
"columnName": "expires_at",
|
||||
"dataType": "datetime",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "password_reset_tokens",
|
||||
"columnName": "used_at",
|
||||
"dataType": "datetime",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"constraintName": "FK_password_reset_tokens_user_id",
|
||||
"pureName": "password_reset_tokens",
|
||||
"refTableName": "users",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "user_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"indexes": [
|
||||
{
|
||||
"constraintName": "idx_token",
|
||||
"pureName": "password_reset_tokens",
|
||||
"constraintType": "index",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "token"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "password_reset_tokens",
|
||||
"constraintType": "primaryKey",
|
||||
"constraintName": "PK_password_reset_tokens",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "roles",
|
||||
"columns": [
|
||||
@@ -2252,6 +2198,84 @@ module.exports = {
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"columnName": "user_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"columnName": "token",
|
||||
"dataType": "varchar(500)",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"columnName": "created_at",
|
||||
"dataType": "varchar(32)",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"columnName": "expires_at",
|
||||
"dataType": "varchar(32)",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"columnName": "used_at",
|
||||
"dataType": "varchar(32)",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"constraintName": "FK_user_password_reset_tokens_user_id",
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"refTableName": "users",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "user_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"indexes": [
|
||||
{
|
||||
"constraintName": "idx_token",
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"constraintType": "index",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "token"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"constraintType": "primaryKey",
|
||||
"constraintName": "PK_user_password_reset_tokens",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "user_permissions",
|
||||
"columns": [
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
const fs = require('fs-extra');
|
||||
const { decryptConnection } = require('./crypting');
|
||||
const { decryptConnection, decryptPasswordString } = require('./crypting');
|
||||
const { getSshTunnelProxy } = require('./sshTunnelProxy');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const connections = require('../controllers/connections');
|
||||
@@ -132,13 +132,67 @@ async function connectUtility(driver, storedConnection, connectionMode, addition
|
||||
}
|
||||
|
||||
connection.ssl = await extractConnectionSslParams(connection);
|
||||
connection.axios = axios.default;
|
||||
|
||||
const proxyUrl = String(connection.httpProxyUrl ?? '').trim();
|
||||
const proxyUser = String(connection.httpProxyUser ?? '').trim();
|
||||
const proxyPassword = String(connection.httpProxyPassword ?? '').trim();
|
||||
if (!proxyUrl && (proxyUser || proxyPassword)) {
|
||||
throw new Error('DBGM-00329 Proxy user or password is set but proxy URL is missing');
|
||||
}
|
||||
if (proxyUrl) {
|
||||
let parsedProxy;
|
||||
try {
|
||||
const parsed = new URL(proxyUrl.includes('://') ? proxyUrl : `http://${proxyUrl}`);
|
||||
parsedProxy = {
|
||||
protocol: parsed.protocol.replace(':', ''),
|
||||
host: parsed.hostname,
|
||||
port: parsed.port ? parseInt(parsed.port, 10) : (parsed.protocol === 'https:' ? 443 : 80),
|
||||
};
|
||||
const username = connection.httpProxyUser ?? parsed.username;
|
||||
const rawPassword = connection.httpProxyPassword ?? parsed.password;
|
||||
const password = decryptPasswordString(rawPassword);
|
||||
if (username) {
|
||||
parsedProxy.auth = { username, password: password ?? '' };
|
||||
}
|
||||
} catch (err) {
|
||||
throw new Error(`DBGM-00334 Invalid proxy URL "${proxyUrl}": ${err && err.message ? err.message : err}`);
|
||||
}
|
||||
connection.axios = axios.default.create({ proxy: parsedProxy });
|
||||
} else {
|
||||
connection.axios = axios.default;
|
||||
}
|
||||
|
||||
const conn = await driver.connect({ conid: connectionLoaded?._id, ...connection, ...additionalOptions });
|
||||
return conn;
|
||||
}
|
||||
|
||||
function getRestAuthFromConnection(connection) {
|
||||
if (!connection) return null;
|
||||
if (connection.authType == 'basic') {
|
||||
return {
|
||||
type: 'basic',
|
||||
user: connection.user,
|
||||
password: decryptPasswordString(connection.password),
|
||||
};
|
||||
}
|
||||
if (connection.authType == 'bearer') {
|
||||
return {
|
||||
type: 'bearer',
|
||||
token: connection.authToken,
|
||||
};
|
||||
}
|
||||
if (connection.authType == 'apikey') {
|
||||
return {
|
||||
type: 'apikey',
|
||||
header: connection.apiKeyHeader,
|
||||
value: connection.apiKeyValue,
|
||||
};
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
extractConnectionSslParams,
|
||||
connectUtility,
|
||||
getRestAuthFromConnection,
|
||||
};
|
||||
|
||||
@@ -101,7 +101,27 @@ function decryptObjectPasswordField(obj, field, encryptor = null) {
|
||||
return obj;
|
||||
}
|
||||
|
||||
const fieldsToEncrypt = ['password', 'sshPassword', 'sshKeyfilePassword', 'connectionDefinition'];
|
||||
const fieldsToEncrypt = ['password', 'sshPassword', 'sshKeyfilePassword', 'connectionDefinition', 'httpProxyPassword'];
|
||||
const additionalFieldsToMask = [
|
||||
'databaseUrl',
|
||||
'server',
|
||||
'port',
|
||||
'user',
|
||||
'sshBastionHost',
|
||||
'sshHost',
|
||||
'sshKeyFile',
|
||||
'sshLogin',
|
||||
'sshMode',
|
||||
'sshPort',
|
||||
'sslCaFile',
|
||||
'sslCertFilePassword',
|
||||
'sslKeyFile',
|
||||
'sslRejectUnauthorized',
|
||||
'secretAccessKey',
|
||||
'accessKeyId',
|
||||
'endpoint',
|
||||
'endpointKey',
|
||||
];
|
||||
|
||||
function encryptConnection(connection, encryptor = null) {
|
||||
if (connection.passwordMode != 'saveRaw') {
|
||||
@@ -114,7 +134,7 @@ function encryptConnection(connection, encryptor = null) {
|
||||
|
||||
function maskConnection(connection) {
|
||||
if (!connection) return connection;
|
||||
return _.omit(connection, fieldsToEncrypt);
|
||||
return _.omit(connection, [...fieldsToEncrypt, ...additionalFieldsToMask]);
|
||||
}
|
||||
|
||||
function decryptConnection(connection) {
|
||||
|
||||
@@ -25,8 +25,14 @@ function extractConnectionsFromEnv(env) {
|
||||
socketPath: env[`SOCKET_PATH_${id}`],
|
||||
serviceName: env[`SERVICE_NAME_${id}`],
|
||||
authType: env[`AUTH_TYPE_${id}`] || (env[`SOCKET_PATH_${id}`] ? 'socket' : undefined),
|
||||
defaultDatabase: env[`DATABASE_${id}`] || (env[`FILE_${id}`] ? getDatabaseFileLabel(env[`FILE_${id}`]) : null),
|
||||
singleDatabase: !!env[`DATABASE_${id}`] || !!env[`FILE_${id}`],
|
||||
defaultDatabase:
|
||||
env[`DATABASE_${id}`] ||
|
||||
(env[`FILE_${id}`]
|
||||
? getDatabaseFileLabel(env[`FILE_${id}`])
|
||||
: env[`APISERVERURL1_${id}`]
|
||||
? '_api_database_'
|
||||
: null),
|
||||
singleDatabase: !!env[`DATABASE_${id}`] || !!env[`FILE_${id}`] || !!env[`APISERVERURL1_${id}`],
|
||||
displayName: env[`LABEL_${id}`],
|
||||
isReadOnly: env[`READONLY_${id}`],
|
||||
databases: env[`DBCONFIG_${id}`] ? safeJsonParse(env[`DBCONFIG_${id}`]) : null,
|
||||
@@ -54,6 +60,11 @@ function extractConnectionsFromEnv(env) {
|
||||
sslKeyFile: env[`SSL_KEY_FILE_${id}`],
|
||||
sslRejectUnauthorized: env[`SSL_REJECT_UNAUTHORIZED_${id}`],
|
||||
trustServerCertificate: env[`SSL_TRUST_CERTIFICATE_${id}`],
|
||||
|
||||
apiServerUrl1: env[`APISERVERURL1_${id}`],
|
||||
apiServerUrl2: env[`APISERVERURL2_${id}`],
|
||||
apiKeyHeader: env[`APIKEYHEADER_${id}`],
|
||||
apiKeyValue: env[`APIKEYVALUE_${id}`],
|
||||
}));
|
||||
|
||||
return connections;
|
||||
|
||||
@@ -96,8 +96,9 @@ async function loadFilePermissionsFromRequest(req) {
|
||||
}
|
||||
|
||||
function matchDatabasePermissionRow(conid, database, permissionRow) {
|
||||
if (permissionRow.connection_id) {
|
||||
if (conid != permissionRow.connection_id) {
|
||||
const connectionIdentifier = permissionRow.connection_conid ?? permissionRow.connection_id;
|
||||
if (connectionIdentifier) {
|
||||
if (conid != connectionIdentifier) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -84,8 +84,12 @@ export function analyseCollectionDisplayColumns(rows, display) {
|
||||
if (res.find(x => x.uniqueName == added)) continue;
|
||||
res.push(getDisplayColumn([], added, display));
|
||||
}
|
||||
|
||||
// Use driver-specific column sorting if available
|
||||
const sortedColumns = display?.driver?.sortCollectionDisplayColumns ? display.driver.sortCollectionDisplayColumns(res) : res;
|
||||
|
||||
return (
|
||||
res.map(col => ({
|
||||
sortedColumns.map(col => ({
|
||||
...col,
|
||||
isChecked: display.isColumnChecked(col),
|
||||
})) || []
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import _ from 'lodash';
|
||||
import type { EngineDriver, ViewInfo, ColumnInfo } from 'dbgate-types';
|
||||
import { evalFilterBehaviour } from 'dbgate-tools';
|
||||
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
|
||||
import { GridConfig, GridCache } from './GridConfig';
|
||||
import { FreeTableModel } from './FreeTableModel';
|
||||
@@ -11,13 +12,15 @@ export class FreeTableGridDisplay extends GridDisplay {
|
||||
config: GridConfig,
|
||||
setConfig: ChangeConfigFunc,
|
||||
cache: GridCache,
|
||||
setCache: ChangeCacheFunc
|
||||
setCache: ChangeCacheFunc,
|
||||
options: { filterable?: boolean } = {}
|
||||
) {
|
||||
super(config, setConfig, cache, setCache);
|
||||
this.columns = model?.structure?.__isDynamicStructure
|
||||
? analyseCollectionDisplayColumns(model?.rows, this)
|
||||
: this.getDisplayColumns(model);
|
||||
this.filterable = false;
|
||||
this.filterable = options.filterable ?? false;
|
||||
this.filterBehaviourOverride = evalFilterBehaviour;
|
||||
this.sortable = false;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# dbmodel
|
||||
Deploy, load or build script from model of SQL database. Can be used as command-line tool. Uses [DbGate](https://dbgate.org) tooling and plugins for connecting many different databases.
|
||||
Deploy, load or build script from model of SQL database. Can be used as command-line tool. Uses [DbGate](www.dbgate.io) tooling and plugins for connecting many different databases.
|
||||
|
||||
If you want to use this tool from JavaScript interface, please use [dbgate-api](https://www.npmjs.com/package/dbgate-api) package.
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "dbmodel",
|
||||
"version": "7.0.0-alpha.1",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"homepage": "https://www.dbgate.io/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
|
||||
@@ -16,7 +16,46 @@ function getDateStringWithoutTimeZone(dateString) {
|
||||
|
||||
export function getFilterValueExpression(value, dataType?) {
|
||||
if (value == null) return 'NULL';
|
||||
if (isTypeDateTime(dataType)) return format(toDate(getDateStringWithoutTimeZone(value)), 'yyyy-MM-dd HH:mm:ss');
|
||||
if (isTypeDateTime(dataType)) {
|
||||
// Check for year as number (GROUP:YEAR)
|
||||
if (typeof value === 'number' && Number.isInteger(value) && value >= 1000 && value <= 9999) {
|
||||
return value.toString();
|
||||
}
|
||||
|
||||
if (_isString(value)) {
|
||||
// Year only
|
||||
if (/^\d{4}$/.test(value)) {
|
||||
return value;
|
||||
}
|
||||
|
||||
// Year-month: validate month is in range 01-12
|
||||
const yearMonthMatch = value.match(/^(\d{4})-(\d{1,2})$/);
|
||||
if (yearMonthMatch) {
|
||||
const month = parseInt(yearMonthMatch[2], 10);
|
||||
if (month >= 1 && month <= 12) {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
// Year-month-day: validate month and day
|
||||
const yearMonthDayMatch = value.match(/^(\d{4})-(\d{1,2})-(\d{1,2})$/);
|
||||
if (yearMonthDayMatch) {
|
||||
const month = parseInt(yearMonthDayMatch[2], 10);
|
||||
const day = parseInt(yearMonthDayMatch[3], 10);
|
||||
|
||||
// Quick validation: month 1-12, day 1-31
|
||||
if (month >= 1 && month <= 12 && day >= 1 && day <= 31) {
|
||||
// Construct a date to verify it's actually valid (e.g., reject 2024-02-30)
|
||||
const dateStr = `${yearMonthDayMatch[1]}-${String(month).padStart(2, '0')}-${String(day).padStart(2, '0')}`;
|
||||
const date = toDate(dateStr);
|
||||
if (!isNaN(date.getTime())) {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return format(toDate(getDateStringWithoutTimeZone(value)), 'yyyy-MM-dd HH:mm:ss');
|
||||
}
|
||||
if (value === true) return 'TRUE';
|
||||
if (value === false) return 'FALSE';
|
||||
if (value.$oid) return `ObjectId("${value.$oid}")`;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
module.exports = {
|
||||
preset: 'ts-jest',
|
||||
testEnvironment: 'node',
|
||||
moduleFileExtensions: ['js'],
|
||||
moduleFileExtensions: ['ts', 'js'],
|
||||
reporters: ['default', 'github-actions'],
|
||||
};
|
||||
|
||||
@@ -1,20 +1,21 @@
|
||||
import type { EngineDriver } from 'dbgate-types';
|
||||
import { fetchGraphQLSchema } from './graphqlIntrospection';
|
||||
import { fetchGraphQLSchema, GraphQLIntrospectionResult } from './graphqlIntrospection';
|
||||
import { apiDriverBase } from './restDriverBase';
|
||||
import { buildRestAuthHeaders } from './restAuthTools';
|
||||
|
||||
async function loadGraphQlSchema(dbhan: any) {
|
||||
async function loadGraphQlSchema(dbhan: any): Promise<GraphQLIntrospectionResult> {
|
||||
if (!dbhan?.connection?.apiServerUrl1) {
|
||||
throw new Error('DBGM-00000 GraphQL endpoint URL is not configured');
|
||||
throw new Error('DBGM-00310 GraphQL endpoint URL is not configured');
|
||||
}
|
||||
|
||||
const introspectionResult = await fetchGraphQLSchema(
|
||||
dbhan.connection.apiServerUrl1,
|
||||
dbhan.connection.restAuth || {},
|
||||
buildRestAuthHeaders(dbhan.connection.restAuth),
|
||||
dbhan.axios
|
||||
);
|
||||
|
||||
if (!introspectionResult || typeof introspectionResult !== 'object') {
|
||||
throw new Error('DBGM-00000 GraphQL schema is empty or could not be loaded');
|
||||
throw new Error('DBGM-00311 GraphQL schema is empty or could not be loaded');
|
||||
}
|
||||
|
||||
return introspectionResult;
|
||||
@@ -24,12 +25,12 @@ async function loadGraphQlSchema(dbhan: any) {
|
||||
export const graphQlDriver: EngineDriver = {
|
||||
...apiDriverBase,
|
||||
engine: 'graphql@rest',
|
||||
title: 'GraphQL (experimental)',
|
||||
title: 'GraphQL',
|
||||
databaseEngineTypes: ['rest', 'graphql'],
|
||||
icon: '<svg version="1.1" id="GraphQL_Logo" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 400 400" enable-background="new 0 0 400 400" xml:space="preserve"><g><g><g><rect x="122" y="-0.4" transform="matrix(-0.866 -0.5 0.5 -0.866 163.3196 363.3136)" fill="#E535AB" width="16.6" height="320.3"/></g></g><g><g><rect x="39.8" y="272.2" fill="#E535AB" width="320.3" height="16.6"/></g></g><g><g><rect x="37.9" y="312.2" transform="matrix(-0.866 -0.5 0.5 -0.866 83.0693 663.3409)" fill="#E535AB" width="185" height="16.6"/></g></g><g><g><rect x="177.1" y="71.1" transform="matrix(-0.866 -0.5 0.5 -0.866 463.3409 283.0693)" fill="#E535AB" width="185" height="16.6"/></g></g><g><g><rect x="122.1" y="-13" transform="matrix(-0.5 -0.866 0.866 -0.5 126.7903 232.1221)" fill="#E535AB" width="16.6" height="185"/></g></g><g><g><rect x="109.6" y="151.6" transform="matrix(-0.5 -0.866 0.866 -0.5 266.0828 473.3766)" fill="#E535AB" width="320.3" height="16.6"/></g></g><g><g><rect x="52.5" y="107.5" fill="#E535AB" width="16.6" height="185"/></g></g><g><g><rect x="330.9" y="107.5" fill="#E535AB" width="16.6" height="185"/></g></g><g><g><rect x="262.4" y="240.1" transform="matrix(-0.5 -0.866 0.866 -0.5 126.7953 714.2875)" fill="#E535AB" width="14.5" height="160.9"/></g></g><path fill="#E535AB" d="M369.5,297.9c-9.6,16.7-31,22.4-47.7,12.8c-16.7-9.6-22.4-31-12.8-47.7c9.6-16.7,31-22.4,47.7-12.8 C373.5,259.9,379.2,281.2,369.5,297.9"/><path fill="#E535AB" d="M90.9,137c-9.6,16.7-31,22.4-47.7,12.8c-16.7-9.6-22.4-31-12.8-47.7c9.6-16.7,31-22.4,47.7-12.8 C94.8,99,100.5,120.3,90.9,137"/><path fill="#E535AB" d="M30.5,297.9c-9.6-16.7-3.9-38,12.8-47.7c16.7-9.6,38-3.9,47.7,12.8c9.6,16.7,3.9,38-12.8,47.7 C61.4,320.3,40.1,314.6,30.5,297.9"/><path fill="#E535AB" d="M309.1,137c-9.6-16.7-3.9-38,12.8-47.7c16.7-9.6,38-3.9,47.7,12.8c9.6-16.7,3.9-38-12.8,47.7 C340.1,159.4,318.7,153.7,309.1,137"/><path fill="#E535AB" 
d="M200,395.8c-19.3,0-34.9-15.6-34.9-34.9c0-19.3,15.6-34.9,34.9-34.9c19.3,0,34.9,15.6,34.9,34.9 C234.9,380.1,219.3,395.8,200,395.8"/><path fill="#E535AB" d="M200,74c-19.3,0-34.9-15.6-34.9-34.9c0-19.3,15.6-34.9,34.9-34.9c19.3,0,34.9,15.6,34.9,34.9 C234.9,58.4,219.3,74,200,74"/></g></svg>',
|
||||
|
||||
showConnectionField: (field, values) => {
|
||||
if (apiDriverBase.showAuthConnectionField(field, values)) return true;
|
||||
if (apiDriverBase.showConnectionField(field, values)) return true;
|
||||
if (field === 'apiServerUrl1') return true;
|
||||
return false;
|
||||
},
|
||||
@@ -55,18 +56,10 @@ export const graphQlDriver: EngineDriver = {
|
||||
const introspectionResult = await loadGraphQlSchema(dbhan);
|
||||
const schema = introspectionResult.__schema;
|
||||
|
||||
const version = 'GraphQL';
|
||||
const versionText = [
|
||||
schema?.queryType?.name ? `Query ${schema.queryType.name}` : null,
|
||||
schema?.mutationType?.name ? `Mutation ${schema.mutationType.name}` : null,
|
||||
schema?.subscriptionType?.name ? `Subscription ${schema.subscriptionType.name}` : null,
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(', ');
|
||||
// const version = 'GraphQL';
|
||||
|
||||
return {
|
||||
version,
|
||||
...(versionText ? { versionText } : {}),
|
||||
version: `GraphQL, ${schema.types?.length || 0} types`,
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,20 +1,102 @@
|
||||
export function parseGraphQlSelectionPaths(text: string): { fieldPaths: string[]; argumentPaths: string[] } {
|
||||
if (!text) return { fieldPaths: [], argumentPaths: [] };
|
||||
const cleaned = text.replace(/#[^\n]*/g, '').replace(/"([^"\\]|\\.)*"/g, '""');
|
||||
export function parseGraphQlSelectionPaths(text: string): {
|
||||
fieldPaths: string[];
|
||||
argumentPaths: string[];
|
||||
argumentValues: Record<string, Record<string, string>>;
|
||||
} {
|
||||
if (!text) return { fieldPaths: [], argumentPaths: [], argumentValues: {} };
|
||||
const cleaned = text.replace(/#[^\n]*/g, '');
|
||||
|
||||
const tokens: string[] = cleaned.match(/\.\.\.|[A-Za-z_][A-Za-z0-9_]*|\$[A-Za-z_][A-Za-z0-9_]*|[@{}()!:$]/g) || [];
|
||||
const tokens: string[] =
|
||||
cleaned.match(
|
||||
/\.\.\.|"(?:[^"\\]|\\.)*"|[A-Za-z_][A-Za-z0-9_]*|\$[A-Za-z_][A-Za-z0-9_]*|-?\d+(?:\.\d+)?(?:[eE][+-]?\d+)?|[@{}()\[\],!:$]/g
|
||||
) || [];
|
||||
const startIndex = tokens.indexOf('{');
|
||||
if (startIndex === -1) return { fieldPaths: [], argumentPaths: [] };
|
||||
if (startIndex === -1) return { fieldPaths: [], argumentPaths: [], argumentValues: {} };
|
||||
|
||||
const result = parseSelectionSet(tokens, startIndex, []);
|
||||
return {
|
||||
fieldPaths: result.fieldPaths.map(parts => parts.join('.')),
|
||||
argumentPaths: result.argumentPaths.map(parts => parts.join('.')),
|
||||
argumentValues: result.argumentValues,
|
||||
};
|
||||
}
|
||||
|
||||
function parseArgumentsFromField(tokens: string[], startIndex: number): { arguments: string[]; endIndex: number } {
|
||||
const args: string[] = [];
|
||||
function parseArgumentValue(tokens: string[], startIndex: number): { value: string; endIndex: number } {
|
||||
const valueTokens: string[] = [];
|
||||
let index = startIndex;
|
||||
let parenthesesDepth = 0;
|
||||
let bracketDepth = 0;
|
||||
let braceDepth = 0;
|
||||
|
||||
while (index < tokens.length) {
|
||||
const token = tokens[index];
|
||||
|
||||
if (token === '(') {
|
||||
parenthesesDepth += 1;
|
||||
valueTokens.push(token);
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (token === '[') {
|
||||
bracketDepth += 1;
|
||||
valueTokens.push(token);
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (token === '{') {
|
||||
braceDepth += 1;
|
||||
valueTokens.push(token);
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (token === ')') {
|
||||
if (parenthesesDepth === 0 && bracketDepth === 0 && braceDepth === 0) {
|
||||
break;
|
||||
}
|
||||
parenthesesDepth -= 1;
|
||||
valueTokens.push(token);
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (token === ']') {
|
||||
if (bracketDepth === 0) break;
|
||||
bracketDepth -= 1;
|
||||
valueTokens.push(token);
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (token === '}') {
|
||||
if (braceDepth === 0) break;
|
||||
braceDepth -= 1;
|
||||
valueTokens.push(token);
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (token === ',' && parenthesesDepth === 0 && bracketDepth === 0 && braceDepth === 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
valueTokens.push(token);
|
||||
index += 1;
|
||||
}
|
||||
|
||||
return {
|
||||
value: valueTokens.join(''),
|
||||
endIndex: index,
|
||||
};
|
||||
}
|
||||
|
||||
function parseArgumentsFromField(
|
||||
tokens: string[],
|
||||
startIndex: number
|
||||
): { arguments: { name: string; value: string }[]; endIndex: number } {
|
||||
const args: { name: string; value: string }[] = [];
|
||||
let index = startIndex;
|
||||
|
||||
if (tokens[index] !== '(') {
|
||||
@@ -27,10 +109,15 @@ function parseArgumentsFromField(tokens: string[], startIndex: number): { argume
|
||||
if (tokens[index] === '(') depth += 1;
|
||||
if (tokens[index] === ')') depth -= 1;
|
||||
|
||||
// Look for argument names (identifier followed by colon)
|
||||
// Look for argument names (identifier followed by colon) and their values
|
||||
if (depth > 0 && /^[A-Za-z_]/.test(tokens[index]) && tokens[index + 1] === ':') {
|
||||
args.push(tokens[index]);
|
||||
index += 2;
|
||||
const argumentName = tokens[index];
|
||||
const { value, endIndex } = parseArgumentValue(tokens, index + 2);
|
||||
args.push({ name: argumentName, value });
|
||||
index = endIndex;
|
||||
if (tokens[index] === ',') {
|
||||
index += 1;
|
||||
}
|
||||
} else {
|
||||
index += 1;
|
||||
}
|
||||
@@ -43,15 +130,21 @@ function parseSelectionSet(
|
||||
tokens: string[],
|
||||
startIndex: number,
|
||||
prefix: string[]
|
||||
): { fieldPaths: string[][]; argumentPaths: string[][]; index: number } {
|
||||
): {
|
||||
fieldPaths: string[][];
|
||||
argumentPaths: string[][];
|
||||
argumentValues: Record<string, Record<string, string>>;
|
||||
index: number;
|
||||
} {
|
||||
const fieldPaths: string[][] = [];
|
||||
const argumentPaths: string[][] = [];
|
||||
const argumentValues: Record<string, Record<string, string>> = {};
|
||||
let index = startIndex + 1;
|
||||
|
||||
while (index < tokens.length) {
|
||||
const token = tokens[index];
|
||||
if (token === '}') {
|
||||
return { fieldPaths, argumentPaths, index: index + 1 };
|
||||
return { fieldPaths, argumentPaths, argumentValues, index: index + 1 };
|
||||
}
|
||||
|
||||
if (token === '...') {
|
||||
@@ -66,6 +159,12 @@ function parseSelectionSet(
|
||||
const frag = parseSelectionSet(tokens, index, prefix);
|
||||
fieldPaths.push(...frag.fieldPaths);
|
||||
argumentPaths.push(...frag.argumentPaths);
|
||||
for (const [fieldPath, values] of Object.entries(frag.argumentValues)) {
|
||||
argumentValues[fieldPath] = {
|
||||
...(argumentValues[fieldPath] || {}),
|
||||
...values,
|
||||
};
|
||||
}
|
||||
index = frag.index;
|
||||
continue;
|
||||
}
|
||||
@@ -86,8 +185,13 @@ function parseSelectionSet(
|
||||
index = argsEndIndex;
|
||||
|
||||
// Add argument paths for this field
|
||||
const currentFieldPath = [...prefix, fieldName].join('.');
|
||||
for (const arg of args) {
|
||||
argumentPaths.push([...prefix, fieldName, arg]);
|
||||
argumentPaths.push([...prefix, fieldName, arg.name]);
|
||||
if (!argumentValues[currentFieldPath]) {
|
||||
argumentValues[currentFieldPath] = {};
|
||||
}
|
||||
argumentValues[currentFieldPath][arg.name] = arg.value;
|
||||
}
|
||||
|
||||
while (tokens[index] === '@') {
|
||||
@@ -111,6 +215,12 @@ function parseSelectionSet(
|
||||
fieldPaths.push([...prefix, fieldName]);
|
||||
}
|
||||
argumentPaths.push(...nested.argumentPaths);
|
||||
for (const [fieldPath, values] of Object.entries(nested.argumentValues)) {
|
||||
argumentValues[fieldPath] = {
|
||||
...(argumentValues[fieldPath] || {}),
|
||||
...values,
|
||||
};
|
||||
}
|
||||
index = nested.index;
|
||||
} else {
|
||||
fieldPaths.push([...prefix, fieldName]);
|
||||
@@ -121,5 +231,5 @@ function parseSelectionSet(
|
||||
index += 1;
|
||||
}
|
||||
|
||||
return { fieldPaths, argumentPaths, index };
|
||||
return { fieldPaths, argumentPaths, argumentValues, index };
|
||||
}
|
||||
|
||||
@@ -23,7 +23,7 @@ interface GraphQLExplorerOptions {
|
||||
maxDepth?: number;
|
||||
}
|
||||
|
||||
const DEFAULT_MAX_DEPTH = 6;
|
||||
const DEFAULT_MAX_DEPTH = 2;
|
||||
|
||||
function getTypeDisplay(typeRef: GraphQLTypeRef | null | undefined): string {
|
||||
if (!typeRef) return 'Unknown';
|
||||
|
||||
@@ -116,6 +116,253 @@ function findType(types: GraphQLType[], name: string): GraphQLType | undefined {
|
||||
return types.find(t => t.name === name);
|
||||
}
|
||||
|
||||
function unwrapNamedTypeRef(typeRef: GraphQLTypeRef | null | undefined): GraphQLTypeRef | null {
|
||||
if (!typeRef) return null;
|
||||
if (typeRef.kind === 'NON_NULL' || typeRef.kind === 'LIST') return unwrapNamedTypeRef(typeRef.ofType);
|
||||
return typeRef;
|
||||
}
|
||||
|
||||
function unwrapListTypeRef(typeRef: GraphQLTypeRef | null | undefined): GraphQLTypeRef | null {
|
||||
if (!typeRef) return null;
|
||||
if (typeRef.kind === 'NON_NULL') return unwrapListTypeRef(typeRef.ofType);
|
||||
if (typeRef.kind === 'LIST') return unwrapNamedTypeRef(typeRef.ofType);
|
||||
return null;
|
||||
}
|
||||
|
||||
function buildTypeMap(types: GraphQLType[]): Map<string, GraphQLType> {
|
||||
return new Map((types || []).map(type => [type.name, type]));
|
||||
}
|
||||
|
||||
function isScalarLikeField(field: GraphQLField, typeMap: Map<string, GraphQLType>): boolean {
|
||||
const namedType = unwrapNamedTypeRef(field.type);
|
||||
if (!namedType?.name) return false;
|
||||
const type = typeMap.get(namedType.name);
|
||||
if (!type) return namedType.kind === 'SCALAR' || namedType.kind === 'ENUM';
|
||||
return type.kind === 'SCALAR' || type.kind === 'ENUM';
|
||||
}
|
||||
|
||||
export function scoreFieldName(name: string): number {
|
||||
const lowerName = (name || '').toLowerCase();
|
||||
const exactOrder = [
|
||||
'id',
|
||||
'name',
|
||||
'title',
|
||||
'email',
|
||||
'username',
|
||||
'status',
|
||||
'createdat',
|
||||
'updatedat',
|
||||
'type',
|
||||
'code',
|
||||
'key',
|
||||
];
|
||||
|
||||
const exactIndex = exactOrder.indexOf(lowerName);
|
||||
if (exactIndex >= 0) {
|
||||
return 500 - exactIndex;
|
||||
}
|
||||
|
||||
if (lowerName.endsWith('id')) return 300;
|
||||
if (lowerName.includes('name')) return 280;
|
||||
if (lowerName.includes('title')) return 260;
|
||||
if (lowerName.includes('email')) return 240;
|
||||
if (lowerName.includes('status')) return 220;
|
||||
if (lowerName.includes('date') || lowerName.endsWith('at')) return 200;
|
||||
return 100;
|
||||
}
|
||||
|
||||
export function chooseUsefulNodeAttributes(nodeType: GraphQLType | undefined, typeMap: Map<string, GraphQLType>): string[] {
|
||||
if (!nodeType?.fields?.length) return ['__typename'];
|
||||
|
||||
const scalarFields = nodeType.fields.filter(field => isScalarLikeField(field, typeMap));
|
||||
if (scalarFields.length === 0) return ['__typename'];
|
||||
|
||||
return scalarFields
|
||||
.map((field, index) => ({
|
||||
field,
|
||||
score: scoreFieldName(field.name),
|
||||
index,
|
||||
}))
|
||||
.sort((left, right) => {
|
||||
if (right.score !== left.score) return right.score - left.score;
|
||||
return left.index - right.index;
|
||||
})
|
||||
.slice(0, 10)
|
||||
.map(item => item.field.name);
|
||||
}
|
||||
|
||||
function stringifyArgumentValue(argumentTypeRef: GraphQLTypeRef | null | undefined, value: number | string): string {
|
||||
const namedType = unwrapNamedTypeRef(argumentTypeRef);
|
||||
if (!namedType?.name) {
|
||||
// Fallback: safely stringify as a JSON string literal
|
||||
return JSON.stringify(String(value));
|
||||
}
|
||||
|
||||
const typeName = namedType.name.toLowerCase();
|
||||
if (typeName === 'int' || typeName === 'float') {
|
||||
const numValue = typeof value === 'number' ? value : Number(value);
|
||||
if (Number.isFinite(numValue)) {
|
||||
return String(numValue);
|
||||
}
|
||||
// If the value cannot be parsed as a valid number, fall back to a quoted string
|
||||
return JSON.stringify(String(value));
|
||||
}
|
||||
|
||||
// For non-numeric types, safely serialize as a JSON string literal
|
||||
return JSON.stringify(String(value));
|
||||
}
|
||||
|
||||
export function buildFirstTenArgs(field: GraphQLField, filterParamName?: string | null, filterValue?: string): string {
|
||||
const args = field.args || [];
|
||||
if (args.length === 0) return '';
|
||||
|
||||
const argPairs: string[] = [];
|
||||
|
||||
// Add pagination argument
|
||||
const candidates = ['first', 'limit', 'pagesize', 'perpage', 'take', 'size', 'count', 'maxresults'];
|
||||
const paginationArg = args.find(item => candidates.includes((item.name || '').toLowerCase()));
|
||||
if (paginationArg) {
|
||||
argPairs.push(`${paginationArg.name}: ${stringifyArgumentValue(paginationArg.type, 10)}`);
|
||||
}
|
||||
|
||||
// Add filter argument if provided
|
||||
if (filterParamName && filterValue) {
|
||||
const filterArg = args.find(item => item.name === filterParamName);
|
||||
if (filterArg) {
|
||||
argPairs.push(`${filterParamName}: ${stringifyArgumentValue(filterArg.type, filterValue)}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (argPairs.length === 0) return '';
|
||||
return `(${argPairs.join(', ')})`;
|
||||
}
|
||||
|
||||
export type GraphQLConnectionProjection =
|
||||
| {
|
||||
kind: 'edges';
|
||||
nodeTypeName: string;
|
||||
hasPageInfo: boolean;
|
||||
}
|
||||
| {
|
||||
kind: 'listField';
|
||||
listFieldName: string;
|
||||
nodeTypeName: string;
|
||||
};
|
||||
|
||||
export function detectConnectionProjection(
|
||||
field: GraphQLField,
|
||||
typeMap: Map<string, GraphQLType>
|
||||
): GraphQLConnectionProjection | null {
|
||||
const fieldTypeRef = unwrapNamedTypeRef(field.type);
|
||||
if (!fieldTypeRef?.name) return null;
|
||||
|
||||
const returnType = typeMap.get(fieldTypeRef.name);
|
||||
if (!returnType || returnType.kind !== 'OBJECT' || !returnType.fields?.length) return null;
|
||||
|
||||
const edgesField = returnType.fields.find(item => item.name === 'edges');
|
||||
if (edgesField) {
|
||||
const edgeTypeRef = unwrapListTypeRef(edgesField.type);
|
||||
if (edgeTypeRef?.name) {
|
||||
const edgeType = typeMap.get(edgeTypeRef.name);
|
||||
const nodeField = edgeType?.fields?.find(item => item.name === 'node');
|
||||
const nodeTypeRef = unwrapNamedTypeRef(nodeField?.type);
|
||||
if (nodeTypeRef?.name) {
|
||||
const hasPageInfo = !!returnType.fields.find(item => item.name === 'pageInfo');
|
||||
return {
|
||||
kind: 'edges',
|
||||
nodeTypeName: nodeTypeRef.name,
|
||||
hasPageInfo,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const listFieldNames = ['nodes', 'items', 'results', 'data'];
|
||||
for (const listFieldName of listFieldNames) {
|
||||
const listField = returnType.fields.find(item => item.name === listFieldName);
|
||||
if (!listField) continue;
|
||||
const listItemTypeRef = unwrapListTypeRef(listField.type);
|
||||
if (!listItemTypeRef?.name) continue;
|
||||
return {
|
||||
kind: 'listField',
|
||||
listFieldName,
|
||||
nodeTypeName: listItemTypeRef.name,
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function buildConnectionQuery(field: GraphQLField, typeMap: Map<string, GraphQLType>): string | null {
|
||||
const projection = detectConnectionProjection(field, typeMap);
|
||||
if (!projection) return null;
|
||||
|
||||
const nodeType = typeMap.get(projection.nodeTypeName);
|
||||
const selectedAttributes = chooseUsefulNodeAttributes(nodeType, typeMap);
|
||||
const argsString = buildFirstTenArgs(field);
|
||||
const attributeBlock = selectedAttributes.map(attr => ` ${attr}`).join('\n');
|
||||
|
||||
if (projection.kind === 'edges') {
|
||||
const pageInfoBlock = projection.hasPageInfo
|
||||
? `
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}`
|
||||
: '';
|
||||
|
||||
return `query {
|
||||
${field.name}${argsString} {
|
||||
edges {
|
||||
node {
|
||||
${attributeBlock}
|
||||
}
|
||||
}${pageInfoBlock}
|
||||
}
|
||||
}`;
|
||||
}
|
||||
|
||||
return `query {
|
||||
${field.name}${argsString} {
|
||||
${projection.listFieldName} {
|
||||
${attributeBlock}
|
||||
}
|
||||
}
|
||||
}`;
|
||||
}
|
||||
|
||||
function buildConnectionEndpoints(
|
||||
types: GraphQLType[],
|
||||
rootTypeName?: string
|
||||
): Array<{
|
||||
name: string;
|
||||
description?: string;
|
||||
fields?: string;
|
||||
connectionQuery?: string;
|
||||
}> {
|
||||
if (!rootTypeName) return [];
|
||||
|
||||
const rootType = findType(types, rootTypeName);
|
||||
if (!rootType?.fields?.length) return [];
|
||||
|
||||
const typeMap = buildTypeMap(types);
|
||||
const connectionEndpoints = [];
|
||||
|
||||
for (const field of rootType.fields) {
|
||||
const connectionQuery = buildConnectionQuery(field, typeMap);
|
||||
if (!connectionQuery) continue;
|
||||
|
||||
connectionEndpoints.push({
|
||||
name: field.name,
|
||||
description: field.description || '',
|
||||
fields: field.description,
|
||||
connectionQuery,
|
||||
});
|
||||
}
|
||||
|
||||
return connectionEndpoints;
|
||||
}
|
||||
|
||||
function buildOperationEndpoints(
|
||||
types: GraphQLType[],
|
||||
operationType: 'OBJECT',
|
||||
@@ -138,6 +385,24 @@ export function extractRestApiDefinitionFromGraphQlIntrospectionResult(
|
||||
const { __schema } = introspectionResult;
|
||||
const categories: any[] = [];
|
||||
|
||||
// Connections (query fields returning connection-like payloads)
|
||||
if (__schema.queryType?.name) {
|
||||
const connectionEndpoints = buildConnectionEndpoints(__schema.types, __schema.queryType.name);
|
||||
if (connectionEndpoints.length > 0) {
|
||||
categories.push({
|
||||
name: 'Connections',
|
||||
endpoints: connectionEndpoints.map(connection => ({
|
||||
method: 'POST',
|
||||
path: connection.name,
|
||||
summary: connection.description,
|
||||
description: connection.fields,
|
||||
parameters: [],
|
||||
connectionQuery: connection.connectionQuery,
|
||||
})),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Queries
|
||||
if (__schema.queryType?.name) {
|
||||
const queryEndpoints = buildOperationEndpoints(__schema.types, 'OBJECT', __schema.queryType.name);
|
||||
@@ -225,6 +490,6 @@ export async function fetchGraphQLSchema(
|
||||
|
||||
return response.data.data as GraphQLIntrospectionResult;
|
||||
} catch (err: any) {
|
||||
throw new Error(`DBGM-00000 Could not fetch GraphQL schema: ${err.message}`);
|
||||
throw new Error(`DBGM-00312 Could not fetch GraphQL schema: ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
export * from './openApiDriver';
|
||||
export * from './oDataDriver';
|
||||
export * from './graphQlDriver';
|
||||
export * from './openApiAdapter';
|
||||
export * from './oDataAdapter';
|
||||
export * from './oDataMetadataParser';
|
||||
export * from './restApiExecutor';
|
||||
export * from './arrayify';
|
||||
export * from './graphqlIntrospection';
|
||||
export * from './graphqlExplorer';
|
||||
export * from './graphQlQueryParser';
|
||||
export * from './graphQlVariables';
|
||||
export * from './graphQlVariables';
|
||||
export * from './restAuthTools';
|
||||
|
||||
@@ -0,0 +1,70 @@
|
||||
const { analyseODataDefinition } = require('./oDataAdapter');
|
||||
|
||||
function findEndpoint(apiInfo, path, method = 'GET') {
|
||||
return apiInfo.categories
|
||||
.flatMap(category => category.endpoints)
|
||||
.find(endpoint => endpoint.path === path && endpoint.method === method);
|
||||
}
|
||||
|
||||
test('deduces mandatory company parameter for customers and items from ContainsTarget metadata', () => {
|
||||
const serviceDocument = {
|
||||
'@odata.context': 'https://example/odata/$metadata',
|
||||
value: [
|
||||
{ name: 'companies', kind: 'EntitySet', url: 'companies' },
|
||||
{ name: 'customers', kind: 'EntitySet', url: 'customers' },
|
||||
{ name: 'items', kind: 'EntitySet', url: 'items' },
|
||||
],
|
||||
};
|
||||
|
||||
const metadataXml = `<?xml version="1.0" encoding="utf-8"?>
|
||||
<edmx:Edmx Version="4.0" xmlns:edmx="http://docs.oasis-open.org/odata/ns/edmx">
|
||||
<edmx:DataServices>
|
||||
<Schema Namespace="Microsoft.NAV" Alias="NAV" xmlns="http://docs.oasis-open.org/odata/ns/edm">
|
||||
<EntityType Name="company">
|
||||
<Key><PropertyRef Name="id"/></Key>
|
||||
<Property Name="id" Type="Edm.Guid"/>
|
||||
<Property Name="displayName" Type="Edm.String"/>
|
||||
<NavigationProperty Name="customers" Type="Collection(NAV.customer)" ContainsTarget="true" />
|
||||
<NavigationProperty Name="items" Type="Collection(NAV.item)" ContainsTarget="true" />
|
||||
</EntityType>
|
||||
<EntityType Name="customer">
|
||||
<Property Name="id" Type="Edm.Guid"/>
|
||||
</EntityType>
|
||||
<EntityType Name="item">
|
||||
<Property Name="id" Type="Edm.Guid"/>
|
||||
</EntityType>
|
||||
<EntityContainer Name="default">
|
||||
<EntitySet Name="companies" EntityType="NAV.company">
|
||||
<NavigationPropertyBinding Path="customers" Target="customers"/>
|
||||
<NavigationPropertyBinding Path="items" Target="items"/>
|
||||
</EntitySet>
|
||||
<EntitySet Name="customers" EntityType="NAV.customer"/>
|
||||
<EntitySet Name="items" EntityType="NAV.item"/>
|
||||
</EntityContainer>
|
||||
</Schema>
|
||||
</edmx:DataServices>
|
||||
</edmx:Edmx>`;
|
||||
|
||||
const apiInfo = analyseODataDefinition(serviceDocument, 'https://example/odata', metadataXml);
|
||||
|
||||
const customersGet = findEndpoint(apiInfo, '/customers', 'GET');
|
||||
const itemsGet = findEndpoint(apiInfo, '/items', 'GET');
|
||||
|
||||
expect(customersGet).toBeDefined();
|
||||
expect(itemsGet).toBeDefined();
|
||||
|
||||
const customersCompany = customersGet.parameters.find(param => param.name === 'company');
|
||||
const itemsCompany = itemsGet.parameters.find(param => param.name === 'company');
|
||||
|
||||
expect(customersCompany).toBeDefined();
|
||||
expect(customersCompany.required).toBe(true);
|
||||
expect(customersCompany.in).toBe('query');
|
||||
expect(customersCompany.odataLookupEntitySet).toBe('companies');
|
||||
expect(customersCompany.odataLookupPath).toBe('/companies');
|
||||
|
||||
expect(itemsCompany).toBeDefined();
|
||||
expect(itemsCompany.required).toBe(true);
|
||||
expect(itemsCompany.in).toBe('query');
|
||||
expect(itemsCompany.odataLookupEntitySet).toBe('companies');
|
||||
expect(itemsCompany.odataLookupPath).toBe('/companies');
|
||||
});
|
||||
@@ -0,0 +1,458 @@
|
||||
import { RestApiDefinition, RestApiEndpoint, RestApiParameter, RestApiServer } from './restApiDef';
|
||||
import { parseODataMetadataDocument } from './oDataMetadataParser';
|
||||
|
||||
export type ODataServiceResource = {
|
||||
name?: string;
|
||||
kind?: string;
|
||||
url?: string;
|
||||
};
|
||||
|
||||
export type ODataServiceDocument = {
|
||||
'@odata.context'?: string;
|
||||
value?: ODataServiceResource[];
|
||||
};
|
||||
|
||||
export interface ODataMetadataNavigationProperty {
|
||||
name: string;
|
||||
type?: string;
|
||||
containsTarget: boolean;
|
||||
nullable: boolean;
|
||||
}
|
||||
|
||||
export interface ODataMetadataEntityType {
|
||||
typeName: string;
|
||||
fullTypeName: string;
|
||||
keyProperties: string[];
|
||||
stringProperties: string[];
|
||||
navigationProperties: ODataMetadataNavigationProperty[];
|
||||
}
|
||||
|
||||
export interface ODataMetadataEntitySet {
|
||||
name: string;
|
||||
entityType: string;
|
||||
navigationBindings: Record<string, string>;
|
||||
}
|
||||
|
||||
export interface ODataMetadataDocument {
|
||||
entityTypes: Record<string, ODataMetadataEntityType>;
|
||||
entitySets: Record<string, ODataMetadataEntitySet>;
|
||||
}
|
||||
|
||||
function normalizeServiceRoot(contextUrl: string | undefined, fallbackUrl: string): string {
|
||||
const safeFallback = String(fallbackUrl ?? '').trim();
|
||||
|
||||
if (typeof contextUrl === 'string' && contextUrl.trim()) {
|
||||
try {
|
||||
const resolved = new URL(contextUrl.trim(), safeFallback || undefined);
|
||||
resolved.hash = '';
|
||||
resolved.search = '';
|
||||
resolved.pathname = resolved.pathname.replace(/\/$metadata$/i, '');
|
||||
|
||||
const url = resolved.toString();
|
||||
return url.endsWith('/') ? url : `${url}/`;
|
||||
} catch {
|
||||
// ignore, fallback below
|
||||
}
|
||||
}
|
||||
|
||||
return safeFallback.endsWith('/') ? safeFallback : `${safeFallback}/`;
|
||||
}
|
||||
|
||||
function normalizeEndpointPath(valueUrl: string | undefined): string | null {
|
||||
const input = String(valueUrl ?? '').trim();
|
||||
if (!input) return null;
|
||||
|
||||
try {
|
||||
const parsed = new URL(input, 'http://odata.local');
|
||||
const pathWithQuery = `${parsed.pathname}${parsed.search}`;
|
||||
return pathWithQuery.startsWith('/') ? pathWithQuery : `/${pathWithQuery}`;
|
||||
} catch {
|
||||
return input.startsWith('/') ? input : `/${input}`;
|
||||
}
|
||||
}
|
||||
|
||||
function inferMethods(kind: string | undefined): RestApiEndpoint['method'][] {
|
||||
const normalizedKind = String(kind ?? '').toLowerCase();
|
||||
|
||||
if (normalizedKind === 'actionimport') return ['POST'];
|
||||
if (normalizedKind === 'entityset') return ['GET', 'POST'];
|
||||
return ['GET'];
|
||||
}
|
||||
|
||||
function toLowerCamelCase(value: string | undefined): string {
|
||||
const text = String(value ?? '').trim();
|
||||
if (!text) return '';
|
||||
return text.charAt(0).toLowerCase() + text.slice(1);
|
||||
}
|
||||
|
||||
function normalizeSingularName(value: string | undefined): string {
|
||||
const text = String(value ?? '').trim();
|
||||
if (!text) return '';
|
||||
if (/ies$/i.test(text)) return `${text.slice(0, -3)}y`;
|
||||
if (/sses$/i.test(text)) return text;
|
||||
if (/s$/i.test(text) && text.length > 1) return text.slice(0, -1);
|
||||
return text;
|
||||
}
|
||||
|
||||
function normalizePluralName(value: string | undefined): string {
|
||||
const text = String(value ?? '').trim();
|
||||
if (!text) return '';
|
||||
if (/y$/i.test(text)) return `${text.slice(0, -1)}ies`;
|
||||
if (/s$/i.test(text)) return text;
|
||||
return `${text}s`;
|
||||
}
|
||||
|
||||
function normalizeEntityTypeName(typeName: string | undefined): string {
|
||||
const text = String(typeName ?? '').trim();
|
||||
if (!text) return '';
|
||||
|
||||
const collectionMatch = text.match(/^Collection\((.+)\)$/i);
|
||||
const unwrapped = collectionMatch ? collectionMatch[1] : text;
|
||||
const slashStripped = unwrapped.includes('/') ? unwrapped.split('/').pop() || unwrapped : unwrapped;
|
||||
return slashStripped.trim();
|
||||
}
|
||||
|
||||
function buildTypeReferenceKeys(typeReference: string | undefined): string[] {
|
||||
const normalizedReference = normalizeEntityTypeName(typeReference);
|
||||
if (!normalizedReference) return [];
|
||||
|
||||
const keys = new Set<string>();
|
||||
const lower = normalizedReference.toLowerCase();
|
||||
keys.add(lower);
|
||||
|
||||
const withoutNamespace = normalizedReference.includes('.')
|
||||
? normalizedReference.split('.').pop() || normalizedReference
|
||||
: normalizedReference;
|
||||
keys.add(withoutNamespace.toLowerCase());
|
||||
|
||||
return Array.from(keys);
|
||||
}
|
||||
|
||||
function buildEntityTypeLookup(entityTypes: Record<string, ODataMetadataEntityType>): Map<string, ODataMetadataEntityType> {
|
||||
const lookup = new Map<string, ODataMetadataEntityType>();
|
||||
|
||||
for (const [entityTypeKey, entityType] of Object.entries(entityTypes || {})) {
|
||||
const keys = new Set<string>([
|
||||
...buildTypeReferenceKeys(entityTypeKey),
|
||||
...buildTypeReferenceKeys(entityType.fullTypeName),
|
||||
...buildTypeReferenceKeys(entityType.typeName),
|
||||
]);
|
||||
|
||||
for (const key of keys) {
|
||||
if (!lookup.has(key)) {
|
||||
lookup.set(key, entityType);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return lookup;
|
||||
}
|
||||
|
||||
function resolveEntityType(
|
||||
entityTypeLookup: Map<string, ODataMetadataEntityType>,
|
||||
typeReference: string | undefined
|
||||
): ODataMetadataEntityType | null {
|
||||
const keys = buildTypeReferenceKeys(typeReference);
|
||||
for (const key of keys) {
|
||||
const found = entityTypeLookup.get(key);
|
||||
if (found) return found;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function resolveLookupPath(entitySetName: string, serviceResourceMap: Map<string, ODataServiceResource>): string {
|
||||
const serviceResource = serviceResourceMap.get(entitySetName);
|
||||
const resourceUrl = String(serviceResource?.url ?? '').trim();
|
||||
if (!resourceUrl) return `/${entitySetName}`;
|
||||
return resourceUrl.startsWith('/') ? resourceUrl : `/${resourceUrl}`;
|
||||
}
|
||||
|
||||
function buildServiceResourceNameLookup(resources: ODataServiceResource[]): Map<string, string> {
|
||||
const lookup = new Map<string, string>();
|
||||
for (const resource of resources || []) {
|
||||
const resourceName = String(resource?.name ?? '').trim();
|
||||
if (!resourceName) continue;
|
||||
const lower = resourceName.toLowerCase();
|
||||
if (!lookup.has(lower)) {
|
||||
lookup.set(lower, resourceName);
|
||||
}
|
||||
}
|
||||
return lookup;
|
||||
}
|
||||
|
||||
function resolveServiceResourceNameForEntityType(
|
||||
entityType: ODataMetadataEntityType,
|
||||
serviceResourceNameLookup: Map<string, string>
|
||||
): string | null {
|
||||
const baseNames = [
|
||||
String(entityType?.typeName ?? '').trim(),
|
||||
normalizeSingularName(entityType?.typeName),
|
||||
normalizeEntityTypeName(entityType?.fullTypeName),
|
||||
normalizeSingularName(normalizeEntityTypeName(entityType?.fullTypeName)),
|
||||
].filter(Boolean);
|
||||
|
||||
const candidates = new Set<string>();
|
||||
for (const baseName of baseNames) {
|
||||
candidates.add(baseName);
|
||||
candidates.add(normalizeSingularName(baseName));
|
||||
candidates.add(normalizePluralName(baseName));
|
||||
}
|
||||
|
||||
for (const candidate of candidates) {
|
||||
const matched = serviceResourceNameLookup.get(String(candidate).toLowerCase());
|
||||
if (matched) return matched;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
type MandatoryNavigationTargetParameter = {
|
||||
name: string;
|
||||
lookupEntitySet: string;
|
||||
lookupPath: string;
|
||||
lookupValueField?: string;
|
||||
lookupLabelField?: string;
|
||||
};
|
||||
|
||||
type MandatoryNavigationByTarget = Record<string, MandatoryNavigationTargetParameter[]>;
|
||||
|
||||
type ParentNavigationContext = {
|
||||
parentEntitySetName: string;
|
||||
parentType: ODataMetadataEntityType;
|
||||
navigationBindings: Record<string, string>;
|
||||
};
|
||||
|
||||
function deduceMandatoryNavigationByTarget(
|
||||
metadataDocument: ODataMetadataDocument | null,
|
||||
resources: ODataServiceResource[]
|
||||
): MandatoryNavigationByTarget {
|
||||
if (!metadataDocument) return {};
|
||||
|
||||
const entityTypeLookup = buildEntityTypeLookup(metadataDocument.entityTypes || {});
|
||||
|
||||
const serviceResourceMap = new Map<string, ODataServiceResource>();
|
||||
for (const resource of resources) {
|
||||
const resourceName = String(resource?.name ?? '').trim();
|
||||
if (resourceName) {
|
||||
serviceResourceMap.set(resourceName, resource);
|
||||
}
|
||||
}
|
||||
const serviceResourceNameLookup = buildServiceResourceNameLookup(resources);
|
||||
|
||||
const entitySetsByEntityType = new Map<string, string[]>();
|
||||
for (const [entitySetName, entitySet] of Object.entries(metadataDocument.entitySets || {})) {
|
||||
const typeKeys = buildTypeReferenceKeys(entitySet?.entityType);
|
||||
if (typeKeys.length === 0) continue;
|
||||
|
||||
for (const typeKey of typeKeys) {
|
||||
const list = entitySetsByEntityType.get(typeKey) || [];
|
||||
if (!list.includes(entitySetName)) {
|
||||
list.push(entitySetName);
|
||||
entitySetsByEntityType.set(typeKey, list);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const mandatoryByTarget: MandatoryNavigationByTarget = {};
|
||||
const parentContexts: ParentNavigationContext[] = [];
|
||||
const parentTypeKeysCovered = new Set<string>();
|
||||
|
||||
for (const [parentEntitySetName, parentEntitySet] of Object.entries(metadataDocument.entitySets || {})) {
|
||||
const parentType = resolveEntityType(entityTypeLookup, parentEntitySet.entityType);
|
||||
if (!parentType) continue;
|
||||
|
||||
parentContexts.push({
|
||||
parentEntitySetName,
|
||||
parentType,
|
||||
navigationBindings: parentEntitySet.navigationBindings || {},
|
||||
});
|
||||
|
||||
for (const typeKey of buildTypeReferenceKeys(parentEntitySet.entityType)) {
|
||||
parentTypeKeysCovered.add(typeKey);
|
||||
}
|
||||
}
|
||||
|
||||
for (const entityType of Object.values(metadataDocument.entityTypes || {})) {
|
||||
const typeKeys = [
|
||||
...buildTypeReferenceKeys(entityType.fullTypeName),
|
||||
...buildTypeReferenceKeys(entityType.typeName),
|
||||
];
|
||||
const alreadyCovered = typeKeys.some(typeKey => parentTypeKeysCovered.has(typeKey));
|
||||
if (alreadyCovered) continue;
|
||||
|
||||
if (!Array.isArray(entityType.navigationProperties) || entityType.navigationProperties.length === 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const parentEntitySetName = resolveServiceResourceNameForEntityType(entityType, serviceResourceNameLookup);
|
||||
if (!parentEntitySetName) continue;
|
||||
|
||||
parentContexts.push({
|
||||
parentEntitySetName,
|
||||
parentType: entityType,
|
||||
navigationBindings: {},
|
||||
});
|
||||
|
||||
for (const typeKey of typeKeys) {
|
||||
parentTypeKeysCovered.add(typeKey);
|
||||
}
|
||||
}
|
||||
|
||||
for (const { parentEntitySetName, parentType, navigationBindings } of parentContexts) {
|
||||
const parentParamName =
|
||||
toLowerCamelCase(parentType.typeName) ||
|
||||
toLowerCamelCase(normalizeSingularName(parentEntitySetName)) ||
|
||||
toLowerCamelCase(parentEntitySetName);
|
||||
|
||||
if (!parentParamName) continue;
|
||||
|
||||
for (const navProperty of parentType.navigationProperties || []) {
|
||||
if (!navProperty.containsTarget) continue;
|
||||
|
||||
const targetNames = new Set<string>();
|
||||
const directBoundTarget = navigationBindings?.[navProperty.name];
|
||||
if (directBoundTarget) {
|
||||
targetNames.add(directBoundTarget);
|
||||
}
|
||||
|
||||
const navTypeKeys = buildTypeReferenceKeys(navProperty.type);
|
||||
if (navTypeKeys.length > 0) {
|
||||
const typeTargets = navTypeKeys.flatMap(typeKey => entitySetsByEntityType.get(typeKey) || []);
|
||||
for (const targetName of typeTargets) {
|
||||
targetNames.add(targetName);
|
||||
}
|
||||
}
|
||||
|
||||
for (const targetEntitySetName of targetNames) {
|
||||
const targetList = mandatoryByTarget[targetEntitySetName] || [];
|
||||
const exists = targetList.some(item => item.name.toLowerCase() === parentParamName.toLowerCase());
|
||||
if (exists) continue;
|
||||
|
||||
targetList.push({
|
||||
name: parentParamName,
|
||||
lookupEntitySet: parentEntitySetName,
|
||||
lookupPath: resolveLookupPath(parentEntitySetName, serviceResourceMap),
|
||||
lookupValueField: parentType.keyProperties?.[0],
|
||||
lookupLabelField: parentType.stringProperties?.find(prop => /name/i.test(prop)) || parentType.stringProperties?.[0],
|
||||
});
|
||||
mandatoryByTarget[targetEntitySetName] = targetList;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return mandatoryByTarget;
|
||||
}
|
||||
|
||||
function buildMandatoryNavigationParameters(
|
||||
resource: ODataServiceResource,
|
||||
mandatoryByTarget: MandatoryNavigationByTarget
|
||||
): RestApiParameter[] {
|
||||
const resourceName = String(resource?.name ?? '').trim();
|
||||
if (!resourceName) return [];
|
||||
|
||||
const mandatoryTargets = mandatoryByTarget[resourceName] || [];
|
||||
const mandatoryParameters: RestApiParameter[] = [];
|
||||
const seenNames = new Set<string>();
|
||||
|
||||
for (const mandatoryTarget of mandatoryTargets) {
|
||||
const normalizedName = mandatoryTarget.name.toLowerCase();
|
||||
if (seenNames.has(normalizedName)) continue;
|
||||
|
||||
const description = mandatoryTarget.lookupEntitySet
|
||||
? `Required navigation parameter deduced from OData metadata (lookup: ${mandatoryTarget.lookupEntitySet})`
|
||||
: 'Required navigation parameter deduced from OData metadata';
|
||||
|
||||
mandatoryParameters.push({
|
||||
name: mandatoryTarget.name,
|
||||
in: 'query',
|
||||
dataType: 'string',
|
||||
required: true,
|
||||
description,
|
||||
odataLookupPath: mandatoryTarget.lookupPath,
|
||||
odataLookupEntitySet: mandatoryTarget.lookupEntitySet,
|
||||
odataLookupValueField: mandatoryTarget.lookupValueField,
|
||||
odataLookupLabelField: mandatoryTarget.lookupLabelField,
|
||||
});
|
||||
seenNames.add(normalizedName);
|
||||
}
|
||||
|
||||
return mandatoryParameters;
|
||||
}
|
||||
|
||||
function createODataResourceEndpoints(
|
||||
resource: ODataServiceResource,
|
||||
mandatoryByTarget: MandatoryNavigationByTarget
|
||||
): RestApiEndpoint[] {
|
||||
const path = normalizeEndpointPath(resource.url);
|
||||
if (!path) return [];
|
||||
|
||||
const summary = resource.name || resource.url || path;
|
||||
const descriptionKind = String(resource.kind ?? '').trim();
|
||||
const methods = inferMethods(resource.kind);
|
||||
const mandatoryNavigationParameters = buildMandatoryNavigationParameters(resource, mandatoryByTarget);
|
||||
|
||||
return methods.map(method => {
|
||||
const parameters: RestApiParameter[] = [...mandatoryNavigationParameters];
|
||||
|
||||
if (method === 'POST') {
|
||||
parameters.push({
|
||||
name: 'body',
|
||||
in: 'body',
|
||||
dataType: 'object',
|
||||
contentType: 'application/json',
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
method,
|
||||
path,
|
||||
summary,
|
||||
description: descriptionKind ? `OData ${descriptionKind}` : 'OData resource',
|
||||
parameters,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
export function analyseODataDefinition(
|
||||
doc: ODataServiceDocument,
|
||||
endpointUrl: string,
|
||||
metadataDocumentXml?: string | null
|
||||
): RestApiDefinition {
|
||||
const resources = Array.isArray(doc?.value) ? doc.value : [];
|
||||
const categoriesByName = new Map<string, RestApiEndpoint[]>();
|
||||
const metadataDocument = metadataDocumentXml ? parseODataMetadataDocument(metadataDocumentXml) : null;
|
||||
const mandatoryByTarget = deduceMandatoryNavigationByTarget(metadataDocument, resources);
|
||||
|
||||
for (const resource of resources) {
|
||||
const endpoints = createODataResourceEndpoints(resource, mandatoryByTarget);
|
||||
if (endpoints.length === 0) continue;
|
||||
|
||||
const categoryName = String(resource.kind ?? 'Resources').trim() || 'Resources';
|
||||
const existingEndpoints = categoriesByName.get(categoryName) || [];
|
||||
existingEndpoints.push(...endpoints);
|
||||
categoriesByName.set(categoryName, existingEndpoints);
|
||||
}
|
||||
|
||||
const metadataEndpoint: RestApiEndpoint = {
|
||||
method: 'GET',
|
||||
path: '/$metadata',
|
||||
summary: '$metadata',
|
||||
description: 'OData service metadata',
|
||||
parameters: [],
|
||||
};
|
||||
|
||||
const metadataCategory = categoriesByName.get('Metadata') || [];
|
||||
metadataCategory.push(metadataEndpoint);
|
||||
categoriesByName.set('Metadata', metadataCategory);
|
||||
|
||||
const serviceRoot = normalizeServiceRoot(doc?.['@odata.context'], endpointUrl);
|
||||
const servers: RestApiServer[] = serviceRoot ? [{ url: serviceRoot }] : [];
|
||||
|
||||
return {
|
||||
categories: Array.from(categoriesByName.entries()).map(([name, endpoints]) => ({
|
||||
name,
|
||||
endpoints,
|
||||
})),
|
||||
servers,
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,93 @@
|
||||
import type { EngineDriver } from 'dbgate-types';
|
||||
import { buildRestAuthHeaders } from './restAuthTools';
|
||||
import { apiDriverBase } from './restDriverBase';
|
||||
|
||||
function resolveServiceRoot(contextUrl: string | undefined, fallbackUrl: string): string {
|
||||
const safeFallback = String(fallbackUrl ?? '').trim();
|
||||
|
||||
if (typeof contextUrl === 'string' && contextUrl.trim()) {
|
||||
try {
|
||||
const resolved = new URL(contextUrl.trim(), safeFallback || undefined);
|
||||
resolved.hash = '';
|
||||
resolved.search = '';
|
||||
resolved.pathname = resolved.pathname.replace(/\/$metadata$/i, '');
|
||||
|
||||
const url = resolved.toString();
|
||||
return url.endsWith('/') ? url : `${url}/`;
|
||||
} catch {
|
||||
// ignore, fallback below
|
||||
}
|
||||
}
|
||||
|
||||
return safeFallback.endsWith('/') ? safeFallback : `${safeFallback}/`;
|
||||
}
|
||||
|
||||
async function loadODataServiceDocument(dbhan: any) {
|
||||
if (!dbhan?.connection?.apiServerUrl1) {
|
||||
throw new Error('DBGM-00330 OData endpoint URL is not configured');
|
||||
}
|
||||
|
||||
const response = await dbhan.axios.get(dbhan.connection.apiServerUrl1, {
|
||||
headers: buildRestAuthHeaders(dbhan.connection.restAuth),
|
||||
});
|
||||
|
||||
const document = response?.data;
|
||||
if (!document || typeof document !== 'object') {
|
||||
throw new Error('DBGM-00331 OData service document is empty or invalid');
|
||||
}
|
||||
|
||||
if (!document['@odata.context']) {
|
||||
throw new Error('DBGM-00332 OData service document does not contain @odata.context');
|
||||
}
|
||||
|
||||
return document;
|
||||
}
|
||||
|
||||
function getODataVersion(document: any): string {
|
||||
const contextUrl = String(document?.['@odata.context'] ?? '').trim();
|
||||
const versionMatch = contextUrl.match(/\/v(\d+(?:\.\d+)*)\/$metadata$/i);
|
||||
if (versionMatch?.[1]) return versionMatch[1];
|
||||
return '';
|
||||
}
|
||||
|
||||
// DbGate engine driver exposing an OData service as a single virtual REST
// "database". Builds on apiDriverBase and talks to the service via the axios
// instance supplied on the connection.
// @ts-ignore
export const oDataDriver: EngineDriver = {
  ...apiDriverBase,
  engine: 'odata@rest',
  title: 'OData - REST',
  databaseEngineTypes: ['rest', 'odata'],
  icon: '<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128"><rect width="128" height="128" fill="#f9a000"/><rect x="12" y="12" width="47" height="12" fill="#ffffff"/><rect x="69" y="12" width="47" height="12" fill="#ffffff"/><rect x="12" y="37" width="47" height="12" fill="#ffffff"/><rect x="69" y="37" width="47" height="12" fill="#ffffff"/><rect x="12" y="62" width="47" height="12" fill="#ffffff"/><rect x="69" y="62" width="47" height="12" fill="#ffffff"/><rect x="69" y="87" width="47" height="12" fill="#ffffff"/><circle cx="35" cy="102" r="20" fill="#e6e6e6"/></svg>',
  apiServerUrl1Label: 'OData Service URL',

  // Shows whatever the base driver shows, plus the single OData service URL field.
  showConnectionField: (field, values) => {
    if (apiDriverBase.showConnectionField(field, values)) return true;
    if (field === 'apiServerUrl1') return true;
    return false;
  },

  // An OData connection always maps onto exactly one virtual database.
  beforeConnectionSave: connection => ({
    ...connection,
    singleDatabase: true,
    defaultDatabase: '_api_database_',
  }),

  // No persistent client is kept: requests go through the shared axios instance.
  async connect(connection: any) {
    return {
      connection,
      client: null,
      database: '_api_database_',
      axios: connection.axios,
    };
  },

  // Reports the OData protocol version (parsed from @odata.context when the
  // URL carries one) together with the number of resources the service
  // document advertises in its `value` array.
  async getVersion(dbhan: any) {
    const document = await loadODataServiceDocument(dbhan);
    const resourcesCount = Array.isArray(document?.value) ? document.value.length : 0;
    const odataVersion = getODataVersion(document);

    return {
      version: odataVersion || 'OData',
      versionText: `OData${odataVersion ? ` ${odataVersion}` : ''}, ${resourcesCount} resources`,
    };
  },
};
|
||||
@@ -0,0 +1,161 @@
|
||||
import type { ODataMetadataDocument, ODataMetadataEntitySet, ODataMetadataEntityType, ODataMetadataNavigationProperty } from './oDataAdapter';
|
||||
|
||||
function decodeXmlEntities(value: string): string {
|
||||
return String(value ?? '')
|
||||
.replace(/"/g, '"')
|
||||
.replace(/'/g, "'")
|
||||
.replace(/</g, '<')
|
||||
.replace(/>/g, '>')
|
||||
.replace(/&/g, '&');
|
||||
}
|
||||
|
||||
function parseXmlAttributes(attributesText: string): Record<string, string> {
|
||||
const attributes: Record<string, string> = {};
|
||||
const regex = /([A-Za-z_][A-Za-z0-9_.:-]*)\s*=\s*("([^"]*)"|'([^']*)')/g;
|
||||
let match = regex.exec(attributesText || '');
|
||||
|
||||
while (match) {
|
||||
const rawName = match[1];
|
||||
const localName = rawName.includes(':') ? rawName.split(':').pop() || rawName : rawName;
|
||||
const rawValue = match[3] ?? match[4] ?? '';
|
||||
const decoded = decodeXmlEntities(rawValue);
|
||||
attributes[rawName] = decoded;
|
||||
attributes[localName] = decoded;
|
||||
match = regex.exec(attributesText || '');
|
||||
}
|
||||
|
||||
return attributes;
|
||||
}
|
||||
|
||||
/**
 * Extracts every occurrence of `elementName` from `xml` (tolerating namespace
 * prefixes), returning each element's parsed attribute map and raw inner XML.
 * Handles both <Tag ...>...</Tag> and self-closing <Tag .../> forms; the
 * self-closing matches are appended after all full-tag matches.
 *
 * NOTE(review): this is a regex scanner, not a real XML parser — the lazy
 * inner match stops at the first closing tag, so same-name nesting is not
 * matched recursively. Confirm the metadata documents fed here never nest
 * elements of the same name.
 */
function extractXmlElements(xml: string, elementName: string): Array<{ attributes: Record<string, string>; innerXml: string }> {
  const elements: Array<{ attributes: Record<string, string>; innerXml: string }> = [];
  // Matches <prefix:Name attrs>inner</prefix:Name>; group 1 = attrs, group 2 = inner XML.
  const fullTagRegex = new RegExp(
    `<(?:[A-Za-z_][A-Za-z0-9_.-]*:)?${elementName}\\b([^>]*)>([\\s\\S]*?)<\\/(?:[A-Za-z_][A-Za-z0-9_.-]*:)?${elementName}>`,
    'gi'
  );
  // Matches self-closing <prefix:Name attrs/>; group 1 = attrs.
  const selfClosingRegex = new RegExp(
    `<(?:[A-Za-z_][A-Za-z0-9_.-]*:)?${elementName}\\b([^>]*)\\/>`,
    'gi'
  );

  // The /g regexes are stateful (lastIndex); each exec advances to the next match.
  let fullMatch = fullTagRegex.exec(xml || '');
  while (fullMatch) {
    elements.push({
      attributes: parseXmlAttributes(fullMatch[1] || ''),
      innerXml: fullMatch[2] || '',
    });
    fullMatch = fullTagRegex.exec(xml || '');
  }

  let selfClosingMatch = selfClosingRegex.exec(xml || '');
  while (selfClosingMatch) {
    elements.push({
      attributes: parseXmlAttributes(selfClosingMatch[1] || ''),
      innerXml: '',
    });
    selfClosingMatch = selfClosingRegex.exec(xml || '');
  }

  return elements;
}
|
||||
|
||||
function toBoolAttribute(value: string | undefined): boolean {
|
||||
return String(value ?? '').trim().toLowerCase() === 'true';
|
||||
}
|
||||
|
||||
function normalizeEntitySetName(value: string | undefined): string {
|
||||
const input = String(value ?? '').trim();
|
||||
if (!input) return '';
|
||||
|
||||
const noContainer = input.includes('/') ? input.split('/').pop() || '' : input;
|
||||
return noContainer.includes('.') ? noContainer.split('.').pop() || noContainer : noContainer;
|
||||
}
|
||||
|
||||
/**
 * Parses an OData $metadata (CSDL XML) document into a lookup-friendly shape:
 * - entityTypes keyed by namespace-qualified type name, each with its key
 *   properties, Edm.String properties and navigation properties;
 * - entitySets keyed by set name, each with its entity type and the
 *   navigation-property-binding path -> target-set map.
 * Uses the regex-based extractXmlElements scanner rather than a real XML parser.
 */
export function parseODataMetadataDocument(metadataXml: string): ODataMetadataDocument {
  const schemas = extractXmlElements(metadataXml || '', 'Schema');

  const entityTypes: Record<string, ODataMetadataEntityType> = {};
  const entitySets: Record<string, ODataMetadataEntitySet> = {};

  for (const schema of schemas) {
    const namespace = String(schema.attributes.Namespace || '').trim();

    // --- EntityType elements: collect keys, string properties, navigations ---
    for (const entityTypeNode of extractXmlElements(schema.innerXml, 'EntityType')) {
      const typeName = String(entityTypeNode.attributes.Name || '').trim();
      if (!typeName) continue;

      const fullTypeName = namespace ? `${namespace}.${typeName}` : typeName;
      const keyProperties: string[] = [];
      const stringProperties: string[] = [];
      const navigationProperties: ODataMetadataNavigationProperty[] = [];

      // <Key><PropertyRef Name="..."/></Key> — key names, deduplicated.
      for (const keyNode of extractXmlElements(entityTypeNode.innerXml, 'Key')) {
        for (const propRef of extractXmlElements(keyNode.innerXml, 'PropertyRef')) {
          const keyName = String(propRef.attributes.Name || '').trim();
          if (keyName && !keyProperties.includes(keyName)) {
            keyProperties.push(keyName);
          }
        }
      }

      // Only Edm.String properties are recorded (presumably used as
      // display-label candidates by the callers — verify against lookup code).
      for (const propertyNode of extractXmlElements(entityTypeNode.innerXml, 'Property')) {
        const propName = String(propertyNode.attributes.Name || '').trim();
        const propType = String(propertyNode.attributes.Type || '').trim();
        if (propName && /^Edm\.String$/i.test(propType)) {
          stringProperties.push(propName);
        }
      }

      // Navigation properties; per CSDL, Nullable defaults to true when absent.
      for (const navNode of extractXmlElements(entityTypeNode.innerXml, 'NavigationProperty')) {
        const navName = String(navNode.attributes.Name || '').trim();
        if (!navName) continue;

        navigationProperties.push({
          name: navName,
          type: String(navNode.attributes.Type || '').trim(),
          containsTarget: toBoolAttribute(navNode.attributes.ContainsTarget),
          nullable: navNode.attributes.Nullable === undefined ? true : toBoolAttribute(navNode.attributes.Nullable),
        });
      }

      entityTypes[fullTypeName] = {
        typeName,
        fullTypeName,
        keyProperties,
        stringProperties,
        navigationProperties,
      };
    }

    // --- EntitySet elements (scanned from the Schema's whole inner XML, so
    // sets declared inside an EntityContainer child are found as well) ---
    for (const entitySetNode of extractXmlElements(schema.innerXml, 'EntitySet')) {
      const setName = String(entitySetNode.attributes.Name || '').trim();
      const entityType = String(entitySetNode.attributes.EntityType || '').trim();
      if (!setName || !entityType) continue;

      const navigationBindings: Record<string, string> = {};

      for (const bindingNode of extractXmlElements(entitySetNode.innerXml, 'NavigationPropertyBinding')) {
        const path = String(bindingNode.attributes.Path || '').trim();
        const target = normalizeEntitySetName(bindingNode.attributes.Target);
        if (!path || !target) continue;

        navigationBindings[path] = target;
        // Also index by the path's last segment so simple lookups match
        // bindings declared with multi-segment paths (first binding wins).
        const pathLastSegment = path.split('/').pop();
        if (pathLastSegment && !navigationBindings[pathLastSegment]) {
          navigationBindings[pathLastSegment] = target;
        }
      }

      entitySets[setName] = {
        name: setName,
        entityType,
        navigationBindings,
      };
    }
  }

  return {
    entityTypes,
    entitySets,
  };
}
|
||||
@@ -4,7 +4,7 @@ import { apiDriverBase } from './restDriverBase';
|
||||
|
||||
async function loadOpenApiDefinition(dbhan: any) {
|
||||
if (!dbhan?.connection?.apiServerUrl1) {
|
||||
throw new Error('DBGM-00000 REST connection URL is not configured');
|
||||
throw new Error('DBGM-00313 REST connection URL is not configured');
|
||||
}
|
||||
|
||||
const response = await dbhan.axios.get(dbhan.connection.apiServerUrl1);
|
||||
@@ -20,7 +20,7 @@ async function loadOpenApiDefinition(dbhan: any) {
|
||||
}
|
||||
|
||||
if (!openApiDefinition || typeof openApiDefinition !== 'object') {
|
||||
throw new Error('DBGM-00000 API documentation is empty or could not be parsed');
|
||||
throw new Error('DBGM-00314 API documentation is empty or could not be parsed');
|
||||
}
|
||||
|
||||
return openApiDefinition;
|
||||
@@ -30,7 +30,7 @@ async function loadOpenApiDefinition(dbhan: any) {
|
||||
export const openApiDriver: EngineDriver = {
|
||||
...apiDriverBase,
|
||||
engine: 'openapi@rest',
|
||||
title: 'OpenAPI - REST (experimental)',
|
||||
title: 'OpenAPI - REST',
|
||||
databaseEngineTypes: ['rest', 'openapi'],
|
||||
icon: '<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128"><path fill="#85ea2d" d="M63.999 124.945c-33.607 0-60.95-27.34-60.95-60.949C3.05 30.388 30.392 3.048 64 3.048s60.95 27.342 60.95 60.95c0 33.607-27.343 60.946-60.95 60.946z"/><path fill="#173647" d="M40.3 43.311c-.198 2.19.072 4.454-.073 6.668-.173 2.217-.444 4.407-.888 6.596-.615 3.126-2.56 5.489-5.24 7.458 5.218 3.396 5.807 8.662 6.152 14.003.172 2.88.098 5.785.394 8.638.221 2.215 1.082 2.782 3.372 2.854.935.025 1.894 0 2.978 0v6.842c-6.768 1.156-12.354-.762-13.734-6.496a39.329 39.329 0 0 1-.836-6.4c-.148-2.287.097-4.577-.074-6.864-.492-6.277-1.305-8.393-7.308-8.689v-7.8c.441-.1.86-.174 1.302-.223 3.298-.172 4.701-1.182 5.414-4.43a37.512 37.512 0 0 0 .616-5.536c.247-3.569.148-7.21.763-10.754.86-5.094 4.01-7.556 9.254-7.852 1.476-.074 2.978 0 4.676 0v6.99c-.714.05-1.33.147-1.969.147-4.258-.148-4.48 1.304-4.8 4.848zm8.195 16.193h-.099c-2.462-.123-4.578 1.796-4.702 4.258-.122 2.485 1.797 4.603 4.259 4.724h.295c2.436.148 4.527-1.724 4.676-4.16v-.245c.05-2.486-1.944-4.527-4.43-4.577zm15.43 0c-2.386-.074-4.38 1.796-4.454 4.159 0 .149 0 .271.024.418 0 2.684 1.821 4.406 4.578 4.406 2.707 0 4.406-1.772 4.406-4.553-.025-2.682-1.823-4.455-4.554-4.43Zm15.801 0a4.596 4.596 0 0 0-4.676 4.454 4.515 4.515 0 0 0 4.528 4.528h.05c2.264.394 4.553-1.796 4.701-4.429.122-2.437-2.092-4.553-4.604-4.553Zm21.682.369c-2.855-.123-4.284-1.083-4.996-3.79a27.444 27.444 0 0 1-.811-5.292c-.198-3.298-.174-6.62-.395-9.918-.516-7.826-6.177-10.557-14.397-9.205v6.792c1.304 0 2.313 0 3.322.025 1.748.024 3.077.69 3.249 2.634.172 1.772.172 3.568.344 5.365.346 3.57.542 7.187 1.157 10.706.542 2.904 2.536 5.07 5.02 6.841-4.355 2.929-5.636 7.113-5.857 11.814-.122 3.223-.196 6.472-.368 9.721-.148 2.953-1.181 3.913-4.16 3.987-.835.024-1.648.098-2.583.148v6.964c1.748 0 3.347.1 4.946 0 4.971-.295 7.974-2.706 8.96-7.531.417-2.658.662-5.34.737-8.023.171-2.46.148-4.946.394-7.382.369-3.815 2.116-5.389 5.93-5.636a5.161 5.161 0 0 0 
1.06-.245v-7.801c-.64-.074-1.084-.148-1.552-.173zM64 6.1c31.977 0 57.9 25.92 57.9 57.898 0 31.977-25.923 57.899-57.9 57.899-31.976 0-57.898-25.922-57.898-57.9C6.102 32.023 32.024 6.101 64 6.101m0-6.1C28.71 0 0 28.71 0 64c0 35.288 28.71 63.998 64 63.998 35.289 0 64-28.71 64-64S99.289.002 64 .002Z"/></svg>',
|
||||
apiServerUrl1Label: 'API Definition URL',
|
||||
@@ -39,7 +39,7 @@ export const openApiDriver: EngineDriver = {
|
||||
loadApiServerUrl2Options: true,
|
||||
|
||||
showConnectionField: (field, values) => {
|
||||
if (apiDriverBase.showAuthConnectionField(field, values)) return true;
|
||||
if (apiDriverBase.showConnectionField(field, values)) return true;
|
||||
if (field === 'apiServerUrl1') return true;
|
||||
if (field === 'apiServerUrl2') return true;
|
||||
return false;
|
||||
|
||||
@@ -7,6 +7,11 @@ export interface RestApiParameter {
|
||||
description?: string;
|
||||
required?: boolean;
|
||||
defaultValue?: any;
|
||||
options?: Array<{ label: string; value: string }>;
|
||||
odataLookupPath?: string;
|
||||
odataLookupEntitySet?: string;
|
||||
odataLookupValueField?: string;
|
||||
odataLookupLabelField?: string;
|
||||
}
|
||||
|
||||
export interface RestApiEndpoint {
|
||||
|
||||
@@ -0,0 +1,134 @@
|
||||
const { executeODataApiEndpoint } = require('./restApiExecutor');
|
||||
|
||||
// Builds a minimal REST definition fixture for the executor tests: a single
// 'EntitySet' category containing a parameterized GET /customers endpoint and
// a parameterless GET /$metadata endpoint.
function createDefinition() {
  const customersEndpoint = {
    method: 'GET',
    path: '/customers',
    parameters: [
      {
        name: 'company',
        in: 'query',
        dataType: 'string',
        required: true,
      },
    ],
  };

  const metadataEndpoint = {
    method: 'GET',
    path: '/$metadata',
    parameters: [],
  };

  return {
    categories: [
      {
        name: 'EntitySet',
        endpoints: [customersEndpoint, metadataEndpoint],
      },
    ],
  };
}
|
||||
|
||||
// Verifies that recognized '$'-prefixed system query options in parameterValues
// are forwarded on the request URL alongside ordinary query parameters, with
// arrays joined by commas and booleans/numbers stringified.
test('adds OData system query options from parameterValues', async () => {
  const calls = [];
  // Stub axios: records each request config and returns an empty OK response.
  const axios = async args => {
    calls.push(args);
    return { status: 200, data: {} };
  };

  await executeODataApiEndpoint(
    createDefinition(),
    '/customers',
    'GET',
    {
      company: '123',
      '$top': 50,
      '$skip': '10',
      '$count': true,
      '$select': ['id', 'displayName'],
      '$orderby': 'displayName asc',
      '$filter': 'displayName ne null',
      '$search': 'dino',
      '$expand': 'addresses',
      '$format': 'application/json',
    },
    'https://example.test/odata',
    null,
    axios
  );

  expect(calls).toHaveLength(1);
  const requestUrl = String(calls[0].url);
  const parsed = new URL(requestUrl);

  expect(parsed.pathname).toBe('/odata/customers');
  expect(parsed.searchParams.get('company')).toBe('123');
  expect(parsed.searchParams.get('$top')).toBe('50');
  expect(parsed.searchParams.get('$skip')).toBe('10');
  expect(parsed.searchParams.get('$count')).toBe('true');
  expect(parsed.searchParams.get('$select')).toBe('id,displayName');
  expect(parsed.searchParams.get('$orderby')).toBe('displayName asc');
  expect(parsed.searchParams.get('$filter')).toBe('displayName ne null');
  expect(parsed.searchParams.get('$search')).toBe('dino');
  expect(parsed.searchParams.get('$expand')).toBe('addresses');
  expect(parsed.searchParams.get('$format')).toBe('application/json');
});

// Verifies alias handling (bare 'top'/'filter' names map to '$top'/'$filter')
// and that values failing validation (non-numeric $top, negative $skip,
// non-boolean $count) are silently dropped from the URL.
test('accepts non-dollar aliases and ignores invalid system option values', async () => {
  const calls = [];
  const axios = async args => {
    calls.push(args);
    return { status: 200, data: {} };
  };

  await executeODataApiEndpoint(
    createDefinition(),
    '/customers',
    'GET',
    {
      company: '123',
      top: 'abc',
      skip: -1,
      count: 'yes',
      select: ['id'],
      filter: 'id ne null',
    },
    'https://example.test/odata',
    null,
    axios
  );

  expect(calls).toHaveLength(1);
  const parsed = new URL(String(calls[0].url));
  expect(parsed.searchParams.get('$top')).toBeNull();
  expect(parsed.searchParams.get('$skip')).toBeNull();
  expect(parsed.searchParams.get('$count')).toBeNull();
  expect(parsed.searchParams.get('$select')).toBe('id');
  expect(parsed.searchParams.get('$filter')).toBe('id ne null');
});

// Verifies that the $metadata endpoint is requested bare: neither ordinary
// query parameters nor system query options may be appended to it.
test('does not add OData system query options to $metadata endpoint', async () => {
  const calls = [];
  const axios = async args => {
    calls.push(args);
    return { status: 200, data: {} };
  };

  await executeODataApiEndpoint(
    createDefinition(),
    '/$metadata',
    'GET',
    {
      '$top': 10,
      '$count': true,
    },
    'https://example.test/odata',
    null,
    axios
  );

  expect(calls).toHaveLength(1);
  const parsed = new URL(String(calls[0].url));
  expect(parsed.pathname).toBe('/odata/$metadata');
  expect(parsed.search).toBe('');
});
|
||||
@@ -32,7 +32,156 @@ function normalizeValueForRequest(value: any, parameter: RestApiParameter): any
|
||||
return value;
|
||||
}
|
||||
|
||||
export async function executeRestApiEndpoint(
|
||||
function splitPathAndQuery(path: string) {
|
||||
const value = String(path || '');
|
||||
const index = value.indexOf('?');
|
||||
if (index < 0) {
|
||||
return {
|
||||
pathOnly: value,
|
||||
queryString: '',
|
||||
};
|
||||
}
|
||||
return {
|
||||
pathOnly: value.slice(0, index),
|
||||
queryString: value.slice(index + 1),
|
||||
};
|
||||
}
|
||||
|
||||
function addAuthHeaders(headers: Record<string, string>, auth: RestApiAuthorization | null) {
|
||||
if (!auth) return;
|
||||
|
||||
if (auth.type === 'basic') {
|
||||
const basicAuth = Buffer.from(`${auth.user}:${auth.password}`).toString('base64');
|
||||
headers['Authorization'] = `Basic ${basicAuth}`;
|
||||
} else if (auth.type === 'bearer') {
|
||||
headers['Authorization'] = `Bearer ${auth.token}`;
|
||||
} else if (auth.type === 'apikey') {
|
||||
headers[auth.header] = auth.value;
|
||||
}
|
||||
}
|
||||
|
||||
function findEndpointDefinition(
|
||||
definition: RestApiDefinition,
|
||||
endpoint: string,
|
||||
method: string
|
||||
) {
|
||||
return definition.categories
|
||||
.flatMap(category => category.endpoints)
|
||||
.find(ep => ep.path === endpoint && ep.method === method);
|
||||
}
|
||||
|
||||
function buildRequestUrl(server: string, pathOnly: string) {
|
||||
const normalizedServer = String(server || '').trim();
|
||||
const normalizedPath = String(pathOnly || '').trim();
|
||||
|
||||
if (!normalizedServer) {
|
||||
return normalizedPath;
|
||||
}
|
||||
|
||||
try {
|
||||
const baseUrl = normalizedServer.endsWith('/') ? normalizedServer : `${normalizedServer}/`;
|
||||
const relativePath = normalizedPath.replace(/^\//, '');
|
||||
return new URL(relativePath, baseUrl).toString();
|
||||
} catch {
|
||||
return normalizedServer + normalizedPath;
|
||||
}
|
||||
}
|
||||
|
||||
function appendQueryAndCookies(
|
||||
url: string,
|
||||
query: URLSearchParams,
|
||||
cookies: string[],
|
||||
headers: Record<string, string>
|
||||
) {
|
||||
const queryStringValue = query.toString();
|
||||
if (queryStringValue) {
|
||||
const separator = url.includes('?') ? '&' : '?';
|
||||
url += separator + queryStringValue;
|
||||
}
|
||||
|
||||
if (cookies.length > 0) {
|
||||
headers['Cookie'] = cookies.join('; ');
|
||||
}
|
||||
|
||||
return url;
|
||||
}
|
||||
|
||||
// OData system query options (canonical lower-case, '$'-prefixed form) that
// the executor recognizes in parameterValues and forwards on the request URL.
const ODATA_SYSTEM_QUERY_OPTIONS = new Set([
  '$filter',
  '$select',
  '$expand',
  '$orderby',
  '$top',
  '$skip',
  '$count',
  '$search',
  '$format',
]);

// Convenience aliases: the same options may be supplied without the '$'
// prefix; resolveODataQueryOptionKey maps them to the canonical form.
const ODATA_SYSTEM_QUERY_ALIASES: Record<string, string> = {
  filter: '$filter',
  select: '$select',
  expand: '$expand',
  orderby: '$orderby',
  top: '$top',
  skip: '$skip',
  count: '$count',
  search: '$search',
  format: '$format',
};
|
||||
|
||||
function resolveODataQueryOptionKey(rawKey: string): string | null {
|
||||
const key = String(rawKey || '').trim();
|
||||
if (!key) return null;
|
||||
|
||||
const keyLower = key.toLowerCase();
|
||||
if (ODATA_SYSTEM_QUERY_OPTIONS.has(keyLower)) {
|
||||
return keyLower;
|
||||
}
|
||||
|
||||
return ODATA_SYSTEM_QUERY_ALIASES[keyLower] || null;
|
||||
}
|
||||
|
||||
function normalizeODataQueryOptionValue(optionKey: string, value: any): string | null {
|
||||
if (!hasValue(value)) return null;
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
const items = value.filter(item => hasValue(item)).map(item => String(item).trim()).filter(Boolean);
|
||||
if (items.length === 0) return null;
|
||||
return items.join(',');
|
||||
}
|
||||
|
||||
if (optionKey === '$count') {
|
||||
if (typeof value === 'boolean') return value ? 'true' : 'false';
|
||||
const lowered = String(value).trim().toLowerCase();
|
||||
if (lowered === 'true' || lowered === 'false') return lowered;
|
||||
return null;
|
||||
}
|
||||
|
||||
if (optionKey === '$top' || optionKey === '$skip') {
|
||||
const numeric = Number(value);
|
||||
if (Number.isFinite(numeric) && numeric >= 0) {
|
||||
return String(Math.trunc(numeric));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
return String(value).trim();
|
||||
}
|
||||
|
||||
function applyODataSystemQueryOptions(query: URLSearchParams, parameterValues: Record<string, any>) {
|
||||
for (const [rawKey, rawValue] of Object.entries(parameterValues || {})) {
|
||||
const optionKey = resolveODataQueryOptionKey(rawKey);
|
||||
if (!optionKey) continue;
|
||||
|
||||
const normalizedValue = normalizeODataQueryOptionValue(optionKey, rawValue);
|
||||
if (!hasValue(normalizedValue)) continue;
|
||||
|
||||
query.set(optionKey, String(normalizedValue));
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeRestApiEndpointOpenApi(
|
||||
definition: RestApiDefinition,
|
||||
endpoint: string,
|
||||
method: string,
|
||||
@@ -41,16 +190,15 @@ export async function executeRestApiEndpoint(
|
||||
auth: RestApiAuthorization | null,
|
||||
axios: AxiosInstance
|
||||
): Promise<any> {
|
||||
const endpointDef = definition.categories
|
||||
.flatMap(category => category.endpoints)
|
||||
.find(ep => ep.path === endpoint && ep.method === method);
|
||||
const endpointDef = findEndpointDefinition(definition, endpoint, method);
|
||||
if (!endpointDef) {
|
||||
throw new Error(`Endpoint ${method} ${endpoint} not found in definition.`);
|
||||
}
|
||||
|
||||
let url = server + endpointDef.path;
|
||||
const { pathOnly, queryString } = splitPathAndQuery(endpointDef.path);
|
||||
let url = buildRequestUrl(server, pathOnly);
|
||||
const headers: Record<string, string> = {};
|
||||
const query = new URLSearchParams();
|
||||
const query = new URLSearchParams(queryString);
|
||||
const cookies: string[] = [];
|
||||
let body: any = undefined;
|
||||
|
||||
@@ -88,26 +236,87 @@ export async function executeRestApiEndpoint(
|
||||
}
|
||||
}
|
||||
|
||||
const queryString = query.toString();
|
||||
if (queryString) {
|
||||
const separator = url.includes('?') ? '&' : '?';
|
||||
url += separator + queryString;
|
||||
}
|
||||
|
||||
if (cookies.length > 0) {
|
||||
headers['Cookie'] = cookies.join('; ');
|
||||
}
|
||||
|
||||
if (auth) {
|
||||
if (auth.type === 'basic') {
|
||||
const basicAuth = Buffer.from(`${auth.user}:${auth.password}`).toString('base64');
|
||||
headers['Authorization'] = `Basic ${basicAuth}`;
|
||||
} else if (auth.type === 'bearer') {
|
||||
headers['Authorization'] = `Bearer ${auth.token}`;
|
||||
} else if (auth.type === 'apikey') {
|
||||
headers[auth.header] = auth.value;
|
||||
}
|
||||
}
|
||||
url = appendQueryAndCookies(url, query, cookies, headers);
|
||||
addAuthHeaders(headers, auth);
|
||||
|
||||
const resp = await axios({
|
||||
method,
|
||||
url,
|
||||
headers,
|
||||
data: body,
|
||||
});
|
||||
|
||||
return resp;
|
||||
}
|
||||
|
||||
export async function executeODataApiEndpoint(
|
||||
definition: RestApiDefinition,
|
||||
endpoint: string,
|
||||
method: string,
|
||||
parameterValues: Record<string, any>,
|
||||
server: string,
|
||||
auth: RestApiAuthorization | null,
|
||||
axios: AxiosInstance
|
||||
): Promise<any> {
|
||||
const endpointDef = findEndpointDefinition(definition, endpoint, method);
|
||||
if (!endpointDef) {
|
||||
throw new Error(`Endpoint ${method} ${endpoint} not found in definition.`);
|
||||
}
|
||||
|
||||
const { pathOnly, queryString } = splitPathAndQuery(endpointDef.path);
|
||||
const metadataPath = pathOnly.replace(/\/+$/, '') === '/$metadata';
|
||||
|
||||
let url = buildRequestUrl(server, pathOnly);
|
||||
const headers: Record<string, string> = {
|
||||
Accept: 'application/json',
|
||||
'OData-Version': '4.0',
|
||||
};
|
||||
const query = metadataPath ? new URLSearchParams() : new URLSearchParams(queryString);
|
||||
const cookies: string[] = [];
|
||||
let body: any = undefined;
|
||||
|
||||
for (const param of endpointDef.parameters) {
|
||||
const value = normalizeValueForRequest(parameterValues[param.name], param);
|
||||
if (!hasValue(value) && param.in !== 'path') {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (param.in === 'path') {
|
||||
url = url.replace(`{${param.name}}`, encodeURIComponent(value));
|
||||
} else if (param.in === 'query') {
|
||||
if (metadataPath) continue;
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
for (const item of value) {
|
||||
query.append(param.name, String(item));
|
||||
}
|
||||
} else {
|
||||
query.append(param.name, String(value));
|
||||
}
|
||||
} else if (param.in === 'header') {
|
||||
headers[param.name] = Array.isArray(value) ? value.map(item => String(item)).join(',') : String(value);
|
||||
} else if (param.in === 'cookie') {
|
||||
if (Array.isArray(value)) {
|
||||
for (const item of value) {
|
||||
cookies.push(`${encodeURIComponent(param.name)}=${encodeURIComponent(String(item))}`);
|
||||
}
|
||||
} else {
|
||||
cookies.push(`${encodeURIComponent(param.name)}=${encodeURIComponent(String(value))}`);
|
||||
}
|
||||
} else if (param.in === 'body') {
|
||||
body = value;
|
||||
if (param.contentType && !headers['Content-Type']) {
|
||||
headers['Content-Type'] = param.contentType;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!metadataPath) {
|
||||
applyODataSystemQueryOptions(query, parameterValues);
|
||||
}
|
||||
|
||||
url = appendQueryAndCookies(url, query, cookies, headers);
|
||||
addAuthHeaders(headers, auth);
|
||||
|
||||
const resp = await axios({
|
||||
method,
|
||||
|
||||
@@ -0,0 +1,15 @@
|
||||
import { RestApiAuthorization } from './restApiDef';
|
||||
|
||||
export function buildRestAuthHeaders(auth: RestApiAuthorization | null) {
|
||||
const headers = {};
|
||||
if (!auth) return headers;
|
||||
if (auth.type === 'basic') {
|
||||
const basicAuth = Buffer.from(`${auth.user}:${auth.password}`).toString('base64');
|
||||
headers['Authorization'] = `Basic ${basicAuth}`;
|
||||
} else if (auth.type === 'bearer') {
|
||||
headers['Authorization'] = `Bearer ${auth.token}`;
|
||||
} else if (auth.type === 'apikey') {
|
||||
headers[auth.header] = auth.value;
|
||||
}
|
||||
return headers;
|
||||
}
|
||||
@@ -39,4 +39,12 @@ export const apiDriverBase = {
|
||||
}
|
||||
return false;
|
||||
},
|
||||
|
||||
showConnectionField: (field, values) => {
|
||||
if (apiDriverBase.showAuthConnectionField(field, values)) return true;
|
||||
if (field === 'httpProxyUrl') return true;
|
||||
if (field === 'httpProxyUser') return true;
|
||||
if (field === 'httpProxyPassword') return true;
|
||||
return false;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -41,7 +41,7 @@ STORAGE_DATABASE=dbname
|
||||
STORAGE_ENGINE=mysql@dbgate-plugin-mysql
|
||||
```
|
||||
|
||||
You could find more about environment variable configuration on [DbGate docs](https://dbgate.org/docs/env-variables/) page.
|
||||
You could find more about environment variable configuration on [DbGate docs](https://docs.dbgate.io/env-variables/) page.
|
||||
|
||||
After installing, you can run dbgate with command:
|
||||
```sh
|
||||
@@ -65,7 +65,7 @@ dbgate-serve
|
||||
Then open http://localhost:3000 in your browser
|
||||
|
||||
## Download desktop app
|
||||
You can also download binary packages for desktop app from https://dbgate.org . Or run from source code, as described on [github](https://github.com/dbgate/dbgate)
|
||||
You can also download binary packages for desktop app from https://www.dbgate.io . Or run from source code, as described on [github](https://github.com/dbgate/dbgate)
|
||||
|
||||
## Use Oracle with Instant client (thick mode)
|
||||
If you are Oracle database user and you would like to use Oracle instant client (thick mode) instead of thin mode (pure JS NPM package), please make the following:
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "dbgate-serve",
|
||||
"version": "7.0.0-alpha.1",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"homepage": "https://www.dbgate.io/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"name": "dbgate-sqltree",
|
||||
"main": "lib/index.js",
|
||||
"typings": "lib/index.d.ts",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"homepage": "https://www.dbgate.io/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
|
||||
@@ -1,8 +1,55 @@
|
||||
import type { SqlDumper } from 'dbgate-types';
|
||||
import { Condition, BinaryCondition } from './types';
|
||||
import { Condition, BinaryCondition, LikeCondition } from './types';
|
||||
import { dumpSqlExpression } from './dumpSqlExpression';
|
||||
import { dumpSqlSelect } from './dumpSqlCommand';
|
||||
|
||||
|
||||
function dumpLikeAsFunctionCondition(dmp: SqlDumper, condition: LikeCondition) {
|
||||
// For DynamoDB: contains() works only on string attributes
|
||||
// For numeric values, search both as number and as string
|
||||
const likeExpr = condition.right;
|
||||
|
||||
let isNumericValue = false;
|
||||
let numericStringValue = '';
|
||||
if (likeExpr.exprType === 'value' && typeof likeExpr.value === 'string') {
|
||||
const cleanedStr = (likeExpr.value || '').replace(/%/g, '').trim();
|
||||
// Only match valid decimal numbers (not Infinity, NaN, etc.)
|
||||
isNumericValue = /^-?\d+(\.\d+)?$/.test(cleanedStr);
|
||||
numericStringValue = cleanedStr;
|
||||
} else if (likeExpr.exprType === 'value' && typeof likeExpr.value === 'number') {
|
||||
isNumericValue = Number.isFinite(likeExpr.value);
|
||||
numericStringValue = String(likeExpr.value);
|
||||
}
|
||||
|
||||
if (isNumericValue) {
|
||||
// For numeric values: (column = value OR contains(column, 'value'))
|
||||
dmp.putRaw('(');
|
||||
dumpSqlExpression(dmp, condition.left);
|
||||
dmp.putRaw(' = ');
|
||||
dmp.put('%s', numericStringValue);
|
||||
dmp.putRaw(' OR contains(');
|
||||
dumpSqlExpression(dmp, condition.left);
|
||||
dmp.putRaw(', ');
|
||||
dmp.put('%v', numericStringValue);
|
||||
dmp.putRaw('))');
|
||||
} else {
|
||||
// String value: contains(column, value)
|
||||
dmp.putRaw('contains(');
|
||||
dumpSqlExpression(dmp, condition.left);
|
||||
dmp.putRaw(', ');
|
||||
if (likeExpr.exprType === 'value') {
|
||||
let cleanValue = likeExpr.value;
|
||||
if (typeof cleanValue === 'string') {
|
||||
cleanValue = cleanValue.replace(/%/g, '');
|
||||
}
|
||||
dmp.put('%v', cleanValue);
|
||||
} else {
|
||||
dumpSqlExpression(dmp, likeExpr);
|
||||
}
|
||||
dmp.putRaw(')');
|
||||
}
|
||||
}
|
||||
|
||||
export function dumpSqlCondition(dmp: SqlDumper, condition: Condition) {
|
||||
switch (condition.conditionType) {
|
||||
case 'binary':
|
||||
@@ -51,9 +98,13 @@ export function dumpSqlCondition(dmp: SqlDumper, condition: Condition) {
|
||||
});
|
||||
break;
|
||||
case 'like':
|
||||
dumpSqlExpression(dmp, condition.left);
|
||||
dmp.put(dmp.dialect.ilike ? ' ^ilike ' : ' ^like ');
|
||||
dumpSqlExpression(dmp, condition.right);
|
||||
if (dmp.dialect.likeAsFunction) {
|
||||
dumpLikeAsFunctionCondition(dmp, condition);
|
||||
} else {
|
||||
dumpSqlExpression(dmp, condition.left);
|
||||
dmp.put(dmp.dialect.ilike ? ' ^ilike ' : ' ^like ');
|
||||
dumpSqlExpression(dmp, condition.right);
|
||||
}
|
||||
break;
|
||||
case 'notLike':
|
||||
dumpSqlExpression(dmp, condition.left);
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"name": "dbgate-tools",
|
||||
"main": "lib/index.js",
|
||||
"typings": "lib/index.d.ts",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"homepage": "https://www.dbgate.io/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
@@ -33,7 +33,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"blueimp-md5": "^2.19.0",
|
||||
"dbgate-query-splitter": "^4.11.9",
|
||||
"dbgate-query-splitter": "^4.12.0",
|
||||
"dbgate-sqltree": "^7.0.0-alpha.1",
|
||||
"debug": "^4.3.4",
|
||||
"json-stable-stringify": "^1.0.1",
|
||||
|
||||
@@ -124,7 +124,7 @@ export function redis_mergeNextPage(tree: RedisTreeModel, nextPage: RedisLoadRes
|
||||
childrenByKey[dirObj.parentKey].push(dirObj);
|
||||
|
||||
// set key count
|
||||
dirsByKey[dirObj.key].count = childrenByKey[dirObj.key].length;
|
||||
dirsByKey[dirObj.key].count = childrenByKey[dirObj.key]?.length || 0;
|
||||
}
|
||||
|
||||
for (const key in childrenByKey) {
|
||||
|
||||
@@ -49,6 +49,26 @@ export function base64ToHex(base64String) {
|
||||
return '0x' + hexString.toUpperCase();
|
||||
}
|
||||
|
||||
export function base64ToUuid(base64String): string | null {
|
||||
let binaryString: string;
|
||||
try {
|
||||
binaryString = atob(base64String);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
if (binaryString.length !== 16) {
|
||||
return null;
|
||||
}
|
||||
const hex = Array.from(binaryString, c => c.charCodeAt(0).toString(16).padStart(2, '0')).join('');
|
||||
return [
|
||||
hex.slice(0, 8),
|
||||
hex.slice(8, 12),
|
||||
hex.slice(12, 16),
|
||||
hex.slice(16, 20),
|
||||
hex.slice(20, 32),
|
||||
].join('-');
|
||||
}
|
||||
|
||||
export function hexToBase64(hexString) {
|
||||
const binaryString = hexString
|
||||
.match(/.{1,2}/g)
|
||||
@@ -57,6 +77,23 @@ export function hexToBase64(hexString) {
|
||||
return btoa(binaryString);
|
||||
}
|
||||
|
||||
const uuidPattern = '[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}';
|
||||
const uuidRegex = new RegExp(`^${uuidPattern}$`);
|
||||
const uuid3WrapperRegex = new RegExp(`^UUID3\\("(${uuidPattern})"\\)$`);
|
||||
const uuid4WrapperRegex = new RegExp(`^UUID\\("(${uuidPattern})"\\)$`);
|
||||
|
||||
export function uuidToBase64(uuid: string): string | null {
|
||||
if (!uuid || !uuidRegex.test(uuid)) {
|
||||
return null;
|
||||
}
|
||||
const hex = uuid.replace(/-/g, '');
|
||||
const binaryString = hex
|
||||
.match(/.{1,2}/g)
|
||||
.map(byte => String.fromCharCode(parseInt(byte, 16)))
|
||||
.join('');
|
||||
return btoa(binaryString);
|
||||
}
|
||||
|
||||
export function parseCellValue(value, editorTypes?: DataEditorTypesBehaviour) {
|
||||
if (!_isString(value)) return value;
|
||||
|
||||
@@ -65,6 +102,20 @@ export function parseCellValue(value, editorTypes?: DataEditorTypesBehaviour) {
|
||||
}
|
||||
|
||||
if (editorTypes?.parseHexAsBuffer) {
|
||||
const mUuid3 = value.match(uuid3WrapperRegex);
|
||||
if (mUuid3) {
|
||||
const base64Uuid3 = uuidToBase64(mUuid3[1]);
|
||||
if (base64Uuid3 != null) return { $binary: { base64: base64Uuid3, subType: '03' } };
|
||||
}
|
||||
const mUuid4 = value.match(uuid4WrapperRegex);
|
||||
if (mUuid4) {
|
||||
const base64Uuid4 = uuidToBase64(mUuid4[1]);
|
||||
if (base64Uuid4 != null) return { $binary: { base64: base64Uuid4, subType: '04' } };
|
||||
}
|
||||
if (uuidRegex.test(value)) {
|
||||
const base64UuidPlain = uuidToBase64(value);
|
||||
if (base64UuidPlain != null) return { $binary: { base64: base64UuidPlain, subType: '04' } };
|
||||
}
|
||||
const mHex = value.match(/^0x([0-9a-fA-F][0-9a-fA-F])+$/);
|
||||
if (mHex) {
|
||||
return {
|
||||
@@ -266,6 +317,18 @@ export function stringifyCellValue(
|
||||
if (value === false) return { value: 'false', gridStyle: 'valueCellStyle' };
|
||||
|
||||
if (value?.$binary?.base64) {
|
||||
const subType = value.$binary.subType;
|
||||
if (subType === '03' || subType === '04') {
|
||||
const uuidStr = base64ToUuid(value.$binary.base64);
|
||||
if (uuidStr != null) {
|
||||
if (intent === 'gridCellIntent' || intent === 'exportIntent' || intent === 'clipboardIntent' || intent === 'stringConversionIntent') {
|
||||
return { value: uuidStr, gridStyle: 'valueCellStyle' };
|
||||
}
|
||||
// For editing intents: tag with subType so parseCellValue can round-trip it
|
||||
const tag = subType === '03' ? 'UUID3' : 'UUID';
|
||||
return { value: `${tag}("${uuidStr}")`, gridStyle: 'valueCellStyle' };
|
||||
}
|
||||
}
|
||||
return {
|
||||
value: base64ToHex(value.$binary.base64),
|
||||
gridStyle: 'valueCellStyle',
|
||||
|
||||
Vendored
+1
@@ -4,6 +4,7 @@ export interface SqlDialect {
|
||||
rangeSelect?: boolean;
|
||||
limitSelect?: boolean;
|
||||
ilike?: boolean;
|
||||
likeAsFunction?: boolean;
|
||||
rowNumberOverPaging?: boolean;
|
||||
topRecords?: boolean;
|
||||
stringEscapeChar: string;
|
||||
|
||||
Vendored
+7
@@ -59,6 +59,7 @@ export interface QueryOptions {
|
||||
importSqlDump?: boolean;
|
||||
range?: { offset: number; limit: number };
|
||||
readonly?: boolean;
|
||||
commandTimeout?: number;
|
||||
}
|
||||
|
||||
export interface WriteTableOptions {
|
||||
@@ -264,6 +265,7 @@ export interface EngineDriver<TClient = any, TDataBase = any> extends FilterBeha
|
||||
collectionPluralLabel?: string;
|
||||
collectionNameLabel?: string;
|
||||
newCollectionFormParams?: any[];
|
||||
disableRenameCollection?: boolean;
|
||||
icon?: EngineDriverIcon;
|
||||
|
||||
apiServerUrl1Label?: string;
|
||||
@@ -412,6 +414,7 @@ export interface EngineDriver<TClient = any, TDataBase = any> extends FilterBeha
|
||||
): { message: string; severity: 'info' | 'error' | 'debug' } | null;
|
||||
getNativeOperationFormArgs(operation: 'backup' | 'restore'): any[];
|
||||
getAdvancedConnectionFields(): any[];
|
||||
sortCollectionDisplayColumns?(columns: any[]): any[];
|
||||
|
||||
analyserClass?: any;
|
||||
dumperClass?: any;
|
||||
@@ -421,6 +424,10 @@ export interface EngineDriver<TClient = any, TDataBase = any> extends FilterBeha
|
||||
engine: string;
|
||||
conid?: string;
|
||||
};
|
||||
|
||||
setTransactionIsolationLevel?(dbhan: DatabaseHandle<TClient, TDataBase>, level: string): Promise<void>;
|
||||
isolationLevels?: string[];
|
||||
defaultIsolationLevel?: string;
|
||||
}
|
||||
|
||||
export interface DatabaseModification {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"version": "7.0.0-alpha.1",
|
||||
"name": "dbgate-types",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"homepage": "https://www.dbgate.io/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
# dbgate-web
|
||||
|
||||
This package is used internally by [DbGate](https://dbgate.org)
|
||||
This package is used internally by [DbGate](https://www.dbgate.io)
|
||||
|
||||
@@ -0,0 +1,37 @@
|
||||
const postcss = require('postcss');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
const production = process.env.NODE_ENV === 'production';
|
||||
|
||||
async function buildTailwind() {
|
||||
const inputFile = path.resolve(__dirname, 'src/tailwind.css');
|
||||
const outputFile = path.resolve(__dirname, 'public/build/tailwind.css');
|
||||
|
||||
const css = fs.readFileSync(inputFile, 'utf8');
|
||||
|
||||
const plugins = [require('@tailwindcss/postcss')({}), require('autoprefixer')({})];
|
||||
|
||||
const result = await postcss(plugins).process(css, {
|
||||
from: inputFile,
|
||||
to: outputFile,
|
||||
});
|
||||
|
||||
// Ensure output directory exists
|
||||
fs.mkdirSync(path.dirname(outputFile), { recursive: true });
|
||||
|
||||
// Write processed CSS
|
||||
fs.writeFileSync(outputFile, result.css);
|
||||
|
||||
// Write source map in dev mode
|
||||
if (!production && result.map) {
|
||||
fs.writeFileSync(outputFile + '.map', result.map.toString());
|
||||
}
|
||||
|
||||
console.log('Tailwind CSS built successfully');
|
||||
}
|
||||
|
||||
buildTailwind().catch(err => {
|
||||
console.error('Error building tailwind CSS:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
+14
-19
@@ -2,11 +2,12 @@
|
||||
"name": "dbgate-web",
|
||||
"version": "7.0.0-alpha.1",
|
||||
"scripts": {
|
||||
"build": "yarn build:index && rollup -c",
|
||||
"dev": "yarn build:index && cross-env API_URL=http://localhost:3000 rollup -c -w",
|
||||
"build": "yarn build:index && node build-tailwind.js && cross-env NODE_ENV=production rolldown -c rolldown.config.mjs",
|
||||
"dev": "yarn build:index && node build-tailwind.js && cross-env API_URL=http://localhost:3000 rolldown -c rolldown.config.mjs -w",
|
||||
"start": "sirv public --port 5001",
|
||||
"validate": "svelte-check",
|
||||
"build:index": "node build-index.js",
|
||||
"build:tailwind": "node build-tailwind.js",
|
||||
"prepublishOnly": "yarn build"
|
||||
},
|
||||
"repository": {
|
||||
@@ -19,11 +20,6 @@
|
||||
"devDependencies": {
|
||||
"@energiency/chartjs-plugin-piechart-outlabels": "^1.3.4",
|
||||
"@mdi/font": "^7.1.96",
|
||||
"@rollup/plugin-commonjs": "^20.0.0",
|
||||
"@rollup/plugin-json": "^6.1.0",
|
||||
"@rollup/plugin-node-resolve": "^13.0.5",
|
||||
"@rollup/plugin-replace": "^3.0.0",
|
||||
"@rollup/plugin-typescript": "^8.2.5",
|
||||
"@tailwindcss/postcss": "^4.1.18",
|
||||
"@tsconfig/svelte": "^1.0.0",
|
||||
"ace-builds": "^1.36.5",
|
||||
@@ -33,11 +29,11 @@
|
||||
"chartjs-plugin-datalabels": "^2.2.0",
|
||||
"cross-env": "^7.0.3",
|
||||
"dbgate-datalib": "^7.0.0-alpha.1",
|
||||
"dbgate-query-splitter": "^4.11.9",
|
||||
"dbgate-query-splitter": "^4.12.0",
|
||||
"dbgate-rest": "^7.0.0-alpha.1",
|
||||
"dbgate-sqltree": "^7.0.0-alpha.1",
|
||||
"dbgate-tools": "^7.0.0-alpha.1",
|
||||
"dbgate-types": "^7.0.0-alpha.1",
|
||||
"dbgate-rest": "^7.0.0-alpha.1",
|
||||
"diff": "^5.0.0",
|
||||
"diff2html": "^3.4.13",
|
||||
"file-selector": "^0.2.4",
|
||||
@@ -49,39 +45,38 @@
|
||||
"postcss": "^8.5.6",
|
||||
"randomcolor": "^0.6.2",
|
||||
"resize-observer-polyfill": "^1.5.1",
|
||||
"rollup": "^2.57.0",
|
||||
"rolldown": "^1.0.0-rc.5",
|
||||
"rollup-plugin-copy": "^3.3.0",
|
||||
"rollup-plugin-css-only": "^3.1.0",
|
||||
"rollup-plugin-livereload": "^2.0.0",
|
||||
"rollup-plugin-postcss": "^4.0.2",
|
||||
"rollup-plugin-svelte": "^7.2.2",
|
||||
"rollup-plugin-terser": "^7.0.2",
|
||||
"sirv-cli": "^1.0.0",
|
||||
"sql-formatter": "^3.1.0",
|
||||
"svelte": "^4.2.20",
|
||||
"svelte-check": "^1.0.0",
|
||||
"svelte-markdown": "^0.1.4",
|
||||
"svelte-preprocess": "^4.9.5",
|
||||
"svelte-preprocess": "^6.0.0",
|
||||
"svelte-select": "^4.4.7",
|
||||
"tailwindcss": "^4.1.18",
|
||||
"tslib": "^2.3.1",
|
||||
"typescript": "^4.4.3",
|
||||
"typescript": "^5.7.0",
|
||||
"uuid": "^3.4.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@langchain/core": "^0.3.72",
|
||||
"@langchain/langgraph": "^0.4.9",
|
||||
"@langchain/openai": "^0.6.9",
|
||||
"@langchain/core": "^1.1.29",
|
||||
"@langchain/langgraph": "^1.2.0",
|
||||
"@langchain/openai": "^1.2.11",
|
||||
"@messageformat/core": "^3.4.0",
|
||||
"chartjs-plugin-zoom": "^1.2.0",
|
||||
"date-fns": "^4.1.0",
|
||||
"debug": "^4.3.4",
|
||||
"dom-to-image": "^2.6.0",
|
||||
"dompurify": "^3.3.2",
|
||||
"flatpickr": "^4.6.13",
|
||||
"fuzzy": "^0.1.3",
|
||||
"highlight.js": "^11.11.1",
|
||||
"interval-operations": "^1.0.7",
|
||||
"leaflet": "^1.8.0",
|
||||
"openai": "^5.10.1",
|
||||
"openai": "^6.24.0",
|
||||
"wellknown": "^0.5.0",
|
||||
"xml-formatter": "^3.6.4",
|
||||
"zod": "^4.1.5"
|
||||
|
||||
@@ -0,0 +1,169 @@
|
||||
import { defineConfig } from 'rolldown';
|
||||
import { replacePlugin } from 'rolldown/plugins';
|
||||
import svelte from 'rollup-plugin-svelte';
|
||||
import livereload from 'rollup-plugin-livereload';
|
||||
import copy from 'rollup-plugin-copy';
|
||||
import sveltePreprocess from 'svelte-preprocess';
|
||||
import { spawn } from 'node:child_process';
|
||||
|
||||
const production = process.env.NODE_ENV === 'production';
|
||||
|
||||
function serve() {
|
||||
let server;
|
||||
|
||||
function toExit() {
|
||||
if (server) server.kill(0);
|
||||
}
|
||||
|
||||
return {
|
||||
writeBundle() {
|
||||
if (server) return;
|
||||
server = spawn('npm', ['run', 'start', '--', '--dev'], {
|
||||
stdio: ['ignore', 'inherit', 'inherit'],
|
||||
shell: true,
|
||||
});
|
||||
|
||||
process.on('SIGTERM', toExit);
|
||||
process.on('exit', toExit);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export default defineConfig([
|
||||
// Web Worker entry
|
||||
{
|
||||
input: 'src/query/QueryParserWorker.js',
|
||||
output: {
|
||||
sourcemap: !production,
|
||||
format: 'iife',
|
||||
file: 'public/build/query-parser-worker.js',
|
||||
minify: production,
|
||||
},
|
||||
platform: 'browser',
|
||||
},
|
||||
|
||||
// Main application entry
|
||||
{
|
||||
input: 'src/main.ts',
|
||||
output: {
|
||||
sourcemap: !production,
|
||||
format: 'iife',
|
||||
name: 'app',
|
||||
dir: 'public/build',
|
||||
entryFileNames: 'bundle.js',
|
||||
cssEntryFileNames: 'bundle.css',
|
||||
minify: production,
|
||||
},
|
||||
// dbgate-types is a TypeScript-only package (no runtime code).
|
||||
// Mark it external so rolldown doesn't try to bundle it.
|
||||
external: ['dbgate-types'],
|
||||
platform: 'browser',
|
||||
resolve: {
|
||||
conditionNames: ['svelte', 'browser', 'import'],
|
||||
},
|
||||
// Shim Node's `global` for browser (used by debug, dbgate-tools getLogger, etc.)
|
||||
transform: {
|
||||
define: {
|
||||
global: 'globalThis',
|
||||
},
|
||||
},
|
||||
// Handle non-JS asset types referenced from CSS (e.g. leaflet marker images)
|
||||
moduleTypes: {
|
||||
'.png': 'dataurl',
|
||||
'.jpg': 'dataurl',
|
||||
'.gif': 'dataurl',
|
||||
'.svg': 'dataurl',
|
||||
},
|
||||
plugins: [
|
||||
// ace-builds addon files (keybinding-vim, modes, themes, etc.) reference
|
||||
// the bare `ace` global set by ace.js on window. We must ensure ace.js is
|
||||
// evaluated first, then inject a local `ace` binding for the addon code.
|
||||
{
|
||||
name: 'ace-global-shim',
|
||||
transform(code, id) {
|
||||
if (/ace-builds[\\/]src-noconflict[\\/]/.test(id) && !id.endsWith('ace.js')) {
|
||||
// Import ace.js first (triggers its IIFE which sets window.ace),
|
||||
// then bind the local `ace` variable from window.
|
||||
const shimmed = 'import "ace-builds/src-noconflict/ace";\nvar ace = window.ace;\n' + code;
|
||||
return { code: shimmed, map: null };
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
// Resolve chart.js/dist to chart.js (not exported in package.json exports field)
|
||||
{
|
||||
name: 'resolve-chartjs-dist',
|
||||
resolveId: {
|
||||
filter: { id: /chart\.js\/dist/ },
|
||||
handler(source) {
|
||||
if (source === 'chart.js/dist') {
|
||||
return this.resolve('chart.js', undefined, { skipSelf: true });
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
copy({
|
||||
targets: [
|
||||
{
|
||||
src: '../../node_modules/@mdi/font/css/materialdesignicons.css',
|
||||
dest: 'public/build/fonts/',
|
||||
},
|
||||
{
|
||||
src: '../../node_modules/@mdi/font/fonts/*',
|
||||
dest: 'public/build/fonts/',
|
||||
},
|
||||
{
|
||||
src: '../../node_modules/diff2html/bundles/css/diff2html.min.css',
|
||||
dest: 'public/build/',
|
||||
},
|
||||
],
|
||||
}),
|
||||
|
||||
replacePlugin({
|
||||
'process.env.API_URL': JSON.stringify(process.env.API_URL),
|
||||
}),
|
||||
|
||||
svelte({
|
||||
preprocess: sveltePreprocess({
|
||||
sourceMap: !production,
|
||||
typescript: {
|
||||
compilerOptions: {
|
||||
verbatimModuleSyntax: true,
|
||||
},
|
||||
},
|
||||
}),
|
||||
compilerOptions: {
|
||||
// enable run-time checks when not in production
|
||||
dev: !production,
|
||||
},
|
||||
onwarn: (warning, handler) => {
|
||||
const ignoreWarnings = [
|
||||
'a11y-click-events-have-key-events',
|
||||
'a11y-missing-attribute',
|
||||
'a11y-invalid-attribute',
|
||||
'a11y-no-noninteractive-tabindex',
|
||||
'a11y-label-has-associated-control',
|
||||
'vite-plugin-svelte-css-no-scopable-elements',
|
||||
'unused-export-let',
|
||||
];
|
||||
if (ignoreWarnings.includes(warning.code)) return;
|
||||
handler(warning);
|
||||
},
|
||||
// Let rolldown handle CSS bundling natively
|
||||
emitCss: true,
|
||||
}),
|
||||
|
||||
// In dev mode, call `npm run start` once
|
||||
// the bundle has been generated
|
||||
!production && serve(),
|
||||
|
||||
// Watch the `public` directory and refresh the
|
||||
// browser on changes when not in production
|
||||
!production && livereload('public'),
|
||||
],
|
||||
watch: {
|
||||
clearScreen: true,
|
||||
},
|
||||
},
|
||||
]);
|
||||
@@ -1,159 +0,0 @@
|
||||
import svelte from 'rollup-plugin-svelte';
|
||||
import commonjs from '@rollup/plugin-commonjs';
|
||||
import resolve from '@rollup/plugin-node-resolve';
|
||||
import livereload from 'rollup-plugin-livereload';
|
||||
import copy from 'rollup-plugin-copy';
|
||||
import { terser } from 'rollup-plugin-terser';
|
||||
import sveltePreprocess from 'svelte-preprocess';
|
||||
import typescript from '@rollup/plugin-typescript';
|
||||
import replace from '@rollup/plugin-replace';
|
||||
import css from 'rollup-plugin-css-only';
|
||||
import json from '@rollup/plugin-json';
|
||||
import postcss from 'rollup-plugin-postcss';
|
||||
|
||||
const production = !process.env.ROLLUP_WATCH;
|
||||
|
||||
function serve() {
|
||||
let server;
|
||||
|
||||
function toExit() {
|
||||
if (server) server.kill(0);
|
||||
}
|
||||
|
||||
return {
|
||||
writeBundle() {
|
||||
if (server) return;
|
||||
server = require('child_process').spawn('npm', ['run', 'start', '--', '--dev'], {
|
||||
stdio: ['ignore', 'inherit', 'inherit'],
|
||||
shell: true,
|
||||
});
|
||||
|
||||
process.on('SIGTERM', toExit);
|
||||
process.on('exit', toExit);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export default [
|
||||
// Separate entry for Tailwind CSS processing
|
||||
{
|
||||
input: 'src/tailwind.css',
|
||||
output: {
|
||||
file: 'public/build/tailwind.css',
|
||||
},
|
||||
plugins: [
|
||||
postcss({
|
||||
extract: true,
|
||||
minimize: production,
|
||||
sourceMap: !production,
|
||||
}),
|
||||
],
|
||||
},
|
||||
|
||||
{
|
||||
input: 'src/query/QueryParserWorker.js',
|
||||
output: {
|
||||
sourcemap: !production,
|
||||
format: 'iife',
|
||||
file: 'public/build/query-parser-worker.js',
|
||||
},
|
||||
plugins: [
|
||||
commonjs(),
|
||||
resolve({
|
||||
browser: true,
|
||||
}),
|
||||
|
||||
// If we're building for production (npm run build
|
||||
// instead of npm run dev), minify
|
||||
production && terser(),
|
||||
],
|
||||
},
|
||||
|
||||
{
|
||||
input: 'src/main.ts',
|
||||
output: {
|
||||
sourcemap: !production,
|
||||
format: 'iife',
|
||||
name: 'app',
|
||||
file: 'public/build/bundle.js',
|
||||
},
|
||||
plugins: [
|
||||
copy({
|
||||
targets: [
|
||||
{
|
||||
src: '../../node_modules/@mdi/font/css/materialdesignicons.css',
|
||||
dest: 'public/build/fonts/',
|
||||
},
|
||||
{
|
||||
src: '../../node_modules/@mdi/font/fonts/*',
|
||||
dest: 'public/build/fonts/',
|
||||
},
|
||||
{
|
||||
src: '../../node_modules/diff2html/bundles/css/diff2html.min.css',
|
||||
dest: 'public/build/',
|
||||
},
|
||||
],
|
||||
}),
|
||||
|
||||
replace({
|
||||
'process.env.API_URL': JSON.stringify(process.env.API_URL),
|
||||
}),
|
||||
|
||||
svelte({
|
||||
preprocess: sveltePreprocess({ sourceMap: !production }),
|
||||
compilerOptions: {
|
||||
// enable run-time checks when not in production
|
||||
dev: !production,
|
||||
},
|
||||
onwarn: (warning, handler) => {
|
||||
const ignoreWarnings = [
|
||||
'a11y-click-events-have-key-events',
|
||||
'a11y-missing-attribute',
|
||||
'a11y-invalid-attribute',
|
||||
'a11y-no-noninteractive-tabindex',
|
||||
'a11y-label-has-associated-control',
|
||||
'vite-plugin-svelte-css-no-scopable-elements',
|
||||
'unused-export-let',
|
||||
];
|
||||
if (ignoreWarnings.includes(warning.code)) return;
|
||||
// console.log('***************************', warning.code);
|
||||
handler(warning);
|
||||
},
|
||||
}),
|
||||
// we'll extract any component CSS out into
|
||||
// a separate file - better for performance
|
||||
css({ output: 'bundle.css' }),
|
||||
|
||||
// If you have external dependencies installed from
|
||||
// npm, you'll most likely need these plugins. In
|
||||
// some cases you'll need additional configuration -
|
||||
// consult the documentation for details:
|
||||
// https://github.com/rollup/plugins/tree/master/packages/commonjs
|
||||
resolve({
|
||||
browser: true,
|
||||
dedupe: ['svelte'],
|
||||
}),
|
||||
commonjs(),
|
||||
typescript({
|
||||
sourceMap: !production,
|
||||
inlineSources: !production,
|
||||
}),
|
||||
json(),
|
||||
|
||||
// In dev mode, call `npm run start` once
|
||||
// the bundle has been generated
|
||||
!production && serve(),
|
||||
|
||||
// Watch the `public` directory and refresh the
|
||||
// browser on changes when not in production
|
||||
!production && livereload('public'),
|
||||
|
||||
// If we're building for production (npm run build
|
||||
// instead of npm run dev), minify
|
||||
production && terser(),
|
||||
],
|
||||
watch: {
|
||||
clearScreen: true,
|
||||
},
|
||||
},
|
||||
];
|
||||
@@ -35,7 +35,7 @@
|
||||
<div class="not-supported" class:isElectron>
|
||||
<div class="m-5 big-icon"><FontIcon icon="img warn" /></div>
|
||||
<div class="m-3">Sorry, DbGate is not supported on mobile devices.</div>
|
||||
<div class="m-3">Please visit <a href="https://dbgate.org">DbGate web</a> for more info.</div>
|
||||
<div class="m-3">Please visit <a href="https://www.dbgate.io">DbGate web</a> for more info.</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
<script>
|
||||
import _ from 'lodash';
|
||||
import Link from '../elements/Link.svelte';
|
||||
|
||||
import { plusExpandIcon } from '../icons/expandIcons';
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,6 @@
|
||||
<script lang="ts" context="module">
|
||||
import { copyTextToClipboard } from '../utility/clipboard';
|
||||
import { _t, _tval, DefferedTranslationResult } from '../translations';
|
||||
import { _t, _tval, type DefferedTranslationResult } from '../translations';
|
||||
import sqlFormatter from 'sql-formatter';
|
||||
|
||||
export const extractKey = ({ schemaName, pureName }) => (schemaName ? `${schemaName}.${pureName}` : pureName);
|
||||
@@ -411,7 +411,8 @@
|
||||
isDropCollection: true,
|
||||
requiresWriteAccess: true,
|
||||
},
|
||||
hasPermission('dbops/table/rename') && {
|
||||
hasPermission('dbops/table/rename') &&
|
||||
!driver?.disableRenameCollection && {
|
||||
label: _t('dbObject.renameCollection', { defaultMessage: 'Rename collection/container' }),
|
||||
isRenameCollection: true,
|
||||
requiresWriteAccess: true,
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
<script context="module">
|
||||
import { __t } from '../translations';
|
||||
registerCommand({
|
||||
id: 'commandPalette.show',
|
||||
category: __t('command.commandPalette', { defaultMessage: 'Command palette' }),
|
||||
@@ -87,7 +88,7 @@
|
||||
import { getLocalStorage } from '../utility/storageCache';
|
||||
import registerCommand from './registerCommand';
|
||||
import { formatKeyText, switchCurrentDatabase } from '../utility/common';
|
||||
import { _tval, __t, _t } from '../translations';
|
||||
import { _tval, _t } from '../translations';
|
||||
import { getDriverIcon } from '../utility/driverIcons';
|
||||
import { currentThemeType } from '../plugins/themes';
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@ import { get } from 'svelte/store';
|
||||
import AboutModal from '../modals/AboutModal.svelte';
|
||||
import SqlGeneratorModal from '../modals/SqlGeneratorModal.svelte';
|
||||
import { showModal } from '../modals/modalTools';
|
||||
import newQuery, { newDiagram, newPerspective, newQueryDesign } from '../query/newQuery';
|
||||
import newQuery, { newDiagram, newPerspective, newQueryDesign, newGraphQlQuery } from '../query/newQuery';
|
||||
import saveTabFile from '../utility/saveTabFile';
|
||||
import openNewTab from '../utility/openNewTab';
|
||||
import getElectron from '../utility/getElectron';
|
||||
@@ -258,6 +258,20 @@ if (isProApp()) {
|
||||
});
|
||||
}
|
||||
|
||||
if (isProApp()) {
|
||||
registerCommand({
|
||||
id: 'new.graphqlQuery',
|
||||
category: __t('command.new', { defaultMessage: 'New' }),
|
||||
icon: 'img graphql',
|
||||
name: __t('command.new.graphqlQuery', { defaultMessage: 'GraphQL Query' }),
|
||||
menuName: __t('command.new.newGraphqlQuery', { defaultMessage: 'New GraphQL Query' }),
|
||||
onClick: () => newGraphQlQuery(),
|
||||
testEnabled: () =>
|
||||
getCurrentDatabase() &&
|
||||
findEngineDriver(getCurrentDatabase()?.connection, getExtensions())?.databaseEngineTypes?.includes('graphql'),
|
||||
});
|
||||
}
|
||||
|
||||
if (isProApp()) {
|
||||
registerCommand({
|
||||
id: 'new.application',
|
||||
@@ -752,6 +766,29 @@ if (isProApp()) {
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
registerCommand({
|
||||
id: 'graphql.chat',
|
||||
category: __t('command.database', { defaultMessage: 'Database' }),
|
||||
name: __t('command.graphql.chat', { defaultMessage: 'GraphQL chat' }),
|
||||
toolbar: true,
|
||||
icon: 'icon ai',
|
||||
testEnabled: () =>
|
||||
getCurrentDatabase() != null &&
|
||||
findEngineDriver(getCurrentDatabase()?.connection, getExtensions())?.databaseEngineTypes?.includes('graphql') &&
|
||||
hasPermission('dbops/chat'),
|
||||
onClick: () => {
|
||||
openNewTab({
|
||||
title: 'GraphQL Chat',
|
||||
icon: 'img ai',
|
||||
tabComponent: 'GraphQlChatTab',
|
||||
props: {
|
||||
conid: getCurrentDatabase()?.connection?._id,
|
||||
database: getCurrentDatabase()?.name,
|
||||
},
|
||||
});
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
if (hasPermission('settings/change')) {
|
||||
|
||||
@@ -2,16 +2,21 @@
|
||||
import { createGridCache, createGridConfig, FreeTableGridDisplay } from 'dbgate-datalib';
|
||||
import { writable } from 'svelte/store';
|
||||
|
||||
import DataGridCore from '../datagrid/DataGridCore.svelte';
|
||||
import RowsArrayGrider from '../datagrid/RowsArrayGrider';
|
||||
import ErrorInfo from '../elements/ErrorInfo.svelte';
|
||||
import LoadingInfo from '../elements/LoadingInfo.svelte';
|
||||
import ArrayDataGridCore from './ArrayDataGridCore.svelte';
|
||||
import ColumnManager from './ColumnManager.svelte';
|
||||
import HorizontalSplitter from '../elements/HorizontalSplitter.svelte';
|
||||
|
||||
export let rows;
|
||||
export let rows = [];
|
||||
export let errorMessage = null;
|
||||
export let isLoading = false;
|
||||
export let conid = null;
|
||||
export let database = null;
|
||||
export let hideGridLeftColumn = false;
|
||||
|
||||
let model = null;
|
||||
let managerSize = 220;
|
||||
let model: any = null;
|
||||
let display: any = null;
|
||||
const collapsedLeftColumnStore = writable(false);
|
||||
|
||||
const config = writable(createGridConfig());
|
||||
const cache = writable(createGridCache());
|
||||
@@ -20,14 +25,60 @@
|
||||
structure: { __isDynamicStructure: true },
|
||||
rows,
|
||||
};
|
||||
$: grider = new RowsArrayGrider(rows);
|
||||
$: display = new FreeTableGridDisplay(model, $config, config.update, $cache, cache.update);
|
||||
|
||||
$: display = new FreeTableGridDisplay(model, $config, config.update, $cache, cache.update, { filterable: true });
|
||||
</script>
|
||||
|
||||
{#if isLoading}
|
||||
<LoadingInfo wrapper message="Loading data" />
|
||||
{:else if errorMessage}
|
||||
<ErrorInfo message={errorMessage} />
|
||||
{:else if grider}
|
||||
<DataGridCore {...$$props} {grider} {display} />
|
||||
{/if}
|
||||
<div class="array-grid-wrapper">
|
||||
<HorizontalSplitter
|
||||
initialValue="220px"
|
||||
bind:size={managerSize}
|
||||
hideFirst={hideGridLeftColumn || $collapsedLeftColumnStore}
|
||||
>
|
||||
<svelte:fragment slot="1">
|
||||
<div class="column-selector">
|
||||
<ColumnManager {display} {managerSize} {conid} {database} isDynamicStructure />
|
||||
</div>
|
||||
</svelte:fragment>
|
||||
<svelte:fragment slot="2">
|
||||
<div class="grid-content">
|
||||
<ArrayDataGridCore
|
||||
{rows}
|
||||
{errorMessage}
|
||||
{isLoading}
|
||||
{hideGridLeftColumn}
|
||||
{collapsedLeftColumnStore}
|
||||
externalDisplay={display}
|
||||
/>
|
||||
</div>
|
||||
</svelte:fragment>
|
||||
</HorizontalSplitter>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.array-grid-wrapper {
|
||||
height: 100%;
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.column-selector {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.column-selector :global(.managerInnerContainer),
|
||||
.column-selector :global(.manager-inner-container) {
|
||||
max-width: none !important;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.grid-content {
|
||||
flex: 1;
|
||||
min-width: 0;
|
||||
overflow: hidden;
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -0,0 +1,136 @@
|
||||
<script lang="ts">
|
||||
import { createGridCache, createGridConfig, FreeTableGridDisplay } from 'dbgate-datalib';
|
||||
import { evaluateCondition } from 'dbgate-sqltree';
|
||||
import { writable } from 'svelte/store';
|
||||
import uuidv1 from 'uuid/v1';
|
||||
|
||||
import DataGridCore from './DataGridCore.svelte';
|
||||
import RowsArrayGrider from './RowsArrayGrider';
|
||||
import ErrorInfo from '../elements/ErrorInfo.svelte';
|
||||
import LoadingInfo from '../elements/LoadingInfo.svelte';
|
||||
import { registerMenu } from '../utility/contextMenu';
|
||||
import createQuickExportMenu from '../utility/createQuickExportMenu';
|
||||
import { exportQuickExportFile } from '../utility/exportFileTools';
|
||||
import { openImportExportTab } from '../utility/importExportTools';
|
||||
import { apiCall } from '../utility/api';
|
||||
|
||||
export let rows;
|
||||
export let errorMessage = null;
|
||||
export let isLoading = false;
|
||||
export let externalDisplay = null;
|
||||
export let formatterFunction = undefined;
|
||||
export let isLoadedAll = true;
|
||||
export let loadedTime = undefined;
|
||||
export let changeSetStore = undefined;
|
||||
export let collapsedLeftColumnStore = undefined;
|
||||
export let jslid = undefined;
|
||||
|
||||
let model = null;
|
||||
let filteredRows = [];
|
||||
let rowsForValueLookup = [];
|
||||
|
||||
const config = writable(createGridConfig());
|
||||
const cache = writable(createGridCache());
|
||||
|
||||
$: model = {
|
||||
structure: { __isDynamicStructure: true },
|
||||
rows,
|
||||
};
|
||||
$: display =
|
||||
externalDisplay ||
|
||||
new FreeTableGridDisplay(model, $config, config.update, $cache, cache.update, { filterable: true });
|
||||
$: {
|
||||
const sourceRows = rows || [];
|
||||
const condition = display?.compileJslFilters?.();
|
||||
|
||||
if (!condition) {
|
||||
filteredRows = sourceRows;
|
||||
} else {
|
||||
filteredRows = sourceRows.filter(row => {
|
||||
try {
|
||||
return !!evaluateCondition(condition, row);
|
||||
} catch {
|
||||
return true;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
$: grider = new RowsArrayGrider(filteredRows);
|
||||
$: rowsForValueLookup = getRowsForExport(rows);
|
||||
|
||||
function getRowsForExport(rows) {
|
||||
const sourceRows = rows || [];
|
||||
if (sourceRows.length === 0) return sourceRows;
|
||||
|
||||
const firstRow = sourceRows[0];
|
||||
if (firstRow && typeof firstRow === 'object' && firstRow.__isStreamHeader === true) {
|
||||
return sourceRows.slice(1);
|
||||
}
|
||||
|
||||
return sourceRows;
|
||||
}
|
||||
|
||||
async function saveRowsToTempJsl() {
|
||||
const tempJslId = uuidv1();
|
||||
await apiCall('jsldata/save-rows', {
|
||||
jslid: tempJslId,
|
||||
rows: getRowsForExport(rows),
|
||||
});
|
||||
return tempJslId;
|
||||
}
|
||||
|
||||
async function exportGrid() {
|
||||
const tempJslId = await saveRowsToTempJsl();
|
||||
const initialValues: any = {};
|
||||
initialValues.sourceStorageType = 'jsldata';
|
||||
initialValues.sourceJslId = tempJslId;
|
||||
initialValues.sourceList = ['query-data'];
|
||||
initialValues['columns_query-data'] = display.getExportColumnMap();
|
||||
openImportExportTab(initialValues);
|
||||
}
|
||||
|
||||
const quickExportHandler = fmt => async () => {
|
||||
const tempJslId = await saveRowsToTempJsl();
|
||||
await exportQuickExportFile(
|
||||
'Query',
|
||||
{
|
||||
functionName: 'jslDataReader',
|
||||
props: {
|
||||
jslid: tempJslId,
|
||||
},
|
||||
},
|
||||
fmt,
|
||||
display.getExportColumnMap()
|
||||
);
|
||||
};
|
||||
|
||||
registerMenu(() =>
|
||||
createQuickExportMenu(
|
||||
quickExportHandler,
|
||||
{
|
||||
text: 'Export advanced...',
|
||||
onClick: () => exportGrid(),
|
||||
},
|
||||
{ tag: 'export' }
|
||||
)
|
||||
);
|
||||
</script>
|
||||
|
||||
{#if isLoading}
|
||||
<LoadingInfo wrapper message="Loading data" />
|
||||
{:else if errorMessage}
|
||||
<ErrorInfo message={errorMessage} />
|
||||
{:else if grider}
|
||||
<DataGridCore
|
||||
{...$$props}
|
||||
{grider}
|
||||
{display}
|
||||
{formatterFunction}
|
||||
{isLoadedAll}
|
||||
{loadedTime}
|
||||
{changeSetStore}
|
||||
{collapsedLeftColumnStore}
|
||||
{jslid}
|
||||
passAllRows={rowsForValueLookup}
|
||||
/>
|
||||
{/if}
|
||||
@@ -1,4 +1,5 @@
|
||||
<script context="module" lang="ts">
|
||||
import { __t } from '../translations';
|
||||
const getCurrentEditor = () => getActiveComponent('CollectionDataGridCore');
|
||||
|
||||
registerCommand({
|
||||
@@ -103,17 +104,37 @@
|
||||
async function loadRowCount(props) {
|
||||
const { conid, database } = props;
|
||||
|
||||
const response = await apiCall('database-connections/collection-data', {
|
||||
conid,
|
||||
database,
|
||||
options: {
|
||||
pureName: props.pureName,
|
||||
countDocuments: true,
|
||||
condition: buildConditionForGrid(props),
|
||||
},
|
||||
});
|
||||
const timeoutPromise = new Promise((_, reject) =>
|
||||
setTimeout(() => reject(new Error('Row count query timed out')), 3000)
|
||||
);
|
||||
|
||||
return response.count;
|
||||
try {
|
||||
const response = await Promise.race([
|
||||
apiCall('database-connections/collection-data', {
|
||||
conid,
|
||||
database,
|
||||
commandTimeout: 3000,
|
||||
options: {
|
||||
pureName: props.pureName,
|
||||
countDocuments: true,
|
||||
condition: buildConditionForGrid(props),
|
||||
},
|
||||
}),
|
||||
timeoutPromise,
|
||||
]);
|
||||
|
||||
if (response && typeof response === 'object' && (response as any).errorMessage) {
|
||||
return { errorMessage: (response as any).errorMessage };
|
||||
}
|
||||
|
||||
if (response && typeof response === 'object' && typeof (response as any).count === 'number') {
|
||||
return (response as any).count;
|
||||
}
|
||||
|
||||
return { errorMessage: 'Error loading row count' };
|
||||
} catch (err) {
|
||||
return { errorMessage: err.message || 'Error loading row count' };
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -140,8 +161,6 @@
|
||||
import LoadingDataGridCore from './LoadingDataGridCore.svelte';
|
||||
import { mongoFilterBehaviour, standardFilterBehaviours } from 'dbgate-tools';
|
||||
import { openImportExportTab } from '../utility/importExportTools';
|
||||
import { __t } from '../translations';
|
||||
|
||||
export let conid;
|
||||
export let display;
|
||||
export let database;
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user