Compare commits
363 Commits
| SHA1 | Author | Date | |
|---|---|---|---|
| c51dad39e0 | |||
| 1d350a3a29 | |||
| 81e3cce070 | |||
| f9de2d77b5 | |||
| 3956eaf389 | |||
| d13e2c2d87 | |||
| ebf2371da9 | |||
| fa4b12448d | |||
| 5fe6dfa551 | |||
| 6061c8b0a5 | |||
| 1ac0aa8a3e | |||
| 5d04d7f01f | |||
| 9c97e347c5 | |||
| 22967d123d | |||
| 3fed650254 | |||
| b57b2083d3 | |||
| 1f47e8c62e | |||
| d7ce653d74 | |||
| 07c803efee | |||
| 26b6d9133e | |||
| 146084bdb3 | |||
| fa82b4630b | |||
| d00841030f | |||
| c517bb0be6 | |||
| e585d8be8f | |||
| 8be76832a5 | |||
| 99df266a3e | |||
| 5660874992 | |||
| b0dade9da3 | |||
| a533858804 | |||
| d3bcc984e7 | |||
| 99e8307a80 | |||
| 73926ea392 | |||
| 5ff24526b7 | |||
| 32ed1c57bd | |||
| f4c3a95348 | |||
| b1a908343a | |||
| 7f9d7eb36e | |||
| 30820e29fc | |||
| a85ea2e0f7 | |||
| 993e713955 | |||
| 3151e30db1 | |||
| eb5219dd68 | |||
| bb44783369 | |||
| 33b46c4db3 | |||
| 3730aae62a | |||
| 065062d58a | |||
| 7b2f58e68e | |||
| e2fc23fcf8 | |||
| 6f56ef284d | |||
| 08a644ba39 | |||
| 6ae19ac4a6 | |||
| 7761cbe81d | |||
| f981d88150 | |||
| e2a23eaa0d | |||
| 9d510b3c08 | |||
| a98f5ac45e | |||
| b989e964c0 | |||
| 3ff6eefa06 | |||
| 67fde9be3c | |||
| df7ac89723 | |||
| 358df9f53b | |||
| 02e3bfaa8a | |||
| dde74fa73b | |||
| 100e3fe75f | |||
| af7930cea2 | |||
| 6b4f6b909c | |||
| 9a6e5cd7cc | |||
| 9f64b6ec7a | |||
| 77f720e34c | |||
| 168dcb7824 | |||
| 759186a212 | |||
| 71ed7a76ea | |||
| bd939b22c7 | |||
| c327f77294 | |||
| d907d79beb | |||
| 93b879927c | |||
| 0c545d4cf9 | |||
| 95c90c1517 | |||
| cb731fa858 | |||
| 9bb3b09ecf | |||
| 7c8f541d3e | |||
| ce41687382 | |||
| 4b083dea5c | |||
| c84473c1eb | |||
| 7fc078f3e6 | |||
| cbbd538248 | |||
| 825f6e562b | |||
| a278afb260 | |||
| 2fbeea717c | |||
| c7259e4663 | |||
| 69a2669342 | |||
| 42d1ca8fd4 | |||
| 1cf52d8b39 | |||
| 6e482afab2 | |||
| ddf3295e6d | |||
| 79e087abd3 | |||
| a7cf51bdf7 | |||
| dfdb31e2f8 | |||
| 3508ddc3ca | |||
| 137fc6b928 | |||
| e6f5295420 | |||
| 2bb08921c3 | |||
| ee2d0e4c30 | |||
| c43a838572 | |||
| 17ff6a8013 | |||
| 62ad6a0d08 | |||
| 5c049fa867 | |||
| 619f17114a | |||
| 1c1431014c | |||
| 9d1d7b7e34 | |||
| f68ca1e786 | |||
| 8d16a30064 | |||
| cf601c33c0 | |||
| 588cd39d7c | |||
| 79ebfa9b7a | |||
| 0c6b2746d1 | |||
| 978972c55c | |||
| 37854fc577 | |||
| 5537e193a6 | |||
| 0d42b2b133 | |||
| 44bd7972d4 | |||
| 5143eb39f7 | |||
| cf51883b3e | |||
| 484ca0c78a | |||
| 8f5cad0e2c | |||
| 988512a571 | |||
| f8bd380051 | |||
| 281131dbba | |||
| ea3a61077a | |||
| d1a898b40d | |||
| a521a81ef0 | |||
| 2505c61975 | |||
| ab5a54dbb6 | |||
| 44ad8fa60a | |||
| 5b27a241d7 | |||
| 084019ca65 | |||
| ba147af8fe | |||
| 1b3f4db07d | |||
| c36705d458 | |||
| 0e126cb8cf | |||
| c48183a539 | |||
| 50f380dbbe | |||
| 66023a9a68 | |||
| c3fbc3354c | |||
| a7d2ed11f3 | |||
| 899aec2658 | |||
| 74e47587e2 | |||
| 6a3dc92572 | |||
| e3a4667422 | |||
| c4dd99bba9 | |||
| cb70f3c318 | |||
| 588b6f9882 | |||
| 375f69ca1e | |||
| a32e5cc139 | |||
| 8e00137751 | |||
| 003db50833 | |||
| bc519c2c20 | |||
| 3b41fa8cfa | |||
| 39ed0f6d2d | |||
| 710f796832 | |||
| 9ec5fb7263 | |||
| 407db457d5 | |||
| 0c5d2cfcd1 | |||
| 87ace375bb | |||
| d010020f3b | |||
| c60227a98f | |||
| 2824681bff | |||
| 073a3e3946 | |||
| 93e91127a0 | |||
| b60a6cff56 | |||
| 1f3b1963d9 | |||
| 4915f57abb | |||
| 97c6fc97d5 | |||
| b68421bbc3 | |||
| 2d10559754 | |||
| b398a7b546 | |||
| 1711d2102d | |||
| 97cea230f3 | |||
| b6a0fe9465 | |||
| 06c50659bb | |||
| 244b47f548 | |||
| b72a244d93 | |||
| c1e069d4dc | |||
| f99994085a | |||
| 32fd0dd78c | |||
| a557b6b2b4 | |||
| e84583c776 | |||
| a548b0d543 | |||
| de94f15383 | |||
| 7045d986ef | |||
| de7ae9cf09 | |||
| ab3d6888dc | |||
| 98a70891f3 | |||
| 52e7326a2c | |||
| bfd2e3b07a | |||
| 799f5e30d3 | |||
| d3e544c3c0 | |||
| 866fd55834 | |||
| 74ce1fba32 | |||
| a11b93b4cc | |||
| 066f2baa03 | |||
| e02396280f | |||
| a654c80746 | |||
| 3b50f4bd7c | |||
| cc1f77f5bc | |||
| 381fce4a82 | |||
| bc3be97cee | |||
| 1c389208a7 | |||
| cbeed2d3d0 | |||
| 3d974ad144 | |||
| 749042a05d | |||
| 52413b82ee | |||
| 212a7ec083 | |||
| cee94fe113 | |||
| e1ead2519a | |||
| 80330a25ac | |||
| 508470e970 | |||
| bc64b4b5c7 | |||
| 48d8494ead | |||
| 2a51d2ed96 | |||
| cfabcc7bf6 | |||
| 90fc8fd0fc | |||
| ff54533e33 | |||
| 2072f0b5ba | |||
| 6efc720a45 | |||
| c7cb1efe9c | |||
| e193531246 | |||
| 2aa53f414e | |||
| 843c15d754 | |||
| fb19582088 | |||
| 8040466cbe | |||
| 302b4d7acd | |||
| a8ccc24d46 | |||
| b2fb071a7b | |||
| 204d7b97d5 | |||
| f3da709aac | |||
| 0ab8afb838 | |||
| d50999547f | |||
| 04741b0eba | |||
| ba86fe32e7 | |||
| 9deb7d7fdc | |||
| 55eb64e5ca | |||
| a5f50f3f2b | |||
| 47214eb5b3 | |||
| 599509d417 | |||
| 9d366fc359 | |||
| 0e1ed0bde6 | |||
| 6ad7824bf2 | |||
| 1174f51c07 | |||
| 1950dda1ab | |||
| 8231b6d5be | |||
| 0feacbe6eb | |||
| 80b5f5adca | |||
| 13650f36e6 | |||
| 3f58d99069 | |||
| 0c8a025cf6 | |||
| 5014df4859 | |||
| 34a491e2ef | |||
| 884e4ca88e | |||
| a670c5e86c | |||
| af1fba79be | |||
| ac44de0bf4 | |||
| f013a241ce | |||
| 0e29a7206d | |||
| 689b3f299c | |||
| 02ccb990bd | |||
| 61fe4f0d57 | |||
| 0a920195d5 | |||
| 18896bf56d | |||
| 098c9041a0 | |||
| 61a41d8eb2 | |||
| e76073d5c8 | |||
| 8c34added7 | |||
| 66fc6b93ae | |||
| 881d5a8008 | |||
| 5d263de954 | |||
| c8d0494000 | |||
| a9b48b5aa5 | |||
| f08a951eef | |||
| 8758a4bc86 | |||
| aae328f8c8 | |||
| 1953578a33 | |||
| 543bdd79d9 | |||
| e0e1a3c8e4 | |||
| f1d84f448e | |||
| 7c5c21f15d | |||
| 41ffaeebe3 | |||
| 5d9b44b647 | |||
| a18d2c5650 | |||
| e0379bcf12 | |||
| e91242d5a2 | |||
| 8177187b3a | |||
| 6b3e1144bc | |||
| dfec88f52d | |||
| b8df67659a | |||
| 861da64581 | |||
| ab147a2cc9 | |||
| e13191e894 | |||
| 7f69ea8dc0 | |||
| ef2140696b | |||
| 4607900c3b | |||
| 3258d55796 | |||
| 35e6966c39 | |||
| 885756b259 | |||
| 5fbc1b937c | |||
| 7e444e9fc2 | |||
| c051237914 | |||
| 3855b0dd28 | |||
| afcc9e096a | |||
| f4df1fbff4 | |||
| 45b3a5af91 | |||
| f54b18e652 | |||
| b1210d19ad | |||
| 21cbcc79c6 | |||
| a7d0c8fb0f | |||
| 1e3dc54d81 | |||
| 48f294fd83 | |||
| 298ad0de4b | |||
| c7953f9231 | |||
| afd97eae7d | |||
| f4e558b7e8 | |||
| 12c99c646e | |||
| 6c1a2eedbe | |||
| 8a73216035 | |||
| c6a93f12f7 | |||
| 09f44d94b3 | |||
| c26748154a | |||
| 2474f915d4 | |||
| 53f940cd23 | |||
| 991b648854 | |||
| 663f057a9a | |||
| 61963fb824 | |||
| bdf3cf5b36 | |||
| 5cc459594b | |||
| 8d315e52df | |||
| 48a24a8704 | |||
| cdce52f0e5 | |||
| d12ccbeac4 | |||
| 0b1620105a | |||
| 2ae9c98acb | |||
| ed00848a1e | |||
| 06f7741dbf | |||
| 8d3b7cace8 | |||
| 8f0775e337 | |||
| 444cb6aa0c | |||
| b4acc19ea2 | |||
| 1ef17cd861 | |||
| e564e930e5 | |||
| a30badbbe0 | |||
| b33d21fdb3 | |||
| 78da83f7db | |||
| 8f6313d4ec | |||
| 14962a5622 | |||
| b8048e7592 | |||
| cf9823e123 | |||
| 1667dbfde0 | |||
| 416436a612 | |||
| dc1b724d8d | |||
| 080dc44175 | |||
| 3921f50feb | |||
| 6fc63be56a | |||
| 721fdf09b3 |
@@ -47,7 +47,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 3ef5290af3820e8376e2304051b439611f789a07
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro

@@ -47,7 +47,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 3ef5290af3820e8376e2304051b439611f789a07
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro

@@ -39,7 +39,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 3ef5290af3820e8376e2304051b439611f789a07
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -90,14 +90,6 @@ jobs:
prerelease: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Run `packer init` for Azure
run: |
cd ../dbgate-merged/packer
packer init ./azure-ubuntu.pkr.hcl
- name: Run `packer build` for Azure
run: |
cd ../dbgate-merged/packer
packer build ./azure-ubuntu.pkr.hcl
- name: Run `packer init` for AWS
run: |
cd ../dbgate-merged/packer
@@ -114,16 +106,6 @@ jobs:
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}}
- name: Delete old Azure VMs
run: |
cd ../dbgate-merged/packer
chmod +x delete-old-azure-images.sh
./delete-old-azure-images.sh
env:
AZURE_CLIENT_ID: ${{secrets.AZURE_CLIENT_ID}}
AZURE_CLIENT_SECRET: ${{secrets.AZURE_CLIENT_SECRET}}
AZURE_TENANT_ID: ${{secrets.AZURE_TENANT_ID}}
AZURE_SUBSCRIPTION_ID: ${{secrets.AZURE_SUBSCRIPTION_ID}}
- name: Delete old AMIs (AWS)
run: |
cd ../dbgate-merged/packer

@@ -44,7 +44,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 3ef5290af3820e8376e2304051b439611f789a07
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro

@@ -35,7 +35,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 3ef5290af3820e8376e2304051b439611f789a07
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -53,7 +53,7 @@ jobs:
cd dbgate-merged
node adjustNpmPackageJsonPremium
- name: Update npm
run: npm install -g npm@latest
run: npm install -g npm@11.5.1
- name: Remove dbmodel - should be not published
run: |
cd ..

@@ -30,7 +30,7 @@ jobs:
with:
node-version: 22.x
- name: Update npm
run: npm install -g npm@latest
run: npm install -g npm@11.5.1
- name: yarn install
run: |
yarn install
@@ -56,7 +56,10 @@ jobs:
working-directory: packages/sqltree
run: |
npm publish --tag "$NPM_TAG"

- name: Publish rest
working-directory: packages/rest
run: |
npm publish --tag "$NPM_TAG"
- name: Publish api
working-directory: packages/api
run: |

@@ -30,7 +30,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 3ef5290af3820e8376e2304051b439611f789a07
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -132,6 +132,10 @@ jobs:
image: redis
ports:
- '16011:6379'
dynamodb:
image: amazon/dynamodb-local
ports:
- '16015:8000'
mssql:
image: mcr.microsoft.com/mssql/server
ports:

@@ -23,26 +23,49 @@ jobs:
- uses: actions/checkout@v3
with:
fetch-depth: 1
- name: Checkout dbgate/dbgate-pro
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
mv dbgate-pro/* ../dbgate-pro/
cd ..
mkdir dbgate-merged
cd dbgate-pro
cd sync
yarn
node sync.js --nowatch
cd ..
- name: yarn install
run: |
cd ../dbgate-merged
yarn install
- name: Integration tests
run: |
cd ../dbgate-merged
cd integration-tests
yarn test:ci
- name: Filter parser tests
if: always()
run: |
cd ../dbgate-merged
cd packages/filterparser
yarn test:ci
- name: Datalib (perspective) tests
if: always()
run: |
cd ../dbgate-merged
cd packages/datalib
yarn test:ci
- name: Tools tests
if: always()
run: |
cd ../dbgate-merged
cd packages/tools
yarn test:ci
services:
@@ -98,3 +121,14 @@ jobs:
FIREBIRD_USE_LEGACY_AUTH: true
ports:
- '3050:3050'
mongodb:
image: mongo:4.0.12
ports:
- '27017:27017'
volumes:
- mongo-data:/data/db
- mongo-config:/data/configdb
dynamodb:
image: amazon/dynamodb-local
ports:
- '8000:8000'
Vendored
+6
-1
@@ -2,5 +2,10 @@
"jestrunner.jestCommand": "node_modules/.bin/cross-env DEVMODE=1 LOCALTEST=1 node_modules/.bin/jest",
"cSpell.words": [
"dbgate"
]
],
"chat.tools.terminal.autoApprove": {
"yarn workspace": true,
"yarn --cwd packages/rest": true,
"yarn --cwd packages/web": true
}
}
@@ -0,0 +1,9 @@
# AGENTS

## Rules

- In newly added code, always use `DBGM-00000` for message/error codes; do not introduce new numbered DBGM codes such as `DBGM-00316`.
- GUI uses Svelte4 (packages/web)
- GUI is tested with E2E tests in `e2e-tests` folder, using Cypress. Use data-testid attribute in components to make them easier to test.
- data-testid format: ComponentName_identifier. Use reasonable identifiers
- don't change content of storageModel.js - this is generated from table YAMLs with "yarn storage-json" command
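The rules above are conventions rather than code; as a rough illustration (the component name, identifier, and error message below are invented for the example), a new UI element and its Cypress selector would look roughly like this, with the placeholder `DBGM-00000` code kept in any newly thrown error:

```js
// Hypothetical Svelte markup following the data-testid convention ComponentName_identifier:
//   <button data-testid="ConnectionModal_saveButton" on:click={save}>Save</button>

// Cypress spec using the cy.testid() helper seen elsewhere in this repo:
cy.testid('ConnectionModal_saveButton').click();

// Newly added code keeps the DBGM-00000 placeholder instead of a new numbered code:
throw new Error('DBGM-00000 Connection could not be saved');
```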
+242
-39
File diff suppressed because it is too large
@@ -0,0 +1,119 @@
# CLAUDE.md

This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.

## Project Overview

DbGate is a cross-platform (no)SQL database manager supporting MySQL, PostgreSQL, SQL Server, Oracle, MongoDB, Redis, SQLite, and more. It runs as a web app (Docker/NPM), an Electron desktop app, or in a browser. The monorepo uses Yarn workspaces.

## Development Commands

```sh
yarn # install all packages (also builds TS libraries and plugins)
yarn start # run API (port 3000) + web (port 5001) concurrently
```

For more control, run these 3 commands in separate terminals:
```sh
yarn start:api # Express API on port 3000
yarn start:web # Svelte frontend on port 5001
yarn lib # watch-compile TS libraries and plugins
```

For Electron development:
```sh
yarn start:web # web on port 5001
yarn lib # watch TS libs/plugins
yarn start:app # Electron app
```

### Building

```sh
yarn build:lib # build all TS libraries (sqltree, tools, filterparser, datalib, rest)
yarn build:api # build API
yarn build:web # build web frontend
yarn ts # TypeScript type-check API and web
yarn prettier # format all source files
```

### Testing

Unit tests (in packages like `dbgate-tools`):
```sh
yarn workspace dbgate-tools test
```

Integration tests (requires Docker for database containers):
```sh
cd integration-tests
yarn test:local # run all tests
yarn test:local:path __tests__/alter-database.spec.js # run a single test file
```

E2E tests (Cypress):
```sh
yarn cy:open # open Cypress UI
cd e2e-tests && yarn cy:run:browse-data # run a specific spec headlessly
```

## Architecture

### Monorepo Structure

| Path | Package | Purpose |
|---|---|---|
| `packages/api` | `dbgate-api` | Express.js backend server |
| `packages/web` | `dbgate-web` | Svelte 4 frontend (built with Rolldown) |
| `packages/tools` | `dbgate-tools` | Shared TS utilities: SQL dumping, schema analysis, diffing, driver base classes |
| `packages/datalib` | `dbgate-datalib` | Grid display logic, changeset management, perspectives, chart definitions |
| `packages/sqltree` | `dbgate-sqltree` | SQL AST representation and dumping |
| `packages/filterparser` | `dbgate-filterparser` | Parses filter strings into SQL/Mongo conditions |
| `packages/rest` | `dbgate-rest` | REST connection support |
| `packages/types` | `dbgate-types` | TypeScript type definitions (`.d.ts` only) |
| `packages/aigwmock` | `dbgate-aigwmock` | Mock AI gateway server for E2E testing |
| `plugins/dbgate-plugin-*` | — | Database drivers and file format handlers |
| `app/` | — | Electron shell |
| `integration-tests/` | — | Jest-based DB integration tests (Docker) |
| `e2e-tests/` | — | Cypress E2E tests |
### API Backend (`packages/api`)

- Express.js server with controllers in `src/controllers/` — each file exposes REST endpoints via the `useController` utility
- Database connections run in child processes (`src/proc/`) to isolate crashes and long-running operations
- `src/shell/` contains stream-based data pipeline primitives (readers, writers, transforms) used for import/export and replication
- Plugin drivers are loaded dynamically via `requireEngineDriver`; each plugin in `plugins/` exports a driver conforming to `DriverBase` from `dbgate-tools`
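For orientation, a minimal sketch of the driver flow described above, pieced together from the calls this changeset itself uses (`requireEngineDriver`, `driver.connect`, `driver.readCollection`, `driver.close`); the connection values and collection name are placeholders, and in the real API this runs inside a database child process:

```js
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');

async function readSampleRows() {
  // Placeholder connection definition; real values come from the stored connection
  const connection = {
    server: 'localhost',
    port: 16015,
    authType: 'onpremise',
    engine: 'dynamodb@dbgate-plugin-dynamodb',
  };
  const driver = requireEngineDriver(connection); // resolve the plugin driver by engine id
  const conn = await driver.connect(connection);
  const result = await driver.readCollection(conn, { pureName: 'Album', limit: 10 });
  await driver.close(conn);
  return result.rows;
}
```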
### Frontend (`packages/web`)

- Svelte 4 components; builds with Rolldown (not Vite/Webpack)
- Global state in `src/stores.ts` using Svelte writable stores, with `writableWithStorage` / `writableWithForage` helpers for persistence
- API calls go through `src/utility/api.ts` (`apiCall`, `apiOff`, etc.) which handles auth, error display, and cache invalidation
- Tab system: each open editor/viewer is a "tab" tracked in `openedTabs` store; tab components live in `src/tabs/`
- Left-panel tree items are "AppObjects" in `src/appobj/`
- Metadata (table lists, column info) is loaded reactively via hooks in `src/utility/metadataLoaders.ts`
- Commands/keybindings are registered in `src/commands/`
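As a very rough sketch of the store and API conventions above (the import paths, helper signatures, and endpoint name here are assumptions for illustration, not taken from the codebase):

```js
// Assumed signature: writableWithStorage(storageKey, defaultValue) persists the store value
import { writableWithStorage } from '../stores';
import { apiCall } from '../utility/api';

export const lastUsedDatabase = writableWithStorage('lastUsedDatabase', null);

export async function refreshConnections() {
  // Endpoint name is illustrative only; apiCall wraps auth and error handling
  return apiCall('connections/list');
}
```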
### Plugin Architecture

Each `plugins/dbgate-plugin-*` package provides:
- **Frontend build** (`build:frontend`): bundled JS loaded by the web UI for query formatting, data rendering
- **Backend build** (`build:backend`): Node.js driver code loaded by the API for actual DB connections

Plugins are copied to `plugins/dist/` via `plugins:copydist` before building the app or Docker image.
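A hedged sketch of what the backend entry point of such a plugin exposes, inferred from how the init scripts in this changeset consume plugins (`dbgateApi.registerPlugins(plugin)`, `plugin.drivers.find(d => d.engine === ...)`); everything beyond the `drivers` array and the `engine` id is assumed:

```js
// plugins/dbgate-plugin-example/src/backend/index.js (illustrative only)
const driver = {
  engine: 'example@dbgate-plugin-example', // engine id referenced from connection definitions
  async connect(connection) {
    /* open and return a connection/pool */
  },
  async close(conn) {
    /* release the connection */
  },
  async readCollection(conn, { pureName, limit }) {
    return { rows: [] };
  },
};

module.exports = {
  drivers: [driver], // init scripts look drivers up via plugin.drivers.find(d => d.engine === '...')
};
```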
### Key Conventions

- Error/message codes use `DBGM-00000` as placeholder — do not introduce new numbered `DBGM-NNNNN` codes
- Frontend uses **Svelte 4** (not Svelte 5)
- E2E test selectors use `data-testid` attribute with format `ComponentName_identifier`
- Prettier config: single quotes, 2-space indent, 120-char line width, trailing commas ES5
- Logging via `pinomin`; pipe through `pino-pretty` for human-readable output

### Translation System

```sh
yarn translations:extract # extract new strings
yarn translations:add-missing # add missing translations
yarn translations:check # check for issues
```
@@ -13,9 +13,9 @@
<p>DbGate is cross-platform database manager. It's designed to be simple to use and effective, when working with more databases simultaneously. But there are also many advanced features like schema compare, visual query designer, chart visualisation or batch export and import.</p>
</description>

<url type="homepage">https://dbgate.org/</url>
<url type="homepage">https://www.dbgate.io/</url>
<url type="vcs-browser">https://github.com/dbgate/dbgate</url>
<url type="contact">https://dbgate.org/about/</url>
<url type="contact">https://www.dbgate.io/contact/</url>
<url type="donation">https://github.com/sponsors/dbgate</url>
<url type="bugtracker">https://github.com/dbgate/dbgate/issues</url>
@@ -400,6 +400,14 @@ function createWindow() {
},
});

mainWindow.webContents.session.webRequest.onBeforeSendHeaders(
{ urls: ['https://*.tile.openstreetmap.org/*'] },
(details, callback) => {
details.requestHeaders['Referer'] = 'https://www.dbgate.io';
callback({ requestHeaders: details.requestHeaders });
}
);

if (initialConfig['winIsMaximized']) {
mainWindow.maximize();
}
@@ -4,5 +4,6 @@ module.exports = {
mssql: true,
oracle: true,
sqlite: true,
mongo: true
mongo: true,
dynamo: true,
};
@@ -3,8 +3,58 @@ const os = require('os');
const fs = require('fs');

const baseDir = path.join(os.homedir(), '.dbgate');
const testApiPidFile = path.join(__dirname, 'tmpdata', 'test-api.pid');
const aigwmockPidFile = path.join(__dirname, 'tmpdata', 'aigwmock.pid');

function readProcessStartTime(pid) {
if (process.platform === 'linux') {
try {
const stat = fs.readFileSync(`/proc/${pid}/stat`, 'utf-8');
return stat.split(' ')[21] || null;
} catch (err) {
return null;
}
}
return null;
}

function isPidStillOurs(meta) {
if (!meta || !(meta.pid > 0)) return false;
if (process.platform === 'linux' && meta.startTime) {
const current = readProcessStartTime(meta.pid);
return current === meta.startTime;
}
return true;
}

function stopProcessByPidFile(pidFile) {
if (!fs.existsSync(pidFile)) return;
try {
const content = fs.readFileSync(pidFile, 'utf-8').trim();
let meta;
try {
meta = JSON.parse(content);
} catch (_) {
const pid = Number(content);
meta = Number.isInteger(pid) && pid > 0 ? { pid } : null;
}
if (isPidStillOurs(meta)) {
process.kill(meta.pid);
}
} catch (err) {
// ignore stale PID files and dead processes
}
try {
fs.unlinkSync(pidFile);
} catch (err) {
// ignore cleanup errors
}
}

function clearTestingData() {
stopProcessByPidFile(testApiPidFile);
stopProcessByPidFile(aigwmockPidFile);

if (fs.existsSync(path.join(baseDir, 'connections-e2etests.jsonl'))) {
fs.unlinkSync(path.join(baseDir, 'connections-e2etests.jsonl'));
}
@@ -37,6 +37,9 @@ module.exports = defineConfig({
case 'browse-data':
serverProcess = exec('yarn start:browse-data');
break;
case 'rest':
serverProcess = exec('yarn start:rest');
break;
case 'team':
serverProcess = exec('yarn start:team');
break;
@@ -52,6 +55,9 @@ module.exports = defineConfig({
case 'redis':
serverProcess = exec('yarn start:redis');
break;
case 'ai-chat':
serverProcess = exec('yarn start:ai-chat');
break;
}

await waitOn({ resources: ['http://localhost:3000'] });
@@ -0,0 +1,105 @@
Cypress.on('uncaught:exception', err => {
if (err.message.includes("Failed to execute 'importScripts' on 'WorkerGlobalScope'")) {
return false;
}
});

beforeEach(() => {
cy.visit('http://localhost:3000');
cy.viewport(1250, 900);
});

describe('Database Chat (MySQL)', () => {
it('Database chat - chart of popular genres', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_databaseChat').click();
cy.wait(1000);
cy.get('body').realType('show me chart of most popular genres');
cy.get('body').realPress('Enter');
cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.testid('chart-canvas', { timeout: 30000 }).should($c =>
expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/)
);
cy.themeshot('database-chat-chart');
});

it('Database chat - find most popular artist', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_databaseChat').click();
cy.wait(1000);
cy.get('body').realType('find most popular artist');
cy.get('body').realPress('Enter');
cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.contains('Iron Maiden', { timeout: 30000 });
cy.themeshot('database-chat-popular-artist');
});
});

describe('GraphQL Chat', () => {
it('GraphQL chat - list users', () => {
cy.contains('REST GraphQL').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_graphqlChat').click();
cy.wait(1000);
cy.get('body').realType('list all users');
cy.get('body').realPress('Enter');
cy.testid('GraphQlChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.contains('users', { timeout: 30000 });
cy.themeshot('graphql-chat-list-users');
});

it('GraphQL chat - product categories chart', () => {
cy.contains('REST GraphQL').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_graphqlChat').click();
cy.wait(1000);
cy.get('body').realType('show me a chart of product categories');
cy.get('body').realPress('Enter');
cy.testid('GraphQlChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.testid('chart-canvas', { timeout: 30000 }).should($c =>
expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/)
);
cy.themeshot('graphql-chat-categories-chart');
});

it('GraphQL chat - find most expensive product', () => {
cy.contains('REST GraphQL').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_graphqlChat').click();
cy.wait(1000);
cy.get('body').realType('find the most expensive product');
cy.get('body').realPress('Enter');
cy.testid('GraphQlChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.contains('products', { timeout: 30000 });
cy.themeshot('graphql-chat-expensive-product');
});

it('GraphQL chat - show all categories', () => {
cy.contains('REST GraphQL').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_graphqlChat').click();
cy.wait(1000);
cy.get('body').realType('show all categories');
cy.get('body').realPress('Enter');
cy.testid('GraphQlChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.contains('categories', { timeout: 30000 });
cy.themeshot('graphql-chat-all-categories');
});

it('Explain query error', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_query').click();
cy.wait(1000);
cy.get('body').realType('select * from Invoice2');
cy.contains('Execute').click();
cy.testid('MessageViewRow-explainErrorButton-1').click();
cy.testid('ChatCodeRenderer_useSqlButton', { timeout: 30000 });
cy.themeshot('explain-query-error');
});
});
@@ -512,4 +512,43 @@ describe('Data browser data', () => {
cy.testid('DataFilterControl_input_ArtistId.Name').type('mich{enter}');
cy.themeshot('data-browser-filter-by-expanded');
});

it('DynamoDB', () => {
cy.contains('Dynamo-connection').click();
cy.contains('us-east-1').click();

cy.contains('Album').click();
cy.contains('Pearl Jam').click();
cy.themeshot('dynamodb-table-data');
cy.contains('Switch to JSON').click();
cy.themeshot('dynamodb-json-view');

cy.contains('Customer').click();
cy.testid('DataFilterControl_input_CustomerId').type('<=10{enter}');
cy.contains('Rows: 10');
cy.wait(1000);
cy.contains('Helena').click().rightclick();
cy.contains('Show cell data').click();
cy.contains('City: "Prague"');
cy.themeshot('dynamodb-query-json-view');

cy.contains('Switch to JSON').click();
cy.contains('Leonie').rightclick();
cy.contains('Edit document').click();

Array.from({ length: 11 }).forEach(() => cy.realPress('ArrowDown'));
Array.from({ length: 14 }).forEach(() => cy.realPress('ArrowRight'));
Array.from({ length: 7 }).forEach(() => cy.realPress('Delete'));
cy.realType('Italy');
cy.testid('EditJsonModal_saveButton').click();

cy.contains('Helena').rightclick();
cy.contains('Delete document').click();
cy.contains('Save').click();
cy.themeshot('dynamodb-save-changes');

cy.testid('SqlObjectList_addButton').click();
cy.contains('New collection/container').click();
cy.themeshot('dynamodb-new-collection');
});
});
@@ -110,55 +110,6 @@ describe('Charts', () => {
cy.themeshot('new-object-window');
});

it.skip('Database chat - charts', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_databaseChat').click();
cy.wait(1000);
cy.get('body').realType('show me chart of most popular genres');
cy.get('body').realPress('{enter}');
cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.testid('chart-canvas', { timeout: 30000 }).should($c =>
expect($c[0].toDataURL()).to.match(/^data:image\/png;base64/)
);
cy.themeshot('database-chat-chart');
});

it.skip('Database chat', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_databaseChat').click();
cy.wait(1000);
cy.get('body').realType('find most popular artist');
cy.get('body').realPress('{enter}');
cy.testid('DatabaseChatTab_executeAllQueries', { timeout: 30000 }).click();
cy.wait(30000);
// cy.contains('Iron Maiden');
cy.themeshot('database-chat');

// cy.testid('DatabaseChatTab_promptInput').click();
// cy.get('body').realType('I need top 10 songs with the biggest income');
// cy.get('body').realPress('{enter}');
// cy.contains('Hot Girl', { timeout: 20000 });
// cy.wait(1000);
// cy.themeshot('database-chat');
});

it.skip('Explain query error', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('TabsPanel_buttonNewObject').click();
cy.testid('NewObjectModal_query').click();
cy.wait(1000);
cy.get('body').realType('select * from Invoice2');
cy.contains('Execute').click();
cy.testid('MessageViewRow-explainErrorButton-1').click();
cy.testid('ChatCodeRenderer_useSqlButton', { timeout: 30000 });
cy.themeshot('explain-query-error');
});

it('Switch language', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
@@ -52,6 +52,9 @@ function multiTest(testProps, testDefinition) {
if (localconfig.mongo && !testProps.skipMongo) {
it('MongoDB', () => testDefinition('Mongo-connection', 'my_guitar_shop', 'mongo@dbgate-plugin-mongo'));
}
if (localconfig.dynamo && !testProps.skipMongo) {
it('DynamoDB', () => testDefinition('Dynamo-connection', null, 'dynamodb@dbgate-plugin-dynamodb'));
}
}

describe('Transactions', () => {
@@ -0,0 +1,39 @@
Cypress.on('uncaught:exception', err => {
if (err.message.includes("Failed to execute 'importScripts' on 'WorkerGlobalScope'")) {
return false;
}
});

beforeEach(() => {
cy.visit('http://localhost:3000');
cy.viewport(1250, 900);
});

describe('REST API connections', () => {
it('GraphQL test', () => {
cy.contains('REST GraphQL').click();
cy.contains('products').click();
cy.testid('GraphQlExplorerNode_toggle_products').click();
cy.testid('GraphQlExplorerNode_checkbox_products.name').click();
cy.testid('GraphQlExplorerNode_checkbox_products.price').click();
cy.testid('GraphQlExplorerNode_checkbox_products.description').click();
cy.testid('GraphQlExplorerNode_checkbox_products.category').click();
cy.testid('GraphQlQueryTab_execute').click();
cy.contains('Electronics');
cy.themeshot('rest-graphql-query');
});
it('REST OpenAPI test', () => {
cy.contains('REST OpenAPI').click();
cy.contains('/api/categories').click();
cy.testid('RestApiEndpointTab_execute').click();
cy.contains('Electronics');
cy.themeshot('rest-openapi-query');
});
it('REST OData test', () => {
cy.contains('REST OData').click();
cy.contains('/Users').click();
cy.testid('ODataEndpointTab_execute').click();
cy.contains('Henry');
cy.themeshot('rest-odata-query');
});
});
@@ -5,14 +5,14 @@ services:
restart: always
environment:
POSTGRES_PASSWORD: Pwd2020Db
ports:
ports:
- 16000:5432

mariadb:
image: mariadb
command: --default-authentication-plugin=mysql_native_password
restart: always
ports:
ports:
- 16004:3306
environment:
- MYSQL_ROOT_PASSWORD=Pwd2020Db
@@ -20,21 +20,21 @@ services:
mysql-ssh-login:
build: containers/mysql-ssh-login
restart: always
ports:
ports:
- 16017:3306
- "16012:22"
- '16012:22'

mysql-ssh-keyfile:
build: containers/mysql-ssh-keyfile
restart: always
ports:
ports:
- 16007:3306
- "16008:22"
- '16008:22'

dex:
build: containers/dex
ports:
- "16009:5556"
- '16009:5556'

mongo:
image: mongo:4.4.29
@@ -50,6 +50,11 @@ services:
ports:
- 16011:6379

dynamodb:
image: amazon/dynamodb-local
ports:
- 16015:8000

mssql:
image: mcr.microsoft.com/mssql/server
restart: always
Vendored
+14
@@ -0,0 +1,14 @@
CONNECTIONS=mysql,graphql

LOCAL_AI_GATEWAY=true

LABEL_mysql=MySql-connection
SERVER_mysql=localhost
USER_mysql=root
PASSWORD_mysql=Pwd2020Db
PORT_mysql=16004
ENGINE_mysql=mysql@dbgate-plugin-mysql

LABEL_graphql=REST GraphQL
ENGINE_graphql=graphql@rest
APISERVERURL1_graphql=http://localhost:4444/graphql/noauth

Vendored
+7
-1
@@ -1,4 +1,4 @@
CONNECTIONS=mysql,postgres,mongo
CONNECTIONS=mysql,postgres,mongo,dynamo

LABEL_mysql=MySql-connection
SERVER_mysql=localhost
@@ -22,3 +22,9 @@ USER_mongo=root
PASSWORD_mongo=Pwd2020Db
PORT_mongo=16010
ENGINE_mongo=mongo@dbgate-plugin-mongo

LABEL_dynamo=Dynamo-connection
SERVER_dynamo=localhost
PORT_dynamo=16015
AUTH_TYPE_dynamo=onpremise
ENGINE_dynamo=dynamodb@dbgate-plugin-dynamodb

Vendored
+8
-1
@@ -1,4 +1,4 @@
CONNECTIONS=mysql,postgres,mssql,oracle,sqlite,mongo
CONNECTIONS=mysql,postgres,mssql,oracle,sqlite,mongo,dynamo
LOG_CONNECTION_SENSITIVE_VALUES=true

LABEL_mysql=MySql-connection
@@ -43,3 +43,10 @@ PASSWORD_mongo=Pwd2020Db
PORT_mongo=16010
ENGINE_mongo=mongo@dbgate-plugin-mongo

LABEL_dynamo=Dynamo-connection
SERVER_dynamo=localhost
PORT_dynamo=16015
AUTH_TYPE_dynamo=onpremise
DATABASE_dynamo=localhost
ENGINE_dynamo=dynamodb@dbgate-plugin-dynamodb

Vendored
+14
@@ -0,0 +1,14 @@
CONNECTIONS=odata,openapi,graphql

LABEL_odata=REST OData
ENGINE_odata=odata@rest
APISERVERURL1_odata=http://localhost:4444/odata/noauth

LABEL_openapi=REST OpenAPI
ENGINE_openapi=openapi@rest
APISERVERURL1_openapi=http://localhost:4444/openapi.json
APISERVERURL2_openapi=http://localhost:4444/openapi/noauth

LABEL_graphql=REST GraphQL
ENGINE_graphql=graphql@rest
APISERVERURL1_graphql=http://localhost:4444/graphql/noauth
@@ -0,0 +1,168 @@
const fs = require('fs');
const path = require('path');
const { spawn, spawnSync } = require('child_process');

const rootDir = path.resolve(__dirname, '..', '..');
const testApiDir = path.join(rootDir, 'test-api');
const aigwmockDir = path.join(rootDir, 'packages', 'aigwmock');
const tmpDataDir = path.resolve(__dirname, '..', 'tmpdata');
const testApiPidFile = path.join(tmpDataDir, 'test-api.pid');
const aigwmockPidFile = path.join(tmpDataDir, 'aigwmock.pid');
const isWindows = process.platform === 'win32';

const dbgateApi = require('dbgate-api');
dbgateApi.initializeApiEnvironment();
const dbgatePluginMysql = require('dbgate-plugin-mysql');
dbgateApi.registerPlugins(dbgatePluginMysql);

function delay(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}

// --- MySQL setup (same as charts init) ---

async function initMySqlDatabase(dbname, inputFile) {
const connection = {
server: process.env.SERVER_mysql,
user: process.env.USER_mysql,
password: process.env.PASSWORD_mysql,
port: process.env.PORT_mysql,
engine: 'mysql@dbgate-plugin-mysql',
};

await dbgateApi.executeQuery({
connection,
sql: `DROP DATABASE IF EXISTS ${dbname}`,
});

await dbgateApi.executeQuery({
connection,
sql: `CREATE DATABASE ${dbname}`,
});

await dbgateApi.importDatabase({
connection: { ...connection, database: dbname },
inputFile,
});
}

// --- Process management helpers ---

function readProcessStartTime(pid) {
if (process.platform === 'linux') {
try {
const stat = fs.readFileSync(`/proc/${pid}/stat`, 'utf-8');
return stat.split(' ')[21] || null;
} catch (err) {
return null;
}
}
return null;
}

function isPidStillOurs(meta) {
if (!meta || !(meta.pid > 0)) return false;
if (process.platform === 'linux' && meta.startTime) {
const current = readProcessStartTime(meta.pid);
return current === meta.startTime;
}
return true;
}

function stopProcess(pidFile) {
if (!fs.existsSync(pidFile)) return;
try {
const content = fs.readFileSync(pidFile, 'utf-8').trim();
let meta;
try {
meta = JSON.parse(content);
} catch (_) {
const pid = Number(content);
meta = Number.isInteger(pid) && pid > 0 ? { pid } : null;
}
if (isPidStillOurs(meta)) {
process.kill(meta.pid);
}
} catch (err) {
// ignore stale pid or already terminated
}
try {
fs.unlinkSync(pidFile);
} catch (err) {
// ignore
}
}

function ensureDependencies(dir, checkFile) {
if (fs.existsSync(checkFile)) return;
const command = isWindows ? 'cmd.exe' : 'yarn';
const args = isWindows ? ['/c', 'yarn install --silent'] : ['install', '--silent'];
const result = spawnSync(command, args, {
cwd: dir,
stdio: 'inherit',
env: process.env,
});
if (result.status !== 0) {
throw new Error(`DBGM-00297 Failed to install dependencies in ${dir}`);
}
}

function startBackgroundProcess(dir, pidFile, port) {
const command = isWindows ? 'cmd.exe' : 'yarn';
const args = isWindows ? ['/c', 'yarn start'] : ['start'];
const child = spawn(command, args, {
cwd: dir,
env: { ...process.env, PORT: String(port) },
detached: true,
stdio: 'ignore',
});
child.unref();
fs.mkdirSync(path.dirname(pidFile), { recursive: true });
const meta = { pid: child.pid };
const startTime = readProcessStartTime(child.pid);
if (startTime) meta.startTime = startTime;
fs.writeFileSync(pidFile, JSON.stringify(meta));
}

async function waitForReady(url, timeoutMs = 30000) {
const startedAt = Date.now();
while (Date.now() - startedAt < timeoutMs) {
try {
const response = await fetch(url);
if (response.ok) return;
} catch (err) {
// continue waiting
}
await delay(500);
}
throw new Error(`DBGM-00305 Server at ${url} did not start in time`);
}

// --- Main ---

async function run() {
// 1. Set up MyChinook MySQL database
console.log('[ai-chat init] Setting up MyChinook database...');
await initMySqlDatabase('MyChinook', path.resolve(path.join(__dirname, '../data/chinook-mysql.sql')));

// 2. Start test-api (GraphQL/REST server on port 4444)
console.log('[ai-chat init] Starting test-api on port 4444...');
stopProcess(testApiPidFile);
ensureDependencies(testApiDir, path.join(testApiDir, 'node_modules', 'swagger-jsdoc', 'package.json'));
startBackgroundProcess(testApiDir, testApiPidFile, 4444);
await waitForReady('http://localhost:4444/openapi.json');
console.log('[ai-chat init] test-api is ready');

// 3. Start aigwmock (AI Gateway mock on port 3110)
console.log('[ai-chat init] Starting aigwmock on port 3110...');
stopProcess(aigwmockPidFile);
ensureDependencies(aigwmockDir, path.join(aigwmockDir, 'node_modules', 'express', 'package.json'));
startBackgroundProcess(aigwmockDir, aigwmockPidFile, 3110);
await waitForReady('http://localhost:3110/openrouter/v1/models');
console.log('[ai-chat init] aigwmock is ready');
}

run().catch(err => {
console.error(err);
process.exit(1);
});
@@ -8,6 +8,8 @@ const dbgatePluginMysql = require('dbgate-plugin-mysql');
dbgateApi.registerPlugins(dbgatePluginMysql);
const dbgatePluginPostgres = require('dbgate-plugin-postgres');
dbgateApi.registerPlugins(dbgatePluginPostgres);
const dbgatePluginDynamodb = require('dbgate-plugin-dynamodb');
dbgateApi.registerPlugins(dbgatePluginDynamodb);

async function initMySqlDatabase(dbname, inputFile) {
await dbgateApi.executeQuery({
@@ -125,6 +127,34 @@ async function initMongoDatabase(dbname, inputDirectory) {
// });
}

async function initDynamoDatabase(inputDirectory) {
const dynamodbConnection = {
server: process.env.SERVER_dynamo,
port: process.env.PORT_dynamo,
authType: 'onpremise',
engine: 'dynamodb@dbgate-plugin-dynamodb',
};

const driver = dbgatePluginDynamodb.drivers.find(d => d.engine === 'dynamodb@dbgate-plugin-dynamodb');
const pool = await driver.connect(dynamodbConnection);
const collections = await driver.listCollections(pool);
for (const collection of collections) {
await driver.dropTable(pool, collection);
}
await driver.disconnect(pool);

for (const file of fs.readdirSync(inputDirectory)) {
const pureName = path.parse(file).name;
const src = await dbgateApi.jsonLinesReader({ fileName: path.join(inputDirectory, file) });
const dst = await dbgateApi.tableWriter({
connection: dynamodbConnection,
pureName,
createIfNotExists: true,
});
await dbgateApi.copyStream(src, dst);
}
}

const baseDir = path.join(os.homedir(), '.dbgate');

async function copyFolder(source, target) {
@@ -148,6 +178,8 @@ async function run() {
await initMongoDatabase('MgChinook', path.resolve(path.join(__dirname, '../data/chinook-jsonl')));
await initMongoDatabase('MgRivers', path.resolve(path.join(__dirname, '../data/rivers-jsonl')));

await initDynamoDatabase(path.resolve(path.join(__dirname, '../data/chinook-jsonl')));

await copyFolder(
path.resolve(path.join(__dirname, '../data/chinook-jsonl')),
path.join(baseDir, 'archive-e2etests', 'default')
@@ -7,6 +7,8 @@ const dbgatePluginMysql = require('dbgate-plugin-mysql');
dbgateApi.registerPlugins(dbgatePluginMysql);
const dbgatePluginPostgres = require('dbgate-plugin-postgres');
dbgateApi.registerPlugins(dbgatePluginPostgres);
const dbgatePluginDynamodb = require('dbgate-plugin-dynamodb');
dbgateApi.registerPlugins(dbgatePluginDynamodb);

async function createDb(connection, dropDbSql, createDbSql, database = 'my_guitar_shop', { dropDatabaseName } = {}) {
if (dropDbSql) {
@@ -125,6 +127,28 @@ async function run() {
{ dropDatabaseName: 'my_guitar_shop' }
);
}

if (localconfig.dynamo) {
const dynamodbConnection = {
server: process.env.SERVER_dynamo,
port: process.env.PORT_dynamo,
authType: 'onpremise',
engine: 'dynamodb@dbgate-plugin-dynamodb',
};

const driver = dbgatePluginDynamodb.drivers.find(d => d.engine === 'dynamodb@dbgate-plugin-dynamodb');
const pool = await driver.connect(dynamodbConnection);
const collections = await driver.listCollections(pool);
for (const collection of collections) {
await driver.dropTable(pool, collection);
}
await driver.disconnect(pool);

await dbgateApi.importDbFromFolder({
connection: dynamodbConnection,
folder: path.resolve(path.join(__dirname, '../data/my-guitar-shop')),
});
}
}

dbgateApi.runScript(run);
@@ -0,0 +1,133 @@
const fs = require('fs');
const path = require('path');
const { spawn, spawnSync } = require('child_process');

const rootDir = path.resolve(__dirname, '..', '..');
const testApiDir = path.join(rootDir, 'test-api');
const pidFile = path.resolve(__dirname, '..', 'tmpdata', 'test-api.pid');
const isWindows = process.platform === 'win32';

function delay(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}

async function waitForApiReady(timeoutMs = 30000) {
const startedAt = Date.now();

while (Date.now() - startedAt < timeoutMs) {
try {
const response = await fetch('http://localhost:4444/openapi.json');
if (response.ok) {
return;
}
} catch (err) {
// continue waiting
}

await delay(500);
}

throw new Error('DBGM-00306 test-api did not start on port 4444 in time');
}

function readProcessStartTime(pid) {
if (process.platform === 'linux') {
try {
const stat = fs.readFileSync(`/proc/${pid}/stat`, 'utf-8');
return stat.split(' ')[21] || null;
} catch (err) {
return null;
}
}
return null;
}

function isPidStillOurs(meta) {
if (!meta || !(meta.pid > 0)) return false;
if (process.platform === 'linux' && meta.startTime) {
const current = readProcessStartTime(meta.pid);
return current === meta.startTime;
}
return true;
}

function stopPreviousTestApi() {
if (!fs.existsSync(pidFile)) {
return;
}

try {
const content = fs.readFileSync(pidFile, 'utf-8').trim();
let meta;
try {
meta = JSON.parse(content);
} catch (_) {
const pid = Number(content);
meta = Number.isInteger(pid) && pid > 0 ? { pid } : null;
}
if (isPidStillOurs(meta)) {
process.kill(meta.pid);
}
} catch (err) {
// ignore stale pid file or already terminated process
}

try {
fs.unlinkSync(pidFile);
} catch (err) {
// ignore
}
}

function startTestApi() {
const command = isWindows ? 'cmd.exe' : 'yarn';
const args = isWindows ? ['/c', 'yarn start'] : ['start'];

const child = spawn(command, args, {
cwd: testApiDir,
env: {
...process.env,
PORT: '4444',
},
detached: true,
stdio: 'ignore',
});

child.unref();
fs.mkdirSync(path.dirname(pidFile), { recursive: true });
const meta = { pid: child.pid };
const startTime = readProcessStartTime(child.pid);
if (startTime) meta.startTime = startTime;
fs.writeFileSync(pidFile, JSON.stringify(meta));
}

function ensureTestApiDependencies() {
const dependencyCheckFile = path.join(testApiDir, 'node_modules', 'swagger-jsdoc', 'package.json');
if (fs.existsSync(dependencyCheckFile)) {
return;
}

const installCommand = isWindows ? 'cmd.exe' : 'yarn';
const installArgs = isWindows ? ['/c', 'yarn install --silent'] : ['install', '--silent'];
const result = spawnSync(installCommand, installArgs, {
cwd: testApiDir,
stdio: 'inherit',
env: process.env,
});

if (result.status !== 0) {
throw new Error('DBGM-00307 Failed to install test-api dependencies');
}
}

async function run() {
stopPreviousTestApi();
ensureTestApiDependencies();
startTestApi();
await waitForApiReady();
}

run().catch(err => {
console.error(err);
process.exit(1);
});
@@ -19,30 +19,36 @@
"cy:run:portal": "cypress run --spec cypress/e2e/portal.cy.js",
"cy:run:oauth": "cypress run --spec cypress/e2e/oauth.cy.js",
"cy:run:browse-data": "cypress run --spec cypress/e2e/browse-data.cy.js",
"cy:run:rest": "cypress run --spec cypress/e2e/rest.cy.js",
"cy:run:team": "cypress run --spec cypress/e2e/team.cy.js",
"cy:run:multi-sql": "cypress run --spec cypress/e2e/multi-sql.cy.js",
"cy:run:cloud": "cypress run --spec cypress/e2e/cloud.cy.js",
"cy:run:charts": "cypress run --spec cypress/e2e/charts.cy.js",
"cy:run:redis": "cypress run --spec cypress/e2e/redis.cy.js",
"cy:run:ai-chat": "cypress run --spec cypress/e2e/ai-chat.cy.js",
"start:add-connection": "node clearTestingData && cd .. && node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:portal": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/portal/.env node e2e-tests/init/portal.js && env-cmd -f e2e-tests/env/portal/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:oauth": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/oauth/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:browse-data": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/browse-data/.env node e2e-tests/init/browse-data.js && env-cmd -f e2e-tests/env/browse-data/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:rest": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/rest/.env node e2e-tests/init/rest.js && env-cmd -f e2e-tests/env/rest/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:team": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/team/.env node e2e-tests/init/team.js && env-cmd -f e2e-tests/env/team/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:multi-sql": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/multi-sql/.env node e2e-tests/init/multi-sql.js && env-cmd -f e2e-tests/env/multi-sql/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:cloud": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/cloud/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:charts": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/charts/.env node e2e-tests/init/charts.js && env-cmd -f e2e-tests/env/charts/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:redis": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/redis/.env node e2e-tests/init/redis.js && env-cmd -f e2e-tests/env/redis/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"start:ai-chat": "node clearTestingData && cd .. && env-cmd -f e2e-tests/env/ai-chat/.env node e2e-tests/init/ai-chat.js && env-cmd -f e2e-tests/env/ai-chat/.env node packer/build/bundle.js --listen-api --run-e2e-tests",
"test:add-connection": "start-server-and-test start:add-connection http://localhost:3000 cy:run:add-connection",
"test:portal": "start-server-and-test start:portal http://localhost:3000 cy:run:portal",
"test:oauth": "start-server-and-test start:oauth http://localhost:3000 cy:run:oauth",
"test:browse-data": "start-server-and-test start:browse-data http://localhost:3000 cy:run:browse-data",
"test:rest": "start-server-and-test start:rest http://localhost:3000 cy:run:rest",
"test:team": "start-server-and-test start:team http://localhost:3000 cy:run:team",
"test:multi-sql": "start-server-and-test start:multi-sql http://localhost:3000 cy:run:multi-sql",
"test:cloud": "start-server-and-test start:cloud http://localhost:3000 cy:run:cloud",
"test:charts": "start-server-and-test start:charts http://localhost:3000 cy:run:charts",
"test:redis": "start-server-and-test start:redis http://localhost:3000 cy:run:redis",
"test": "yarn test:add-connection && yarn test:portal && yarn test:oauth && yarn test:browse-data && yarn test:team && yarn test:multi-sql && yarn test:cloud && yarn test:charts && yarn test:redis",
"test:ai-chat": "start-server-and-test start:ai-chat http://localhost:3000 cy:run:ai-chat",
"test": "yarn test:add-connection && yarn test:portal && yarn test:oauth && yarn test:browse-data && yarn test:rest && yarn test:team && yarn test:multi-sql && yarn test:cloud && yarn test:charts && yarn test:redis && yarn test:ai-chat",
"test:ci": "yarn test"
},
"dependencies": {}
@@ -0,0 +1,2 @@
test-api.pid
aigwmock.pid
@@ -0,0 +1,536 @@
|
||||
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
|
||||
const crypto = require('crypto');
|
||||
const stream = require('stream');
|
||||
const { mongoDbEngine, dynamoDbEngine } = require('../engines');
|
||||
const tableWriter = require('dbgate-api/src/shell/tableWriter');
|
||||
const tableReader = require('dbgate-api/src/shell/tableReader');
|
||||
const copyStream = require('dbgate-api/src/shell/copyStream');
|
||||
|
||||
function randomCollectionName() {
|
||||
return 'test_' + crypto.randomBytes(6).toString('hex');
|
||||
}
|
||||
|
||||
const documentEngines = [
|
||||
{ label: 'MongoDB', engine: mongoDbEngine },
|
||||
{ label: 'DynamoDB', engine: dynamoDbEngine },
|
||||
];
|
||||
|
||||
async function connectEngine(engine) {
|
||||
const driver = requireEngineDriver(engine.connection);
|
||||
const conn = await driver.connect(engine.connection);
|
||||
return { driver, conn };
|
||||
}
|
||||
|
||||
async function createCollection(driver, conn, collectionName, engine) {
|
||||
if (engine.connection.engine.startsWith('dynamodb')) {
|
||||
await driver.operation(conn, {
|
||||
type: 'createCollection',
|
||||
collection: {
|
||||
name: collectionName,
|
||||
partitionKey: '_id',
|
||||
partitionKeyType: 'S',
|
||||
},
|
||||
});
|
||||
} else {
|
||||
await driver.operation(conn, {
|
||||
type: 'createCollection',
|
||||
collection: { name: collectionName },
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async function dropCollection(driver, conn, collectionName) {
|
||||
try {
|
||||
await driver.operation(conn, {
|
||||
type: 'dropCollection',
|
||||
collection: collectionName,
|
||||
});
|
||||
} catch (e) {
|
||||
// Ignore errors when dropping (collection may not exist)
|
||||
}
|
||||
}
|
||||
|
||||
async function insertDocument(driver, conn, collectionName, doc) {
|
||||
return driver.updateCollection(conn, {
|
||||
inserts: [{ pureName: collectionName, document: {}, fields: doc }],
|
||||
updates: [],
|
||||
deletes: [],
|
||||
});
|
||||
}
|
||||
|
||||
async function readAll(driver, conn, collectionName) {
|
||||
return driver.readCollection(conn, { pureName: collectionName, limit: 1000 });
|
||||
}
|
||||
|
||||
async function updateDocument(driver, conn, collectionName, condition, fields) {
|
||||
return driver.updateCollection(conn, {
|
||||
inserts: [],
|
||||
updates: [{ pureName: collectionName, condition, fields }],
|
||||
deletes: [],
|
||||
});
|
||||
}
|
||||
|
||||
async function deleteDocument(driver, conn, collectionName, condition) {
|
||||
return driver.updateCollection(conn, {
|
||||
inserts: [],
|
||||
updates: [],
|
||||
deletes: [{ pureName: collectionName, condition }],
|
||||
});
|
||||
}
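
All three write helpers funnel into the same driver.updateCollection changeSet shape; for orientation, here is a minimal hypothetical sketch of a combined call. The field shapes are copied from the helpers above, but the mixed-call usage itself is an assumption inferred from their signatures, not something the tests below rely on.

```js
// Hypothetical combined changeSet, built from the same shapes the helpers above
// pass individually; whether every driver accepts all three kinds in one call is
// an assumption.
async function applyMixedChanges(driver, conn, collectionName) {
  return driver.updateCollection(conn, {
    inserts: [{ pureName: collectionName, document: {}, fields: { _id: 'x1', name: 'New' } }],
    updates: [{ pureName: collectionName, condition: { _id: 'u1' }, fields: { name: 'Renamed' } }],
    deletes: [{ pureName: collectionName, condition: { _id: 'd1' } }],
  });
}
```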
|
||||
|
||||
describe('Collection CRUD', () => {
|
||||
describe.each(documentEngines.map(e => [e.label, e.engine]))('%s', (label, engine) => {
|
||||
let driver;
|
||||
let conn;
|
||||
let collectionName;
|
||||
|
||||
beforeAll(async () => {
|
||||
const result = await connectEngine(engine);
|
||||
driver = result.driver;
|
||||
conn = result.conn;
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
if (conn) {
|
||||
await driver.close(conn);
|
||||
}
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
collectionName = randomCollectionName();
|
||||
await createCollection(driver, conn, collectionName, engine);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await dropCollection(driver, conn, collectionName);
|
||||
});
|
||||
|
||||
// ---- INSERT ----
|
||||
|
||||
test('insert a single document', async () => {
|
||||
const res = await insertDocument(driver, conn, collectionName, {
|
||||
_id: 'doc1',
|
||||
name: 'Alice',
|
||||
age: 30,
|
||||
});
|
||||
expect(res.inserted.length).toBe(1);
|
||||
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(1);
|
||||
expect(all.rows[0].name).toBe('Alice');
|
||||
expect(all.rows[0].age).toBe(30);
|
||||
});
|
||||
|
||||
test('insert multiple documents', async () => {
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'a1', name: 'Alice' });
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'a2', name: 'Bob' });
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'a3', name: 'Charlie' });
|
||||
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(3);
|
||||
const names = all.rows.map(r => r.name).sort();
|
||||
expect(names).toEqual(['Alice', 'Bob', 'Charlie']);
|
||||
});
|
||||
|
||||
test('insert document with nested object', async () => {
|
||||
await insertDocument(driver, conn, collectionName, {
|
||||
_id: 'nested1',
|
||||
name: 'Alice',
|
||||
address: { city: 'Prague', zip: '11000' },
|
||||
});
|
||||
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(1);
|
||||
expect(all.rows[0].address.city).toBe('Prague');
|
||||
expect(all.rows[0].address.zip).toBe('11000');
|
||||
});
|
||||
|
||||
// ---- READ ----
|
||||
|
||||
test('read from empty collection returns no rows', async () => {
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(0);
|
||||
});
|
||||
|
||||
test('read with limit', async () => {
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'l1', name: 'A' });
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'l2', name: 'B' });
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'l3', name: 'C' });
|
||||
|
||||
const limited = await driver.readCollection(conn, {
|
||||
pureName: collectionName,
|
||||
limit: 2,
|
||||
});
|
||||
expect(limited.rows.length).toBe(2);
|
||||
});
|
||||
|
||||
test('count documents', async () => {
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'c1', name: 'A' });
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'c2', name: 'B' });
|
||||
|
||||
const result = await driver.readCollection(conn, {
|
||||
pureName: collectionName,
|
||||
countDocuments: true,
|
||||
});
|
||||
expect(result.count).toBe(2);
|
||||
});
|
||||
|
||||
test('count documents on empty collection returns zero', async () => {
|
||||
const result = await driver.readCollection(conn, {
|
||||
pureName: collectionName,
|
||||
countDocuments: true,
|
||||
});
|
||||
expect(result.count).toBe(0);
|
||||
});
|
||||
|
||||
// ---- UPDATE ----
|
||||
|
||||
test('update an existing document', async () => {
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'u1', name: 'Alice', age: 25 });
|
||||
|
||||
const res = await updateDocument(driver, conn, collectionName, { _id: 'u1' }, { name: 'Alice Updated' });
|
||||
expect(res.errorMessage).toBeUndefined();
|
||||
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(1);
|
||||
expect(all.rows[0].name).toBe('Alice Updated');
|
||||
});
|
||||
|
||||
test('update does not create new document', async () => {
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'u2', name: 'Bob' });
|
||||
|
||||
await updateDocument(driver, conn, collectionName, { _id: 'nonexistent' }, { name: 'Ghost' });
|
||||
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(1);
|
||||
expect(all.rows[0].name).toBe('Bob');
|
||||
});
|
||||
|
||||
test('update only specified fields', async () => {
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'u3', name: 'Carol', age: 40, city: 'London' });
|
||||
|
||||
await updateDocument(driver, conn, collectionName, { _id: 'u3' }, { age: 41 });
|
||||
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(1);
|
||||
expect(all.rows[0].name).toBe('Carol');
|
||||
expect(all.rows[0].age).toBe(41);
|
||||
expect(all.rows[0].city).toBe('London');
|
||||
});
|
||||
|
||||
// ---- DELETE ----
|
||||
|
||||
test('delete an existing document', async () => {
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'd1', name: 'Alice' });
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'd2', name: 'Bob' });
|
||||
|
||||
const res = await deleteDocument(driver, conn, collectionName, { _id: 'd1' });
|
||||
expect(res.errorMessage).toBeUndefined();
|
||||
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(1);
|
||||
expect(all.rows[0].name).toBe('Bob');
|
||||
});
|
||||
|
||||
test('delete non-existing document does not affect collection', async () => {
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'dx1', name: 'Alice' });
|
||||
|
||||
await deleteDocument(driver, conn, collectionName, { _id: 'nonexistent' });
|
||||
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(1);
|
||||
expect(all.rows[0].name).toBe('Alice');
|
||||
});
|
||||
|
||||
test('delete all documents leaves empty collection', async () => {
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'da1', name: 'A' });
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'da2', name: 'B' });
|
||||
|
||||
await deleteDocument(driver, conn, collectionName, { _id: 'da1' });
|
||||
await deleteDocument(driver, conn, collectionName, { _id: 'da2' });
|
||||
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(0);
|
||||
});
|
||||
|
||||
// ---- EDGE CASES ----
|
||||
|
||||
test('insert and read document with empty string field', async () => {
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'e1', name: '', value: 'test' });
|
||||
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(1);
|
||||
expect(all.rows[0].name).toBe('');
|
||||
expect(all.rows[0].value).toBe('test');
|
||||
});
|
||||
|
||||
test('insert and read document with numeric values', async () => {
|
||||
await insertDocument(driver, conn, collectionName, {
|
||||
_id: 'n1',
|
||||
intVal: 42,
|
||||
floatVal: 3.14,
|
||||
zero: 0,
|
||||
negative: -10,
|
||||
});
|
||||
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(1);
|
||||
expect(all.rows[0].intVal).toBe(42);
|
||||
expect(all.rows[0].floatVal).toBeCloseTo(3.14);
|
||||
expect(all.rows[0].zero).toBe(0);
|
||||
expect(all.rows[0].negative).toBe(-10);
|
||||
});
|
||||
|
||||
test('insert and read document with boolean values', async () => {
|
||||
await insertDocument(driver, conn, collectionName, {
|
||||
_id: 'b1',
|
||||
active: true,
|
||||
deleted: false,
|
||||
});
|
||||
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(1);
|
||||
expect(all.rows[0].active).toBe(true);
|
||||
expect(all.rows[0].deleted).toBe(false);
|
||||
});
|
||||
|
||||
test('reading non-existing collection returns error or empty', async () => {
|
||||
const result = await driver.readCollection(conn, {
|
||||
pureName: 'nonexistent_collection_' + crypto.randomBytes(4).toString('hex'),
|
||||
limit: 10,
|
||||
});
|
||||
// Depending on the driver, this may return an error or empty rows
|
||||
if (result.errorMessage) {
|
||||
expect(typeof result.errorMessage).toBe('string');
|
||||
} else {
|
||||
expect(result.rows.length).toBe(0);
|
||||
}
|
||||
});
|
||||
|
||||
test('replace full document via update with document field', async () => {
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'r1', name: 'Original', extra: 'data' });
|
||||
|
||||
await driver.updateCollection(conn, {
|
||||
inserts: [],
|
||||
updates: [
|
||||
{
|
||||
pureName: collectionName,
|
||||
condition: { _id: 'r1' },
|
||||
document: { _id: 'r1', name: 'Replaced' },
|
||||
fields: {},
|
||||
},
|
||||
],
|
||||
deletes: [],
|
||||
});
|
||||
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(1);
|
||||
expect(all.rows[0].name).toBe('Replaced');
|
||||
});
|
||||
|
||||
test('insert then update then delete lifecycle', async () => {
|
||||
// Insert
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'life1', name: 'Lifecycle', status: 'created' });
|
||||
let all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(1);
|
||||
expect(all.rows[0].status).toBe('created');
|
||||
|
||||
// Update
|
||||
await updateDocument(driver, conn, collectionName, { _id: 'life1' }, { status: 'updated' });
|
||||
all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows[0].status).toBe('updated');
|
||||
|
||||
// Delete
|
||||
await deleteDocument(driver, conn, collectionName, { _id: 'life1' });
|
||||
all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
function createDocumentImportStream(documents) {
|
||||
const pass = new stream.PassThrough({ objectMode: true });
|
||||
pass.write({ __isStreamHeader: true, __isDynamicStructure: true });
|
||||
for (const doc of documents) {
|
||||
pass.write(doc);
|
||||
}
|
||||
pass.end();
|
||||
return pass;
|
||||
}
|
||||
|
||||
function createExportStream() {
|
||||
const writable = new stream.Writable({ objectMode: true });
|
||||
writable.resultArray = [];
|
||||
writable._write = (chunk, encoding, callback) => {
|
||||
writable.resultArray.push(chunk);
|
||||
callback();
|
||||
};
|
||||
return writable;
|
||||
}
|
||||
|
||||
describe('Collection Import/Export', () => {
|
||||
describe.each(documentEngines.map(e => [e.label, e.engine]))('%s', (label, engine) => {
|
||||
let driver;
|
||||
let conn;
|
||||
let collectionName;
|
||||
|
||||
beforeAll(async () => {
|
||||
const result = await connectEngine(engine);
|
||||
driver = result.driver;
|
||||
conn = result.conn;
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
if (conn) {
|
||||
await driver.close(conn);
|
||||
}
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
collectionName = randomCollectionName();
|
||||
await createCollection(driver, conn, collectionName, engine);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await dropCollection(driver, conn, collectionName);
|
||||
});
|
||||
|
||||
test('import documents via stream', async () => {
|
||||
const documents = [
|
||||
{ _id: 'imp1', name: 'Alice', age: 30 },
|
||||
{ _id: 'imp2', name: 'Bob', age: 25 },
|
||||
{ _id: 'imp3', name: 'Charlie', age: 35 },
|
||||
];
|
||||
|
||||
const reader = createDocumentImportStream(documents);
|
||||
const writer = await tableWriter({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
pureName: collectionName,
|
||||
createIfNotExists: true,
|
||||
});
|
||||
await copyStream(reader, writer);
|
||||
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(3);
|
||||
const names = all.rows.map(r => r.name).sort();
|
||||
expect(names).toEqual(['Alice', 'Bob', 'Charlie']);
|
||||
});
|
||||
|
||||
test('export documents via stream', async () => {
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'exp1', name: 'Alice', city: 'Prague' });
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'exp2', name: 'Bob', city: 'Vienna' });
|
||||
await insertDocument(driver, conn, collectionName, { _id: 'exp3', name: 'Charlie', city: 'Berlin' });
|
||||
|
||||
const reader = await tableReader({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
pureName: collectionName,
|
||||
});
|
||||
const writer = createExportStream();
|
||||
await copyStream(reader, writer);
|
||||
|
||||
const rows = writer.resultArray.filter(x => !x.__isStreamHeader);
|
||||
expect(rows.length).toBe(3);
|
||||
const names = rows.map(r => r.name).sort();
|
||||
expect(names).toEqual(['Alice', 'Bob', 'Charlie']);
|
||||
});
|
||||
|
||||
test('import then export round-trip', async () => {
|
||||
const documents = [
|
||||
{ _id: 'rt1', name: 'Alice', value: 100 },
|
||||
{ _id: 'rt2', name: 'Bob', value: 200 },
|
||||
{ _id: 'rt3', name: 'Charlie', value: 300 },
|
||||
{ _id: 'rt4', name: 'Diana', value: 400 },
|
||||
];
|
||||
|
||||
// Import
|
||||
const importReader = createDocumentImportStream(documents);
|
||||
const importWriter = await tableWriter({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
pureName: collectionName,
|
||||
createIfNotExists: true,
|
||||
});
|
||||
await copyStream(importReader, importWriter);
|
||||
|
||||
// Export
|
||||
const exportReader = await tableReader({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
pureName: collectionName,
|
||||
});
|
||||
const exportWriter = createExportStream();
|
||||
await copyStream(exportReader, exportWriter);
|
||||
|
||||
const rows = exportWriter.resultArray.filter(x => !x.__isStreamHeader);
|
||||
expect(rows.length).toBe(4);
|
||||
|
||||
const sortedRows = rows.sort((a, b) => a._id.localeCompare(b._id));
|
||||
for (const doc of documents) {
|
||||
const found = sortedRows.find(r => r._id === doc._id);
|
||||
expect(found).toBeDefined();
|
||||
expect(found.name).toBe(doc.name);
|
||||
expect(found.value).toBe(doc.value);
|
||||
}
|
||||
});
|
||||
|
||||
test('import documents with nested objects', async () => {
|
||||
const documents = [
|
||||
{ _id: 'nest1', name: 'Alice', address: { city: 'Prague', zip: '11000' } },
|
||||
{ _id: 'nest2', name: 'Bob', address: { city: 'Vienna', zip: '1010' } },
|
||||
];
|
||||
|
||||
const reader = createDocumentImportStream(documents);
|
||||
const writer = await tableWriter({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
pureName: collectionName,
|
||||
createIfNotExists: true,
|
||||
});
|
||||
await copyStream(reader, writer);
|
||||
|
||||
const all = await readAll(driver, conn, collectionName);
|
||||
expect(all.rows.length).toBe(2);
|
||||
|
||||
const alice = all.rows.find(r => r.name === 'Alice');
|
||||
expect(alice.address.city).toBe('Prague');
|
||||
expect(alice.address.zip).toBe('11000');
|
||||
});
|
||||
|
||||
test('import many documents', async () => {
|
||||
const documents = [];
|
||||
for (let i = 0; i < 150; i++) {
|
||||
documents.push({ _id: `many${i}`, name: `Name${i}`, index: i });
|
||||
}
|
||||
|
||||
const reader = createDocumentImportStream(documents);
|
||||
const writer = await tableWriter({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
pureName: collectionName,
|
||||
createIfNotExists: true,
|
||||
});
|
||||
await copyStream(reader, writer);
|
||||
|
||||
const result = await driver.readCollection(conn, {
|
||||
pureName: collectionName,
|
||||
countDocuments: true,
|
||||
});
|
||||
expect(result.count).toBe(150);
|
||||
});
|
||||
|
||||
test('export empty collection returns no data rows', async () => {
|
||||
const reader = await tableReader({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
pureName: collectionName,
|
||||
});
|
||||
const writer = createExportStream();
|
||||
await copyStream(reader, writer);
|
||||
|
||||
const rows = writer.resultArray.filter(x => !x.__isStreamHeader);
|
||||
expect(rows.length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -123,5 +123,22 @@ services:
|
||||
retries: 3
|
||||
start_period: 40s
|
||||
|
||||
mongodb:
|
||||
image: mongo:4.0.12
|
||||
restart: always
|
||||
volumes:
|
||||
- mongo-data:/data/db
|
||||
- mongo-config:/data/configdb
|
||||
ports:
|
||||
- 27017:27017
|
||||
|
||||
dynamodb:
|
||||
image: amazon/dynamodb-local
|
||||
restart: always
|
||||
ports:
|
||||
- 8000:8000
|
||||
|
||||
volumes:
|
||||
firebird-data:
|
||||
mongo-data:
|
||||
mongo-config:
|
||||
|
||||
@@ -738,6 +738,27 @@ const firebirdEngine = {
|
||||
skipDropReferences: true,
|
||||
};
|
||||
|
||||
/** @type {import('dbgate-types').TestEngineInfo} */
|
||||
const mongoDbEngine = {
|
||||
label: 'MongoDB',
|
||||
connection: {
|
||||
engine: 'mongo@dbgate-plugin-mongo',
|
||||
server: 'localhost',
|
||||
port: 27017,
|
||||
},
|
||||
};
|
||||
|
||||
/** @type {import('dbgate-types').TestEngineInfo} */
|
||||
const dynamoDbEngine = {
|
||||
label: 'DynamoDB',
|
||||
connection: {
|
||||
engine: 'dynamodb@dbgate-plugin-dynamodb',
|
||||
server: 'localhost',
|
||||
port: 8000,
|
||||
authType: 'onpremise',
|
||||
},
|
||||
};
|
||||
|
||||
const enginesOnCi = [
|
||||
// all engines that run on GitHub Actions
|
||||
mysqlEngine,
|
||||
@@ -788,3 +809,5 @@ module.exports.libsqlFileEngine = libsqlFileEngine;
|
||||
module.exports.libsqlWsEngine = libsqlWsEngine;
|
||||
module.exports.duckdbEngine = duckdbEngine;
|
||||
module.exports.firebirdEngine = firebirdEngine;
|
||||
module.exports.mongoDbEngine = mongoDbEngine;
|
||||
module.exports.dynamoDbEngine = dynamoDbEngine;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "dbgate-integration-tests",
|
||||
"version": "7.0.0-alpha.1",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"homepage": "https://www.dbgate.io/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
|
||||
const engines = require('./engines');
|
||||
const { mongoDbEngine, dynamoDbEngine } = require('./engines');
|
||||
global.DBGATE_PACKAGES = {
|
||||
'dbgate-tools': require('dbgate-tools'),
|
||||
'dbgate-sqltree': require('dbgate-sqltree'),
|
||||
@@ -9,7 +10,7 @@ global.DBGATE_PACKAGES = {
|
||||
async function connectEngine(engine) {
|
||||
const { connection } = engine;
|
||||
const driver = requireEngineDriver(connection);
|
||||
for (;;) {
|
||||
for (; ;) {
|
||||
try {
|
||||
const conn = await driver.connect(connection);
|
||||
await driver.getVersion(conn);
|
||||
@@ -26,7 +27,8 @@ async function connectEngine(engine) {
|
||||
|
||||
async function run() {
|
||||
await new Promise(resolve => setTimeout(resolve, 10000));
|
||||
await Promise.all(engines.map(engine => connectEngine(engine)));
|
||||
const documentEngines = [mongoDbEngine, dynamoDbEngine];
|
||||
await Promise.all([...engines, ...documentEngines].map(engine => connectEngine(engine)));
|
||||
}
|
||||
|
||||
run();
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"private": true,
|
||||
"version": "7.0.6",
|
||||
"version": "7.1.8",
|
||||
"name": "dbgate-all",
|
||||
"workspaces": [
|
||||
"packages/*",
|
||||
@@ -30,13 +30,15 @@
|
||||
"start:web": "yarn workspace dbgate-web dev",
|
||||
"start:sqltree": "yarn workspace dbgate-sqltree start",
|
||||
"start:tools": "yarn workspace dbgate-tools start",
|
||||
"start:rest": "yarn workspace dbgate-rest start",
|
||||
"start:datalib": "yarn workspace dbgate-datalib start",
|
||||
"start:filterparser": "yarn workspace dbgate-filterparser start",
|
||||
"build:sqltree": "yarn workspace dbgate-sqltree build",
|
||||
"build:datalib": "yarn workspace dbgate-datalib build",
|
||||
"build:filterparser": "yarn workspace dbgate-filterparser build",
|
||||
"build:tools": "yarn workspace dbgate-tools build",
|
||||
"build:lib": "yarn build:sqltree && yarn build:tools && yarn build:filterparser && yarn build:datalib",
|
||||
"build:rest": "yarn workspace dbgate-rest build",
|
||||
"build:lib": "yarn build:sqltree && yarn build:tools && yarn build:filterparser && yarn build:datalib && yarn build:rest",
|
||||
"build:app": "yarn plugins:copydist && cd app && yarn install && yarn build",
|
||||
"build:api": "yarn workspace dbgate-api build",
|
||||
"build:api:doc": "yarn workspace dbgate-api build:doc",
|
||||
@@ -63,7 +65,7 @@
|
||||
"prepare:packer": "yarn plugins:copydist && yarn build:web && yarn build:api && yarn copy:packer:build",
|
||||
"build:e2e": "yarn build:lib && yarn prepare:packer",
|
||||
"start": "concurrently --kill-others-on-fail \"yarn start:api\" \"yarn start:web\"",
|
||||
"lib": "concurrently --kill-others-on-fail \"yarn start:sqltree\" \"yarn start:filterparser\" \"yarn start:datalib\" \"yarn start:tools\" \"yarn build:plugins:frontend:watch\"",
|
||||
"lib": "concurrently --kill-others-on-fail \"yarn start:sqltree\" \"yarn start:filterparser\" \"yarn start:datalib\" \"yarn start:tools\" \"yarn start:rest\" \"yarn build:plugins:frontend:watch\"",
|
||||
"ts:api": "yarn workspace dbgate-api ts",
|
||||
"ts:web": "yarn workspace dbgate-web ts",
|
||||
"ts": "yarn ts:api && yarn ts:web",
|
||||
|
||||
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"name": "dbgate-aigwmock",
|
||||
"version": "1.0.0",
|
||||
"description": "Mock AI Gateway server for E2E testing",
|
||||
"main": "src/index.js",
|
||||
"scripts": {
|
||||
"start": "node src/index.js"
|
||||
},
|
||||
"license": "GPL-3.0",
|
||||
"dependencies": {
|
||||
"cors": "^2.8.6",
|
||||
"express": "^5.2.1"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,202 @@
|
||||
const express = require('express');
|
||||
const cors = require('cors');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
const app = express();
|
||||
app.use(cors());
|
||||
app.use(express.json({ limit: '50mb' }));
|
||||
|
||||
const responses = JSON.parse(fs.readFileSync(path.join(__dirname, 'mockResponses.json'), 'utf-8'));
|
||||
|
||||
let callCounter = 0;
|
||||
|
||||
// GET /openrouter/v1/models
|
||||
app.get('/openrouter/v1/models', (req, res) => {
|
||||
res.json({
|
||||
data: [{ id: 'mock-model', name: 'Mock Model' }],
|
||||
preferredModel: 'mock-model',
|
||||
});
|
||||
});
|
||||
|
||||
// POST /openrouter/v1/chat/completions
|
||||
app.post('/openrouter/v1/chat/completions', (req, res) => {
|
||||
const messages = req.body.messages || [];
|
||||
|
||||
// Find the first user message (skip system messages)
|
||||
const userMessage = messages.find(m => m.role === 'user');
|
||||
if (!userMessage) {
|
||||
return streamTextResponse(res, "I don't have enough context to help. Please ask a question.");
|
||||
}
|
||||
|
||||
// Count assistant messages to determine the current step
|
||||
const assistantCount = messages.filter(m => m.role === 'assistant').length;
|
||||
|
||||
// Find matching scenario by regex
|
||||
const scenario = responses.scenarios.find(s => {
|
||||
const regex = new RegExp(s.match, 'i');
|
||||
return regex.test(userMessage.content);
|
||||
});
|
||||
|
||||
if (!scenario) {
|
||||
console.log(`[aigwmock] No scenario matched for: "${userMessage.content}"`);
|
||||
return streamTextResponse(res, "I'm a mock AI assistant. I don't have a prepared response for that question.");
|
||||
}
|
||||
|
||||
const step = scenario.steps[assistantCount];
|
||||
if (!step) {
|
||||
console.log(`[aigwmock] No more steps for scenario (step ${assistantCount})`);
|
||||
return streamTextResponse(res, "I've completed my analysis of this topic.");
|
||||
}
|
||||
|
||||
console.log(`[aigwmock] Scenario matched: "${scenario.match}", step ${assistantCount}, type: ${step.type}`);
|
||||
|
||||
if (step.type === 'tool_calls') {
|
||||
return streamToolCallResponse(res, step.tool_calls);
|
||||
} else {
|
||||
return streamTextResponse(res, step.content);
|
||||
}
|
||||
});
|
||||
|
||||
function streamTextResponse(res, content) {
|
||||
res.writeHead(200, {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
Connection: 'keep-alive',
|
||||
});
|
||||
|
||||
const id = `chatcmpl-mock-${Date.now()}`;
|
||||
const created = Math.floor(Date.now() / 1000);
|
||||
|
||||
// Split content into chunks for realistic streaming
|
||||
const chunkSize = 20;
|
||||
const chunks = [];
|
||||
for (let i = 0; i < content.length; i += chunkSize) {
|
||||
chunks.push(content.substring(i, i + chunkSize));
|
||||
}
|
||||
|
||||
// Send initial role chunk
|
||||
writeSSE(res, {
|
||||
id,
|
||||
object: 'chat.completion.chunk',
|
||||
created,
|
||||
model: 'mock-model',
|
||||
choices: [{ index: 0, delta: { role: 'assistant', content: '' }, finish_reason: null }],
|
||||
});
|
||||
|
||||
// Send content chunks
|
||||
for (const chunk of chunks) {
|
||||
writeSSE(res, {
|
||||
id,
|
||||
object: 'chat.completion.chunk',
|
||||
created,
|
||||
model: 'mock-model',
|
||||
choices: [{ index: 0, delta: { content: chunk }, finish_reason: null }],
|
||||
});
|
||||
}
|
||||
|
||||
// Send finish
|
||||
writeSSE(res, {
|
||||
id,
|
||||
object: 'chat.completion.chunk',
|
||||
created,
|
||||
model: 'mock-model',
|
||||
choices: [{ index: 0, delta: {}, finish_reason: 'stop' }],
|
||||
});
|
||||
|
||||
res.write('data: [DONE]\n\n');
|
||||
res.end();
|
||||
}
|
||||
|
||||
function streamToolCallResponse(res, toolCalls) {
|
||||
res.writeHead(200, {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
Connection: 'keep-alive',
|
||||
});
|
||||
|
||||
const id = `chatcmpl-mock-${Date.now()}`;
|
||||
const created = Math.floor(Date.now() / 1000);
|
||||
|
||||
for (let i = 0; i < toolCalls.length; i++) {
|
||||
const tc = toolCalls[i];
|
||||
const callId = `call_mock_${++callCounter}`;
|
||||
const args = JSON.stringify(tc.arguments);
|
||||
|
||||
if (i === 0) {
|
||||
// First tool call: include role
|
||||
writeSSE(res, {
|
||||
id,
|
||||
object: 'chat.completion.chunk',
|
||||
created,
|
||||
model: 'mock-model',
|
||||
choices: [
|
||||
{
|
||||
index: 0,
|
||||
delta: {
|
||||
role: 'assistant',
|
||||
content: null,
|
||||
tool_calls: [{ index: i, id: callId, type: 'function', function: { name: tc.name, arguments: '' } }],
|
||||
},
|
||||
finish_reason: null,
|
||||
},
|
||||
],
|
||||
});
|
||||
} else {
|
||||
// Additional tool calls
|
||||
writeSSE(res, {
|
||||
id,
|
||||
object: 'chat.completion.chunk',
|
||||
created,
|
||||
model: 'mock-model',
|
||||
choices: [
|
||||
{
|
||||
index: 0,
|
||||
delta: {
|
||||
tool_calls: [{ index: i, id: callId, type: 'function', function: { name: tc.name, arguments: '' } }],
|
||||
},
|
||||
finish_reason: null,
|
||||
},
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
// Stream the arguments
|
||||
writeSSE(res, {
|
||||
id,
|
||||
object: 'chat.completion.chunk',
|
||||
created,
|
||||
model: 'mock-model',
|
||||
choices: [
|
||||
{
|
||||
index: 0,
|
||||
delta: {
|
||||
tool_calls: [{ index: i, function: { arguments: args } }],
|
||||
},
|
||||
finish_reason: null,
|
||||
},
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
// Send finish with tool_calls reason
|
||||
writeSSE(res, {
|
||||
id,
|
||||
object: 'chat.completion.chunk',
|
||||
created,
|
||||
model: 'mock-model',
|
||||
choices: [{ index: 0, delta: {}, finish_reason: 'tool_calls' }],
|
||||
});
|
||||
|
||||
res.write('data: [DONE]\n\n');
|
||||
res.end();
|
||||
}
|
||||
|
||||
function writeSSE(res, data) {
|
||||
res.write(`data: ${JSON.stringify(data)}\n\n`);
|
||||
}
|
||||
|
||||
const port = process.env.PORT || 3110;
|
||||
app.listen(port, () => {
|
||||
console.log(`[aigwmock] AI Gateway mock server listening on port ${port}`);
|
||||
});
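
For reference, a minimal sketch of a client consuming this mock's stream (assumes Node 18+ with a global fetch; the endpoint, default port and payload shape are taken from the handlers above):

```js
// Naive SSE consumer for the mock gateway above. It assumes each "data: {...}\n\n"
// frame arrives whole, which holds for this mock's small writes but is not a
// general-purpose SSE parser.
async function askMock(question) {
  const res = await fetch('http://localhost:3110/openrouter/v1/chat/completions', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ messages: [{ role: 'user', content: question }] }),
  });
  const decoder = new TextDecoder();
  let text = '';
  for await (const chunk of res.body) {
    for (const line of decoder.decode(chunk, { stream: true }).split('\n')) {
      if (!line.startsWith('data: ') || line.includes('[DONE]')) continue;
      const parsed = JSON.parse(line.slice('data: '.length));
      text += parsed.choices?.[0]?.delta?.content || '';
    }
  }
  return text;
}
```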
|
||||
@@ -0,0 +1,193 @@
|
||||
{
|
||||
"scenarios": [
|
||||
{
|
||||
"match": "chart.*popular.*genre|popular.*genre.*chart|most popular genre",
|
||||
"steps": [
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "get_table_schema", "arguments": { "table": "Genre" } }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "get_table_schema", "arguments": { "table": "Track" } }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{
|
||||
"name": "execute_sql_select",
|
||||
"arguments": {
|
||||
"sql": "SELECT g.Name AS genre, COUNT(t.TrackId) AS track_count FROM Genre g JOIN Track t ON g.GenreId = t.GenreId GROUP BY g.Name ORDER BY track_count DESC LIMIT 10"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": "Here is a chart showing the most popular genres by track count:\n\n```chart\n{\"type\":\"bar\",\"data\":{\"labels\":[\"Rock\",\"Latin\",\"Metal\",\"Alternative & Punk\",\"Jazz\",\"Blues\",\"Classical\",\"R&B/Soul\",\"Reggae\",\"Pop\"],\"datasets\":[{\"label\":\"Track Count\",\"data\":[1297,579,374,332,130,81,74,61,58,48]}]},\"options\":{\"plugins\":{\"title\":{\"display\":true,\"text\":\"Most Popular Genres by Track Count\"}}}}\n```"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"match": "most popular artist|popular artist|top artist",
|
||||
"steps": [
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "get_table_schema", "arguments": { "table": "Artist" } }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "get_table_schema", "arguments": { "table": "Album" } }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "get_table_schema", "arguments": { "table": "Track" } }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{
|
||||
"name": "execute_sql_select",
|
||||
"arguments": {
|
||||
"sql": "SELECT ar.Name AS artist, COUNT(t.TrackId) AS track_count FROM Artist ar JOIN Album al ON ar.ArtistId = al.ArtistId JOIN Track t ON al.AlbumId = t.AlbumId GROUP BY ar.Name ORDER BY track_count DESC LIMIT 10"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": "The most popular artist by number of tracks is **Iron Maiden** with 213 tracks, followed by **U2** with 135 tracks and **Led Zeppelin** with 114 tracks."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"match": "list.*user|show.*user|get.*user",
|
||||
"steps": [
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "graphql_introspect_schema", "arguments": {} }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{
|
||||
"name": "execute_graphql_query",
|
||||
"arguments": {
|
||||
"query": "{ users { id firstName lastName email } }"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": "Here are the users from the GraphQL API. The system contains multiple registered users with their names and email addresses."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"match": "chart.*product.*categor|product.*categor.*chart|chart.*categor",
|
||||
"steps": [
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "graphql_introspect_schema", "arguments": {} }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{
|
||||
"name": "execute_graphql_query",
|
||||
"arguments": {
|
||||
"query": "{ products { category } }"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": "Here is a bar chart showing the distribution of products across categories:\n\n```chart\n{\"type\":\"bar\",\"data\":{\"labels\":[\"Electronics\",\"Clothing\",\"Books\",\"Home & Garden\",\"Sports\",\"Toys\"],\"datasets\":[{\"label\":\"Number of Products\",\"data\":[35,30,33,38,32,32]}]},\"options\":{\"plugins\":{\"title\":{\"display\":true,\"text\":\"Products by Category\"}}}}\n```"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"match": "most expensive product|expensive.*product|highest price",
|
||||
"steps": [
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "graphql_introspect_schema", "arguments": {} }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{
|
||||
"name": "execute_graphql_query",
|
||||
"arguments": {
|
||||
"query": "{ products { id name price category } }"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": "Based on the query results, I found the most expensive product in the system. The product details are shown in the query results above."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"match": "show.*categor|list.*categor|all.*categor",
|
||||
"steps": [
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "graphql_introspect_schema", "arguments": {} }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{
|
||||
"name": "execute_graphql_query",
|
||||
"arguments": {
|
||||
"query": "{ categories { id name description active } }"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": "Here are all the categories available in the system. Each category has a name, description, and active status indicating whether it is currently in use."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"match": "Explain the following error|doesn't exist|does not exist",
|
||||
"steps": [
|
||||
{
|
||||
"type": "tool_calls",
|
||||
"tool_calls": [
|
||||
{ "name": "get_table_schema", "arguments": { "table": "Invoice" } }
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": "The error occurs because the table `Invoice2` does not exist in the `MyChinook` database. The correct table name is `Invoice`. Here is the corrected query:\n\n```sql\nSELECT * FROM Invoice\n```\n\nThe table name had a typo — `Invoice2` instead of `Invoice`. The `Invoice` table contains columns like `InvoiceId`, `CustomerId`, `InvoiceDate`, `Total`, and billing address fields."
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
DEVMODE=1
|
||||
DEVWEB=1
|
||||
|
||||
CONNECTIONS=mysql,postgres,mongo,redis,mssql,oracle
|
||||
CONNECTIONS=mysql,postgres,mongo,redis,mssql,oracle,mongourl
|
||||
|
||||
LABEL_mysql=MySql
|
||||
SERVER_mysql=dbgatedckstage1.sprinx.cz
|
||||
@@ -43,6 +44,10 @@ PORT_oracle=1521
|
||||
ENGINE_oracle=oracle@dbgate-plugin-oracle
|
||||
SERVICE_NAME_oracle=xe
|
||||
|
||||
LABEL_mongourl=Mongo URL
|
||||
URL_mongourl=mongodb://root:Pwd2020Db@dbgatedckstage1.sprinx.cz:27017
|
||||
ENGINE_mongourl=mongo@dbgate-plugin-mongo
|
||||
|
||||
# SETTINGS_dataGrid.showHintColumns=1
|
||||
|
||||
# docker run -p 3000:3000 -e CONNECTIONS=mongo -e URL_mongo=mongodb://localhost:27017 -e ENGINE_mongo=mongo@dbgate-plugin-mongo -e LABEL_mongo=mongo dbgate/dbgate:beta
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"name": "dbgate-api",
|
||||
"main": "src/index.js",
|
||||
"version": "7.0.0-alpha.1",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"homepage": "https://www.dbgate.io/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
@@ -31,7 +31,8 @@
|
||||
"cors": "^2.8.5",
|
||||
"cross-env": "^6.0.3",
|
||||
"dbgate-datalib": "^7.0.0-alpha.1",
|
||||
"dbgate-query-splitter": "^4.11.9",
|
||||
"dbgate-query-splitter": "^4.12.0",
|
||||
"dbgate-rest": "^7.0.0-alpha.1",
|
||||
"dbgate-sqltree": "^7.0.0-alpha.1",
|
||||
"dbgate-tools": "^7.0.0-alpha.1",
|
||||
"debug": "^4.3.4",
|
||||
|
||||
@@ -19,6 +19,26 @@ const unzipDirectory = require('../shell/unzipDirectory');
|
||||
|
||||
const logger = getLogger('archive');
|
||||
|
||||
/**
|
||||
* Rejects any archive name (folder or file) that contains path-traversal
|
||||
* sequences, directory separators, or null bytes. These values are used
|
||||
* directly in path.join() calls; allowing traversal would let callers read
|
||||
* or write arbitrary files outside the archive directory.
|
||||
*/
|
||||
function assertSafeArchiveName(name, label) {
|
||||
if (typeof name !== 'string' || name.length === 0) {
|
||||
throw new Error(`DBGM-00000 Invalid ${label}: must be a non-empty string`);
|
||||
}
|
||||
if (name.includes('\0') || name.includes('..') || name.includes('/') || name.includes('\\')) {
|
||||
throw new Error(`DBGM-00000 Invalid ${label}: path traversal not allowed`);
|
||||
}
|
||||
// Reject names that resolve to the archive root itself (e.g. '.')
|
||||
const resolved = path.resolve(archivedir(), name);
|
||||
if (resolved === path.resolve(archivedir())) {
|
||||
throw new Error(`DBGM-00000 Invalid ${label}: must not resolve to the archive root`);
|
||||
}
|
||||
}
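
A quick, hypothetical illustration of which values the guard accepts and rejects, based only on the checks visible above (the specific names are illustrative):

```js
// Hypothetical inputs against the guard above.
assertSafeArchiveName('my-archive', 'folder');   // ok
assertSafeArchiveName('report.zip', 'folder');   // ok (no separators or '..')
assertSafeArchiveName('../secrets', 'folder');   // throws: path traversal not allowed
assertSafeArchiveName('a/b', 'file');            // throws: contains a separator
assertSafeArchiveName('', 'file');               // throws: must be a non-empty string
assertSafeArchiveName('.', 'folder');            // throws: resolves to the archive root
```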
|
||||
|
||||
module.exports = {
|
||||
folders_meta: true,
|
||||
async folders() {
|
||||
@@ -39,6 +59,7 @@ module.exports = {
|
||||
|
||||
createFolder_meta: true,
|
||||
async createFolder({ folder }) {
|
||||
assertSafeArchiveName(folder, 'folder');
|
||||
await fs.mkdir(path.join(archivedir(), folder));
|
||||
socket.emitChanged('archive-folders-changed');
|
||||
return true;
|
||||
@@ -46,8 +67,12 @@ module.exports = {
|
||||
|
||||
createLink_meta: true,
|
||||
async createLink({ linkedFolder }) {
|
||||
if (typeof linkedFolder !== 'string' || linkedFolder.length === 0) {
|
||||
throw new Error(`DBGM-00000 Invalid linkedFolder: must be a non-empty string`);
|
||||
}
|
||||
assertSafeArchiveName(path.parse(linkedFolder).name, 'linkedFolder');
|
||||
const folder = await this.getNewArchiveFolder({ database: path.parse(linkedFolder).name + '.link' });
|
||||
fs.writeFile(path.join(archivedir(), folder), linkedFolder);
|
||||
await fs.writeFile(path.join(archivedir(), folder), linkedFolder);
|
||||
clearArchiveLinksCache();
|
||||
socket.emitChanged('archive-folders-changed');
|
||||
return folder;
|
||||
@@ -71,6 +96,7 @@ module.exports = {
|
||||
|
||||
files_meta: true,
|
||||
async files({ folder }) {
|
||||
assertSafeArchiveName(folder, 'folder');
|
||||
try {
|
||||
if (folder.endsWith('.zip')) {
|
||||
if (await fs.exists(path.join(archivedir(), folder))) {
|
||||
@@ -121,6 +147,9 @@ module.exports = {
|
||||
|
||||
createFile_meta: true,
|
||||
async createFile({ folder, file, fileType, tableInfo }) {
|
||||
assertSafeArchiveName(folder, 'folder');
|
||||
assertSafeArchiveName(file, 'file');
|
||||
assertSafeArchiveName(fileType, 'fileType');
|
||||
await fs.writeFile(
|
||||
path.join(resolveArchiveFolder(folder), `${file}.${fileType}`),
|
||||
tableInfo ? JSON.stringify({ __isStreamHeader: true, tableInfo }) : ''
|
||||
@@ -131,6 +160,9 @@ module.exports = {
|
||||
|
||||
deleteFile_meta: true,
|
||||
async deleteFile({ folder, file, fileType }) {
|
||||
assertSafeArchiveName(folder, 'folder');
|
||||
assertSafeArchiveName(file, 'file');
|
||||
assertSafeArchiveName(fileType, 'fileType');
|
||||
await fs.unlink(path.join(resolveArchiveFolder(folder), `${file}.${fileType}`));
|
||||
socket.emitChanged(`archive-files-changed`, { folder });
|
||||
return true;
|
||||
@@ -138,6 +170,10 @@ module.exports = {
|
||||
|
||||
renameFile_meta: true,
|
||||
async renameFile({ folder, file, newFile, fileType }) {
|
||||
assertSafeArchiveName(folder, 'folder');
|
||||
assertSafeArchiveName(file, 'file');
|
||||
assertSafeArchiveName(newFile, 'newFile');
|
||||
assertSafeArchiveName(fileType, 'fileType');
|
||||
await fs.rename(
|
||||
path.join(resolveArchiveFolder(folder), `${file}.${fileType}`),
|
||||
path.join(resolveArchiveFolder(folder), `${newFile}.${fileType}`)
|
||||
@@ -148,6 +184,8 @@ module.exports = {
|
||||
|
||||
modifyFile_meta: true,
|
||||
async modifyFile({ folder, file, changeSet, mergedRows, mergeKey, mergeMode }) {
|
||||
assertSafeArchiveName(folder, 'folder');
|
||||
assertSafeArchiveName(file, 'file');
|
||||
await jsldata.closeDataStore(`archive://${folder}/${file}`);
|
||||
const changedFilePath = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
|
||||
|
||||
@@ -187,6 +225,8 @@ module.exports = {
|
||||
|
||||
renameFolder_meta: true,
|
||||
async renameFolder({ folder, newFolder }) {
|
||||
assertSafeArchiveName(folder, 'folder');
|
||||
assertSafeArchiveName(newFolder, 'newFolder');
|
||||
const uniqueName = await this.getNewArchiveFolder({ database: newFolder });
|
||||
await fs.rename(path.join(archivedir(), folder), path.join(archivedir(), uniqueName));
|
||||
socket.emitChanged(`archive-folders-changed`);
|
||||
@@ -196,6 +236,7 @@ module.exports = {
|
||||
deleteFolder_meta: true,
|
||||
async deleteFolder({ folder }) {
|
||||
if (!folder) throw new Error('Missing folder parameter');
|
||||
assertSafeArchiveName(folder, 'folder');
|
||||
if (folder.endsWith('.link') || folder.endsWith('.zip')) {
|
||||
await fs.unlink(path.join(archivedir(), folder));
|
||||
} else {
|
||||
@@ -207,6 +248,8 @@ module.exports = {
|
||||
|
||||
saveText_meta: true,
|
||||
async saveText({ folder, file, text }) {
|
||||
assertSafeArchiveName(folder, 'folder');
|
||||
assertSafeArchiveName(file, 'file');
|
||||
await fs.writeFile(path.join(resolveArchiveFolder(folder), `${file}.jsonl`), text);
|
||||
socket.emitChanged(`archive-files-changed`, { folder });
|
||||
return true;
|
||||
@@ -214,6 +257,8 @@ module.exports = {
|
||||
|
||||
saveJslData_meta: true,
|
||||
async saveJslData({ folder, file, jslid, changeSet }) {
|
||||
assertSafeArchiveName(folder, 'folder');
|
||||
assertSafeArchiveName(file, 'file');
|
||||
const source = getJslFileName(jslid);
|
||||
const target = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
|
||||
if (changeSet) {
|
||||
@@ -232,11 +277,20 @@ module.exports = {
|
||||
|
||||
saveRows_meta: true,
|
||||
async saveRows({ folder, file, rows }) {
|
||||
const fileStream = fs.createWriteStream(path.join(resolveArchiveFolder(folder), `${file}.jsonl`));
|
||||
assertSafeArchiveName(folder, 'folder');
|
||||
assertSafeArchiveName(file, 'file');
|
||||
const filePath = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
|
||||
const fileStream = fs.createWriteStream(filePath);
|
||||
for (const row of rows) {
|
||||
await fileStream.write(JSON.stringify(row) + '\n');
|
||||
const ok = fileStream.write(JSON.stringify(row) + '\n');
|
||||
if (!ok) {
|
||||
await new Promise(resolve => fileStream.once('drain', resolve));
|
||||
}
|
||||
}
|
||||
await fileStream.close();
|
||||
await new Promise((resolve, reject) => {
|
||||
fileStream.end(() => resolve());
|
||||
fileStream.on('error', reject);
|
||||
});
|
||||
socket.emitChanged(`archive-files-changed`, { folder });
|
||||
return true;
|
||||
},
|
||||
@@ -256,6 +310,8 @@ module.exports = {
|
||||
|
||||
getArchiveData_meta: true,
|
||||
async getArchiveData({ folder, file }) {
|
||||
assertSafeArchiveName(folder, 'folder');
|
||||
assertSafeArchiveName(file, 'file');
|
||||
let rows;
|
||||
if (folder.endsWith('.zip')) {
|
||||
rows = await unzipJsonLinesFile(path.join(archivedir(), folder), `${file}.jsonl`);
|
||||
@@ -270,7 +326,7 @@ module.exports = {
|
||||
if (!fileName?.endsWith('.zip')) {
|
||||
throw new Error(`${fileName} is not a ZIP file`);
|
||||
}
|
||||
|
||||
assertSafeArchiveName(fileName.slice(0, -4), 'fileName');
|
||||
const folder = await this.getNewArchiveFolder({ database: fileName });
|
||||
await fs.copyFile(filePath, path.join(archivedir(), folder));
|
||||
socket.emitChanged(`archive-folders-changed`);
|
||||
@@ -280,6 +336,7 @@ module.exports = {
|
||||
|
||||
zip_meta: true,
|
||||
async zip({ folder }) {
|
||||
assertSafeArchiveName(folder, 'folder');
|
||||
const newFolder = await this.getNewArchiveFolder({ database: folder + '.zip' });
|
||||
await zipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
|
||||
socket.emitChanged(`archive-folders-changed`);
|
||||
@@ -289,6 +346,7 @@ module.exports = {
|
||||
|
||||
unzip_meta: true,
|
||||
async unzip({ folder }) {
|
||||
assertSafeArchiveName(folder, 'folder');
|
||||
const newFolder = await this.getNewArchiveFolder({ database: folder.slice(0, -4) });
|
||||
await unzipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
|
||||
socket.emitChanged(`archive-folders-changed`);
|
||||
@@ -298,6 +356,7 @@ module.exports = {
|
||||
|
||||
getZippedPath_meta: true,
|
||||
async getZippedPath({ folder }) {
|
||||
assertSafeArchiveName(folder, 'folder');
|
||||
if (folder.endsWith('.zip')) {
|
||||
return { filePath: path.join(archivedir(), folder) };
|
||||
}
|
||||
|
||||
@@ -202,7 +202,7 @@ module.exports = {
|
||||
|
||||
const storageConnections = await storage.connections(req);
|
||||
if (storageConnections) {
|
||||
return storageConnections;
|
||||
return storageConnections.map(maskConnection);
|
||||
}
|
||||
if (portalConnections) {
|
||||
if (platformInfo.allowShellConnection) return portalConnections.map(x => encryptConnection(x));
|
||||
@@ -484,7 +484,7 @@ module.exports = {
|
||||
|
||||
const storageConnection = await storage.getConnection({ conid });
|
||||
if (storageConnection) {
|
||||
return storageConnection;
|
||||
return mask ? maskConnection(storageConnection) : storageConnection;
|
||||
}
|
||||
|
||||
if (portalConnections) {
|
||||
@@ -492,7 +492,61 @@ module.exports = {
|
||||
return mask && !platformInfo.allowShellConnection ? maskConnection(res) : encryptConnection(res);
|
||||
}
|
||||
const res = await this.datastore.get(conid);
|
||||
return res || null;
|
||||
if (res) return res;
|
||||
|
||||
// In a forked runner-script child process, ask the parent for connections that may be
|
||||
// volatile (in-memory only, e.g. ask-for-password). We only do this when
|
||||
// there really is a parent (process.send exists) to avoid an infinite loop
|
||||
// when the parent's own getCore falls through here.
|
||||
// The check is intentionally narrow: only runner scripts pass
|
||||
// --process-display-name script, so connect/session/ssh-forward subprocesses
|
||||
// are not affected and continue to return null immediately.
|
||||
if (process.send && processArgs.processDisplayName === 'script') {
|
||||
const conn = await new Promise(resolve => {
|
||||
let resolved = false;
|
||||
|
||||
const cleanup = () => {
|
||||
process.removeListener('message', handler);
|
||||
process.removeListener('disconnect', onDisconnect);
|
||||
clearTimeout(timeout);
|
||||
};
|
||||
|
||||
const settle = value => {
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
cleanup();
|
||||
resolve(value);
|
||||
}
|
||||
};
|
||||
|
||||
const handler = message => {
|
||||
if (message?.msgtype === 'volatile-connection-response' && message.conid === conid) {
|
||||
settle(message.conn || null);
|
||||
}
|
||||
};
|
||||
|
||||
const onDisconnect = () => settle(null);
|
||||
|
||||
const timeout = setTimeout(() => settle(null), 5000);
|
||||
// Don't let the timer alone keep the process alive if all other work is done
|
||||
timeout.unref();
|
||||
|
||||
process.on('message', handler);
|
||||
process.once('disconnect', onDisconnect);
|
||||
|
||||
try {
|
||||
process.send({ msgtype: 'get-volatile-connection', conid });
|
||||
} catch {
|
||||
settle(null);
|
||||
}
|
||||
});
|
||||
if (conn) {
|
||||
volatileConnections[conn._id] = conn; // cache for subsequent calls
|
||||
return conn;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
},
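
The message exchange above implies a responder on the parent side; a rough, hypothetical sketch of that counterpart follows. Only the msgtype names and the conid/conn fields come from the code above; the parent's lookup table and the subprocess handle are assumptions.

```js
// Hypothetical parent-side counterpart to the child request above; only the
// 'get-volatile-connection' / 'volatile-connection-response' message shapes are
// taken from the code, the in-memory lookup itself is assumed.
subprocess.on('message', message => {
  if (message?.msgtype === 'get-volatile-connection') {
    const conn = volatileConnections[message.conid] || null; // assumed in-memory map
    subprocess.send({ msgtype: 'volatile-connection-response', conid: message.conid, conn });
  }
});
```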
|
||||
|
||||
get_meta: true,
|
||||
@@ -502,6 +556,9 @@ module.exports = {
|
||||
_id: '__model',
|
||||
};
|
||||
}
|
||||
if (!conid) {
|
||||
return null;
|
||||
}
|
||||
await testConnectionPermission(conid, req);
|
||||
return this.getCore({ conid, mask: true });
|
||||
},
|
||||
|
||||
@@ -15,6 +15,7 @@ const {
|
||||
getLogger,
|
||||
extractErrorLogData,
|
||||
filterStructureBySchema,
|
||||
serializeJsTypesForJsonStringify,
|
||||
} = require('dbgate-tools');
|
||||
const { html, parse } = require('diff2html');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
@@ -94,10 +95,12 @@ module.exports = {
|
||||
}
|
||||
},
|
||||
handle_response(conid, database, { msgid, ...response }) {
|
||||
const [resolve, reject, additionalData] = this.requests[msgid];
|
||||
resolve(response);
|
||||
if (additionalData?.auditLogger) {
|
||||
additionalData?.auditLogger(response);
|
||||
const [resolve, reject, additionalData] = this.requests[msgid] || [];
|
||||
if (resolve) {
|
||||
resolve(response);
|
||||
if (additionalData?.auditLogger) {
|
||||
additionalData?.auditLogger(response);
|
||||
}
|
||||
}
|
||||
delete this.requests[msgid];
|
||||
},
|
||||
@@ -165,6 +168,11 @@ module.exports = {
|
||||
if (!connection) {
|
||||
throw new Error(`databaseConnections: Connection with conid="${conid}" not found`);
|
||||
}
|
||||
|
||||
if (connection.engine?.endsWith('@rest')) {
|
||||
return { isApiConnection: true };
|
||||
}
|
||||
|
||||
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
|
||||
throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
|
||||
}
|
||||
@@ -219,12 +227,13 @@ module.exports = {
|
||||
this.close(conid, database, false);
|
||||
});
|
||||
|
||||
subprocess.send({
|
||||
const connectMessage = serializeJsTypesForJsonStringify({
|
||||
msgtype: 'connect',
|
||||
connection: { ...connection, database },
|
||||
structure: lastClosed ? lastClosed.structure : null,
|
||||
globalSettings: await config.getSettings(),
|
||||
});
|
||||
subprocess.send(connectMessage);
|
||||
return newOpened;
|
||||
},
|
||||
|
||||
@@ -232,9 +241,10 @@ module.exports = {
|
||||
sendRequest(conn, message, additionalData = {}) {
|
||||
const msgid = crypto.randomUUID();
|
||||
const promise = new Promise((resolve, reject) => {
|
||||
this.requests[msgid] = [resolve, reject, additionalData];
|
||||
this.requests[msgid] = [resolve, reject, additionalData, conn.conid, conn.database];
|
||||
try {
|
||||
conn.subprocess.send({ msgid, ...message });
|
||||
const serializedMessage = serializeJsTypesForJsonStringify({ msgid, ...message });
|
||||
conn.subprocess.send(serializedMessage);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00115 Error sending request to process');
|
||||
this.close(conn.conid, conn.database);
|
||||
@@ -256,12 +266,12 @@ module.exports = {
|
||||
},
|
||||
|
||||
sqlSelect_meta: true,
|
||||
async sqlSelect({ conid, database, select, auditLogSessionGroup }, req) {
|
||||
async sqlSelect({ conid, database, select, commandTimeout, auditLogSessionGroup }, req) {
|
||||
await testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
const res = await this.sendRequest(
|
||||
opened,
|
||||
{ msgtype: 'sqlSelect', select },
|
||||
{ msgtype: 'sqlSelect', select, commandTimeout },
|
||||
{
|
||||
auditLogger:
|
||||
auditLogSessionGroup && select?.from?.name?.pureName
|
||||
@@ -336,9 +346,12 @@ module.exports = {
|
||||
},
|
||||
|
||||
collectionData_meta: true,
|
||||
async collectionData({ conid, database, options, auditLogSessionGroup }, req) {
|
||||
async collectionData({ conid, database, options, commandTimeout, auditLogSessionGroup }, req) {
|
||||
await testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
if (commandTimeout && options) {
|
||||
options.commandTimeout = commandTimeout;
|
||||
}
|
||||
const res = await this.sendRequest(
|
||||
opened,
|
||||
{ msgtype: 'collectionData', options },
|
||||
@@ -468,6 +481,7 @@ module.exports = {
|
||||
|
||||
const databasePermissions = await loadDatabasePermissionsFromRequest(req);
|
||||
const tablePermissions = await loadTablePermissionsFromRequest(req);
|
||||
const databasePermissionRole = getDatabasePermissionRole(conid, database, databasePermissions);
|
||||
const fieldsAndRoles = [
|
||||
[changeSet.inserts, 'create_update_delete'],
|
||||
[changeSet.deletes, 'create_update_delete'],
|
||||
@@ -482,7 +496,7 @@ module.exports = {
|
||||
operation.schemaName,
|
||||
operation.pureName,
|
||||
tablePermissions,
|
||||
databasePermissions
|
||||
databasePermissionRole
|
||||
);
|
||||
if (getTablePermissionRoleLevelIndex(role) < getTablePermissionRoleLevelIndex(requiredRole)) {
|
||||
throw new Error('DBGM-00262 Permission not granted');
|
||||
@@ -571,6 +585,24 @@ module.exports = {
|
||||
};
|
||||
},
|
||||
|
||||
pingDatabases_meta: true,
|
||||
async pingDatabases({ databases }, req) {
|
||||
if (!databases || !Array.isArray(databases)) return { status: 'ok' };
|
||||
for (const { conid, database } of databases) {
|
||||
if (!conid || !database) continue;
|
||||
const existing = this.opened.find(x => x.conid == conid && x.database == database);
|
||||
if (existing) {
|
||||
try {
|
||||
existing.subprocess.send({ msgtype: 'ping' });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00308 Error pinging DB connection');
|
||||
this.close(conid, database);
|
||||
}
|
||||
}
|
||||
}
|
||||
return { status: 'ok' };
|
||||
},
|
||||
|
||||
refresh_meta: true,
|
||||
async refresh({ conid, database, keepOpen }, req) {
|
||||
await testConnectionPermission(conid, req);
|
||||
@@ -613,6 +645,15 @@ module.exports = {
|
||||
structure: existing.structure,
|
||||
};
|
||||
socket.emitChanged(`database-status-changed`, { conid, database });
|
||||
|
||||
// Reject all pending requests for this connection
|
||||
for (const [msgid, entry] of Object.entries(this.requests)) {
|
||||
const [resolve, reject, additionalData, reqConid, reqDatabase] = entry;
|
||||
if (reqConid === conid && reqDatabase === database) {
|
||||
reject('DBGM-00309 Database connection closed');
|
||||
delete this.requests[msgid];
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
|
@@ -15,7 +15,8 @@ const getDiagramExport = require('../utility/getDiagramExport');
const apps = require('./apps');
const getMapExport = require('../utility/getMapExport');
const dbgateApi = require('../shell');
const { getLogger } = require('dbgate-tools');
const { getLogger, getSqlFrontMatter } = require('dbgate-tools');
const yaml = require('js-yaml');
const platformInfo = require('../utility/platformInfo');
const { checkSecureFilePathsWithoutDirectory, checkSecureDirectories } = require('../utility/security');
const { copyAppLogsIntoFile, getRecentAppLogRecords } = require('../utility/appLogStore');
@@ -35,13 +36,46 @@ function deserialize(format, text) {

module.exports = {
list_meta: true,
async list({ folder }, req) {
async list({ folder, parseFrontMatter }, req) {
const loadedPermissions = await loadPermissionsFromRequest(req);
if (!hasPermission(`files/${folder}/read`, loadedPermissions)) return [];
const dir = path.join(filesdir(), folder);
if (!(await fs.exists(dir))) return [];
const files = (await fs.readdir(dir)).map(file => ({ folder, file }));
return files;
const fileNames = await fs.readdir(dir);
if (!parseFrontMatter) {
return fileNames.map(file => ({ folder, file }));
}
const result = [];
for (const file of fileNames) {
const item = { folder, file };
let fh;
try {
fh = await require('fs').promises.open(path.join(dir, file), 'r');
const buf = new Uint8Array(512);
const { bytesRead } = await fh.read(buf, 0, 512, 0);
let text = Buffer.from(buf.buffer, 0, bytesRead).toString('utf-8');

if (text.includes('-- >>>') && !text.includes('-- <<<')) {
const stat = await fh.stat();
const fullSize = Math.min(stat.size, 4096);
if (fullSize > 512) {
const fullBuf = new Uint8Array(fullSize);
const { bytesRead: fullBytesRead } = await fh.read(fullBuf, 0, fullSize, 0);
text = Buffer.from(fullBuf.buffer, 0, fullBytesRead).toString('utf-8');
}
}

const fm = getSqlFrontMatter(text, yaml);
if (fm?.connectionId) item.connectionId = fm.connectionId;
if (fm?.databaseName) item.databaseName = fm.databaseName;
} catch (e) {
// ignore read errors for individual files
} finally {
if (fh) await fh.close().catch(() => {});
}
result.push(item);
}
return result;
},
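The new list() path avoids reading whole files: it reads only the first 512 bytes of each one and grows the window to 4 KB when a front-matter block (`-- >>>`) is opened but not closed within the prefix, then hands the text to getSqlFrontMatter. A standalone sketch of that partial-read step; `readFileHead` is a hypothetical helper name, not part of the codebase.

```js
// Sketch: read a small prefix first, extend it only when a front-matter
// block starts inside the prefix but is not closed there.
const fsp = require('fs').promises;

async function readFileHead(filePath, initialBytes = 512, maxBytes = 4096) {
  let fh;
  try {
    fh = await fsp.open(filePath, 'r');
    const buf = Buffer.alloc(initialBytes);
    const { bytesRead } = await fh.read(buf, 0, initialBytes, 0);
    let text = buf.toString('utf-8', 0, bytesRead);

    // Front matter opened but not closed in the prefix -> read a larger head
    if (text.includes('-- >>>') && !text.includes('-- <<<')) {
      const { size } = await fh.stat();
      const fullSize = Math.min(size, maxBytes);
      if (fullSize > initialBytes) {
        const fullBuf = Buffer.alloc(fullSize);
        const read2 = await fh.read(fullBuf, 0, fullSize, 0);
        text = fullBuf.toString('utf-8', 0, read2.bytesRead);
      }
    }
    return text;
  } finally {
    if (fh) await fh.close().catch(() => {});
  }
}
```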

listAll_meta: true,
@@ -257,6 +291,13 @@ module.exports = {
return true;
},

exportDiagramPng_meta: true,
async exportDiagramPng({ filePath, pngBase64 }) {
const base64 = pngBase64.replace(/^data:image\/png;base64,/, '');
await fs.writeFile(filePath, Buffer.from(base64, 'base64'));
return true;
},

getFileRealPath_meta: true,
async getFileRealPath({ folder, file }, req) {
const loadedPermissions = await loadPermissionsFromRequest(req);

@@ -1,5 +1,8 @@
const { filterName } = require('dbgate-tools');
const { filterName, getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('jsldata');
const { jsldir, archivedir } = require('../utility/directories');
const fs = require('fs');
const path = require('path');
const lineReader = require('line-reader');
const _ = require('lodash');
const { __ } = require('lodash/fp');
@@ -149,6 +152,10 @@ module.exports = {

getRows_meta: true,
async getRows({ jslid, offset, limit, filters, sort, formatterFunction }) {
const fileName = getJslFileName(jslid);
if (!fs.existsSync(fileName)) {
return [];
}
const datastore = await this.ensureDatastore(jslid, formatterFunction);
return datastore.getRows(offset, limit, _.isEmpty(filters) ? null : filters, _.isEmpty(sort) ? null : sort);
},
@@ -159,6 +166,72 @@ module.exports = {
return fs.existsSync(fileName);
},

streamRows_meta: {
method: 'get',
raw: true,
},
streamRows(req, res) {
const { jslid } = req.query;
if (!jslid) {
res.status(400).json({ apiErrorMessage: 'Missing jslid' });
return;
}

// Reject file:// jslids — they resolve to arbitrary server-side paths
if (jslid.startsWith('file://')) {
res.status(403).json({ apiErrorMessage: 'Forbidden jslid scheme' });
return;
}

const fileName = getJslFileName(jslid);

if (!fs.existsSync(fileName)) {
res.status(404).json({ apiErrorMessage: 'File not found' });
return;
}

// Dereference symlinks and normalize case (Windows) before the allow-list check.
// realpathSync is safe here because existsSync confirmed the file is present.
// path.resolve() alone cannot dereference symlinks, so a symlink inside an allowed
// root could otherwise point to an arbitrary external path.
const normalize = p => (process.platform === 'win32' ? p.toLowerCase() : p);
const resolveRoot = r => { try { return fs.realpathSync(r); } catch { return path.resolve(r); } };

let realFile;
try {
realFile = fs.realpathSync(fileName);
} catch {
res.status(403).json({ apiErrorMessage: 'Forbidden path' });
return;
}

const allowedRoots = [jsldir(), archivedir()].map(r => normalize(resolveRoot(r)) + path.sep);
const isAllowed = allowedRoots.some(root => normalize(realFile).startsWith(root));
if (!isAllowed) {
logger.warn({ jslid, realFile }, 'DBGM-00000 streamRows rejected path outside allowed roots');
res.status(403).json({ apiErrorMessage: 'Forbidden path' });
return;
}
res.setHeader('Content-Type', 'application/x-ndjson');
res.setHeader('Cache-Control', 'no-cache');
const stream = fs.createReadStream(realFile, 'utf-8');

req.on('close', () => {
stream.destroy();
});

stream.on('error', err => {
logger.error(extractErrorLogData(err), 'DBGM-00000 Error streaming JSONL file');
if (!res.headersSent) {
res.status(500).json({ apiErrorMessage: 'Stream error' });
} else {
res.end();
}
});

stream.pipe(res);
},
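The guard in streamRows reduces to one rule: resolve the requested file with realpathSync, resolve each allowed root the same way, and require a prefix match (case-insensitive on Windows). The same check as a self-contained predicate, handy for unit tests; `isInsideAllowedRoots` is a hypothetical name.

```js
// Sketch of the allow-list check used by streamRows: symlinks are
// dereferenced on both sides before comparing, so a symlink inside an
// allowed root cannot smuggle in an outside path.
const fs = require('fs');
const path = require('path');

function isInsideAllowedRoots(fileName, roots) {
  const normalize = p => (process.platform === 'win32' ? p.toLowerCase() : p);
  const resolveRoot = r => {
    try {
      return fs.realpathSync(r);
    } catch {
      return path.resolve(r); // the root directory may not exist yet
    }
  };

  let realFile;
  try {
    realFile = fs.realpathSync(fileName); // throws if the file does not exist
  } catch {
    return false;
  }

  return roots
    .map(r => normalize(resolveRoot(r)) + path.sep)
    .some(root => normalize(realFile).startsWith(root));
}

// isInsideAllowedRoots('/data/jsl/abc.jsonl', ['/data/jsl', '/data/archive'])
```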

getStats_meta: true,
getStats({ jslid }) {
const file = `${getJslFileName(jslid)}.stats`;

@@ -33,19 +33,35 @@ function readCore(reader, skip, limit, filter) {
});
}

module.exports = {
read_meta: true,
async read({ skip, limit, filter }) {
function readJsonl({ skip, limit, filter }) {
return new Promise(async (resolve, reject) => {
const fileName = path.join(datadir(), 'query-history.jsonl');
// @ts-ignore
if (!(await fs.exists(fileName))) return [];
if (!(await fs.exists(fileName))) return resolve([]);
const reader = fsReverse(fileName);
const res = await readCore(reader, skip, limit, filter);
return res;
resolve(res);
});
}

module.exports = {
read_meta: true,
async read({ skip, limit, filter }, req) {
const storage = require('./storage');
const storageResult = await storage.readQueryHistory({ skip, limit, filter }, req);
if (storageResult) return storageResult;
return readJsonl({ skip, limit, filter });
},

write_meta: true,
async write({ data }) {
async write({ data }, req) {
const storage = require('./storage');
const written = await storage.writeQueryHistory({ data }, req);
if (written) {
socket.emit('query-history-changed');
return 'OK';
}

const fileName = path.join(datadir(), 'query-history.jsonl');
await fs.appendFile(fileName, JSON.stringify(data) + '\n');
socket.emit('query-history-changed');
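One small caveat in the readJsonl wrapper above: it passes an async function to the Promise constructor, so an exception thrown inside the executor is swallowed rather than rejecting the promise. An equivalent sketch without the async executor, intended as a drop-in inside the same module (it reuses the controller's existing `datadir`, `fs`, `fsReverse`, and `readCore`):

```js
// Sketch: same storage-fallback read path, without the async Promise executor.
async function readJsonl({ skip, limit, filter }) {
  const fileName = path.join(datadir(), 'query-history.jsonl');
  if (!(await fs.exists(fileName))) return [];
  const reader = fsReverse(fileName);
  // errors now propagate to the caller instead of being lost in the executor
  return readCore(reader, skip, limit, filter);
}
```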

@@ -0,0 +1,41 @@
module.exports = {
disconnect_meta: true,
async disconnect({ conid }, req) {
return null;
},

getApiInfo_meta: true,
async getApiInfo({ conid }, req) {
return null;
},

restStatus_meta: true,
async restStatus() {
return {};
},

ping_meta: true,
async ping({ conidArray, strmid }) {
return null;
},

refresh_meta: true,
async refresh({ conid, keepOpen }, req) {
return null;
},

testConnection_meta: true,
async testConnection({ conid }, req) {
return null;
},

execute_meta: true,
async execute({ conid, method, endpoint, parameters, server }, req) {
return null;
},

apiQuery_meta: true,
async apiQuery({ conid, server, query, variables }, req) {
return null;
},
};

@@ -10,6 +10,7 @@ const {
|
||||
extractShellApiPlugins,
|
||||
compileShellApiFunctionName,
|
||||
jsonScriptToJavascript,
|
||||
assertValidShellApiFunctionName,
|
||||
getLogger,
|
||||
safeJsonParse,
|
||||
pinoLogRecordToMessageRecord,
|
||||
@@ -54,19 +55,23 @@ logger.info('DBGM-00014 Finished job script');
|
||||
dbgateApi.runScript(run);
|
||||
`;
|
||||
|
||||
const loaderScriptTemplate = (prefix, functionName, props, runid) => `
|
||||
const loaderScriptTemplate = (functionName, props, runid) => {
|
||||
const plugins = extractShellApiPlugins(functionName, props);
|
||||
const prefix = plugins.map(packageName => `// @require ${packageName}\n`).join('');
|
||||
return `
|
||||
${prefix}
|
||||
const dbgateApi = require(process.env.DBGATE_API);
|
||||
dbgateApi.initializeApiEnvironment();
|
||||
${requirePluginsTemplate(extractShellApiPlugins(functionName, props))}
|
||||
${requirePluginsTemplate(plugins)}
|
||||
require=null;
|
||||
async function run() {
|
||||
const reader=await ${compileShellApiFunctionName(functionName)}(${JSON.stringify(props)});
|
||||
const writer=await dbgateApi.collectorWriter({runid: '${runid}'});
|
||||
const writer=await dbgateApi.collectorWriter({runid: ${JSON.stringify(runid)}});
|
||||
await dbgateApi.copyStream(reader, writer);
|
||||
}
|
||||
dbgateApi.runScript(run);
|
||||
`;
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
/** @type {import('dbgate-types').OpenedRunner[]} */
|
||||
@@ -172,7 +177,7 @@ module.exports = {
|
||||
byline(subprocess.stderr).on('data', pipeDispatcher('error'));
|
||||
subprocess.on('exit', code => {
|
||||
// console.log('... EXITED', code);
|
||||
this.rejectRequest(runid, { message: 'No data returned, maybe input data source is too big' });
|
||||
this.rejectRequest(runid, { message: 'DBGM-00281 No data returned, maybe input data source is too big' });
|
||||
logger.info({ code, pid: subprocess.pid }, 'DBGM-00016 Exited process');
|
||||
socket.emit(`runner-done-${runid}`, code);
|
||||
this.opened = this.opened.filter(x => x.runid != runid);
|
||||
@@ -196,6 +201,27 @@ module.exports = {
// @ts-ignore
const { msgtype } = message;
if (handleProcessCommunication(message, subprocess)) return;
if (msgtype === 'get-volatile-connection') {
const connections = require('./connections');
// @ts-ignore
const conid = message.conid;
if (!conid || typeof conid !== 'string') return;
const trySend = payload => {
if (!subprocess.connected) return;
try {
subprocess.send(payload);
} catch {
// child disconnected between the check and the send — ignore
}
};
connections.getCore({ conid }).then(conn => {
trySend({ msgtype: 'volatile-connection-response', conid, conn: conn?.unsaved ? conn : null });
}).catch(err => {
logger.error({ ...extractErrorLogData(err), conid }, 'DBGM-00000 Error resolving volatile connection for child process');
trySend({ msgtype: 'volatile-connection-response', conid, conn: null });
});
return;
}
this[`handle_${msgtype}`](runid, message);
});
return _.pick(newOpened, ['runid']);
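The handler above answers a child process's `get-volatile-connection` message over Node's IPC channel and guards every send, since the child can exit at any moment. For context, a stripped-down sketch of the child-process side of that exchange; the message types mirror the ones above, while `requestVolatileConnection` and the timeout are illustrative additions.

```js
// Sketch: child-process side of the volatile-connection request/response.
// Must run in a process started with child_process.fork (IPC channel present).
function requestVolatileConnection(conid, timeoutMs = 5000) {
  return new Promise((resolve, reject) => {
    const onMessage = message => {
      if (message?.msgtype === 'volatile-connection-response' && message.conid === conid) {
        cleanup();
        resolve(message.conn); // null when the connection is not an unsaved/volatile one
      }
    };
    const timer = setTimeout(() => {
      cleanup();
      reject(new Error('Timed out waiting for volatile connection'));
    }, timeoutMs);
    const cleanup = () => {
      clearTimeout(timer);
      process.off('message', onMessage);
    };
    process.on('message', onMessage);
    process.send({ msgtype: 'get-volatile-connection', conid });
  });
}
```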
|
||||
@@ -225,7 +251,7 @@ module.exports = {
|
||||
subprocess.on('exit', code => {
|
||||
console.log('... EXITED', code);
|
||||
logger.info({ code, pid: subprocess.pid }, 'DBGM-00017 Exited process');
|
||||
this.dispatchMessage(runid, `Finished external process with code ${code}`);
|
||||
this.dispatchMessage(runid, `DBGM-00282 Finished external process with code ${code}`);
|
||||
socket.emit(`runner-done-${runid}`, code);
|
||||
if (onFinished) {
|
||||
onFinished();
|
||||
@@ -233,7 +259,7 @@ module.exports = {
|
||||
this.opened = this.opened.filter(x => x.runid != runid);
|
||||
});
|
||||
subprocess.on('spawn', () => {
|
||||
this.dispatchMessage(runid, `Started external process ${command}`);
|
||||
this.dispatchMessage(runid, `DBGM-00283 Started external process ${command}`);
|
||||
});
|
||||
subprocess.on('error', error => {
|
||||
console.log('... ERROR subprocess', error);
|
||||
@@ -279,7 +305,7 @@ module.exports = {
|
||||
if (script.type == 'json') {
|
||||
if (!platformInfo.isElectron) {
|
||||
if (!checkSecureDirectoriesInScript(script)) {
|
||||
return { errorMessage: 'Unallowed directories in script' };
|
||||
return { errorMessage: 'DBGM-00284 Unallowed directories in script' };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -299,10 +325,10 @@ module.exports = {
|
||||
action: 'script',
|
||||
severity: 'warn',
|
||||
detail: script,
|
||||
message: 'Scripts are not allowed',
|
||||
message: 'DBGM-00285 Scripts are not allowed',
|
||||
});
|
||||
|
||||
return { errorMessage: 'Shell scripting is not allowed' };
|
||||
return { errorMessage: 'DBGM-00286 Shell scripting is not allowed' };
|
||||
}
|
||||
|
||||
sendToAuditLog(req, {
|
||||
@@ -312,7 +338,7 @@ module.exports = {
|
||||
action: 'script',
|
||||
severity: 'info',
|
||||
detail: script,
|
||||
message: 'Running JS script',
|
||||
message: 'DBGM-00287 Running JS script',
|
||||
});
|
||||
|
||||
return this.startCore(runid, scriptTemplate(script, false));
|
||||
@@ -327,7 +353,7 @@ module.exports = {
|
||||
async cancel({ runid }) {
|
||||
const runner = this.opened.find(x => x.runid == runid);
|
||||
if (!runner) {
|
||||
throw new Error('Invalid runner');
|
||||
throw new Error('DBGM-00288 Invalid runner');
|
||||
}
|
||||
runner.subprocess.kill();
|
||||
return { state: 'ok' };
|
||||
@@ -353,17 +379,15 @@ module.exports = {
|
||||
async loadReader({ functionName, props }) {
|
||||
if (!platformInfo.isElectron) {
|
||||
if (props?.fileName && !checkSecureDirectories(props.fileName)) {
|
||||
return { errorMessage: 'Unallowed file' };
|
||||
return { errorMessage: 'DBGM-00289 Unallowed file' };
|
||||
}
|
||||
}
|
||||
const prefix = extractShellApiPlugins(functionName)
|
||||
.map(packageName => `// @require ${packageName}\n`)
|
||||
.join('');
|
||||
|
||||
const promise = new Promise((resolve, reject) => {
|
||||
assertValidShellApiFunctionName(functionName);
|
||||
const runid = crypto.randomUUID();
|
||||
this.requests[runid] = { resolve, reject, exitOnStreamError: true };
|
||||
this.startCore(runid, loaderScriptTemplate(prefix, functionName, props, runid));
|
||||
this.startCore(runid, loaderScriptTemplate(functionName, props, runid));
|
||||
});
|
||||
return promise;
|
||||
},
|
||||
@@ -371,7 +395,7 @@ module.exports = {
|
||||
scriptResult_meta: true,
|
||||
async scriptResult({ script }) {
|
||||
if (script.type != 'json') {
|
||||
return { errorMessage: 'Only JSON scripts are allowed' };
|
||||
return { errorMessage: 'DBGM-00290 Only JSON scripts are allowed' };
|
||||
}
|
||||
|
||||
const promise = new Promise(async (resolve, reject) => {
|
||||
|
||||
@@ -171,7 +171,7 @@ module.exports = {
|
||||
const databasePermissions = await loadDatabasePermissionsFromRequest(req);
|
||||
const res = [];
|
||||
for (const db of opened?.databases ?? []) {
|
||||
const databasePermissionRole = getDatabasePermissionRole(db.id, db.name, databasePermissions);
|
||||
const databasePermissionRole = getDatabasePermissionRole(conid, db.name, databasePermissions);
|
||||
if (databasePermissionRole != 'deny') {
|
||||
res.push({
|
||||
...db,
|
||||
|
||||
@@ -228,6 +228,19 @@ module.exports = {
|
||||
return { state: 'ok' };
|
||||
},
|
||||
|
||||
setIsolationLevel_meta: true,
|
||||
async setIsolationLevel({ sesid, level }) {
|
||||
const session = this.opened.find(x => x.sesid == sesid);
|
||||
if (!session) {
|
||||
throw new Error('Invalid session');
|
||||
}
|
||||
|
||||
logger.info({ sesid, level }, 'DBGM-00315 Setting transaction isolation level');
|
||||
session.subprocess.send({ msgtype: 'setIsolationLevel', level });
|
||||
|
||||
return { state: 'ok' };
|
||||
},
|
||||
|
||||
executeReader_meta: true,
|
||||
async executeReader({ conid, database, sql, queryName, appFolder }) {
|
||||
const { sesid } = await this.create({ conid, database });
|
||||
|
||||
@@ -14,6 +14,7 @@ const socket = require('./utility/socket');
|
||||
const connections = require('./controllers/connections');
|
||||
const serverConnections = require('./controllers/serverConnections');
|
||||
const databaseConnections = require('./controllers/databaseConnections');
|
||||
const restConnections = require('./controllers/restConnections');
|
||||
const metadata = require('./controllers/metadata');
|
||||
const sessions = require('./controllers/sessions');
|
||||
const runners = require('./controllers/runners');
|
||||
@@ -267,6 +268,7 @@ function useAllControllers(app, electron) {
|
||||
useController(app, electron, '/auth', auth);
|
||||
useController(app, electron, '/cloud', cloud);
|
||||
useController(app, electron, '/team-files', teamFiles);
|
||||
useController(app, electron, '/rest-connections', restConnections);
|
||||
}
|
||||
|
||||
function setElectronSender(electronSender) {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
const childProcessChecker = require('../utility/childProcessChecker');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const { connectUtility } = require('../utility/connectUtility');
|
||||
const { connectUtility, getRestAuthFromConnection } = require('../utility/connectUtility');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
const { pickSafeConnectionInfo } = require('../utility/crypting');
|
||||
const _ = require('lodash');
|
||||
@@ -29,6 +29,9 @@ function start() {
|
||||
try {
|
||||
const driver = requireEngineDriver(connection);
|
||||
const connectionChanged = driver?.beforeConnectionSave ? driver.beforeConnectionSave(connection) : connection;
|
||||
if (driver?.databaseEngineTypes?.includes('rest')) {
|
||||
connectionChanged.restAuth = getRestAuthFromConnection(connection);
|
||||
}
|
||||
|
||||
if (!connection.isVolatileResolved) {
|
||||
if (connectionChanged.useRedirectDbLogin) {
|
||||
|
||||
@@ -234,12 +234,12 @@ async function handleRunOperation({ msgid, operation, useTransaction }, skipRead
|
||||
}
|
||||
}
|
||||
|
||||
async function handleQueryData({ msgid, sql, range }, skipReadonlyCheck = false) {
|
||||
async function handleQueryData({ msgid, sql, range, commandTimeout }, skipReadonlyCheck = false) {
|
||||
await waitConnected();
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
|
||||
const res = await driver.query(dbhan, sql, { range });
|
||||
const res = await driver.query(dbhan, sql, { range, commandTimeout });
|
||||
process.send({ msgtype: 'response', msgid, ...serializeJsTypesForJsonStringify(res) });
|
||||
} catch (err) {
|
||||
process.send({
|
||||
@@ -250,11 +250,11 @@ async function handleQueryData({ msgid, sql, range }, skipReadonlyCheck = false)
|
||||
}
|
||||
}
|
||||
|
||||
async function handleSqlSelect({ msgid, select }) {
|
||||
async function handleSqlSelect({ msgid, select, commandTimeout }) {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
const dmp = driver.createDumper();
|
||||
dumpSqlSelect(dmp, select);
|
||||
return handleQueryData({ msgid, sql: dmp.s, range: select.range }, true);
|
||||
return handleQueryData({ msgid, sql: dmp.s, range: select.range, commandTimeout }, true);
|
||||
}
|
||||
|
||||
async function handleDriverDataCore(msgid, callMethod, { logName }) {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
const connectProcess = require('./connectProcess');
|
||||
const databaseConnectionProcess = require('./databaseConnectionProcess');
|
||||
const serverConnectionProcess = require('./serverConnectionProcess');
|
||||
const restConnectionProcess = require('./restConnectionProcess');
|
||||
const sessionProcess = require('./sessionProcess');
|
||||
const jslDatastoreProcess = require('./jslDatastoreProcess');
|
||||
const sshForwardProcess = require('./sshForwardProcess');
|
||||
@@ -9,6 +10,7 @@ module.exports = {
|
||||
connectProcess,
|
||||
databaseConnectionProcess,
|
||||
serverConnectionProcess,
|
||||
restConnectionProcess,
|
||||
sessionProcess,
|
||||
jslDatastoreProcess,
|
||||
sshForwardProcess,
|
||||
|
||||
@@ -0,0 +1,7 @@
|
||||
const childProcessChecker = require('../utility/childProcessChecker');
|
||||
|
||||
function start() {
|
||||
childProcessChecker();
|
||||
}
|
||||
|
||||
module.exports = { start };
|
||||
@@ -77,6 +77,38 @@ async function handleStopProfiler({ jslid }) {
|
||||
currentProfiler = null;
|
||||
}
|
||||
|
||||
async function handleSetIsolationLevel({ level }) {
|
||||
lastActivity = new Date().getTime();
|
||||
|
||||
await waitConnected();
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
|
||||
if (!driver.setTransactionIsolationLevel) {
|
||||
process.send({ msgtype: 'done', skipFinishedMessage: true });
|
||||
return;
|
||||
}
|
||||
|
||||
if (driver.isolationLevels && level && !driver.isolationLevels.includes(level)) {
|
||||
process.send({
|
||||
msgtype: 'info',
|
||||
info: {
|
||||
message: `Isolation level "${level}" is not supported by this driver. Supported levels: ${driver.isolationLevels.join(', ')}`,
|
||||
severity: 'error',
|
||||
},
|
||||
});
|
||||
process.send({ msgtype: 'done', skipFinishedMessage: true });
|
||||
return;
|
||||
}
|
||||
|
||||
executingScripts++;
|
||||
try {
|
||||
await driver.setTransactionIsolationLevel(dbhan, level);
|
||||
process.send({ msgtype: 'done', controlCommand: 'setIsolationLevel' });
|
||||
} finally {
|
||||
executingScripts--;
|
||||
}
|
||||
}
|
||||
|
||||
async function handleExecuteControlCommand({ command }) {
|
||||
lastActivity = new Date().getTime();
|
||||
|
||||
@@ -210,6 +242,7 @@ const messageHandlers = {
|
||||
connect: handleConnect,
|
||||
executeQuery: handleExecuteQuery,
|
||||
executeControlCommand: handleExecuteControlCommand,
|
||||
setIsolationLevel: handleSetIsolationLevel,
|
||||
executeReader: handleExecuteReader,
|
||||
startProfiler: handleStartProfiler,
|
||||
stopProfiler: handleStopProfiler,
|
||||
|
||||
@@ -4,7 +4,8 @@ const { pluginsdir, packagedPluginsDir, getPluginBackendPath } = require('../uti
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const authProxy = require('../utility/authProxy');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
//
|
||||
const { openApiDriver, graphQlDriver, oDataDriver } = require('dbgate-rest');
|
||||
//
|
||||
const logger = getLogger('requirePlugin');
|
||||
|
||||
const loadedPlugins = {};
|
||||
@@ -13,16 +14,21 @@ const dbgateEnv = {
|
||||
dbgateApi: null,
|
||||
platformInfo,
|
||||
authProxy,
|
||||
isProApp: () =>{
|
||||
isProApp: () => {
|
||||
const { isProApp } = require('../utility/checkLicense');
|
||||
return isProApp();
|
||||
}
|
||||
},
|
||||
};
|
||||
function requirePlugin(packageName, requiredPlugin = null) {
|
||||
if (!packageName) throw new Error('Missing packageName in plugin');
|
||||
if (loadedPlugins[packageName]) return loadedPlugins[packageName];
|
||||
|
||||
if (requiredPlugin == null) {
|
||||
if (packageName.endsWith('@rest') || packageName === 'rest') {
|
||||
return {
|
||||
drivers: [openApiDriver, graphQlDriver, oDataDriver],
|
||||
};
|
||||
}
|
||||
let module;
|
||||
const modulePath = getPluginBackendPath(packageName);
|
||||
logger.info(`DBGM-00062 Loading module ${packageName} from ${modulePath}`);
|
||||
|
||||
@@ -7,6 +7,7 @@ async function runScript(func) {
|
||||
if (processArgs.checkParent) {
|
||||
childProcessChecker();
|
||||
}
|
||||
|
||||
try {
|
||||
await func();
|
||||
process.exit(0);
|
||||
|
||||
@@ -16,23 +16,53 @@ function unzipDirectory(zipPath, outputDirectory) {
return new Promise((resolve, reject) => {
yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
if (err) return reject(err);

let settled = false;
/** Track active streams so we can destroy them on early abort */
const activeStreams = new Set();
const safeReject = rejectErr => {
if (settled) return;
settled = true;
for (const s of activeStreams) {
s.destroy();
}
activeStreams.clear();
zipFile.close();
reject(rejectErr);
};
/** Pending per-file extractions – we resolve the main promise after they’re all done */
const pending = [];

// Resolved output boundary used for zip-slip checks on every entry
const resolvedOutputDir = path.resolve(outputDirectory);

// kick things off
zipFile.readEntry();

zipFile.on('entry', entry => {
// Null-byte poison check
if (entry.fileName.includes('\0')) {
return safeReject(new Error(`DBGM-00000 ZIP entry with null byte in filename rejected`));
}

const destPath = path.join(outputDirectory, entry.fileName);
const resolvedDest = path.resolve(destPath);

// Zip-slip protection: every extracted path must stay inside outputDirectory
if (resolvedDest !== resolvedOutputDir && !resolvedDest.startsWith(resolvedOutputDir + path.sep)) {
return safeReject(
new Error(`DBGM-00000 ZIP slip detected: entry "${entry.fileName}" would escape output directory`)
);
}

// Handle directories (their names always end with “/” in ZIPs)
if (/\/$/.test(entry.fileName)) {
// Ensure directory exists, then continue to next entry
fs.promises
.mkdir(destPath, { recursive: true })
.then(() => zipFile.readEntry())
.catch(reject);
.then(() => {
if (!settled) zipFile.readEntry();
})
.catch(safeReject);
return;
}
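The entry handler rejects filenames containing null bytes and any entry whose resolved destination escapes the output directory (the classic zip-slip attack). The same check as a standalone predicate, convenient for tests; `isSafeZipEntry` is a hypothetical helper name.

```js
// Sketch: zip-slip and null-byte checks applied per archive entry.
const path = require('path');

function isSafeZipEntry(entryFileName, outputDirectory) {
  if (entryFileName.includes('\0')) return false; // null-byte poisoned name

  const resolvedOutputDir = path.resolve(outputDirectory);
  const resolvedDest = path.resolve(path.join(outputDirectory, entryFileName));

  // The destination must be the output dir itself or strictly inside it
  return (
    resolvedDest === resolvedOutputDir ||
    resolvedDest.startsWith(resolvedOutputDir + path.sep)
  );
}

// isSafeZipEntry('docs/readme.txt', '/tmp/out')   -> true
// isSafeZipEntry('../../etc/passwd', '/tmp/out')  -> false
```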
|
||||
|
||||
@@ -46,17 +76,29 @@ function unzipDirectory(zipPath, outputDirectory) {
|
||||
if (err) return rej(err);
|
||||
|
||||
const writeStream = fs.createWriteStream(destPath);
|
||||
activeStreams.add(readStream);
|
||||
activeStreams.add(writeStream);
|
||||
readStream.pipe(writeStream);
|
||||
|
||||
// proceed to next entry once we’ve consumed *this* one
|
||||
readStream.on('end', () => zipFile.readEntry());
|
||||
// proceed to next entry once we've consumed *this* one
|
||||
readStream.on('end', () => {
|
||||
activeStreams.delete(readStream);
|
||||
if (!settled) zipFile.readEntry();
|
||||
});
|
||||
|
||||
readStream.on('error', readErr => {
|
||||
activeStreams.delete(readStream);
|
||||
rej(readErr);
|
||||
});
|
||||
|
||||
writeStream.on('finish', () => {
|
||||
activeStreams.delete(writeStream);
|
||||
logger.info(`DBGM-00068 Extracted "${entry.fileName}" → "${destPath}".`);
|
||||
res();
|
||||
});
|
||||
|
||||
writeStream.on('error', writeErr => {
|
||||
activeStreams.delete(writeStream);
|
||||
logger.error(
|
||||
extractErrorLogData(writeErr),
|
||||
`DBGM-00069 Error extracting "${entry.fileName}" from "${zipPath}".`
|
||||
@@ -67,22 +109,29 @@ function unzipDirectory(zipPath, outputDirectory) {
|
||||
})
|
||||
);
|
||||
|
||||
// Immediately abort the whole unzip if this file fails; otherwise the
|
||||
// zip would never emit 'end' (lazyEntries won't advance without readEntry).
|
||||
filePromise.catch(safeReject);
|
||||
pending.push(filePromise);
|
||||
});
|
||||
|
||||
// Entire archive enumerated; wait for all streams to finish
|
||||
zipFile.on('end', () => {
|
||||
if (settled) return;
|
||||
Promise.all(pending)
|
||||
.then(() => {
|
||||
if (settled) return;
|
||||
settled = true;
|
||||
zipFile.close();
|
||||
logger.info(`DBGM-00070 Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
|
||||
resolve(true);
|
||||
})
|
||||
.catch(reject);
|
||||
.catch(safeReject);
|
||||
});
|
||||
|
||||
zipFile.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), `DBGM-00071 ZIP file error in ${zipPath}.`);
|
||||
reject(err);
|
||||
safeReject(err);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -698,6 +698,30 @@ module.exports = {
|
||||
"columnName": "id_original",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "httpProxyUrl",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "httpProxyUser",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "httpProxyPassword",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "defaultIsolationLevel",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
@@ -851,76 +875,106 @@ module.exports = {
|
||||
]
|
||||
},
|
||||
{
|
||||
"pureName": "password_reset_tokens",
|
||||
"pureName": "query_history",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "password_reset_tokens",
|
||||
"pureName": "query_history",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "password_reset_tokens",
|
||||
"pureName": "query_history",
|
||||
"columnName": "created",
|
||||
"dataType": "bigint",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "query_history",
|
||||
"columnName": "user_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "password_reset_tokens",
|
||||
"columnName": "token",
|
||||
"dataType": "varchar(500)",
|
||||
"notNull": true
|
||||
"pureName": "query_history",
|
||||
"columnName": "role_id",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "password_reset_tokens",
|
||||
"columnName": "created_at",
|
||||
"dataType": "datetime",
|
||||
"notNull": true
|
||||
"pureName": "query_history",
|
||||
"columnName": "sql",
|
||||
"dataType": "text",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "password_reset_tokens",
|
||||
"columnName": "expires_at",
|
||||
"dataType": "datetime",
|
||||
"notNull": true
|
||||
"pureName": "query_history",
|
||||
"columnName": "conid",
|
||||
"dataType": "varchar(100)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "password_reset_tokens",
|
||||
"columnName": "used_at",
|
||||
"dataType": "datetime",
|
||||
"pureName": "query_history",
|
||||
"columnName": "database",
|
||||
"dataType": "varchar(200)",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"constraintName": "FK_password_reset_tokens_user_id",
|
||||
"pureName": "password_reset_tokens",
|
||||
"constraintName": "FK_query_history_user_id",
|
||||
"pureName": "query_history",
|
||||
"refTableName": "users",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "user_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"constraintName": "FK_query_history_role_id",
|
||||
"pureName": "query_history",
|
||||
"refTableName": "roles",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "role_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"indexes": [
|
||||
{
|
||||
"constraintName": "idx_token",
|
||||
"pureName": "password_reset_tokens",
|
||||
"constraintName": "idx_query_history_user_id",
|
||||
"pureName": "query_history",
|
||||
"constraintType": "index",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "token"
|
||||
"columnName": "user_id"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"constraintName": "idx_query_history_role_id",
|
||||
"pureName": "query_history",
|
||||
"constraintType": "index",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "role_id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "password_reset_tokens",
|
||||
"pureName": "query_history",
|
||||
"constraintType": "primaryKey",
|
||||
"constraintName": "PK_password_reset_tokens",
|
||||
"constraintName": "PK_query_history",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
@@ -2252,6 +2306,84 @@ module.exports = {
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"columnName": "user_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"columnName": "token",
|
||||
"dataType": "varchar(500)",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"columnName": "created_at",
|
||||
"dataType": "varchar(32)",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"columnName": "expires_at",
|
||||
"dataType": "varchar(32)",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"columnName": "used_at",
|
||||
"dataType": "varchar(32)",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"constraintName": "FK_user_password_reset_tokens_user_id",
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"refTableName": "users",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "user_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"indexes": [
|
||||
{
|
||||
"constraintName": "idx_token",
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"constraintType": "index",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "token"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "user_password_reset_tokens",
|
||||
"constraintType": "primaryKey",
|
||||
"constraintName": "PK_user_password_reset_tokens",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "user_permissions",
|
||||
"columns": [
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
const fs = require('fs-extra');
|
||||
const { decryptConnection } = require('./crypting');
|
||||
const { decryptConnection, decryptPasswordString } = require('./crypting');
|
||||
const { getSshTunnelProxy } = require('./sshTunnelProxy');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const connections = require('../controllers/connections');
|
||||
const _ = require('lodash');
|
||||
const axios = require('axios');
|
||||
|
||||
async function loadConnection(driver, storedConnection, connectionMode) {
|
||||
const { allowShellConnection, allowConnectionFromEnvVariables } = platformInfo;
|
||||
@@ -132,11 +133,66 @@ async function connectUtility(driver, storedConnection, connectionMode, addition

connection.ssl = await extractConnectionSslParams(connection);

const proxyUrl = String(connection.httpProxyUrl ?? '').trim();
const proxyUser = String(connection.httpProxyUser ?? '').trim();
const proxyPassword = String(connection.httpProxyPassword ?? '').trim();
if (!proxyUrl && (proxyUser || proxyPassword)) {
throw new Error('DBGM-00329 Proxy user or password is set but proxy URL is missing');
}
if (proxyUrl) {
let parsedProxy;
try {
const parsed = new URL(proxyUrl.includes('://') ? proxyUrl : `http://${proxyUrl}`);
parsedProxy = {
protocol: parsed.protocol.replace(':', ''),
host: parsed.hostname,
port: parsed.port ? parseInt(parsed.port, 10) : (parsed.protocol === 'https:' ? 443 : 80),
};
const username = connection.httpProxyUser ?? parsed.username;
const rawPassword = connection.httpProxyPassword ?? parsed.password;
const password = decryptPasswordString(rawPassword);
if (username) {
parsedProxy.auth = { username, password: password ?? '' };
}
} catch (err) {
throw new Error(`DBGM-00334 Invalid proxy URL "${proxyUrl}": ${err && err.message ? err.message : err}`);
}
connection.axios = axios.default.create({ proxy: parsedProxy });
} else {
connection.axios = axios.default;
}

const conn = await driver.connect({ conid: connectionLoaded?._id, ...connection, ...additionalOptions });
return conn;
}
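connectUtility now turns httpProxyUrl/httpProxyUser/httpProxyPassword into an axios proxy configuration, defaulting the scheme to http:// and the port to 80 or 443. A sketch of just the parsing step, without the password decryption or axios wiring; `parseHttpProxy` is an illustrative name.

```js
// Sketch: scheme defaults to http://, port falls back to 443 for https and
// 80 otherwise, connection-level credentials override any embedded in the URL.
function parseHttpProxy(proxyUrl, user, password) {
  const parsed = new URL(proxyUrl.includes('://') ? proxyUrl : `http://${proxyUrl}`);
  const proxy = {
    protocol: parsed.protocol.replace(':', ''),
    host: parsed.hostname,
    port: parsed.port ? parseInt(parsed.port, 10) : parsed.protocol === 'https:' ? 443 : 80,
  };
  const username = user ?? parsed.username;
  if (username) {
    proxy.auth = { username, password: (password ?? parsed.password) || '' };
  }
  return proxy;
}

// parseHttpProxy('proxy.internal:3128', 'bob', 'secret')
//   -> { protocol: 'http', host: 'proxy.internal', port: 3128,
//        auth: { username: 'bob', password: 'secret' } }
```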
|
||||
|
||||
function getRestAuthFromConnection(connection) {
|
||||
if (!connection) return null;
|
||||
if (connection.authType == 'basic') {
|
||||
return {
|
||||
type: 'basic',
|
||||
user: connection.user,
|
||||
password: decryptPasswordString(connection.password),
|
||||
};
|
||||
}
|
||||
if (connection.authType == 'bearer') {
|
||||
return {
|
||||
type: 'bearer',
|
||||
token: connection.authToken,
|
||||
};
|
||||
}
|
||||
if (connection.authType == 'apikey') {
|
||||
return {
|
||||
type: 'apikey',
|
||||
header: connection.apiKeyHeader,
|
||||
value: connection.apiKeyValue,
|
||||
};
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
extractConnectionSslParams,
|
||||
connectUtility,
|
||||
getRestAuthFromConnection,
|
||||
};
|
||||
|
||||
@@ -101,7 +101,27 @@ function decryptObjectPasswordField(obj, field, encryptor = null) {
|
||||
return obj;
|
||||
}
|
||||
|
||||
const fieldsToEncrypt = ['password', 'sshPassword', 'sshKeyfilePassword', 'connectionDefinition'];
|
||||
const fieldsToEncrypt = ['password', 'sshPassword', 'sshKeyfilePassword', 'connectionDefinition', 'httpProxyPassword'];
|
||||
const additionalFieldsToMask = [
|
||||
'databaseUrl',
|
||||
'server',
|
||||
'port',
|
||||
'user',
|
||||
'sshBastionHost',
|
||||
'sshHost',
|
||||
'sshKeyFile',
|
||||
'sshLogin',
|
||||
'sshMode',
|
||||
'sshPort',
|
||||
'sslCaFile',
|
||||
'sslCertFilePassword',
|
||||
'sslKeyFile',
|
||||
'sslRejectUnauthorized',
|
||||
'secretAccessKey',
|
||||
'accessKeyId',
|
||||
'endpoint',
|
||||
'endpointKey',
|
||||
];
|
||||
|
||||
function encryptConnection(connection, encryptor = null) {
|
||||
if (connection.passwordMode != 'saveRaw') {
|
||||
@@ -114,7 +134,7 @@ function encryptConnection(connection, encryptor = null) {
|
||||
|
||||
function maskConnection(connection) {
|
||||
if (!connection) return connection;
|
||||
return _.omit(connection, fieldsToEncrypt);
|
||||
return _.omit(connection, [...fieldsToEncrypt, ...additionalFieldsToMask]);
|
||||
}
|
||||
|
||||
function decryptConnection(connection) {
|
||||
|
||||
@@ -25,8 +25,14 @@ function extractConnectionsFromEnv(env) {
socketPath: env[`SOCKET_PATH_${id}`],
serviceName: env[`SERVICE_NAME_${id}`],
authType: env[`AUTH_TYPE_${id}`] || (env[`SOCKET_PATH_${id}`] ? 'socket' : undefined),
defaultDatabase: env[`DATABASE_${id}`] || (env[`FILE_${id}`] ? getDatabaseFileLabel(env[`FILE_${id}`]) : null),
singleDatabase: !!env[`DATABASE_${id}`] || !!env[`FILE_${id}`],
defaultDatabase:
env[`DATABASE_${id}`] ||
(env[`FILE_${id}`]
? getDatabaseFileLabel(env[`FILE_${id}`])
: env[`APISERVERURL1_${id}`]
? '_api_database_'
: null),
singleDatabase: !!env[`DATABASE_${id}`] || !!env[`FILE_${id}`] || !!env[`APISERVERURL1_${id}`],
displayName: env[`LABEL_${id}`],
isReadOnly: env[`READONLY_${id}`],
databases: env[`DBCONFIG_${id}`] ? safeJsonParse(env[`DBCONFIG_${id}`]) : null,
@@ -54,6 +60,11 @@ function extractConnectionsFromEnv(env) {
sslKeyFile: env[`SSL_KEY_FILE_${id}`],
sslRejectUnauthorized: env[`SSL_REJECT_UNAUTHORIZED_${id}`],
trustServerCertificate: env[`SSL_TRUST_CERTIFICATE_${id}`],

apiServerUrl1: env[`APISERVERURL1_${id}`],
apiServerUrl2: env[`APISERVERURL2_${id}`],
apiKeyHeader: env[`APIKEYHEADER_${id}`],
apiKeyValue: env[`APIKEYVALUE_${id}`],
}));

return connections;
||||
|
||||
@@ -96,8 +96,9 @@ async function loadFilePermissionsFromRequest(req) {
|
||||
}
|
||||
|
||||
function matchDatabasePermissionRow(conid, database, permissionRow) {
|
||||
if (permissionRow.connection_id) {
|
||||
if (conid != permissionRow.connection_id) {
|
||||
const connectionIdentifier = permissionRow.connection_conid ?? permissionRow.connection_id;
|
||||
if (connectionIdentifier) {
|
||||
if (conid != connectionIdentifier) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -84,8 +84,12 @@ export function analyseCollectionDisplayColumns(rows, display) {
|
||||
if (res.find(x => x.uniqueName == added)) continue;
|
||||
res.push(getDisplayColumn([], added, display));
|
||||
}
|
||||
|
||||
// Use driver-specific column sorting if available
|
||||
const sortedColumns = display?.driver?.sortCollectionDisplayColumns ? display.driver.sortCollectionDisplayColumns(res) : res;
|
||||
|
||||
return (
|
||||
res.map(col => ({
|
||||
sortedColumns.map(col => ({
|
||||
...col,
|
||||
isChecked: display.isColumnChecked(col),
|
||||
})) || []
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import _ from 'lodash';
|
||||
import type { EngineDriver, ViewInfo, ColumnInfo } from 'dbgate-types';
|
||||
import { evalFilterBehaviour } from 'dbgate-tools';
|
||||
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
|
||||
import { GridConfig, GridCache } from './GridConfig';
|
||||
import { FreeTableModel } from './FreeTableModel';
|
||||
@@ -11,13 +12,15 @@ export class FreeTableGridDisplay extends GridDisplay {
|
||||
config: GridConfig,
|
||||
setConfig: ChangeConfigFunc,
|
||||
cache: GridCache,
|
||||
setCache: ChangeCacheFunc
|
||||
setCache: ChangeCacheFunc,
|
||||
options: { filterable?: boolean } = {}
|
||||
) {
|
||||
super(config, setConfig, cache, setCache);
|
||||
this.columns = model?.structure?.__isDynamicStructure
|
||||
? analyseCollectionDisplayColumns(model?.rows, this)
|
||||
: this.getDisplayColumns(model);
|
||||
this.filterable = false;
|
||||
this.filterable = options.filterable ?? false;
|
||||
this.filterBehaviourOverride = evalFilterBehaviour;
|
||||
this.sortable = false;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# dbmodel
|
||||
Deploy, load or build script from model of SQL database. Can be used as command-line tool. Uses [DbGate](https://dbgate.org) tooling and plugins for connecting many different databases.
|
||||
Deploy, load or build script from model of SQL database. Can be used as command-line tool. Uses [DbGate](www.dbgate.io) tooling and plugins for connecting many different databases.
|
||||
|
||||
If you want to use this tool from JavaScript interface, please use [dbgate-api](https://www.npmjs.com/package/dbgate-api) package.
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "dbmodel",
|
||||
"version": "7.0.0-alpha.1",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"homepage": "https://www.dbgate.io/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
|
||||
@@ -16,7 +16,46 @@ function getDateStringWithoutTimeZone(dateString) {

export function getFilterValueExpression(value, dataType?) {
if (value == null) return 'NULL';
if (isTypeDateTime(dataType)) return format(toDate(getDateStringWithoutTimeZone(value)), 'yyyy-MM-dd HH:mm:ss');
if (isTypeDateTime(dataType)) {
// Check for year as number (GROUP:YEAR)
if (typeof value === 'number' && Number.isInteger(value) && value >= 1000 && value <= 9999) {
return value.toString();
}

if (_isString(value)) {
// Year only
if (/^\d{4}$/.test(value)) {
return value;
}

// Year-month: validate month is in range 01-12
const yearMonthMatch = value.match(/^(\d{4})-(\d{1,2})$/);
if (yearMonthMatch) {
const month = parseInt(yearMonthMatch[2], 10);
if (month >= 1 && month <= 12) {
return value;
}
}

// Year-month-day: validate month and day
const yearMonthDayMatch = value.match(/^(\d{4})-(\d{1,2})-(\d{1,2})$/);
if (yearMonthDayMatch) {
const month = parseInt(yearMonthDayMatch[2], 10);
const day = parseInt(yearMonthDayMatch[3], 10);

// Quick validation: month 1-12, day 1-31
if (month >= 1 && month <= 12 && day >= 1 && day <= 31) {
// Construct a date to verify it's actually valid (e.g., reject 2024-02-30)
const dateStr = `${yearMonthDayMatch[1]}-${String(month).padStart(2, '0')}-${String(day).padStart(2, '0')}`;
const date = toDate(dateStr);
if (!isNaN(date.getTime())) {
return value;
}
}
}
}
return format(toDate(getDateStringWithoutTimeZone(value)), 'yyyy-MM-dd HH:mm:ss');
}
if (value === true) return 'TRUE';
if (value === false) return 'FALSE';
if (value.$oid) return `ObjectId("${value.$oid}")`;
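The new branches keep year, year-month, and year-month-day group values as-is instead of forcing them through the full datetime formatter, while still rejecting impossible dates. A few illustrative input/output pairs, assuming a dataType that isTypeDateTime accepts:

```js
// Expected behaviour after this change (illustrative, not a test fixture):
getFilterValueExpression(2024, 'datetime');          // -> '2024'        (GROUP:YEAR number)
getFilterValueExpression('2024-02', 'datetime');     // -> '2024-02'     (month in range)
getFilterValueExpression('2024-02-29', 'datetime');  // -> '2024-02-29'  (valid leap day kept)
getFilterValueExpression('2024-13', 'datetime');     // month out of range -> falls through to full formatting
getFilterValueExpression('2024-02-30', 'datetime');  // impossible date  -> falls through to full formatting
```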
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
lib
|
||||
@@ -0,0 +1,7 @@
|
||||
# dbgate-rest
|
||||
|
||||
REST API support for DbGate
|
||||
|
||||
## Installation
|
||||
|
||||
yarn add dbgate-rest
|
||||
@@ -0,0 +1,6 @@
|
||||
module.exports = {
|
||||
preset: 'ts-jest',
|
||||
testEnvironment: 'node',
|
||||
moduleFileExtensions: ['ts', 'js'],
|
||||
reporters: ['default', 'github-actions'],
|
||||
};
|
||||
@@ -0,0 +1,42 @@
|
||||
{
|
||||
"version": "7.0.0-alpha.1",
|
||||
"name": "dbgate-rest",
|
||||
"main": "lib/index.js",
|
||||
"typings": "lib/index.d.ts",
|
||||
"homepage": "https://www.dbgate.io/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dbgate/dbgate.git"
|
||||
},
|
||||
"author": "Jan Prochazka",
|
||||
"license": "GPL-3.0",
|
||||
"keywords": [
|
||||
"sql",
|
||||
"dbgate"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"start": "tsc --watch",
|
||||
"prepublishOnly": "yarn build",
|
||||
"test": "jest",
|
||||
"test:ci": "jest --json --outputFile=result.json --testLocationInResults"
|
||||
},
|
||||
"files": [
|
||||
"lib"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@types/node": "^13.7.0",
|
||||
"dbgate-types": "^7.0.0-alpha.1",
|
||||
"jest": "^28.1.3",
|
||||
"ts-jest": "^28.0.7",
|
||||
"typescript": "^4.4.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"dbgate-tools": "^7.0.0-alpha.1",
|
||||
"lodash": "^4.17.21",
|
||||
"openapi-types": "^12.1.3",
|
||||
"pinomin": "^1.0.5",
|
||||
"uuid": "^3.4.0",
|
||||
"js-yaml": "^4.1.0"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,90 @@
|
||||
type FlatObject = Record<string, any>;
|
||||
|
||||
function isPlainObject(value: any): value is Record<string, any> {
|
||||
return !!value && typeof value === 'object' && !Array.isArray(value);
|
||||
}
|
||||
|
||||
function flattenValue(value: any) {
|
||||
if (Array.isArray(value)) {
|
||||
const primitiveArray = value.every(item => item == null || typeof item !== 'object');
|
||||
if (primitiveArray) {
|
||||
return value.join(', ');
|
||||
}
|
||||
return JSON.stringify(value);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
function flattenObject(obj: Record<string, any>, prefix = '', out: FlatObject = {}, visited = new WeakSet()): FlatObject {
|
||||
if (visited.has(obj)) return out;
|
||||
visited.add(obj);
|
||||
|
||||
for (const [key, value] of Object.entries(obj)) {
|
||||
const nextKey = prefix ? `${prefix}.${key}` : key;
|
||||
|
||||
if (isPlainObject(value)) {
|
||||
flattenObject(value, nextKey, out, visited);
|
||||
continue;
|
||||
}
|
||||
|
||||
out[nextKey] = flattenValue(value);
|
||||
}
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
function unwrapArrayItem(item: any) {
|
||||
if (isPlainObject(item) && isPlainObject(item.node)) {
|
||||
return item.node;
|
||||
}
|
||||
return item;
|
||||
}
|
||||
|
||||
function collectArrayCandidates(
|
||||
value: any,
|
||||
set: Set<any[]>,
|
||||
visited = new WeakSet(),
|
||||
depth = 0
|
||||
): void {
|
||||
if (depth > 10) return;
|
||||
if (Array.isArray(value)) {
|
||||
set.add(value);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!isPlainObject(value)) return;
|
||||
if (visited.has(value)) return;
|
||||
visited.add(value);
|
||||
|
||||
if (Array.isArray(value.edges)) set.add(value.edges);
|
||||
if (Array.isArray(value.nodes)) set.add(value.nodes);
|
||||
if (Array.isArray(value.items)) set.add(value.items);
|
||||
|
||||
for (const nested of Object.values(value)) {
|
||||
collectArrayCandidates(nested, set, visited, depth + 1);
|
||||
}
|
||||
}
|
||||
|
||||
function findUniqueArrayCandidate(value: any): any[] | null {
|
||||
if (Array.isArray(value)) return value;
|
||||
|
||||
const candidates = new Set<any[]>();
|
||||
collectArrayCandidates(value, candidates);
|
||||
|
||||
if (candidates.size !== 1) return null;
|
||||
return candidates.values().next().value ?? null;
|
||||
}
|
||||
|
||||
export function arrayifyToFlatObjects(input: any): FlatObject[] | undefined {
|
||||
const arrayCandidate = findUniqueArrayCandidate(input);
|
||||
|
||||
if (!arrayCandidate) return undefined;
|
||||
|
||||
return arrayCandidate.map(item => {
|
||||
const unwrapped = unwrapArrayItem(item);
|
||||
if (isPlainObject(unwrapped)) {
|
||||
return flattenObject(unwrapped);
|
||||
}
|
||||
return { value: unwrapped };
|
||||
});
|
||||
}
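arrayifyToFlatObjects looks for a single candidate array in the response (edges/nodes/items or any nested array), unwraps Relay-style `{ node }` wrappers, and flattens each item into dotted keys, joining primitive arrays into comma-separated strings. A usage sketch with a GraphQL-shaped response; the input values are made up.

```js
// Illustrative input/output for arrayifyToFlatObjects.
const response = {
  data: {
    users: {
      edges: [
        { node: { id: 1, profile: { name: 'Ada' }, tags: ['admin', 'dev'] } },
        { node: { id: 2, profile: { name: 'Brian' }, tags: [] } },
      ],
    },
  },
};

arrayifyToFlatObjects(response);
// -> [
//      { id: 1, 'profile.name': 'Ada', tags: 'admin, dev' },
//      { id: 2, 'profile.name': 'Brian', tags: '' },
//    ]
// 'edges' is the only array candidate found, so it is used; each { node }
// wrapper is unwrapped before flattening.
```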
|
||||
@@ -0,0 +1,65 @@
|
||||
import type { EngineDriver } from 'dbgate-types';
|
||||
import { fetchGraphQLSchema, GraphQLIntrospectionResult } from './graphqlIntrospection';
|
||||
import { apiDriverBase } from './restDriverBase';
|
||||
import { buildRestAuthHeaders } from './restAuthTools';
|
||||
|
||||
async function loadGraphQlSchema(dbhan: any): Promise<GraphQLIntrospectionResult> {
|
||||
if (!dbhan?.connection?.apiServerUrl1) {
|
||||
throw new Error('DBGM-00310 GraphQL endpoint URL is not configured');
|
||||
}
|
||||
|
||||
const introspectionResult = await fetchGraphQLSchema(
|
||||
dbhan.connection.apiServerUrl1,
|
||||
buildRestAuthHeaders(dbhan.connection.restAuth),
|
||||
dbhan.axios
|
||||
);
|
||||
|
||||
if (!introspectionResult || typeof introspectionResult !== 'object') {
|
||||
throw new Error('DBGM-00311 GraphQL schema is empty or could not be loaded');
|
||||
}
|
||||
|
||||
return introspectionResult;
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
export const graphQlDriver: EngineDriver = {
|
||||
...apiDriverBase,
|
||||
engine: 'graphql@rest',
|
||||
title: 'GraphQL',
|
||||
databaseEngineTypes: ['rest', 'graphql'],
|
||||
icon: '<svg version="1.1" id="GraphQL_Logo" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 400 400" enable-background="new 0 0 400 400" xml:space="preserve"><g><g><g><rect x="122" y="-0.4" transform="matrix(-0.866 -0.5 0.5 -0.866 163.3196 363.3136)" fill="#E535AB" width="16.6" height="320.3"/></g></g><g><g><rect x="39.8" y="272.2" fill="#E535AB" width="320.3" height="16.6"/></g></g><g><g><rect x="37.9" y="312.2" transform="matrix(-0.866 -0.5 0.5 -0.866 83.0693 663.3409)" fill="#E535AB" width="185" height="16.6"/></g></g><g><g><rect x="177.1" y="71.1" transform="matrix(-0.866 -0.5 0.5 -0.866 463.3409 283.0693)" fill="#E535AB" width="185" height="16.6"/></g></g><g><g><rect x="122.1" y="-13" transform="matrix(-0.5 -0.866 0.866 -0.5 126.7903 232.1221)" fill="#E535AB" width="16.6" height="185"/></g></g><g><g><rect x="109.6" y="151.6" transform="matrix(-0.5 -0.866 0.866 -0.5 266.0828 473.3766)" fill="#E535AB" width="320.3" height="16.6"/></g></g><g><g><rect x="52.5" y="107.5" fill="#E535AB" width="16.6" height="185"/></g></g><g><g><rect x="330.9" y="107.5" fill="#E535AB" width="16.6" height="185"/></g></g><g><g><rect x="262.4" y="240.1" transform="matrix(-0.5 -0.866 0.866 -0.5 126.7953 714.2875)" fill="#E535AB" width="14.5" height="160.9"/></g></g><path fill="#E535AB" d="M369.5,297.9c-9.6,16.7-31,22.4-47.7,12.8c-16.7-9.6-22.4-31-12.8-47.7c9.6-16.7,31-22.4,47.7-12.8 C373.5,259.9,379.2,281.2,369.5,297.9"/><path fill="#E535AB" d="M90.9,137c-9.6,16.7-31,22.4-47.7,12.8c-16.7-9.6-22.4-31-12.8-47.7c9.6-16.7,31-22.4,47.7-12.8 C94.8,99,100.5,120.3,90.9,137"/><path fill="#E535AB" d="M30.5,297.9c-9.6-16.7-3.9-38,12.8-47.7c16.7-9.6,38-3.9,47.7,12.8c9.6,16.7,3.9,38-12.8,47.7 C61.4,320.3,40.1,314.6,30.5,297.9"/><path fill="#E535AB" d="M309.1,137c-9.6-16.7-3.9-38,12.8-47.7c16.7-9.6,38-3.9,47.7,12.8c9.6-16.7,3.9-38-12.8,47.7 C340.1,159.4,318.7,153.7,309.1,137"/><path fill="#E535AB" d="M200,395.8c-19.3,0-34.9-15.6-34.9-34.9c0-19.3,15.6-34.9,34.9-34.9c19.3,0,34.9,15.6,34.9,34.9 C234.9,380.1,219.3,395.8,200,395.8"/><path fill="#E535AB" d="M200,74c-19.3,0-34.9-15.6-34.9-34.9c0-19.3,15.6-34.9,34.9-34.9c19.3,0,34.9,15.6,34.9,34.9 C234.9,58.4,219.3,74,200,74"/></g></svg>',
|
||||
|
||||
showConnectionField: (field, values) => {
|
||||
if (apiDriverBase.showConnectionField(field, values)) return true;
|
||||
if (field === 'apiServerUrl1') return true;
|
||||
return false;
|
||||
},
|
||||
|
||||
apiServerUrl1Label: 'GraphQL Endpoint URL',
|
||||
|
||||
beforeConnectionSave: connection => ({
|
||||
...connection,
|
||||
singleDatabase: true,
|
||||
defaultDatabase: '_api_database_',
|
||||
}),
|
||||
|
||||
async connect(connection: any) {
|
||||
return {
|
||||
connection,
|
||||
client: null,
|
||||
database: '_api_database_',
|
||||
axios: connection.axios,
|
||||
};
|
||||
},
|
||||
|
||||
async getVersion(dbhan: any) {
|
||||
const introspectionResult = await loadGraphQlSchema(dbhan);
|
||||
const schema = introspectionResult.__schema;
|
||||
|
||||
// const version = 'GraphQL';
|
||||
|
||||
return {
|
||||
version: `GraphQL, ${schema.types?.length || 0} types`,
|
||||
};
|
||||
},
|
||||
};
|
||||
@@ -0,0 +1,235 @@
export function parseGraphQlSelectionPaths(text: string): {
  fieldPaths: string[];
  argumentPaths: string[];
  argumentValues: Record<string, Record<string, string>>;
} {
  if (!text) return { fieldPaths: [], argumentPaths: [], argumentValues: {} };
  const cleaned = text.replace(/#[^\n]*/g, '');

  const tokens: string[] =
    cleaned.match(
      /\.\.\.|"(?:[^"\\]|\\.)*"|[A-Za-z_][A-Za-z0-9_]*|\$[A-Za-z_][A-Za-z0-9_]*|-?\d+(?:\.\d+)?(?:[eE][+-]?\d+)?|[@{}()\[\],!:$]/g
    ) || [];
  const startIndex = tokens.indexOf('{');
  if (startIndex === -1) return { fieldPaths: [], argumentPaths: [], argumentValues: {} };

  const result = parseSelectionSet(tokens, startIndex, []);
  return {
    fieldPaths: result.fieldPaths.map(parts => parts.join('.')),
    argumentPaths: result.argumentPaths.map(parts => parts.join('.')),
    argumentValues: result.argumentValues,
  };
}

function parseArgumentValue(tokens: string[], startIndex: number): { value: string; endIndex: number } {
  const valueTokens: string[] = [];
  let index = startIndex;
  let parenthesesDepth = 0;
  let bracketDepth = 0;
  let braceDepth = 0;

  while (index < tokens.length) {
    const token = tokens[index];

    if (token === '(') {
      parenthesesDepth += 1;
      valueTokens.push(token);
      index += 1;
      continue;
    }

    if (token === '[') {
      bracketDepth += 1;
      valueTokens.push(token);
      index += 1;
      continue;
    }

    if (token === '{') {
      braceDepth += 1;
      valueTokens.push(token);
      index += 1;
      continue;
    }

    if (token === ')') {
      if (parenthesesDepth === 0 && bracketDepth === 0 && braceDepth === 0) {
        break;
      }
      parenthesesDepth -= 1;
      valueTokens.push(token);
      index += 1;
      continue;
    }

    if (token === ']') {
      if (bracketDepth === 0) break;
      bracketDepth -= 1;
      valueTokens.push(token);
      index += 1;
      continue;
    }

    if (token === '}') {
      if (braceDepth === 0) break;
      braceDepth -= 1;
      valueTokens.push(token);
      index += 1;
      continue;
    }

    if (token === ',' && parenthesesDepth === 0 && bracketDepth === 0 && braceDepth === 0) {
      break;
    }

    valueTokens.push(token);
    index += 1;
  }

  return {
    value: valueTokens.join(''),
    endIndex: index,
  };
}

function parseArgumentsFromField(
  tokens: string[],
  startIndex: number
): { arguments: { name: string; value: string }[]; endIndex: number } {
  const args: { name: string; value: string }[] = [];
  let index = startIndex;

  if (tokens[index] !== '(') {
    return { arguments: args, endIndex: index };
  }

  let depth = 1;
  index += 1;
  while (index < tokens.length && depth > 0) {
    if (tokens[index] === '(') depth += 1;
    if (tokens[index] === ')') depth -= 1;

    // Look for argument names (identifier followed by colon) and their values
    if (depth > 0 && /^[A-Za-z_]/.test(tokens[index]) && tokens[index + 1] === ':') {
      const argumentName = tokens[index];
      const { value, endIndex } = parseArgumentValue(tokens, index + 2);
      args.push({ name: argumentName, value });
      index = endIndex;
      if (tokens[index] === ',') {
        index += 1;
      }
    } else {
      index += 1;
    }
  }

  return { arguments: args, endIndex: index };
}

function parseSelectionSet(
  tokens: string[],
  startIndex: number,
  prefix: string[]
): {
  fieldPaths: string[][];
  argumentPaths: string[][];
  argumentValues: Record<string, Record<string, string>>;
  index: number;
} {
  const fieldPaths: string[][] = [];
  const argumentPaths: string[][] = [];
  const argumentValues: Record<string, Record<string, string>> = {};
  let index = startIndex + 1;

  while (index < tokens.length) {
    const token = tokens[index];
    if (token === '}') {
      return { fieldPaths, argumentPaths, argumentValues, index: index + 1 };
    }

    if (token === '...') {
      index += 1;
      if (tokens[index] === 'on') {
        index += 2;
      }
      while (index < tokens.length && tokens[index] !== '{' && tokens[index] !== '}') {
        index += 1;
      }
      if (tokens[index] === '{') {
        const frag = parseSelectionSet(tokens, index, prefix);
        fieldPaths.push(...frag.fieldPaths);
        argumentPaths.push(...frag.argumentPaths);
        for (const [fieldPath, values] of Object.entries(frag.argumentValues)) {
          argumentValues[fieldPath] = {
            ...(argumentValues[fieldPath] || {}),
            ...values,
          };
        }
        index = frag.index;
        continue;
      }
      continue;
    }

    if (/^[A-Za-z_]/.test(token)) {
      let fieldName = token;
      if (tokens[index + 1] === ':' && /^[A-Za-z_]/.test(tokens[index + 2] || '')) {
        fieldName = tokens[index + 2];
        index += 3;
      } else {
        index += 1;
      }

      // Parse arguments if present
      const { arguments: args, endIndex: argsEndIndex } = parseArgumentsFromField(tokens, index);
      index = argsEndIndex;

      // Add argument paths for this field
      const currentFieldPath = [...prefix, fieldName].join('.');
      for (const arg of args) {
        argumentPaths.push([...prefix, fieldName, arg.name]);
        if (!argumentValues[currentFieldPath]) {
          argumentValues[currentFieldPath] = {};
        }
        argumentValues[currentFieldPath][arg.name] = arg.value;
      }

      while (tokens[index] === '@') {
        index += 2;
        if (tokens[index] === '(') {
          let depth = 1;
          index += 1;
          while (index < tokens.length && depth > 0) {
            if (tokens[index] === '(') depth += 1;
            if (tokens[index] === ')') depth -= 1;
            index += 1;
          }
        }
      }

      if (tokens[index] === '{') {
        const nested = parseSelectionSet(tokens, index, [...prefix, fieldName]);
        if (nested.fieldPaths.length > 0) {
          fieldPaths.push(...nested.fieldPaths);
        } else {
          fieldPaths.push([...prefix, fieldName]);
        }
        argumentPaths.push(...nested.argumentPaths);
        for (const [fieldPath, values] of Object.entries(nested.argumentValues)) {
          argumentValues[fieldPath] = {
            ...(argumentValues[fieldPath] || {}),
            ...values,
          };
        }
        index = nested.index;
      } else {
        fieldPaths.push([...prefix, fieldName]);
      }
      continue;
    }

    index += 1;
  }

  return { fieldPaths, argumentPaths, argumentValues, index };
}
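A quick usage sketch (not part of the change set, with a made-up query): parseGraphQlSelectionPaths should yield dotted field paths plus the argument values it finds along the way.

import { parseGraphQlSelectionPaths } from './graphQlQueryParser';

const parsed = parseGraphQlSelectionPaths('query { users(first: 5) { id profile { email } } }');
// parsed.fieldPaths     → ['users.id', 'users.profile.email']
// parsed.argumentPaths  → ['users.first']
// parsed.argumentValues → { users: { first: '5' } }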
@@ -0,0 +1,127 @@
export type GraphQlVariableDefinition = {
  name: string;
  type: string;
};

export function extractGraphQlVariableDefinitions(text: string): GraphQlVariableDefinition[] {
  if (!text) return [];

  const cleaned = text.replace(/#[^\n]*/g, '');
  const regex = /\$([A-Za-z_][A-Za-z0-9_]*)\s*:\s*([^=,)\n]+)/g;
  const names = new Set<string>();
  const definitions: GraphQlVariableDefinition[] = [];

  let match: RegExpExecArray | null = null;
  while ((match = regex.exec(cleaned))) {
    const name = match[1];
    if (names.has(name)) continue;
    names.add(name);
    definitions.push({
      name,
      type: match[2].trim(),
    });
  }

  return definitions;
}

function unwrapNonNull(typeText: string): string {
  let current = (typeText || '').trim();
  while (current.endsWith('!')) {
    current = current.slice(0, -1).trim();
  }
  return current;
}

function isListType(typeText: string): boolean {
  const unwrapped = unwrapNonNull(typeText);
  return unwrapped.startsWith('[') && unwrapped.endsWith(']');
}

function getInnerListType(typeText: string): string {
  const unwrapped = unwrapNonNull(typeText);
  if (!(unwrapped.startsWith('[') && unwrapped.endsWith(']'))) return unwrapped;
  return unwrapped.slice(1, -1).trim();
}

function getBaseType(typeText: string): string {
  let current = unwrapNonNull(typeText);
  while (current.startsWith('[') && current.endsWith(']')) {
    current = current.slice(1, -1).trim();
    current = unwrapNonNull(current);
  }
  return current;
}

function parseJsonIfPossible(raw: string): any {
  const trimmed = (raw || '').trim();
  if (!trimmed) return null;
  try {
    return JSON.parse(trimmed);
  } catch {
    return raw;
  }
}

function toInt(raw: string): number | null {
  const trimmed = (raw || '').trim();
  if (!trimmed) return null;
  const num = Number(trimmed);
  if (!Number.isFinite(num)) return null;
  return Math.trunc(num);
}

function toFloat(raw: string): number | null {
  const trimmed = (raw || '').trim();
  if (!trimmed) return null;
  const num = Number(trimmed);
  if (!Number.isFinite(num)) return null;
  return num;
}

function toBoolean(raw: string): boolean | null {
  const lowered = (raw || '').trim().toLowerCase();
  if (!lowered) return null;
  if (['true', '1', 'yes', 'y', 'on'].includes(lowered)) return true;
  if (['false', '0', 'no', 'n', 'off'].includes(lowered)) return false;
  return null;
}

function convertByGraphQlTypeValue(raw: any, graphQlType: string): any {
  if (raw == null) return null;

  if (isListType(graphQlType)) {
    const innerType = getInnerListType(graphQlType);
    const parsed = typeof raw === 'string' ? parseJsonIfPossible(raw) : raw;
    const arrayValue = Array.isArray(parsed) ? parsed : [parsed];
    return arrayValue.map(item => convertByGraphQlTypeValue(item, innerType));
  }

  const baseType = getBaseType(graphQlType);
  const stringValue = typeof raw === 'string' ? raw : JSON.stringify(raw);

  if (baseType === 'Int') return toInt(stringValue);
  if (baseType === 'Float') return toFloat(stringValue);
  if (baseType === 'Boolean') return toBoolean(stringValue);
  if (baseType === 'String' || baseType === 'ID') return String(raw);

  if (typeof raw === 'string') {
    return parseJsonIfPossible(raw);
  }
  return raw;
}

export function convertGraphQlVariablesForRequest(
  queryText: string,
  rawVariables: Record<string, string> = {}
): Record<string, any> {
  const definitions = extractGraphQlVariableDefinitions(queryText || '');
  const next: Record<string, any> = {};

  for (const definition of definitions) {
    const raw = rawVariables?.[definition.name] ?? '';
    next[definition.name] = convertByGraphQlTypeValue(raw, definition.type);
  }

  return next;
}
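For illustration only (query and values are invented): raw string variable values are converted according to the declared variable types.

import { convertGraphQlVariablesForRequest } from './graphQlVariables';

const variables = convertGraphQlVariablesForRequest(
  'query ($first: Int!, $tags: [String!], $active: Boolean) { search { id } }',
  { first: '10', tags: '["a","b"]', active: 'yes' }
);
// variables → { first: 10, tags: ['a', 'b'], active: true }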
@@ -0,0 +1,175 @@
import type { GraphQLField, GraphQLInputValue, GraphQLIntrospectionResult, GraphQLType, GraphQLTypeRef } from './graphqlIntrospection';

export type GraphQLExplorerOperationType = 'query' | 'mutation' | 'subscription';

export interface GraphQLExplorerFieldNode {
  name: string;
  description?: string;
  typeName: string;
  typeDisplay: string;
  isLeaf: boolean;
  isArgument?: boolean;
  arguments?: GraphQLExplorerFieldNode[];
  children?: GraphQLExplorerFieldNode[];
}

export interface GraphQLExplorerOperation {
  operationType: GraphQLExplorerOperationType;
  rootTypeName: string;
  fields: GraphQLExplorerFieldNode[];
}

interface GraphQLExplorerOptions {
  maxDepth?: number;
}

const DEFAULT_MAX_DEPTH = 2;

function getTypeDisplay(typeRef: GraphQLTypeRef | null | undefined): string {
  if (!typeRef) return 'Unknown';
  if (typeRef.kind === 'NON_NULL') return `${getTypeDisplay(typeRef.ofType)}!`;
  if (typeRef.kind === 'LIST') return `[${getTypeDisplay(typeRef.ofType)}]`;
  return typeRef.name || 'Unknown';
}

function unwrapNamedType(typeRef: GraphQLTypeRef | null | undefined): GraphQLTypeRef | null {
  if (!typeRef) return null;
  if (typeRef.kind === 'NON_NULL' || typeRef.kind === 'LIST') return unwrapNamedType(typeRef.ofType);
  return typeRef;
}

function buildTypeMap(types: GraphQLType[]): Map<string, GraphQLType> {
  return new Map(types.map(type => [type.name, type]));
}

function isCompositeType(type: GraphQLType | undefined): boolean {
  return type?.kind === 'OBJECT' || type?.kind === 'INTERFACE';
}

function buildFieldNode(
  field: GraphQLField,
  typeMap: Map<string, GraphQLType>,
  depth: number,
  maxDepth: number,
  visitedTypes: Set<string>
): GraphQLExplorerFieldNode {
  const namedType = unwrapNamedType(field.type);
  const typeDef = namedType?.name ? typeMap.get(namedType.name) : undefined;
  const composite = isCompositeType(typeDef);
  const nextVisited = new Set(visitedTypes);

  if (typeDef?.name) {
    nextVisited.add(typeDef.name);
  }

  let children: GraphQLExplorerFieldNode[] | undefined;
  if (composite && depth < maxDepth && typeDef?.fields && !visitedTypes.has(typeDef.name)) {
    children = typeDef.fields.map(childField =>
      buildFieldNode(childField, typeMap, depth + 1, maxDepth, nextVisited)
    );
  }

  return {
    name: field.name,
    description: field.description,
    typeName: namedType?.name || 'Unknown',
    typeDisplay: getTypeDisplay(field.type),
    isLeaf: !composite || !children || children.length === 0,
    children,
  };
}

function buildOperationFields(
  rootTypeName: string,
  types: GraphQLType[],
  maxDepth: number
): GraphQLExplorerFieldNode[] {
  const typeMap = buildTypeMap(types);
  const rootType = typeMap.get(rootTypeName);
  if (!rootType?.fields) return [];

  return rootType.fields.map(field => buildFieldNode(field, typeMap, 1, maxDepth, new Set([rootTypeName])));
}

export function buildGraphQlExplorerOperations(
  introspectionResult: GraphQLIntrospectionResult,
  options: GraphQLExplorerOptions = {}
): GraphQLExplorerOperation[] {
  const { __schema } = introspectionResult || {};
  if (!__schema?.types) return [];

  const maxDepth = options.maxDepth ?? DEFAULT_MAX_DEPTH;
  const operations: GraphQLExplorerOperation[] = [];

  if (__schema.queryType?.name) {
    operations.push({
      operationType: 'query',
      rootTypeName: __schema.queryType.name,
      fields: buildOperationFields(__schema.queryType.name, __schema.types, maxDepth),
    });
  }

  if (__schema.mutationType?.name) {
    operations.push({
      operationType: 'mutation',
      rootTypeName: __schema.mutationType.name,
      fields: buildOperationFields(__schema.mutationType.name, __schema.types, maxDepth),
    });
  }

  if (__schema.subscriptionType?.name) {
    operations.push({
      operationType: 'subscription',
      rootTypeName: __schema.subscriptionType.name,
      fields: buildOperationFields(__schema.subscriptionType.name, __schema.types, maxDepth),
    });
  }

  return operations;
}

export function buildGraphQlQueryText(
  operationType: GraphQLExplorerOperationType,
  selectionPaths: string[],
  options: { operationName?: string; indent?: string } = {}
): string {
  const indent = options.indent ?? '  ';
  const opName = options.operationName?.trim();

  const tree = new Map<string, Map<string, any>>();
  for (const path of selectionPaths) {
    if (!path) continue;
    const parts = path.split('.').filter(Boolean);
    let node = tree;
    for (const part of parts) {
      if (!node.has(part)) {
        node.set(part, new Map());
      }
      node = node.get(part) as Map<string, any>;
    }
  }

  const renderTree = (node: Map<string, any>, level: number): string[] => {
    const lines: string[] = [];
    for (const [name, children] of node.entries()) {
      if (children.size === 0) {
        lines.push(`${indent.repeat(level)}${name}`);
      } else {
        lines.push(`${indent.repeat(level)}${name} {`);
        lines.push(...renderTree(children, level + 1));
        lines.push(`${indent.repeat(level)}}`);
      }
    }
    return lines;
  };

  const header = opName ? `${operationType} ${opName}` : operationType;
  const lines = [`${header} {`];
  if (tree.size > 0) {
    lines.push(...renderTree(tree, 1));
  }
  lines.push('}');

  return lines.join('\n');
}
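A minimal sketch of turning explorer selection paths back into query text (paths and operation name are hypothetical; output shown assumes the default two-space indent):

import { buildGraphQlQueryText } from './graphqlExplorer';

const queryText = buildGraphQlQueryText('query', ['users.id', 'users.profile.email'], { operationName: 'Explorer' });
// query Explorer {
//   users {
//     id
//     profile {
//       email
//     }
//   }
// }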
@@ -0,0 +1,495 @@
import type { RestApiDefinition } from './restApiDef';
import type { AxiosInstance } from 'axios';

const DEFAULT_INTROSPECTION_DEPTH = 6;

function buildTypeRefSelection(depth: number): string {
  if (depth <= 0) {
    return `
      kind
      name
    `;
  }

  return `
    kind
    name
    ofType {
      ${buildTypeRefSelection(depth - 1)}
    }
  `;
}

function buildIntrospectionQuery(maxDepth: number): string {
  const typeRefSelection = buildTypeRefSelection(maxDepth);

  return `
    query IntrospectionQuery {
      __schema {
        types {
          kind
          name
          description
          fields {
            name
            description
            type {
              ${typeRefSelection}
            }
            args {
              name
              description
              type {
                ${typeRefSelection}
              }
              defaultValue
            }
          }
          inputFields {
            name
            description
            type {
              ${typeRefSelection}
            }
          }
        }
        queryType {
          name
        }
        mutationType {
          name
        }
        subscriptionType {
          name
        }
      }
    }
  `;
}

export interface GraphQLTypeRef {
  kind: string;
  name?: string;
  ofType?: GraphQLTypeRef | null;
}

export interface GraphQLInputValue {
  name: string;
  description?: string;
  type: GraphQLTypeRef;
  defaultValue?: string;
}

export interface GraphQLField {
  name: string;
  description?: string;
  type: GraphQLTypeRef;
  args?: GraphQLInputValue[];
}

export interface GraphQLType {
  kind: string;
  name: string;
  description?: string;
  fields?: GraphQLField[];
  inputFields?: GraphQLField[];
  possibleTypes?: GraphQLTypeRef[];
}

export interface GraphQLIntrospectionResult {
  __schema: {
    types: GraphQLType[];
    queryType?: { name: string };
    mutationType?: { name: string };
    subscriptionType?: { name: string };
  };
}

function getTypeString(type: GraphQLTypeRef | null | undefined): string {
  if (!type) return 'Unknown';
  if (type.kind === 'NON_NULL') return getTypeString(type.ofType) + '!';
  if (type.kind === 'LIST') return '[' + getTypeString(type.ofType) + ']';
  return type.name || 'Unknown';
}

function findType(types: GraphQLType[], name: string): GraphQLType | undefined {
  return types.find(t => t.name === name);
}

function unwrapNamedTypeRef(typeRef: GraphQLTypeRef | null | undefined): GraphQLTypeRef | null {
  if (!typeRef) return null;
  if (typeRef.kind === 'NON_NULL' || typeRef.kind === 'LIST') return unwrapNamedTypeRef(typeRef.ofType);
  return typeRef;
}

function unwrapListTypeRef(typeRef: GraphQLTypeRef | null | undefined): GraphQLTypeRef | null {
  if (!typeRef) return null;
  if (typeRef.kind === 'NON_NULL') return unwrapListTypeRef(typeRef.ofType);
  if (typeRef.kind === 'LIST') return unwrapNamedTypeRef(typeRef.ofType);
  return null;
}

function buildTypeMap(types: GraphQLType[]): Map<string, GraphQLType> {
  return new Map((types || []).map(type => [type.name, type]));
}

function isScalarLikeField(field: GraphQLField, typeMap: Map<string, GraphQLType>): boolean {
  const namedType = unwrapNamedTypeRef(field.type);
  if (!namedType?.name) return false;
  const type = typeMap.get(namedType.name);
  if (!type) return namedType.kind === 'SCALAR' || namedType.kind === 'ENUM';
  return type.kind === 'SCALAR' || type.kind === 'ENUM';
}

export function scoreFieldName(name: string): number {
  const lowerName = (name || '').toLowerCase();
  const exactOrder = [
    'id',
    'name',
    'title',
    'email',
    'username',
    'status',
    'createdat',
    'updatedat',
    'type',
    'code',
    'key',
  ];

  const exactIndex = exactOrder.indexOf(lowerName);
  if (exactIndex >= 0) {
    return 500 - exactIndex;
  }

  if (lowerName.endsWith('id')) return 300;
  if (lowerName.includes('name')) return 280;
  if (lowerName.includes('title')) return 260;
  if (lowerName.includes('email')) return 240;
  if (lowerName.includes('status')) return 220;
  if (lowerName.includes('date') || lowerName.endsWith('at')) return 200;
  return 100;
}

export function chooseUsefulNodeAttributes(nodeType: GraphQLType | undefined, typeMap: Map<string, GraphQLType>): string[] {
  if (!nodeType?.fields?.length) return ['__typename'];

  const scalarFields = nodeType.fields.filter(field => isScalarLikeField(field, typeMap));
  if (scalarFields.length === 0) return ['__typename'];

  return scalarFields
    .map((field, index) => ({
      field,
      score: scoreFieldName(field.name),
      index,
    }))
    .sort((left, right) => {
      if (right.score !== left.score) return right.score - left.score;
      return left.index - right.index;
    })
    .slice(0, 10)
    .map(item => item.field.name);
}

function stringifyArgumentValue(argumentTypeRef: GraphQLTypeRef | null | undefined, value: number | string): string {
  const namedType = unwrapNamedTypeRef(argumentTypeRef);
  if (!namedType?.name) {
    // Fallback: safely stringify as a JSON string literal
    return JSON.stringify(String(value));
  }

  const typeName = namedType.name.toLowerCase();
  if (typeName === 'int' || typeName === 'float') {
    const numValue = typeof value === 'number' ? value : Number(value);
    if (Number.isFinite(numValue)) {
      return String(numValue);
    }
    // If the value cannot be parsed as a valid number, fall back to a quoted string
    return JSON.stringify(String(value));
  }

  // For non-numeric types, safely serialize as a JSON string literal
  return JSON.stringify(String(value));
}

export function buildFirstTenArgs(field: GraphQLField, filterParamName?: string | null, filterValue?: string): string {
  const args = field.args || [];
  if (args.length === 0) return '';

  const argPairs: string[] = [];

  // Add pagination argument
  const candidates = ['first', 'limit', 'pagesize', 'perpage', 'take', 'size', 'count', 'maxresults'];
  const paginationArg = args.find(item => candidates.includes((item.name || '').toLowerCase()));
  if (paginationArg) {
    argPairs.push(`${paginationArg.name}: ${stringifyArgumentValue(paginationArg.type, 10)}`);
  }

  // Add filter argument if provided
  if (filterParamName && filterValue) {
    const filterArg = args.find(item => item.name === filterParamName);
    if (filterArg) {
      argPairs.push(`${filterParamName}: ${stringifyArgumentValue(filterArg.type, filterValue)}`);
    }
  }

  if (argPairs.length === 0) return '';
  return `(${argPairs.join(', ')})`;
}

export type GraphQLConnectionProjection =
  | {
      kind: 'edges';
      nodeTypeName: string;
      hasPageInfo: boolean;
    }
  | {
      kind: 'listField';
      listFieldName: string;
      nodeTypeName: string;
    };

export function detectConnectionProjection(
  field: GraphQLField,
  typeMap: Map<string, GraphQLType>
): GraphQLConnectionProjection | null {
  const fieldTypeRef = unwrapNamedTypeRef(field.type);
  if (!fieldTypeRef?.name) return null;

  const returnType = typeMap.get(fieldTypeRef.name);
  if (!returnType || returnType.kind !== 'OBJECT' || !returnType.fields?.length) return null;

  const edgesField = returnType.fields.find(item => item.name === 'edges');
  if (edgesField) {
    const edgeTypeRef = unwrapListTypeRef(edgesField.type);
    if (edgeTypeRef?.name) {
      const edgeType = typeMap.get(edgeTypeRef.name);
      const nodeField = edgeType?.fields?.find(item => item.name === 'node');
      const nodeTypeRef = unwrapNamedTypeRef(nodeField?.type);
      if (nodeTypeRef?.name) {
        const hasPageInfo = !!returnType.fields.find(item => item.name === 'pageInfo');
        return {
          kind: 'edges',
          nodeTypeName: nodeTypeRef.name,
          hasPageInfo,
        };
      }
    }
  }

  const listFieldNames = ['nodes', 'items', 'results', 'data'];
  for (const listFieldName of listFieldNames) {
    const listField = returnType.fields.find(item => item.name === listFieldName);
    if (!listField) continue;
    const listItemTypeRef = unwrapListTypeRef(listField.type);
    if (!listItemTypeRef?.name) continue;
    return {
      kind: 'listField',
      listFieldName,
      nodeTypeName: listItemTypeRef.name,
    };
  }

  return null;
}

function buildConnectionQuery(field: GraphQLField, typeMap: Map<string, GraphQLType>): string | null {
  const projection = detectConnectionProjection(field, typeMap);
  if (!projection) return null;

  const nodeType = typeMap.get(projection.nodeTypeName);
  const selectedAttributes = chooseUsefulNodeAttributes(nodeType, typeMap);
  const argsString = buildFirstTenArgs(field);
  const attributeBlock = selectedAttributes.map(attr => `        ${attr}`).join('\n');

  if (projection.kind === 'edges') {
    const pageInfoBlock = projection.hasPageInfo
      ? `
    pageInfo {
      hasNextPage
      endCursor
    }`
      : '';

    return `query {
  ${field.name}${argsString} {
    edges {
      node {
${attributeBlock}
      }
    }${pageInfoBlock}
  }
}`;
  }

  return `query {
  ${field.name}${argsString} {
    ${projection.listFieldName} {
${attributeBlock}
    }
  }
}`;
}

function buildConnectionEndpoints(
  types: GraphQLType[],
  rootTypeName?: string
): Array<{
  name: string;
  description?: string;
  fields?: string;
  connectionQuery?: string;
}> {
  if (!rootTypeName) return [];

  const rootType = findType(types, rootTypeName);
  if (!rootType?.fields?.length) return [];

  const typeMap = buildTypeMap(types);
  const connectionEndpoints = [];

  for (const field of rootType.fields) {
    const connectionQuery = buildConnectionQuery(field, typeMap);
    if (!connectionQuery) continue;

    connectionEndpoints.push({
      name: field.name,
      description: field.description || '',
      fields: field.description,
      connectionQuery,
    });
  }

  return connectionEndpoints;
}

function buildOperationEndpoints(
  types: GraphQLType[],
  operationType: 'OBJECT',
  rootTypeName?: string
): Array<{ name: string; description?: string; fields?: string }> {
  if (!rootTypeName) return [];
  const rootType = findType(types, rootTypeName);
  if (!rootType || !rootType.fields) return [];

  return rootType.fields.map(field => ({
    name: field.name,
    description: field.description || '',
    fields: field.description,
  }));
}

export function extractRestApiDefinitionFromGraphQlIntrospectionResult(
  introspectionResult: GraphQLIntrospectionResult
): RestApiDefinition {
  const { __schema } = introspectionResult;
  const categories: any[] = [];

  // Connections (query fields returning connection-like payloads)
  if (__schema.queryType?.name) {
    const connectionEndpoints = buildConnectionEndpoints(__schema.types, __schema.queryType.name);
    if (connectionEndpoints.length > 0) {
      categories.push({
        name: 'Connections',
        endpoints: connectionEndpoints.map(connection => ({
          method: 'POST',
          path: connection.name,
          summary: connection.description,
          description: connection.fields,
          parameters: [],
          connectionQuery: connection.connectionQuery,
        })),
      });
    }
  }

  // Queries
  if (__schema.queryType?.name) {
    const queryEndpoints = buildOperationEndpoints(__schema.types, 'OBJECT', __schema.queryType.name);
    if (queryEndpoints.length > 0) {
      categories.push({
        name: 'Queries',
        endpoints: queryEndpoints.map(q => ({
          method: 'POST',
          path: q.name,
          summary: q.description,
          description: q.fields,
          parameters: [],
        })),
      });
    }
  }

  // Mutations
  if (__schema.mutationType?.name) {
    const mutationEndpoints = buildOperationEndpoints(__schema.types, 'OBJECT', __schema.mutationType.name);
    if (mutationEndpoints.length > 0) {
      categories.push({
        name: 'Mutations',
        endpoints: mutationEndpoints.map(m => ({
          method: 'POST',
          path: m.name,
          summary: m.description,
          description: m.fields,
          parameters: [],
        })),
      });
    }
  }

  // Subscriptions
  if (__schema.subscriptionType?.name) {
    const subscriptionEndpoints = buildOperationEndpoints(__schema.types, 'OBJECT', __schema.subscriptionType.name);
    if (subscriptionEndpoints.length > 0) {
      categories.push({
        name: 'Subscriptions',
        endpoints: subscriptionEndpoints.map(s => ({
          method: 'POST',
          path: s.name,
          summary: s.description,
          description: s.fields,
          parameters: [],
        })),
      });
    }
  }

  return {
    categories,
    servers: [],
  };
}

export async function fetchGraphQLSchema(
  url: string,
  headers: Record<string, string>,
  axios: AxiosInstance,
  maxDepth: number = DEFAULT_INTROSPECTION_DEPTH
): Promise<GraphQLIntrospectionResult> {
  try {
    const query = buildIntrospectionQuery(maxDepth);
    const response = await axios.post(
      url,
      { query },
      {
        timeout: 10000,
        headers: {
          'Content-Type': 'application/json',
          ...headers,
        },
      }
    );

    if (response.data.errors) {
      throw new Error(`GraphQL introspection error: ${JSON.stringify(response.data.errors)}`);
    }

    if (!response.data.data) {
      throw new Error('Invalid introspection response: no data field');
    }

    return response.data.data as GraphQLIntrospectionResult;
  } catch (err: any) {
    throw new Error(`DBGM-00312 Could not fetch GraphQL schema: ${err.message}`);
  }
}
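A hedged end-to-end sketch, not part of the change set: fetch the introspection schema with an axios instance and turn it into the REST-style endpoint catalog. The helper name, endpoint URL and bearer header are assumptions for illustration.

import axios from 'axios';
import { fetchGraphQLSchema, extractRestApiDefinitionFromGraphQlIntrospectionResult } from './graphqlIntrospection';

// Hypothetical helper: introspect a GraphQL endpoint and expose it as REST-like categories.
async function loadGraphQlEndpointCatalog(url: string, token?: string) {
  const headers = token ? { Authorization: `Bearer ${token}` } : {};
  const introspectionResult = await fetchGraphQLSchema(url, headers, axios);
  return extractRestApiDefinitionFromGraphQlIntrospectionResult(introspectionResult);
}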
@@ -0,0 +1,13 @@
export * from './openApiDriver';
export * from './oDataDriver';
export * from './graphQlDriver';
export * from './openApiAdapter';
export * from './oDataAdapter';
export * from './oDataMetadataParser';
export * from './restApiExecutor';
export * from './arrayify';
export * from './graphqlIntrospection';
export * from './graphqlExplorer';
export * from './graphQlQueryParser';
export * from './graphQlVariables';
export * from './restAuthTools';
@@ -0,0 +1,70 @@
const { analyseODataDefinition } = require('./oDataAdapter');

function findEndpoint(apiInfo, path, method = 'GET') {
  return apiInfo.categories
    .flatMap(category => category.endpoints)
    .find(endpoint => endpoint.path === path && endpoint.method === method);
}

test('deduces mandatory company parameter for customers and items from ContainsTarget metadata', () => {
  const serviceDocument = {
    '@odata.context': 'https://example/odata/$metadata',
    value: [
      { name: 'companies', kind: 'EntitySet', url: 'companies' },
      { name: 'customers', kind: 'EntitySet', url: 'customers' },
      { name: 'items', kind: 'EntitySet', url: 'items' },
    ],
  };

  const metadataXml = `<?xml version="1.0" encoding="utf-8"?>
<edmx:Edmx Version="4.0" xmlns:edmx="http://docs.oasis-open.org/odata/ns/edmx">
  <edmx:DataServices>
    <Schema Namespace="Microsoft.NAV" Alias="NAV" xmlns="http://docs.oasis-open.org/odata/ns/edm">
      <EntityType Name="company">
        <Key><PropertyRef Name="id"/></Key>
        <Property Name="id" Type="Edm.Guid"/>
        <Property Name="displayName" Type="Edm.String"/>
        <NavigationProperty Name="customers" Type="Collection(NAV.customer)" ContainsTarget="true" />
        <NavigationProperty Name="items" Type="Collection(NAV.item)" ContainsTarget="true" />
      </EntityType>
      <EntityType Name="customer">
        <Property Name="id" Type="Edm.Guid"/>
      </EntityType>
      <EntityType Name="item">
        <Property Name="id" Type="Edm.Guid"/>
      </EntityType>
      <EntityContainer Name="default">
        <EntitySet Name="companies" EntityType="NAV.company">
          <NavigationPropertyBinding Path="customers" Target="customers"/>
          <NavigationPropertyBinding Path="items" Target="items"/>
        </EntitySet>
        <EntitySet Name="customers" EntityType="NAV.customer"/>
        <EntitySet Name="items" EntityType="NAV.item"/>
      </EntityContainer>
    </Schema>
  </edmx:DataServices>
</edmx:Edmx>`;

  const apiInfo = analyseODataDefinition(serviceDocument, 'https://example/odata', metadataXml);

  const customersGet = findEndpoint(apiInfo, '/customers', 'GET');
  const itemsGet = findEndpoint(apiInfo, '/items', 'GET');

  expect(customersGet).toBeDefined();
  expect(itemsGet).toBeDefined();

  const customersCompany = customersGet.parameters.find(param => param.name === 'company');
  const itemsCompany = itemsGet.parameters.find(param => param.name === 'company');

  expect(customersCompany).toBeDefined();
  expect(customersCompany.required).toBe(true);
  expect(customersCompany.in).toBe('query');
  expect(customersCompany.odataLookupEntitySet).toBe('companies');
  expect(customersCompany.odataLookupPath).toBe('/companies');

  expect(itemsCompany).toBeDefined();
  expect(itemsCompany.required).toBe(true);
  expect(itemsCompany.in).toBe('query');
  expect(itemsCompany.odataLookupEntitySet).toBe('companies');
  expect(itemsCompany.odataLookupPath).toBe('/companies');
});
@@ -0,0 +1,458 @@
import { RestApiDefinition, RestApiEndpoint, RestApiParameter, RestApiServer } from './restApiDef';
import { parseODataMetadataDocument } from './oDataMetadataParser';

export type ODataServiceResource = {
  name?: string;
  kind?: string;
  url?: string;
};

export type ODataServiceDocument = {
  '@odata.context'?: string;
  value?: ODataServiceResource[];
};

export interface ODataMetadataNavigationProperty {
  name: string;
  type?: string;
  containsTarget: boolean;
  nullable: boolean;
}

export interface ODataMetadataEntityType {
  typeName: string;
  fullTypeName: string;
  keyProperties: string[];
  stringProperties: string[];
  navigationProperties: ODataMetadataNavigationProperty[];
}

export interface ODataMetadataEntitySet {
  name: string;
  entityType: string;
  navigationBindings: Record<string, string>;
}

export interface ODataMetadataDocument {
  entityTypes: Record<string, ODataMetadataEntityType>;
  entitySets: Record<string, ODataMetadataEntitySet>;
}

function normalizeServiceRoot(contextUrl: string | undefined, fallbackUrl: string): string {
  const safeFallback = String(fallbackUrl ?? '').trim();

  if (typeof contextUrl === 'string' && contextUrl.trim()) {
    try {
      const resolved = new URL(contextUrl.trim(), safeFallback || undefined);
      resolved.hash = '';
      resolved.search = '';
      // Strip a trailing "/$metadata" segment ("$" must be escaped in the regex)
      resolved.pathname = resolved.pathname.replace(/\/\$metadata$/i, '');

      const url = resolved.toString();
      return url.endsWith('/') ? url : `${url}/`;
    } catch {
      // ignore, fallback below
    }
  }

  return safeFallback.endsWith('/') ? safeFallback : `${safeFallback}/`;
}

function normalizeEndpointPath(valueUrl: string | undefined): string | null {
  const input = String(valueUrl ?? '').trim();
  if (!input) return null;

  try {
    const parsed = new URL(input, 'http://odata.local');
    const pathWithQuery = `${parsed.pathname}${parsed.search}`;
    return pathWithQuery.startsWith('/') ? pathWithQuery : `/${pathWithQuery}`;
  } catch {
    return input.startsWith('/') ? input : `/${input}`;
  }
}

function inferMethods(kind: string | undefined): RestApiEndpoint['method'][] {
  const normalizedKind = String(kind ?? '').toLowerCase();

  if (normalizedKind === 'actionimport') return ['POST'];
  if (normalizedKind === 'entityset') return ['GET', 'POST'];
  return ['GET'];
}

function toLowerCamelCase(value: string | undefined): string {
  const text = String(value ?? '').trim();
  if (!text) return '';
  return text.charAt(0).toLowerCase() + text.slice(1);
}

function normalizeSingularName(value: string | undefined): string {
  const text = String(value ?? '').trim();
  if (!text) return '';
  if (/ies$/i.test(text)) return `${text.slice(0, -3)}y`;
  if (/sses$/i.test(text)) return text;
  if (/s$/i.test(text) && text.length > 1) return text.slice(0, -1);
  return text;
}

function normalizePluralName(value: string | undefined): string {
  const text = String(value ?? '').trim();
  if (!text) return '';
  if (/y$/i.test(text)) return `${text.slice(0, -1)}ies`;
  if (/s$/i.test(text)) return text;
  return `${text}s`;
}

function normalizeEntityTypeName(typeName: string | undefined): string {
  const text = String(typeName ?? '').trim();
  if (!text) return '';

  const collectionMatch = text.match(/^Collection\((.+)\)$/i);
  const unwrapped = collectionMatch ? collectionMatch[1] : text;
  const slashStripped = unwrapped.includes('/') ? unwrapped.split('/').pop() || unwrapped : unwrapped;
  return slashStripped.trim();
}

function buildTypeReferenceKeys(typeReference: string | undefined): string[] {
  const normalizedReference = normalizeEntityTypeName(typeReference);
  if (!normalizedReference) return [];

  const keys = new Set<string>();
  const lower = normalizedReference.toLowerCase();
  keys.add(lower);

  const withoutNamespace = normalizedReference.includes('.')
    ? normalizedReference.split('.').pop() || normalizedReference
    : normalizedReference;
  keys.add(withoutNamespace.toLowerCase());

  return Array.from(keys);
}

function buildEntityTypeLookup(entityTypes: Record<string, ODataMetadataEntityType>): Map<string, ODataMetadataEntityType> {
  const lookup = new Map<string, ODataMetadataEntityType>();

  for (const [entityTypeKey, entityType] of Object.entries(entityTypes || {})) {
    const keys = new Set<string>([
      ...buildTypeReferenceKeys(entityTypeKey),
      ...buildTypeReferenceKeys(entityType.fullTypeName),
      ...buildTypeReferenceKeys(entityType.typeName),
    ]);

    for (const key of keys) {
      if (!lookup.has(key)) {
        lookup.set(key, entityType);
      }
    }
  }

  return lookup;
}

function resolveEntityType(
  entityTypeLookup: Map<string, ODataMetadataEntityType>,
  typeReference: string | undefined
): ODataMetadataEntityType | null {
  const keys = buildTypeReferenceKeys(typeReference);
  for (const key of keys) {
    const found = entityTypeLookup.get(key);
    if (found) return found;
  }
  return null;
}

function resolveLookupPath(entitySetName: string, serviceResourceMap: Map<string, ODataServiceResource>): string {
  const serviceResource = serviceResourceMap.get(entitySetName);
  const resourceUrl = String(serviceResource?.url ?? '').trim();
  if (!resourceUrl) return `/${entitySetName}`;
  return resourceUrl.startsWith('/') ? resourceUrl : `/${resourceUrl}`;
}

function buildServiceResourceNameLookup(resources: ODataServiceResource[]): Map<string, string> {
  const lookup = new Map<string, string>();
  for (const resource of resources || []) {
    const resourceName = String(resource?.name ?? '').trim();
    if (!resourceName) continue;
    const lower = resourceName.toLowerCase();
    if (!lookup.has(lower)) {
      lookup.set(lower, resourceName);
    }
  }
  return lookup;
}

function resolveServiceResourceNameForEntityType(
  entityType: ODataMetadataEntityType,
  serviceResourceNameLookup: Map<string, string>
): string | null {
  const baseNames = [
    String(entityType?.typeName ?? '').trim(),
    normalizeSingularName(entityType?.typeName),
    normalizeEntityTypeName(entityType?.fullTypeName),
    normalizeSingularName(normalizeEntityTypeName(entityType?.fullTypeName)),
  ].filter(Boolean);

  const candidates = new Set<string>();
  for (const baseName of baseNames) {
    candidates.add(baseName);
    candidates.add(normalizeSingularName(baseName));
    candidates.add(normalizePluralName(baseName));
  }

  for (const candidate of candidates) {
    const matched = serviceResourceNameLookup.get(String(candidate).toLowerCase());
    if (matched) return matched;
  }

  return null;
}

type MandatoryNavigationTargetParameter = {
  name: string;
  lookupEntitySet: string;
  lookupPath: string;
  lookupValueField?: string;
  lookupLabelField?: string;
};

type MandatoryNavigationByTarget = Record<string, MandatoryNavigationTargetParameter[]>;

type ParentNavigationContext = {
  parentEntitySetName: string;
  parentType: ODataMetadataEntityType;
  navigationBindings: Record<string, string>;
};

function deduceMandatoryNavigationByTarget(
  metadataDocument: ODataMetadataDocument | null,
  resources: ODataServiceResource[]
): MandatoryNavigationByTarget {
  if (!metadataDocument) return {};

  const entityTypeLookup = buildEntityTypeLookup(metadataDocument.entityTypes || {});

  const serviceResourceMap = new Map<string, ODataServiceResource>();
  for (const resource of resources) {
    const resourceName = String(resource?.name ?? '').trim();
    if (resourceName) {
      serviceResourceMap.set(resourceName, resource);
    }
  }
  const serviceResourceNameLookup = buildServiceResourceNameLookup(resources);

  const entitySetsByEntityType = new Map<string, string[]>();
  for (const [entitySetName, entitySet] of Object.entries(metadataDocument.entitySets || {})) {
    const typeKeys = buildTypeReferenceKeys(entitySet?.entityType);
    if (typeKeys.length === 0) continue;

    for (const typeKey of typeKeys) {
      const list = entitySetsByEntityType.get(typeKey) || [];
      if (!list.includes(entitySetName)) {
        list.push(entitySetName);
        entitySetsByEntityType.set(typeKey, list);
      }
    }
  }

  const mandatoryByTarget: MandatoryNavigationByTarget = {};
  const parentContexts: ParentNavigationContext[] = [];
  const parentTypeKeysCovered = new Set<string>();

  for (const [parentEntitySetName, parentEntitySet] of Object.entries(metadataDocument.entitySets || {})) {
    const parentType = resolveEntityType(entityTypeLookup, parentEntitySet.entityType);
    if (!parentType) continue;

    parentContexts.push({
      parentEntitySetName,
      parentType,
      navigationBindings: parentEntitySet.navigationBindings || {},
    });

    for (const typeKey of buildTypeReferenceKeys(parentEntitySet.entityType)) {
      parentTypeKeysCovered.add(typeKey);
    }
  }

  for (const entityType of Object.values(metadataDocument.entityTypes || {})) {
    const typeKeys = [
      ...buildTypeReferenceKeys(entityType.fullTypeName),
      ...buildTypeReferenceKeys(entityType.typeName),
    ];
    const alreadyCovered = typeKeys.some(typeKey => parentTypeKeysCovered.has(typeKey));
    if (alreadyCovered) continue;

    if (!Array.isArray(entityType.navigationProperties) || entityType.navigationProperties.length === 0) {
      continue;
    }

    const parentEntitySetName = resolveServiceResourceNameForEntityType(entityType, serviceResourceNameLookup);
    if (!parentEntitySetName) continue;

    parentContexts.push({
      parentEntitySetName,
      parentType: entityType,
      navigationBindings: {},
    });

    for (const typeKey of typeKeys) {
      parentTypeKeysCovered.add(typeKey);
    }
  }

  for (const { parentEntitySetName, parentType, navigationBindings } of parentContexts) {
    const parentParamName =
      toLowerCamelCase(parentType.typeName) ||
      toLowerCamelCase(normalizeSingularName(parentEntitySetName)) ||
      toLowerCamelCase(parentEntitySetName);

    if (!parentParamName) continue;

    for (const navProperty of parentType.navigationProperties || []) {
      if (!navProperty.containsTarget) continue;

      const targetNames = new Set<string>();
      const directBoundTarget = navigationBindings?.[navProperty.name];
      if (directBoundTarget) {
        targetNames.add(directBoundTarget);
      }

      const navTypeKeys = buildTypeReferenceKeys(navProperty.type);
      if (navTypeKeys.length > 0) {
        const typeTargets = navTypeKeys.flatMap(typeKey => entitySetsByEntityType.get(typeKey) || []);
        for (const targetName of typeTargets) {
          targetNames.add(targetName);
        }
      }

      for (const targetEntitySetName of targetNames) {
        const targetList = mandatoryByTarget[targetEntitySetName] || [];
        const exists = targetList.some(item => item.name.toLowerCase() === parentParamName.toLowerCase());
        if (exists) continue;

        targetList.push({
          name: parentParamName,
          lookupEntitySet: parentEntitySetName,
          lookupPath: resolveLookupPath(parentEntitySetName, serviceResourceMap),
          lookupValueField: parentType.keyProperties?.[0],
          lookupLabelField: parentType.stringProperties?.find(prop => /name/i.test(prop)) || parentType.stringProperties?.[0],
        });
        mandatoryByTarget[targetEntitySetName] = targetList;
      }
    }
  }

  return mandatoryByTarget;
}

function buildMandatoryNavigationParameters(
  resource: ODataServiceResource,
  mandatoryByTarget: MandatoryNavigationByTarget
): RestApiParameter[] {
  const resourceName = String(resource?.name ?? '').trim();
  if (!resourceName) return [];

  const mandatoryTargets = mandatoryByTarget[resourceName] || [];
  const mandatoryParameters: RestApiParameter[] = [];
  const seenNames = new Set<string>();

  for (const mandatoryTarget of mandatoryTargets) {
    const normalizedName = mandatoryTarget.name.toLowerCase();
    if (seenNames.has(normalizedName)) continue;

    const description = mandatoryTarget.lookupEntitySet
      ? `Required navigation parameter deduced from OData metadata (lookup: ${mandatoryTarget.lookupEntitySet})`
      : 'Required navigation parameter deduced from OData metadata';

    mandatoryParameters.push({
      name: mandatoryTarget.name,
      in: 'query',
      dataType: 'string',
      required: true,
      description,
      odataLookupPath: mandatoryTarget.lookupPath,
      odataLookupEntitySet: mandatoryTarget.lookupEntitySet,
      odataLookupValueField: mandatoryTarget.lookupValueField,
      odataLookupLabelField: mandatoryTarget.lookupLabelField,
    });
    seenNames.add(normalizedName);
  }

  return mandatoryParameters;
}

function createODataResourceEndpoints(
  resource: ODataServiceResource,
  mandatoryByTarget: MandatoryNavigationByTarget
): RestApiEndpoint[] {
  const path = normalizeEndpointPath(resource.url);
  if (!path) return [];

  const summary = resource.name || resource.url || path;
  const descriptionKind = String(resource.kind ?? '').trim();
  const methods = inferMethods(resource.kind);
  const mandatoryNavigationParameters = buildMandatoryNavigationParameters(resource, mandatoryByTarget);

  return methods.map(method => {
    const parameters: RestApiParameter[] = [...mandatoryNavigationParameters];

    if (method === 'POST') {
      parameters.push({
        name: 'body',
        in: 'body',
        dataType: 'object',
        contentType: 'application/json',
      });
    }

    return {
      method,
      path,
      summary,
      description: descriptionKind ? `OData ${descriptionKind}` : 'OData resource',
      parameters,
    };
  });
}

export function analyseODataDefinition(
  doc: ODataServiceDocument,
  endpointUrl: string,
  metadataDocumentXml?: string | null
): RestApiDefinition {
  const resources = Array.isArray(doc?.value) ? doc.value : [];
  const categoriesByName = new Map<string, RestApiEndpoint[]>();
  const metadataDocument = metadataDocumentXml ? parseODataMetadataDocument(metadataDocumentXml) : null;
  const mandatoryByTarget = deduceMandatoryNavigationByTarget(metadataDocument, resources);

  for (const resource of resources) {
    const endpoints = createODataResourceEndpoints(resource, mandatoryByTarget);
    if (endpoints.length === 0) continue;

    const categoryName = String(resource.kind ?? 'Resources').trim() || 'Resources';
    const existingEndpoints = categoriesByName.get(categoryName) || [];
    existingEndpoints.push(...endpoints);
    categoriesByName.set(categoryName, existingEndpoints);
  }

  const metadataEndpoint: RestApiEndpoint = {
    method: 'GET',
    path: '/$metadata',
    summary: '$metadata',
    description: 'OData service metadata',
    parameters: [],
  };

  const metadataCategory = categoriesByName.get('Metadata') || [];
  metadataCategory.push(metadataEndpoint);
  categoriesByName.set('Metadata', metadataCategory);

  const serviceRoot = normalizeServiceRoot(doc?.['@odata.context'], endpointUrl);
  const servers: RestApiServer[] = serviceRoot ? [{ url: serviceRoot }] : [];

  return {
    categories: Array.from(categoriesByName.entries()).map(([name, endpoints]) => ({
      name,
      endpoints,
    })),
    servers,
  };
}
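For orientation, a hypothetical call without metadata XML; the service document below is invented and not part of this change.

import { analyseODataDefinition } from './oDataAdapter';

const apiInfo = analyseODataDefinition(
  {
    '@odata.context': 'https://example/odata/$metadata',
    value: [{ name: 'orders', kind: 'EntitySet', url: 'orders' }],
  },
  'https://example/odata'
);
// apiInfo.categories → 'EntitySet' (GET /orders, POST /orders) plus 'Metadata' (GET /$metadata)
// apiInfo.servers    → [{ url: 'https://example/odata/' }]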
@@ -0,0 +1,93 @@
import type { EngineDriver } from 'dbgate-types';
import { buildRestAuthHeaders } from './restAuthTools';
import { apiDriverBase } from './restDriverBase';

function resolveServiceRoot(contextUrl: string | undefined, fallbackUrl: string): string {
  const safeFallback = String(fallbackUrl ?? '').trim();

  if (typeof contextUrl === 'string' && contextUrl.trim()) {
    try {
      const resolved = new URL(contextUrl.trim(), safeFallback || undefined);
      resolved.hash = '';
      resolved.search = '';
      // Strip a trailing "/$metadata" segment ("$" must be escaped in the regex)
      resolved.pathname = resolved.pathname.replace(/\/\$metadata$/i, '');

      const url = resolved.toString();
      return url.endsWith('/') ? url : `${url}/`;
    } catch {
      // ignore, fallback below
    }
  }

  return safeFallback.endsWith('/') ? safeFallback : `${safeFallback}/`;
}

async function loadODataServiceDocument(dbhan: any) {
  if (!dbhan?.connection?.apiServerUrl1) {
    throw new Error('DBGM-00330 OData endpoint URL is not configured');
  }

  const response = await dbhan.axios.get(dbhan.connection.apiServerUrl1, {
    headers: buildRestAuthHeaders(dbhan.connection.restAuth),
  });

  const document = response?.data;
  if (!document || typeof document !== 'object') {
    throw new Error('DBGM-00331 OData service document is empty or invalid');
  }

  if (!document['@odata.context']) {
    throw new Error('DBGM-00332 OData service document does not contain @odata.context');
  }

  return document;
}

function getODataVersion(document: any): string {
  const contextUrl = String(document?.['@odata.context'] ?? '').trim();
  const versionMatch = contextUrl.match(/\/v(\d+(?:\.\d+)*)\/\$metadata$/i);
  if (versionMatch?.[1]) return versionMatch[1];
  return '';
}

// @ts-ignore
export const oDataDriver: EngineDriver = {
  ...apiDriverBase,
  engine: 'odata@rest',
  title: 'OData - REST',
  databaseEngineTypes: ['rest', 'odata'],
  icon: '<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128"><rect width="128" height="128" fill="#f9a000"/><rect x="12" y="12" width="47" height="12" fill="#ffffff"/><rect x="69" y="12" width="47" height="12" fill="#ffffff"/><rect x="12" y="37" width="47" height="12" fill="#ffffff"/><rect x="69" y="37" width="47" height="12" fill="#ffffff"/><rect x="12" y="62" width="47" height="12" fill="#ffffff"/><rect x="69" y="62" width="47" height="12" fill="#ffffff"/><rect x="69" y="87" width="47" height="12" fill="#ffffff"/><circle cx="35" cy="102" r="20" fill="#e6e6e6"/></svg>',
  apiServerUrl1Label: 'OData Service URL',

  showConnectionField: (field, values) => {
    if (apiDriverBase.showConnectionField(field, values)) return true;
    if (field === 'apiServerUrl1') return true;
    return false;
  },

  beforeConnectionSave: connection => ({
    ...connection,
    singleDatabase: true,
    defaultDatabase: '_api_database_',
  }),

  async connect(connection: any) {
    return {
      connection,
      client: null,
      database: '_api_database_',
      axios: connection.axios,
    };
  },

  async getVersion(dbhan: any) {
    const document = await loadODataServiceDocument(dbhan);
    const resourcesCount = Array.isArray(document?.value) ? document.value.length : 0;
    const odataVersion = getODataVersion(document);

    return {
      version: odataVersion || 'OData',
      versionText: `OData${odataVersion ? ` ${odataVersion}` : ''}, ${resourcesCount} resources`,
    };
  },
};
@@ -0,0 +1,161 @@
import type { ODataMetadataDocument, ODataMetadataEntitySet, ODataMetadataEntityType, ODataMetadataNavigationProperty } from './oDataAdapter';

function decodeXmlEntities(value: string): string {
  return String(value ?? '')
    .replace(/&quot;/g, '"')
    .replace(/&apos;/g, "'")
    .replace(/&lt;/g, '<')
    .replace(/&gt;/g, '>')
    .replace(/&amp;/g, '&');
}

function parseXmlAttributes(attributesText: string): Record<string, string> {
  const attributes: Record<string, string> = {};
  const regex = /([A-Za-z_][A-Za-z0-9_.:-]*)\s*=\s*("([^"]*)"|'([^']*)')/g;
  let match = regex.exec(attributesText || '');

  while (match) {
    const rawName = match[1];
    const localName = rawName.includes(':') ? rawName.split(':').pop() || rawName : rawName;
    const rawValue = match[3] ?? match[4] ?? '';
    const decoded = decodeXmlEntities(rawValue);
    attributes[rawName] = decoded;
    attributes[localName] = decoded;
    match = regex.exec(attributesText || '');
  }

  return attributes;
}

function extractXmlElements(xml: string, elementName: string): Array<{ attributes: Record<string, string>; innerXml: string }> {
  const elements: Array<{ attributes: Record<string, string>; innerXml: string }> = [];
  const fullTagRegex = new RegExp(
    `<(?:[A-Za-z_][A-Za-z0-9_.-]*:)?${elementName}\\b([^>]*)>([\\s\\S]*?)<\\/(?:[A-Za-z_][A-Za-z0-9_.-]*:)?${elementName}>`,
    'gi'
  );
  const selfClosingRegex = new RegExp(
    `<(?:[A-Za-z_][A-Za-z0-9_.-]*:)?${elementName}\\b([^>]*)\\/>`,
    'gi'
  );

  let fullMatch = fullTagRegex.exec(xml || '');
  while (fullMatch) {
    elements.push({
      attributes: parseXmlAttributes(fullMatch[1] || ''),
      innerXml: fullMatch[2] || '',
    });
    fullMatch = fullTagRegex.exec(xml || '');
  }

  let selfClosingMatch = selfClosingRegex.exec(xml || '');
  while (selfClosingMatch) {
    elements.push({
      attributes: parseXmlAttributes(selfClosingMatch[1] || ''),
      innerXml: '',
    });
    selfClosingMatch = selfClosingRegex.exec(xml || '');
  }

  return elements;
}

function toBoolAttribute(value: string | undefined): boolean {
  return String(value ?? '').trim().toLowerCase() === 'true';
}

function normalizeEntitySetName(value: string | undefined): string {
  const input = String(value ?? '').trim();
  if (!input) return '';

  const noContainer = input.includes('/') ? input.split('/').pop() || '' : input;
  return noContainer.includes('.') ? noContainer.split('.').pop() || noContainer : noContainer;
}

export function parseODataMetadataDocument(metadataXml: string): ODataMetadataDocument {
  const schemas = extractXmlElements(metadataXml || '', 'Schema');

  const entityTypes: Record<string, ODataMetadataEntityType> = {};
  const entitySets: Record<string, ODataMetadataEntitySet> = {};

  for (const schema of schemas) {
    const namespace = String(schema.attributes.Namespace || '').trim();

    for (const entityTypeNode of extractXmlElements(schema.innerXml, 'EntityType')) {
      const typeName = String(entityTypeNode.attributes.Name || '').trim();
      if (!typeName) continue;

      const fullTypeName = namespace ? `${namespace}.${typeName}` : typeName;
      const keyProperties: string[] = [];
      const stringProperties: string[] = [];
      const navigationProperties: ODataMetadataNavigationProperty[] = [];

      for (const keyNode of extractXmlElements(entityTypeNode.innerXml, 'Key')) {
        for (const propRef of extractXmlElements(keyNode.innerXml, 'PropertyRef')) {
          const keyName = String(propRef.attributes.Name || '').trim();
          if (keyName && !keyProperties.includes(keyName)) {
            keyProperties.push(keyName);
          }
        }
      }

      for (const propertyNode of extractXmlElements(entityTypeNode.innerXml, 'Property')) {
        const propName = String(propertyNode.attributes.Name || '').trim();
        const propType = String(propertyNode.attributes.Type || '').trim();
        if (propName && /^Edm\.String$/i.test(propType)) {
          stringProperties.push(propName);
        }
      }

      for (const navNode of extractXmlElements(entityTypeNode.innerXml, 'NavigationProperty')) {
        const navName = String(navNode.attributes.Name || '').trim();
        if (!navName) continue;

        navigationProperties.push({
          name: navName,
          type: String(navNode.attributes.Type || '').trim(),
          containsTarget: toBoolAttribute(navNode.attributes.ContainsTarget),
          nullable: navNode.attributes.Nullable === undefined ? true : toBoolAttribute(navNode.attributes.Nullable),
        });
      }

      entityTypes[fullTypeName] = {
        typeName,
        fullTypeName,
        keyProperties,
        stringProperties,
        navigationProperties,
      };
    }

    for (const entitySetNode of extractXmlElements(schema.innerXml, 'EntitySet')) {
      const setName = String(entitySetNode.attributes.Name || '').trim();
      const entityType = String(entitySetNode.attributes.EntityType || '').trim();
      if (!setName || !entityType) continue;

      const navigationBindings: Record<string, string> = {};

      for (const bindingNode of extractXmlElements(entitySetNode.innerXml, 'NavigationPropertyBinding')) {
        const path = String(bindingNode.attributes.Path || '').trim();
        const target = normalizeEntitySetName(bindingNode.attributes.Target);
        if (!path || !target) continue;

        navigationBindings[path] = target;
        const pathLastSegment = path.split('/').pop();
        if (pathLastSegment && !navigationBindings[pathLastSegment]) {
          navigationBindings[pathLastSegment] = target;
        }
      }

      entitySets[setName] = {
        name: setName,
        entityType,
        navigationBindings,
      };
    }
  }

  return {
    entityTypes,
    entitySets,
  };
}
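
// Illustrative usage sketch only: a minimal, hand-written $metadata fragment;
// the entity, property and set names below are made up for the example.
const metadata = parseODataMetadataDocument(`
  <Schema Namespace="Demo">
    <EntityType Name="Product">
      <Key><PropertyRef Name="Id"/></Key>
      <Property Name="Id" Type="Edm.Int32"/>
      <Property Name="Name" Type="Edm.String"/>
      <NavigationProperty Name="Category" Type="Demo.Category"/>
    </EntityType>
    <EntitySet Name="Products" EntityType="Demo.Product">
      <NavigationPropertyBinding Path="Category" Target="Categories"/>
    </EntitySet>
  </Schema>
`);
// metadata.entityTypes['Demo.Product'].keyProperties     -> ['Id']
// metadata.entityTypes['Demo.Product'].stringProperties  -> ['Name']
// metadata.entitySets['Products'].navigationBindings     -> { Category: 'Categories' }
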
@@ -0,0 +1,285 @@
import type { OpenAPIV3_1 } from 'openapi-types';
import { RestApiDefinition, RestApiCategory, RestApiEndpoint, RestApiParameter, RestApiServer } from './restApiDef';

/**
 * Converts an OpenAPI v3.1 document into a simplified REST API definition.
 * Organizes endpoints by tags into categories.
 */
export function analyseOpenApiDefinition(doc: OpenAPIV3_1.Document): RestApiDefinition {
  const categories = new Map<string, RestApiEndpoint[]>();

  // Process all paths and methods
  if (doc.paths) {
    for (const [path, pathItem] of Object.entries(doc.paths)) {
      if (!pathItem) continue;

      // Process each HTTP method in the path
      const methods = ['get', 'post', 'put', 'patch', 'delete', 'options', 'head', 'trace'] as const;

      for (const method of methods) {
        const operation = (pathItem as any)[method] as OpenAPIV3_1.OperationObject | undefined;
        if (!operation) continue;

        const endpoint: RestApiEndpoint = {
          method: method.toUpperCase() as any,
          path,
          summary: operation.summary,
          description: operation.description,
          parameters: extractParameters(operation, pathItem as any),
        };

        // Use tags to organize into categories
        const tags = operation.tags || ['Other'];
        for (const tag of tags) {
          if (!categories.has(tag)) {
            categories.set(tag, []);
          }
          categories.get(tag)!.push(endpoint);
        }
      }
    }
  }

  // Convert Map to RestApiCategory array
  const categoryArray: RestApiCategory[] = Array.from(categories.entries()).map(([name, endpoints]) => ({
    name,
    endpoints,
  }));

  const servers: RestApiServer[] = (doc.servers || []).map(server => ({
    url: server.url,
    description: server.description,
  }));

  return {
    categories: categoryArray,
    servers,
  };
}

/**
 * Extract parameters from operation and path item
 */
function extractParameters(
  operation: OpenAPIV3_1.OperationObject,
  pathItem: OpenAPIV3_1.PathItemObject
): RestApiParameter[] {
  const parameters: RestApiParameter[] = [];

  // Path item level parameters (apply to all methods)
  if (pathItem.parameters) {
    for (const param of pathItem.parameters) {
      if (!('$ref' in param)) {
        parameters.push(convertParameter(param as OpenAPIV3_1.ParameterObject));
      }
    }
  }

  // Operation level parameters
  if (operation.parameters) {
    for (const param of operation.parameters) {
      if (!('$ref' in param)) {
        parameters.push(convertParameter(param as OpenAPIV3_1.ParameterObject));
      }
    }
  }

  const bodyParameter = convertRequestBodyParameter(operation.requestBody);
  if (bodyParameter) {
    parameters.push(bodyParameter);
  }

  return parameters;
}

function isSchemaObject(schema: OpenAPIV3_1.SchemaObject | OpenAPIV3_1.ReferenceObject | undefined): schema is OpenAPIV3_1.SchemaObject {
  return !!schema && !('$ref' in schema);
}

function isExampleObject(example: OpenAPIV3_1.ExampleObject | OpenAPIV3_1.ReferenceObject | undefined): example is OpenAPIV3_1.ExampleObject {
  return !!example && !('$ref' in example);
}

function cloneValue(value: any) {
  if (value == null) return value;
  if (typeof value !== 'object') return value;

  try {
    return JSON.parse(JSON.stringify(value));
  } catch {
    return value;
  }
}

function extractMediaTypeExample(mediaType: OpenAPIV3_1.MediaTypeObject | undefined): any {
  if (!mediaType) return undefined;

  if (mediaType.example !== undefined) return cloneValue(mediaType.example);

  if (mediaType.examples) {
    const firstExample = Object.values(mediaType.examples)[0];
    if (isExampleObject(firstExample) && firstExample.value !== undefined) {
      return cloneValue(firstExample.value);
    }
  }

  return undefined;
}

function buildSchemaExample(
  schema: OpenAPIV3_1.SchemaObject | undefined,
  recursionDepth = 0
): any {
  if (!schema || recursionDepth > 6) return undefined;

  if (schema.example !== undefined) return cloneValue(schema.example);
  if (schema.default !== undefined) return cloneValue(schema.default);

  if (schema.oneOf?.length) {
    const oneOfSchema = schema.oneOf[0];
    return isSchemaObject(oneOfSchema) ? buildSchemaExample(oneOfSchema, recursionDepth + 1) : undefined;
  }
  if (schema.anyOf?.length) {
    const anyOfSchema = schema.anyOf[0];
    return isSchemaObject(anyOfSchema) ? buildSchemaExample(anyOfSchema, recursionDepth + 1) : undefined;
  }
  if (schema.allOf?.length) {
    const mergedObject = {};
    let hasValue = false;

    for (const item of schema.allOf) {
      if (!isSchemaObject(item)) continue;
      const itemExample = buildSchemaExample(item, recursionDepth + 1);
      if (itemExample && typeof itemExample === 'object' && !Array.isArray(itemExample)) {
        Object.assign(mergedObject, itemExample);
        hasValue = true;
      }
    }

    return hasValue ? mergedObject : undefined;
  }

  if (schema.enum?.length) return cloneValue(schema.enum[0]);

  if (schema.type === 'object' || schema.properties || schema.additionalProperties) {
    const result: Record<string, any> = {};
    let hasAnyProperty = false;

    for (const [propertyName, propertySchema] of Object.entries(schema.properties || {})) {
      if (!isSchemaObject(propertySchema)) continue;
      const propertyValue = buildSchemaExample(propertySchema, recursionDepth + 1);
      if (propertyValue !== undefined) {
        result[propertyName] = propertyValue;
        hasAnyProperty = true;
      }
    }

    if (schema.additionalProperties) {
      if (schema.additionalProperties === true) {
        result.additionalProp1 = 'string';
        hasAnyProperty = true;
      } else if (isSchemaObject(schema.additionalProperties)) {
        result.additionalProp1 = buildSchemaExample(schema.additionalProperties, recursionDepth + 1) ?? 'string';
        hasAnyProperty = true;
      }
    }

    return hasAnyProperty ? result : {};
  }

  if (schema.type === 'array') {
    if (isSchemaObject(schema.items)) {
      const itemValue = buildSchemaExample(schema.items, recursionDepth + 1);
      return itemValue !== undefined ? [itemValue] : [];
    }
    return [];
  }

  if (schema.type === 'number' || schema.type === 'integer') return 0;
  if (schema.type === 'boolean') return true;
  if (schema.type === 'null') return null;

  return 'string';
}

function getSchemaType(schema: OpenAPIV3_1.SchemaObject | undefined): string | undefined {
  if (!schema) return undefined;

  if (schema.type === 'array') {
    if (isSchemaObject(schema.items)) {
      return `array<${schema.items.type || 'any'}>`;
    }
    return 'array';
  }

  if (Array.isArray(schema.type)) return schema.type.join(' | ');
  if (schema.type) return schema.type;
  if (schema.properties) return 'object';

  return undefined;
}

function isStringListSchema(schema: OpenAPIV3_1.SchemaObject | undefined): boolean {
  return schema?.type === 'array' && isSchemaObject(schema.items) && schema.items.type === 'string';
}

function convertRequestBodyParameter(
  requestBody: OpenAPIV3_1.RequestBodyObject | OpenAPIV3_1.ReferenceObject | undefined
): RestApiParameter | null {
  if (!requestBody || '$ref' in requestBody || !requestBody.content) return null;

  const preferredContentTypes = [
    'application/json',
    'application/x-www-form-urlencoded',
    'multipart/form-data',
    'text/plain',
  ];
  const availableContentTypes = Object.keys(requestBody.content);
  if (availableContentTypes.length === 0) return null;

  const selectedContentType =
    preferredContentTypes.find(contentType => requestBody.content?.[contentType]) || availableContentTypes[0];
  const mediaType = requestBody.content[selectedContentType];

  if (!mediaType || !isSchemaObject(mediaType.schema)) {
    return {
      name: 'body',
      in: 'body',
      contentType: selectedContentType,
      description: requestBody.description,
      required: requestBody.required,
    };
  }

  const schema = mediaType.schema;
  const mediaTypeExample = extractMediaTypeExample(mediaType);
  const generatedExample = buildSchemaExample(schema);

  return {
    name: 'body',
    in: 'body',
    dataType: getSchemaType(schema),
    contentType: selectedContentType,
    isStringList: isStringListSchema(schema),
    description: requestBody.description,
    required: requestBody.required,
    defaultValue: mediaTypeExample ?? generatedExample,
  };
}

/**
 * Convert OpenAPI parameter to REST API parameter
 */
function convertParameter(param: OpenAPIV3_1.ParameterObject): RestApiParameter {
  const schema = isSchemaObject(param.schema) ? param.schema : undefined;

  return {
    name: param.name,
    in: param.in as any,
    dataType: getSchemaType(schema),
    isStringList: isStringListSchema(schema),
    description: param.description,
    required: param.required,
    defaultValue: schema?.default,
  };
}
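
// Illustrative usage sketch only: a hand-written, minimal OpenAPI 3.1 document
// (the title, path and parameter names below are made up for the example).
const definition = analyseOpenApiDefinition({
  openapi: '3.1.0',
  info: { title: 'Demo API', version: '1.0.0' },
  servers: [{ url: 'https://example.test/api' }],
  paths: {
    '/customers': {
      get: {
        tags: ['Customers'],
        summary: 'List customers',
        parameters: [{ name: 'search', in: 'query', schema: { type: 'string' } }],
      },
    },
  },
} as any);
// definition.servers    -> [{ url: 'https://example.test/api', description: undefined }]
// definition.categories -> [{ name: 'Customers', endpoints: [{ method: 'GET', path: '/customers',
//                            summary: 'List customers', parameters: [{ name: 'search', in: 'query',
//                            dataType: 'string', ... }] }] }]
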
@@ -0,0 +1,94 @@
import type { EngineDriver } from 'dbgate-types';
import yaml from 'js-yaml';
import { apiDriverBase } from './restDriverBase';

async function loadOpenApiDefinition(dbhan: any) {
  if (!dbhan?.connection?.apiServerUrl1) {
    throw new Error('DBGM-00313 REST connection URL is not configured');
  }

  const response = await dbhan.axios.get(dbhan.connection.apiServerUrl1);

  const content = response?.data;
  let openApiDefinition: any = content;
  if (typeof content === 'string') {
    try {
      openApiDefinition = JSON.parse(content);
    } catch {
      openApiDefinition = yaml.load(content);
    }
  }

  if (!openApiDefinition || typeof openApiDefinition !== 'object') {
    throw new Error('DBGM-00314 API documentation is empty or could not be parsed');
  }

  return openApiDefinition;
}

// @ts-ignore
export const openApiDriver: EngineDriver = {
  ...apiDriverBase,
  engine: 'openapi@rest',
  title: 'OpenAPI - REST',
  databaseEngineTypes: ['rest', 'openapi'],
icon: '<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128"><path fill="#85ea2d" d="M63.999 124.945c-33.607 0-60.95-27.34-60.95-60.949C3.05 30.388 30.392 3.048 64 3.048s60.95 27.342 60.95 60.95c0 33.607-27.343 60.946-60.95 60.946z"/><path fill="#173647" d="M40.3 43.311c-.198 2.19.072 4.454-.073 6.668-.173 2.217-.444 4.407-.888 6.596-.615 3.126-2.56 5.489-5.24 7.458 5.218 3.396 5.807 8.662 6.152 14.003.172 2.88.098 5.785.394 8.638.221 2.215 1.082 2.782 3.372 2.854.935.025 1.894 0 2.978 0v6.842c-6.768 1.156-12.354-.762-13.734-6.496a39.329 39.329 0 0 1-.836-6.4c-.148-2.287.097-4.577-.074-6.864-.492-6.277-1.305-8.393-7.308-8.689v-7.8c.441-.1.86-.174 1.302-.223 3.298-.172 4.701-1.182 5.414-4.43a37.512 37.512 0 0 0 .616-5.536c.247-3.569.148-7.21.763-10.754.86-5.094 4.01-7.556 9.254-7.852 1.476-.074 2.978 0 4.676 0v6.99c-.714.05-1.33.147-1.969.147-4.258-.148-4.48 1.304-4.8 4.848zm8.195 16.193h-.099c-2.462-.123-4.578 1.796-4.702 4.258-.122 2.485 1.797 4.603 4.259 4.724h.295c2.436.148 4.527-1.724 4.676-4.16v-.245c.05-2.486-1.944-4.527-4.43-4.577zm15.43 0c-2.386-.074-4.38 1.796-4.454 4.159 0 .149 0 .271.024.418 0 2.684 1.821 4.406 4.578 4.406 2.707 0 4.406-1.772 4.406-4.553-.025-2.682-1.823-4.455-4.554-4.43Zm15.801 0a4.596 4.596 0 0 0-4.676 4.454 4.515 4.515 0 0 0 4.528 4.528h.05c2.264.394 4.553-1.796 4.701-4.429.122-2.437-2.092-4.553-4.604-4.553Zm21.682.369c-2.855-.123-4.284-1.083-4.996-3.79a27.444 27.444 0 0 1-.811-5.292c-.198-3.298-.174-6.62-.395-9.918-.516-7.826-6.177-10.557-14.397-9.205v6.792c1.304 0 2.313 0 3.322.025 1.748.024 3.077.69 3.249 2.634.172 1.772.172 3.568.344 5.365.346 3.57.542 7.187 1.157 10.706.542 2.904 2.536 5.07 5.02 6.841-4.355 2.929-5.636 7.113-5.857 11.814-.122 3.223-.196 6.472-.368 9.721-.148 2.953-1.181 3.913-4.16 3.987-.835.024-1.648.098-2.583.148v6.964c1.748 0 3.347.1 4.946 0 4.971-.295 7.974-2.706 8.96-7.531.417-2.658.662-5.34.737-8.023.171-2.46.148-4.946.394-7.382.369-3.815 2.116-5.389 5.93-5.636a5.161 5.161 0 0 0 1.06-.245v-7.801c-.64-.074-1.084-.148-1.552-.173zM64 6.1c31.977 0 57.9 25.92 57.9 57.898 0 31.977-25.923 57.899-57.9 57.899-31.976 0-57.898-25.922-57.898-57.9C6.102 32.023 32.024 6.101 64 6.101m0-6.1C28.71 0 0 28.71 0 64c0 35.288 28.71 63.998 64 63.998 35.289 0 64-28.71 64-64S99.289.002 64 .002Z"/></svg>',
  apiServerUrl1Label: 'API Definition URL',
  apiServerUrl2Label: 'API Server URL',
  apiServerUrl2Placeholder: '(optional - if not set, the first server URL from the API definition will be used)',
  loadApiServerUrl2Options: true,

  showConnectionField: (field, values) => {
    if (apiDriverBase.showConnectionField(field, values)) return true;
    if (field === 'apiServerUrl1') return true;
    if (field === 'apiServerUrl2') return true;
    return false;
  },

  beforeConnectionSave: connection => ({
    ...connection,
    singleDatabase: true,
    defaultDatabase: '_api_database_',
  }),

  async connect(connection: any) {
    return {
      connection,
      client: null,
      database: '_api_database_',
      axios: connection.axios,
    };
  },

  async listDatabases(dbhan: any) {
    const openApiDefinition = await loadOpenApiDefinition(dbhan);
    const servers = Array.isArray(openApiDefinition.servers) ? openApiDefinition.servers : [];

    return servers
      .map(server => String(server?.url ?? '').trim())
      .filter(Boolean)
      .map(url => ({
        name: url,
      }));
  },

  async getVersion(dbhan: any) {
    const openApiDefinition = await loadOpenApiDefinition(dbhan);

    const specVersion = String(openApiDefinition.openapi ?? openApiDefinition.swagger ?? '').trim();
    const apiVersion = String(openApiDefinition.info?.version ?? '').trim();

    const version = apiVersion || specVersion || 'Unknown';
    const versionText = [
      apiVersion ? `API ${apiVersion}` : null,
      specVersion ? `OpenAPI ${specVersion}` : null,
    ]
      .filter(Boolean)
      .join(', ');

    return {
      version,
      ...(versionText ? { versionText } : {}),
    };
  },
};

@@ -0,0 +1,65 @@
export interface RestApiParameter {
  name: string;
  in: 'query' | 'header' | 'path' | 'cookie' | 'body';
  dataType?: string;
  contentType?: string;
  isStringList?: boolean;
  description?: string;
  required?: boolean;
  defaultValue?: any;
  options?: Array<{ label: string; value: string }>;
  odataLookupPath?: string;
  odataLookupEntitySet?: string;
  odataLookupValueField?: string;
  odataLookupLabelField?: string;
}

export interface RestApiEndpoint {
  method: 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE' | 'OPTIONS' | 'HEAD';
  path: string;
  summary?: string;
  description?: string;
  parameters: RestApiParameter[];
}

export interface RestApiCategory {
  name: string;
  endpoints: RestApiEndpoint[];
}

export interface RestApiServer {
  url: string;
  description?: string;
}

export interface RestApiDefinition {
  categories: RestApiCategory[];
  servers?: RestApiServer[];
}

export interface RestApiAuthorization_None {
  type: 'none';
}

export interface RestApiAuthorization_Basic {
  type: 'basic';
  user: string;
  password: string;
}

export interface RestApiAuthorization_Bearer {
  type: 'bearer';
  token: string;
}

export interface RestApiAuthorization_ApiKey {
  type: 'apikey';
  header: string;
  value: string;
}

export type RestApiAuthorization =
  | RestApiAuthorization_None
  | RestApiAuthorization_Basic
  | RestApiAuthorization_Bearer
  | RestApiAuthorization_ApiKey;
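
// Illustrative only: a hand-written definition showing how these interfaces compose
// (the endpoint, parameter and server values below are made up).
const exampleDefinition: RestApiDefinition = {
  servers: [{ url: 'https://example.test/api', description: 'demo server' }],
  categories: [
    {
      name: 'Customers',
      endpoints: [
        {
          method: 'GET',
          path: '/customers/{id}',
          summary: 'Read a single customer',
          parameters: [
            { name: 'id', in: 'path', dataType: 'string', required: true },
            { name: 'expand', in: 'query', dataType: 'array<string>', isStringList: true },
          ],
        },
      ],
    },
  ],
};
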
@@ -0,0 +1,134 @@
const { executeODataApiEndpoint } = require('./restApiExecutor');

function createDefinition() {
  return {
    categories: [
      {
        name: 'EntitySet',
        endpoints: [
          {
            method: 'GET',
            path: '/customers',
            parameters: [
              {
                name: 'company',
                in: 'query',
                dataType: 'string',
                required: true,
              },
            ],
          },
          {
            method: 'GET',
            path: '/$metadata',
            parameters: [],
          },
        ],
      },
    ],
  };
}

test('adds OData system query options from parameterValues', async () => {
  const calls = [];
  const axios = async args => {
    calls.push(args);
    return { status: 200, data: {} };
  };

  await executeODataApiEndpoint(
    createDefinition(),
    '/customers',
    'GET',
    {
      company: '123',
      '$top': 50,
      '$skip': '10',
      '$count': true,
      '$select': ['id', 'displayName'],
      '$orderby': 'displayName asc',
      '$filter': 'displayName ne null',
      '$search': 'dino',
      '$expand': 'addresses',
      '$format': 'application/json',
    },
    'https://example.test/odata',
    null,
    axios
  );

  expect(calls).toHaveLength(1);
  const requestUrl = String(calls[0].url);
  const parsed = new URL(requestUrl);

  expect(parsed.pathname).toBe('/odata/customers');
  expect(parsed.searchParams.get('company')).toBe('123');
  expect(parsed.searchParams.get('$top')).toBe('50');
  expect(parsed.searchParams.get('$skip')).toBe('10');
  expect(parsed.searchParams.get('$count')).toBe('true');
  expect(parsed.searchParams.get('$select')).toBe('id,displayName');
  expect(parsed.searchParams.get('$orderby')).toBe('displayName asc');
  expect(parsed.searchParams.get('$filter')).toBe('displayName ne null');
  expect(parsed.searchParams.get('$search')).toBe('dino');
  expect(parsed.searchParams.get('$expand')).toBe('addresses');
  expect(parsed.searchParams.get('$format')).toBe('application/json');
});

test('accepts non-dollar aliases and ignores invalid system option values', async () => {
  const calls = [];
  const axios = async args => {
    calls.push(args);
    return { status: 200, data: {} };
  };

  await executeODataApiEndpoint(
    createDefinition(),
    '/customers',
    'GET',
    {
      company: '123',
      top: 'abc',
      skip: -1,
      count: 'yes',
      select: ['id'],
      filter: 'id ne null',
    },
    'https://example.test/odata',
    null,
    axios
  );

  expect(calls).toHaveLength(1);
  const parsed = new URL(String(calls[0].url));
  expect(parsed.searchParams.get('$top')).toBeNull();
  expect(parsed.searchParams.get('$skip')).toBeNull();
  expect(parsed.searchParams.get('$count')).toBeNull();
  expect(parsed.searchParams.get('$select')).toBe('id');
  expect(parsed.searchParams.get('$filter')).toBe('id ne null');
});

test('does not add OData system query options to $metadata endpoint', async () => {
  const calls = [];
  const axios = async args => {
    calls.push(args);
    return { status: 200, data: {} };
  };

  await executeODataApiEndpoint(
    createDefinition(),
    '/$metadata',
    'GET',
    {
      '$top': 10,
      '$count': true,
    },
    'https://example.test/odata',
    null,
    axios
  );

  expect(calls).toHaveLength(1);
  const parsed = new URL(String(calls[0].url));
  expect(parsed.pathname).toBe('/odata/$metadata');
  expect(parsed.search).toBe('');
});

@@ -0,0 +1,329 @@
import type { AxiosInstance } from 'axios';
import { RestApiAuthorization, RestApiDefinition, RestApiParameter } from './restApiDef';

function hasValue(value: any) {
  if (value === null || value === undefined) return false;
  if (typeof value === 'string') return value.trim() !== '';
  if (Array.isArray(value)) return value.length > 0;
  return true;
}

function normalizeValueForRequest(value: any, parameter: RestApiParameter): any {
  if (!hasValue(value)) return undefined;

  if (parameter.isStringList) {
    if (Array.isArray(value)) return value.filter(item => item != null && String(item).trim() !== '');
    return [String(value)];
  }

  if (parameter.in === 'body' && typeof value === 'string') {
    const trimmed = value.trim();
    if (!trimmed) return undefined;

    if ((parameter.contentType || '').includes('json') || parameter.dataType === 'object') {
      try {
        return JSON.parse(trimmed);
      } catch {
        return value;
      }
    }
  }

  return value;
}

function splitPathAndQuery(path: string) {
  const value = String(path || '');
  const index = value.indexOf('?');
  if (index < 0) {
    return {
      pathOnly: value,
      queryString: '',
    };
  }
  return {
    pathOnly: value.slice(0, index),
    queryString: value.slice(index + 1),
  };
}

function addAuthHeaders(headers: Record<string, string>, auth: RestApiAuthorization | null) {
  if (!auth) return;

  if (auth.type === 'basic') {
    const basicAuth = Buffer.from(`${auth.user}:${auth.password}`).toString('base64');
    headers['Authorization'] = `Basic ${basicAuth}`;
  } else if (auth.type === 'bearer') {
    headers['Authorization'] = `Bearer ${auth.token}`;
  } else if (auth.type === 'apikey') {
    headers[auth.header] = auth.value;
  }
}

function findEndpointDefinition(
  definition: RestApiDefinition,
  endpoint: string,
  method: string
) {
  return definition.categories
    .flatMap(category => category.endpoints)
    .find(ep => ep.path === endpoint && ep.method === method);
}

function buildRequestUrl(server: string, pathOnly: string) {
  const normalizedServer = String(server || '').trim();
  const normalizedPath = String(pathOnly || '').trim();

  if (!normalizedServer) {
    return normalizedPath;
  }

  try {
    const baseUrl = normalizedServer.endsWith('/') ? normalizedServer : `${normalizedServer}/`;
    const relativePath = normalizedPath.replace(/^\//, '');
    return new URL(relativePath, baseUrl).toString();
  } catch {
    return normalizedServer + normalizedPath;
  }
}

function appendQueryAndCookies(
  url: string,
  query: URLSearchParams,
  cookies: string[],
  headers: Record<string, string>
) {
  const queryStringValue = query.toString();
  if (queryStringValue) {
    const separator = url.includes('?') ? '&' : '?';
    url += separator + queryStringValue;
  }

  if (cookies.length > 0) {
    headers['Cookie'] = cookies.join('; ');
  }

  return url;
}

const ODATA_SYSTEM_QUERY_OPTIONS = new Set([
  '$filter',
  '$select',
  '$expand',
  '$orderby',
  '$top',
  '$skip',
  '$count',
  '$search',
  '$format',
]);

const ODATA_SYSTEM_QUERY_ALIASES: Record<string, string> = {
  filter: '$filter',
  select: '$select',
  expand: '$expand',
  orderby: '$orderby',
  top: '$top',
  skip: '$skip',
  count: '$count',
  search: '$search',
  format: '$format',
};

function resolveODataQueryOptionKey(rawKey: string): string | null {
  const key = String(rawKey || '').trim();
  if (!key) return null;

  const keyLower = key.toLowerCase();
  if (ODATA_SYSTEM_QUERY_OPTIONS.has(keyLower)) {
    return keyLower;
  }

  return ODATA_SYSTEM_QUERY_ALIASES[keyLower] || null;
}

function normalizeODataQueryOptionValue(optionKey: string, value: any): string | null {
  if (!hasValue(value)) return null;

  if (Array.isArray(value)) {
    const items = value.filter(item => hasValue(item)).map(item => String(item).trim()).filter(Boolean);
    if (items.length === 0) return null;
    return items.join(',');
  }

  if (optionKey === '$count') {
    if (typeof value === 'boolean') return value ? 'true' : 'false';
    const lowered = String(value).trim().toLowerCase();
    if (lowered === 'true' || lowered === 'false') return lowered;
    return null;
  }

  if (optionKey === '$top' || optionKey === '$skip') {
    const numeric = Number(value);
    if (Number.isFinite(numeric) && numeric >= 0) {
      return String(Math.trunc(numeric));
    }
    return null;
  }

  return String(value).trim();
}

function applyODataSystemQueryOptions(query: URLSearchParams, parameterValues: Record<string, any>) {
  for (const [rawKey, rawValue] of Object.entries(parameterValues || {})) {
    const optionKey = resolveODataQueryOptionKey(rawKey);
    if (!optionKey) continue;

    const normalizedValue = normalizeODataQueryOptionValue(optionKey, rawValue);
    if (!hasValue(normalizedValue)) continue;

    query.set(optionKey, String(normalizedValue));
  }
}

export async function executeRestApiEndpointOpenApi(
  definition: RestApiDefinition,
  endpoint: string,
  method: string,
  parameterValues: Record<string, any>,
  server: string,
  auth: RestApiAuthorization | null,
  axios: AxiosInstance
): Promise<any> {
  const endpointDef = findEndpointDefinition(definition, endpoint, method);
  if (!endpointDef) {
    throw new Error(`Endpoint ${method} ${endpoint} not found in definition.`);
  }

  const { pathOnly, queryString } = splitPathAndQuery(endpointDef.path);
  let url = buildRequestUrl(server, pathOnly);
  const headers: Record<string, string> = {};
  const query = new URLSearchParams(queryString);
  const cookies: string[] = [];
  let body: any = undefined;

  for (const param of endpointDef.parameters) {
    const value = normalizeValueForRequest(parameterValues[param.name], param);
    if (!hasValue(value) && param.in !== 'path') {
      continue;
    }

    if (param.in === 'path') {
      url = url.replace(`{${param.name}}`, encodeURIComponent(value));
    } else if (param.in === 'query') {
      if (Array.isArray(value)) {
        for (const item of value) {
          query.append(param.name, String(item));
        }
      } else {
        query.append(param.name, String(value));
      }
    } else if (param.in === 'header') {
      headers[param.name] = Array.isArray(value) ? value.map(item => String(item)).join(',') : String(value);
    } else if (param.in === 'cookie') {
      if (Array.isArray(value)) {
        for (const item of value) {
          cookies.push(`${encodeURIComponent(param.name)}=${encodeURIComponent(String(item))}`);
        }
      } else {
        cookies.push(`${encodeURIComponent(param.name)}=${encodeURIComponent(String(value))}`);
      }
    } else if (param.in === 'body') {
      body = value;
      if (param.contentType && !headers['Content-Type']) {
        headers['Content-Type'] = param.contentType;
      }
    }
  }

  url = appendQueryAndCookies(url, query, cookies, headers);
  addAuthHeaders(headers, auth);

  const resp = await axios({
    method,
    url,
    headers,
    data: body,
  });

  return resp;
}

export async function executeODataApiEndpoint(
  definition: RestApiDefinition,
  endpoint: string,
  method: string,
  parameterValues: Record<string, any>,
  server: string,
  auth: RestApiAuthorization | null,
  axios: AxiosInstance
): Promise<any> {
  const endpointDef = findEndpointDefinition(definition, endpoint, method);
  if (!endpointDef) {
    throw new Error(`Endpoint ${method} ${endpoint} not found in definition.`);
  }

  const { pathOnly, queryString } = splitPathAndQuery(endpointDef.path);
  const metadataPath = pathOnly.replace(/\/+$/, '') === '/$metadata';

  let url = buildRequestUrl(server, pathOnly);
  const headers: Record<string, string> = {
    Accept: 'application/json',
    'OData-Version': '4.0',
  };
  const query = metadataPath ? new URLSearchParams() : new URLSearchParams(queryString);
  const cookies: string[] = [];
  let body: any = undefined;

  for (const param of endpointDef.parameters) {
    const value = normalizeValueForRequest(parameterValues[param.name], param);
    if (!hasValue(value) && param.in !== 'path') {
      continue;
    }

    if (param.in === 'path') {
      url = url.replace(`{${param.name}}`, encodeURIComponent(value));
    } else if (param.in === 'query') {
      if (metadataPath) continue;

      if (Array.isArray(value)) {
        for (const item of value) {
          query.append(param.name, String(item));
        }
      } else {
        query.append(param.name, String(value));
      }
    } else if (param.in === 'header') {
      headers[param.name] = Array.isArray(value) ? value.map(item => String(item)).join(',') : String(value);
    } else if (param.in === 'cookie') {
      if (Array.isArray(value)) {
        for (const item of value) {
          cookies.push(`${encodeURIComponent(param.name)}=${encodeURIComponent(String(item))}`);
        }
      } else {
        cookies.push(`${encodeURIComponent(param.name)}=${encodeURIComponent(String(value))}`);
      }
    } else if (param.in === 'body') {
      body = value;
      if (param.contentType && !headers['Content-Type']) {
        headers['Content-Type'] = param.contentType;
      }
    }
  }

  if (!metadataPath) {
    applyODataSystemQueryOptions(query, parameterValues);
  }

  url = appendQueryAndCookies(url, query, cookies, headers);
  addAuthHeaders(headers, auth);

  const resp = await axios({
    method,
    url,
    headers,
    data: body,
  });

  return resp;
}
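
// Illustrative only: why buildRequestUrl appends a trailing slash to the server URL before
// resolving with new URL() - without it, an existing path segment would be dropped.
new URL('customers', 'https://example.test/odata').toString();
// -> 'https://example.test/customers'        (the '/odata' segment is replaced)
new URL('customers', 'https://example.test/odata/').toString();
// -> 'https://example.test/odata/customers'  (what buildRequestUrl produces)
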
@@ -0,0 +1,15 @@
import { RestApiAuthorization } from './restApiDef';

export function buildRestAuthHeaders(auth: RestApiAuthorization | null) {
  const headers = {};
  if (!auth) return headers;
  if (auth.type === 'basic') {
    const basicAuth = Buffer.from(`${auth.user}:${auth.password}`).toString('base64');
    headers['Authorization'] = `Basic ${basicAuth}`;
  } else if (auth.type === 'bearer') {
    headers['Authorization'] = `Bearer ${auth.token}`;
  } else if (auth.type === 'apikey') {
    headers[auth.header] = auth.value;
  }
  return headers;
}
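
// Illustrative only: headers produced for each authorization type (credentials are made up).
buildRestAuthHeaders({ type: 'basic', user: 'alice', password: 'secret' });
// -> { Authorization: 'Basic YWxpY2U6c2VjcmV0' }
buildRestAuthHeaders({ type: 'bearer', token: 'abc123' });
// -> { Authorization: 'Bearer abc123' }
buildRestAuthHeaders({ type: 'apikey', header: 'X-Api-Key', value: 'abc123' });
// -> { 'X-Api-Key': 'abc123' }
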
@@ -0,0 +1,50 @@
import { driverBase } from 'dbgate-tools';

export const apiDriverBase = {
  ...driverBase,
  supportExecuteQuery: false,
  getAuthTypes() {
    return [
      {
        title: 'No Authentication',
        name: 'none',
      },
      {
        title: 'Basic Authentication',
        name: 'basic',
      },
      {
        title: 'Bearer Token Authentication',
        name: 'bearer',
      },
      {
        title: 'API Key Authentication',
        name: 'apikey',
      },
    ];
  },

  showAuthConnectionField: (field, values) => {
    if (field === 'authType') return true;
    if (values?.authType === 'basic') {
      if (field === 'user') return true;
      if (field === 'password') return true;
    }
    if (values?.authType === 'bearer') {
      if (field === 'authToken') return true;
    }
    if (values?.authType === 'apikey') {
      if (field === 'apiKeyHeader') return true;
      if (field === 'apiKeyValue') return true;
    }
    return false;
  },

  showConnectionField: (field, values) => {
    if (apiDriverBase.showAuthConnectionField(field, values)) return true;
    if (field === 'httpProxyUrl') return true;
    if (field === 'httpProxyUser') return true;
    if (field === 'httpProxyPassword') return true;
    return false;
  },
};

@@ -0,0 +1,14 @@
{
  "compilerOptions": {
    "target": "ES2018",
    "module": "commonjs",
    "declaration": true,
    "skipLibCheck": true,
    "outDir": "lib",
    "preserveWatchOutput": true,
    "esModuleInterop": true
  },
  "include": [
    "src/**/*"
  ]
}

@@ -41,7 +41,7 @@ STORAGE_DATABASE=dbname
 STORAGE_ENGINE=mysql@dbgate-plugin-mysql
 ```
 
-You could find more about environment variable configuration on [DbGate docs](https://dbgate.org/docs/env-variables/) page.
+You could find more about environment variable configuration on [DbGate docs](https://docs.dbgate.io/env-variables/) page.
 
 After installing, you can run dbgate with command:
 ```sh
@@ -65,7 +65,7 @@ dbgate-serve
 Then open http://localhost:3000 in your browser
 
 ## Download desktop app
-You can also download binary packages for desktop app from https://dbgate.org . Or run from source code, as described on [github](https://github.com/dbgate/dbgate)
+You can also download binary packages for desktop app from https://www.dbgate.io . Or run from source code, as described on [github](https://github.com/dbgate/dbgate)
 
 ## Use Oracle with Instant client (thick mode)
 If you are Oracle database user and you would like to use Oracle instant client (thick mode) instead of thin mode (pure JS NPM package), please make the following:
@@ -1,7 +1,7 @@
 {
   "name": "dbgate-serve",
   "version": "7.0.0-alpha.1",
-  "homepage": "https://dbgate.org/",
+  "homepage": "https://www.dbgate.io/",
   "repository": {
     "type": "git",
     "url": "https://github.com/dbgate/dbgate.git"
@@ -3,7 +3,7 @@
   "name": "dbgate-sqltree",
   "main": "lib/index.js",
   "typings": "lib/index.d.ts",
-  "homepage": "https://dbgate.org/",
+  "homepage": "https://www.dbgate.io/",
   "repository": {
     "type": "git",
     "url": "https://github.com/dbgate/dbgate.git"
@@ -1,8 +1,55 @@
 import type { SqlDumper } from 'dbgate-types';
-import { Condition, BinaryCondition } from './types';
+import { Condition, BinaryCondition, LikeCondition } from './types';
 import { dumpSqlExpression } from './dumpSqlExpression';
 import { dumpSqlSelect } from './dumpSqlCommand';
 
+function dumpLikeAsFunctionCondition(dmp: SqlDumper, condition: LikeCondition) {
+  // For DynamoDB: contains() works only on string attributes
+  // For numeric values, search both as number and as string
+  const likeExpr = condition.right;
+
+  let isNumericValue = false;
+  let numericStringValue = '';
+  if (likeExpr.exprType === 'value' && typeof likeExpr.value === 'string') {
+    const cleanedStr = (likeExpr.value || '').replace(/%/g, '').trim();
+    // Only match valid decimal numbers (not Infinity, NaN, etc.)
+    isNumericValue = /^-?\d+(\.\d+)?$/.test(cleanedStr);
+    numericStringValue = cleanedStr;
+  } else if (likeExpr.exprType === 'value' && typeof likeExpr.value === 'number') {
+    isNumericValue = Number.isFinite(likeExpr.value);
+    numericStringValue = String(likeExpr.value);
+  }
+
+  if (isNumericValue) {
+    // For numeric values: (column = value OR contains(column, 'value'))
+    dmp.putRaw('(');
+    dumpSqlExpression(dmp, condition.left);
+    dmp.putRaw(' = ');
+    dmp.put('%s', numericStringValue);
+    dmp.putRaw(' OR contains(');
+    dumpSqlExpression(dmp, condition.left);
+    dmp.putRaw(', ');
+    dmp.put('%v', numericStringValue);
+    dmp.putRaw('))');
+  } else {
+    // String value: contains(column, value)
+    dmp.putRaw('contains(');
+    dumpSqlExpression(dmp, condition.left);
+    dmp.putRaw(', ');
+    if (likeExpr.exprType === 'value') {
+      let cleanValue = likeExpr.value;
+      if (typeof cleanValue === 'string') {
+        cleanValue = cleanValue.replace(/%/g, '');
+      }
+      dmp.put('%v', cleanValue);
+    } else {
+      dumpSqlExpression(dmp, likeExpr);
+    }
+    dmp.putRaw(')');
+  }
+}
+
 export function dumpSqlCondition(dmp: SqlDumper, condition: Condition) {
   switch (condition.conditionType) {
     case 'binary':
@@ -51,9 +98,13 @@ export function dumpSqlCondition(dmp: SqlDumper, condition: Condition) {
       });
       break;
     case 'like':
-      dumpSqlExpression(dmp, condition.left);
-      dmp.put(dmp.dialect.ilike ? ' ^ilike ' : ' ^like ');
-      dumpSqlExpression(dmp, condition.right);
+      if (dmp.dialect.likeAsFunction) {
+        dumpLikeAsFunctionCondition(dmp, condition);
+      } else {
+        dumpSqlExpression(dmp, condition.left);
+        dmp.put(dmp.dialect.ilike ? ' ^ilike ' : ' ^like ');
+        dumpSqlExpression(dmp, condition.right);
+      }
       break;
     case 'notLike':
       dumpSqlExpression(dmp, condition.left);
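
// Illustrative only: for a dialect with likeAsFunction (DynamoDB), a LIKE condition such as
//   { conditionType: 'like', left: <column expression for "price">, right: { exprType: 'value', value: '%42%' } }
// is detected as numeric and rendered roughly as
//   (price = 42 OR contains(price, '42'))
// while a plain string pattern like '%dino%' on a "name" column renders roughly as
//   contains(name, 'dino')
// (the exact quoting of values comes from the SqlDumper %s / %v format specifiers).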