Compare commits

...

132 Commits

Author SHA1 Message Date
Stela Augustinova
10679e9899 Merge pull request #1423 from dbgate/feature/remote-code-execution
Add validation for JavaScript identifiers and shell API function names
2026-04-14 09:51:20 +02:00
Stela Augustinova
c51dad39e0 Improve saveRows method to handle backpressure and errors in file writing 2026-04-14 09:49:14 +02:00
Stela Augustinova
1d350a3a29 Add validation to assertSafeArchiveName to prevent resolving to archive root 2026-04-13 14:55:29 +02:00
Stela Augustinova
81e3cce070 Add validation for linkedFolder in createLink method 2026-04-13 13:54:48 +02:00
Stela Augustinova
f9de2d77b5 Moved functionName validation 2026-04-13 13:44:45 +02:00
Stela Augustinova
3956eaf389 Improve error handling in unzipDirectory by adding readStream error listener and immediate abort on file extraction failure 2026-04-13 13:20:28 +02:00
Stela Augustinova
d13e2c2d87 Make fs.writeFile call awaitable in createLink method for proper async handling 2026-04-13 12:53:54 +02:00
Stela Augustinova
ebf2371da9 Enhance unzipDirectory error handling and manage active streams for better resource cleanup 2026-04-13 12:43:45 +02:00
Stela Augustinova
fa4b12448d Refactor unzipDirectory to improve error handling and prevent multiple rejections 2026-04-13 10:48:01 +02:00
Stela Augustinova
5fe6dfa551 Refactor loaderScriptTemplate to streamline plugin extraction and improve readability 2026-04-13 10:41:12 +02:00
Stela Augustinova
6061c8b0a5 Update JS reserved words 2026-04-13 10:31:12 +02:00
Stela Augustinova
1ac0aa8a3e Add path traversal and null byte checks for archive names and ZIP entries 2026-04-13 09:19:03 +02:00
Stela Augustinova
5d04d7f01f Enhance JavaScript identifier validation and update variable storage method in ScriptWriterEval 2026-04-10 16:15:45 +02:00
Stela Augustinova
9c97e347c5 Add validation for JavaScript identifiers and shell API function names 2026-04-10 13:22:54 +02:00
Stela Augustinova
22967d123d Add 7.1.8 entry to CHANGELOG with fixed NPM packages build 2026-04-09 16:05:50 +02:00
Stela Augustinova
3fed650254 Add postgresql optimalization to 7.1.7 changelog 2026-04-09 16:03:24 +02:00
Stela Augustinova
b57b2083d3 v7.1.8 2026-04-09 15:35:13 +02:00
Stela Augustinova
1f47e8c62e v7.1.8-alpha.7 2026-04-09 15:26:28 +02:00
CI workflows
d7ce653d74 chore: auto-update github workflows 2026-04-09 13:25:14 +00:00
Stela Augustinova
07c803efee Update npm publishing steps and remove unnecessary access flag 2026-04-09 15:24:48 +02:00
Stela Augustinova
26b6d9133e v7.1.8-alpha.6 2026-04-09 15:15:37 +02:00
CI workflows
146084bdb3 chore: auto-update github workflows 2026-04-09 13:14:54 +00:00
Stela Augustinova
fa82b4630b Specify npm version to 11.5.1 for consistency 2026-04-09 15:14:24 +02:00
Stela Augustinova
d00841030f v7.1.8-alpha.5 2026-04-09 15:01:14 +02:00
CI workflows
c517bb0be6 chore: auto-update github workflows 2026-04-09 13:00:00 +00:00
Stela Augustinova
e585d8be8f Add public access to npm publish commands in build workflow 2026-04-09 14:59:25 +02:00
Stela Augustinova
8be76832a5 v7.1.8-alpha.4 2026-04-09 14:54:23 +02:00
Stela Augustinova
99df266a3e v7.1.8-aplha.4 2026-04-09 14:50:56 +02:00
CI workflows
5660874992 chore: auto-update github workflows 2026-04-09 12:50:31 +00:00
Stela Augustinova
b0dade9da3 Configure NPM token in build workflow 2026-04-09 14:50:11 +02:00
Stela Augustinova
a533858804 v7.1.8-alpha.3 2026-04-09 14:20:39 +02:00
CI workflows
d3bcc984e7 chore: auto-update github workflows 2026-04-09 12:18:26 +00:00
Stela Augustinova
99e8307a80 Enable NPM token configuration in build workflow 2026-04-09 14:17:59 +02:00
Stela Augustinova
73926ea392 v7.1.8-alpha.2 2026-04-09 14:12:17 +02:00
CI workflows
5ff24526b7 chore: auto-update github workflows 2026-04-09 12:11:15 +00:00
Stela Augustinova
32ed1c57bd Update Node.js setup to use yarn caching and remove npm install step 2026-04-09 14:10:50 +02:00
Stela Augustinova
f4c3a95348 v7.1.8-alpha.1 2026-04-09 14:02:38 +02:00
CI workflows
b1a908343a chore: auto-update github workflows 2026-04-09 11:58:25 +00:00
Stela Augustinova
7f9d7eb36e Update Node.js setup action and enable npm caching 2026-04-09 13:57:51 +02:00
Stela Augustinova
30820e29fc Update CHANGELOG for version 7.1.7 2026-04-09 13:23:07 +02:00
Stela Augustinova
a85ea2e0f7 v7.1.7 2026-04-09 12:56:57 +02:00
Stela Augustinova
993e713955 v7.1.7-premium-beta.5 2026-04-09 12:11:02 +02:00
Stela Augustinova
3151e30db1 SYNC: Update translations 2026-04-09 08:59:26 +00:00
Jan Prochazka
eb5219dd68 Merge pull request #1422 from dbgate/feature/duplicate-translation-keys
Remove duplicate translation keys
2026-04-09 10:49:30 +02:00
Stela Augustinova
bb44783369 Refactor translation keys to eliminate duplicates in QueryTab component 2026-04-09 10:33:33 +02:00
CI workflows
33b46c4db3 chore: auto-update github workflows 2026-04-09 08:24:34 +00:00
Jan Prochazka
3730aae62a Merge pull request #1419 from dbgate/feature/map-referer
Added referer
2026-04-09 10:24:25 +02:00
CI workflows
065062d58a Update pro ref 2026-04-09 08:24:16 +00:00
Jan Prochazka
7b2f58e68e SYNC: Merge pull request #92 from dbgate/feature/ai-toggle 2026-04-09 08:24:02 +00:00
Stela Augustinova
e2fc23fcf8 Remove duplicate translation keys 2026-04-09 10:12:39 +02:00
SPRINX0\prochazka
6f56ef284d v7.1.7-premium-beta.4 2026-04-08 16:14:19 +02:00
SPRINX0\prochazka
08a644ba39 v7.1.7-premuim-beta.4 2026-04-08 16:07:40 +02:00
CI workflows
6ae19ac4a6 chore: auto-update github workflows 2026-04-08 14:06:22 +00:00
CI workflows
7761cbe81d Update pro ref 2026-04-08 14:05:57 +00:00
Jan Prochazka
f981d88150 SYNC: Merge pull request #91 from dbgate/feature/query-history-per-user 2026-04-08 14:05:40 +00:00
CI workflows
e2a23eaa0d chore: auto-update github workflows 2026-04-08 12:57:03 +00:00
CI workflows
9d510b3c08 Update pro ref 2026-04-08 12:56:40 +00:00
SPRINX0\prochazka
a98f5ac45e reverted yarn.lock 2026-04-08 14:03:13 +02:00
SPRINX0\prochazka
b989e964c0 v7.1.7-premium-beta.3 2026-04-08 13:34:11 +02:00
CI workflows
3ff6eefa06 chore: auto-update github workflows 2026-04-08 11:29:47 +00:00
CI workflows
67fde9be3c Update pro ref 2026-04-08 11:29:28 +00:00
SPRINX0\prochazka
df7ac89723 SYNC: v7.1.7-premium-beta.2 2026-04-08 11:29:18 +00:00
SPRINX0\prochazka
358df9f53b SYNC: try to fix ms entra login 2026-04-08 11:29:15 +00:00
Stela Augustinova
02e3bfaa8a Added referer 2026-04-08 12:05:42 +02:00
Jan Prochazka
dde74fa73b Merge pull request #1407 from dbgate/feature/postgres-optimalization
Feature/postgres optimalization
2026-04-08 11:46:42 +02:00
SPRINX0\prochazka
100e3fe75f deleted sast workflows 2026-04-08 10:59:29 +02:00
SPRINX0\prochazka
af7930cea2 Enhance aggregation functions in SQL queries for improved PostgreSQL compatibility 2026-04-08 10:55:24 +02:00
SPRINX0\prochazka
6b4f6b909c Merge branch 'feature/postgres-optimalization' of https://github.com/dbgate/dbgate into feature/postgres-optimalization 2026-04-08 10:26:35 +02:00
Jan Prochazka
9a6e5cd7cc Update plugins/dbgate-plugin-postgres/src/backend/sql/views.js
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-04-08 10:21:22 +02:00
SPRINX0\prochazka
9f64b6ec7a Merge branch 'master' into feature/postgres-optimalization 2026-04-08 10:20:28 +02:00
Stela Augustinova
77f720e34c Refactor connection handling in extractShellConnection to improve volatile ID management and ensure secure credential handling 2026-04-08 10:20:09 +02:00
Stela Augustinova
168dcb7824 Enhance error handling for connection requests in subprocesses and validate connection ID format 2026-04-08 10:20:09 +02:00
Stela Augustinova
759186a212 Improve error handling for volatile connection responses in subprocess communication 2026-04-08 10:20:09 +02:00
Stela Augustinova
71ed7a76ea Handle errors in volatile connection resolution and remove unused registration function 2026-04-08 10:20:09 +02:00
Stela Augustinova
bd939b22c7 Fix volatile connection resolution to prevent multiple resolves 2026-04-08 10:20:09 +02:00
Stela Augustinova
c327f77294 Refactor volatile connections handling in connections and runners modules 2026-04-08 10:20:09 +02:00
Stela Augustinova
d907d79beb Streamline volatile connections handling and remove unused registration module 2026-04-08 10:20:09 +02:00
Stela Augustinova
93b879927c Implement volatile connections handling in runners and shell modules 2026-04-08 10:20:09 +02:00
Stela Augustinova
0c545d4cf9 Enhance clipboard formatters to skip empty rows, improving data handling in clipboard operations 2026-04-08 10:20:09 +02:00
Stela Augustinova
95c90c1517 Improve clipboard formatters to omit undefined values, enhancing data integrity in exports 2026-04-08 10:20:09 +02:00
CI workflows
cb731fa858 chore: auto-update github workflows 2026-04-08 10:20:09 +02:00
Stela Augustinova
9bb3b09ecf SYNC: Add SAST workflow for security scanning using Semgrep 2026-04-08 10:20:09 +02:00
SPRINX0\prochazka
7c8f541d3e deleted sast workflow 2026-04-08 10:18:37 +02:00
Jan Prochazka
ce41687382 Merge pull request #1417 from dbgate/feature/auth-error
Implement volatile connections handling in runners and shell modules
2026-04-08 10:14:02 +02:00
Stela Augustinova
4b083dea5c Refactor connection handling in extractShellConnection to improve volatile ID management and ensure secure credential handling 2026-04-07 14:56:29 +02:00
Stela Augustinova
c84473c1eb Enhance error handling for connection requests in subprocesses and validate connection ID format 2026-04-07 14:26:58 +02:00
Stela Augustinova
7fc078f3e6 Improve error handling for volatile connection responses in subprocess communication 2026-04-07 14:15:18 +02:00
Stela Augustinova
cbbd538248 Handle errors in volatile connection resolution and remove unused registration function 2026-04-07 14:01:13 +02:00
Stela Augustinova
825f6e562b Fix volatile connection resolution to prevent multiple resolves 2026-04-07 13:46:34 +02:00
Stela Augustinova
a278afb260 Refactor volatile connections handling in connections and runners modules 2026-04-07 13:42:11 +02:00
Stela Augustinova
2fbeea717c Streamline volatile connections handling and remove unused registration module 2026-04-07 13:26:16 +02:00
Jan Prochazka
c7259e4663 Merge pull request #1412 from dbgate/feature/copy-sql
Improve clipboard formatters to omit undefined values, enhancing data…
2026-04-07 13:11:49 +02:00
Stela Augustinova
69a2669342 Implement volatile connections handling in runners and shell modules 2026-04-07 13:06:04 +02:00
CI workflows
42d1ca8fd4 chore: auto-update github workflows 2026-04-07 10:27:40 +00:00
Stela Augustinova
1cf52d8b39 SYNC: Add SAST workflow for security scanning using Semgrep 2026-04-07 10:27:24 +00:00
Jan Prochazka
6e482afab2 v7.1.7-premium-beta.1 2026-04-02 16:39:06 +02:00
SPRINX0\prochazka
ddf3295e6d Merge branch 'master' into feature/postgres-optimalization 2026-04-02 16:33:25 +02:00
SPRINX0\prochazka
79e087abd3 Optimize PostgreSQL analysis queries and add support for Info Schema routines 2026-04-02 16:32:36 +02:00
CI workflows
a7cf51bdf7 chore: auto-update github workflows 2026-04-02 13:55:33 +00:00
Jan Prochazka
dfdb31e2f8 Merge pull request #1413 from dbgate/feature/integration-test-pro
Update test workflow to include directory changes for integration tests
2026-04-02 15:55:14 +02:00
Stela Augustinova
3508ddc3ca Update test workflow to include directory changes for integration tests 2026-04-02 11:02:36 +02:00
Stela Augustinova
137fc6b928 Enhance clipboard formatters to skip empty rows, improving data handling in clipboard operations 2026-04-02 10:29:02 +02:00
Jan Prochazka
e6f5295420 Merge pull request #1410 from dbgate/feature/large-fields
Enhance binary size handling in grid cell display
2026-04-01 16:01:23 +02:00
CI workflows
2bb08921c3 chore: auto-update github workflows 2026-04-01 13:55:00 +00:00
Stela Augustinova
ee2d0e4c30 Remove unnecessary restart policy for DynamoDB service 2026-04-01 15:54:35 +02:00
Jan Prochazka
c43a838572 Merge pull request #1411 from dbgate/feature/unreadable-dropdown
Correct class binding and update style variables in SelectField compo…
2026-04-01 15:53:23 +02:00
CI workflows
17ff6a8013 chore: auto-update github workflows 2026-04-01 13:53:13 +00:00
Stela Augustinova
62ad6a0d08 Remove unnecessary restart policy for MongoDB service 2026-04-01 15:52:48 +02:00
CI workflows
5c049fa867 chore: auto-update github workflows 2026-04-01 13:51:09 +00:00
CI workflows
619f17114a Update pro ref 2026-04-01 13:50:58 +00:00
Stela Augustinova
1c1431014c SYNC: Merge pull request #87 from dbgate/feature/collection-test 2026-04-01 13:50:46 +00:00
Stela Augustinova
9d1d7b7e34 Improve clipboard formatters to omit undefined values, enhancing data integrity in exports 2026-04-01 15:49:35 +02:00
Stela Augustinova
f68ca1e786 Correct class binding and update style variables in SelectField component 2026-04-01 13:24:34 +02:00
Jan Prochazka
588cd39d7c Merge pull request #1404 from dbgate/feature/fetch-all-button
Add fetch all button
2026-04-01 09:44:04 +02:00
Stela Augustinova
79ebfa9b7a Add fetchAll command to dataGrid menu 2026-03-31 13:37:06 +02:00
Stela Augustinova
0c6b2746d1 Fix file stream reference in jsldata and remove redundant buffer assignment in LoadingDataGridCore 2026-03-31 08:59:33 +02:00
Stela Augustinova
978972c55c Enhance file path validation in streamRows to include symlink resolution and case normalization, improving security and error handling 2026-03-31 08:31:43 +02:00
Stela Augustinova
37854fc577 Refactor fetchAll to trim lines before parsing, improving error handling for malformed data 2026-03-31 06:54:37 +02:00
Stela Augustinova
5537e193a6 Improve fetchAll error handling and cleanup process during streaming and paginated reads 2026-03-31 06:21:06 +02:00
Stela Augustinova
0d42b2b133 Refactor fetchAll cancel function to improve cleanup process and prevent errors 2026-03-30 15:48:35 +02:00
Stela Augustinova
44bd7972d4 Enhance fetchAll functionality with improved error handling and state management 2026-03-30 14:34:57 +02:00
Stela Augustinova
5143eb39f7 Implement fetchAll functionality with streaming support and error handling 2026-03-30 13:30:12 +02:00
Stela Augustinova
cf51883b3e Add checkbox to skip confirmation when fetching all rows 2026-03-26 15:24:25 +01:00
Stela Augustinova
484ca0c78a Reset loaded time reference in reload function 2026-03-26 15:11:11 +01:00
Stela Augustinova
8f5cad0e2c Prevent loading next data when fetching all rows is in progress 2026-03-26 15:03:54 +01:00
Stela Augustinova
988512a571 Update warning message in FetchAllConfirmModal to simplify language 2026-03-26 14:50:09 +01:00
Stela Augustinova
f8bd380051 Optimize fetchAllRows by using a local buffer to reduce array copies and improve performance 2026-03-26 14:19:11 +01:00
Stela Augustinova
281131dbba Enhance fetchAll functionality by adding loading state check 2026-03-26 14:07:12 +01:00
Stela Augustinova
2505c61975 Add fetch all button 2026-03-26 11:24:05 +01:00
SPRINX0\prochazka
899aec2658 v7.1.5-premium-beta.1 2026-03-20 14:24:11 +01:00
SPRINX0\prochazka
74e47587e2 Merge branch 'master' into feature/postgres-optimalization 2026-03-20 14:23:40 +01:00
SPRINX0\prochazka
cb70f3c318 postgres loading optimalization 2026-03-19 12:17:29 +01:00
74 changed files with 3852 additions and 1790 deletions

View File

@@ -47,7 +47,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 5baa88d0ad253537298e911868579bae0835888d
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro

View File

@@ -47,7 +47,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 5baa88d0ad253537298e911868579bae0835888d
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro

View File

@@ -39,7 +39,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 5baa88d0ad253537298e911868579bae0835888d
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro

View File

@@ -44,7 +44,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 5baa88d0ad253537298e911868579bae0835888d
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro

View File

@@ -35,7 +35,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 5baa88d0ad253537298e911868579bae0835888d
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
@@ -53,7 +53,7 @@ jobs:
cd dbgate-merged
node adjustNpmPackageJsonPremium
- name: Update npm
run: npm install -g npm@latest
run: npm install -g npm@11.5.1
- name: Remove dbmodel - should be not published
run: |
cd ..

View File

@@ -30,7 +30,7 @@ jobs:
with:
node-version: 22.x
- name: Update npm
run: npm install -g npm@latest
run: npm install -g npm@11.5.1
- name: yarn install
run: |
yarn install

View File

@@ -30,7 +30,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 5baa88d0ad253537298e911868579bae0835888d
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro

View File

@@ -23,26 +23,49 @@ jobs:
- uses: actions/checkout@v3
with:
fetch-depth: 1
- name: Checkout dbgate/dbgate-pro
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
mv dbgate-pro/* ../dbgate-pro/
cd ..
mkdir dbgate-merged
cd dbgate-pro
cd sync
yarn
node sync.js --nowatch
cd ..
- name: yarn install
run: |
cd ../dbgate-merged
yarn install
- name: Integration tests
run: |
cd ../dbgate-merged
cd integration-tests
yarn test:ci
- name: Filter parser tests
if: always()
run: |
cd ../dbgate-merged
cd packages/filterparser
yarn test:ci
- name: Datalib (perspective) tests
if: always()
run: |
cd ../dbgate-merged
cd packages/datalib
yarn test:ci
- name: Tools tests
if: always()
run: |
cd ../dbgate-merged
cd packages/tools
yarn test:ci
services:
@@ -98,3 +121,14 @@ jobs:
FIREBIRD_USE_LEGACY_AUTH: true
ports:
- '3050:3050'
mongodb:
image: mongo:4.0.12
ports:
- '27017:27017'
volumes:
- mongo-data:/data/db
- mongo-config:/data/configdb
dynamodb:
image: amazon/dynamodb-local
ports:
- '8000:8000'

View File

@@ -6,3 +6,4 @@
- GUI uses Svelte4 (packages/web)
- GUI is tested with E2E tests in `e2e-tests` folder, using Cypress. Use data-testid attribute in components to make them easier to test.
- data-testid format: ComponentName_identifier. Use reasonable identifiers
- don't change content of storageModel.js - this is generated from table YAMLs with "yarn storage-json" command

View File

@@ -9,6 +9,26 @@ Builds:
- linux - application for linux
- win - application for Windows
## 7.1.8
- FIXED: NPM packages build
## 7.1.7
- FIXED: Resolved duplicate translation tags #1420
- FIXED: Referer error on map display #1418
- FIXED: Export failure when password mode is enabled #1409
- FIXED: Unreadable text in export #1408
- FIXED: Column names set to "undefined" in export #1406
- FIXED: Fixed freezing issues with large fields #1399
- ADDED: "Fetch All" button #1398
- ADDED: Option to disable AI features
- ADDED: PostgreSQL loading optimalization
## 7.1.6
- FIXED: Issues with cloud and file loading
## 7.1.5
- FIXED: Issues with cloud and file loading

View File

@@ -400,6 +400,14 @@ function createWindow() {
},
});
mainWindow.webContents.session.webRequest.onBeforeSendHeaders(
{ urls: ['https://*.tile.openstreetmap.org/*'] },
(details, callback) => {
details.requestHeaders['Referer'] = 'https://www.dbgate.io';
callback({ requestHeaders: details.requestHeaders });
}
);
if (initialConfig['winIsMaximized']) {
mainWindow.maximize();
}

View File

@@ -0,0 +1,536 @@
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
const crypto = require('crypto');
const stream = require('stream');
const { mongoDbEngine, dynamoDbEngine } = require('../engines');
const tableWriter = require('dbgate-api/src/shell/tableWriter');
const tableReader = require('dbgate-api/src/shell/tableReader');
const copyStream = require('dbgate-api/src/shell/copyStream');
function randomCollectionName() {
return 'test_' + crypto.randomBytes(6).toString('hex');
}
// Engines exercised by every suite below via describe.each; each entry pairs
// a display label (used in the '%s' describe title) with its engine/connection
// definition imported from ../engines.
const documentEngines = [
  { label: 'MongoDB', engine: mongoDbEngine },
  { label: 'DynamoDB', engine: dynamoDbEngine },
];
// Resolve the driver for the given engine definition and open a live
// connection; returns both so callers can issue driver calls against it.
async function connectEngine(engine) {
  const connectionConfig = engine.connection;
  const driver = requireEngineDriver(connectionConfig);
  const conn = await driver.connect(connectionConfig);
  return { driver, conn };
}
// Create a collection/table on the target engine. DynamoDB needs an explicit
// partition-key definition; document stores (MongoDB) only need a name.
async function createCollection(driver, conn, collectionName, engine) {
  const isDynamo = engine.connection.engine.startsWith('dynamodb');
  const collection = isDynamo
    ? {
        name: collectionName,
        partitionKey: '_id',
        partitionKeyType: 'S',
      }
    : { name: collectionName };
  await driver.operation(conn, {
    type: 'createCollection',
    collection,
  });
}
// Best-effort drop used in test teardown: any failure (e.g. the collection
// never existed or was already removed) is deliberately swallowed.
async function dropCollection(driver, conn, collectionName) {
  const request = {
    type: 'dropCollection',
    collection: collectionName,
  };
  try {
    await driver.operation(conn, request);
  } catch (e) {
    // ignore — cleanup must never fail the test
  }
}
// Insert a single document through the driver's batch change-set API
// (one insert, no updates, no deletes).
async function insertDocument(driver, conn, collectionName, doc) {
  const changeSet = {
    inserts: [{ pureName: collectionName, document: {}, fields: doc }],
    updates: [],
    deletes: [],
  };
  return driver.updateCollection(conn, changeSet);
}
// Read every document in the collection, capped at 1000 rows — plenty for
// the small fixture sets these tests insert.
async function readAll(driver, conn, collectionName) {
  const query = { pureName: collectionName, limit: 1000 };
  return driver.readCollection(conn, query);
}
// Apply a partial update (set `fields`) to documents matching `condition`,
// via the driver's batch change-set API.
async function updateDocument(driver, conn, collectionName, condition, fields) {
  const changeSet = {
    inserts: [],
    updates: [{ pureName: collectionName, condition, fields }],
    deletes: [],
  };
  return driver.updateCollection(conn, changeSet);
}
// Delete documents matching `condition` via the driver's batch change-set API.
async function deleteDocument(driver, conn, collectionName, condition) {
  const changeSet = {
    inserts: [],
    updates: [],
    deletes: [{ pureName: collectionName, condition }],
  };
  return driver.updateCollection(conn, changeSet);
}
// CRUD smoke tests, executed once per document engine (MongoDB, DynamoDB)
// through describe.each. Each test runs against a fresh, randomly-named
// collection so tests cannot interfere with one another.
describe('Collection CRUD', () => {
  describe.each(documentEngines.map(e => [e.label, e.engine]))('%s', (label, engine) => {
    let driver;
    let conn;
    let collectionName;
    // One shared connection per engine for the whole suite.
    beforeAll(async () => {
      const result = await connectEngine(engine);
      driver = result.driver;
      conn = result.conn;
    });
    afterAll(async () => {
      if (conn) {
        await driver.close(conn);
      }
    });
    // Fresh collection per test; dropped (best-effort) after each test.
    beforeEach(async () => {
      collectionName = randomCollectionName();
      await createCollection(driver, conn, collectionName, engine);
    });
    afterEach(async () => {
      await dropCollection(driver, conn, collectionName);
    });
    // ---- INSERT ----
    test('insert a single document', async () => {
      const res = await insertDocument(driver, conn, collectionName, {
        _id: 'doc1',
        name: 'Alice',
        age: 30,
      });
      expect(res.inserted.length).toBe(1);
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Alice');
      expect(all.rows[0].age).toBe(30);
    });
    test('insert multiple documents', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'a1', name: 'Alice' });
      await insertDocument(driver, conn, collectionName, { _id: 'a2', name: 'Bob' });
      await insertDocument(driver, conn, collectionName, { _id: 'a3', name: 'Charlie' });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(3);
      // Sort names — row order is not guaranteed by either engine.
      const names = all.rows.map(r => r.name).sort();
      expect(names).toEqual(['Alice', 'Bob', 'Charlie']);
    });
    test('insert document with nested object', async () => {
      await insertDocument(driver, conn, collectionName, {
        _id: 'nested1',
        name: 'Alice',
        address: { city: 'Prague', zip: '11000' },
      });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].address.city).toBe('Prague');
      expect(all.rows[0].address.zip).toBe('11000');
    });
    // ---- READ ----
    test('read from empty collection returns no rows', async () => {
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(0);
    });
    test('read with limit', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'l1', name: 'A' });
      await insertDocument(driver, conn, collectionName, { _id: 'l2', name: 'B' });
      await insertDocument(driver, conn, collectionName, { _id: 'l3', name: 'C' });
      const limited = await driver.readCollection(conn, {
        pureName: collectionName,
        limit: 2,
      });
      expect(limited.rows.length).toBe(2);
    });
    test('count documents', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'c1', name: 'A' });
      await insertDocument(driver, conn, collectionName, { _id: 'c2', name: 'B' });
      // countDocuments: true switches readCollection into count mode.
      const result = await driver.readCollection(conn, {
        pureName: collectionName,
        countDocuments: true,
      });
      expect(result.count).toBe(2);
    });
    test('count documents on empty collection returns zero', async () => {
      const result = await driver.readCollection(conn, {
        pureName: collectionName,
        countDocuments: true,
      });
      expect(result.count).toBe(0);
    });
    // ---- UPDATE ----
    test('update an existing document', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'u1', name: 'Alice', age: 25 });
      const res = await updateDocument(driver, conn, collectionName, { _id: 'u1' }, { name: 'Alice Updated' });
      expect(res.errorMessage).toBeUndefined();
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Alice Updated');
    });
    test('update does not create new document', async () => {
      // Updating with a non-matching condition must not upsert.
      await insertDocument(driver, conn, collectionName, { _id: 'u2', name: 'Bob' });
      await updateDocument(driver, conn, collectionName, { _id: 'nonexistent' }, { name: 'Ghost' });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Bob');
    });
    test('update only specified fields', async () => {
      // A partial update must leave untouched fields intact.
      await insertDocument(driver, conn, collectionName, { _id: 'u3', name: 'Carol', age: 40, city: 'London' });
      await updateDocument(driver, conn, collectionName, { _id: 'u3' }, { age: 41 });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Carol');
      expect(all.rows[0].age).toBe(41);
      expect(all.rows[0].city).toBe('London');
    });
    // ---- DELETE ----
    test('delete an existing document', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'd1', name: 'Alice' });
      await insertDocument(driver, conn, collectionName, { _id: 'd2', name: 'Bob' });
      const res = await deleteDocument(driver, conn, collectionName, { _id: 'd1' });
      expect(res.errorMessage).toBeUndefined();
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Bob');
    });
    test('delete non-existing document does not affect collection', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'dx1', name: 'Alice' });
      await deleteDocument(driver, conn, collectionName, { _id: 'nonexistent' });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Alice');
    });
    test('delete all documents leaves empty collection', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'da1', name: 'A' });
      await insertDocument(driver, conn, collectionName, { _id: 'da2', name: 'B' });
      await deleteDocument(driver, conn, collectionName, { _id: 'da1' });
      await deleteDocument(driver, conn, collectionName, { _id: 'da2' });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(0);
    });
    // ---- EDGE CASES ----
    test('insert and read document with empty string field', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'e1', name: '', value: 'test' });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('');
      expect(all.rows[0].value).toBe('test');
    });
    test('insert and read document with numeric values', async () => {
      await insertDocument(driver, conn, collectionName, {
        _id: 'n1',
        intVal: 42,
        floatVal: 3.14,
        zero: 0,
        negative: -10,
      });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].intVal).toBe(42);
      // toBeCloseTo — float round-trip through the engine may not be exact.
      expect(all.rows[0].floatVal).toBeCloseTo(3.14);
      expect(all.rows[0].zero).toBe(0);
      expect(all.rows[0].negative).toBe(-10);
    });
    test('insert and read document with boolean values', async () => {
      await insertDocument(driver, conn, collectionName, {
        _id: 'b1',
        active: true,
        deleted: false,
      });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].active).toBe(true);
      expect(all.rows[0].deleted).toBe(false);
    });
    test('reading non-existing collection returns error or empty', async () => {
      const result = await driver.readCollection(conn, {
        pureName: 'nonexistent_collection_' + crypto.randomBytes(4).toString('hex'),
        limit: 10,
      });
      // Depending on the driver, this may return an error or empty rows
      if (result.errorMessage) {
        expect(typeof result.errorMessage).toBe('string');
      } else {
        expect(result.rows.length).toBe(0);
      }
    });
    test('replace full document via update with document field', async () => {
      // Passing `document` (instead of `fields`) performs a full replace,
      // so the original `extra` field must disappear.
      await insertDocument(driver, conn, collectionName, { _id: 'r1', name: 'Original', extra: 'data' });
      await driver.updateCollection(conn, {
        inserts: [],
        updates: [
          {
            pureName: collectionName,
            condition: { _id: 'r1' },
            document: { _id: 'r1', name: 'Replaced' },
            fields: {},
          },
        ],
        deletes: [],
      });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Replaced');
    });
    test('insert then update then delete lifecycle', async () => {
      // Insert
      await insertDocument(driver, conn, collectionName, { _id: 'life1', name: 'Lifecycle', status: 'created' });
      let all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].status).toBe('created');
      // Update
      await updateDocument(driver, conn, collectionName, { _id: 'life1' }, { status: 'updated' });
      all = await readAll(driver, conn, collectionName);
      expect(all.rows[0].status).toBe('updated');
      // Delete
      await deleteDocument(driver, conn, collectionName, { _id: 'life1' });
      all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(0);
    });
  });
});
function createDocumentImportStream(documents) {
const pass = new stream.PassThrough({ objectMode: true });
pass.write({ __isStreamHeader: true, __isDynamicStructure: true });
for (const doc of documents) {
pass.write(doc);
}
pass.end();
return pass;
}
function createExportStream() {
const writable = new stream.Writable({ objectMode: true });
writable.resultArray = [];
writable._write = (chunk, encoding, callback) => {
writable.resultArray.push(chunk);
callback();
};
return writable;
}
// Integration suite: exercises document import/export streaming once per
// configured document engine (describe.each expands to one nested suite per
// entry in documentEngines).
describe('Collection Import/Export', () => {
  describe.each(documentEngines.map(e => [e.label, e.engine]))('%s', (label, engine) => {
    // Shared per-engine state, populated by the hooks below.
    let driver;
    let conn;
    let collectionName;
    beforeAll(async () => {
      // Single connection per engine, reused by every test in this suite.
      const result = await connectEngine(engine);
      driver = result.driver;
      conn = result.conn;
    });
    afterAll(async () => {
      if (conn) {
        await driver.close(conn);
      }
    });
    beforeEach(async () => {
      // Each test gets a fresh, uniquely named collection for isolation.
      collectionName = randomCollectionName();
      await createCollection(driver, conn, collectionName, engine);
    });
    afterEach(async () => {
      await dropCollection(driver, conn, collectionName);
    });
    test('import documents via stream', async () => {
      // Write three documents through the streaming writer, then read the
      // collection back directly and verify count and names.
      const documents = [
        { _id: 'imp1', name: 'Alice', age: 30 },
        { _id: 'imp2', name: 'Bob', age: 25 },
        { _id: 'imp3', name: 'Charlie', age: 35 },
      ];
      const reader = createDocumentImportStream(documents);
      const writer = await tableWriter({
        systemConnection: conn,
        driver,
        pureName: collectionName,
        createIfNotExists: true,
      });
      await copyStream(reader, writer);
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(3);
      const names = all.rows.map(r => r.name).sort();
      expect(names).toEqual(['Alice', 'Bob', 'Charlie']);
    });
    test('export documents via stream', async () => {
      // Seed the collection directly, then export through the streaming
      // reader into the in-memory sink and verify the data rows
      // (the stream-header record is filtered out).
      await insertDocument(driver, conn, collectionName, { _id: 'exp1', name: 'Alice', city: 'Prague' });
      await insertDocument(driver, conn, collectionName, { _id: 'exp2', name: 'Bob', city: 'Vienna' });
      await insertDocument(driver, conn, collectionName, { _id: 'exp3', name: 'Charlie', city: 'Berlin' });
      const reader = await tableReader({
        systemConnection: conn,
        driver,
        pureName: collectionName,
      });
      const writer = createExportStream();
      await copyStream(reader, writer);
      const rows = writer.resultArray.filter(x => !x.__isStreamHeader);
      expect(rows.length).toBe(3);
      const names = rows.map(r => r.name).sort();
      expect(names).toEqual(['Alice', 'Bob', 'Charlie']);
    });
    test('import then export round-trip', async () => {
      // Full round-trip: stream documents in, stream them back out, and
      // verify every field survives unchanged.
      const documents = [
        { _id: 'rt1', name: 'Alice', value: 100 },
        { _id: 'rt2', name: 'Bob', value: 200 },
        { _id: 'rt3', name: 'Charlie', value: 300 },
        { _id: 'rt4', name: 'Diana', value: 400 },
      ];
      // Import
      const importReader = createDocumentImportStream(documents);
      const importWriter = await tableWriter({
        systemConnection: conn,
        driver,
        pureName: collectionName,
        createIfNotExists: true,
      });
      await copyStream(importReader, importWriter);
      // Export
      const exportReader = await tableReader({
        systemConnection: conn,
        driver,
        pureName: collectionName,
      });
      const exportWriter = createExportStream();
      await copyStream(exportReader, exportWriter);
      const rows = exportWriter.resultArray.filter(x => !x.__isStreamHeader);
      expect(rows.length).toBe(4);
      const sortedRows = rows.sort((a, b) => a._id.localeCompare(b._id));
      for (const doc of documents) {
        const found = sortedRows.find(r => r._id === doc._id);
        expect(found).toBeDefined();
        expect(found.name).toBe(doc.name);
        expect(found.value).toBe(doc.value);
      }
    });
    test('import documents with nested objects', async () => {
      // Verifies nested object fields are preserved through the import path.
      const documents = [
        { _id: 'nest1', name: 'Alice', address: { city: 'Prague', zip: '11000' } },
        { _id: 'nest2', name: 'Bob', address: { city: 'Vienna', zip: '1010' } },
      ];
      const reader = createDocumentImportStream(documents);
      const writer = await tableWriter({
        systemConnection: conn,
        driver,
        pureName: collectionName,
        createIfNotExists: true,
      });
      await copyStream(reader, writer);
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(2);
      const alice = all.rows.find(r => r.name === 'Alice');
      expect(alice.address.city).toBe('Prague');
      expect(alice.address.zip).toBe('11000');
    });
    test('import many documents', async () => {
      // 150 documents exceeds typical stream high-water marks and driver
      // batch sizes, exercising batching/backpressure in the writer.
      const documents = [];
      for (let i = 0; i < 150; i++) {
        documents.push({ _id: `many${i}`, name: `Name${i}`, index: i });
      }
      const reader = createDocumentImportStream(documents);
      const writer = await tableWriter({
        systemConnection: conn,
        driver,
        pureName: collectionName,
        createIfNotExists: true,
      });
      await copyStream(reader, writer);
      const result = await driver.readCollection(conn, {
        pureName: collectionName,
        countDocuments: true,
      });
      expect(result.count).toBe(150);
    });
    test('export empty collection returns no data rows', async () => {
      // Only the stream header should be emitted for an empty collection.
      const reader = await tableReader({
        systemConnection: conn,
        driver,
        pureName: collectionName,
      });
      const writer = createExportStream();
      await copyStream(reader, writer);
      const rows = writer.resultArray.filter(x => !x.__isStreamHeader);
      expect(rows.length).toBe(0);
    });
  });
});

View File

@@ -123,5 +123,22 @@ services:
retries: 3
start_period: 40s
mongodb:
image: mongo:4.0.12
restart: always
volumes:
- mongo-data:/data/db
- mongo-config:/data/configdb
ports:
- 27017:27017
dynamodb:
image: amazon/dynamodb-local
restart: always
ports:
- 8000:8000
volumes:
firebird-data:
mongo-data:
mongo-config:

View File

@@ -738,6 +738,27 @@ const firebirdEngine = {
skipDropReferences: true,
};
/** @type {import('dbgate-types').TestEngineInfo} */
const mongoDbEngine = {
label: 'MongoDB',
connection: {
engine: 'mongo@dbgate-plugin-mongo',
server: 'localhost',
port: 27017,
},
};
/** @type {import('dbgate-types').TestEngineInfo} */
const dynamoDbEngine = {
label: 'DynamoDB',
connection: {
engine: 'dynamodb@dbgate-plugin-dynamodb',
server: 'localhost',
port: 8000,
authType: 'onpremise',
},
};
const enginesOnCi = [
// all engines, which would be run on GitHub actions
mysqlEngine,
@@ -788,3 +809,5 @@ module.exports.libsqlFileEngine = libsqlFileEngine;
module.exports.libsqlWsEngine = libsqlWsEngine;
module.exports.duckdbEngine = duckdbEngine;
module.exports.firebirdEngine = firebirdEngine;
module.exports.mongoDbEngine = mongoDbEngine;
module.exports.dynamoDbEngine = dynamoDbEngine;

View File

@@ -1,5 +1,6 @@
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
const engines = require('./engines');
const { mongoDbEngine, dynamoDbEngine } = require('./engines');
global.DBGATE_PACKAGES = {
'dbgate-tools': require('dbgate-tools'),
'dbgate-sqltree': require('dbgate-sqltree'),
@@ -9,7 +10,7 @@ global.DBGATE_PACKAGES = {
async function connectEngine(engine) {
const { connection } = engine;
const driver = requireEngineDriver(connection);
for (;;) {
for (; ;) {
try {
const conn = await driver.connect(connection);
await driver.getVersion(conn);
@@ -26,7 +27,8 @@ async function connectEngine(engine) {
async function run() {
await new Promise(resolve => setTimeout(resolve, 10000));
await Promise.all(engines.map(engine => connectEngine(engine)));
const documentEngines = [mongoDbEngine, dynamoDbEngine];
await Promise.all([...engines, ...documentEngines].map(engine => connectEngine(engine)));
}
run();

View File

@@ -1,6 +1,6 @@
{
"private": true,
"version": "7.1.6",
"version": "7.1.8",
"name": "dbgate-all",
"workspaces": [
"packages/*",

View File

@@ -19,6 +19,26 @@ const unzipDirectory = require('../shell/unzipDirectory');
const logger = getLogger('archive');
/**
* Rejects any archive name (folder or file) that contains path-traversal
* sequences, directory separators, or null bytes. These values are used
* directly in path.join() calls; allowing traversal would let callers read
* or write arbitrary files outside the archive directory.
*/
function assertSafeArchiveName(name, label) {
if (typeof name !== 'string' || name.length === 0) {
throw new Error(`DBGM-00000 Invalid ${label}: must be a non-empty string`);
}
if (name.includes('\0') || name.includes('..') || name.includes('/') || name.includes('\\')) {
throw new Error(`DBGM-00000 Invalid ${label}: path traversal not allowed`);
}
// Reject names that resolve to the archive root itself (e.g. '.')
const resolved = path.resolve(archivedir(), name);
if (resolved === path.resolve(archivedir())) {
throw new Error(`DBGM-00000 Invalid ${label}: must not resolve to the archive root`);
}
}
module.exports = {
folders_meta: true,
async folders() {
@@ -39,6 +59,7 @@ module.exports = {
createFolder_meta: true,
async createFolder({ folder }) {
assertSafeArchiveName(folder, 'folder');
await fs.mkdir(path.join(archivedir(), folder));
socket.emitChanged('archive-folders-changed');
return true;
@@ -46,8 +67,12 @@ module.exports = {
createLink_meta: true,
async createLink({ linkedFolder }) {
if ( typeof linkedFolder !== 'string' || linkedFolder.length === 0) {
throw new Error(`DBGM-00000 Invalid linkedFolder: must be a non-empty string`);
}
assertSafeArchiveName(path.parse(linkedFolder).name, 'linkedFolder');
const folder = await this.getNewArchiveFolder({ database: path.parse(linkedFolder).name + '.link' });
fs.writeFile(path.join(archivedir(), folder), linkedFolder);
await fs.writeFile(path.join(archivedir(), folder), linkedFolder);
clearArchiveLinksCache();
socket.emitChanged('archive-folders-changed');
return folder;
@@ -71,6 +96,7 @@ module.exports = {
files_meta: true,
async files({ folder }) {
assertSafeArchiveName(folder, 'folder');
try {
if (folder.endsWith('.zip')) {
if (await fs.exists(path.join(archivedir(), folder))) {
@@ -121,6 +147,9 @@ module.exports = {
createFile_meta: true,
async createFile({ folder, file, fileType, tableInfo }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
assertSafeArchiveName(fileType, 'fileType');
await fs.writeFile(
path.join(resolveArchiveFolder(folder), `${file}.${fileType}`),
tableInfo ? JSON.stringify({ __isStreamHeader: true, tableInfo }) : ''
@@ -131,6 +160,9 @@ module.exports = {
deleteFile_meta: true,
async deleteFile({ folder, file, fileType }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
assertSafeArchiveName(fileType, 'fileType');
await fs.unlink(path.join(resolveArchiveFolder(folder), `${file}.${fileType}`));
socket.emitChanged(`archive-files-changed`, { folder });
return true;
@@ -138,6 +170,10 @@ module.exports = {
renameFile_meta: true,
async renameFile({ folder, file, newFile, fileType }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
assertSafeArchiveName(newFile, 'newFile');
assertSafeArchiveName(fileType, 'fileType');
await fs.rename(
path.join(resolveArchiveFolder(folder), `${file}.${fileType}`),
path.join(resolveArchiveFolder(folder), `${newFile}.${fileType}`)
@@ -148,6 +184,8 @@ module.exports = {
modifyFile_meta: true,
async modifyFile({ folder, file, changeSet, mergedRows, mergeKey, mergeMode }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
await jsldata.closeDataStore(`archive://${folder}/${file}`);
const changedFilePath = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
@@ -187,6 +225,8 @@ module.exports = {
renameFolder_meta: true,
async renameFolder({ folder, newFolder }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(newFolder, 'newFolder');
const uniqueName = await this.getNewArchiveFolder({ database: newFolder });
await fs.rename(path.join(archivedir(), folder), path.join(archivedir(), uniqueName));
socket.emitChanged(`archive-folders-changed`);
@@ -196,6 +236,7 @@ module.exports = {
deleteFolder_meta: true,
async deleteFolder({ folder }) {
if (!folder) throw new Error('Missing folder parameter');
assertSafeArchiveName(folder, 'folder');
if (folder.endsWith('.link') || folder.endsWith('.zip')) {
await fs.unlink(path.join(archivedir(), folder));
} else {
@@ -207,6 +248,8 @@ module.exports = {
saveText_meta: true,
async saveText({ folder, file, text }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
await fs.writeFile(path.join(resolveArchiveFolder(folder), `${file}.jsonl`), text);
socket.emitChanged(`archive-files-changed`, { folder });
return true;
@@ -214,6 +257,8 @@ module.exports = {
saveJslData_meta: true,
async saveJslData({ folder, file, jslid, changeSet }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
const source = getJslFileName(jslid);
const target = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
if (changeSet) {
@@ -232,11 +277,20 @@ module.exports = {
saveRows_meta: true,
async saveRows({ folder, file, rows }) {
const fileStream = fs.createWriteStream(path.join(resolveArchiveFolder(folder), `${file}.jsonl`));
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
const filePath = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
const fileStream = fs.createWriteStream(filePath);
for (const row of rows) {
await fileStream.write(JSON.stringify(row) + '\n');
const ok = fileStream.write(JSON.stringify(row) + '\n');
if (!ok) {
await new Promise(resolve => fileStream.once('drain', resolve));
}
}
await fileStream.close();
await new Promise((resolve, reject) => {
fileStream.end(() => resolve());
fileStream.on('error', reject);
});
socket.emitChanged(`archive-files-changed`, { folder });
return true;
},
@@ -256,6 +310,8 @@ module.exports = {
getArchiveData_meta: true,
async getArchiveData({ folder, file }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
let rows;
if (folder.endsWith('.zip')) {
rows = await unzipJsonLinesFile(path.join(archivedir(), folder), `${file}.jsonl`);
@@ -270,7 +326,7 @@ module.exports = {
if (!fileName?.endsWith('.zip')) {
throw new Error(`${fileName} is not a ZIP file`);
}
assertSafeArchiveName(fileName.slice(0, -4), 'fileName');
const folder = await this.getNewArchiveFolder({ database: fileName });
await fs.copyFile(filePath, path.join(archivedir(), folder));
socket.emitChanged(`archive-folders-changed`);
@@ -280,6 +336,7 @@ module.exports = {
zip_meta: true,
async zip({ folder }) {
assertSafeArchiveName(folder, 'folder');
const newFolder = await this.getNewArchiveFolder({ database: folder + '.zip' });
await zipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
socket.emitChanged(`archive-folders-changed`);
@@ -289,6 +346,7 @@ module.exports = {
unzip_meta: true,
async unzip({ folder }) {
assertSafeArchiveName(folder, 'folder');
const newFolder = await this.getNewArchiveFolder({ database: folder.slice(0, -4) });
await unzipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
socket.emitChanged(`archive-folders-changed`);
@@ -298,6 +356,7 @@ module.exports = {
getZippedPath_meta: true,
async getZippedPath({ folder }) {
assertSafeArchiveName(folder, 'folder');
if (folder.endsWith('.zip')) {
return { filePath: path.join(archivedir(), folder) };
}

View File

@@ -492,7 +492,61 @@ module.exports = {
return mask && !platformInfo.allowShellConnection ? maskConnection(res) : encryptConnection(res);
}
const res = await this.datastore.get(conid);
return res || null;
if (res) return res;
// In a forked runner-script child process, ask the parent for connections that may be
// volatile (in-memory only, e.g. ask-for-password). We only do this when
// there really is a parent (process.send exists) to avoid an infinite loop
// when the parent's own getCore falls through here.
// The check is intentionally narrow: only runner scripts pass
// --process-display-name script, so connect/session/ssh-forward subprocesses
// are not affected and continue to return null immediately.
if (process.send && processArgs.processDisplayName === 'script') {
const conn = await new Promise(resolve => {
let resolved = false;
const cleanup = () => {
process.removeListener('message', handler);
process.removeListener('disconnect', onDisconnect);
clearTimeout(timeout);
};
const settle = value => {
if (!resolved) {
resolved = true;
cleanup();
resolve(value);
}
};
const handler = message => {
if (message?.msgtype === 'volatile-connection-response' && message.conid === conid) {
settle(message.conn || null);
}
};
const onDisconnect = () => settle(null);
const timeout = setTimeout(() => settle(null), 5000);
// Don't let the timer alone keep the process alive if all other work is done
timeout.unref();
process.on('message', handler);
process.once('disconnect', onDisconnect);
try {
process.send({ msgtype: 'get-volatile-connection', conid });
} catch {
settle(null);
}
});
if (conn) {
volatileConnections[conn._id] = conn; // cache for subsequent calls
return conn;
}
}
return null;
},
get_meta: true,

View File

@@ -1,5 +1,8 @@
const { filterName } = require('dbgate-tools');
const { filterName, getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('jsldata');
const { jsldir, archivedir } = require('../utility/directories');
const fs = require('fs');
const path = require('path');
const lineReader = require('line-reader');
const _ = require('lodash');
const { __ } = require('lodash/fp');
@@ -149,6 +152,10 @@ module.exports = {
getRows_meta: true,
async getRows({ jslid, offset, limit, filters, sort, formatterFunction }) {
const fileName = getJslFileName(jslid);
if (!fs.existsSync(fileName)) {
return [];
}
const datastore = await this.ensureDatastore(jslid, formatterFunction);
return datastore.getRows(offset, limit, _.isEmpty(filters) ? null : filters, _.isEmpty(sort) ? null : sort);
},
@@ -159,6 +166,72 @@ module.exports = {
return fs.existsSync(fileName);
},
streamRows_meta: {
method: 'get',
raw: true,
},
streamRows(req, res) {
const { jslid } = req.query;
if (!jslid) {
res.status(400).json({ apiErrorMessage: 'Missing jslid' });
return;
}
// Reject file:// jslids — they resolve to arbitrary server-side paths
if (jslid.startsWith('file://')) {
res.status(403).json({ apiErrorMessage: 'Forbidden jslid scheme' });
return;
}
const fileName = getJslFileName(jslid);
if (!fs.existsSync(fileName)) {
res.status(404).json({ apiErrorMessage: 'File not found' });
return;
}
// Dereference symlinks and normalize case (Windows) before the allow-list check.
// realpathSync is safe here because existsSync confirmed the file is present.
// path.resolve() alone cannot dereference symlinks, so a symlink inside an allowed
// root could otherwise point to an arbitrary external path.
const normalize = p => (process.platform === 'win32' ? p.toLowerCase() : p);
const resolveRoot = r => { try { return fs.realpathSync(r); } catch { return path.resolve(r); } };
let realFile;
try {
realFile = fs.realpathSync(fileName);
} catch {
res.status(403).json({ apiErrorMessage: 'Forbidden path' });
return;
}
const allowedRoots = [jsldir(), archivedir()].map(r => normalize(resolveRoot(r)) + path.sep);
const isAllowed = allowedRoots.some(root => normalize(realFile).startsWith(root));
if (!isAllowed) {
logger.warn({ jslid, realFile }, 'DBGM-00000 streamRows rejected path outside allowed roots');
res.status(403).json({ apiErrorMessage: 'Forbidden path' });
return;
}
res.setHeader('Content-Type', 'application/x-ndjson');
res.setHeader('Cache-Control', 'no-cache');
const stream = fs.createReadStream(realFile, 'utf-8');
req.on('close', () => {
stream.destroy();
});
stream.on('error', err => {
logger.error(extractErrorLogData(err), 'DBGM-00000 Error streaming JSONL file');
if (!res.headersSent) {
res.status(500).json({ apiErrorMessage: 'Stream error' });
} else {
res.end();
}
});
stream.pipe(res);
},
getStats_meta: true,
getStats({ jslid }) {
const file = `${getJslFileName(jslid)}.stats`;

View File

@@ -33,19 +33,35 @@ function readCore(reader, skip, limit, filter) {
});
}
module.exports = {
read_meta: true,
async read({ skip, limit, filter }) {
function readJsonl({ skip, limit, filter }) {
return new Promise(async (resolve, reject) => {
const fileName = path.join(datadir(), 'query-history.jsonl');
// @ts-ignore
if (!(await fs.exists(fileName))) return [];
if (!(await fs.exists(fileName))) return resolve([]);
const reader = fsReverse(fileName);
const res = await readCore(reader, skip, limit, filter);
return res;
resolve(res);
});
}
module.exports = {
read_meta: true,
async read({ skip, limit, filter }, req) {
const storage = require('./storage');
const storageResult = await storage.readQueryHistory({ skip, limit, filter }, req);
if (storageResult) return storageResult;
return readJsonl({ skip, limit, filter });
},
write_meta: true,
async write({ data }) {
async write({ data }, req) {
const storage = require('./storage');
const written = await storage.writeQueryHistory({ data }, req);
if (written) {
socket.emit('query-history-changed');
return 'OK';
}
const fileName = path.join(datadir(), 'query-history.jsonl');
await fs.appendFile(fileName, JSON.stringify(data) + '\n');
socket.emit('query-history-changed');

View File

@@ -10,6 +10,7 @@ const {
extractShellApiPlugins,
compileShellApiFunctionName,
jsonScriptToJavascript,
assertValidShellApiFunctionName,
getLogger,
safeJsonParse,
pinoLogRecordToMessageRecord,
@@ -54,19 +55,23 @@ logger.info('DBGM-00014 Finished job script');
dbgateApi.runScript(run);
`;
const loaderScriptTemplate = (prefix, functionName, props, runid) => `
const loaderScriptTemplate = (functionName, props, runid) => {
const plugins = extractShellApiPlugins(functionName, props);
const prefix = plugins.map(packageName => `// @require ${packageName}\n`).join('');
return `
${prefix}
const dbgateApi = require(process.env.DBGATE_API);
dbgateApi.initializeApiEnvironment();
${requirePluginsTemplate(extractShellApiPlugins(functionName, props))}
${requirePluginsTemplate(plugins)}
require=null;
async function run() {
const reader=await ${compileShellApiFunctionName(functionName)}(${JSON.stringify(props)});
const writer=await dbgateApi.collectorWriter({runid: '${runid}'});
const writer=await dbgateApi.collectorWriter({runid: ${JSON.stringify(runid)}});
await dbgateApi.copyStream(reader, writer);
}
dbgateApi.runScript(run);
`;
};
module.exports = {
/** @type {import('dbgate-types').OpenedRunner[]} */
@@ -196,6 +201,27 @@ module.exports = {
// @ts-ignore
const { msgtype } = message;
if (handleProcessCommunication(message, subprocess)) return;
if (msgtype === 'get-volatile-connection') {
const connections = require('./connections');
// @ts-ignore
const conid = message.conid;
if (!conid || typeof conid !== 'string') return;
const trySend = payload => {
if (!subprocess.connected) return;
try {
subprocess.send(payload);
} catch {
// child disconnected between the check and the send — ignore
}
};
connections.getCore({ conid }).then(conn => {
trySend({ msgtype: 'volatile-connection-response', conid, conn: conn?.unsaved ? conn : null });
}).catch(err => {
logger.error({ ...extractErrorLogData(err), conid }, 'DBGM-00000 Error resolving volatile connection for child process');
trySend({ msgtype: 'volatile-connection-response', conid, conn: null });
});
return;
}
this[`handle_${msgtype}`](runid, message);
});
return _.pick(newOpened, ['runid']);
@@ -356,14 +382,12 @@ module.exports = {
return { errorMessage: 'DBGM-00289 Unallowed file' };
}
}
const prefix = extractShellApiPlugins(functionName)
.map(packageName => `// @require ${packageName}\n`)
.join('');
const promise = new Promise((resolve, reject) => {
assertValidShellApiFunctionName(functionName);
const runid = crypto.randomUUID();
this.requests[runid] = { resolve, reject, exitOnStreamError: true };
this.startCore(runid, loaderScriptTemplate(prefix, functionName, props, runid));
this.startCore(runid, loaderScriptTemplate(functionName, props, runid));
});
return promise;
},

View File

@@ -7,6 +7,7 @@ async function runScript(func) {
if (processArgs.checkParent) {
childProcessChecker();
}
try {
await func();
process.exit(0);

View File

@@ -16,23 +16,53 @@ function unzipDirectory(zipPath, outputDirectory) {
return new Promise((resolve, reject) => {
yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
if (err) return reject(err);
let settled = false;
/** Track active streams so we can destroy them on early abort */
const activeStreams = new Set();
const safeReject = rejectErr => {
if (settled) return;
settled = true;
for (const s of activeStreams) {
s.destroy();
}
activeStreams.clear();
zipFile.close();
reject(rejectErr);
};
/** Pending per-file extractions we resolve the main promise after theyre all done */
const pending = [];
// Resolved output boundary used for zip-slip checks on every entry
const resolvedOutputDir = path.resolve(outputDirectory);
// kick things off
zipFile.readEntry();
zipFile.on('entry', entry => {
// Null-byte poison check
if (entry.fileName.includes('\0')) {
return safeReject(new Error(`DBGM-00000 ZIP entry with null byte in filename rejected`));
}
const destPath = path.join(outputDirectory, entry.fileName);
const resolvedDest = path.resolve(destPath);
// Zip-slip protection: every extracted path must stay inside outputDirectory
if (resolvedDest !== resolvedOutputDir && !resolvedDest.startsWith(resolvedOutputDir + path.sep)) {
return safeReject(
new Error(`DBGM-00000 ZIP slip detected: entry "${entry.fileName}" would escape output directory`)
);
}
// Handle directories (their names always end with “/” in ZIPs)
if (/\/$/.test(entry.fileName)) {
// Ensure directory exists, then continue to next entry
fs.promises
.mkdir(destPath, { recursive: true })
.then(() => zipFile.readEntry())
.catch(reject);
.then(() => {
if (!settled) zipFile.readEntry();
})
.catch(safeReject);
return;
}
@@ -46,17 +76,29 @@ function unzipDirectory(zipPath, outputDirectory) {
if (err) return rej(err);
const writeStream = fs.createWriteStream(destPath);
activeStreams.add(readStream);
activeStreams.add(writeStream);
readStream.pipe(writeStream);
// proceed to next entry once weve consumed *this* one
readStream.on('end', () => zipFile.readEntry());
// proceed to next entry once we've consumed *this* one
readStream.on('end', () => {
activeStreams.delete(readStream);
if (!settled) zipFile.readEntry();
});
readStream.on('error', readErr => {
activeStreams.delete(readStream);
rej(readErr);
});
writeStream.on('finish', () => {
activeStreams.delete(writeStream);
logger.info(`DBGM-00068 Extracted "${entry.fileName}" → "${destPath}".`);
res();
});
writeStream.on('error', writeErr => {
activeStreams.delete(writeStream);
logger.error(
extractErrorLogData(writeErr),
`DBGM-00069 Error extracting "${entry.fileName}" from "${zipPath}".`
@@ -67,22 +109,29 @@ function unzipDirectory(zipPath, outputDirectory) {
})
);
// Immediately abort the whole unzip if this file fails; otherwise the
// zip would never emit 'end' (lazyEntries won't advance without readEntry).
filePromise.catch(safeReject);
pending.push(filePromise);
});
// Entire archive enumerated; wait for all streams to finish
zipFile.on('end', () => {
if (settled) return;
Promise.all(pending)
.then(() => {
if (settled) return;
settled = true;
zipFile.close();
logger.info(`DBGM-00070 Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
resolve(true);
})
.catch(reject);
.catch(safeReject);
});
zipFile.on('error', err => {
logger.error(extractErrorLogData(err), `DBGM-00071 ZIP file error in ${zipPath}.`);
reject(err);
safeReject(err);
});
});
});

View File

@@ -874,6 +874,114 @@ module.exports = {
}
]
},
{
"pureName": "query_history",
"columns": [
{
"pureName": "query_history",
"columnName": "id",
"dataType": "int",
"autoIncrement": true,
"notNull": true
},
{
"pureName": "query_history",
"columnName": "created",
"dataType": "bigint",
"notNull": true
},
{
"pureName": "query_history",
"columnName": "user_id",
"dataType": "int",
"notNull": false
},
{
"pureName": "query_history",
"columnName": "role_id",
"dataType": "int",
"notNull": false
},
{
"pureName": "query_history",
"columnName": "sql",
"dataType": "text",
"notNull": false
},
{
"pureName": "query_history",
"columnName": "conid",
"dataType": "varchar(100)",
"notNull": false
},
{
"pureName": "query_history",
"columnName": "database",
"dataType": "varchar(200)",
"notNull": false
}
],
"foreignKeys": [
{
"constraintType": "foreignKey",
"constraintName": "FK_query_history_user_id",
"pureName": "query_history",
"refTableName": "users",
"deleteAction": "CASCADE",
"columns": [
{
"columnName": "user_id",
"refColumnName": "id"
}
]
},
{
"constraintType": "foreignKey",
"constraintName": "FK_query_history_role_id",
"pureName": "query_history",
"refTableName": "roles",
"deleteAction": "CASCADE",
"columns": [
{
"columnName": "role_id",
"refColumnName": "id"
}
]
}
],
"indexes": [
{
"constraintName": "idx_query_history_user_id",
"pureName": "query_history",
"constraintType": "index",
"columns": [
{
"columnName": "user_id"
}
]
},
{
"constraintName": "idx_query_history_role_id",
"pureName": "query_history",
"constraintType": "index",
"columns": [
{
"columnName": "role_id"
}
]
}
],
"primaryKey": {
"pureName": "query_history",
"constraintType": "primaryKey",
"constraintName": "PK_query_history",
"columns": [
{
"columnName": "id"
}
]
}
},
{
"pureName": "roles",
"columns": [

View File

@@ -1,6 +1,6 @@
import _uniq from 'lodash/uniq';
import _cloneDeepWith from 'lodash/cloneDeepWith';
import { evalShellApiFunctionName, compileShellApiFunctionName, extractShellApiPlugins } from './packageTools';
import { evalShellApiFunctionName, compileShellApiFunctionName, extractShellApiPlugins, assertValidJsIdentifier, assertValidShellApiFunctionName } from './packageTools';
export interface ScriptWriterGeneric {
allocVariable(prefix?: string);
@@ -40,6 +40,7 @@ export class ScriptWriterJavaScript implements ScriptWriterGeneric {
}
assignCore(variableName, functionName, props) {
assertValidJsIdentifier(variableName, 'variableName');
this._put(`const ${variableName} = await ${functionName}(${JSON.stringify(props)});`);
}
@@ -49,6 +50,7 @@ export class ScriptWriterJavaScript implements ScriptWriterGeneric {
}
assignValue(variableName, jsonValue) {
assertValidJsIdentifier(variableName, 'variableName');
this._put(`const ${variableName} = ${JSON.stringify(jsonValue)};`);
}
@@ -57,8 +59,13 @@ export class ScriptWriterJavaScript implements ScriptWriterGeneric {
}
copyStream(sourceVar, targetVar, colmapVar = null, progressName?: string | { name: string; runid: string }) {
assertValidJsIdentifier(sourceVar, 'sourceVar');
assertValidJsIdentifier(targetVar, 'targetVar');
let opts = '{';
if (colmapVar) opts += `columns: ${colmapVar}, `;
if (colmapVar) {
assertValidJsIdentifier(colmapVar, 'colmapVar');
opts += `columns: ${colmapVar}, `;
}
if (progressName) opts += `progressName: ${JSON.stringify(progressName)}, `;
opts += '}';
@@ -89,7 +96,7 @@ export class ScriptWriterJavaScript implements ScriptWriterGeneric {
}
zipDirectory(inputDirectory, outputFile) {
this._put(`await dbgateApi.zipDirectory('${inputDirectory}', '${outputFile}');`);
this._put(`await dbgateApi.zipDirectory(${JSON.stringify(inputDirectory)}, ${JSON.stringify(outputFile)});`);
}
}
@@ -214,6 +221,8 @@ export class ScriptWriterEval implements ScriptWriterGeneric {
requirePackage(packageName) {}
async assign(variableName, functionName, props) {
assertValidJsIdentifier(variableName, 'variableName');
assertValidShellApiFunctionName(functionName);
const func = evalShellApiFunctionName(functionName, this.dbgateApi, this.requirePlugin);
this.variables[variableName] = await func(
@@ -226,10 +235,14 @@ export class ScriptWriterEval implements ScriptWriterGeneric {
}
assignValue(variableName, jsonValue) {
assertValidJsIdentifier(variableName, 'variableName');
this.variables[variableName] = jsonValue;
}
async copyStream(sourceVar, targetVar, colmapVar = null, progressName?: string | { name: string; runid: string }) {
assertValidJsIdentifier(sourceVar, 'sourceVar');
assertValidJsIdentifier(targetVar, 'targetVar');
if (colmapVar != null) assertValidJsIdentifier(colmapVar, 'colmapVar');
await this.dbgateApi.copyStream(this.variables[sourceVar], this.variables[targetVar], {
progressName: _cloneDeepWith(progressName, node => {
if (node?.$runid) {

View File

@@ -3,6 +3,64 @@ import _camelCase from 'lodash/camelCase';
import _isString from 'lodash/isString';
import _isPlainObject from 'lodash/isPlainObject';
const JS_IDENTIFIER_RE = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/;
// ECMAScript reserved words, strict-mode keywords, and async-context keywords
// that cannot be used as variable or function names in the generated scripts.
// Sources: ECMA-262 §12.7.2 (reserved words), §12.7.3 (strict mode), §14 (contextual).
const JS_RESERVED_WORDS = new Set([
// Keywords
'break', 'case', 'catch', 'class', 'const', 'continue', 'debugger', 'default',
'delete', 'do', 'else', 'export', 'extends', 'false', 'finally', 'for',
'function', 'if', 'import', 'in', 'instanceof', 'let', 'new', 'null', 'return',
'static', 'super', 'switch', 'this', 'throw', 'true', 'try', 'typeof', 'var',
'void', 'while', 'with', 'yield',
// Strict-mode reserved words
'implements', 'interface', 'package', 'private', 'protected', 'public',
// Async context keywords
'async', 'await',
// Future reserved
'enum',
'eval', 'arguments',
]);
export function isValidJsIdentifier(name: string): boolean {
return typeof name === 'string' && JS_IDENTIFIER_RE.test(name) && !JS_RESERVED_WORDS.has(name);
}
export function assertValidJsIdentifier(name: string, label: string): void {
if (!isValidJsIdentifier(name)) {
throw new Error(`DBGM-00000 Invalid ${label}: ${String(name).substring(0, 100)}`);
}
}
/**
* Validates a shell API function name.
* Allowed forms:
* - "someFunctionName" (plain identifier, resolved as dbgateApi.someFunctionName)
* - "funcName@dbgate-plugin-xxx" (namespaced, resolved as plugin.shellApi.funcName)
*/
export function assertValidShellApiFunctionName(functionName: string): void {
if (typeof functionName !== 'string') {
throw new Error('DBGM-00000 functionName must be a string');
}
const nsMatch = functionName.match(/^([^@]+)@([^@]+)$/);
if (nsMatch) {
if (!isValidJsIdentifier(nsMatch[1])) {
throw new Error(`DBGM-00000 Invalid function part in functionName: ${nsMatch[1].substring(0, 100)}`);
}
if (!/^dbgate-plugin-[a-zA-Z0-9_-]+$/.test(nsMatch[2])) {
throw new Error(`DBGM-00000 Invalid plugin package in functionName: ${nsMatch[2].substring(0, 100)}`);
}
} else {
if (!isValidJsIdentifier(functionName)) {
throw new Error(`DBGM-00000 Invalid functionName: ${functionName.substring(0, 100)}`);
}
}
}
const VALID_PLUGIN_NAME_RE = /^dbgate-plugin-[a-zA-Z0-9_-]+$/;
export function extractShellApiPlugins(functionName, props): string[] {
const res = [];
const nsMatch = functionName.match(/^([^@]+)@([^@]+)/);
@@ -15,6 +73,11 @@ export function extractShellApiPlugins(functionName, props): string[] {
res.push(nsMatchEngine[2]);
}
}
for (const plugin of res) {
if (!VALID_PLUGIN_NAME_RE.test(plugin)) {
throw new Error(`DBGM-00000 Invalid plugin name: ${String(plugin).substring(0, 100)}`);
}
}
return res;
}
@@ -28,7 +91,8 @@ export function extractPackageName(name): string {
}
export function compileShellApiFunctionName(functionName) {
const nsMatch = functionName.match(/^([^@]+)@([^@]+)/);
assertValidShellApiFunctionName(functionName);
const nsMatch = functionName.match(/^([^@]+)@([^@]+)$/);
if (nsMatch) {
return `${_camelCase(nsMatch[2])}.shellApi.${nsMatch[1]}`;
}
@@ -36,7 +100,8 @@ export function compileShellApiFunctionName(functionName) {
}
export function evalShellApiFunctionName(functionName, dbgateApi, requirePlugin) {
const nsMatch = functionName.match(/^([^@]+)@([^@]+)/);
assertValidShellApiFunctionName(functionName);
const nsMatch = functionName.match(/^([^@]+)@([^@]+)$/);
if (nsMatch) {
return requirePlugin(nsMatch[2]).shellApi[nsMatch[1]];
}

File diff suppressed because it is too large Load Diff

View File

@@ -46,7 +46,7 @@ import { isProApp } from '../utility/proTools';
import { openWebLink } from '../utility/simpleTools';
import { _t } from '../translations';
import ExportImportConnectionsModal from '../modals/ExportImportConnectionsModal.svelte';
import { getBoolSettingsValue } from '../settings/settingsTools';
import { getBoolSettingsValue, isAiDisabled } from '../settings/settingsTools';
import { __t } from '../translations';
// function themeCommand(theme: ThemeDefinition) {
@@ -753,7 +753,8 @@ if (isProApp()) {
testEnabled: () =>
getCurrentDatabase() != null &&
findEngineDriver(getCurrentDatabase()?.connection, getExtensions())?.databaseEngineTypes?.includes('sql') &&
hasPermission('dbops/chat'),
hasPermission('dbops/chat') &&
!isAiDisabled(),
onClick: () => {
openNewTab({
title: 'Chat',
@@ -776,7 +777,8 @@ if (isProApp()) {
testEnabled: () =>
getCurrentDatabase() != null &&
findEngineDriver(getCurrentDatabase()?.connection, getExtensions())?.databaseEngineTypes?.includes('graphql') &&
hasPermission('dbops/chat'),
hasPermission('dbops/chat') &&
!isAiDisabled(),
onClick: () => {
openNewTab({
title: 'GraphQL Chat',

View File

@@ -26,6 +26,18 @@
onClick: () => getCurrentDataGrid().deepRefresh(),
});
registerCommand({
id: 'dataGrid.fetchAll',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name: __t('command.datagrid.fetchAll', { defaultMessage: 'Fetch all rows' }),
toolbarName: __t('command.datagrid.fetchAll.toolbar', { defaultMessage: 'Fetch all' }),
icon: 'icon download',
toolbar: true,
isRelatedToTab: true,
testEnabled: () => getCurrentDataGrid()?.canFetchAll(),
onClick: () => getCurrentDataGrid().fetchAll(),
});
registerCommand({
id: 'dataGrid.revertRowChanges',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
@@ -432,6 +444,7 @@
import CollapseButton from './CollapseButton.svelte';
import GenerateSqlFromDataModal from '../modals/GenerateSqlFromDataModal.svelte';
import { showModal } from '../modals/modalTools';
import FetchAllConfirmModal from '../modals/FetchAllConfirmModal.svelte';
import StatusBarTabItem from '../widgets/StatusBarTabItem.svelte';
import { findCommand } from '../commands/runCommand';
import { openJsonDocument } from '../tabs/JsonTab.svelte';
@@ -454,6 +467,7 @@
import macros from '../macro/macros';
export let onLoadNextData = undefined;
export let onFetchAllRows = undefined;
export let grider = undefined;
export let display: GridDisplay = undefined;
export let conid = undefined;
@@ -473,6 +487,9 @@
export let errorMessage = undefined;
export let pureName = undefined;
export let schemaName = undefined;
export let isFetchingAll = false;
export let isFetchingFromDb = false;
export let fetchAllLoadedCount = 0;
export let allowDefineVirtualReferences = false;
export let formatterFunction;
export let passAllRows = null;
@@ -647,6 +664,21 @@
return canRefresh() && !!conid && !!database;
}
export function canFetchAll() {
return !!onFetchAllRows && !isLoadedAll && !isFetchingAll && !isLoading;
}
export function fetchAll() {
if (!canFetchAll()) return;
const settings = $settingsValue || {};
if (settings['dataGrid.skipFetchAllConfirm']) {
onFetchAllRows();
} else {
showModal(FetchAllConfirmModal, { onConfirm: () => onFetchAllRows() });
}
}
export async function deepRefresh() {
callUnsubscribeDbRefresh();
await apiCall('database-connections/sync-model', { conid, database });
@@ -1977,6 +2009,7 @@
registerMenu(
{ command: 'dataGrid.refresh' },
{ command: 'dataGrid.fetchAll', hideDisabled: true },
{ placeTag: 'copy' },
{
text: _t('datagrid.copyAdvanced', { defaultMessage: 'Copy advanced' }),
@@ -2404,11 +2437,7 @@
</div>
{:else if allRowCountError && multipleGridsOnTab}
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
class="row-count-label row-count-error"
title={allRowCountError}
on:click={onReloadRowCount}
>
<div class="row-count-label row-count-error" title={allRowCountError} on:click={onReloadRowCount}>
{_t('datagrid.rows', { defaultMessage: 'Rows' })}: {_t('datagrid.rowCountMany', { defaultMessage: 'Many' })}
</div>
{/if}
@@ -2417,6 +2446,18 @@
<LoadingInfo wrapper message="Loading data" />
{/if}
{#if isFetchingAll}
<LoadingInfo
wrapper
message={isFetchingFromDb
? _t('datagrid.fetchAll.progressDb', { defaultMessage: 'Fetching data from database...' })
: _t('datagrid.fetchAll.progress', {
defaultMessage: 'Fetching all rows... {count} loaded',
values: { count: fetchAllLoadedCount.toLocaleString() },
})}
/>
{/if}
{#if !tabControlHiddenTab && !multipleGridsOnTab && allRowCount != null}
<StatusBarTabItem text={`${_t('datagrid.rows', { defaultMessage: 'Rows' })}: ${allRowCount.toLocaleString()}`} />
{:else if !tabControlHiddenTab && !multipleGridsOnTab && allRowCountError}

View File

@@ -1,14 +1,18 @@
<script lang="ts">
import { getIntSettingsValue } from '../settings/settingsTools';
import { onDestroy } from 'svelte';
import createRef from '../utility/createRef';
import { useSettings } from '../utility/metadataLoaders';
import { fetchAll, type FetchAllHandle } from '../utility/fetchAll';
import { apiCall } from '../utility/api';
import DataGridCore from './DataGridCore.svelte';
export let loadDataPage;
export let dataPageAvailable;
export let loadRowCount;
export let startFetchAll = null;
export let grider;
export let display;
export let masterLoadedTime = undefined;
@@ -29,6 +33,12 @@
let errorMessage = null;
let domGrid;
let isFetchingAll = false;
let isFetchingFromDb = false;
let fetchAllLoadedCount = 0;
let fetchAllHandle: FetchAllHandle | null = null;
let readerJslid: string | null = null;
const loadNextDataRef = createRef(false);
const loadedTimeRef = createRef(null);
@@ -96,11 +106,161 @@
// console.log('LOADED', nextRows, loadedRows);
}
async function fetchAllRows() {
if (isFetchingAll || isLoadedAll) return;
const jslid = ($$props as any).jslid;
if (jslid) {
// Already have a JSONL file (e.g. query tab) — read directly
fetchAllViaJslid(jslid);
} else if (startFetchAll) {
// SQL/table grid: execute full query → stream to JSONL → read from it
fetchAllViaReader();
} else {
fetchAllRowsLegacy();
}
}
function stopReader() {
if (readerJslid) {
apiCall('sessions/stop-loading-reader', { jslid: readerJslid });
readerJslid = null;
}
}
async function fetchAllViaReader() {
isFetchingAll = true;
isFetchingFromDb = true;
fetchAllLoadedCount = loadedRows.length;
errorMessage = null;
// Token guards against a reload/destroy that happens while we await startFetchAll.
// loadedTimeRef is already updated by reload(), so we reuse it as our token.
const token = loadedTime;
let jslid;
try {
jslid = await startFetchAll($$props);
} catch (err) {
if (loadedTime !== token) return; // reload() already reset state
errorMessage = err?.message ?? 'Failed to start data reader';
isFetchingAll = false;
isFetchingFromDb = false;
return;
}
// If reload()/onDestroy ran while we were awaiting, discard the result and
// immediately stop the reader that was just started on the server.
if (loadedTime !== token) {
if (jslid) apiCall('sessions/stop-loading-reader', { jslid });
return;
}
if (!jslid) {
errorMessage = 'Failed to start data reader';
isFetchingAll = false;
isFetchingFromDb = false;
return;
}
readerJslid = jslid;
fetchAllViaJslid(jslid);
}
function fetchAllViaJslid(jslid: string) {
if (!isFetchingAll) {
isFetchingAll = true;
fetchAllLoadedCount = loadedRows.length;
errorMessage = null;
}
const pageSize = getIntSettingsValue('dataGrid.pageSize', 100, 5, 50000);
const buffer: any[] = [];
const jslLoadDataPage = async (offset: number, limit: number) => {
return apiCall('jsldata/get-rows', { jslid, offset, limit });
};
fetchAllHandle = fetchAll(
jslid,
jslLoadDataPage,
{
onPage(rows) {
if (rows.length > 0) isFetchingFromDb = false;
const processed = preprocessLoadedRow ? rows.map(preprocessLoadedRow) : rows;
buffer.push(...processed);
fetchAllLoadedCount = buffer.length;
},
onFinished() {
loadedRows = buffer;
isLoadedAll = true;
isFetchingAll = false;
isFetchingFromDb = false;
fetchAllHandle = null;
readerJslid = null;
if (allRowCount == null && !isRawMode) handleLoadRowCount();
},
onError(msg) {
errorMessage = msg;
isFetchingAll = false;
isFetchingFromDb = false;
fetchAllHandle = null;
stopReader();
},
},
pageSize
);
}
async function fetchAllRowsLegacy() {
isFetchingAll = true;
fetchAllLoadedCount = loadedRows.length;
errorMessage = null;
const pageSize = getIntSettingsValue('dataGrid.pageSize', 100, 5, 50000);
const fetchStart = new Date().getTime();
loadedTimeRef.set(fetchStart);
// Accumulate into a local buffer to avoid O(n²) full-array copies each iteration.
const buffer = [...loadedRows];
try {
while (!isLoadedAll) {
const nextRows = await loadDataPage($$props, buffer.length, pageSize);
if (loadedTimeRef.get() !== fetchStart) {
// a reload was triggered; abort without overwriting loadedRows with stale data
return;
}
if (nextRows.errorMessage) {
errorMessage = nextRows.errorMessage;
break;
}
if (nextRows.length === 0) {
isLoadedAll = true;
break;
}
const processed = preprocessLoadedRow ? nextRows.map(preprocessLoadedRow) : nextRows;
buffer.push(...processed);
fetchAllLoadedCount = buffer.length;
}
// Single assignment triggers Svelte reactivity once for all accumulated rows.
loadedRows = buffer;
if (allRowCount == null && !isRawMode) handleLoadRowCount();
} finally {
isFetchingAll = false;
}
}
// $: griderProps = { ...$$props, sourceRows: loadProps.loadedRows };
// $: grider = griderFactory(griderProps);
function handleLoadNextData() {
if (!isLoadedAll && !errorMessage && (!grider.disableLoadNextPage || loadedRows.length == 0)) {
if (!isLoadedAll && !errorMessage && !isFetchingAll && (!grider.disableLoadNextPage || loadedRows.length == 0)) {
if (dataPageAvailable($$props)) {
// If not, callbacks to load missing metadata are dispatched
loadNextData();
@@ -109,14 +269,23 @@
}
function reload() {
if (fetchAllHandle) {
fetchAllHandle.cancel();
fetchAllHandle = null;
}
stopReader();
isFetchingFromDb = false;
allRowCount = null;
allRowCountError = null;
isLoading = false;
isFetchingAll = false;
fetchAllLoadedCount = 0;
loadedRows = [];
isLoadedAll = false;
loadedTime = new Date().getTime();
errorMessage = null;
loadNextDataRef.set(false);
loadedTimeRef.set(null);
// loadNextDataToken = 0;
}
@@ -130,6 +299,13 @@
}
}
onDestroy(() => {
if (fetchAllHandle) {
fetchAllHandle.cancel();
}
stopReader();
});
$: if (setLoadedRows) setLoadedRows(loadedRows);
</script>
@@ -137,10 +313,14 @@
{...$$props}
bind:this={domGrid}
onLoadNextData={handleLoadNextData}
onFetchAllRows={fetchAllRows}
{errorMessage}
{isLoading}
{isFetchingAll}
{isFetchingFromDb}
{fetchAllLoadedCount}
allRowCount={rowCountLoaded || allRowCount}
allRowCountError={allRowCountError}
{allRowCountError}
onReloadRowCount={handleLoadRowCount}
{isLoadedAll}
{loadedTime}

View File

@@ -2,13 +2,13 @@
import { getActiveComponent } from '../utility/createActivator';
import registerCommand from '../commands/registerCommand';
import hasPermission from '../utility/hasPermission';
import { __t, _t } from '../translations'
import { __t, _t } from '../translations';
const getCurrentEditor = () => getActiveComponent('SqlDataGridCore');
registerCommand({
id: 'sqlDataGrid.openQuery',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name: __t('command.openQuery', { defaultMessage : 'Open query' }),
name: __t('command.openQuery', { defaultMessage: 'Open query' }),
testEnabled: () => getCurrentEditor() != null && hasPermission('dbops/query'),
onClick: () => getCurrentEditor().openQuery(),
});
@@ -16,7 +16,7 @@
registerCommand({
id: 'sqlDataGrid.export',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name: __t('common.export', { defaultMessage : 'Export' }),
name: __t('common.export', { defaultMessage: 'Export' }),
icon: 'icon export',
keyText: 'CtrlOrCommand+E',
testEnabled: () => getCurrentEditor() != null && hasPermission('dbops/export'),
@@ -232,6 +232,20 @@
return { errorMessage: err.message || 'Error loading row count' };
}
}
async function startFetchAll(props) {
const { display, conid, database } = props;
const sql = display.getExportQuery();
if (!sql) return null;
const resp = await apiCall('sessions/execute-reader', {
conid,
database,
sql,
});
if (!resp || resp.errorMessage) return null;
return resp.jslid;
}
</script>
<LoadingDataGridCore
@@ -239,6 +253,7 @@
{loadDataPage}
{dataPageAvailable}
{loadRowCount}
{startFetchAll}
setLoadedRows={handleSetLoadedRows}
onPublishedCellsChanged={value => {
publishedCells = value;

View File

@@ -24,7 +24,7 @@
{#if isNative}
<select
value={options.find(x => x.value == value) ? value : defaultValue}
class="{selectClass}"
class={selectClass}
{...$$restProps}
on:change={e => {
dispatch('change', e.target['value']);
@@ -47,7 +47,7 @@
{...$$restProps}
items={options ?? []}
value={isMulti
? _.compact((value && Array.isArray(value)) ? value.map(item => options?.find(x => x.value == item)) : [])
? _.compact(value && Array.isArray(value) ? value.map(item => options?.find(x => x.value == item)) : [])
: (options?.find(x => x.value == value) ?? null)}
on:select={e => {
if (isMulti) {
@@ -69,7 +69,6 @@
</div>
{/if}
<style>
.select {
--border: var(--theme-input-border);
@@ -78,10 +77,10 @@
--background: var(--theme-input-background);
--borderHoverColor: var(--theme-input-border-hover-color);
--borderFocusColor: var(--theme-input-border-focus-color);
--listBackground: var(--theme-input-list-background);
--listBackground: var(--theme-input-background);
--itemActiveBackground: var(--theme-input-item-active-background);
--itemIsActiveBG: var(--theme-input-item-active-background);
--itemHoverBG: var(--theme-input-item-hover-background);
--itemHoverBG: var(--theme-input-multi-clear-hover);
--itemColor: var(--theme-input-item-foreground);
--listEmptyColor: var(--theme-input-background);
--height: 40px;
@@ -95,9 +94,8 @@
--multiClearHoverFill: var(--theme-input-multi-clear-foreground);
--multiItemActiveBG: var(--theme-input-multi-item-background);
--multiItemActiveColor: var(--theme-input-multi-item-foreground);
--multiItemBG: var(--theme-input-multi-item-background);
--multiItemBG: var(--theme-input-multi-clear-background);
--multiItemDisabledHoverBg: var(--theme-input-multi-item-background);
--multiItemDisabledHoverColor: var(--theme-input-multi-item-foreground);
}
</style>

View File

@@ -6,6 +6,7 @@ import { getConnectionInfo } from '../utility/metadataLoaders';
import { findEngineDriver, findObjectLike } from 'dbgate-tools';
import { findFileFormat } from '../plugins/fileformats';
import { getCurrentConfig, getExtensions } from '../stores';
import { getVolatileRemapping } from '../utility/api';
export function getTargetName(extensions, source, values) {
const key = `targetName_${source}`;
@@ -38,6 +39,30 @@ function extractDriverApiParameters(values, direction, driver) {
export function extractShellConnection(connection, database) {
const config = getCurrentConfig();
// Case 1: connection._id is the original ID and a volatile remap exists.
// Use the volatile ID so the backend child process can look up the credentials.
const volatileId = getVolatileRemapping(connection._id);
if (volatileId !== connection._id) {
return {
_id: volatileId,
engine: connection.engine,
database,
};
}
// Case 2: apiCall.transformApiArgs already remapped the conid before the
// connection was fetched, so connection._id IS already the volatile ID and
// connection.unsaved === true. Falling through to allowShellConnection here
// would embed plaintext credentials in the generated script — always use the
// _id reference instead.
if (connection.unsaved) {
return {
_id: connection._id,
engine: connection.engine,
database,
};
}
return config.allowShellConnection
? {
..._.omitBy(

View File

@@ -0,0 +1,74 @@
<script lang="ts">
import FormStyledButton from '../buttons/FormStyledButton.svelte';
import FormProvider from '../forms/FormProvider.svelte';
import FormSubmit from '../forms/FormSubmit.svelte';
import TemplatedCheckboxField from '../forms/TemplatedCheckboxField.svelte';
import FontIcon from '../icons/FontIcon.svelte';
import ModalBase from './ModalBase.svelte';
import { closeCurrentModal } from './modalTools';
import { apiCall } from '../utility/api';
import { _t } from '../translations';
export let onConfirm;
const SKIP_SETTING_KEY = 'dataGrid.skipFetchAllConfirm';
let dontAskAgain = false;
</script>
<FormProvider>
<ModalBase {...$$restProps} data-testid="FetchAllConfirmModal">
<svelte:fragment slot="header">
{_t('datagrid.fetchAll.title', { defaultMessage: 'Fetch All Rows' })}
</svelte:fragment>
<div class="message">
<FontIcon icon="img warn" />
<span>
{_t('datagrid.fetchAll.warning', {
defaultMessage:
'This will load all remaining rows into memory. For large tables, this may consume a significant amount of memory and could affect application performance.',
})}
</span>
</div>
<div class="mt-2">
<TemplatedCheckboxField
label={_t('common.dontAskAgain', { defaultMessage: "Don't ask again" })}
templateProps={{ noMargin: true }}
checked={dontAskAgain}
on:change={e => {
dontAskAgain = e.detail;
apiCall('config/update-settings', { [SKIP_SETTING_KEY]: e.detail });
}}
data-testid="FetchAllConfirmModal_dontAskAgain"
/>
</div>
<svelte:fragment slot="footer">
<FormSubmit
value={_t('datagrid.fetchAll.confirm', { defaultMessage: 'Fetch All' })}
on:click={() => {
closeCurrentModal();
onConfirm();
}}
data-testid="FetchAllConfirmModal_okButton"
/>
<FormStyledButton
type="button"
value={_t('common.close', { defaultMessage: 'Close' })}
on:click={closeCurrentModal}
data-testid="FetchAllConfirmModal_closeButton"
/>
</svelte:fragment>
</ModalBase>
</FormProvider>
<style>
.message {
display: flex;
align-items: flex-start;
gap: 8px;
line-height: 1.5;
}
</style>

View File

@@ -55,6 +55,12 @@
defaultMessage: 'Skip confirmation when saving collection data (NoSQL)',
})}
/>
<FormCheckboxField
name="dataGrid.skipFetchAllConfirm"
label={_t('settings.confirmations.skipFetchAllConfirm', {
defaultMessage: 'Skip confirmation when fetching all rows',
})}
/>
</FormValues>
</div>

View File

@@ -36,6 +36,10 @@ export function getObjectSettingsValue(name, defaultValue) {
return res;
}
export function isAiDisabled(): boolean {
return getBoolSettingsValue('storage.disableAiFeatures', false);
}
export function getConnectionClickActionSetting(): 'connect' | 'openDetails' | 'none' {
return getStringSettingsValue('defaultAction.connectionClick', 'connect');
}

View File

@@ -2,223 +2,230 @@
import { getActiveComponent } from '../utility/createActivator';
import registerCommand from '../commands/registerCommand';
import { __t } from '../translations';
const getCurrentEditor = () => getActiveComponent('CollectionDataTab');
export const matchingProps = ['conid', 'database', 'schemaName', 'pureName'];
export const allowAddToFavorites = props => true;
export const allowSwitchDatabase = props => true;
registerCommand({
id: 'collectionTable.save',
group: 'save',
category: __t('command.collectionData', { defaultMessage: 'Collection data' }),
name: __t('command.collectionData.save', { defaultMessage: 'Save' }),
// keyText: 'CtrlOrCommand+S',
toolbar: true,
isRelatedToTab: true,
icon: 'icon save',
testEnabled: () => getCurrentEditor()?.canSave(),
onClick: () => getCurrentEditor().save(),
});
</script>
<script lang="ts">
import App from '../App.svelte';
import DataGrid from '../datagrid/DataGrid.svelte';
import useGridConfig from '../utility/useGridConfig';
import {
createChangeSet,
createGridCache,
CollectionGridDisplay,
changeSetContainsChanges,
runMacroOnChangeSet,
changeSetChangedCount,
} from 'dbgate-datalib';
import { findEngineDriver } from 'dbgate-tools';
import { writable } from 'svelte/store';
import createUndoReducer from '../utility/createUndoReducer';
import invalidateCommands from '../commands/invalidateCommands';
import CollectionDataGridCore from '../datagrid/CollectionDataGridCore.svelte';
import { useCollectionInfo, useConnectionInfo, useSettings } from '../utility/metadataLoaders';
import { extensions } from '../stores';
import CollectionJsonView from '../formview/CollectionJsonView.svelte';
import createActivator from '../utility/createActivator';
import { showModal } from '../modals/modalTools';
import ErrorMessageModal from '../modals/ErrorMessageModal.svelte';
import ConfirmNoSqlModal from '../modals/ConfirmNoSqlModal.svelte';
import { registerMenu } from '../utility/contextMenu';
import { setContext } from 'svelte';
import _ from 'lodash';
import { apiCall } from '../utility/api';
import { getLocalStorage, setLocalStorage } from '../utility/storageCache';
import ToolStripContainer from '../buttons/ToolStripContainer.svelte';
import ToolStripCommandButton from '../buttons/ToolStripCommandButton.svelte';
import ToolStripExportButton, { createQuickExportHandlerRef } from '../buttons/ToolStripExportButton.svelte';
import { getBoolSettingsValue } from '../settings/settingsTools';
import useEditorData from '../query/useEditorData';
import { markTabSaved, markTabUnsaved } from '../utility/common';
import { getNumberIcon } from '../icons/FontIcon.svelte';
export let tabid;
export let conid;
export let database;
export let schemaName;
export let pureName;
let loadedRows;
export const activator = createActivator('CollectionDataTab', true);
const config = useGridConfig(tabid);
const cache = writable(createGridCache());
const settingsValue = useSettings();
const { editorState, editorValue, setEditorData } = useEditorData({
tabid,
onInitialData: value => {
dispatchChangeSet({ type: 'reset', value });
invalidateCommands();
if (changeSetContainsChanges(value)) {
markTabUnsaved(tabid);
}
},
});
const [changeSetStore, dispatchChangeSet] = createUndoReducer(createChangeSet());
$: {
setEditorData($changeSetStore.value);
if (changeSetContainsChanges($changeSetStore?.value)) {
markTabUnsaved(tabid);
} else {
markTabSaved(tabid);
}
}
$: {
$changeSetStore;
invalidateCommands();
}
$: connection = useConnectionInfo({ conid });
$: collectionInfo = useCollectionInfo({ conid, database, schemaName, pureName });
$: display =
$collectionInfo && $connection
? new CollectionGridDisplay(
$collectionInfo,
findEngineDriver($connection, $extensions),
//@ts-ignore
$config,
config.update,
$cache,
cache.update,
loadedRows,
$changeSetStore?.value,
$connection?.isReadOnly,
$settingsValue
)
: null;
// $: console.log('LOADED ROWS MONGO', loadedRows);
async function handleConfirmChange(changeSet) {
const resp = await apiCall('database-connections/update-collection', {
conid,
database,
changeSet: {
...changeSet,
updates: changeSet.updates.map(update => ({
...update,
fields: _.mapValues(update.fields, (v, k) => (v === undefined ? { $$undefined$$: true } : v)),
})),
},
});
const { errorMessage } = resp || {};
if (errorMessage) {
showModal(ErrorMessageModal, { title: 'Error when saving', message: errorMessage });
} else {
dispatchChangeSet({ type: 'reset', value: createChangeSet() });
display?.reload();
}
}
export function canSave() {
return changeSetContainsChanges($changeSetStore?.value);
}
export function save() {
const json = $changeSetStore?.value;
const driver = findEngineDriver($connection, $extensions);
const script = driver.getCollectionUpdateScript ? driver.getCollectionUpdateScript(json, $collectionInfo) : null;
if (script) {
if (getBoolSettingsValue('skipConfirm.collectionDataSave', false)) {
handleConfirmChange(json);
} else {
showModal(ConfirmNoSqlModal, {
script,
onConfirm: () => handleConfirmChange(json),
engine: display.engine,
skipConfirmSettingKey: 'skipConfirm.collectionDataSave',
});
}
} else {
handleConfirmChange(json);
}
}
function handleRunMacro(macro, params, cells) {
const newChangeSet = runMacroOnChangeSet(macro, params, cells, $changeSetStore?.value, display, false);
if (newChangeSet) {
dispatchChangeSet({ type: 'set', value: newChangeSet });
}
}
registerMenu({ command: 'collectionTable.save', tag: 'save' });
const collapsedLeftColumnStore = writable(getLocalStorage('collection_collapsedLeftColumn', false));
setContext('collapsedLeftColumnStore', collapsedLeftColumnStore);
$: setLocalStorage('collection_collapsedLeftColumn', $collapsedLeftColumnStore);
const quickExportHandlerRef = createQuickExportHandlerRef();
function handleSetLoadedRows(rows) {
loadedRows = rows;
}
</script>
<ToolStripContainer>
<DataGrid
setLoadedRows={handleSetLoadedRows}
{...$$props}
config={$config}
setConfig={config.update}
cache={$cache}
setCache={cache.update}
changeSetState={$changeSetStore}
focusOnVisible
{display}
{changeSetStore}
{dispatchChangeSet}
gridCoreComponent={CollectionDataGridCore}
jsonViewComponent={CollectionJsonView}
isDynamicStructure
showMacros
macroCondition={macro => macro.type == 'transformValue'}
onRunMacro={handleRunMacro}
/>
<svelte:fragment slot="toolstrip">
<ToolStripCommandButton command="dataGrid.refresh" hideDisabled />
<ToolStripCommandButton command="dataForm.refresh" hideDisabled />
<ToolStripCommandButton
command="collectionTable.save"
iconAfter={getNumberIcon(changeSetChangedCount($changeSetStore?.value))}
/>
<ToolStripCommandButton command="dataGrid.revertAllChanges" hideDisabled />
<ToolStripCommandButton command="dataGrid.insertNewRow" hideDisabled />
<ToolStripCommandButton command="dataGrid.deleteSelectedRows" hideDisabled />
<ToolStripCommandButton command="dataGrid.addNewColumn" hideDisabled />
<ToolStripCommandButton command="dataGrid.switchToJson" hideDisabled />
<ToolStripCommandButton command="dataGrid.switchToTable" hideDisabled />
<ToolStripExportButton {quickExportHandlerRef} command="collectionDataGrid.export" />
<ToolStripCommandButton command="collectionJsonView.expandAll" hideDisabled />
<ToolStripCommandButton command="collectionJsonView.collapseAll" hideDisabled />
<ToolStripCommandButton command="dataGrid.toggleCellDataView" hideDisabled data-testid="CollectionDataTab_toggleCellDataView" />
const getCurrentEditor = () => getActiveComponent('CollectionDataTab');
export const matchingProps = ['conid', 'database', 'schemaName', 'pureName'];
export const allowAddToFavorites = props => true;
export const allowSwitchDatabase = props => true;
registerCommand({
id: 'collectionTable.save',
group: 'save',
category: __t('command.collectionData', { defaultMessage: 'Collection data' }),
name: __t('command.collectionData.save', { defaultMessage: 'Save' }),
// keyText: 'CtrlOrCommand+S',
toolbar: true,
isRelatedToTab: true,
icon: 'icon save',
testEnabled: () => getCurrentEditor()?.canSave(),
onClick: () => getCurrentEditor().save(),
});
</script>
<script lang="ts">
import App from '../App.svelte';
import DataGrid from '../datagrid/DataGrid.svelte';
import useGridConfig from '../utility/useGridConfig';
import {
createChangeSet,
createGridCache,
CollectionGridDisplay,
changeSetContainsChanges,
runMacroOnChangeSet,
changeSetChangedCount,
} from 'dbgate-datalib';
import { findEngineDriver } from 'dbgate-tools';
import { writable } from 'svelte/store';
import createUndoReducer from '../utility/createUndoReducer';
import invalidateCommands from '../commands/invalidateCommands';
import CollectionDataGridCore from '../datagrid/CollectionDataGridCore.svelte';
import { useCollectionInfo, useConnectionInfo, useSettings } from '../utility/metadataLoaders';
import { extensions } from '../stores';
import CollectionJsonView from '../formview/CollectionJsonView.svelte';
import createActivator from '../utility/createActivator';
import { showModal } from '../modals/modalTools';
import ErrorMessageModal from '../modals/ErrorMessageModal.svelte';
import ConfirmNoSqlModal from '../modals/ConfirmNoSqlModal.svelte';
import { registerMenu } from '../utility/contextMenu';
import { setContext } from 'svelte';
import _ from 'lodash';
import { apiCall } from '../utility/api';
import { getLocalStorage, setLocalStorage } from '../utility/storageCache';
import ToolStripContainer from '../buttons/ToolStripContainer.svelte';
import ToolStripCommandButton from '../buttons/ToolStripCommandButton.svelte';
import ToolStripExportButton, { createQuickExportHandlerRef } from '../buttons/ToolStripExportButton.svelte';
import { getBoolSettingsValue } from '../settings/settingsTools';
import useEditorData from '../query/useEditorData';
import { markTabSaved, markTabUnsaved } from '../utility/common';
import { getNumberIcon } from '../icons/FontIcon.svelte';
export let tabid;
export let conid;
export let database;
export let schemaName;
export let pureName;
let loadedRows;
export const activator = createActivator('CollectionDataTab', true);
const config = useGridConfig(tabid);
const cache = writable(createGridCache());
const settingsValue = useSettings();
const { editorState, editorValue, setEditorData } = useEditorData({
tabid,
onInitialData: value => {
dispatchChangeSet({ type: 'reset', value });
invalidateCommands();
if (changeSetContainsChanges(value)) {
markTabUnsaved(tabid);
}
},
});
const [changeSetStore, dispatchChangeSet] = createUndoReducer(createChangeSet());
$: {
setEditorData($changeSetStore.value);
if (changeSetContainsChanges($changeSetStore?.value)) {
markTabUnsaved(tabid);
} else {
markTabSaved(tabid);
}
}
$: {
$changeSetStore;
invalidateCommands();
}
$: connection = useConnectionInfo({ conid });
$: collectionInfo = useCollectionInfo({ conid, database, schemaName, pureName });
$: display =
$collectionInfo && $connection
? new CollectionGridDisplay(
$collectionInfo,
findEngineDriver($connection, $extensions),
//@ts-ignore
$config,
config.update,
$cache,
cache.update,
loadedRows,
$changeSetStore?.value,
$connection?.isReadOnly,
$settingsValue
)
: null;
// $: console.log('LOADED ROWS MONGO', loadedRows);
async function handleConfirmChange(changeSet) {
const resp = await apiCall('database-connections/update-collection', {
conid,
database,
changeSet: {
...changeSet,
updates: changeSet.updates.map(update => ({
...update,
fields: _.mapValues(update.fields, (v, k) => (v === undefined ? { $$undefined$$: true } : v)),
})),
},
});
const { errorMessage } = resp || {};
if (errorMessage) {
showModal(ErrorMessageModal, { title: 'Error when saving', message: errorMessage });
} else {
dispatchChangeSet({ type: 'reset', value: createChangeSet() });
display?.reload();
}
}
export function canSave() {
return changeSetContainsChanges($changeSetStore?.value);
}
// Save the current change set. If the engine driver can render an update
// script, optionally show a confirmation modal first (unless the user opted
// out via settings); otherwise submit the change set directly.
export function save() {
  const changeSet = $changeSetStore?.value;
  const driver = findEngineDriver($connection, $extensions);
  const script = driver.getCollectionUpdateScript
    ? driver.getCollectionUpdateScript(changeSet, $collectionInfo)
    : null;

  if (!script) {
    // Driver has no script representation — save without confirmation.
    handleConfirmChange(changeSet);
    return;
  }

  if (getBoolSettingsValue('skipConfirm.collectionDataSave', false)) {
    handleConfirmChange(changeSet);
  } else {
    showModal(ConfirmNoSqlModal, {
      script,
      onConfirm: () => handleConfirmChange(changeSet),
      engine: display.engine,
      skipConfirmSettingKey: 'skipConfirm.collectionDataSave',
    });
  }
}
// Apply a macro to the selected cells; commit the resulting change set
// (if the macro produced one) as a single undoable step.
function handleRunMacro(macro, params, cells) {
  const updated = runMacroOnChangeSet(macro, params, cells, $changeSetStore?.value, display, false);
  if (!updated) return;
  dispatchChangeSet({ type: 'set', value: updated });
}
registerMenu({ command: 'collectionTable.save', tag: 'save' });
const collapsedLeftColumnStore = writable(getLocalStorage('collection_collapsedLeftColumn', false));
setContext('collapsedLeftColumnStore', collapsedLeftColumnStore);
$: setLocalStorage('collection_collapsedLeftColumn', $collapsedLeftColumnStore);
const quickExportHandlerRef = createQuickExportHandlerRef();
// Callback passed to DataGrid; stores the fetched rows so the reactive
// `display` statement can rebuild CollectionGridDisplay with current data.
function handleSetLoadedRows(rows) {
loadedRows = rows;
}
</script>
<ToolStripContainer>
<DataGrid
setLoadedRows={handleSetLoadedRows}
{...$$props}
config={$config}
setConfig={config.update}
cache={$cache}
setCache={cache.update}
changeSetState={$changeSetStore}
focusOnVisible
{display}
{changeSetStore}
{dispatchChangeSet}
gridCoreComponent={CollectionDataGridCore}
jsonViewComponent={CollectionJsonView}
isDynamicStructure
showMacros
macroCondition={macro => macro.type == 'transformValue'}
onRunMacro={handleRunMacro}
/>
<svelte:fragment slot="toolstrip">
<ToolStripCommandButton command="dataGrid.refresh" hideDisabled />
<ToolStripCommandButton command="dataForm.refresh" hideDisabled />
<ToolStripCommandButton
command="collectionTable.save"
iconAfter={getNumberIcon(changeSetChangedCount($changeSetStore?.value))}
/>
<ToolStripCommandButton command="dataGrid.revertAllChanges" hideDisabled />
<ToolStripCommandButton command="dataGrid.insertNewRow" hideDisabled />
<ToolStripCommandButton command="dataGrid.deleteSelectedRows" hideDisabled />
<ToolStripCommandButton command="dataGrid.addNewColumn" hideDisabled />
<ToolStripCommandButton command="dataGrid.switchToJson" hideDisabled />
<ToolStripCommandButton command="dataGrid.switchToTable" hideDisabled />
<ToolStripExportButton {quickExportHandlerRef} command="collectionDataGrid.export" />
<ToolStripCommandButton command="dataGrid.fetchAll" hideDisabled />
<ToolStripCommandButton command="collectionJsonView.expandAll" hideDisabled />
<ToolStripCommandButton command="collectionJsonView.collapseAll" hideDisabled />
<ToolStripCommandButton
command="dataGrid.toggleCellDataView"
hideDisabled
data-testid="CollectionDataTab_toggleCellDataView"
/>
</svelte:fragment>
</ToolStripContainer>

View File

@@ -24,7 +24,7 @@
name: __t('command.query.AiAssistant', { defaultMessage: 'AI Assistant' }),
keyText: 'Shift+Alt+A',
icon: 'icon ai',
testEnabled: () => isProApp(),
testEnabled: () => isProApp() && !isAiDisabled(),
onClick: () => getCurrentEditor().toggleAiAssistant(),
});
registerCommand({
@@ -164,7 +164,7 @@
import HorizontalSplitter from '../elements/HorizontalSplitter.svelte';
import uuidv1 from 'uuid/v1';
import ToolStripButton from '../buttons/ToolStripButton.svelte';
import { getIntSettingsValue } from '../settings/settingsTools';
import { getIntSettingsValue, isAiDisabled } from '../settings/settingsTools';
import RowsLimitModal from '../modals/RowsLimitModal.svelte';
import _ from 'lodash';
import FontIcon from '../icons/FontIcon.svelte';
@@ -197,19 +197,19 @@
},
{
value: '@',
text: _t('query.variable', { defaultMessage: '@variable' }),
text: '@' + _t('query.variable', { defaultMessage: 'variable' }),
},
{
value: ':',
text: _t('query.named', { defaultMessage: ':variable' }),
text: ':' + _t('query.variable', { defaultMessage: 'variable' }),
},
{
value: '$',
text: _t('query.variable', { defaultMessage: '$variable' }),
text: '$' + _t('query.variable', { defaultMessage: 'variable' }),
},
{
value: '#',
text: _t('query.variable', { defaultMessage: '#variable' }),
text: '#' + _t('query.variable', { defaultMessage: 'variable' }),
},
];
@@ -253,6 +253,10 @@
let isAiAssistantVisible = isProApp() && localStorage.getItem(`tabdata_isAiAssistantVisible_${tabid}`) == 'true';
let domAiAssistant;
$: if ($settingsValue?.['storage.disableAiFeatures']) {
isAiAssistantVisible = false;
}
onMount(() => {
intervalId = setInterval(() => {
if (!driver?.singleConnectionOnly && sessionId) {
@@ -619,7 +623,7 @@
}
async function handleExplainError(errorObject) {
if (!isProApp()) return;
if (!isProApp() || isAiDisabled()) return;
isAiAssistantVisible = true;
await tick();
domAiAssistant?.explainError({

View File

@@ -20,8 +20,11 @@
import SQLEditorSettings from '../settings/SQLEditorSettings.svelte';
import AiSettingsTab from '../settings/AiSettingsTab.svelte';
import hasPermission from '../utility/hasPermission';
import { useSettings } from '../utility/metadataLoaders';
import { openedTabs } from '../stores';
const settings = useSettings();
export let selectedItem = 'general';
export let tabid = null;
@@ -33,7 +36,7 @@
);
}
const menuItems = [
$: menuItems = [
{
label: _t('settings.general', { defaultMessage: 'General' }),
identifier: 'general',
@@ -113,7 +116,8 @@
testid: 'settings-license',
},
hasPermission('settings/change') &&
isProApp() && {
isProApp() &&
!$settings?.['storage.disableAiFeatures'] && {
label: _t('settings.AI', { defaultMessage: 'AI' }),
identifier: 'ai',
component: AiSettingsTab,

View File

@@ -2,434 +2,436 @@
import { getActiveComponent } from '../utility/createActivator';
import registerCommand from '../commands/registerCommand';
import { __t } from '../translations';
const getCurrentEditor = () => getActiveComponent('TableDataTab');
const INTERVALS = [5, 10, 15, 30, 60];
const INTERVAL_COMMANDS = [
{
time: 5,
name: __t('command.datagrid.setAutoRefresh.5', { defaultMessage: 'Refresh every 5 seconds' }),
},
{
time: 10,
name: __t('command.datagrid.setAutoRefresh.10', { defaultMessage: 'Refresh every 10 seconds' }),
},
{
time: 15,
name: __t('command.datagrid.setAutoRefresh.15', { defaultMessage: 'Refresh every 15 seconds' }),
},
{
time: 30,
name: __t('command.datagrid.setAutoRefresh.30', { defaultMessage: 'Refresh every 30 seconds' }),
},
{
time: 60,
name: __t('command.datagrid.setAutoRefresh.60', { defaultMessage: 'Refresh every 60 seconds' }),
},
];
registerCommand({
id: 'tableData.save',
group: 'save',
category: __t('command.tableData', { defaultMessage: 'Table data' }),
name: __t('command.tableData.save', { defaultMessage: 'Save' }),
// keyText: 'CtrlOrCommand+S',
toolbar: true,
isRelatedToTab: true,
icon: 'icon save',
testEnabled: () => getCurrentEditor()?.canSave(),
onClick: () => getCurrentEditor().save(),
});
registerCommand({
id: 'tableData.setAutoRefresh.1',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name: __t('command.datagrid.setAutoRefresh.1', { defaultMessage: 'Refresh every 1 second' }),
isRelatedToTab: true,
testEnabled: () => !!getCurrentEditor(),
onClick: () => getCurrentEditor().setAutoRefresh(1),
});
for (const { time, name } of INTERVAL_COMMANDS) {
registerCommand({
id: `tableData.setAutoRefresh.${time}`,
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name,
isRelatedToTab: true,
testEnabled: () => !!getCurrentEditor(),
onClick: () => getCurrentEditor().setAutoRefresh(time),
});
}
registerCommand({
id: 'tableData.stopAutoRefresh',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name: __t('command.datagrid.stopAutoRefresh', { defaultMessage: 'Stop auto refresh' }),
isRelatedToTab: true,
keyText: 'CtrlOrCommand+Shift+R',
testEnabled: () => getCurrentEditor()?.isAutoRefresh() === true,
onClick: () => getCurrentEditor().stopAutoRefresh(null),
});
registerCommand({
id: 'tableData.startAutoRefresh',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name: __t('command.datagrid.startAutoRefresh', { defaultMessage: 'Start auto refresh' }),
isRelatedToTab: true,
keyText: 'CtrlOrCommand+Shift+R',
testEnabled: () => getCurrentEditor()?.isAutoRefresh() === false,
onClick: () => getCurrentEditor().startAutoRefresh(),
});
export const matchingProps = ['conid', 'database', 'schemaName', 'pureName', 'isRawMode'];
export const allowAddToFavorites = props => true;
export const allowSwitchDatabase = props => true;
</script>
<script lang="ts">
import _ from 'lodash';
import App from '../App.svelte';
import TableDataGrid from '../datagrid/TableDataGrid.svelte';
import useGridConfig from '../utility/useGridConfig';
import {
changeSetChangedCount,
changeSetContainsChanges,
changeSetToSql,
createChangeSet,
createGridCache,
getDeleteCascades,
} from 'dbgate-datalib';
import { findEngineDriver } from 'dbgate-tools';
import { reloadDataCacheFunc } from 'dbgate-datalib';
import { writable } from 'svelte/store';
import createUndoReducer from '../utility/createUndoReducer';
import invalidateCommands from '../commands/invalidateCommands';
import { showModal } from '../modals/modalTools';
import ErrorMessageModal from '../modals/ErrorMessageModal.svelte';
import { getTableInfo, useConnectionInfo, useDatabaseInfo } from '../utility/metadataLoaders';
import { scriptToSql } from 'dbgate-sqltree';
import { extensions, lastUsedDefaultActions } from '../stores';
import ConfirmSqlModal from '../modals/ConfirmSqlModal.svelte';
import createActivator from '../utility/createActivator';
import { registerMenu } from '../utility/contextMenu';
import { showSnackbarSuccess } from '../utility/snackbar';
import openNewTab from '../utility/openNewTab';
import { onDestroy, setContext } from 'svelte';
import { apiCall } from '../utility/api';
import { getLocalStorage, setLocalStorage } from '../utility/storageCache';
import ToolStripContainer from '../buttons/ToolStripContainer.svelte';
import ToolStripCommandButton from '../buttons/ToolStripCommandButton.svelte';
import ToolStripExportButton, { createQuickExportHandlerRef } from '../buttons/ToolStripExportButton.svelte';
import ToolStripCommandSplitButton from '../buttons/ToolStripCommandSplitButton.svelte';
import { getBoolSettingsValue, getIntSettingsValue } from '../settings/settingsTools';
import useEditorData from '../query/useEditorData';
import { markTabSaved, markTabUnsaved } from '../utility/common';
import ToolStripButton from '../buttons/ToolStripButton.svelte';
import { getNumberIcon } from '../icons/FontIcon.svelte';
import { _t } from '../translations';
export let tabid;
export let conid;
export let database;
export let schemaName;
export let pureName;
export let isRawMode = false;
export let tabPreviewMode;
export const activator = createActivator('TableDataTab', true);
const config = useGridConfig(tabid);
const cache = writable(createGridCache());
const dbinfo = useDatabaseInfo({ conid, database });
let autoRefreshInterval = getIntSettingsValue('dataGrid.defaultAutoRefreshInterval', 10, 1, 3600);
let autoRefreshStarted = false;
let autoRefreshTimer = null;
$: connection = useConnectionInfo({ conid });
const { editorState, editorValue, setEditorData } = useEditorData({
tabid,
onInitialData: value => {
dispatchChangeSet({ type: 'reset', value });
invalidateCommands();
if (changeSetContainsChanges(value)) {
markTabUnsaved(tabid);
}
},
});
const [changeSetStore, dispatchChangeSet] = createUndoReducer(createChangeSet());
$: {
setEditorData($changeSetStore.value);
if (changeSetContainsChanges($changeSetStore?.value)) {
markTabUnsaved(tabid);
} else {
markTabSaved(tabid);
}
}
// Execute the confirmed SQL script against the current database inside a
// transaction. On failure shows an error modal; on success resets local
// edits, forces a data reload and shows a success snackbar.
async function handleConfirmSql(sql) {
  const response = await apiCall('database-connections/run-script', {
    conid,
    database,
    sql,
    useTransaction: true,
  });

  const errorMessage = response?.errorMessage;
  if (errorMessage) {
    showModal(ErrorMessageModal, {
      title: _t('tableData.errorWhenSaving', { defaultMessage: 'Error when saving' }),
      message: errorMessage,
    });
    return;
  }

  dispatchChangeSet({ type: 'reset', value: createChangeSet() });
  cache.update(reloadDataCacheFunc);
  showSnackbarSuccess(_t('tableData.savedToDatabase', { defaultMessage: 'Saved to database' }));
}
// Save the pending change set. Two paths:
//  - restricted table permission roles save through the dedicated
//    save-table-data endpoint (no SQL is shown to the user);
//  - otherwise a SQL script is generated from the change set and either run
//    directly (when confirmation is skipped and there are no delete
//    cascades) or shown in a confirmation modal first.
export async function save() {
const driver = findEngineDriver($connection, $extensions);
const tablePermissionRole = (await getTableInfo({ conid, database, schemaName, pureName }))?.tablePermissionRole;
if (tablePermissionRole == 'create_update_delete' || tablePermissionRole == 'update_only') {
// Permission-restricted path: server applies the change set itself.
const resp = await apiCall('database-connections/save-table-data', {
conid,
database,
changeSet: $changeSetStore?.value,
});
const { errorMessage } = resp || {};
if (errorMessage) {
showModal(ErrorMessageModal, {
title: _t('tableData.errorWhenSaving', { defaultMessage: 'Error when saving' }),
message: errorMessage,
});
} else {
// Clear local edits and refresh the grid from the database.
dispatchChangeSet({ type: 'reset', value: createChangeSet() });
cache.update(reloadDataCacheFunc);
showSnackbarSuccess(_t('tableData.savedToDatabase', { defaultMessage: 'Saved to database' }));
}
} else {
// SQL path: build the save script from the change set via the engine driver.
const script = driver.createSaveChangeSetScript($changeSetStore?.value, $dbinfo, () =>
changeSetToSql($changeSetStore?.value, $dbinfo, driver.dialect)
);
// Cascading deletes are collected separately so the user can review them.
const deleteCascades = getDeleteCascades($changeSetStore?.value, $dbinfo);
const sql = scriptToSql(driver, script);
const deleteCascadesScripts = _.map(deleteCascades, ({ title, commands }) => ({
title,
script: scriptToSql(driver, commands),
}));
// console.log('deleteCascadesScripts', deleteCascadesScripts);
// Skip confirmation only when the user opted out AND there are no cascades.
if (getBoolSettingsValue('skipConfirm.tableDataSave', false) && !deleteCascadesScripts?.length) {
handleConfirmSql(sql);
} else {
showModal(ConfirmSqlModal, {
sql,
onConfirm: confirmedSql => handleConfirmSql(confirmedSql),
engine: driver.engine,
deleteCascadesScripts,
// Never remember "skip confirm" when delete cascades are involved.
skipConfirmSettingKey: deleteCascadesScripts?.length ? null : 'skipConfirm.tableDataSave',
});
}
}
}
// Command predicate: true when the change set contains unsaved edits
// (enables the tableData.save command / toolbar button).
export function canSave() {
return changeSetContainsChanges($changeSetStore?.value);
}
// Change the auto-refresh interval (seconds) and (re)start the refresh timer.
export function setAutoRefresh(interval) {
autoRefreshInterval = interval;
startAutoRefresh();
invalidateCommands();
}
// Reports whether periodic auto-refresh is currently running
// (used by the start/stop auto-refresh command enablement checks).
export function isAutoRefresh() {
return autoRefreshStarted;
}
// Begin periodic grid refreshes; any previously running timer is replaced.
export function startAutoRefresh() {
  closeRefreshTimer();
  const intervalMs = autoRefreshInterval * 1000;
  autoRefreshTimer = setInterval(() => cache.update(reloadDataCacheFunc), intervalMs);
  autoRefreshStarted = true;
  invalidateCommands();
}
// Stop periodic auto-refresh and update command enablement.
export function stopAutoRefresh() {
closeRefreshTimer();
autoRefreshStarted = false;
invalidateCommands();
}
// Clear the pending refresh interval timer (no-op when none is active).
function closeRefreshTimer() {
  if (!autoRefreshTimer) return;
  clearInterval(autoRefreshTimer);
  autoRefreshTimer = null;
}
$: {
$changeSetStore;
invalidateCommands();
}
registerMenu({ command: 'tableData.save', tag: 'save' });
const collapsedLeftColumnStore = writable(getLocalStorage('dataGrid_collapsedLeftColumn', false));
setContext('collapsedLeftColumnStore', collapsedLeftColumnStore);
$: setLocalStorage('dataGrid_collapsedLeftColumn', $collapsedLeftColumnStore);
onDestroy(() => {
closeRefreshTimer();
});
const quickExportHandlerRef = createQuickExportHandlerRef();
// Build the split-button submenu: deep refresh, start/stop toggles, the
// 1-second entry and one "refresh every N seconds" entry per interval.
function createAutoRefreshMenu() {
  const intervalItems = INTERVALS.map(seconds => ({
    command: `tableData.setAutoRefresh.${seconds}`,
    text: `...${seconds}` + ' ' + _t('command.datagrid.autoRefresh.seconds', { defaultMessage: 'seconds' }),
  }));
  return [
    { divider: true },
    { command: 'dataGrid.deepRefresh', hideDisabled: true },
    { command: 'tableData.stopAutoRefresh', hideDisabled: true },
    { command: 'tableData.startAutoRefresh', hideDisabled: true },
    'tableData.setAutoRefresh.1',
    ...intervalItems,
  ];
}
</script>
<ToolStripContainer>
<TableDataGrid
{...$$props}
config={$config}
setConfig={config.update}
cache={$cache}
setCache={cache.update}
changeSetState={$changeSetStore}
focusOnVisible
{changeSetStore}
{dispatchChangeSet}
/>
<svelte:fragment slot="toolstrip">
<ToolStripButton
icon="icon structure"
iconAfter="icon arrow-link"
on:click={() => {
if (tabPreviewMode && getBoolSettingsValue('defaultAction.useLastUsedAction', true)) {
lastUsedDefaultActions.update(actions => ({
...actions,
tables: 'openStructure',
}));
}
openNewTab({
title: pureName,
icon: 'img table-structure',
tabComponent: 'TableStructureTab',
tabPreviewMode: true,
props: {
schemaName,
pureName,
conid,
database,
objectTypeField: 'tables',
defaultActionId: 'openStructure',
},
});
}}>{_t('datagrid.structure', { defaultMessage: 'Structure' })}</ToolStripButton
>
<ToolStripButton
icon="img sql-file"
iconAfter="icon arrow-link"
on:click={() => {
if (tabPreviewMode && getBoolSettingsValue('defaultAction.useLastUsedAction', true)) {
lastUsedDefaultActions.update(actions => ({
...actions,
tables: 'showSql',
}));
}
openNewTab({
title: pureName,
icon: 'img sql-file',
tabComponent: 'SqlObjectTab',
tabPreviewMode: true,
props: {
schemaName,
pureName,
conid,
database,
objectTypeField: 'tables',
defaultActionId: 'showSql',
},
});
}}>SQL</ToolStripButton
>
<ToolStripCommandSplitButton
buttonLabel={autoRefreshStarted
? _t('tableData.refreshEvery', {
defaultMessage: 'Refresh (every {autoRefreshInterval}s)',
values: { autoRefreshInterval },
})
: null}
commands={['dataGrid.refresh', ...createAutoRefreshMenu()]}
hideDisabled
data-testid="TableDataTab_refreshGrid"
/>
<ToolStripCommandSplitButton
buttonLabel={autoRefreshStarted
? _t('tableData.refreshEvery', {
defaultMessage: 'Refresh (every {autoRefreshInterval}s)',
values: { autoRefreshInterval },
})
: null}
commands={['dataForm.refresh', ...createAutoRefreshMenu()]}
hideDisabled
data-testid="TableDataTab_refreshForm"
/>
<!-- <ToolStripCommandButton command="dataGrid.refresh" hideDisabled />
<ToolStripCommandButton command="dataForm.refresh" hideDisabled /> -->
<ToolStripCommandButton command="dataForm.goToFirst" hideDisabled data-testid="TableDataTab_goToFirst" />
<ToolStripCommandButton command="dataForm.goToPrevious" hideDisabled data-testid="TableDataTab_goToPrevious" />
<ToolStripCommandButton command="dataForm.goToNext" hideDisabled data-testid="TableDataTab_goToNext" />
<ToolStripCommandButton command="dataForm.goToLast" hideDisabled data-testid="TableDataTab_goToLast" />
<ToolStripCommandButton
command="tableData.save"
iconAfter={getNumberIcon(changeSetChangedCount($changeSetStore?.value))}
data-testid="TableDataTab_save"
/>
<ToolStripCommandButton
command="dataGrid.revertAllChanges"
hideDisabled
data-testid="TableDataTab_revertAllChanges"
/>
<ToolStripCommandButton command="dataGrid.insertNewRow" hideDisabled data-testid="TableDataTab_insertNewRow" />
<ToolStripCommandButton
command="dataGrid.deleteSelectedRows"
hideDisabled
data-testid="TableDataTab_deleteSelectedRows"
/>
<ToolStripCommandButton command="dataGrid.switchToForm" hideDisabled data-testid="TableDataTab_switchToForm" />
<ToolStripCommandButton command="dataGrid.switchToTable" hideDisabled data-testid="TableDataTab_switchToTable" />
<ToolStripExportButton {quickExportHandlerRef} />
<ToolStripButton
icon={$collapsedLeftColumnStore ? 'icon columns-outline' : 'icon columns'}
on:click={() => collapsedLeftColumnStore.update(x => !x)}
>{_t('tableData.viewColumns', { defaultMessage: 'View columns' })}</ToolStripButton
>
<ToolStripCommandButton
command="dataGrid.toggleCellDataView"
hideDisabled
data-testid="TableDataTab_toggleCellDataView"
/>
</svelte:fragment>
const getCurrentEditor = () => getActiveComponent('TableDataTab');
const INTERVALS = [5, 10, 15, 30, 60];
const INTERVAL_COMMANDS = [
{
time: 5,
name: __t('command.datagrid.setAutoRefresh.5', { defaultMessage: 'Refresh every 5 seconds' }),
},
{
time: 10,
name: __t('command.datagrid.setAutoRefresh.10', { defaultMessage: 'Refresh every 10 seconds' }),
},
{
time: 15,
name: __t('command.datagrid.setAutoRefresh.15', { defaultMessage: 'Refresh every 15 seconds' }),
},
{
time: 30,
name: __t('command.datagrid.setAutoRefresh.30', { defaultMessage: 'Refresh every 30 seconds' }),
},
{
time: 60,
name: __t('command.datagrid.setAutoRefresh.60', { defaultMessage: 'Refresh every 60 seconds' }),
},
];
registerCommand({
id: 'tableData.save',
group: 'save',
category: __t('command.tableData', { defaultMessage: 'Table data' }),
name: __t('command.tableData.save', { defaultMessage: 'Save' }),
// keyText: 'CtrlOrCommand+S',
toolbar: true,
isRelatedToTab: true,
icon: 'icon save',
testEnabled: () => getCurrentEditor()?.canSave(),
onClick: () => getCurrentEditor().save(),
});
registerCommand({
id: 'tableData.setAutoRefresh.1',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name: __t('command.datagrid.setAutoRefresh.1', { defaultMessage: 'Refresh every 1 second' }),
isRelatedToTab: true,
testEnabled: () => !!getCurrentEditor(),
onClick: () => getCurrentEditor().setAutoRefresh(1),
});
for (const { time, name } of INTERVAL_COMMANDS) {
registerCommand({
id: `tableData.setAutoRefresh.${time}`,
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name,
isRelatedToTab: true,
testEnabled: () => !!getCurrentEditor(),
onClick: () => getCurrentEditor().setAutoRefresh(time),
});
}
registerCommand({
id: 'tableData.stopAutoRefresh',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name: __t('command.datagrid.stopAutoRefresh', { defaultMessage: 'Stop auto refresh' }),
isRelatedToTab: true,
keyText: 'CtrlOrCommand+Shift+R',
testEnabled: () => getCurrentEditor()?.isAutoRefresh() === true,
onClick: () => getCurrentEditor().stopAutoRefresh(null),
});
registerCommand({
id: 'tableData.startAutoRefresh',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name: __t('command.datagrid.startAutoRefresh', { defaultMessage: 'Start auto refresh' }),
isRelatedToTab: true,
keyText: 'CtrlOrCommand+Shift+R',
testEnabled: () => getCurrentEditor()?.isAutoRefresh() === false,
onClick: () => getCurrentEditor().startAutoRefresh(),
});
export const matchingProps = ['conid', 'database', 'schemaName', 'pureName', 'isRawMode'];
export const allowAddToFavorites = props => true;
export const allowSwitchDatabase = props => true;
</script>
<script lang="ts">
import _ from 'lodash';
import App from '../App.svelte';
import TableDataGrid from '../datagrid/TableDataGrid.svelte';
import useGridConfig from '../utility/useGridConfig';
import {
changeSetChangedCount,
changeSetContainsChanges,
changeSetToSql,
createChangeSet,
createGridCache,
getDeleteCascades,
} from 'dbgate-datalib';
import { findEngineDriver } from 'dbgate-tools';
import { reloadDataCacheFunc } from 'dbgate-datalib';
import { writable } from 'svelte/store';
import createUndoReducer from '../utility/createUndoReducer';
import invalidateCommands from '../commands/invalidateCommands';
import { showModal } from '../modals/modalTools';
import ErrorMessageModal from '../modals/ErrorMessageModal.svelte';
import { getTableInfo, useConnectionInfo, useDatabaseInfo } from '../utility/metadataLoaders';
import { scriptToSql } from 'dbgate-sqltree';
import { extensions, lastUsedDefaultActions } from '../stores';
import ConfirmSqlModal from '../modals/ConfirmSqlModal.svelte';
import createActivator from '../utility/createActivator';
import { registerMenu } from '../utility/contextMenu';
import { showSnackbarSuccess } from '../utility/snackbar';
import openNewTab from '../utility/openNewTab';
import { onDestroy, setContext } from 'svelte';
import { apiCall } from '../utility/api';
import { getLocalStorage, setLocalStorage } from '../utility/storageCache';
import ToolStripContainer from '../buttons/ToolStripContainer.svelte';
import ToolStripCommandButton from '../buttons/ToolStripCommandButton.svelte';
import ToolStripExportButton, { createQuickExportHandlerRef } from '../buttons/ToolStripExportButton.svelte';
import ToolStripCommandSplitButton from '../buttons/ToolStripCommandSplitButton.svelte';
import { getBoolSettingsValue, getIntSettingsValue } from '../settings/settingsTools';
import useEditorData from '../query/useEditorData';
import { markTabSaved, markTabUnsaved } from '../utility/common';
import ToolStripButton from '../buttons/ToolStripButton.svelte';
import { getNumberIcon } from '../icons/FontIcon.svelte';
import { _t } from '../translations';
export let tabid;
export let conid;
export let database;
export let schemaName;
export let pureName;
export let isRawMode = false;
export let tabPreviewMode;
export const activator = createActivator('TableDataTab', true);
const config = useGridConfig(tabid);
const cache = writable(createGridCache());
const dbinfo = useDatabaseInfo({ conid, database });
let autoRefreshInterval = getIntSettingsValue('dataGrid.defaultAutoRefreshInterval', 10, 1, 3600);
let autoRefreshStarted = false;
let autoRefreshTimer = null;
$: connection = useConnectionInfo({ conid });
const { editorState, editorValue, setEditorData } = useEditorData({
tabid,
onInitialData: value => {
dispatchChangeSet({ type: 'reset', value });
invalidateCommands();
if (changeSetContainsChanges(value)) {
markTabUnsaved(tabid);
}
},
});
const [changeSetStore, dispatchChangeSet] = createUndoReducer(createChangeSet());
$: {
setEditorData($changeSetStore.value);
if (changeSetContainsChanges($changeSetStore?.value)) {
markTabUnsaved(tabid);
} else {
markTabSaved(tabid);
}
}
// Execute the confirmed SQL script in a transaction; show an error modal on
// failure, otherwise reset local edits, reload data and show a snackbar.
async function handleConfirmSql(sql) {
const resp = await apiCall('database-connections/run-script', { conid, database, sql, useTransaction: true });
const { errorMessage } = resp || {};
if (errorMessage) {
showModal(ErrorMessageModal, {
title: _t('tableData.errorWhenSaving', { defaultMessage: 'Error when saving' }),
message: errorMessage,
});
} else {
dispatchChangeSet({ type: 'reset', value: createChangeSet() });
cache.update(reloadDataCacheFunc);
showSnackbarSuccess(_t('tableData.savedToDatabase', { defaultMessage: 'Saved to database' }));
}
}
// Save the pending change set: restricted permission roles go through the
// save-table-data endpoint; otherwise a SQL script is generated and either
// run directly (skip-confirm, no cascades) or confirmed in a modal.
export async function save() {
const driver = findEngineDriver($connection, $extensions);
const tablePermissionRole = (await getTableInfo({ conid, database, schemaName, pureName }))?.tablePermissionRole;
if (tablePermissionRole == 'create_update_delete' || tablePermissionRole == 'update_only') {
const resp = await apiCall('database-connections/save-table-data', {
conid,
database,
changeSet: $changeSetStore?.value,
});
const { errorMessage } = resp || {};
if (errorMessage) {
showModal(ErrorMessageModal, {
title: _t('tableData.errorWhenSaving', { defaultMessage: 'Error when saving' }),
message: errorMessage,
});
} else {
dispatchChangeSet({ type: 'reset', value: createChangeSet() });
cache.update(reloadDataCacheFunc);
showSnackbarSuccess(_t('tableData.savedToDatabase', { defaultMessage: 'Saved to database' }));
}
} else {
// Build the save script and collect delete cascades for user review.
const script = driver.createSaveChangeSetScript($changeSetStore?.value, $dbinfo, () =>
changeSetToSql($changeSetStore?.value, $dbinfo, driver.dialect)
);
const deleteCascades = getDeleteCascades($changeSetStore?.value, $dbinfo);
const sql = scriptToSql(driver, script);
const deleteCascadesScripts = _.map(deleteCascades, ({ title, commands }) => ({
title,
script: scriptToSql(driver, commands),
}));
// console.log('deleteCascadesScripts', deleteCascadesScripts);
if (getBoolSettingsValue('skipConfirm.tableDataSave', false) && !deleteCascadesScripts?.length) {
handleConfirmSql(sql);
} else {
showModal(ConfirmSqlModal, {
sql,
onConfirm: confirmedSql => handleConfirmSql(confirmedSql),
engine: driver.engine,
deleteCascadesScripts,
skipConfirmSettingKey: deleteCascadesScripts?.length ? null : 'skipConfirm.tableDataSave',
});
}
}
}
// True when the change set contains unsaved edits (enables tableData.save).
export function canSave() {
return changeSetContainsChanges($changeSetStore?.value);
}
// Change the auto-refresh interval (seconds) and (re)start the timer.
export function setAutoRefresh(interval) {
autoRefreshInterval = interval;
startAutoRefresh();
invalidateCommands();
}
// Reports whether periodic auto-refresh is currently running.
export function isAutoRefresh() {
return autoRefreshStarted;
}
// Begin periodic grid refreshes; replaces any previously running timer.
export function startAutoRefresh() {
closeRefreshTimer();
autoRefreshTimer = setInterval(() => {
cache.update(reloadDataCacheFunc);
}, autoRefreshInterval * 1000);
autoRefreshStarted = true;
invalidateCommands();
}
// Stop periodic auto-refresh and update command enablement.
export function stopAutoRefresh() {
closeRefreshTimer();
autoRefreshStarted = false;
invalidateCommands();
}
// Clear the pending refresh interval timer (no-op when none is active).
function closeRefreshTimer() {
if (autoRefreshTimer) {
clearInterval(autoRefreshTimer);
autoRefreshTimer = null;
}
}
$: {
$changeSetStore;
invalidateCommands();
}
registerMenu({ command: 'tableData.save', tag: 'save' });
const collapsedLeftColumnStore = writable(getLocalStorage('dataGrid_collapsedLeftColumn', false));
setContext('collapsedLeftColumnStore', collapsedLeftColumnStore);
$: setLocalStorage('dataGrid_collapsedLeftColumn', $collapsedLeftColumnStore);
onDestroy(() => {
closeRefreshTimer();
});
const quickExportHandlerRef = createQuickExportHandlerRef();
// Build the split-button submenu: deep refresh, start/stop toggles, the
// 1-second entry and one "refresh every N seconds" entry per interval.
function createAutoRefreshMenu() {
return [
{ divider: true },
{ command: 'dataGrid.deepRefresh', hideDisabled: true },
{ command: 'tableData.stopAutoRefresh', hideDisabled: true },
{ command: 'tableData.startAutoRefresh', hideDisabled: true },
'tableData.setAutoRefresh.1',
...INTERVALS.map(seconds => ({
command: `tableData.setAutoRefresh.${seconds}`,
text: `...${seconds}` + ' ' + _t('command.datagrid.autoRefresh.seconds', { defaultMessage: 'seconds' }),
})),
];
}
</script>
<ToolStripContainer>
<TableDataGrid
{...$$props}
config={$config}
setConfig={config.update}
cache={$cache}
setCache={cache.update}
changeSetState={$changeSetStore}
focusOnVisible
{changeSetStore}
{dispatchChangeSet}
/>
<svelte:fragment slot="toolstrip">
<ToolStripButton
icon="icon structure"
iconAfter="icon arrow-link"
on:click={() => {
if (tabPreviewMode && getBoolSettingsValue('defaultAction.useLastUsedAction', true)) {
lastUsedDefaultActions.update(actions => ({
...actions,
tables: 'openStructure',
}));
}
openNewTab({
title: pureName,
icon: 'img table-structure',
tabComponent: 'TableStructureTab',
tabPreviewMode: true,
props: {
schemaName,
pureName,
conid,
database,
objectTypeField: 'tables',
defaultActionId: 'openStructure',
},
});
}}>{_t('datagrid.structure', { defaultMessage: 'Structure' })}</ToolStripButton
>
<ToolStripButton
icon="img sql-file"
iconAfter="icon arrow-link"
on:click={() => {
if (tabPreviewMode && getBoolSettingsValue('defaultAction.useLastUsedAction', true)) {
lastUsedDefaultActions.update(actions => ({
...actions,
tables: 'showSql',
}));
}
openNewTab({
title: pureName,
icon: 'img sql-file',
tabComponent: 'SqlObjectTab',
tabPreviewMode: true,
props: {
schemaName,
pureName,
conid,
database,
objectTypeField: 'tables',
defaultActionId: 'showSql',
},
});
}}>SQL</ToolStripButton
>
<ToolStripCommandSplitButton
buttonLabel={autoRefreshStarted
? _t('tableData.refreshEvery', {
defaultMessage: 'Refresh (every {autoRefreshInterval}s)',
values: { autoRefreshInterval },
})
: null}
commands={['dataGrid.refresh', ...createAutoRefreshMenu()]}
hideDisabled
data-testid="TableDataTab_refreshGrid"
/>
<ToolStripCommandSplitButton
buttonLabel={autoRefreshStarted
? _t('tableData.refreshEvery', {
defaultMessage: 'Refresh (every {autoRefreshInterval}s)',
values: { autoRefreshInterval },
})
: null}
commands={['dataForm.refresh', ...createAutoRefreshMenu()]}
hideDisabled
data-testid="TableDataTab_refreshForm"
/>
<!-- <ToolStripCommandButton command="dataGrid.refresh" hideDisabled />
<ToolStripCommandButton command="dataForm.refresh" hideDisabled /> -->
<ToolStripCommandButton command="dataForm.goToFirst" hideDisabled data-testid="TableDataTab_goToFirst" />
<ToolStripCommandButton command="dataForm.goToPrevious" hideDisabled data-testid="TableDataTab_goToPrevious" />
<ToolStripCommandButton command="dataForm.goToNext" hideDisabled data-testid="TableDataTab_goToNext" />
<ToolStripCommandButton command="dataForm.goToLast" hideDisabled data-testid="TableDataTab_goToLast" />
<ToolStripCommandButton
command="tableData.save"
iconAfter={getNumberIcon(changeSetChangedCount($changeSetStore?.value))}
data-testid="TableDataTab_save"
/>
<ToolStripCommandButton
command="dataGrid.revertAllChanges"
hideDisabled
data-testid="TableDataTab_revertAllChanges"
/>
<ToolStripCommandButton command="dataGrid.insertNewRow" hideDisabled data-testid="TableDataTab_insertNewRow" />
<ToolStripCommandButton
command="dataGrid.deleteSelectedRows"
hideDisabled
data-testid="TableDataTab_deleteSelectedRows"
/>
<ToolStripCommandButton command="dataGrid.switchToForm" hideDisabled data-testid="TableDataTab_switchToForm" />
<ToolStripCommandButton command="dataGrid.switchToTable" hideDisabled data-testid="TableDataTab_switchToTable" />
<ToolStripExportButton {quickExportHandlerRef} />
<ToolStripCommandButton command="dataGrid.fetchAll" hideDisabled data-testid="TableDataTab_fetchAll" />
<ToolStripButton
icon={$collapsedLeftColumnStore ? 'icon columns-outline' : 'icon columns'}
on:click={() => collapsedLeftColumnStore.update(x => !x)}
>{_t('tableData.viewColumns', { defaultMessage: 'View columns' })}</ToolStripButton
>
<ToolStripCommandButton
command="dataGrid.toggleCellDataView"
hideDisabled
data-testid="TableDataTab_toggleCellDataView"
/>
</svelte:fragment>
</ToolStripContainer>

View File

@@ -129,6 +129,7 @@
<ToolStripCommandButton command="dataGrid.refresh" />
<ToolStripExportButton {quickExportHandlerRef} />
<ToolStripCommandButton command="dataGrid.fetchAll" hideDisabled />
<ToolStripCommandButton command="dataGrid.toggleCellDataView" hideDisabled />
</svelte:fragment>
</ToolStripContainer>

View File

@@ -98,29 +98,31 @@ const clipboardTextFormatter = (delimiter, headers) => (columns, rows, options)
const clipboardJsonFormatter = () => (columns, rows) => {
return JSON.stringify(
rows.map(row => _.pick(row, columns)),
rows.map(row => _.omitBy(_.pick(row, columns), _.isUndefined)),
undefined,
2
);
};
const clipboardYamlFormatter = () => (columns, rows) => {
return yaml.dump(rows.map(row => _.pick(row, columns)));
return yaml.dump(rows.map(row => _.omitBy(_.pick(row, columns), _.isUndefined)));
};
const clipboardJsonLinesFormatter = () => (columns, rows) => {
return rows.map(row => JSON.stringify(_.pick(row, columns))).join('\r\n');
return rows.map(row => JSON.stringify(_.omitBy(_.pick(row, columns), _.isUndefined))).join('\r\n');
};
const clipboardInsertsFormatter = () => (columns, rows, options) => {
const { schemaName, pureName, driver } = options;
const dmp = driver.createDumper();
for (const row of rows) {
const definedColumns = columns.filter(col => row[col] !== undefined);
if (definedColumns.length === 0) continue;
dmp.putCmd(
'^insert ^into %f (%,i) ^values (%,v)',
{ schemaName, pureName },
columns,
columns.map(col => row[col])
definedColumns,
definedColumns.map(col => row[col])
);
}
return dmp.s;
@@ -130,8 +132,10 @@ const clipboardUpdatesFormatter = () => (columns, rows, options) => {
const { schemaName, pureName, driver, keyColumns } = options;
const dmp = driver.createDumper();
for (const row of rows) {
const definedColumns = columns.filter(col => row[col] !== undefined);
if (definedColumns.length === 0) continue;
dmp.put('^update %f ^set ', { schemaName, pureName });
dmp.putCollection(', ', columns, col => dmp.put('%i=%v', col, row[col]));
dmp.putCollection(', ', definedColumns, col => dmp.put('%i=%v', col, row[col]));
dmp.put(' ^where ');
dmp.putCollection(' ^and ', keyColumns, col => dmp.put('%i=%v', col, row[col]));
dmp.endCommand();
@@ -141,7 +145,7 @@ const clipboardUpdatesFormatter = () => (columns, rows, options) => {
const clipboardMongoInsertFormatter = () => (columns, rows, options) => {
const { pureName } = options;
return rows.map(row => `db.${pureName}.insert(${JSON.stringify(_.pick(row, columns), undefined, 2)});`).join('\n');
return rows.map(row => `db.${pureName}.insert(${JSON.stringify(_.omitBy(_.pick(row, columns), _.isUndefined), undefined, 2)});`).join('\n');
};
export function formatClipboardRows(format, columns, rows, options) {

View File

@@ -0,0 +1,353 @@
import { apiCall, apiOff, apiOn } from './api';
import getElectron from './getElectron';
import resolveApi, { resolveApiHeaders } from './resolveApi';
/** Callbacks through which fetchAll delivers rows, completion and errors. */
export interface FetchAllCallbacks {
  /** Called with each page of rows as they arrive. An empty array may be
   * passed as a progress ping while the source is still being written. */
  onPage(rows: object[]): void;
  /** Called once when all data has been received. */
  onFinished(): void;
  /** Called if an error occurs. */
  onError(message: string): void;
}
/** Handle returned by fetchAll that lets the caller abort the fetch. */
export interface FetchAllHandle {
  /** Signal the loader to stop fetching. */
  cancel(): void;
}
// Rows per onPage() batch when streaming the JSONL file on the web.
const STREAM_BATCH_SIZE = 1000;
// Page size for the web paginated fallback (larger than the default 100
// to reduce HTTP round trips).
const WEB_PAGE_SIZE = 5000;
/**
 * Fetches all rows from a JSONL source and delivers them through callbacks.
 *
 * Electron: uses paginated `jsldata/get-rows` via IPC (already fast).
 * Web: waits for the source to finish, then streams the entire JSONL file
 *      in a single HTTP request via `jsldata/stream-rows`, parsing lines
 *      progressively with ReadableStream. Falls back to paginated reads
 *      with larger page sizes if streaming is unavailable.
 */
export function fetchAll(
  jslid: string,
  loadDataPage: (offset: number, limit: number) => Promise<any>,
  callbacks: FetchAllCallbacks,
  pageSize: number = 100
): FetchAllHandle {
  // Electron IPC pagination is fast; the browser build prefers streaming.
  const runningInElectron = Boolean(getElectron());
  return runningInElectron
    ? fetchAllPaginated(jslid, loadDataPage, callbacks, pageSize)
    : fetchAllWeb(jslid, loadDataPage, callbacks);
}
/**
 * Web strategy: listen to SSE stats for progress, once source is finished
 * stream the entire JSONL in one HTTP request.
 *
 * Falls back to the paginated strategy (with WEB_PAGE_SIZE) when the
 * streaming endpoint is unavailable (missing body, 404 or 405).
 */
function fetchAllWeb(
  jslid: string,
  loadDataPage: (offset: number, limit: number) => Promise<any>,
  callbacks: FetchAllCallbacks
): FetchAllHandle {
  // Mutable state shared by the SSE handler, the stream loop and cancel().
  let cancelled = false;
  let streamStarted = false;
  let abortController: AbortController | null = null;
  let streamReader: ReadableStreamDefaultReader<Uint8Array> | null = null;
  // Initialize cancelFn before registering the SSE handler to avoid TDZ errors
  // if an immediate stats event triggers fallbackToPaginated() before initialization.
  let cancelFn = () => {
    cancelled = true;
    if (streamReader) {
      streamReader.cancel().catch(() => {});
      streamReader = null;
    }
    if (abortController) {
      abortController.abort();
      abortController = null;
    }
    cleanup();
  };
  // SSE stats handler: pings the UI while the source is writing and starts
  // the one-shot stream once the source reports completion.
  const handleStats = (stats: { rowCount: number; changeIndex: number; isFinished: boolean }) => {
    if (cancelled || streamStarted) return;
    // Report progress while source is still writing
    if (!stats.isFinished) {
      callbacks.onPage([]); // trigger UI update with count info
      return;
    }
    // Source finished — stream all rows at once
    streamStarted = true;
    startStream();
  };
  apiOn(`jsldata-stats-${jslid}`, handleStats);
  // Downloads the whole JSONL file in one request and parses it line by line,
  // delivering rows to onPage() in batches of STREAM_BATCH_SIZE.
  async function startStream() {
    abortController = new AbortController();
    try {
      const resp = await fetch(`${resolveApi()}/jsldata/stream-rows?jslid=${encodeURIComponent(jslid)}`, {
        method: 'GET',
        cache: 'no-cache',
        signal: abortController.signal,
        headers: {
          ...resolveApiHeaders(),
        },
      });
      if (!resp.body || resp.status === 404 || resp.status === 405) {
        // Streaming endpoint not available in this environment — fall back to paginated reads
        cleanup();
        fallbackToPaginated();
        return;
      }
      if (!resp.ok) {
        // Non-recoverable server error (e.g. 403 security rejection, 5xx) — surface it
        callbacks.onError(`HTTP ${resp.status}: ${resp.statusText}`);
        cleanup();
        return;
      }
      streamReader = resp.body.getReader();
      const decoder = new TextDecoder();
      let buffer = ''; // holds the trailing partial line between chunks
      let isFirstLine = true;
      let batch: any[] = [];
      while (!cancelled) {
        const { done, value } = await streamReader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        const lines = buffer.split('\n');
        buffer = lines.pop() || '';
        for (const line of lines) {
          if (cancelled) break;
          const trimmed = line.trim();
          if (!trimmed) continue;
          if (isFirstLine) {
            isFirstLine = false;
            // Check if first line is a header
            try {
              const parsed = JSON.parse(trimmed);
              if (parsed.__isStreamHeader) continue;
              // Not a header — it's a data row
              batch.push(parsed);
            } catch {
              continue;
            }
            continue;
          }
          try {
            batch.push(JSON.parse(trimmed));
          } catch {
            // skip malformed lines
          }
          if (batch.length >= STREAM_BATCH_SIZE) {
            if (cancelled) break;
            callbacks.onPage(batch);
            batch = [];
          }
        }
      }
      // Flush the decoder — any bytes held for multi-byte char completion are released
      const flushed = decoder.decode();
      if (flushed) buffer += flushed;
      // Process remaining buffer
      const remainingBuffer = buffer.trim();
      if (remainingBuffer && !cancelled) {
        try {
          const parsed = JSON.parse(remainingBuffer);
          if (!parsed.__isStreamHeader) {
            batch.push(parsed);
          }
        } catch {
          // ignore
        }
      }
      if (batch.length > 0 && !cancelled) {
        callbacks.onPage(batch);
      }
      if (!cancelled) {
        callbacks.onFinished();
      }
    } catch (err) {
      if (!cancelled) {
        callbacks.onError(err?.message ?? String(err));
      }
    } finally {
      streamReader = null;
      abortController = null;
      cleanup();
    }
  }
  // Switches this handle over to the paginated strategy; rebinding cancelFn
  // keeps the already-returned handle's cancel() effective for the new loader.
  function fallbackToPaginated() {
    const handle = fetchAllPaginated(jslid, loadDataPage, callbacks, WEB_PAGE_SIZE);
    cancelFn = handle.cancel;
  }
  // Unsubscribe from SSE stats events.
  function cleanup() {
    apiOff(`jsldata-stats-${jslid}`, handleStats);
  }
  // Check if data is already finished
  checkInitialState();
  // Queries current stats once; if the source already finished before we
  // subscribed, start streaming (or finish immediately for an empty source).
  async function checkInitialState() {
    try {
      const stats = await apiCall('jsldata/get-stats', { jslid });
      if (stats && stats.isFinished && stats.rowCount > 0) {
        streamStarted = true;
        startStream();
      } else if (stats && stats.isFinished && stats.rowCount === 0) {
        // Source finished with zero rows — no SSE event will follow, finish immediately
        cleanup();
        callbacks.onFinished();
      }
      // Source still writing or no stats yet — SSE events will trigger stream when done
    } catch {
      // Stats not available yet — SSE events will arrive
    }
  }
  return {
    cancel() {
      cancelFn();
    },
  };
}
/**
 * Paginated strategy (Electron / fallback): uses `jsldata/get-rows` with
 * SSE stats events to know when new data is available.
 *
 * Pages of `pageSize` rows are pulled via loadDataPage until the source
 * reports it has finished and all announced rows have been delivered.
 */
function fetchAllPaginated(
  jslid: string,
  loadDataPage: (offset: number, limit: number) => Promise<any>,
  callbacks: FetchAllCallbacks,
  pageSize: number
): FetchAllHandle {
  let cancelled = false;
  let finished = false; // guards against calling onFinished() twice
  let offset = 0; // number of rows delivered so far
  let isRunning = false; // a drain() loop is currently in flight
  let isSourceFinished = false; // source reported isFinished via stats
  let drainRequested = false; // stats arrived while drain() was running
  // Idempotent completion: notify the caller once and unsubscribe.
  function finish() {
    if (finished) return;
    finished = true;
    callbacks.onFinished();
    cleanup();
  }
  // SSE stats handler: drains newly announced rows, or finishes once the
  // source is done and everything announced has been delivered.
  const handleStats = (stats: { rowCount: number; changeIndex: number; isFinished: boolean }) => {
    isSourceFinished = stats.isFinished;
    if (stats.rowCount > offset) {
      scheduleDrain();
    } else if (stats.isFinished && stats.rowCount === offset) {
      finish();
    }
  };
  // Coalesces drain requests: if a drain is already running, flag it to run
  // again when the current pass ends instead of starting a second loop.
  function scheduleDrain() {
    if (isRunning) {
      drainRequested = true;
      return;
    }
    drain();
  }
  apiOn(`jsldata-stats-${jslid}`, handleStats);
  // Reads pages sequentially until a short page (caught up) or cancellation.
  async function drain() {
    if (isRunning || cancelled) return;
    isRunning = true;
    drainRequested = false;
    try {
      while (!cancelled) {
        const rows = await loadDataPage(offset, pageSize);
        if (cancelled) break;
        // loadDataPage may resolve to an error object instead of a row array
        if (rows.errorMessage) {
          callbacks.onError(rows.errorMessage);
          cleanup();
          return;
        }
        if (rows.length > 0) {
          offset += rows.length;
          callbacks.onPage(rows);
        }
        if (rows.length < pageSize) {
          // Short page: we are caught up with everything written so far
          if (isSourceFinished) {
            finish();
            return;
          }
          break;
        }
        // Yield to the event loop between full pages to keep the UI responsive
        await new Promise(resolve => setTimeout(resolve, 0));
      }
    } catch (err) {
      if (!cancelled) {
        const msg = err?.message ?? String(err);
        if (msg.includes('ENOENT')) {
          // File not ready yet
        } else {
          callbacks.onError(msg);
          cleanup();
        }
      }
    } finally {
      isRunning = false;
      // Re-run if new rows were announced while this pass was in flight
      if (drainRequested && !cancelled) {
        scheduleDrain();
      }
    }
  }
  // Unsubscribe from SSE stats events.
  function cleanup() {
    apiOff(`jsldata-stats-${jslid}`, handleStats);
  }
  checkInitialState();
  // Queries current stats once, in case rows were already available (or the
  // source already finished empty) before we subscribed to SSE events.
  async function checkInitialState() {
    try {
      const stats = await apiCall('jsldata/get-stats', { jslid });
      if (stats) {
        isSourceFinished = stats.isFinished;
        if (stats.rowCount > 0) {
          scheduleDrain();
        } else if (stats.isFinished && !cancelled) {
          // rowCount === 0: source finished empty — no SSE event will follow
          finish();
        }
      }
    } catch {
      // Stats not available yet
    }
  }
  return {
    cancel() {
      cancelled = true;
      cleanup();
    },
  };
}

View File

@@ -2,9 +2,12 @@
import { rightPanelWidget } from '../stores';
import hasPermission from '../utility/hasPermission';
import { isProApp } from '../utility/proTools';
import { useSettings } from '../utility/metadataLoaders';
import ThemeAiAssistantWidget from '../ai/ThemeAiAssistantWidget.svelte';
const settings = useSettings();
</script>
{#if $rightPanelWidget == 'themeAiAssistant' && hasPermission('widgets/themeAiAssistant') && isProApp()}
{#if $rightPanelWidget == 'themeAiAssistant' && hasPermission('widgets/themeAiAssistant') && isProApp() && !$settings?.['storage.disableAiFeatures']}
<ThemeAiAssistantWidget />
{/if}

View File

@@ -72,6 +72,8 @@ class Analyser extends DatabaseAnalyser {
...replacements,
$typeAggFunc: this.driver.dialect.stringAgg ? 'string_agg' : 'max',
$typeAggParam: this.driver.dialect.stringAgg ? ", '|'" : '',
$hashColumnAggTail: this.driver.dialect.stringAgg ? ", ',' ORDER BY a.attnum" : '',
$hashConstraintAggTail: this.driver.dialect.stringAgg ? ", ',' ORDER BY con.conname" : '',
$md5Function: this.dialect?.isFipsComplianceOn ? 'LENGTH' : 'MD5',
});
return query;
@@ -83,131 +85,92 @@ class Analyser extends DatabaseAnalyser {
}
async _runAnalysis() {
this.feedback({ analysingMessage: 'DBGM-00241 Loading tables' });
const tables = await this.analyserQuery('tableList', ['tables']);
const useInfoSchema = this.driver.__analyserInternals.useInfoSchemaRoutines;
const routinesQueryName = useInfoSchema ? 'routinesInfoSchema' : 'routines';
const proceduresParametersQueryName = useInfoSchema ? 'proceduresParametersInfoSchema' : 'proceduresParameters';
this.feedback({ analysingMessage: 'DBGM-00242 Loading columns' });
const columns = await this.analyserQuery('columns', ['tables', 'views']);
this.feedback({ analysingMessage: 'DBGM-00243 Loading primary keys' });
const pkColumns = await this.analyserQuery('primaryKeys', ['tables']);
let fkColumns = null;
this.feedback({ analysingMessage: 'DBGM-00244 Loading foreign key constraints' });
// const fk_tableConstraints = await this.analyserQuery('fk_tableConstraints', ['tables']);
this.feedback({ analysingMessage: 'DBGM-00245 Loading foreign key refs' });
const foreignKeys = await this.analyserQuery('foreignKeys', ['tables']);
this.feedback({ analysingMessage: 'DBGM-00246 Loading foreign key columns' });
const fk_keyColumnUsage = await this.analyserQuery('fk_keyColumnUsage', ['tables']);
// const cntKey = x => `${x.constraint_name}|${x.constraint_schema}`;
const fkRows = [];
// const fkConstraintDct = _.keyBy(fk_tableConstraints.rows, cntKey);
for (const fkRef of foreignKeys.rows) {
// const cntBase = fkConstraintDct[cntKey(fkRef)];
// const cntRef = fkConstraintDct[`${fkRef.unique_constraint_name}|${fkRef.unique_constraint_schema}`];
// if (!cntBase || !cntRef) continue;
const baseCols = _.sortBy(
fk_keyColumnUsage.rows.filter(
x =>
x.table_name == fkRef.table_name &&
x.constraint_name == fkRef.constraint_name &&
x.table_schema == fkRef.table_schema
),
'ordinal_position'
);
const refCols = _.sortBy(
fk_keyColumnUsage.rows.filter(
x =>
x.table_name == fkRef.ref_table_name &&
x.constraint_name == fkRef.unique_constraint_name &&
x.table_schema == fkRef.ref_table_schema
),
'ordinal_position'
);
if (baseCols.length != refCols.length) continue;
for (let i = 0; i < baseCols.length; i++) {
const baseCol = baseCols[i];
const refCol = refCols[i];
fkRows.push({
...fkRef,
pure_name: fkRef.table_name,
schema_name: fkRef.table_schema,
ref_table_name: fkRef.ref_table_name,
ref_schema_name: fkRef.ref_table_schema,
column_name: baseCol.column_name,
ref_column_name: refCol.column_name,
update_action: fkRef.update_action,
delete_action: fkRef.delete_action,
});
}
}
fkColumns = { rows: fkRows };
this.feedback({ analysingMessage: 'DBGM-00247 Loading views' });
const views = await this.analyserQuery('views', ['views']);
this.feedback({ analysingMessage: 'DBGM-00248 Loading materialized views' });
const matviews = this.driver.dialect.materializedViews ? await this.analyserQuery('matviews', ['matviews']) : null;
this.feedback({ analysingMessage: 'DBGM-00249 Loading materialized view columns' });
const matviewColumns = this.driver.dialect.materializedViews
? await this.analyserQuery('matviewColumns', ['matviews'])
: null;
this.feedback({ analysingMessage: 'DBGM-00250 Loading routines' });
const routines = await this.analyserQuery('routines', ['procedures', 'functions']);
this.feedback({ analysingMessage: 'DBGM-00251 Loading routine parameters' });
const routineParametersRows = await this.analyserQuery('proceduresParameters');
this.feedback({ analysingMessage: 'DBGM-00252 Loading indexes' });
const indexes = this.driver.__analyserInternals.skipIndexes
? { rows: [] }
: await this.analyserQuery('indexes', ['tables']);
this.feedback({ analysingMessage: 'DBGM-00253 Loading index columns' });
const indexcols = this.driver.__analyserInternals.skipIndexes
? { rows: [] }
: await this.analyserQuery('indexcols', ['tables']);
this.feedback({ analysingMessage: 'DBGM-00254 Loading unique names' });
const uniqueNames = await this.analyserQuery('uniqueNames', ['tables']);
// Run all independent queries in parallel
this.feedback({ analysingMessage: 'DBGM-00241 Loading database structure' });
const [
tables,
views,
columns,
pkColumns,
foreignKeys,
uniqueNames,
routines,
routineParametersRows,
indexes,
indexcols,
matviews,
matviewColumns,
triggers,
] = await Promise.all([
this.analyserQuery('tableList', ['tables']),
this.analyserQuery('views', ['views']),
this.analyserQuery('columns', ['tables', 'views']),
this.analyserQuery('primaryKeys', ['tables']),
this.analyserQuery('foreignKeys', ['tables']),
this.analyserQuery('uniqueNames', ['tables']),
this.analyserQuery(routinesQueryName, ['procedures', 'functions']),
this.analyserQuery(proceduresParametersQueryName),
this.driver.__analyserInternals.skipIndexes
? Promise.resolve({ rows: [] })
: this.analyserQuery('indexes', ['tables']),
this.driver.__analyserInternals.skipIndexes
? Promise.resolve({ rows: [] })
: this.analyserQuery('indexcols', ['tables']),
this.driver.dialect.materializedViews
? this.analyserQuery('matviews', ['matviews'])
: Promise.resolve(null),
this.driver.dialect.materializedViews
? this.analyserQuery('matviewColumns', ['matviews'])
: Promise.resolve(null),
this.analyserQuery('triggers'),
]);
// Load geometry/geography columns if the views exist (these are rare, so run after views are loaded)
let geometryColumns = { rows: [] };
if (views.rows.find(x => x.pure_name == 'geometry_columns' && x.schema_name == 'public')) {
this.feedback({ analysingMessage: 'DBGM-00255 Loading geometry columns' });
geometryColumns = await this.analyserQuery('geometryColumns', ['tables']);
}
let geographyColumns = { rows: [] };
if (views.rows.find(x => x.pure_name == 'geography_columns' && x.schema_name == 'public')) {
this.feedback({ analysingMessage: 'DBGM-00256 Loading geography columns' });
geographyColumns = await this.analyserQuery('geographyColumns', ['tables']);
const hasGeometry = views.rows.find(x => x.pure_name == 'geometry_columns' && x.schema_name == 'public');
const hasGeography = views.rows.find(x => x.pure_name == 'geography_columns' && x.schema_name == 'public');
if (hasGeometry || hasGeography) {
const [geomCols, geogCols] = await Promise.all([
hasGeometry
? this.analyserQuery('geometryColumns', ['tables'])
: Promise.resolve({ rows: [] }),
hasGeography
? this.analyserQuery('geographyColumns', ['tables'])
: Promise.resolve({ rows: [] }),
]);
geometryColumns = geomCols;
geographyColumns = geogCols;
}
this.feedback({ analysingMessage: 'DBGM-00257 Loading triggers' });
const triggers = await this.analyserQuery('triggers');
this.feedback({ analysingMessage: 'DBGM-00258 Finalizing DB structure' });
const columnColumnsMapped = fkColumns.rows.map(x => ({
pureName: x.pure_name,
schemaName: x.schema_name,
constraintSchema: x.constraint_schema,
// Pre-build lookup maps for O(1) access instead of O(n) scanning per table/view
const columnsByTable = _.groupBy(columns.rows, x => `${x.schema_name}.${x.pure_name}`);
const indexcolsByOidAttnum = _.keyBy(indexcols.rows, x => `${x.oid}_${x.attnum}`);
const uniqueNameSet = new Set(uniqueNames.rows.map(x => x.constraint_name));
const indexesByTable = _.groupBy(indexes.rows, x => `${x.schema_name}.${x.table_name}`);
const matviewColumnsByTable = matviewColumns
? _.groupBy(matviewColumns.rows, x => `${x.schema_name}.${x.pure_name}`)
: {};
const columnColumnsMapped = foreignKeys.rows.map(x => ({
pureName: x.table_name,
schemaName: x.table_schema,
constraintName: x.constraint_name,
columnName: x.column_name,
refColumnName: x.ref_column_name,
updateAction: x.update_action,
deleteAction: x.delete_action,
refTableName: x.ref_table_name,
refSchemaName: x.ref_schema_name,
refSchemaName: x.ref_table_schema,
}));
const fkByTable = _.groupBy(columnColumnsMapped, x => `${x.schemaName}.${x.pureName}`);
const pkColumnsMapped = pkColumns.rows.map(x => ({
pureName: x.pure_name,
schemaName: x.schema_name,
@@ -215,6 +178,7 @@ class Analyser extends DatabaseAnalyser {
constraintName: x.constraint_name,
columnName: x.column_name,
}));
const pkByTable = _.groupBy(pkColumnsMapped, x => `${x.schemaName}.${x.pureName}`);
const procedureParameters = routineParametersRows.rows
.filter(i => i.routine_type == 'PROCEDURE')
@@ -252,6 +216,7 @@ class Analyser extends DatabaseAnalyser {
const res = {
tables: tables.rows.map(table => {
const tableKey = `${table.schema_name}.${table.pure_name}`;
const newTable = {
pureName: table.pure_name,
schemaName: table.schema_name,
@@ -259,20 +224,16 @@ class Analyser extends DatabaseAnalyser {
objectId: `tables:${table.schema_name}.${table.pure_name}`,
contentHash: table.hash_code_columns ? `${table.hash_code_columns}-${table.hash_code_constraints}` : null,
};
const tableIndexes = indexesByTable[tableKey] || [];
return {
...newTable,
columns: columns.rows
.filter(col => col.pure_name == table.pure_name && col.schema_name == table.schema_name)
.map(col => getColumnInfo(col, newTable, geometryColumns, geographyColumns)),
primaryKey: DatabaseAnalyser.extractPrimaryKeys(newTable, pkColumnsMapped),
foreignKeys: DatabaseAnalyser.extractForeignKeys(newTable, columnColumnsMapped),
indexes: indexes.rows
.filter(
x =>
x.table_name == table.pure_name &&
x.schema_name == table.schema_name &&
!uniqueNames.rows.find(y => y.constraint_name == x.index_name)
)
columns: (columnsByTable[tableKey] || []).map(col =>
getColumnInfo(col, newTable, geometryColumns, geographyColumns)
),
primaryKey: DatabaseAnalyser.extractPrimaryKeys(newTable, pkByTable[tableKey] || []),
foreignKeys: DatabaseAnalyser.extractForeignKeys(newTable, fkByTable[tableKey] || []),
indexes: tableIndexes
.filter(x => !uniqueNameSet.has(x.index_name))
.map(idx => {
const indOptionSplit = idx.indoption.split(' ');
return {
@@ -281,7 +242,7 @@ class Analyser extends DatabaseAnalyser {
columns: _.compact(
idx.indkey
.split(' ')
.map(colid => indexcols.rows.find(col => col.oid == idx.oid && col.attnum == colid))
.map(colid => indexcolsByOidAttnum[`${idx.oid}_${colid}`])
.filter(col => col != null)
.map((col, colIndex) => ({
columnName: col.column_name,
@@ -290,19 +251,14 @@ class Analyser extends DatabaseAnalyser {
),
};
}),
uniques: indexes.rows
.filter(
x =>
x.table_name == table.pure_name &&
x.schema_name == table.schema_name &&
uniqueNames.rows.find(y => y.constraint_name == x.index_name)
)
uniques: tableIndexes
.filter(x => uniqueNameSet.has(x.index_name))
.map(idx => ({
constraintName: idx.index_name,
columns: _.compact(
idx.indkey
.split(' ')
.map(colid => indexcols.rows.find(col => col.oid == idx.oid && col.attnum == colid))
.map(colid => indexcolsByOidAttnum[`${idx.oid}_${colid}`])
.filter(col => col != null)
.map(col => ({
columnName: col.column_name,
@@ -317,9 +273,7 @@ class Analyser extends DatabaseAnalyser {
schemaName: view.schema_name,
contentHash: view.hash_code,
createSql: `CREATE VIEW "${view.schema_name}"."${view.pure_name}"\nAS\n${view.create_sql}`,
columns: columns.rows
.filter(col => col.pure_name == view.pure_name && col.schema_name == view.schema_name)
.map(col => getColumnInfo(col)),
columns: (columnsByTable[`${view.schema_name}.${view.pure_name}`] || []).map(col => getColumnInfo(col)),
})),
matviews: matviews
? matviews.rows.map(matview => ({
@@ -328,8 +282,7 @@ class Analyser extends DatabaseAnalyser {
schemaName: matview.schema_name,
contentHash: matview.hash_code,
createSql: `CREATE MATERIALIZED VIEW "${matview.schema_name}"."${matview.pure_name}"\nAS\n${matview.definition}`,
columns: matviewColumns.rows
.filter(col => col.pure_name == matview.pure_name && col.schema_name == matview.schema_name)
columns: (matviewColumnsByTable[`${matview.schema_name}.${matview.pure_name}`] || [])
.map(col => getColumnInfo(col)),
}))
: undefined,
@@ -396,14 +349,31 @@ class Analyser extends DatabaseAnalyser {
}
async _getFastSnapshot() {
const viewModificationsQueryData = await this.analyserQuery('viewModifications');
const matviewModificationsQueryData = this.driver.dialect.materializedViews
? await this.analyserQuery('matviewModifications')
: null;
const routineModificationsQueryData = await this.analyserQuery('routineModifications');
const useInfoSchema = this.driver.__analyserInternals.useInfoSchemaRoutines;
const routineModificationsQueryName = useInfoSchema ? 'routineModificationsInfoSchema' : 'routineModifications';
// Run all modification queries in parallel
const [
tableModificationsQueryData,
viewModificationsQueryData,
matviewModificationsQueryData,
routineModificationsQueryData,
] = await Promise.all([
this.analyserQuery('tableModifications'),
this.analyserQuery('viewModifications'),
this.driver.dialect.materializedViews
? this.analyserQuery('matviewModifications')
: Promise.resolve(null),
this.analyserQuery(routineModificationsQueryName),
]);
return {
tables: null,
tables: tableModificationsQueryData.rows.map(x => ({
objectId: `tables:${x.schema_name}.${x.pure_name}`,
pureName: x.pure_name,
schemaName: x.schema_name,
contentHash: `${x.hash_code_columns}-${x.hash_code_constraints}`,
})),
views: viewModificationsQueryData.rows.map(x => ({
objectId: `views:${x.schema_name}.${x.pure_name}`,
pureName: x.pure_name,

View File

@@ -1,22 +1,38 @@
module.exports = `
select
table_schema as "schema_name",
table_name as "pure_name",
column_name as "column_name",
is_nullable as "is_nullable",
data_type as "data_type",
character_maximum_length as "char_max_length",
numeric_precision as "numeric_precision",
numeric_scale as "numeric_scale",
column_default as "default_value"
from information_schema.columns
where
table_schema !~ '^_timescaledb_'
and (
('tables:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION
or
('views:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION
)
and table_schema =SCHEMA_NAME_CONDITION
order by ordinal_position
SELECT
n.nspname AS "schema_name",
c.relname AS "pure_name",
a.attname AS "column_name",
CASE WHEN a.attnotnull THEN 'NO' ELSE 'YES' END AS "is_nullable",
format_type(a.atttypid, NULL) AS "data_type",
CASE
WHEN a.atttypmod > 0 AND t.typname IN ('varchar', 'bpchar', 'char') THEN a.atttypmod - 4
WHEN a.atttypmod > 0 AND t.typname IN ('bit', 'varbit') THEN a.atttypmod
ELSE NULL
END AS "char_max_length",
CASE
WHEN a.atttypmod > 0 AND t.typname = 'numeric' THEN ((a.atttypmod - 4) >> 16) & 65535
ELSE NULL
END AS "numeric_precision",
CASE
WHEN a.atttypmod > 0 AND t.typname = 'numeric' THEN (a.atttypmod - 4) & 65535
ELSE NULL
END AS "numeric_scale",
pg_get_expr(d.adbin, d.adrelid) AS "default_value"
FROM pg_catalog.pg_attribute a
JOIN pg_catalog.pg_class c ON c.oid = a.attrelid
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
JOIN pg_catalog.pg_type t ON t.oid = a.atttypid
LEFT JOIN pg_catalog.pg_attrdef d ON d.adrelid = a.attrelid AND d.adnum = a.attnum
WHERE a.attnum > 0
AND NOT a.attisdropped
AND c.relkind IN ('r', 'v', 'p', 'f')
AND n.nspname !~ '^_timescaledb_'
AND (
('tables:' || n.nspname || '.' || c.relname) =OBJECT_ID_CONDITION
OR
('views:' || n.nspname || '.' || c.relname) =OBJECT_ID_CONDITION
)
AND n.nspname =SCHEMA_NAME_CONDITION
ORDER BY a.attnum
`;

View File

@@ -5,7 +5,8 @@ SELECT
con.conname AS constraint_name,
nsp2.nspname AS ref_table_schema,
rel2.relname AS ref_table_name,
conpk.conname AS unique_constraint_name,
att.attname AS column_name,
att2.attname AS ref_column_name,
CASE con.confupdtype
WHEN 'a' THEN 'NO ACTION'
WHEN 'r' THEN 'RESTRICT'
@@ -13,26 +14,26 @@ SELECT
WHEN 'n' THEN 'SET NULL'
WHEN 'd' THEN 'SET DEFAULT'
ELSE con.confupdtype::text
END AS update_action,
CASE con.confdeltype
END AS update_action,
CASE con.confdeltype
WHEN 'a' THEN 'NO ACTION'
WHEN 'r' THEN 'RESTRICT'
WHEN 'c' THEN 'CASCADE'
WHEN 'n' THEN 'SET NULL'
WHEN 'd' THEN 'SET DEFAULT'
ELSE con.confdeltype::text
END AS delete_action
END AS delete_action
FROM pg_constraint con
JOIN pg_class rel ON rel.oid = con.conrelid
JOIN pg_namespace nsp ON nsp.oid = rel.relnamespace
JOIN pg_class rel2 ON rel2.oid = con.confrelid
JOIN pg_namespace nsp2 ON nsp2.oid = rel2.relnamespace
JOIN pg_constraint conpk
ON conpk.conrelid = con.confrelid
AND conpk.conkey = con.confkey
AND conpk.contype IN ('p','u') -- 'p' = primary key, 'u' = unique constraint
WHERE con.contype = 'f' AND ('tables:' || nsp.nspname || '.' || rel.relname) =OBJECT_ID_CONDITION AND nsp.nspname =SCHEMA_NAME_CONDITION
JOIN LATERAL unnest(con.conkey, con.confkey) WITH ORDINALITY AS cols(attnum, ref_attnum, ordinal_position) ON TRUE
JOIN pg_attribute att ON att.attrelid = con.conrelid AND att.attnum = cols.attnum
JOIN pg_attribute att2 ON att2.attrelid = con.confrelid AND att2.attnum = cols.ref_attnum
WHERE con.contype = 'f'
AND ('tables:' || nsp.nspname || '.' || rel.relname) =OBJECT_ID_CONDITION
AND nsp.nspname =SCHEMA_NAME_CONDITION
ORDER BY con.conname, cols.ordinal_position
;
`;

View File

@@ -19,15 +19,16 @@ const triggers = require('./triggers');
const listDatabases = require('./listDatabases');
const listVariables = require('./listVariables');
const listProcesses = require('./listProcesses');
const fk_keyColumnUsage = require('./fk_key_column_usage');
const routinesInfoSchema = require('./routinesInfoSchema');
const proceduresParametersInfoSchema = require('./proceduresParametersInfoSchema');
const routineModificationsInfoSchema = require('./routineModificationsInfoSchema');
const tableModifications = require('./tableModifications');
module.exports = {
columns,
tableList,
viewModifications,
primaryKeys,
fk_keyColumnUsage,
foreignKeys,
views,
routines,
@@ -45,4 +46,8 @@ module.exports = {
listDatabases,
listVariables,
listProcesses,
routinesInfoSchema,
proceduresParametersInfoSchema,
routineModificationsInfoSchema,
tableModifications,
};

View File

@@ -1,31 +1,34 @@
module.exports = `
SELECT
proc.specific_schema AS schema_name,
proc.routine_name AS pure_name,
proc.routine_type as routine_type,
args.parameter_name AS parameter_name,
args.parameter_mode,
args.data_type AS data_type,
args.ordinal_position AS parameter_index,
args.parameter_mode AS parameter_mode
FROM
information_schema.routines proc
LEFT JOIN
information_schema.parameters args
ON proc.specific_schema = args.specific_schema
AND proc.specific_name = args.specific_name
WHERE
proc.specific_schema NOT IN ('pg_catalog', 'information_schema') -- Exclude system schemas
AND args.parameter_name IS NOT NULL
AND proc.routine_type IN ('PROCEDURE', 'FUNCTION') -- Filter for procedures
AND proc.specific_schema !~ '^_timescaledb_'
AND proc.specific_schema =SCHEMA_NAME_CONDITION
SELECT
n.nspname AS "schema_name",
p.proname AS "pure_name",
CASE p.prokind WHEN 'p' THEN 'PROCEDURE' ELSE 'FUNCTION' END AS "routine_type",
a.parameter_name AS "parameter_name",
CASE (p.proargmodes::text[])[a.ordinal_position]
WHEN 'o' THEN 'OUT'
WHEN 'b' THEN 'INOUT'
WHEN 'v' THEN 'VARIADIC'
WHEN 't' THEN 'TABLE'
ELSE 'IN'
END AS "parameter_mode",
pg_catalog.format_type(a.parameter_type, NULL) AS "data_type",
a.ordinal_position AS "parameter_index"
FROM pg_catalog.pg_proc p
JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace
CROSS JOIN LATERAL unnest(
COALESCE(p.proallargtypes, p.proargtypes::oid[]),
p.proargnames
) WITH ORDINALITY AS a(parameter_type, parameter_name, ordinal_position)
WHERE p.prokind IN ('f', 'p')
AND p.proargnames IS NOT NULL
AND a.parameter_name IS NOT NULL
AND n.nspname !~ '^_timescaledb_'
AND n.nspname NOT IN ('pg_catalog', 'information_schema')
AND n.nspname =SCHEMA_NAME_CONDITION
AND (
(routine_type = 'PROCEDURE' AND ('procedures:' || proc.specific_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
OR
(routine_type = 'FUNCTION' AND ('functions:' || proc.specific_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
(p.prokind = 'p' AND ('procedures:' || n.nspname || '.' || p.proname) =OBJECT_ID_CONDITION)
OR
(p.prokind != 'p' AND ('functions:' || n.nspname || '.' || p.proname) =OBJECT_ID_CONDITION)
)
ORDER BY
schema_name,
args.ordinal_position;
ORDER BY n.nspname, a.ordinal_position
`;

View File

@@ -0,0 +1,31 @@
module.exports = `
SELECT
proc.specific_schema AS schema_name,
proc.routine_name AS pure_name,
proc.routine_type as routine_type,
args.parameter_name AS parameter_name,
args.parameter_mode,
args.data_type AS data_type,
args.ordinal_position AS parameter_index,
args.parameter_mode AS parameter_mode
FROM
information_schema.routines proc
LEFT JOIN
information_schema.parameters args
ON proc.specific_schema = args.specific_schema
AND proc.specific_name = args.specific_name
WHERE
proc.specific_schema NOT IN ('pg_catalog', 'information_schema')
AND args.parameter_name IS NOT NULL
AND proc.routine_type IN ('PROCEDURE', 'FUNCTION')
AND proc.specific_schema !~ '^_timescaledb_'
AND proc.specific_schema =SCHEMA_NAME_CONDITION
AND (
(routine_type = 'PROCEDURE' AND ('procedures:' || proc.specific_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
OR
(routine_type = 'FUNCTION' AND ('functions:' || proc.specific_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
)
ORDER BY
schema_name,
args.ordinal_position;
`;

View File

@@ -1,10 +1,13 @@
module.exports = `
select
routine_name as "pure_name",
routine_schema as "schema_name",
$md5Function(routine_definition) as "hash_code",
routine_type as "object_type"
from
information_schema.routines where routine_schema !~ '^_timescaledb_'
and routine_type in ('PROCEDURE', 'FUNCTION') and routine_schema =SCHEMA_NAME_CONDITION
SELECT
p.proname AS "pure_name",
n.nspname AS "schema_name",
$md5Function(p.prosrc) AS "hash_code",
CASE p.prokind WHEN 'p' THEN 'PROCEDURE' ELSE 'FUNCTION' END AS "object_type"
FROM pg_catalog.pg_proc p
JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace
WHERE p.prokind IN ('f', 'p')
AND n.nspname !~ '^_timescaledb_'
AND n.nspname NOT IN ('pg_catalog', 'information_schema')
AND n.nspname =SCHEMA_NAME_CONDITION
`;

View File

@@ -0,0 +1,10 @@
module.exports = `
select
routine_name as "pure_name",
routine_schema as "schema_name",
$md5Function(routine_definition) as "hash_code",
routine_type as "object_type"
from
information_schema.routines where routine_schema !~ '^_timescaledb_'
and routine_type in ('PROCEDURE', 'FUNCTION') and routine_schema =SCHEMA_NAME_CONDITION
`;

View File

@@ -1,19 +1,23 @@
module.exports = `
select
routine_name as "pure_name",
routine_schema as "schema_name",
max(routine_definition) as "definition",
max($md5Function(routine_definition)) as "hash_code",
routine_type as "object_type",
$typeAggFunc(data_type $typeAggParam) as "data_type",
max(external_language) as "language"
from
information_schema.routines where routine_schema !~ '^_timescaledb_'
and routine_schema =SCHEMA_NAME_CONDITION
and (
(routine_type = 'PROCEDURE' and ('procedures:' || routine_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
or
(routine_type = 'FUNCTION' and ('functions:' || routine_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
)
group by routine_name, routine_schema, routine_type
SELECT
p.proname AS "pure_name",
n.nspname AS "schema_name",
max(p.prosrc) AS "definition",
max($md5Function(p.prosrc)) AS "hash_code",
CASE max(p.prokind) WHEN 'p' THEN 'PROCEDURE' ELSE 'FUNCTION' END AS "object_type",
$typeAggFunc(pg_catalog.format_type(p.prorettype, NULL) $typeAggParam) AS "data_type",
max(l.lanname) AS "language"
FROM pg_catalog.pg_proc p
JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace
JOIN pg_catalog.pg_language l ON l.oid = p.prolang
WHERE p.prokind IN ('f', 'p')
AND n.nspname !~ '^_timescaledb_'
AND n.nspname NOT IN ('pg_catalog', 'information_schema')
AND n.nspname =SCHEMA_NAME_CONDITION
AND (
(p.prokind = 'p' AND ('procedures:' || n.nspname || '.' || p.proname) =OBJECT_ID_CONDITION)
OR
(p.prokind != 'p' AND ('functions:' || n.nspname || '.' || p.proname) =OBJECT_ID_CONDITION)
)
GROUP BY p.proname, n.nspname, p.prokind
`;

View File

@@ -0,0 +1,19 @@
module.exports = `
select
routine_name as "pure_name",
routine_schema as "schema_name",
max(routine_definition) as "definition",
max($md5Function(routine_definition)) as "hash_code",
routine_type as "object_type",
$typeAggFunc(data_type $typeAggParam) as "data_type",
max(external_language) as "language"
from
information_schema.routines where routine_schema !~ '^_timescaledb_'
and routine_schema =SCHEMA_NAME_CONDITION
and (
(routine_type = 'PROCEDURE' and ('procedures:' || routine_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
or
(routine_type = 'FUNCTION' and ('functions:' || routine_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
)
group by routine_name, routine_schema, routine_type
`;

View File

@@ -1,10 +1,35 @@
module.exports = `
select infoTables.table_schema as "schema_name", infoTables.table_name as "pure_name",
pg_relation_size('"'||infoTables.table_schema||'"."'||infoTables.table_name||'"') as "size_bytes"
from information_schema.tables infoTables
where infoTables.table_type not like '%VIEW%'
and ('tables:' || infoTables.table_schema || '.' || infoTables.table_name) =OBJECT_ID_CONDITION
and infoTables.table_schema <> 'pg_internal'
and infoTables.table_schema !~ '^_timescaledb_'
and infoTables.table_schema =SCHEMA_NAME_CONDITION
SELECT
n.nspname AS "schema_name",
c.relname AS "pure_name",
pg_relation_size(c.oid) AS "size_bytes",
$md5Function(
COALESCE(
(SELECT $typeAggFunc(
a.attname || ':' || pg_catalog.format_type(a.atttypid, a.atttypmod) || ':' || a.attnotnull::text
$hashColumnAggTail
)
FROM pg_catalog.pg_attribute a
WHERE a.attrelid = c.oid AND a.attnum > 0 AND NOT a.attisdropped),
''
)
) AS "hash_code_columns",
$md5Function(
COALESCE(
(SELECT $typeAggFunc(
con.conname || ':' || con.contype::text
$hashConstraintAggTail
)
FROM pg_catalog.pg_constraint con
WHERE con.conrelid = c.oid),
''
)
) AS "hash_code_constraints"
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind IN ('r', 'p', 'f')
AND ('tables:' || n.nspname || '.' || c.relname) =OBJECT_ID_CONDITION
AND n.nspname <> 'pg_internal'
AND n.nspname !~ '^_timescaledb_'
AND n.nspname =SCHEMA_NAME_CONDITION
`;

View File

@@ -0,0 +1,34 @@
module.exports = `
SELECT
n.nspname AS "schema_name",
c.relname AS "pure_name",
$md5Function(
COALESCE(
(SELECT $typeAggFunc(
a.attname || ':' || pg_catalog.format_type(a.atttypid, a.atttypmod) || ':' || a.attnotnull::text
$hashColumnAggTail
)
FROM pg_catalog.pg_attribute a
WHERE a.attrelid = c.oid AND a.attnum > 0 AND NOT a.attisdropped),
''
)
) AS "hash_code_columns",
$md5Function(
COALESCE(
(SELECT $typeAggFunc(
con.conname || ':' || con.contype::text
$hashConstraintAggTail
)
FROM pg_catalog.pg_constraint con
WHERE con.conrelid = c.oid),
''
)
) AS "hash_code_constraints"
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind IN ('r', 'p', 'f')
AND n.nspname <> 'pg_internal'
AND n.nspname !~ '^_timescaledb_'
AND n.nspname NOT IN ('pg_catalog', 'information_schema')
AND n.nspname =SCHEMA_NAME_CONDITION
`;

View File

@@ -1,8 +1,13 @@
module.exports = `
select
table_name as "pure_name",
table_schema as "schema_name",
$md5Function(view_definition) as "hash_code"
from
information_schema.views where table_schema != 'information_schema' and table_schema != 'pg_catalog' and table_schema !~ '^_timescaledb_' and table_schema =SCHEMA_NAME_CONDITION
SELECT
c.relname AS "pure_name",
n.nspname AS "schema_name",
$md5Function(pg_get_viewdef(c.oid, true)) AS "hash_code"
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind = 'v'
AND n.nspname != 'information_schema'
AND n.nspname != 'pg_catalog'
AND n.nspname !~ '^_timescaledb_'
AND n.nspname =SCHEMA_NAME_CONDITION
`;

View File

@@ -1,11 +1,20 @@
module.exports = `
select
table_name as "pure_name",
table_schema as "schema_name",
view_definition as "create_sql",
$md5Function(view_definition) as "hash_code"
from
information_schema.views
where table_schema !~ '^_timescaledb_' and table_schema =SCHEMA_NAME_CONDITION
and ('views:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION
WITH view_defs AS (
SELECT
c.relname AS pure_name,
n.nspname AS schema_name,
pg_get_viewdef(c.oid, true) AS viewdef
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind = 'v'
AND n.nspname !~ '^_timescaledb_'
AND n.nspname =SCHEMA_NAME_CONDITION
AND ('views:' || n.nspname || '.' || c.relname) =OBJECT_ID_CONDITION
)
SELECT
pure_name AS "pure_name",
schema_name AS "schema_name",
viewdef AS "create_sql",
$md5Function(viewdef) AS "hash_code"
FROM view_defs
`;

View File

@@ -418,6 +418,7 @@ const redshiftDriver = {
},
__analyserInternals: {
skipIndexes: true,
useInfoSchemaRoutines: true,
},
engine: 'redshift@dbgate-plugin-postgres',
title: 'Amazon Redshift',

View File

@@ -207,6 +207,8 @@
"command.datagrid.editCell": "Upravit hodnotu buňky",
"command.datagrid.editJsonDocument": "Upravit řádek jako JSON dokument",
"command.datagrid.editSelection": "Upravit výběr jako tabulku",
"command.datagrid.fetchAll": "Načíst všechny řádky",
"command.datagrid.fetchAll.toolbar": "Načíst vše",
"command.datagrid.filterSelected": "Filtrovat vybranou hodnotu",
"command.datagrid.findColumn": "Najít sloupec",
"command.datagrid.generateSql": "Generovat SQL",
@@ -723,6 +725,11 @@
"datagrid.columnName": "Název sloupce",
"datagrid.columnNameFilter": "Filtr názvu sloupce",
"datagrid.copyAdvanced": "Pokročilé kopírování",
"datagrid.fetchAll.confirm": "Načíst vše",
"datagrid.fetchAll.progress": "Načítání všech řádků... načteno {count}",
"datagrid.fetchAll.progressDb": "Načítání dat z databáze...",
"datagrid.fetchAll.title": "Načíst všechny řádky",
"datagrid.fetchAll.warning": "Tímto se načtou všechny zbývající řádky do paměti. U velkých tabulek to může spotřebovat významné množství paměti a ovlivnit výkon aplikace.",
"datagrid.macros.calculation": "Výpočet",
"datagrid.macros.calculationDescription": "Vlastní výraz. Použijte řádek.název_sloupce pro přístup k hodnotám sloupců, value pro původní hodnotu",
"datagrid.macros.changeTextCase": "Změnit velikost písmen",
@@ -1238,7 +1245,6 @@
"query.groupFilter": "Filtr skupiny",
"query.isolationLevel": "Úroveň izolace",
"query.limitRows": "Omezit na {queryRowsLimit} řádků",
"query.named": ":proměnná",
"query.noParameters": "(žádné parametry)",
"query.noRowsLimit": "(bez limitu řádků)",
"query.orFilter": "NEBO filtr {number}",
@@ -1256,7 +1262,7 @@
"query.sortOrder": "Řazení",
"query.table": "Tabulka",
"query.unlimitedRows": "Neomezený počet řádků",
"query.variable": "#proměnná",
"query.variable": "proměnná",
"queryParameters.editQueryParameters": "Upravit parametry dotazu",
"queryParameters.runQuery": "Spustit dotaz",
"queryParameters.stringValuesMustBeQuoted": "Řetězcové hodnoty musí být 'v uvozovkách'. Můžete použít platné SQL výrazy.",
@@ -1333,6 +1339,7 @@
"settings.confirmations": "Potvrzení",
"settings.confirmations.skipConfirm.collectionDataSave": "Přeskočit potvrzení při ukládání údajů kolekce (NoSQL)",
"settings.confirmations.skipConfirm.tableDataSave": "Přeskočit potvrzení při ukládání údajů tabulky (SQL)",
"settings.confirmations.skipFetchAllConfirm": "Přeskočit potvrzení při načítání všech řádků",
"settings.connection": "Připojení",
"settings.connection.autoRefresh": "Automatické obnovení modelu databáze na pozadí",
"settings.connection.autoRefreshInterval": "Interval mezi automatickým načítáním struktury DB v sekundách",

View File

@@ -207,6 +207,8 @@
"command.datagrid.editCell": "Zellwert bearbeiten",
"command.datagrid.editJsonDocument": "Zeile als JSON-Dokument bearbeiten",
"command.datagrid.editSelection": "Auswahl als Tabelle bearbeiten",
"command.datagrid.fetchAll": "Alle Zeilen laden",
"command.datagrid.fetchAll.toolbar": "Alle laden",
"command.datagrid.filterSelected": "Ausgewählten Wert filtern",
"command.datagrid.findColumn": "Spalte finden",
"command.datagrid.generateSql": "SQL generieren",
@@ -723,6 +725,11 @@
"datagrid.columnName": "Spaltenname",
"datagrid.columnNameFilter": "Spaltenname-Filter",
"datagrid.copyAdvanced": "Erweitert kopieren",
"datagrid.fetchAll.confirm": "Alle laden",
"datagrid.fetchAll.progress": "Alle Zeilen werden geladen... {count} geladen",
"datagrid.fetchAll.progressDb": "Daten werden aus der Datenbank geladen...",
"datagrid.fetchAll.title": "Alle Zeilen laden",
"datagrid.fetchAll.warning": "Dadurch werden alle verbleibenden Zeilen in den Speicher geladen. Bei großen Tabellen kann dies erheblichen Speicher verbrauchen und die Anwendungsleistung beeinträchtigen.",
"datagrid.macros.calculation": "Berechnung",
"datagrid.macros.calculationDescription": "Benutzerdefinierter Ausdruck. Verwenden Sie row.spaltenname für den Zugriff auf Spaltenwerte, value für den ursprünglichen Wert",
"datagrid.macros.changeTextCase": "Textgroß-/Kleinschreibung ändern",
@@ -1238,7 +1245,6 @@
"query.groupFilter": "Gruppenfilter",
"query.isolationLevel": "Isolationsstufe",
"query.limitRows": "Auf {queryRowsLimit} Zeilen begrenzen",
"query.named": ":Variable",
"query.noParameters": "(keine Parameter)",
"query.noRowsLimit": "(Keine Zeilenbegrenzung)",
"query.orFilter": "ODER-Filter {number}",
@@ -1256,7 +1262,7 @@
"query.sortOrder": "Sortierreihenfolge",
"query.table": "Tabelle",
"query.unlimitedRows": "Unbegrenzte Zeilen",
"query.variable": "#Variable",
"query.variable": "Variable",
"queryParameters.editQueryParameters": "Abfrageparameter bearbeiten",
"queryParameters.runQuery": "Abfrage ausführen",
"queryParameters.stringValuesMustBeQuoted": "Zeichenkettenwerte müssen in 'Anführungszeichen' stehen. Sie können gültige SQL-Ausdrücke verwenden.",
@@ -1333,6 +1339,7 @@
"settings.confirmations": "Bestätigungen",
"settings.confirmations.skipConfirm.collectionDataSave": "Bestätigung beim Speichern von Sammlungsdaten überspringen (NoSQL)",
"settings.confirmations.skipConfirm.tableDataSave": "Bestätigung beim Speichern von Tabellendaten überspringen (SQL)",
"settings.confirmations.skipFetchAllConfirm": "Bestätigung beim Laden aller Zeilen überspringen",
"settings.connection": "Verbindung",
"settings.connection.autoRefresh": "Automatische Aktualisierung des Datenbankmodells im Hintergrund",
"settings.connection.autoRefreshInterval": "Intervall zwischen automatischen DB-Strukturaktualisierungen in Sekunden",

View File

@@ -207,6 +207,8 @@
"command.datagrid.editCell": "Edit cell value",
"command.datagrid.editJsonDocument": "Edit row as JSON document",
"command.datagrid.editSelection": "Edit selection as table",
"command.datagrid.fetchAll": "Fetch all rows",
"command.datagrid.fetchAll.toolbar": "Fetch all",
"command.datagrid.filterSelected": "Filter selected value",
"command.datagrid.findColumn": "Find column",
"command.datagrid.generateSql": "Generate SQL",
@@ -723,6 +725,11 @@
"datagrid.columnName": "Column name",
"datagrid.columnNameFilter": "Column name filter",
"datagrid.copyAdvanced": "Copy advanced",
"datagrid.fetchAll.confirm": "Fetch All",
"datagrid.fetchAll.progress": "Fetching all rows... {count} loaded",
"datagrid.fetchAll.progressDb": "Fetching data from database...",
"datagrid.fetchAll.title": "Fetch All Rows",
"datagrid.fetchAll.warning": "This will load all remaining rows into memory. For large tables, this may consume a significant amount of memory and could affect application performance.",
"datagrid.macros.calculation": "Calculation",
"datagrid.macros.calculationDescription": "Custom expression. Use row.column_name for accessing column values, value for original value",
"datagrid.macros.changeTextCase": "Change text case",
@@ -1238,7 +1245,6 @@
"query.groupFilter": "Group filter",
"query.isolationLevel": "Isolation level",
"query.limitRows": "Limit {queryRowsLimit} rows",
"query.named": ":variable",
"query.noParameters": "(no parameters)",
"query.noRowsLimit": "(No rows limit)",
"query.orFilter": "OR Filter {number}",
@@ -1256,7 +1262,7 @@
"query.sortOrder": "Sort order",
"query.table": "Table",
"query.unlimitedRows": "Unlimited rows",
"query.variable": "#variable",
"query.variable": "variable",
"queryParameters.editQueryParameters": "Edit query parameters",
"queryParameters.runQuery": "Run query",
"queryParameters.stringValuesMustBeQuoted": "String values must be 'quoted'. You can use valid SQL expressions.",
@@ -1333,6 +1339,7 @@
"settings.confirmations": "Confirmations",
"settings.confirmations.skipConfirm.collectionDataSave": "Skip confirmation when saving collection data (NoSQL)",
"settings.confirmations.skipConfirm.tableDataSave": "Skip confirmation when saving table data (SQL)",
"settings.confirmations.skipFetchAllConfirm": "Skip confirmation when fetching all rows",
"settings.connection": "Connection",
"settings.connection.autoRefresh": "Automatic refresh of database model on background",
"settings.connection.autoRefreshInterval": "Interval between automatic DB structure reloads in seconds",

View File

@@ -207,6 +207,8 @@
"command.datagrid.editCell": "Editar valor de celda",
"command.datagrid.editJsonDocument": "Editar fila como documento JSON",
"command.datagrid.editSelection": "Editar selección como tabla",
"command.datagrid.fetchAll": "Cargar todas las filas",
"command.datagrid.fetchAll.toolbar": "Cargar todo",
"command.datagrid.filterSelected": "Filtrar valor seleccionado",
"command.datagrid.findColumn": "Buscar columna",
"command.datagrid.generateSql": "Generar SQL",
@@ -723,6 +725,11 @@
"datagrid.columnName": "Nombre de columna",
"datagrid.columnNameFilter": "Filtro de nombre de columna",
"datagrid.copyAdvanced": "Copiar avanzado",
"datagrid.fetchAll.confirm": "Cargar todo",
"datagrid.fetchAll.progress": "Cargando todas las filas... {count} cargadas",
"datagrid.fetchAll.progressDb": "Cargando datos desde la base de datos...",
"datagrid.fetchAll.title": "Cargar todas las filas",
"datagrid.fetchAll.warning": "Esto cargará todas las filas restantes en memoria. Para tablas grandes, esto puede consumir una cantidad significativa de memoria y podría afectar el rendimiento de la aplicación.",
"datagrid.macros.calculation": "Cálculo",
"datagrid.macros.calculationDescription": "Expresión personalizada. Use row.column_name para acceder a valores de columna, value para valor original",
"datagrid.macros.changeTextCase": "Cambiar mayúsculas/minúsculas",
@@ -1238,7 +1245,6 @@
"query.groupFilter": "Filtro de grupo",
"query.isolationLevel": "Nivel de aislamiento",
"query.limitRows": "Limitar {queryRowsLimit} filas",
"query.named": ":variable",
"query.noParameters": "(sin parámetros)",
"query.noRowsLimit": "(Sin límite de filas)",
"query.orFilter": "Filtro OR {number}",
@@ -1256,7 +1262,7 @@
"query.sortOrder": "Orden de clasificación",
"query.table": "Tabla",
"query.unlimitedRows": "Filas ilimitadas",
"query.variable": "#variable",
"query.variable": "variable",
"queryParameters.editQueryParameters": "Editar parámetros de consulta",
"queryParameters.runQuery": "Ejecutar consulta",
"queryParameters.stringValuesMustBeQuoted": "Los valores de cadena deben estar 'entre comillas'. Puede usar expresiones SQL válidas.",
@@ -1333,6 +1339,7 @@
"settings.confirmations": "Confirmaciones",
"settings.confirmations.skipConfirm.collectionDataSave": "Omitir confirmación al guardar datos de colección (NoSQL)",
"settings.confirmations.skipConfirm.tableDataSave": "Omitir confirmación al guardar datos de tabla (SQL)",
"settings.confirmations.skipFetchAllConfirm": "Omitir confirmación al cargar todas las filas",
"settings.connection": "Conexión",
"settings.connection.autoRefresh": "Recarga automática del modelo de base de datos en segundo plano",
"settings.connection.autoRefreshInterval": "Intervalo entre recargas automáticas de estructura de BD en segundos",

View File

@@ -207,6 +207,8 @@
"command.datagrid.editCell": "Modifier la valeur de cellule",
"command.datagrid.editJsonDocument": "Modifier la ligne en tant que document JSON",
"command.datagrid.editSelection": "Modifier la sélection en tant que table",
"command.datagrid.fetchAll": "Charger toutes les lignes",
"command.datagrid.fetchAll.toolbar": "Tout charger",
"command.datagrid.filterSelected": "Filtrer la valeur sélectionnée",
"command.datagrid.findColumn": "Rechercher une colonne",
"command.datagrid.generateSql": "Générer du SQL",
@@ -723,6 +725,11 @@
"datagrid.columnName": "Nom de la colonne",
"datagrid.columnNameFilter": "Filtre de nom de colonne",
"datagrid.copyAdvanced": "Copie avancée",
"datagrid.fetchAll.confirm": "Tout charger",
"datagrid.fetchAll.progress": "Chargement de toutes les lignes... {count} chargées",
"datagrid.fetchAll.progressDb": "Chargement des données depuis la base de données...",
"datagrid.fetchAll.title": "Charger toutes les lignes",
"datagrid.fetchAll.warning": "Cela chargera toutes les lignes restantes en mémoire. Pour les grandes tables, cela peut consommer une quantité importante de mémoire et affecter les performances de l'application.",
"datagrid.macros.calculation": "Calcul",
"datagrid.macros.calculationDescription": "Expression personnalisée. Utilisez row.column_name pour accéder aux valeurs de colonne et value pour la valeur d'origine",
"datagrid.macros.changeTextCase": "Modifier la casse du texte",
@@ -1238,7 +1245,6 @@
"query.groupFilter": "Filtre de groupe",
"query.isolationLevel": "Niveau d'isolation",
"query.limitRows": "Limiter à {queryRowsLimit} lignes",
"query.named": ":variable",
"query.noParameters": "(aucun paramètre)",
"query.noRowsLimit": "(Aucune limite de lignes)",
"query.orFilter": "Filtre OU {number}",
@@ -1256,7 +1262,7 @@
"query.sortOrder": "Ordre de tri",
"query.table": "Table",
"query.unlimitedRows": "Lignes illimitées",
"query.variable": "#variable",
"query.variable": "variable",
"queryParameters.editQueryParameters": "Modifier les paramètres de requête",
"queryParameters.runQuery": "Exécuter la requête",
"queryParameters.stringValuesMustBeQuoted": "Les valeurs de type chaîne doivent être 'entre guillemets'. Vous pouvez utiliser des expressions SQL valides.",
@@ -1333,6 +1339,7 @@
"settings.confirmations": "Confirmations",
"settings.confirmations.skipConfirm.collectionDataSave": "Ignorer la confirmation lors de l'enregistrement des données de collection (NoSQL)",
"settings.confirmations.skipConfirm.tableDataSave": "Ignorer la confirmation lors de l'enregistrement des données de table (SQL)",
"settings.confirmations.skipFetchAllConfirm": "Ignorer la confirmation lors du chargement de toutes les lignes",
"settings.connection": "Connexion",
"settings.connection.autoRefresh": "Rafraîchissement automatique du modèle de base de données en arrière-plan",
"settings.connection.autoRefreshInterval": "Intervalle entre les rechargements automatiques de la structure de BD en secondes",

View File

@@ -207,6 +207,8 @@
"command.datagrid.editCell": "Modifica valore cella",
"command.datagrid.editJsonDocument": "Modifica riga come documento JSON",
"command.datagrid.editSelection": "Modifica selezione come tabella",
"command.datagrid.fetchAll": "Carica tutte le righe",
"command.datagrid.fetchAll.toolbar": "Carica tutto",
"command.datagrid.filterSelected": "Filtra valore selezionato",
"command.datagrid.findColumn": "Trova colonna",
"command.datagrid.generateSql": "Genera SQL",
@@ -723,6 +725,11 @@
"datagrid.columnName": "Nome colonna",
"datagrid.columnNameFilter": "Filtro nome colonna",
"datagrid.copyAdvanced": "Copia avanzato",
"datagrid.fetchAll.confirm": "Carica tutto",
"datagrid.fetchAll.progress": "Caricamento di tutte le righe... {count} caricate",
"datagrid.fetchAll.progressDb": "Caricamento dati dal database...",
"datagrid.fetchAll.title": "Carica tutte le righe",
"datagrid.fetchAll.warning": "Questo caricherà tutte le righe rimanenti in memoria. Per tabelle grandi, potrebbe consumare una quantità significativa di memoria e influire sulle prestazioni dell'applicazione.",
"datagrid.macros.calculation": "Calcolo",
"datagrid.macros.calculationDescription": "Espressione personalizzata. Usa row.column_name per accedere ai valori colonna, value per il valore originale",
"datagrid.macros.changeTextCase": "Cambia maiuscole/minuscole",
@@ -1238,7 +1245,6 @@
"query.groupFilter": "Filtro gruppo",
"query.isolationLevel": "Livello di isolamento",
"query.limitRows": "Limita a {queryRowsLimit} righe",
"query.named": ":variabile",
"query.noParameters": "(nessun parametro)",
"query.noRowsLimit": "(Nessun limite righe)",
"query.orFilter": "Filtro OR {number}",
@@ -1256,7 +1262,7 @@
"query.sortOrder": "Ordinamento",
"query.table": "Tabella",
"query.unlimitedRows": "Righe illimitate",
"query.variable": "#variabile",
"query.variable": "variabile",
"queryParameters.editQueryParameters": "Modifica parametri query",
"queryParameters.runQuery": "Esegui query",
"queryParameters.stringValuesMustBeQuoted": "I valori stringa devono essere 'quoted'. Puoi usare espressioni SQL valide.",
@@ -1333,6 +1339,7 @@
"settings.confirmations": "Conferme",
"settings.confirmations.skipConfirm.collectionDataSave": "Salta conferma quando salvi dati collezione (NoSQL)",
"settings.confirmations.skipConfirm.tableDataSave": "Salta conferma quando salvi dati tabella (SQL)",
"settings.confirmations.skipFetchAllConfirm": "Salta conferma quando carichi tutte le righe",
"settings.connection": "Connessione",
"settings.connection.autoRefresh": "Aggiornamento automatico del modello database in background",
"settings.connection.autoRefreshInterval": "Intervallo tra ricaricamenti automatici struttura DB in secondi",

View File

@@ -207,6 +207,8 @@
"command.datagrid.editCell": "セルの値を編集",
"command.datagrid.editJsonDocument": "行をJSONドキュメントとして編集",
"command.datagrid.editSelection": "選択範囲をテーブルとして編集",
"command.datagrid.fetchAll": "すべての行を取得",
"command.datagrid.fetchAll.toolbar": "すべて取得",
"command.datagrid.filterSelected": "選択した値でフィルター",
"command.datagrid.findColumn": "カラムを検索",
"command.datagrid.generateSql": "SQLを生成",
@@ -723,6 +725,11 @@
"datagrid.columnName": "カラム名",
"datagrid.columnNameFilter": "カラム名フィルター",
"datagrid.copyAdvanced": "高度なコピー",
"datagrid.fetchAll.confirm": "すべて取得",
"datagrid.fetchAll.progress": "すべての行を取得中... {count} 件読み込み済み",
"datagrid.fetchAll.progressDb": "データベースからデータを取得中...",
"datagrid.fetchAll.title": "すべての行を取得",
"datagrid.fetchAll.warning": "これにより、残りのすべての行がメモリに読み込まれます。大きなテーブルの場合、かなりのメモリを消費し、アプリケーションのパフォーマンスに影響を与える可能性があります。",
"datagrid.macros.calculation": "Calculation",
"datagrid.macros.calculationDescription": "Custom expression. Use row.column_name for accessing column values, value for original value",
"datagrid.macros.changeTextCase": "Change text case",
@@ -1238,7 +1245,6 @@
"query.groupFilter": "グループフィルター",
"query.isolationLevel": "分離レベル",
"query.limitRows": "{queryRowsLimit}行に制限",
"query.named": ":variable",
"query.noParameters": "(パラメーターなし)",
"query.noRowsLimit": "(行数制限なし)",
"query.orFilter": "ORフィルター {number}",
@@ -1256,7 +1262,7 @@
"query.sortOrder": "ソート順",
"query.table": "テーブル",
"query.unlimitedRows": "無制限",
"query.variable": "#variable",
"query.variable": "変数",
"queryParameters.editQueryParameters": "クエリパラメーターを編集",
"queryParameters.runQuery": "クエリを実行",
"queryParameters.stringValuesMustBeQuoted": "文字列値は 'クォート' する必要があります。有効なSQL式を使用できます。",
@@ -1333,6 +1339,7 @@
"settings.confirmations": "確認",
"settings.confirmations.skipConfirm.collectionDataSave": "コレクションデータ保存時の確認をスキップ (NoSQL)",
"settings.confirmations.skipConfirm.tableDataSave": "テーブルデータ保存時の確認をスキップ (SQL)",
"settings.confirmations.skipFetchAllConfirm": "すべての行を取得する際の確認をスキップ",
"settings.connection": "接続",
"settings.connection.autoRefresh": "バックグラウンドでデータベースモデルを自動更新",
"settings.connection.autoRefreshInterval": "DB構造の自動再読み込み間隔",

View File

@@ -207,6 +207,8 @@
"command.datagrid.editCell": "셀 값 편집",
"command.datagrid.editJsonDocument": "행을 JSON 문서로 편집",
"command.datagrid.editSelection": "선택 영역을 테이블로 편집",
"command.datagrid.fetchAll": "모든 행 가져오기",
"command.datagrid.fetchAll.toolbar": "모두 가져오기",
"command.datagrid.filterSelected": "선택한 값으로 필터",
"command.datagrid.findColumn": "컬럼 찾기",
"command.datagrid.generateSql": "SQL 생성",
@@ -723,6 +725,11 @@
"datagrid.columnName": "컬럼 이름",
"datagrid.columnNameFilter": "컬럼 이름 필터",
"datagrid.copyAdvanced": "고급 복사",
"datagrid.fetchAll.confirm": "모든 행 가져오기",
"datagrid.fetchAll.progress": "모든 행 가져오는 중... {count}개 로드됨",
"datagrid.fetchAll.progressDb": "데이터베이스에서 데이터 가져오는 중...",
"datagrid.fetchAll.title": "모든 행 가져오기",
"datagrid.fetchAll.warning": "남은 모든 행을 메모리로 로드합니다. 테이블이 큰 경우 상당한 메모리를 사용할 수 있으며 애플리케이션 성능에 영향을 줄 수 있습니다.",
"datagrid.macros.calculation": "계산",
"datagrid.macros.calculationDescription": "사용자 정의 표현식. 컬럼 값에 접근하려면 row.column_name, 원래 값은 value 사용",
"datagrid.macros.changeTextCase": "대소문자 변경",
@@ -1238,7 +1245,6 @@
"query.groupFilter": "그룹 필터",
"query.isolationLevel": "격리 수준",
"query.limitRows": "{queryRowsLimit}행 제한",
"query.named": ":variable",
"query.noParameters": "(매개변수 없음)",
"query.noRowsLimit": "(행 제한 없음)",
"query.orFilter": "OR 필터 {number}",
@@ -1256,7 +1262,7 @@
"query.sortOrder": "정렬 순서",
"query.table": "테이블",
"query.unlimitedRows": "무제한 행",
"query.variable": "#variable",
"query.variable": "변수",
"queryParameters.editQueryParameters": "쿼리 매개변수 편집",
"queryParameters.runQuery": "쿼리 실행",
"queryParameters.stringValuesMustBeQuoted": "문자열 값은 '따옴표'로 감싸야 합니다. 유효한 SQL 표현식을 사용할 수 있습니다.",
@@ -1333,6 +1339,7 @@
"settings.confirmations": "확인",
"settings.confirmations.skipConfirm.collectionDataSave": "컬렉션 데이터 저장 시 확인 건너뛰기(NoSQL)",
"settings.confirmations.skipConfirm.tableDataSave": "테이블 데이터 저장 시 확인 건너뛰기(SQL)",
"settings.confirmations.skipFetchAllConfirm": "모든 행 가져오기 시 확인 건너뛰기",
"settings.connection": "연결",
"settings.connection.autoRefresh": "백그라운드에서 데이터베이스 모델 자동 새로 고침",
"settings.connection.autoRefreshInterval": "자동 DB 구조 재로딩 간격(초)",

View File

@@ -207,6 +207,8 @@
"command.datagrid.editCell": "Editar valor da célula",
"command.datagrid.editJsonDocument": "Editar linha como documento JSON",
"command.datagrid.editSelection": "Editar seleção como tabela",
"command.datagrid.fetchAll": "Buscar todas as linhas",
"command.datagrid.fetchAll.toolbar": "Buscar todas",
"command.datagrid.filterSelected": "Filtrar valor selecionado",
"command.datagrid.findColumn": "Localizar coluna",
"command.datagrid.generateSql": "Gerar SQL",
@@ -723,6 +725,11 @@
"datagrid.columnName": "Nome da coluna",
"datagrid.columnNameFilter": "Filtro de nome de coluna",
"datagrid.copyAdvanced": "Cópia avançada",
"datagrid.fetchAll.confirm": "Buscar todas",
"datagrid.fetchAll.progress": "Buscando todas as linhas... {count} carregadas",
"datagrid.fetchAll.progressDb": "Buscando dados do banco de dados...",
"datagrid.fetchAll.title": "Buscar todas as linhas",
"datagrid.fetchAll.warning": "Isso irá carregar todas as linhas restantes na memória. Para tabelas grandes, isso pode consumir uma quantidade significativa de memória e afetar o desempenho da aplicação.",
"datagrid.macros.calculation": "Cálculo",
"datagrid.macros.calculationDescription": "Expressão personalizada. Use row.nome_coluna para acessar valores de colunas, value para valor original",
"datagrid.macros.changeTextCase": "Alterar maiúsculas/minúsculas",
@@ -1238,7 +1245,6 @@
"query.groupFilter": "Filtro de grupo",
"query.isolationLevel": "Nível de isolamento",
"query.limitRows": "Limitar a {queryRowsLimit} linhas",
"query.named": ":variável",
"query.noParameters": "(sem parâmetros)",
"query.noRowsLimit": "(Sem limite de linhas)",
"query.orFilter": "Filtro OU {number}",
@@ -1256,7 +1262,7 @@
"query.sortOrder": "Ordem de classificação",
"query.table": "Tabela",
"query.unlimitedRows": "Linhas ilimitadas",
"query.variable": "#variável",
"query.variable": "variável",
"queryParameters.editQueryParameters": "Editar parâmetros da consulta",
"queryParameters.runQuery": "Executar consulta",
"queryParameters.stringValuesMustBeQuoted": "Valores de texto devem estar 'entre aspas'. Você pode usar expressões SQL válidas.",
@@ -1333,6 +1339,7 @@
"settings.confirmations": "Confirmações",
"settings.confirmations.skipConfirm.collectionDataSave": "Pular confirmação ao salvar dados de coleção (NoSQL)",
"settings.confirmations.skipConfirm.tableDataSave": "Pular confirmação ao salvar dados de tabela (SQL)",
"settings.confirmations.skipFetchAllConfirm": "Pular confirmação ao buscar todas as linhas",
"settings.connection": "Conexão",
"settings.connection.autoRefresh": "Atualização automática do modelo de banco de dados em segundo plano",
"settings.connection.autoRefreshInterval": "Intervalo entre recarregamentos automáticos da estrutura do BD em segundos",

View File

@@ -207,6 +207,8 @@
"command.datagrid.editCell": "Upraviť hodnotu bunky",
"command.datagrid.editJsonDocument": "Upraviť riadok ako JSON dokument",
"command.datagrid.editSelection": "Upraviť výber ako tabuľku",
"command.datagrid.fetchAll": "Načítať všetky riadky",
"command.datagrid.fetchAll.toolbar": "Načítať všetko",
"command.datagrid.filterSelected": "Filtrovať vybranú hodnotu",
"command.datagrid.findColumn": "Nájsť stĺpec",
"command.datagrid.generateSql": "Generovať SQL",
@@ -723,6 +725,11 @@
"datagrid.columnName": "Názov stĺpca",
"datagrid.columnNameFilter": "Filter názvu stĺpca",
"datagrid.copyAdvanced": "Pokročilé kopírovanie",
"datagrid.fetchAll.confirm": "Načítať všetko",
"datagrid.fetchAll.progress": "Načítavam všetky riadky... {count} načítaných",
"datagrid.fetchAll.progressDb": "Načítavanie dát z databázy...",
"datagrid.fetchAll.title": "Načítať všetky riadky",
"datagrid.fetchAll.warning": "Týmto sa načítajú všetky zostávajúce riadky do pamäte. Pri veľkých tabuľkách to môže spotrebovať značné množstvo pamäte a môže ovplyvniť výkon aplikácie.",
"datagrid.macros.calculation": "Výpočet",
"datagrid.macros.calculationDescription": "Vlastný výraz. Použite riadok.názov_stĺpca pre prístup k hodnotám stĺpcov, value pre pôvodnú hodnotu",
"datagrid.macros.changeTextCase": "Zmeniť veľkosť písmen",
@@ -1238,7 +1245,6 @@
"query.groupFilter": "Filter skupiny",
"query.isolationLevel": "Úroveň izolácie",
"query.limitRows": "Obmedziť na {queryRowsLimit} riadkov",
"query.named": ":premenná",
"query.noParameters": "(žiadne parametre)",
"query.noRowsLimit": "(bez limitu riadkov)",
"query.orFilter": "OR filter {number}",
@@ -1256,7 +1262,7 @@
"query.sortOrder": "Poradie zoradenia",
"query.table": "Tabuľka",
"query.unlimitedRows": "Neobmedzené riadky",
"query.variable": "#premenná",
"query.variable": "premenná",
"queryParameters.editQueryParameters": "Upraviť parametre dotazu",
"queryParameters.runQuery": "Spustiť dotaz",
"queryParameters.stringValuesMustBeQuoted": "Reťazcové hodnoty musia byť 'v úvodzovkách'. Môžete použiť platné SQL výrazy.",
@@ -1333,6 +1339,7 @@
"settings.confirmations": "Potvrdenia",
"settings.confirmations.skipConfirm.collectionDataSave": "Preskočiť potvrdenie pri ukladaní údajov kolekcie (NoSQL)",
"settings.confirmations.skipConfirm.tableDataSave": "Preskočiť potvrdenie pri ukladaní údajov tabuľky (SQL)",
"settings.confirmations.skipFetchAllConfirm": "Preskočiť potvrdenie pri načítaní všetkých riadkov",
"settings.connection": "Pripojenie",
"settings.connection.autoRefresh": "Automatické obnovenie modelu databázy na pozadí",
"settings.connection.autoRefreshInterval": "Interval medzi automatickým načítaním štruktúry DB (v sekundách)",

View File

@@ -207,6 +207,8 @@
"command.datagrid.editCell": "编辑单元格值",
"command.datagrid.editJsonDocument": "将行编辑为 JSON 文档",
"command.datagrid.editSelection": "将选区编辑为表",
"command.datagrid.fetchAll": "获取所有行",
"command.datagrid.fetchAll.toolbar": "获取全部",
"command.datagrid.filterSelected": "筛选选中值",
"command.datagrid.findColumn": "查找列",
"command.datagrid.generateSql": "生成 SQL",
@@ -723,6 +725,11 @@
"datagrid.columnName": "列名",
"datagrid.columnNameFilter": "列名筛选",
"datagrid.copyAdvanced": "高级复制",
"datagrid.fetchAll.confirm": "获取全部",
"datagrid.fetchAll.progress": "正在获取所有行... 已加载 {count}",
"datagrid.fetchAll.progressDb": "正在从数据库获取数据...",
"datagrid.fetchAll.title": "获取所有行",
"datagrid.fetchAll.warning": "这将把所有剩余的行加载到内存中。对于大型表,这可能会占用大量内存,并可能影响应用程序性能。",
"datagrid.macros.calculation": "计算",
"datagrid.macros.calculationDescription": "自定义表达式。使用 row.column_name 访问列值value 访问原始值",
"datagrid.macros.changeTextCase": "更改文本大小写",
@@ -1238,7 +1245,6 @@
"query.groupFilter": "分组筛选",
"query.isolationLevel": "隔离级别",
"query.limitRows": "限制 {queryRowsLimit} 行",
"query.named": ":variable",
"query.noParameters": "(无参数)",
"query.noRowsLimit": "(无行数限制)",
"query.orFilter": "OR 筛选 {number}",
@@ -1256,7 +1262,7 @@
"query.sortOrder": "排序顺序",
"query.table": "表",
"query.unlimitedRows": "不限行数",
"query.variable": "#variable",
"query.variable": "变量",
"queryParameters.editQueryParameters": "编辑查询参数",
"queryParameters.runQuery": "运行查询",
"queryParameters.stringValuesMustBeQuoted": "字符串值必须使用引号括起来。您可以使用有效的 SQL 表达式。",
@@ -1333,6 +1339,7 @@
"settings.confirmations": "确认",
"settings.confirmations.skipConfirm.collectionDataSave": "保存集合数据时跳过确认NoSQL",
"settings.confirmations.skipConfirm.tableDataSave": "保存表数据时跳过确认SQL",
"settings.confirmations.skipFetchAllConfirm": "获取所有行时跳过确认",
"settings.connection": "连接",
"settings.connection.autoRefresh": "后台自动刷新数据库模型",
"settings.connection.autoRefreshInterval": "自动重新加载数据库结构的间隔(秒)",

View File

@@ -57,7 +57,7 @@ jobs:
# Ensure npm 11.5.1 or later is installed
- name: Update npm
run: npm install -g npm@latest
run: npm install -g npm@11.5.1
- name: Remove dbmodel - should be not published
run: |

View File

@@ -40,7 +40,7 @@ jobs:
# Ensure npm 11.5.1 or later is installed
- name: Update npm
run: npm install -g npm@latest
run: npm install -g npm@11.5.1
# - name: Configure NPM token
# env:

View File

@@ -7,7 +7,7 @@ checkout-and-merge-pro:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 5baa88d0ad253537298e911868579bae0835888d
ref: 87c3efdaf83786abee4366dee2c58fea355edc4c
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro

View File

@@ -26,30 +26,37 @@ jobs:
with:
fetch-depth: 1
- _include: checkout-and-merge-pro
- name: yarn install
run: |
cd ../dbgate-merged
yarn install
- name: Integration tests
run: |
cd ../dbgate-merged
cd integration-tests
yarn test:ci
- name: Filter parser tests
if: always()
run: |
cd ../dbgate-merged
cd packages/filterparser
yarn test:ci
- name: Datalib (perspective) tests
if: always()
run: |
cd ../dbgate-merged
cd packages/datalib
yarn test:ci
- name: Tools tests
if: always()
run: |
cd ../dbgate-merged
cd packages/tools
yarn test:ci
@@ -138,3 +145,16 @@ jobs:
FIREBIRD_USE_LEGACY_AUTH: true
ports:
- '3050:3050'
mongodb:
image: mongo:4.0.12
ports:
- '27017:27017'
volumes:
- mongo-data:/data/db
- mongo-config:/data/configdb
dynamodb:
image: amazon/dynamodb-local
ports:
- '8000:8000'