Compare commits

..

94 Commits

Author SHA1 Message Date
SPRINX0\prochazka 100e3fe75f deleted sast workflows 2026-04-08 10:59:29 +02:00
SPRINX0\prochazka af7930cea2 Enhance aggregation functions in SQL queries for improved PostgreSQL compatibility 2026-04-08 10:55:24 +02:00
SPRINX0\prochazka 6b4f6b909c Merge branch 'feature/postgres-optimalization' of https://github.com/dbgate/dbgate into feature/postgres-optimalization 2026-04-08 10:26:35 +02:00
Jan Prochazka 9a6e5cd7cc Update plugins/dbgate-plugin-postgres/src/backend/sql/views.js
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-04-08 10:21:22 +02:00
SPRINX0\prochazka 9f64b6ec7a Merge branch 'master' into feature/postgres-optimalization 2026-04-08 10:20:28 +02:00
Stela Augustinova 77f720e34c Refactor connection handling in extractShellConnection to improve volatile ID management and ensure secure credential handling 2026-04-08 10:20:09 +02:00
Stela Augustinova 168dcb7824 Enhance error handling for connection requests in subprocesses and validate connection ID format 2026-04-08 10:20:09 +02:00
Stela Augustinova 759186a212 Improve error handling for volatile connection responses in subprocess communication 2026-04-08 10:20:09 +02:00
Stela Augustinova 71ed7a76ea Handle errors in volatile connection resolution and remove unused registration function 2026-04-08 10:20:09 +02:00
Stela Augustinova bd939b22c7 Fix volatile connection resolution to prevent multiple resolves 2026-04-08 10:20:09 +02:00
Stela Augustinova c327f77294 Refactor volatile connections handling in connections and runners modules 2026-04-08 10:20:09 +02:00
Stela Augustinova d907d79beb Streamline volatile connections handling and remove unused registration module 2026-04-08 10:20:09 +02:00
Stela Augustinova 93b879927c Implement volatile connections handling in runners and shell modules 2026-04-08 10:20:09 +02:00
Stela Augustinova 0c545d4cf9 Enhance clipboard formatters to skip empty rows, improving data handling in clipboard operations 2026-04-08 10:20:09 +02:00
Stela Augustinova 95c90c1517 Improve clipboard formatters to omit undefined values, enhancing data integrity in exports 2026-04-08 10:20:09 +02:00
CI workflows cb731fa858 chore: auto-update github workflows 2026-04-08 10:20:09 +02:00
Stela Augustinova 9bb3b09ecf SYNC: Add SAST workflow for security scanning using Semgrep 2026-04-08 10:20:09 +02:00
SPRINX0\prochazka 7c8f541d3e deleted sast workflow 2026-04-08 10:18:37 +02:00
Jan Prochazka ce41687382 Merge pull request #1417 from dbgate/feature/auth-error
Implement volatile connections handling in runners and shell modules
2026-04-08 10:14:02 +02:00
Stela Augustinova 4b083dea5c Refactor connection handling in extractShellConnection to improve volatile ID management and ensure secure credential handling 2026-04-07 14:56:29 +02:00
Stela Augustinova c84473c1eb Enhance error handling for connection requests in subprocesses and validate connection ID format 2026-04-07 14:26:58 +02:00
Stela Augustinova 7fc078f3e6 Improve error handling for volatile connection responses in subprocess communication 2026-04-07 14:15:18 +02:00
Stela Augustinova cbbd538248 Handle errors in volatile connection resolution and remove unused registration function 2026-04-07 14:01:13 +02:00
Stela Augustinova 825f6e562b Fix volatile connection resolution to prevent multiple resolves 2026-04-07 13:46:34 +02:00
Stela Augustinova a278afb260 Refactor volatile connections handling in connections and runners modules 2026-04-07 13:42:11 +02:00
Stela Augustinova 2fbeea717c Streamline volatile connections handling and remove unused registration module 2026-04-07 13:26:16 +02:00
Jan Prochazka c7259e4663 Merge pull request #1412 from dbgate/feature/copy-sql
Improve clipboard formatters to omit undefined values, enhancing data…
2026-04-07 13:11:49 +02:00
Stela Augustinova 69a2669342 Implement volatile connections handling in runners and shell modules 2026-04-07 13:06:04 +02:00
CI workflows 42d1ca8fd4 chore: auto-update github workflows 2026-04-07 10:27:40 +00:00
Stela Augustinova 1cf52d8b39 SYNC: Add SAST workflow for security scanning using Semgrep 2026-04-07 10:27:24 +00:00
Jan Prochazka 6e482afab2 v7.1.7-premium-beta.1 2026-04-02 16:39:06 +02:00
SPRINX0\prochazka ddf3295e6d Merge branch 'master' into feature/postgres-optimalization 2026-04-02 16:33:25 +02:00
SPRINX0\prochazka 79e087abd3 Optimize PostgreSQL analysis queries and add support for Info Schema routines 2026-04-02 16:32:36 +02:00
CI workflows a7cf51bdf7 chore: auto-update github workflows 2026-04-02 13:55:33 +00:00
Jan Prochazka dfdb31e2f8 Merge pull request #1413 from dbgate/feature/integration-test-pro
Update test workflow to include directory changes for integration tests
2026-04-02 15:55:14 +02:00
Stela Augustinova 3508ddc3ca Update test workflow to include directory changes for integration tests 2026-04-02 11:02:36 +02:00
Stela Augustinova 137fc6b928 Enhance clipboard formatters to skip empty rows, improving data handling in clipboard operations 2026-04-02 10:29:02 +02:00
Jan Prochazka e6f5295420 Merge pull request #1410 from dbgate/feature/large-fields
Enhance binary size handling in grid cell display
2026-04-01 16:01:23 +02:00
CI workflows 2bb08921c3 chore: auto-update github workflows 2026-04-01 13:55:00 +00:00
Stela Augustinova ee2d0e4c30 Remove unnecessary restart policy for DynamoDB service 2026-04-01 15:54:35 +02:00
Jan Prochazka c43a838572 Merge pull request #1411 from dbgate/feature/unreadable-dropdown
Correct class binding and update style variables in SelectField compo…
2026-04-01 15:53:23 +02:00
CI workflows 17ff6a8013 chore: auto-update github workflows 2026-04-01 13:53:13 +00:00
Stela Augustinova 62ad6a0d08 Remove unnecessary restart policy for MongoDB service 2026-04-01 15:52:48 +02:00
CI workflows 5c049fa867 chore: auto-update github workflows 2026-04-01 13:51:09 +00:00
CI workflows 619f17114a Update pro ref 2026-04-01 13:50:58 +00:00
Stela Augustinova 1c1431014c SYNC: Merge pull request #87 from dbgate/feature/collection-test 2026-04-01 13:50:46 +00:00
Stela Augustinova 9d1d7b7e34 Improve clipboard formatters to omit undefined values, enhancing data integrity in exports 2026-04-01 15:49:35 +02:00
Stela Augustinova f68ca1e786 Correct class binding and update style variables in SelectField component 2026-04-01 13:24:34 +02:00
Stela Augustinova 8d16a30064 Fix message formatting for large binary fields in stringifyCellValue function 2026-04-01 10:55:47 +02:00
Stela Augustinova cf601c33c0 Enhance binary size handling in grid cell display 2026-04-01 10:25:40 +02:00
Jan Prochazka 588cd39d7c Merge pull request #1404 from dbgate/feature/fetch-all-button
Add fetch all button
2026-04-01 09:44:04 +02:00
Stela Augustinova 79ebfa9b7a Add fetchAll command to dataGrid menu 2026-03-31 13:37:06 +02:00
Stela Augustinova 0c6b2746d1 Fix file stream reference in jsldata and remove redundant buffer assignment in LoadingDataGridCore 2026-03-31 08:59:33 +02:00
Stela Augustinova 978972c55c Enhance file path validation in streamRows to include symlink resolution and case normalization, improving security and error handling 2026-03-31 08:31:43 +02:00
Stela Augustinova 37854fc577 Refactor fetchAll to trim lines before parsing, improving error handling for malformed data 2026-03-31 06:54:37 +02:00
Stela Augustinova 5537e193a6 Improve fetchAll error handling and cleanup process during streaming and paginated reads 2026-03-31 06:21:06 +02:00
Stela Augustinova 0d42b2b133 Refactor fetchAll cancel function to improve cleanup process and prevent errors 2026-03-30 15:48:35 +02:00
Stela Augustinova 44bd7972d4 Enhance fetchAll functionality with improved error handling and state management 2026-03-30 14:34:57 +02:00
Stela Augustinova 5143eb39f7 Implement fetchAll functionality with streaming support and error handling 2026-03-30 13:30:12 +02:00
Stela Augustinova cf51883b3e Add checkbox to skip confirmation when fetching all rows 2026-03-26 15:24:25 +01:00
Stela Augustinova 484ca0c78a Reset loaded time reference in reload function 2026-03-26 15:11:11 +01:00
Stela Augustinova 8f5cad0e2c Prevent loading next data when fetching all rows is in progress 2026-03-26 15:03:54 +01:00
Stela Augustinova 988512a571 Update warning message in FetchAllConfirmModal to simplify language 2026-03-26 14:50:09 +01:00
Stela Augustinova f8bd380051 Optimize fetchAllRows by using a local buffer to reduce array copies and improve performance 2026-03-26 14:19:11 +01:00
Stela Augustinova 281131dbba Enhance fetchAll functionality by adding loading state check 2026-03-26 14:07:12 +01:00
Stela Augustinova ea3a61077a v7.1.6 2026-03-26 12:47:09 +01:00
Stela Augustinova d1a898b40d SYNC: Add translations for cloudUnavailable message in multiple languages 2026-03-26 11:11:07 +00:00
Stela Augustinova a521a81ef0 v7.1.6-premium-beta.1 2026-03-26 11:25:13 +01:00
Stela Augustinova 2505c61975 Add fetch all button 2026-03-26 11:24:05 +01:00
Stela Augustinova ab5a54dbb6 SYNC: Merge pull request #89 from dbgate/feature/cloud-error 2026-03-26 10:12:05 +00:00
Stela Augustinova 44ad8fa60a Update CHANGELOG for version 7.1.5 2026-03-25 16:59:13 +01:00
Stela Augustinova 5b27a241d7 v7.1.5 2026-03-25 16:21:59 +01:00
Stela Augustinova 084019ca65 v7.1.5-premium-beta.3 2026-03-25 15:21:43 +01:00
Stela Augustinova ba147af8fe SYNC: v7.1.5-premium-beta.2 2026-03-25 14:08:24 +00:00
Stela Augustinova 1b3f4db07d SYNC: Merge pull request #88 from dbgate/feature/cloud-error 2026-03-25 13:39:00 +00:00
Jan Prochazka c36705d458 Merge pull request #1395 from dbgate/feature/display-uuid
Feature/display UUID
2026-03-25 10:04:58 +01:00
Stela Augustinova 0e126cb8cf Enhance BinData subType handling to support hexadecimal strings and improve validation 2026-03-25 08:32:03 +01:00
Stela Augustinova c48183a539 Enhance base64 to UUID conversion with error handling and regex improvements 2026-03-25 08:23:15 +01:00
Stela Augustinova 50f380dbbe Enhance uuidToBase64 function with validation and improve UUID parsing in parseCellValue 2026-03-24 17:15:32 +01:00
Stela Augustinova 66023a9a68 Validate base64 UUID conversion and enhance handling in stringifyCellValue 2026-03-24 17:06:52 +01:00
Stela Augustinova c3fbc3354c Validate BinData subType to ensure it is an integer between 0 and 255 2026-03-24 16:32:16 +01:00
Jan Prochazka a7d2ed11f3 SYNC: Merge pull request #86 from dbgate/feature/icon-vulnerability 2026-03-23 12:50:27 +00:00
SPRINX0\prochazka 899aec2658 v7.1.5-premium-beta.1 2026-03-20 14:24:11 +01:00
SPRINX0\prochazka 74e47587e2 Merge branch 'master' into feature/postgres-optimalization 2026-03-20 14:23:40 +01:00
Stela Augustinova 6a3dc92572 Add uuid to base64 conversion and enhance cell value parsing for UUIDs 2026-03-20 12:46:50 +01:00
Stela Augustinova e3a4667422 feat: add base64 to UUID conversion and integrate into cell value parsing 2026-03-19 14:50:08 +01:00
Stela Augustinova c4dd99bba9 Changelog 7.1.4 2026-03-19 13:07:44 +01:00
SPRINX0\prochazka cb70f3c318 postgres loading optimalization 2026-03-19 12:17:29 +01:00
Stela Augustinova 588b6f9882 v7.1.4 2026-03-19 12:13:37 +01:00
Stela Augustinova 375f69ca1e v7.1.4-alpha.2 2026-03-19 11:13:29 +01:00
Stela Augustinova a32e5cc139 v7.1.4-alpha.1 2026-03-19 10:56:16 +01:00
CI workflows 8e00137751 chore: auto-update github workflows 2026-03-19 09:33:56 +00:00
Stela Augustinova 003db50833 SYNC: Add missing publish step for rest 2026-03-19 09:33:36 +00:00
Stela Augustinova bc519c2c20 Changelog 7.1.3 2026-03-18 16:06:01 +01:00
67 changed files with 3005 additions and 2669 deletions
+1 -1
View File
@@ -47,7 +47,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 5baa88d0ad253537298e911868579bae0835888d
ref: 6b5e2ff831db9baedb2a43862daa4247810b15de
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
+1 -1
View File
@@ -47,7 +47,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 5baa88d0ad253537298e911868579bae0835888d
ref: 6b5e2ff831db9baedb2a43862daa4247810b15de
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
+1 -1
View File
@@ -39,7 +39,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 5baa88d0ad253537298e911868579bae0835888d
ref: 6b5e2ff831db9baedb2a43862daa4247810b15de
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
+1 -1
View File
@@ -44,7 +44,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 5baa88d0ad253537298e911868579bae0835888d
ref: 6b5e2ff831db9baedb2a43862daa4247810b15de
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
+1 -1
View File
@@ -35,7 +35,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 5baa88d0ad253537298e911868579bae0835888d
ref: 6b5e2ff831db9baedb2a43862daa4247810b15de
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
+4 -1
View File
@@ -56,7 +56,10 @@ jobs:
working-directory: packages/sqltree
run: |
npm publish --tag "$NPM_TAG"
- name: Publish rest
working-directory: packages/rest
run: |
npm publish --tag "$NPM_TAG"
- name: Publish api
working-directory: packages/api
run: |
+1 -1
View File
@@ -30,7 +30,7 @@ jobs:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 5baa88d0ad253537298e911868579bae0835888d
ref: 6b5e2ff831db9baedb2a43862daa4247810b15de
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
+34
View File
@@ -23,26 +23,49 @@ jobs:
- uses: actions/checkout@v3
with:
fetch-depth: 1
- name: Checkout dbgate/dbgate-pro
uses: actions/checkout@v2
with:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 6b5e2ff831db9baedb2a43862daa4247810b15de
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
mv dbgate-pro/* ../dbgate-pro/
cd ..
mkdir dbgate-merged
cd dbgate-pro
cd sync
yarn
node sync.js --nowatch
cd ..
- name: yarn install
run: |
cd ../dbgate-merged
yarn install
- name: Integration tests
run: |
cd ../dbgate-merged
cd integration-tests
yarn test:ci
- name: Filter parser tests
if: always()
run: |
cd ../dbgate-merged
cd packages/filterparser
yarn test:ci
- name: Datalib (perspective) tests
if: always()
run: |
cd ../dbgate-merged
cd packages/datalib
yarn test:ci
- name: Tools tests
if: always()
run: |
cd ../dbgate-merged
cd packages/tools
yarn test:ci
services:
@@ -98,3 +121,14 @@ jobs:
FIREBIRD_USE_LEGACY_AUTH: true
ports:
- '3050:3050'
mongodb:
image: mongo:4.0.12
ports:
- '27017:27017'
volumes:
- mongo-data:/data/db
- mongo-config:/data/configdb
dynamodb:
image: amazon/dynamodb-local
ports:
- '8000:8000'
+196 -41
View File
@@ -1,14 +1,38 @@
# ChangeLog
Builds:
- docker - build
- npm - npm package dbgate-serve
- app - classic electron app
- mac - application for macOS
- linux - application for linux
- win - application for Windows
- docker - build
- npm - npm package dbgate-serve
- app - classic electron app
- mac - application for macOS
- linux - application for linux
- win - application for Windows
## 7.1.5
- FIXED: Issues with cloud and file loading
- ADDED: Support for displaying MongoDB UUID #1394
- ADDED: SVG icon sanitization
## 7.1.4
- FIXED: NPM installation failure #1375
## 7.1.3
- FIXED: "Add files" button in import/export #1373
- FIXED: Importing XLSX files #1379
- ADDED: Ability to set default transaction isolation level for connections #1376
- ADDED: Option to set transaction isolation level directly in Query Tab #1376
- ADDED: Filtering of SQL Scripts by connection and database name #1377
- ADDED: Proxy configuration support for OData, OpenAPI and GraphQL (Premium) #1381
- CHANGED: Updated DuckDB version to 1.5.0 #1386
- FIXED: DuckDB column order in query result #1385
- FIXED: Administration panel not displayed for authorized users (Team Premium) #1374
## 7.1.2
- ADDED: GraphQL chat - AI chat with GraphQL endpoint (Premium)
- FIXED: Error "400 Provider returned error" in Database Chat (Premium)
- CHANGED: Upgraded AI components to latest versions, improved stability and performance of AI features (Premium)
@@ -16,6 +40,7 @@ Builds:
- CHANGED: Upgraded some internal building components (svelte-preprocess, typescript)
## 7.1.1
- CHANGED: Fixed some DynamoDB issues, improved filtering performance
- FIXED: Afilter filter scroll issue #1370
- FIXED: Team Premium - filtering by connection in database and table permissions
@@ -24,10 +49,10 @@ Builds:
- FIXED: Firebird - improved connectivity & table loading #1324
- ADDED: New GraphQL query option, changed GraphQL query icon (Premium)
## 7.1.0
- ADDED: Support for Amazon DynamoDB (Premium)
- ADDED: Connect to API endpoints - OpenAPI (Swagger), GraphQL and oData (Premium)
- ADDED: Connect to API endpoints - OpenAPI (Swagger), GraphQL and oData (Premium)
- FIXED: Redis key list infinite loading when first key hierarchy segment is numeric (e.g. "0:profile:1234") #1363
- FIXED: Sum of PostgreSQL numeric values always 0 #1354
- FIXED: SQL SERVER Table structure key duplication #1351
@@ -37,10 +62,12 @@ Builds:
- CHANGED: Used rolldown bundler instead of legacy rollup
## 7.0.6
- ADDED: Reset password for Team Premium edition
- ADDED: Encrypting passwords sent to frontend when using SHELL_CONNECTION=1 in Docker Community edition #1357
## 7.0.4
- FIXED: MS SQL server export to CSV does not convert bit FALSE to 0 #1276
- ADDED: MySQL FULLTEXT support #1305
- FIXED: Error messages in Chinese will display garbled characters(MS SQL over ODBC) #1321
@@ -53,6 +80,7 @@ Builds:
- CHANGED: Improved custom connection color palette
## 7.0.3
- FIXED: Optimized loading MySQL primary keys #1261
- FIXED: Test connection now works for MS Entra authentication #1315
- FIXED: SQL Server - Unable to use 'Is Empty or Null' or 'Has Not Empty Value' filters on a field with data type TEXT #1338
@@ -64,6 +92,7 @@ Builds:
- CHANGED: Upgraded node for DbGate AWS distribution
## 7.0.1
- FIXED: Foreign key actions not detected on PostgreSQL #1323
- FIXED: Vulnerabilities in bundled dependencies: axios, cross-spawn, glob #1322
- FIXED: The JsonB field in the cell data view always displays as null. #1320
@@ -74,6 +103,7 @@ Builds:
- ADDED: Default editor theme is part of application theme now
## 7.0.0
- CHANGED: New design of application, new theme system
- ADDED: Theme AI assistant - create custom themes using AI (Premium)
- CHANGED: Themes are now defined in JSON files, custom themes could be shared via DbGate Cloud
@@ -89,12 +119,15 @@ Builds:
- ADDED: Widget for currently opened tabs
## 6.8.2
- FIXED: Initialize storage database from environment variables failed with PostgreSQL
## 6.8.1
- FIXED: Won't navigate to the relevant field on click of a field in columns #1303
## 6.8.0
- ADDED: Form cell view for detailed data inspection and editing in data grids, with multi-row bulk editing support
- CHANGED: Cell data sidebar moved to right side, now is part of data grid
- FIXED: Improved widget resizing algorithm
@@ -113,11 +146,13 @@ Builds:
- ADDED: Import connections from environment variables (Team Premium)
## 6.7.3
- FIXED: Fixed problem in analyser core - in PostgreSQL, after dropping table, dropped table still appeared in structure
- FIXED: PostgreSQL numeric columns do not align right #1254
- ADDED: Custom thousands separator #1213
## 6.7.2
- CHANGED: Settings modal redesign - now is settings opened in tab instead of modal, similarily as in VSCode
- FIXED: Fixed search in table shortcuts #1273
- CHANGED: Improved foreign key editor UX
@@ -127,6 +162,7 @@ Builds:
- CHANGED: Improved storage of settings, especially for Team Premium edition
## 6.7.1
- ADDED: LANGUAGE environment variable for the web version. #1266
- ADDED: New localizations (Italian, Portugese (Brazil), Japanese)
- ADDED: Option to detect language from browser settings in web version
@@ -140,6 +176,7 @@ Builds:
- ADDED: Other files types supported in Team Premium edition (diagrams, query design, perspectives, import/export jobs, shell scripts, database compare jobs)
## 6.7.0
- ADDED: Added localization support, now you can use DbGate in multiple languages (French, Spanish, German, Czech, Slovak, Simplified Chinese) #347 #705 #939 #1079
- CHANGED: Solved many issues with binary fields, huge performance improvements in binary fields processing
- FIXED: Export to CSV produces empty file #1247
@@ -153,13 +190,16 @@ Builds:
- FIXED: Horizontal scrolling on macOS trackpad/Magic Mouse #1250
## 6.6.12
- FIXED: Cannot paste license key on Mac (special commands like copy/paste were disabled on license screen)
## 6.6.11
- FIXED: Fixed theming on application startup
- CHANGED: Improved licensing page
## 6.6.10
- FIXED: License from environment variable is not refreshed #1245
- FIXED: connection closing / reconnecting #1237
- ADDED: retain history across multiple queries #1236
@@ -167,19 +207,22 @@ Builds:
- FIXED: Not possible to scroll the data view horizontally by pressing shift and scroll mouse middle button on Mac #453
- FIXED: Expired trial workflow (Premium)
- ADDED: Column name collision resolving #1234 (MySQL)
## 6.6.8
- CHANGED: Windows executable now uses Azure trusted signing certificate
- CHANGED: NPM packages now use GitHub OIDC provenance signing for better security
- CHANGED: Some features moved to Premium edition (master/detail views, FK lookups, column expansion, split view, advanced export/import, data archives, grouping, macros)
## 6.6.6
- ADDED: Allow disable/re-enable filter #1174
- ADDED: Close right side tabs #1219
- ADDED: Ability disable execute current line in query editor #1209
- ADDED: Support for Redis Cluster #1204 (Premium)
## 6.6.5
- ADDED: SQL AI assistant - powered by database chat, could help you to write SQL queries (Premium)
- ADDED: Explain SQL error (powered by AI) (Premium)
- ADDED: Database chat (and SQL AI Assistant) now supports showing charts (Premium)
@@ -188,6 +231,7 @@ Builds:
- FIXED: Cannot open up large JSON file #1215
## 6.6.4
- ADDED: AI Database chat now supports much more LLM models. (Premium)
- ADDED: Possibility to use your own API key with OPENAI-compatible providers (OpenRouter, Antropic...)
- ADDED: Possibility to use self-hosted own LLM (eg. Llama)
@@ -201,11 +245,13 @@ Builds:
- CHANGED: Community edition now supports shared folders in read-only mode
## 6.6.3
- FIXED: Error “db.getCollection(…).renameCollection is not a function” when renaming collection in dbGate #1198
- FIXED: Can't list databases from Azure SQL SERVER #1197
- ADDED: Save zoom level in electron apps
## 6.6.2
- ADDED: List of processes, ability to kill process (Server summary) #1178
- ADDED: Database and table permissions (Team Premium edition)
- ADDED: Redis search box - Scan all #1191
@@ -215,6 +261,7 @@ Builds:
- FIXED: Executing queries for SQLite crash #1195
## 6.6.1
- ADDED: Support for Mongo shell (Premium) - #1114
- FIXED: Support for BLOB in Oracle #1181
- ADDED: Connect to named SQL Server instance #340
@@ -224,12 +271,14 @@ Builds:
- CHANGED: Improved logging system, added related database and connection to logs metadata
## 6.6.0
- ADDED: Database chat - AI powered chatbot, which knows your database (Premium)
- ADDED: Firestore support (Premium)
- REMOVED: Query AI assistant (replaced by Database Chat) (Premium)
- FIXED: Chart permissions were ignored (Premium)
- REMOVED: Query AI assistant (replaced by Database Chat) (Premium)
- FIXED: Chart permissions were ignored (Premium)
## 6.5.6
- ADDED: New object window - quick access to most common functions
- ADDED: Possibility to disable split query by empty line #1162
- ADDED: Possibility to opt out authentication #1152
@@ -238,6 +287,7 @@ Builds:
- FIXED: Fixed some minor problems of charts
## 6.5.5
- ADDED: Administer cloud folder window
- CHANGED: Cloud menu redesign
- ADDED: Audit log (for Team Premium edition)
@@ -247,25 +297,29 @@ Builds:
- ADDED: Added chart data type detection
- FIXED: Fixed chart displaying problems
- FIXED: Fixed exporting chart to HTML
- CHANGED: Choose COUNT measure without selecting underlying ID field (use virtual __count)
- CHANGED: Choose COUNT measure without selecting underlying ID field (use virtual \_\_count)
- FIXED: Problems with authentication administration, especially for Postgres storage
- CHANGED: Anonymous autentification (in Team Premium) is now by default disabled
- CHANGED: Anonymous autentification (in Team Premium) is now by default disabled
## 6.5.3
- CHANGED: Improved DbGate Cloud sign-in workflow
- FIXED: Some fixes and error handling in new charts engine
- ADDED: Charts - ability to choose aggregate function
- CHANGED: Improved About window
## 6.5.2
- CHANGED: Autodetecting charts is disabled by default #1145
- CHANGED: Improved chart displaying workflow
- ADDED: Ability to close chart
## 6.5.1
- FIXED: DbGate Cloud e-mail sign-in method for desktop clients
## 6.5.0
- ADDED: DbGate cloud - online storage for connections, SQL scripts and other objects
- ADDED: Public knowledge base - common SQL scripts for specific DB engines (table sizes, index stats etc.)
- ADDED: Query results could be visualised in charts (Premium)
@@ -286,7 +340,7 @@ Builds:
## 6.4.2
- ADDED: Source label to docker container #1105
- ADDED: Source label to docker container #1105
- FIXED: DbGate restart needed to take effect after trigger is created/deleted on mariadb #1112
- ADDED: View PostgreSQL query console output #1108
- FIXED: Single quote generates MySQL error #1107
@@ -296,6 +350,7 @@ Builds:
- FIXED: Fixed loading Redis keys with :: in key name
## 6.4.0
- ADDED: DuckDB support
- ADDED: Data deployer (Premium)
- ADDED: Compare data between JSON lines file in archive and database table
@@ -317,6 +372,7 @@ Builds:
- CHANGED: Amazon and Azure installations are not auto-upgraded by default
## 6.3.3
- CHANGED: New administration UI, redesigned administration of users, connections and roles
- ADDED: Encrypting passwords in team-premium edition
- ADDED: Show scale bar on map #1090
@@ -326,6 +382,7 @@ Builds:
- ADDED: Line Wrap for JSON viewer #768
### 6.3.2
- ADDED: "Use system theme" switch, use changed system theme without restart #1084
- ADDED: "Skip SETNAME instruction" option for Redis #1077
- FIXED: Clickhouse views are now available even for user with limited permissions #1076
@@ -338,6 +395,7 @@ Builds:
- FIXED: Correctly end connection process after successful/unsuccessful connect
### 6.3.0
- ADDED: Support for libSQL and Turso (Premium)
- ADDED: Native backup and restore database for MySQL and PostgreSQL (Premium)
- REMOVED: DbGate internal dump export for MySQL (replaced with call of mysqldump)
@@ -349,6 +407,7 @@ Builds:
- FIXED: Linux Appimage crash => A JavaScript error occurred in the main process #1065 , #1067
### 6.2.1
- ADDED: Commit/rollback and autocommit in scripts #1039
- FIXED: Doesn't import all the records from MongoDB #1044
- ADDED: Show server name alongside database name in title of the tab group #1041
@@ -361,6 +420,7 @@ Builds:
- CHANGED: Upgraded SQLite engine version
### 6.2.0
- ADDED: Query AI Assistant (Premium)
- ADDED: Cassandra database support
- ADDED: XML cell data view
@@ -373,13 +433,16 @@ Builds:
- CHANGED: Open real executed query, when datagrid shows loading error
### 6.1.6
- FIXED: Hotfix build process for premium edition
### 6.1.5
- FIXED: Serious security hotfix (for Docker and NPM, when using LOGIN and PASSWORD environment variables or LOGIN_PASSWORD_xxx)
- no changes for desktop app and for Team premium edition, when using storage DB
### 6.1.4
- CHANGED: Show Data/Structure button in one place #1015
- ADDED: Data view coloring (every second row) #1014
- ADDED: Pin icon for tab in preview mode (#1013)
@@ -394,11 +457,12 @@ Builds:
- ADDED: Redis JSON format for String values #852
### 6.1.3
- FIXED: Fulltext search now shows correctly columns and SQL code lines
- ADDED: Configuration of SSH tunnel local host (IPv4 vs IPv6). Should fix majority of SSH tunnel problems
- FIXED: Handled SSH tunnel connection error, now it shows error instead of connecting forever
- ADDED: Support of triggers (SQLite)
- ADDED: Create, drop trigger
- ADDED: Create, drop trigger
- ADDED: Support for MySQL scheduled events
- FIXED: Cannot connect to DB using askUser/askPassword mode #995
- FIXED: Filtering in Oracle #992
@@ -406,6 +470,7 @@ Builds:
- ADDED: Introduced E2E Cypress tests, test refactor
### 6.1.1
- ADDED: Trigger support (SQL Server, PostgreSQL, MySQL, Oracle)
- FIXED: PostgreSQL and Oracle export #970
- FIXED: Cursor Becomes Stuck When Escaping "Case" #954
@@ -413,6 +478,7 @@ Builds:
- FIXED: Search in packed list
### 6.1.0
- ADDED: Fulltext search in DB model and connections, highlight searched names
- ADDED: Tab preview mode configuration #963
- CHANGED: Single-click to open server connection/database + ability to configure this #959
@@ -429,6 +495,7 @@ Builds:
- ADDED: Display comment into tables and column list #755
### 6.0.0
- ADDED: Order or filter the indexes for huge tables #922
- ADDED: Empty string filters
- CHANGED: (Premium) Workflow for new installation (used in Docker and AWS distribution)
@@ -461,6 +528,7 @@ Builds:
- ADDED: Show SQL quick view
### 5.5.6
- FIXED: DbGate process consumes 100% after UI closed - Mac, Linux (#917, #915)
- FIXED: Correctly closing connection behind SSH tunnel (#920)
- FIXED: Updating MongoDB documents on MongoDB 4 (#916)
@@ -468,6 +536,7 @@ Builds:
- FIXED: (Premium) Better handling of connection storage errors
### 5.5.5
- ADDED: AWS IAM authentication for MySQL, MariaDB, PostgreSQL (Premium)
- FIXED: Datetime filtering #912
- FIXED: Load redis keys
@@ -478,6 +547,7 @@ Builds:
- FIXED: Save connection params in administration for MS SQL and Postgres storages (Team Premium)
### 5.5.4
- FIXED: correct handling when use LOGIN and PASSWORD env variables #903
- FIXED: fixed problems in dbmodel commandline tool
- ADDED: dbmodel - allow connection defined in environment variables
@@ -489,6 +559,7 @@ Builds:
- ADDED: (Premium) Show purchase button after trial license is expired
### 5.5.3
- FIXED: Separate schema mode #894 - for databases with many schemas
- FIXED: Sort by UUID column in PostgreSQL #895
- ADDED: Load pg_dump outputs #893
@@ -498,9 +569,11 @@ Builds:
- FIXED: MS Entra authentication for Azure SQL
### 5.5.2
- FIXED: MySQL, PostgreSQL readonly connections #900
### 5.5.1
- ADDED: Clickhouse support (#532)
- ADDED: MySQL - specify table engine, show table engine in table list
- FIXED: Hidden primary key name in PK editor for DB engines with anonymous PK (MySQL)
@@ -528,6 +601,7 @@ Builds:
- ADDED: (Premium) MS Entra authentization for Azure SQL databases
### 5.4.4
- CHANGED: Improved autoupdate, notification is now in app
- CHANGED: Default behaviour of autoupdate, new version is downloaded after click of "Download" button
- ADDED: Ability to configure autoupdate (check only, check+download, don't check)
@@ -536,14 +610,17 @@ Builds:
- FIXED: Fixes following issues: #886, #865, #782, #375
### 5.4.2
- FIXED: DbGate now works correctly with Oracle 10g
- FIXED: Fixed update channel for premium edition
### 5.4.1
- FIXED: Broken older plugins #881
- ADDED: Premium edition - "Start trial" button
### 5.4.0
- ADDED: Support for CosmosDB (Premium only)
- ADDED: Administration UI (Premium only)
- ADDED: New application icon
@@ -560,10 +637,12 @@ Builds:
- FIXED: Script with escaped backslash causes error #880
### 5.3.4
- FIXED: On blank system does not start (window does not appear) #862
- FIXED: Missing Execute, Export bar #861
### 5.3.3
- FIXED: The application Window is not visible when opening after changing monitor configuration. #856
- FIXED: Multi column filter is broken for Postgresql #855
- ADDED: Do not display internal timescaledb objects in postgres databases #839
@@ -571,12 +650,14 @@ Builds:
- FIXED: Cannot filter by uuid field in psql #538
### 5.3.1
- FIXED: Column sorting on query tab not working #819
- FIXED: Postgres Connection stays in "Loading database structure" until reloading the page #826
- FIXED: Cannot read properties of undefined (reading 'length') on Tables #824
- FIXED: Redshift doesn't show tables when connected #816
### 5.3.0
- CHANGED: New Oracle driver, much better Oracle support. Works now also in docker distribution
- FIXED: Connection to oracle with service name #809
- ADDED: Connect to redis using a custom username #807
@@ -585,18 +666,20 @@ Builds:
- ADDED: Switch connection for opened file #814
### 5.2.9
- FIXED: PostgresSQL doesn't show tables when connected #793 #805
- FIXED: MongoDB write operations fail #798 #802
- FIXED: Electron app logging lost most of log messages
- FIXED: Connection error with SSH tunnel
- FIXED: Connection error with SSH tunnel
- ADDED: option to disable autoupgrades (with --disable-auto-upgrade)
- ADDED: Send error context to github gist
### 5.2.8
- FIXED: file menu save and save as not working
- FIXED: query editor on import/export screen overlaps with selector
- FIXED: Fixed inconsistencies in max/unmaximize window buttons
- FIXED: shortcut for select all
- FIXED: shortcut for select all
- FIXED: download with auth header
- CHANGED: Upgraded database drivers for mysql, postgres, sqlite, mssql, mongo, redis
- CHANGED: Upgraded electron version (now using v30)
@@ -613,8 +696,8 @@ Builds:
- ADDED: Button for discard/reset changes (#759)
- FIXED: Don't show error dialog when subprocess fails, as DbGate handles this correctly (#751, #746, #542, #272)
### 5.2.7
- FIXED: fix body overflow when context menu height greater than viewport #592
- FIXED: Pass signals in entrypoint.sh #596
- FIXED: Remove missing links to jenasoft #625
@@ -625,6 +708,7 @@ Builds:
- CHANGED: Improved stability of electron client on Windows and Mac (fewer EPIPE errors)
### 5.2.6
- FIXED: DbGate creates a lot of .tmp.node files in the temp directory #561
- FIXED: Typo in datetimeoffset dataType #556
- FIXED: SQL export is using the wrong hour formatting #537
@@ -632,6 +716,7 @@ Builds:
- FIXED: MongoDB password could contain special characters #560
### 5.2.5
- ADDED: Split Windows #394
- FIXED: Postgres index asc/desc #514
- FIXED: Excel export not working since 5.2.3 #511
@@ -640,9 +725,11 @@ Builds:
- FIXED: Solved some minor problems with widget collapsing
### 5.2.4
- FIXED: npm version crash (#508)
### 5.2.3
- ADDED: Search entire table (multi column filter) #491
- ADDED: OracleDB - connection to ports other than default #496
- CHANGED: OracleDB - status of support set to experimental
@@ -674,8 +761,8 @@ Builds:
- FIXED: Fixed some scenarios using tables from different DBs
- FIXED: Sessions with long-running queries are not killed
### 5.2.2
- FIXED: Optimalized load DB structure for PostgreSQL #451
- ADDED: Auto-closing query connections after configurable (15 minutes default) no-activity interval #468
- ADDED: Set application-name connection parameter (for PostgreSQL and MS SQL) for easier identifying of DbGate connections
@@ -686,8 +773,8 @@ Builds:
- FIXED: crash on Windows and Mac after system goes in suspend mode #458
- ADDED: dbmodel standalone NPM package (https://www.npmjs.com/package/dbmodel) - deploy database via commandline tool
### 5.2.1
- FIXED: client_id param in OAuth
- ADDED: OAuth scope parameter
- FIXED: login page - password was not sent, when submitting by pressing ENTER
@@ -695,6 +782,7 @@ Builds:
- FIXED: Export modal - fixed crash when selecting different database
### 5.2.0
- ADDED: Oracle database support #380
- ADDED: OAuth authentication #407
- ADDED: Active directory (Windows) authentication #261
@@ -716,7 +804,7 @@ Builds:
- ADDED: Perspective designer supports joins from MongoDB nested documents and arrays
- FIXED: Perspective designer joins on MongoDB ObjectId fields
- ADDED: Filtering columns in designer (query designer, diagram designer, perspective designer)
- FIXED: Clone MongoDB rows without _id attribute #404
- FIXED: Clone MongoDB rows without \_id attribute #404
- CHANGED: Improved cell view with GPS latitude, longitude fields
- ADDED: SQL: ALTER VIEW and SQL:ALTER PROCEDURE scripts
- ADDED: Ctrl+F5 refreshes data grid also with database structure #428
@@ -725,8 +813,8 @@ Builds:
- ADDED: Rename, remove connection folder, memoize opened state after app restart #425
- FIXED: Show SQLServer alter store procedure #435
### 5.1.6
- ADDED: Connection folders support #274
- ADDED: Keyboard shortcut to hide result window and show/hide the side toolbar #406
- ADDED: Ability to show/hide query results #406
@@ -738,6 +826,7 @@ Builds:
- CHANGED: More strict timeouts to kill database and server connections (reduces resource consumption)
### 5.1.5
- ADDED: Support perspectives for MongoDB - MongoDB query designer
- ADDED: Show JSON content directly in the overview #395
- CHANGED: OSX Command H shortcut for hiding window #390
@@ -748,6 +837,7 @@ Builds:
- ADDED: connect via socket - configurable via environment variables #358
### 5.1.4
- ADDED: Drop database commands #384
- ADDED: Customizable Redis key separator #379
- ADDED: ARM support for docker images
@@ -756,6 +846,7 @@ Builds:
- ADDED: Unsaved marker for SQL files
### 5.1.3
- ADDED: Editing multiline cell values #378 #371 #359
- ADDED: Truncate table #333
- ADDED: Perspectives - show row count
@@ -764,6 +855,7 @@ Builds:
- FIXED: Correct error line numbers returned from queries
### 5.1.2
- FIXED: MongoDb any export function does not work. #373
- ADDED: Query Designer short order more flexibility #372
- ADDED: Form View move between records #370
@@ -777,6 +869,7 @@ Builds:
- ADDED: Perspectives - cells without joined data are gray
### 5.1.1
- ADDED: Perspective designer
- FIXED: NULL,NOT NULL filter datatime columns #356
- FIXED: Recognize computed columns on SQL server #354
@@ -786,32 +879,35 @@ Builds:
- ADDED: Custom editor font size #345
- ADDED: Ability to open perspective files
### 5.1.0
- ADDED: Perspectives (docs: https://dbgate.org/docs/perspectives.html )
- CHANGED: Upgraded SQLite engine version (driver better-sqlite3: 7.6.2)
- CHANGED: Upgraded ElectronJS version (from version 13 to version 17)
- CHANGED: Upgraded all dependencies with current available minor version updates
- CHANGED: By default, connect on click #332
- CHANGED: Improved keyboard navigation, when editing table data #331
- ADDED: Option to skip Save changes dialog #329
- ADDED: Option to skip Save changes dialog #329
- FIXED: Unsigned column doesn't work correctly. #324
- FIXED: Connect to MS SQL with domain user now works also under Linux and Mac #305
### 5.0.9
- FIXED: Fixed problem with SSE events on web version
- ADDED: Added menu command "New query designer"
- ADDED: Added menu command "New ER diagram"
### 5.0.8
- ADDED: SQL Server - support using domain logins under Linux and Mac #305
- ADDED: Permissions for connections #318
- ADDED: Ability to change editor front #308
- ADDED: Custom expression in query designer #306
- ADDED: OR conditions in query designer #321
- ADDED: Ability to configure settings view environment variables #304
### 5.0.7
- FIXED: Fixed some problems with SSH tunnel (upgraded SSH client) #315
- FIXED: Fixed MongoDB executing find query #312
- ADDED: Interval filters for date/time columns #311
@@ -819,8 +915,9 @@ Builds:
- ADDED: connecting option Trust server certificate for SQL Server #305
- ADDED: Autorefresh, reload table every x second #303
- FIXED(app): Changing editor theme and font size in Editor Themes #300
### 5.0.6
- ADDED: Search in columns
- CHANGED: Upgraded mongodb driver
- ADDED: Ability to reset view, when data load fails
@@ -828,6 +925,7 @@ Builds:
- FIXED: Fixed some NPM package problems
### 5.0.5
- ADDED: Visualisation geographics objects on map #288
- ADDED: Support for native SQL as default value inside yaml files #296
- FIXED: Postgres boolean columns don't filter correctly #298
@@ -835,10 +933,11 @@ Builds:
- FIXED: Handle error when reading deleted archive
### 5.0.3
- CHANGED: Optimalization of loading DB structure for PostgreSQL, MySQL #273
- CHANGED: Upgraded mysql driver #293
- CHANGED: Better UX when defining SSH port #291
- ADDED: Database object menu from tab
- ADDED: Database object menu from tab
- CHANGED: Ability to close file uploader
- FIXED: Correct handling of NUL values in update keys
- CHANGED: Upgraded MS SQL tedious driver
@@ -848,13 +947,17 @@ Builds:
- ADDED: Configurable object actions #255
- ADDED: Multiple sort criteria #235
- ADDED(app): Open JSON file
### 5.0.2
- FIXED: Cannot use SSH Tunnel after update #291
### 5.0.1
- FIXED(app): Can't Click Sidebar Menu Item #287
### 5.0.0
- CHANGED: Connection workflow, connections are opened on tabs instead of modals
- ADDED: Possibility to connect to DB without saving connection
- ADDED(mac): Support for SQLite on Mac M1
@@ -867,6 +970,7 @@ Builds:
- FIXED: Removed SSL tab on Redis connection (SSL is not supported for Redis)
### 4.8.8
- CHANGED: New app icon
- ADDED: SQL dump, SQL import - also from/to saved queries
- FIXED(mac): Fixed crash when reopening main window
@@ -875,6 +979,7 @@ Builds:
- ADDED(app): Browse tabs in reverse order with Ctrl+Shift+Tab #245
### 4.8.7
- ADDED: MySQL dump/backup database
- ADDED: Import SQL dump from file or from URL
- FIXED(mac): Fixed Cmd+C, Cmd+V, Cmd+X - shortcuts for copy/cut/paste #270
@@ -883,6 +988,7 @@ Builds:
- ADDED: Support for dockerhost network name under docker #271
### 4.8.4
- FIXED(mac): Fixed build for macOS arm64 #259
- FIXED(mac): Fixed opening SQLite files on macOS #243
- FIXED(mac): Fixed opening PEM certificates on macOS #206
@@ -894,6 +1000,7 @@ Builds:
- ADDED: Added menu command "Tools/Change to recent database"
### 4.8.3
- FIXED: filters in query result and NDJSON/archive viewer
- ADDED: Added select values from query result and NDJSON/archive viewer
- ADDED: tab navigation in datagrid #254
@@ -903,19 +1010,24 @@ Builds:
- ADDED: Data type + reference link in column manager
- FIXED(win,linux,mac): Unable to change theme after installing plugin #244
### 4.8.2
- ADDED: implemented missing redis search key logic
### 4.8.2
### 4.8.1
- FIXED: fixed crash after disconnecting from all DBs
- ADDED: implemented missing redis search key logic
### 4.8.1
- FIXED: fixed crash after disconnecting from all DBs
### 4.8.0
- ADDED: Redis support (support stream type), removed experimental status
- ADDED: Redis readonly support
- ADDED: Explicit NDJSON support, when opening NDJSON/JSON lines file, table data are immediately shown, without necessity to import
- ADDED(win,linux,mac): Opening developer tools when crashing without reload app
### 4.7.4
- ADDED: Experimental Redis support (full support is planned to version 4.8.0)
- ADDED: Experimental Redis support (full support is planned to version 4.8.0)
- ADDED: Read-only connections
- FIXED: MongoDB filters
- ADDED: MongoDB column value selection
@@ -923,13 +1035,14 @@ Builds:
- ADDED: Fuzzy search #246
- ADDED(docker, npm): New permissions
- FIXED(npm): NPM build no longer allocates additional ports
- CHANGED(npm): renamed NPM package dbgate => dbgate-serve
- CHANGED(npm): renamed NPM package dbgate => dbgate-serve
- CHANGED(docker): custom JavaScripts and connections defined in scripts are now prohibited by default, use SHELL_CONNECTION and SHELL_SCRIPTING environment variables for allowing this
- ADDED(docker, npm): Better documentation of environment variables configuration, https://dbgate.org/docs/env-variables.html
- ADDED(docker): support for multiple users with different permissions
- ADDED(docker): logout operation
### 4.7.3
- CHANGED: Export menu redesign, quick export menu merged with old export menu
- REMOVED: Quick export menu
- ADDED: Export column mapping
@@ -944,6 +1057,7 @@ Builds:
- ADDED: NPM dist accepts .env configuration
### 4.7.2
- CHANGED: documentation URL - https://dbgate.org/docs/
- CHANGED: Close button available for all tab groups - #238
- ADDED: Search function for the Keyboard Shortcuts overview - #239
@@ -952,7 +1066,8 @@ Builds:
- FIXED: bug in cache subsystem
### 4.7.1
- FIXED: Fixed connecting to MS SQL server running in docker container from DbGate running in docker container #236
- FIXED: Fixed connecting to MS SQL server running in docker container from DbGate running in docker container #236
- FIXED: Fixed export MongoDB collections into Excel and CSV #240
- ADDED: Added support for docker volumes to persist connections, when not using configuration via env variables #232
- ADDED: DbGate in Docker can run in subdirectory #228
@@ -962,7 +1077,9 @@ Builds:
- ADDED: Improved fullscreen state, title bar with menu is hidden, menu is in hamburger menu, like in web version
- ADDED: Theme choose dialog (added as tab in settings)
- FIXED: Fixed crash when clicking on application layers #231
### 4.7.0
- CHANGED: Changed main menu style, menu and title bar is in one line (+ability to switch to system menu)
- REMOVED: Removed main toolbar, use main menu or tab related bottom tool instead
- ADDED: Added tab related context bottom toolbar
@@ -981,11 +1098,13 @@ Builds:
- ADDED: Better work with JSON lines file, added JSONL editor with preview
### 4.6.3
- FIXED: Fixed Windows build
- FIXED: Fixed crash, when there is invalid value in browser local storage
- FIXED: Fixed plugin description display, where author name or description is not correctly filled
### 4.6.2
- FIXED: Fixed issues of XML import plugin
- ADDED: Split columns macro (available in data sheet editor)
- CHANGED: Accepting non standard plugins names (which doesn't start with dbgate-plugin-)
@@ -997,6 +1116,7 @@ Builds:
- FIXED: Fixed configuring connection to SQLite with environment variables #215
### 4.6.1
- ADDED: Ability to configure SSH tunnel over environment variables #210 (for docker container)
- ADDED: XML export and import
- ADDED: Archive file - show and edit source text file
@@ -1012,20 +1132,23 @@ Builds:
- CHANGED: UX improvements of table editor
### 4.6.0
- ADDED: ER diagrams #118
- Generate diagram from table or for database
- Automatic layout
- Diagram styles - colors, select columns to display, optional displaying data type or nullability
- Export diagram to HTML file
- Generate diagram from table or for database
- Automatic layout
- Diagram styles - colors, select columns to display, optional displaying data type or nullability
- Export diagram to HTML file
- FIXED: Mac latest build link #204
### 4.5.1
- FIXED: MongoId detection
- FIXED: #203 disabled spellchecker
- FIXED: Prevented display filters in form view twice
- FIXED: Query designer fixes
### 4.5.0
- ADDED: #220 functions, materialized views and stored procedures in code completion
- ADDED: Query result in statusbar
- ADDED: Highlight and execute current query
@@ -1043,6 +1166,7 @@ Builds:
- FIXED: Fixed delete dependency cycle detection (delete didn't work for some tables)
### 4.4.4
- FIXED: Database colors
- CHANGED: Precise work with MongoDB ObjectId
- FIXED: Run macro works on MongoDB collection data editor
@@ -1057,6 +1181,7 @@ Builds:
- ADDED: Show change log after app upgrade
### 4.4.3
- ADDED: Connection and database colors
- ADDED: Ability to pin connection or table
- ADDED: MongoDb: create, drop collection from menu
@@ -1074,6 +1199,7 @@ Builds:
- CHANGED: Save widget visibility and size
### 4.4.2
- ADDED: Open SQL script from SQL confirm
- CHANGED: Better looking statusbar
- ADDED: Create table from database popup menu
@@ -1083,6 +1209,7 @@ Builds:
- ADDED: Support for Command key on Mac (#199)
### 4.4.1
- FIXED: #188 Fixed problem with datetime values in PostgreSQL and mysql
- ADDED: #194 Close tabs by DB
- FIXED: Improved form view width calculations
@@ -1096,6 +1223,7 @@ Builds:
- ADDED: Row count information moved into status bar, when only one grid on tab is used (typical case)
### 4.4.0
- ADDED: Database structure compare, export report to HTML
- ADDED: Experimental: Deploy DB structure changes between databases
- ADDED: Lookup dialog, available in table view on columns with foreign key
@@ -1112,21 +1240,25 @@ Builds:
- FIXED: Fixed import into SQLite and PostgreSQL databases, added integration test for this
### 4.3.4
- FIXED: Delete row with binary ID in MySQL (#182)
- ADDED: Using 'ODBC Driver 17 for SQL Server' or 'SQL Server Native Client 11.0', when connecting to MS SQL using windows auth #183
### 4.3.3
- ADDED: Generate SQL from data (#176 - Copy row as INSERT/UPDATE statement)
- ADDED: Datagrid keyboard column operations (Ctrl+F - find column, Ctrl+H - hide column) #180
- FIXED: Make window remember that it was maximized
- FIXED: Fixed lost focus after copy to clipboard and after inserting SQL join
### 4.3.2
- FIXED: Sorted database list in PostgreSQL (#178)
- FIXED: Loading structure of PostgreSQL database, when it contains indexes on expressions (#175)
- ADDED: Hotkey Shift+Alt+F for formatting SQL code
### 4.3.1
- FIXED: #173 Using key phrase for SSH key file connection
- ADDED: #172 Ability to quick search within database names
- ADDED: Database search added to command palette (Ctrl+P)
@@ -1134,24 +1266,28 @@ Builds:
- ADDED: DELETE cascade option - ability to delete all referenced rows, when deleting rows
### 4.3.0
- ADDED: Table structure editor
- ADDED: Index support
- ADDED: Unique constraint support
- ADDED: Context menu for drop/rename table/columns and for drop view/procedure/function
- ADDED: Added support for Windows arm64 platform
- FIXED: Search by _id in MongoDB
- FIXED: Search by \_id in MongoDB
### 4.2.6
- FIXED: Fixed MongoDB import
- ADDED: Configurable thousands separator #136
- ADDED: Using case insensitive text search in postgres
### 4.2.5
- FIXED: Fixed crash when using large model on some installations
- FIXED: Postgre SQL CREATE function
- FIXED: Postgre SQL CREATE function
- FIXED: Analysing of MySQL when modifyDate is not known
### 4.2.4
- ADDED: Query history
- ADDED: One-click exports in desktop app
- ADDED: JSON array export
@@ -1163,23 +1299,27 @@ Builds:
- CHANGED: Introduced package dbgate-query-splitter, instead of sql-query-identifier and @verycrazydog/mysql-parse
### 4.2.3
- ADDED: ARM builds for MacOS and Linux
- ADDED: Filter by columns in form view
### 4.2.2
- CHANGED: Further startup optimalization (approx. 2 times quicker start of electron app)
### 4.2.1
- FIXED: Fixed+optimalized app startup (esp. on Windows)
### 4.2.0
- ADDED: Support of SQLite database
- ADDED: Support of Amazon Redshift database
- ADDED: Support of CockroachDB
- CHANGED: DB Model is not auto-refreshed by default, refresh could be invoked from statusbar
- FIXED: Fixed race conditions on startup
- FIXED: Fixed broken style in data grid under strange circumstances
- ADDED: Configure connections with commandline arguments #108
- ADDED: Configure connections with commandline arguments #108
- CHANGED: Optimalized algorithm of incremental DB model updates
- CHANGED: Loading queries from PostgreSQL doesn't need cursors, using streamed query instead
- ADDED: Disconnect command
@@ -1188,9 +1328,11 @@ Builds:
- ADDED: Cosmetic improvements of MariaDB support
### 4.1.11
- FIX: Fixed crash of API process when using SSH tunnel connection (race condition)
### 4.1.11
- FIX: fixed processing postgre query containing $$
- FIX: fixed postgre analysing procedures & functions
- FIX: patched svelte crash #105
@@ -1203,6 +1345,7 @@ Builds:
- CHANGED: Toolbar design - current tab related commands are delimited
### 4.1.10
- ADDED: Default database option in connection settings #96 #92
- FIX: Bundle size optimalization for Windows #97
- FIX: Popup menu placement on smaller displays #94
@@ -1213,22 +1356,32 @@ Builds:
- ADDED: Show database server version in status bar
- ADDED: Show detailed info about error, when connect to database fails
- ADDED: Portable ZIP distribution for Windows #84
### 4.1.9
- FIX: Incorrect row count info in query result #83
### 4.1.1
- CHANGED: Default plugins are now part of installation
### 4.1.0
- ADDED: MongoDB support
- ADDED: Configurable keyboard shortcuts
- ADDED: JSON row cell data view
- FIX: Fixed some problems from migration to Svelte
### 4.0.3
- FIX: fixes for FireFox (mainly incorrect handling of bind:clientHeight, replaced with resizeobserver)
### 4.0.2
- FIX: fixed docker and NPM build
### 4.0.0
- CHANGED: Exchanged React with Svelte. Changed theme colors. Huge speed and memory optimalization
- ADDED: SQL Generator (CREATE, INSERT, DROP)
- ADDED: Command palette (F1). Introduced commands, extended some context menus
@@ -1240,6 +1393,7 @@ Builds:
- FIX: Solved reconnecting expired connection
### 3.9.6
- ADDED: Connect using SSH Tunnel
- ADDED: Connect using SSL
- ADDED: Database connection dialog redesigned
@@ -1249,4 +1403,5 @@ Builds:
- FIX: #62 - import, export executed from SNAP installs didn't work
### 3.9.5
- Start point of changelog
@@ -0,0 +1,536 @@
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
const crypto = require('crypto');
const stream = require('stream');
const { mongoDbEngine, dynamoDbEngine } = require('../engines');
const tableWriter = require('dbgate-api/src/shell/tableWriter');
const tableReader = require('dbgate-api/src/shell/tableReader');
const copyStream = require('dbgate-api/src/shell/copyStream');
function randomCollectionName() {
return 'test_' + crypto.randomBytes(6).toString('hex');
}
// Document-style engines exercised by every describe.each suite below.
// `engine.connection` is the config passed to requireEngineDriver/connect.
const documentEngines = [
  { label: 'MongoDB', engine: mongoDbEngine },
  { label: 'DynamoDB', engine: dynamoDbEngine },
];
// Resolve the driver for the engine's connection config and open a
// connection. Returns { driver, conn } consumed by the helpers below.
async function connectEngine(engine) {
  const engineDriver = requireEngineDriver(engine.connection);
  const connection = await engineDriver.connect(engine.connection);
  return { driver: engineDriver, conn: connection };
}
// Create a test collection. DynamoDB-family engines require an explicit
// key schema (string '_id' partition key); other document engines
// (e.g. MongoDB) only need the collection name.
async function createCollection(driver, conn, collectionName, engine) {
  const isDynamo = engine.connection.engine.startsWith('dynamodb');
  const collection = isDynamo
    ? {
        name: collectionName,
        partitionKey: '_id',
        partitionKeyType: 'S',
      }
    : { name: collectionName };
  await driver.operation(conn, { type: 'createCollection', collection });
}
// Drop a collection, silently ignoring failures — the collection may have
// been removed already (or never created) by the test body.
async function dropCollection(driver, conn, collectionName) {
  await driver
    .operation(conn, { type: 'dropCollection', collection: collectionName })
    .catch(() => {});
}
// Insert one document through the driver's generic updateCollection API
// (empty `document`, payload carried in `fields`).
async function insertDocument(driver, conn, collectionName, doc) {
  const insertCommand = { pureName: collectionName, document: {}, fields: doc };
  return driver.updateCollection(conn, {
    inserts: [insertCommand],
    updates: [],
    deletes: [],
  });
}
// Read up to 1000 documents from the collection (enough for these tests).
async function readAll(driver, conn, collectionName) {
  const query = { pureName: collectionName, limit: 1000 };
  return driver.readCollection(conn, query);
}
// Update the fields of documents matching `condition` via the driver's
// generic updateCollection API.
async function updateDocument(driver, conn, collectionName, condition, fields) {
  const updateCommand = { pureName: collectionName, condition, fields };
  return driver.updateCollection(conn, {
    inserts: [],
    updates: [updateCommand],
    deletes: [],
  });
}
// Delete documents matching `condition` via the driver's generic
// updateCollection API.
async function deleteDocument(driver, conn, collectionName, condition) {
  const deleteCommand = { pureName: collectionName, condition };
  return driver.updateCollection(conn, {
    inserts: [],
    updates: [],
    deletes: [deleteCommand],
  });
}
// End-to-end CRUD smoke tests, parameterized over every engine in
// `documentEngines` (MongoDB, DynamoDB). Each test runs against a fresh,
// randomly-named collection so suites stay isolated.
describe('Collection CRUD', () => {
  describe.each(documentEngines.map(e => [e.label, e.engine]))('%s', (label, engine) => {
    let driver;
    let conn;
    let collectionName;
    // One shared connection per engine for the whole suite.
    beforeAll(async () => {
      const result = await connectEngine(engine);
      driver = result.driver;
      conn = result.conn;
    });
    // Close the shared connection (only if connecting succeeded).
    afterAll(async () => {
      if (conn) {
        await driver.close(conn);
      }
    });
    // Fresh collection per test; dropped again after each test.
    beforeEach(async () => {
      collectionName = randomCollectionName();
      await createCollection(driver, conn, collectionName, engine);
    });
    afterEach(async () => {
      await dropCollection(driver, conn, collectionName);
    });
    // ---- INSERT ----
    test('insert a single document', async () => {
      const res = await insertDocument(driver, conn, collectionName, {
        _id: 'doc1',
        name: 'Alice',
        age: 30,
      });
      expect(res.inserted.length).toBe(1);
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Alice');
      expect(all.rows[0].age).toBe(30);
    });
    test('insert multiple documents', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'a1', name: 'Alice' });
      await insertDocument(driver, conn, collectionName, { _id: 'a2', name: 'Bob' });
      await insertDocument(driver, conn, collectionName, { _id: 'a3', name: 'Charlie' });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(3);
      // Sort names: read order is not guaranteed across engines.
      const names = all.rows.map(r => r.name).sort();
      expect(names).toEqual(['Alice', 'Bob', 'Charlie']);
    });
    test('insert document with nested object', async () => {
      await insertDocument(driver, conn, collectionName, {
        _id: 'nested1',
        name: 'Alice',
        address: { city: 'Prague', zip: '11000' },
      });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].address.city).toBe('Prague');
      expect(all.rows[0].address.zip).toBe('11000');
    });
    // ---- READ ----
    test('read from empty collection returns no rows', async () => {
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(0);
    });
    test('read with limit', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'l1', name: 'A' });
      await insertDocument(driver, conn, collectionName, { _id: 'l2', name: 'B' });
      await insertDocument(driver, conn, collectionName, { _id: 'l3', name: 'C' });
      const limited = await driver.readCollection(conn, {
        pureName: collectionName,
        limit: 2,
      });
      expect(limited.rows.length).toBe(2);
    });
    test('count documents', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'c1', name: 'A' });
      await insertDocument(driver, conn, collectionName, { _id: 'c2', name: 'B' });
      // countDocuments returns `count` instead of `rows`.
      const result = await driver.readCollection(conn, {
        pureName: collectionName,
        countDocuments: true,
      });
      expect(result.count).toBe(2);
    });
    test('count documents on empty collection returns zero', async () => {
      const result = await driver.readCollection(conn, {
        pureName: collectionName,
        countDocuments: true,
      });
      expect(result.count).toBe(0);
    });
    // ---- UPDATE ----
    test('update an existing document', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'u1', name: 'Alice', age: 25 });
      const res = await updateDocument(driver, conn, collectionName, { _id: 'u1' }, { name: 'Alice Updated' });
      expect(res.errorMessage).toBeUndefined();
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Alice Updated');
    });
    test('update does not create new document', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'u2', name: 'Bob' });
      // Updating a non-matching condition must not upsert.
      await updateDocument(driver, conn, collectionName, { _id: 'nonexistent' }, { name: 'Ghost' });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Bob');
    });
    test('update only specified fields', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'u3', name: 'Carol', age: 40, city: 'London' });
      await updateDocument(driver, conn, collectionName, { _id: 'u3' }, { age: 41 });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      // Untouched fields must survive a partial update.
      expect(all.rows[0].name).toBe('Carol');
      expect(all.rows[0].age).toBe(41);
      expect(all.rows[0].city).toBe('London');
    });
    // ---- DELETE ----
    test('delete an existing document', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'd1', name: 'Alice' });
      await insertDocument(driver, conn, collectionName, { _id: 'd2', name: 'Bob' });
      const res = await deleteDocument(driver, conn, collectionName, { _id: 'd1' });
      expect(res.errorMessage).toBeUndefined();
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Bob');
    });
    test('delete non-existing document does not affect collection', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'dx1', name: 'Alice' });
      await deleteDocument(driver, conn, collectionName, { _id: 'nonexistent' });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Alice');
    });
    test('delete all documents leaves empty collection', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'da1', name: 'A' });
      await insertDocument(driver, conn, collectionName, { _id: 'da2', name: 'B' });
      await deleteDocument(driver, conn, collectionName, { _id: 'da1' });
      await deleteDocument(driver, conn, collectionName, { _id: 'da2' });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(0);
    });
    // ---- EDGE CASES ----
    test('insert and read document with empty string field', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'e1', name: '', value: 'test' });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('');
      expect(all.rows[0].value).toBe('test');
    });
    test('insert and read document with numeric values', async () => {
      await insertDocument(driver, conn, collectionName, {
        _id: 'n1',
        intVal: 42,
        floatVal: 3.14,
        zero: 0,
        negative: -10,
      });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].intVal).toBe(42);
      // toBeCloseTo: float round-trip through the driver may lose precision.
      expect(all.rows[0].floatVal).toBeCloseTo(3.14);
      expect(all.rows[0].zero).toBe(0);
      expect(all.rows[0].negative).toBe(-10);
    });
    test('insert and read document with boolean values', async () => {
      await insertDocument(driver, conn, collectionName, {
        _id: 'b1',
        active: true,
        deleted: false,
      });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].active).toBe(true);
      expect(all.rows[0].deleted).toBe(false);
    });
    test('reading non-existing collection returns error or empty', async () => {
      const result = await driver.readCollection(conn, {
        pureName: 'nonexistent_collection_' + crypto.randomBytes(4).toString('hex'),
        limit: 10,
      });
      // Depending on the driver, this may return an error or empty rows
      if (result.errorMessage) {
        expect(typeof result.errorMessage).toBe('string');
      } else {
        expect(result.rows.length).toBe(0);
      }
    });
    test('replace full document via update with document field', async () => {
      await insertDocument(driver, conn, collectionName, { _id: 'r1', name: 'Original', extra: 'data' });
      // `document` (with empty `fields`) replaces the whole stored document.
      await driver.updateCollection(conn, {
        inserts: [],
        updates: [
          {
            pureName: collectionName,
            condition: { _id: 'r1' },
            document: { _id: 'r1', name: 'Replaced' },
            fields: {},
          },
        ],
        deletes: [],
      });
      const all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].name).toBe('Replaced');
    });
    test('insert then update then delete lifecycle', async () => {
      // Insert
      await insertDocument(driver, conn, collectionName, { _id: 'life1', name: 'Lifecycle', status: 'created' });
      let all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(1);
      expect(all.rows[0].status).toBe('created');
      // Update
      await updateDocument(driver, conn, collectionName, { _id: 'life1' }, { status: 'updated' });
      all = await readAll(driver, conn, collectionName);
      expect(all.rows[0].status).toBe('updated');
      // Delete
      await deleteDocument(driver, conn, collectionName, { _id: 'life1' });
      all = await readAll(driver, conn, collectionName);
      expect(all.rows.length).toBe(0);
    });
  });
});
function createDocumentImportStream(documents) {
const pass = new stream.PassThrough({ objectMode: true });
pass.write({ __isStreamHeader: true, __isDynamicStructure: true });
for (const doc of documents) {
pass.write(doc);
}
pass.end();
return pass;
}
function createExportStream() {
const writable = new stream.Writable({ objectMode: true });
writable.resultArray = [];
writable._write = (chunk, encoding, callback) => {
writable.resultArray.push(chunk);
callback();
};
return writable;
}
// Import/export integration suite, run once per document-database engine
// (documentEngines, e.g. MongoDB / DynamoDB). Each test works against a
// freshly created, randomly named collection so tests stay order-independent.
describe('Collection Import/Export', () => {
describe.each(documentEngines.map(e => [e.label, e.engine]))('%s', (label, engine) => {
let driver;
let conn;
let collectionName;
// One shared driver connection per engine for the whole suite.
beforeAll(async () => {
const result = await connectEngine(engine);
driver = result.driver;
conn = result.conn;
});
afterAll(async () => {
if (conn) {
await driver.close(conn);
}
});
// Fresh collection before every test; dropped afterwards.
beforeEach(async () => {
collectionName = randomCollectionName();
await createCollection(driver, conn, collectionName, engine);
});
afterEach(async () => {
await dropCollection(driver, conn, collectionName);
});
// Import: stream header + documents -> tableWriter -> collection.
test('import documents via stream', async () => {
const documents = [
{ _id: 'imp1', name: 'Alice', age: 30 },
{ _id: 'imp2', name: 'Bob', age: 25 },
{ _id: 'imp3', name: 'Charlie', age: 35 },
];
const reader = createDocumentImportStream(documents);
const writer = await tableWriter({
systemConnection: conn,
driver,
pureName: collectionName,
createIfNotExists: true,
});
await copyStream(reader, writer);
const all = await readAll(driver, conn, collectionName);
expect(all.rows.length).toBe(3);
// Sort names: row order from document stores is not guaranteed.
const names = all.rows.map(r => r.name).sort();
expect(names).toEqual(['Alice', 'Bob', 'Charlie']);
});
// Export: collection -> tableReader -> in-memory sink; header rows are filtered out.
test('export documents via stream', async () => {
await insertDocument(driver, conn, collectionName, { _id: 'exp1', name: 'Alice', city: 'Prague' });
await insertDocument(driver, conn, collectionName, { _id: 'exp2', name: 'Bob', city: 'Vienna' });
await insertDocument(driver, conn, collectionName, { _id: 'exp3', name: 'Charlie', city: 'Berlin' });
const reader = await tableReader({
systemConnection: conn,
driver,
pureName: collectionName,
});
const writer = createExportStream();
await copyStream(reader, writer);
const rows = writer.resultArray.filter(x => !x.__isStreamHeader);
expect(rows.length).toBe(3);
const names = rows.map(r => r.name).sort();
expect(names).toEqual(['Alice', 'Bob', 'Charlie']);
});
// Round-trip: verifies that every imported document can be exported back
// with its fields intact (matched by _id, order-independent).
test('import then export round-trip', async () => {
const documents = [
{ _id: 'rt1', name: 'Alice', value: 100 },
{ _id: 'rt2', name: 'Bob', value: 200 },
{ _id: 'rt3', name: 'Charlie', value: 300 },
{ _id: 'rt4', name: 'Diana', value: 400 },
];
// Import
const importReader = createDocumentImportStream(documents);
const importWriter = await tableWriter({
systemConnection: conn,
driver,
pureName: collectionName,
createIfNotExists: true,
});
await copyStream(importReader, importWriter);
// Export
const exportReader = await tableReader({
systemConnection: conn,
driver,
pureName: collectionName,
});
const exportWriter = createExportStream();
await copyStream(exportReader, exportWriter);
const rows = exportWriter.resultArray.filter(x => !x.__isStreamHeader);
expect(rows.length).toBe(4);
const sortedRows = rows.sort((a, b) => a._id.localeCompare(b._id));
for (const doc of documents) {
const found = sortedRows.find(r => r._id === doc._id);
expect(found).toBeDefined();
expect(found.name).toBe(doc.name);
expect(found.value).toBe(doc.value);
}
});
// Nested objects must survive the import (no flattening of sub-documents).
test('import documents with nested objects', async () => {
const documents = [
{ _id: 'nest1', name: 'Alice', address: { city: 'Prague', zip: '11000' } },
{ _id: 'nest2', name: 'Bob', address: { city: 'Vienna', zip: '1010' } },
];
const reader = createDocumentImportStream(documents);
const writer = await tableWriter({
systemConnection: conn,
driver,
pureName: collectionName,
createIfNotExists: true,
});
await copyStream(reader, writer);
const all = await readAll(driver, conn, collectionName);
expect(all.rows.length).toBe(2);
const alice = all.rows.find(r => r.name === 'Alice');
expect(alice.address.city).toBe('Prague');
expect(alice.address.zip).toBe('11000');
});
// 150 documents — exercises any internal batching in the writer; count is
// verified via readCollection's countDocuments rather than fetching rows.
test('import many documents', async () => {
const documents = [];
for (let i = 0; i < 150; i++) {
documents.push({ _id: `many${i}`, name: `Name${i}`, index: i });
}
const reader = createDocumentImportStream(documents);
const writer = await tableWriter({
systemConnection: conn,
driver,
pureName: collectionName,
createIfNotExists: true,
});
await copyStream(reader, writer);
const result = await driver.readCollection(conn, {
pureName: collectionName,
countDocuments: true,
});
expect(result.count).toBe(150);
});
// Edge case: exporting an empty collection yields only the stream header.
test('export empty collection returns no data rows', async () => {
const reader = await tableReader({
systemConnection: conn,
driver,
pureName: collectionName,
});
const writer = createExportStream();
await copyStream(reader, writer);
const rows = writer.resultArray.filter(x => !x.__isStreamHeader);
expect(rows.length).toBe(0);
});
});
});
+17
View File
@@ -123,5 +123,22 @@ services:
retries: 3
start_period: 40s
# MongoDB service for document-database integration tests.
mongodb:
  image: mongo:4.0.12
  restart: always
  volumes:
    - mongo-data:/data/db
    - mongo-config:/data/configdb
  ports:
    # Quoted: unquoted digits-and-colon scalars risk YAML 1.1 sexagesimal
    # parsing on port mappings; Compose convention is to quote them.
    - "27017:27017"
# DynamoDB Local emulator for document-database integration tests.
dynamodb:
  image: amazon/dynamodb-local
  restart: always
  ports:
    - "8000:8000"
volumes:
firebird-data:
mongo-data:
mongo-config:
+23
View File
@@ -738,6 +738,27 @@ const firebirdEngine = {
skipDropReferences: true,
};
/** @type {import('dbgate-types').TestEngineInfo} */
// MongoDB test engine; port 27017 matches the mongodb docker-compose service.
const mongoDbEngine = {
label: 'MongoDB',
connection: {
engine: 'mongo@dbgate-plugin-mongo',
server: 'localhost',
port: 27017,
},
};
/** @type {import('dbgate-types').TestEngineInfo} */
// DynamoDB Local test engine; port 8000 matches the dynamodb docker-compose
// service, and 'onpremise' auth targets the local emulator (no AWS account).
const dynamoDbEngine = {
label: 'DynamoDB',
connection: {
engine: 'dynamodb@dbgate-plugin-dynamodb',
server: 'localhost',
port: 8000,
authType: 'onpremise',
},
};
const enginesOnCi = [
// all engines, which would be run on GitHub actions
mysqlEngine,
@@ -788,3 +809,5 @@ module.exports.libsqlFileEngine = libsqlFileEngine;
module.exports.libsqlWsEngine = libsqlWsEngine;
module.exports.duckdbEngine = duckdbEngine;
module.exports.firebirdEngine = firebirdEngine;
module.exports.mongoDbEngine = mongoDbEngine;
module.exports.dynamoDbEngine = dynamoDbEngine;
+4 -2
View File
@@ -1,5 +1,6 @@
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
const engines = require('./engines');
const { mongoDbEngine, dynamoDbEngine } = require('./engines');
global.DBGATE_PACKAGES = {
'dbgate-tools': require('dbgate-tools'),
'dbgate-sqltree': require('dbgate-sqltree'),
@@ -9,7 +10,7 @@ global.DBGATE_PACKAGES = {
async function connectEngine(engine) {
const { connection } = engine;
const driver = requireEngineDriver(connection);
for (;;) {
for (; ;) {
try {
const conn = await driver.connect(connection);
await driver.getVersion(conn);
@@ -26,7 +27,8 @@ async function connectEngine(engine) {
async function run() {
await new Promise(resolve => setTimeout(resolve, 10000));
await Promise.all(engines.map(engine => connectEngine(engine)));
const documentEngines = [mongoDbEngine, dynamoDbEngine];
await Promise.all([...engines, ...documentEngines].map(engine => connectEngine(engine)));
}
run();
+1 -1
View File
@@ -1,6 +1,6 @@
{
"private": true,
"version": "7.1.3",
"version": "7.1.7-premium-beta.1",
"name": "dbgate-all",
"workspaces": [
"packages/*",
+55 -1
View File
@@ -492,7 +492,61 @@ module.exports = {
return mask && !platformInfo.allowShellConnection ? maskConnection(res) : encryptConnection(res);
}
const res = await this.datastore.get(conid);
return res || null;
if (res) return res;
// In a forked runner-script child process, ask the parent for connections that may be
// volatile (in-memory only, e.g. ask-for-password). We only do this when
// there really is a parent (process.send exists) to avoid an infinite loop
// when the parent's own getCore falls through here.
// The check is intentionally narrow: only runner scripts pass
// --process-display-name script, so connect/session/ssh-forward subprocesses
// are not affected and continue to return null immediately.
if (process.send && processArgs.processDisplayName === 'script') {
const conn = await new Promise(resolve => {
let resolved = false;
const cleanup = () => {
process.removeListener('message', handler);
process.removeListener('disconnect', onDisconnect);
clearTimeout(timeout);
};
const settle = value => {
if (!resolved) {
resolved = true;
cleanup();
resolve(value);
}
};
const handler = message => {
if (message?.msgtype === 'volatile-connection-response' && message.conid === conid) {
settle(message.conn || null);
}
};
const onDisconnect = () => settle(null);
const timeout = setTimeout(() => settle(null), 5000);
// Don't let the timer alone keep the process alive if all other work is done
timeout.unref();
process.on('message', handler);
process.once('disconnect', onDisconnect);
try {
process.send({ msgtype: 'get-volatile-connection', conid });
} catch {
settle(null);
}
});
if (conn) {
volatileConnections[conn._id] = conn; // cache for subsequent calls
return conn;
}
}
return null;
},
get_meta: true,
+74 -1
View File
@@ -1,5 +1,8 @@
const { filterName } = require('dbgate-tools');
const { filterName, getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('jsldata');
const { jsldir, archivedir } = require('../utility/directories');
const fs = require('fs');
const path = require('path');
const lineReader = require('line-reader');
const _ = require('lodash');
const { __ } = require('lodash/fp');
@@ -149,6 +152,10 @@ module.exports = {
getRows_meta: true,
async getRows({ jslid, offset, limit, filters, sort, formatterFunction }) {
const fileName = getJslFileName(jslid);
if (!fs.existsSync(fileName)) {
return [];
}
const datastore = await this.ensureDatastore(jslid, formatterFunction);
return datastore.getRows(offset, limit, _.isEmpty(filters) ? null : filters, _.isEmpty(sort) ? null : sort);
},
@@ -159,6 +166,72 @@ module.exports = {
return fs.existsSync(fileName);
},
streamRows_meta: {
method: 'get',
raw: true,
},
// GET endpoint: streams a JSONL result file to the client as NDJSON.
// Validates the jslid, rejects file:// schemes, and enforces that the
// resolved real path stays inside jsldir()/archivedir() before streaming.
streamRows(req, res) {
const { jslid } = req.query;
if (!jslid) {
res.status(400).json({ apiErrorMessage: 'Missing jslid' });
return;
}
// Reject file:// jslids — they resolve to arbitrary server-side paths
if (jslid.startsWith('file://')) {
res.status(403).json({ apiErrorMessage: 'Forbidden jslid scheme' });
return;
}
const fileName = getJslFileName(jslid);
if (!fs.existsSync(fileName)) {
res.status(404).json({ apiErrorMessage: 'File not found' });
return;
}
// Dereference symlinks and normalize case (Windows) before the allow-list check.
// realpathSync is safe here because existsSync confirmed the file is present.
// path.resolve() alone cannot dereference symlinks, so a symlink inside an allowed
// root could otherwise point to an arbitrary external path.
const normalize = p => (process.platform === 'win32' ? p.toLowerCase() : p);
const resolveRoot = r => { try { return fs.realpathSync(r); } catch { return path.resolve(r); } };
let realFile;
try {
realFile = fs.realpathSync(fileName);
} catch {
res.status(403).json({ apiErrorMessage: 'Forbidden path' });
return;
}
// Trailing path.sep prevents prefix-collision bypasses (e.g. /data/jsl-evil
// matching the /data/jsl root as a plain string prefix).
const allowedRoots = [jsldir(), archivedir()].map(r => normalize(resolveRoot(r)) + path.sep);
const isAllowed = allowedRoots.some(root => normalize(realFile).startsWith(root));
if (!isAllowed) {
logger.warn({ jslid, realFile }, 'DBGM-00000 streamRows rejected path outside allowed roots');
res.status(403).json({ apiErrorMessage: 'Forbidden path' });
return;
}
res.setHeader('Content-Type', 'application/x-ndjson');
res.setHeader('Cache-Control', 'no-cache');
const stream = fs.createReadStream(realFile, 'utf-8');
// Client disconnect: stop reading the file immediately.
req.on('close', () => {
stream.destroy();
});
// Read error mid-stream: 500 if nothing sent yet, otherwise just end the response.
stream.on('error', err => {
logger.error(extractErrorLogData(err), 'DBGM-00000 Error streaming JSONL file');
if (!res.headersSent) {
res.status(500).json({ apiErrorMessage: 'Stream error' });
} else {
res.end();
}
});
stream.pipe(res);
},
getStats_meta: true,
getStats({ jslid }) {
const file = `${getJslFileName(jslid)}.stats`;
+21
View File
@@ -196,6 +196,27 @@ module.exports = {
// @ts-ignore
const { msgtype } = message;
if (handleProcessCommunication(message, subprocess)) return;
if (msgtype === 'get-volatile-connection') {
const connections = require('./connections');
// @ts-ignore
const conid = message.conid;
if (!conid || typeof conid !== 'string') return;
const trySend = payload => {
if (!subprocess.connected) return;
try {
subprocess.send(payload);
} catch {
// child disconnected between the check and the send — ignore
}
};
connections.getCore({ conid }).then(conn => {
trySend({ msgtype: 'volatile-connection-response', conid, conn: conn?.unsaved ? conn : null });
}).catch(err => {
logger.error({ ...extractErrorLogData(err), conid }, 'DBGM-00000 Error resolving volatile connection for child process');
trySend({ msgtype: 'volatile-connection-response', conid, conn: null });
});
return;
}
this[`handle_${msgtype}`](runid, message);
});
return _.pick(newOpened, ['runid']);
+1
View File
@@ -7,6 +7,7 @@ async function runScript(func) {
if (processArgs.checkParent) {
childProcessChecker();
}
try {
await func();
process.exit(0);
+82 -1
View File
@@ -12,6 +12,13 @@ import isPlainObject from 'lodash/isPlainObject';
import md5 from 'blueimp-md5';
export const MAX_GRID_TEXT_LENGTH = 1000; // maximum length of text in grid cell, longer text is truncated
export const MAX_GRID_BINARY_SIZE = 10000; // maximum binary size (base64 chars or byte count) before showing 'too large' in grid cell
// Formats a byte count for display: whole bytes below 1 KB, otherwise
// one decimal place in KB or MB (binary units, 1 KB = 1024 B).
function formatByteSize(bytes: number): string {
  const KB = 1024;
  const MB = KB * KB;
  if (bytes >= MB) {
    return `${(bytes / MB).toFixed(1)} MB`;
  }
  return bytes >= KB ? `${(bytes / KB).toFixed(1)} KB` : `${bytes} B`;
}
export type EditorDataType =
| 'null'
@@ -49,6 +56,26 @@ export function base64ToHex(base64String) {
return '0x' + hexString.toUpperCase();
}
// Decodes a base64 value and, when it is exactly 16 bytes long, renders it
// as a canonical 8-4-4-4-12 UUID string. Returns null for invalid base64
// or for payloads of any other length.
export function base64ToUuid(base64String): string | null {
  let decoded: string;
  try {
    decoded = atob(base64String);
  } catch {
    return null; // not valid base64
  }
  if (decoded.length !== 16) {
    return null; // a UUID is exactly 16 raw bytes
  }
  let hex = '';
  for (let i = 0; i < decoded.length; i++) {
    hex += decoded.charCodeAt(i).toString(16).padStart(2, '0');
  }
  const groups = [hex.substring(0, 8), hex.substring(8, 12), hex.substring(12, 16), hex.substring(16, 20), hex.substring(20, 32)];
  return groups.join('-');
}
// Converts a hex string (no 0x prefix) to the base64 encoding of its bytes.
// Fix: the original called .match(/.{1,2}/g) and chained directly on the
// result, which is null for an empty string — crashing instead of returning
// ''. An empty input now yields '' (btoa of the empty string).
export function hexToBase64(hexString) {
  const pairs = hexString.match(/.{1,2}/g) || [];
  const binaryString = pairs.map(byte => String.fromCharCode(parseInt(byte, 16))).join('');
  return btoa(binaryString);
}
// Regular expressions for canonical UUID strings and for the
// UUID3("...") / UUID("...") wrapper notations used when editing cells.
const uuidPattern = '[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}';
const uuidRegex = new RegExp(`^${uuidPattern}$`);
const uuid3WrapperRegex = new RegExp(`^UUID3\\("(${uuidPattern})"\\)$`);
const uuid4WrapperRegex = new RegExp(`^UUID\\("(${uuidPattern})"\\)$`);

// Encodes a canonical UUID string as base64 of its 16 raw bytes.
// Returns null when the input is empty or not a valid UUID.
export function uuidToBase64(uuid: string): string | null {
  if (!uuid || !uuidRegex.test(uuid)) {
    return null;
  }
  const hexDigits = uuid.replace(/-/g, '');
  let raw = '';
  for (let i = 0; i < hexDigits.length; i += 2) {
    raw += String.fromCharCode(parseInt(hexDigits.substring(i, i + 2), 16));
  }
  return btoa(raw);
}
export function parseCellValue(value, editorTypes?: DataEditorTypesBehaviour) {
if (!_isString(value)) return value;
@@ -65,6 +109,20 @@ export function parseCellValue(value, editorTypes?: DataEditorTypesBehaviour) {
}
if (editorTypes?.parseHexAsBuffer) {
const mUuid3 = value.match(uuid3WrapperRegex);
if (mUuid3) {
const base64Uuid3 = uuidToBase64(mUuid3[1]);
if (base64Uuid3 != null) return { $binary: { base64: base64Uuid3, subType: '03' } };
}
const mUuid4 = value.match(uuid4WrapperRegex);
if (mUuid4) {
const base64Uuid4 = uuidToBase64(mUuid4[1]);
if (base64Uuid4 != null) return { $binary: { base64: base64Uuid4, subType: '04' } };
}
if (uuidRegex.test(value)) {
const base64UuidPlain = uuidToBase64(value);
if (base64UuidPlain != null) return { $binary: { base64: base64UuidPlain, subType: '04' } };
}
const mHex = value.match(/^0x([0-9a-fA-F][0-9a-fA-F])+$/);
if (mHex) {
return {
@@ -266,6 +324,21 @@ export function stringifyCellValue(
if (value === false) return { value: 'false', gridStyle: 'valueCellStyle' };
if (value?.$binary?.base64) {
const subType = value.$binary.subType;
if (subType === '03' || subType === '04') {
const uuidStr = base64ToUuid(value.$binary.base64);
if (uuidStr != null) {
if (intent === 'gridCellIntent' || intent === 'exportIntent' || intent === 'clipboardIntent' || intent === 'stringConversionIntent') {
return { value: uuidStr, gridStyle: 'valueCellStyle' };
}
// For editing intents: tag with subType so parseCellValue can round-trip it
const tag = subType === '03' ? 'UUID3' : 'UUID';
return { value: `${tag}("${uuidStr}")`, gridStyle: 'valueCellStyle' };
}
}
if (intent === 'gridCellIntent' && value.$binary.base64.length > MAX_GRID_BINARY_SIZE) {
return { value: `(Field too large, ${formatByteSize(Math.round(value.$binary.base64.length * 3 / 4))})`, gridStyle: 'nullCellStyle' };
}
return {
value: base64ToHex(value.$binary.base64),
gridStyle: 'valueCellStyle',
@@ -354,6 +427,14 @@ export function stringifyCellValue(
}
}
if (value?.type === 'Buffer' && _isArray(value.data)) {
if (intent === 'gridCellIntent') {
return value.data.length > MAX_GRID_BINARY_SIZE
? { value: `(Field too large, ${formatByteSize(value.data.length)})`, gridStyle: 'nullCellStyle' }
: { value: '0x' + arrayToHexString(value.data), gridStyle: 'valueCellStyle' };
}
}
if (_isArray(value)) {
switch (intent) {
case 'gridCellIntent':
@@ -482,7 +563,7 @@ export function shouldOpenMultilineDialog(value) {
}
export function isJsonLikeLongString(value) {
return _isString(value) && value.length > 100 && value.match(/^\s*\{.*\}\s*$|^\s*\[.*\]\s*$/m);
return _isString(value) && value.length > 100 && value.length <= MAX_GRID_BINARY_SIZE && value.match(/^\s*\{.*\}\s*$|^\s*\[.*\]\s*$/m);
}
export function getIconForRedisType(type) {
+1
View File
@@ -70,6 +70,7 @@
"date-fns": "^4.1.0",
"debug": "^4.3.4",
"dom-to-image": "^2.6.0",
"dompurify": "^3.3.2",
"flatpickr": "^4.6.13",
"fuzzy": "^0.1.3",
"highlight.js": "^11.11.1",
+46 -5
View File
@@ -26,6 +26,18 @@
onClick: () => getCurrentDataGrid().deepRefresh(),
});
registerCommand({
id: 'dataGrid.fetchAll',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name: __t('command.datagrid.fetchAll', { defaultMessage: 'Fetch all rows' }),
toolbarName: __t('command.datagrid.fetchAll.toolbar', { defaultMessage: 'Fetch all' }),
icon: 'icon download',
toolbar: true,
isRelatedToTab: true,
testEnabled: () => getCurrentDataGrid()?.canFetchAll(),
onClick: () => getCurrentDataGrid().fetchAll(),
});
registerCommand({
id: 'dataGrid.revertRowChanges',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
@@ -432,6 +444,7 @@
import CollapseButton from './CollapseButton.svelte';
import GenerateSqlFromDataModal from '../modals/GenerateSqlFromDataModal.svelte';
import { showModal } from '../modals/modalTools';
import FetchAllConfirmModal from '../modals/FetchAllConfirmModal.svelte';
import StatusBarTabItem from '../widgets/StatusBarTabItem.svelte';
import { findCommand } from '../commands/runCommand';
import { openJsonDocument } from '../tabs/JsonTab.svelte';
@@ -454,6 +467,7 @@
import macros from '../macro/macros';
export let onLoadNextData = undefined;
export let onFetchAllRows = undefined;
export let grider = undefined;
export let display: GridDisplay = undefined;
export let conid = undefined;
@@ -473,6 +487,9 @@
export let errorMessage = undefined;
export let pureName = undefined;
export let schemaName = undefined;
export let isFetchingAll = false;
export let isFetchingFromDb = false;
export let fetchAllLoadedCount = 0;
export let allowDefineVirtualReferences = false;
export let formatterFunction;
export let passAllRows = null;
@@ -647,6 +664,21 @@
return canRefresh() && !!conid && !!database;
}
export function canFetchAll() {
return !!onFetchAllRows && !isLoadedAll && !isFetchingAll && !isLoading;
}
export function fetchAll() {
if (!canFetchAll()) return;
const settings = $settingsValue || {};
if (settings['dataGrid.skipFetchAllConfirm']) {
onFetchAllRows();
} else {
showModal(FetchAllConfirmModal, { onConfirm: () => onFetchAllRows() });
}
}
export async function deepRefresh() {
callUnsubscribeDbRefresh();
await apiCall('database-connections/sync-model', { conid, database });
@@ -1977,6 +2009,7 @@
registerMenu(
{ command: 'dataGrid.refresh' },
{ command: 'dataGrid.fetchAll', hideDisabled: true },
{ placeTag: 'copy' },
{
text: _t('datagrid.copyAdvanced', { defaultMessage: 'Copy advanced' }),
@@ -2404,11 +2437,7 @@
</div>
{:else if allRowCountError && multipleGridsOnTab}
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
class="row-count-label row-count-error"
title={allRowCountError}
on:click={onReloadRowCount}
>
<div class="row-count-label row-count-error" title={allRowCountError} on:click={onReloadRowCount}>
{_t('datagrid.rows', { defaultMessage: 'Rows' })}: {_t('datagrid.rowCountMany', { defaultMessage: 'Many' })}
</div>
{/if}
@@ -2417,6 +2446,18 @@
<LoadingInfo wrapper message="Loading data" />
{/if}
{#if isFetchingAll}
<LoadingInfo
wrapper
message={isFetchingFromDb
? _t('datagrid.fetchAll.progressDb', { defaultMessage: 'Fetching data from database...' })
: _t('datagrid.fetchAll.progress', {
defaultMessage: 'Fetching all rows... {count} loaded',
values: { count: fetchAllLoadedCount.toLocaleString() },
})}
/>
{/if}
{#if !tabControlHiddenTab && !multipleGridsOnTab && allRowCount != null}
<StatusBarTabItem text={`${_t('datagrid.rows', { defaultMessage: 'Rows' })}: ${allRowCount.toLocaleString()}`} />
{:else if !tabControlHiddenTab && !multipleGridsOnTab && allRowCountError}
@@ -1,14 +1,18 @@
<script lang="ts">
import { getIntSettingsValue } from '../settings/settingsTools';
import { onDestroy } from 'svelte';
import createRef from '../utility/createRef';
import { useSettings } from '../utility/metadataLoaders';
import { fetchAll, type FetchAllHandle } from '../utility/fetchAll';
import { apiCall } from '../utility/api';
import DataGridCore from './DataGridCore.svelte';
export let loadDataPage;
export let dataPageAvailable;
export let loadRowCount;
export let startFetchAll = null;
export let grider;
export let display;
export let masterLoadedTime = undefined;
@@ -29,6 +33,12 @@
let errorMessage = null;
let domGrid;
let isFetchingAll = false;
let isFetchingFromDb = false;
let fetchAllLoadedCount = 0;
let fetchAllHandle: FetchAllHandle | null = null;
let readerJslid: string | null = null;
const loadNextDataRef = createRef(false);
const loadedTimeRef = createRef(null);
@@ -96,11 +106,161 @@
// console.log('LOADED', nextRows, loadedRows);
}
async function fetchAllRows() {
if (isFetchingAll || isLoadedAll) return;
const jslid = ($$props as any).jslid;
if (jslid) {
// Already have a JSONL file (e.g. query tab) — read directly
fetchAllViaJslid(jslid);
} else if (startFetchAll) {
// SQL/table grid: execute full query → stream to JSONL → read from it
fetchAllViaReader();
} else {
fetchAllRowsLegacy();
}
}
function stopReader() {
if (readerJslid) {
apiCall('sessions/stop-loading-reader', { jslid: readerJslid });
readerJslid = null;
}
}
async function fetchAllViaReader() {
isFetchingAll = true;
isFetchingFromDb = true;
fetchAllLoadedCount = loadedRows.length;
errorMessage = null;
// Token guards against a reload/destroy that happens while we await startFetchAll.
// loadedTimeRef is already updated by reload(), so we reuse it as our token.
const token = loadedTime;
let jslid;
try {
jslid = await startFetchAll($$props);
} catch (err) {
if (loadedTime !== token) return; // reload() already reset state
errorMessage = err?.message ?? 'Failed to start data reader';
isFetchingAll = false;
isFetchingFromDb = false;
return;
}
// If reload()/onDestroy ran while we were awaiting, discard the result and
// immediately stop the reader that was just started on the server.
if (loadedTime !== token) {
if (jslid) apiCall('sessions/stop-loading-reader', { jslid });
return;
}
if (!jslid) {
errorMessage = 'Failed to start data reader';
isFetchingAll = false;
isFetchingFromDb = false;
return;
}
readerJslid = jslid;
fetchAllViaJslid(jslid);
}
function fetchAllViaJslid(jslid: string) {
if (!isFetchingAll) {
isFetchingAll = true;
fetchAllLoadedCount = loadedRows.length;
errorMessage = null;
}
const pageSize = getIntSettingsValue('dataGrid.pageSize', 100, 5, 50000);
const buffer: any[] = [];
const jslLoadDataPage = async (offset: number, limit: number) => {
return apiCall('jsldata/get-rows', { jslid, offset, limit });
};
fetchAllHandle = fetchAll(
jslid,
jslLoadDataPage,
{
onPage(rows) {
if (rows.length > 0) isFetchingFromDb = false;
const processed = preprocessLoadedRow ? rows.map(preprocessLoadedRow) : rows;
buffer.push(...processed);
fetchAllLoadedCount = buffer.length;
},
onFinished() {
loadedRows = buffer;
isLoadedAll = true;
isFetchingAll = false;
isFetchingFromDb = false;
fetchAllHandle = null;
readerJslid = null;
if (allRowCount == null && !isRawMode) handleLoadRowCount();
},
onError(msg) {
errorMessage = msg;
isFetchingAll = false;
isFetchingFromDb = false;
fetchAllHandle = null;
stopReader();
},
},
pageSize
);
}
async function fetchAllRowsLegacy() {
isFetchingAll = true;
fetchAllLoadedCount = loadedRows.length;
errorMessage = null;
const pageSize = getIntSettingsValue('dataGrid.pageSize', 100, 5, 50000);
const fetchStart = new Date().getTime();
loadedTimeRef.set(fetchStart);
// Accumulate into a local buffer to avoid O(n²) full-array copies each iteration.
const buffer = [...loadedRows];
try {
while (!isLoadedAll) {
const nextRows = await loadDataPage($$props, buffer.length, pageSize);
if (loadedTimeRef.get() !== fetchStart) {
// a reload was triggered; abort without overwriting loadedRows with stale data
return;
}
if (nextRows.errorMessage) {
errorMessage = nextRows.errorMessage;
break;
}
if (nextRows.length === 0) {
isLoadedAll = true;
break;
}
const processed = preprocessLoadedRow ? nextRows.map(preprocessLoadedRow) : nextRows;
buffer.push(...processed);
fetchAllLoadedCount = buffer.length;
}
// Single assignment triggers Svelte reactivity once for all accumulated rows.
loadedRows = buffer;
if (allRowCount == null && !isRawMode) handleLoadRowCount();
} finally {
isFetchingAll = false;
}
}
// $: griderProps = { ...$$props, sourceRows: loadProps.loadedRows };
// $: grider = griderFactory(griderProps);
function handleLoadNextData() {
if (!isLoadedAll && !errorMessage && (!grider.disableLoadNextPage || loadedRows.length == 0)) {
if (!isLoadedAll && !errorMessage && !isFetchingAll && (!grider.disableLoadNextPage || loadedRows.length == 0)) {
if (dataPageAvailable($$props)) {
// If not, callbacks to load missing metadata are dispatched
loadNextData();
@@ -109,14 +269,23 @@
}
function reload() {
if (fetchAllHandle) {
fetchAllHandle.cancel();
fetchAllHandle = null;
}
stopReader();
isFetchingFromDb = false;
allRowCount = null;
allRowCountError = null;
isLoading = false;
isFetchingAll = false;
fetchAllLoadedCount = 0;
loadedRows = [];
isLoadedAll = false;
loadedTime = new Date().getTime();
errorMessage = null;
loadNextDataRef.set(false);
loadedTimeRef.set(null);
// loadNextDataToken = 0;
}
@@ -130,6 +299,13 @@
}
}
onDestroy(() => {
if (fetchAllHandle) {
fetchAllHandle.cancel();
}
stopReader();
});
$: if (setLoadedRows) setLoadedRows(loadedRows);
</script>
@@ -137,10 +313,14 @@
{...$$props}
bind:this={domGrid}
onLoadNextData={handleLoadNextData}
onFetchAllRows={fetchAllRows}
{errorMessage}
{isLoading}
{isFetchingAll}
{isFetchingFromDb}
{fetchAllLoadedCount}
allRowCount={rowCountLoaded || allRowCount}
allRowCountError={allRowCountError}
{allRowCountError}
onReloadRowCount={handleLoadRowCount}
{isLoadedAll}
{loadedTime}
@@ -2,13 +2,13 @@
import { getActiveComponent } from '../utility/createActivator';
import registerCommand from '../commands/registerCommand';
import hasPermission from '../utility/hasPermission';
import { __t, _t } from '../translations'
import { __t, _t } from '../translations';
const getCurrentEditor = () => getActiveComponent('SqlDataGridCore');
registerCommand({
id: 'sqlDataGrid.openQuery',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name: __t('command.openQuery', { defaultMessage : 'Open query' }),
name: __t('command.openQuery', { defaultMessage: 'Open query' }),
testEnabled: () => getCurrentEditor() != null && hasPermission('dbops/query'),
onClick: () => getCurrentEditor().openQuery(),
});
@@ -16,7 +16,7 @@
registerCommand({
id: 'sqlDataGrid.export',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name: __t('common.export', { defaultMessage : 'Export' }),
name: __t('common.export', { defaultMessage: 'Export' }),
icon: 'icon export',
keyText: 'CtrlOrCommand+E',
testEnabled: () => getCurrentEditor() != null && hasPermission('dbops/export'),
@@ -232,6 +232,20 @@
return { errorMessage: err.message || 'Error loading row count' };
}
}
// Starts a server-side reader that executes the grid's full export query
// into a JSONL session. Resolves with the session's jslid, or null when
// no query is available or the execute-reader call fails.
async function startFetchAll(props) {
  const { display, conid, database } = props;
  const sql = display.getExportQuery();
  if (!sql) {
    return null;
  }
  const response = await apiCall('sessions/execute-reader', { conid, database, sql });
  return response && !response.errorMessage ? response.jslid : null;
}
</script>
<LoadingDataGridCore
@@ -239,6 +253,7 @@
{loadDataPage}
{dataPageAvailable}
{loadRowCount}
{startFetchAll}
setLoadedRows={handleSetLoadedRows}
onPublishedCellsChanged={value => {
publishedCells = value;
+5 -7
View File
@@ -24,7 +24,7 @@
{#if isNative}
<select
value={options.find(x => x.value == value) ? value : defaultValue}
class="{selectClass}"
class={selectClass}
{...$$restProps}
on:change={e => {
dispatch('change', e.target['value']);
@@ -47,7 +47,7 @@
{...$$restProps}
items={options ?? []}
value={isMulti
? _.compact((value && Array.isArray(value)) ? value.map(item => options?.find(x => x.value == item)) : [])
? _.compact(value && Array.isArray(value) ? value.map(item => options?.find(x => x.value == item)) : [])
: (options?.find(x => x.value == value) ?? null)}
on:select={e => {
if (isMulti) {
@@ -69,7 +69,6 @@
</div>
{/if}
<style>
.select {
--border: var(--theme-input-border);
@@ -78,10 +77,10 @@
--background: var(--theme-input-background);
--borderHoverColor: var(--theme-input-border-hover-color);
--borderFocusColor: var(--theme-input-border-focus-color);
--listBackground: var(--theme-input-list-background);
--listBackground: var(--theme-input-background);
--itemActiveBackground: var(--theme-input-item-active-background);
--itemIsActiveBG: var(--theme-input-item-active-background);
--itemHoverBG: var(--theme-input-item-hover-background);
--itemHoverBG: var(--theme-input-multi-clear-hover);
--itemColor: var(--theme-input-item-foreground);
--listEmptyColor: var(--theme-input-background);
--height: 40px;
@@ -95,9 +94,8 @@
--multiClearHoverFill: var(--theme-input-multi-clear-foreground);
--multiItemActiveBG: var(--theme-input-multi-item-background);
--multiItemActiveColor: var(--theme-input-multi-item-foreground);
--multiItemBG: var(--theme-input-multi-item-background);
--multiItemBG: var(--theme-input-multi-clear-background);
--multiItemDisabledHoverBg: var(--theme-input-multi-item-background);
--multiItemDisabledHoverColor: var(--theme-input-multi-item-foreground);
}
</style>
+4 -1
View File
@@ -26,6 +26,8 @@
</script>
<script>
import DOMPurify from 'dompurify';
export let icon;
export let title = null;
export let padLeft = false;
@@ -34,6 +36,7 @@
export let colorClass = null;
$: iconValue = typeof icon === 'string' ? icon : icon?.light || icon?.dark || '';
$: isSvgString = iconValue.trim().startsWith('<svg');
$: sanitizedSvg = isSvgString ? DOMPurify.sanitize(iconValue, { USE_PROFILES: { svg: true, svgFilters: true } }) : '';
$: isTextIcon = iconValue.trim().startsWith('text ');
const iconNames = {
@@ -379,7 +382,7 @@
{#if isSvgString}
<span class="svg-inline" class:padLeft class:padRight {title} {style} on:click data-testid={$$props['data-testid']}>
{@html iconValue}
{@html sanitizedSvg}
</span>
{:else if isTextIcon}
{@const textIconParts = iconValue.trim().split(' ')}
@@ -6,6 +6,7 @@ import { getConnectionInfo } from '../utility/metadataLoaders';
import { findEngineDriver, findObjectLike } from 'dbgate-tools';
import { findFileFormat } from '../plugins/fileformats';
import { getCurrentConfig, getExtensions } from '../stores';
import { getVolatileRemapping } from '../utility/api';
export function getTargetName(extensions, source, values) {
const key = `targetName_${source}`;
@@ -38,6 +39,30 @@ function extractDriverApiParameters(values, direction, driver) {
export function extractShellConnection(connection, database) {
const config = getCurrentConfig();
// Case 1: connection._id is the original ID and a volatile remap exists.
// Use the volatile ID so the backend child process can look up the credentials.
const volatileId = getVolatileRemapping(connection._id);
if (volatileId !== connection._id) {
return {
_id: volatileId,
engine: connection.engine,
database,
};
}
// Case 2: apiCall.transformApiArgs already remapped the conid before the
// connection was fetched, so connection._id IS already the volatile ID and
// connection.unsaved === true. Falling through to allowShellConnection here
// would embed plaintext credentials in the generated script — always use the
// _id reference instead.
if (connection.unsaved) {
return {
_id: connection._id,
engine: connection.engine,
database,
};
}
return config.allowShellConnection
? {
..._.omitBy(
@@ -0,0 +1,74 @@
<script lang="ts">
import FormStyledButton from '../buttons/FormStyledButton.svelte';
import FormProvider from '../forms/FormProvider.svelte';
import FormSubmit from '../forms/FormSubmit.svelte';
import TemplatedCheckboxField from '../forms/TemplatedCheckboxField.svelte';
import FontIcon from '../icons/FontIcon.svelte';
import ModalBase from './ModalBase.svelte';
import { closeCurrentModal } from './modalTools';
import { apiCall } from '../utility/api';
import { _t } from '../translations';
// Callback invoked when the user confirms fetching all rows.
export let onConfirm;
// Settings key persisted via config/update-settings when "Don't ask again" is toggled.
const SKIP_SETTING_KEY = 'dataGrid.skipFetchAllConfirm';
let dontAskAgain = false;
</script>
<!-- Confirmation dialog shown before loading an entire result set into memory. -->
<FormProvider>
<ModalBase {...$$restProps} data-testid="FetchAllConfirmModal">
<svelte:fragment slot="header">
{_t('datagrid.fetchAll.title', { defaultMessage: 'Fetch All Rows' })}
</svelte:fragment>
<div class="message">
<FontIcon icon="img warn" />
<span>
{_t('datagrid.fetchAll.warning', {
defaultMessage:
'This will load all remaining rows into memory. For large tables, this may consume a significant amount of memory and could affect application performance.',
})}
</span>
</div>
<!-- Toggling the checkbox persists the preference immediately. -->
<div class="mt-2">
<TemplatedCheckboxField
label={_t('common.dontAskAgain', { defaultMessage: "Don't ask again" })}
templateProps={{ noMargin: true }}
checked={dontAskAgain}
on:change={e => {
dontAskAgain = e.detail;
apiCall('config/update-settings', { [SKIP_SETTING_KEY]: e.detail });
}}
data-testid="FetchAllConfirmModal_dontAskAgain"
/>
</div>
<svelte:fragment slot="footer">
<FormSubmit
value={_t('datagrid.fetchAll.confirm', { defaultMessage: 'Fetch All' })}
on:click={() => {
closeCurrentModal();
onConfirm();
}}
data-testid="FetchAllConfirmModal_okButton"
/>
<FormStyledButton
type="button"
value={_t('common.close', { defaultMessage: 'Close' })}
on:click={closeCurrentModal}
data-testid="FetchAllConfirmModal_closeButton"
/>
</svelte:fragment>
</ModalBase>
</FormProvider>
<style>
.message {
display: flex;
align-items: flex-start;
gap: 8px;
line-height: 1.5;
}
</style>
@@ -55,6 +55,12 @@
defaultMessage: 'Skip confirmation when saving collection data (NoSQL)',
})}
/>
<FormCheckboxField
name="dataGrid.skipFetchAllConfirm"
label={_t('settings.confirmations.skipFetchAllConfirm', {
defaultMessage: 'Skip confirmation when fetching all rows',
})}
/>
</FormValues>
</div>
+227 -220
View File
@@ -2,223 +2,230 @@
import { getActiveComponent } from '../utility/createActivator';
import registerCommand from '../commands/registerCommand';
import { __t } from '../translations';
const getCurrentEditor = () => getActiveComponent('CollectionDataTab');
export const matchingProps = ['conid', 'database', 'schemaName', 'pureName'];
export const allowAddToFavorites = props => true;
export const allowSwitchDatabase = props => true;
registerCommand({
id: 'collectionTable.save',
group: 'save',
category: __t('command.collectionData', { defaultMessage: 'Collection data' }),
name: __t('command.collectionData.save', { defaultMessage: 'Save' }),
// keyText: 'CtrlOrCommand+S',
toolbar: true,
isRelatedToTab: true,
icon: 'icon save',
testEnabled: () => getCurrentEditor()?.canSave(),
onClick: () => getCurrentEditor().save(),
});
</script>
<script lang="ts">
import App from '../App.svelte';
import DataGrid from '../datagrid/DataGrid.svelte';
import useGridConfig from '../utility/useGridConfig';
import {
createChangeSet,
createGridCache,
CollectionGridDisplay,
changeSetContainsChanges,
runMacroOnChangeSet,
changeSetChangedCount,
} from 'dbgate-datalib';
import { findEngineDriver } from 'dbgate-tools';
import { writable } from 'svelte/store';
import createUndoReducer from '../utility/createUndoReducer';
import invalidateCommands from '../commands/invalidateCommands';
import CollectionDataGridCore from '../datagrid/CollectionDataGridCore.svelte';
import { useCollectionInfo, useConnectionInfo, useSettings } from '../utility/metadataLoaders';
import { extensions } from '../stores';
import CollectionJsonView from '../formview/CollectionJsonView.svelte';
import createActivator from '../utility/createActivator';
import { showModal } from '../modals/modalTools';
import ErrorMessageModal from '../modals/ErrorMessageModal.svelte';
import ConfirmNoSqlModal from '../modals/ConfirmNoSqlModal.svelte'; import { registerMenu } from '../utility/contextMenu';
import { setContext } from 'svelte';
import _ from 'lodash';
import { apiCall } from '../utility/api';
import { getLocalStorage, setLocalStorage } from '../utility/storageCache';
import ToolStripContainer from '../buttons/ToolStripContainer.svelte';
import ToolStripCommandButton from '../buttons/ToolStripCommandButton.svelte';
import ToolStripExportButton, { createQuickExportHandlerRef } from '../buttons/ToolStripExportButton.svelte';
import { getBoolSettingsValue } from '../settings/settingsTools';
import useEditorData from '../query/useEditorData';
import { markTabSaved, markTabUnsaved } from '../utility/common';
import { getNumberIcon } from '../icons/FontIcon.svelte'; export let tabid;
export let conid;
export let database;
export let schemaName;
export let pureName;
let loadedRows;
export const activator = createActivator('CollectionDataTab', true);
const config = useGridConfig(tabid);
const cache = writable(createGridCache());
const settingsValue = useSettings();
const { editorState, editorValue, setEditorData } = useEditorData({
tabid,
onInitialData: value => {
dispatchChangeSet({ type: 'reset', value });
invalidateCommands();
if (changeSetContainsChanges(value)) {
markTabUnsaved(tabid);
}
},
});
const [changeSetStore, dispatchChangeSet] = createUndoReducer(createChangeSet());
$: {
setEditorData($changeSetStore.value);
if (changeSetContainsChanges($changeSetStore?.value)) {
markTabUnsaved(tabid);
} else {
markTabSaved(tabid);
}
}
$: {
$changeSetStore;
invalidateCommands();
}
$: connection = useConnectionInfo({ conid });
$: collectionInfo = useCollectionInfo({ conid, database, schemaName, pureName });
$: display =
$collectionInfo && $connection
? new CollectionGridDisplay(
$collectionInfo,
findEngineDriver($connection, $extensions),
//@ts-ignore
$config,
config.update,
$cache,
cache.update,
loadedRows,
$changeSetStore?.value,
$connection?.isReadOnly,
$settingsValue
)
: null;
// $: console.log('LOADED ROWS MONGO', loadedRows);
async function handleConfirmChange(changeSet) {
const resp = await apiCall('database-connections/update-collection', {
conid,
database,
changeSet: {
...changeSet,
updates: changeSet.updates.map(update => ({
...update,
fields: _.mapValues(update.fields, (v, k) => (v === undefined ? { $$undefined$$: true } : v)),
})),
},
});
const { errorMessage } = resp || {};
if (errorMessage) {
showModal(ErrorMessageModal, { title: 'Error when saving', message: errorMessage });
} else {
dispatchChangeSet({ type: 'reset', value: createChangeSet() });
display?.reload();
}
}
export function canSave() {
return changeSetContainsChanges($changeSetStore?.value);
}
export function save() {
const json = $changeSetStore?.value;
const driver = findEngineDriver($connection, $extensions);
const script = driver.getCollectionUpdateScript ? driver.getCollectionUpdateScript(json, $collectionInfo) : null;
if (script) {
if (getBoolSettingsValue('skipConfirm.collectionDataSave', false)) {
handleConfirmChange(json);
} else {
showModal(ConfirmNoSqlModal, {
script,
onConfirm: () => handleConfirmChange(json),
engine: display.engine,
skipConfirmSettingKey: 'skipConfirm.collectionDataSave',
});
}
} else {
handleConfirmChange(json);
}
}
function handleRunMacro(macro, params, cells) {
const newChangeSet = runMacroOnChangeSet(macro, params, cells, $changeSetStore?.value, display, false);
if (newChangeSet) {
dispatchChangeSet({ type: 'set', value: newChangeSet });
}
}
registerMenu({ command: 'collectionTable.save', tag: 'save' });
const collapsedLeftColumnStore = writable(getLocalStorage('collection_collapsedLeftColumn', false));
setContext('collapsedLeftColumnStore', collapsedLeftColumnStore);
$: setLocalStorage('collection_collapsedLeftColumn', $collapsedLeftColumnStore);
const quickExportHandlerRef = createQuickExportHandlerRef();
function handleSetLoadedRows(rows) {
loadedRows = rows;
}
</script>
<ToolStripContainer>
<DataGrid
setLoadedRows={handleSetLoadedRows}
{...$$props}
config={$config}
setConfig={config.update}
cache={$cache}
setCache={cache.update}
changeSetState={$changeSetStore}
focusOnVisible
{display}
{changeSetStore}
{dispatchChangeSet}
gridCoreComponent={CollectionDataGridCore}
jsonViewComponent={CollectionJsonView}
isDynamicStructure
showMacros
macroCondition={macro => macro.type == 'transformValue'}
onRunMacro={handleRunMacro}
/>
<svelte:fragment slot="toolstrip">
<ToolStripCommandButton command="dataGrid.refresh" hideDisabled />
<ToolStripCommandButton command="dataForm.refresh" hideDisabled />
<ToolStripCommandButton
command="collectionTable.save"
iconAfter={getNumberIcon(changeSetChangedCount($changeSetStore?.value))}
/>
<ToolStripCommandButton command="dataGrid.revertAllChanges" hideDisabled />
<ToolStripCommandButton command="dataGrid.insertNewRow" hideDisabled />
<ToolStripCommandButton command="dataGrid.deleteSelectedRows" hideDisabled />
<ToolStripCommandButton command="dataGrid.addNewColumn" hideDisabled />
<ToolStripCommandButton command="dataGrid.switchToJson" hideDisabled />
<ToolStripCommandButton command="dataGrid.switchToTable" hideDisabled />
<ToolStripExportButton {quickExportHandlerRef} command="collectionDataGrid.export" />
<ToolStripCommandButton command="collectionJsonView.expandAll" hideDisabled />
<ToolStripCommandButton command="collectionJsonView.collapseAll" hideDisabled />
<ToolStripCommandButton command="dataGrid.toggleCellDataView" hideDisabled data-testid="CollectionDataTab_toggleCellDataView" />
</svelte:fragment>
</ToolStripContainer>
const getCurrentEditor = () => getActiveComponent('CollectionDataTab');
export const matchingProps = ['conid', 'database', 'schemaName', 'pureName'];
export const allowAddToFavorites = props => true;
export const allowSwitchDatabase = props => true;
registerCommand({
id: 'collectionTable.save',
group: 'save',
category: __t('command.collectionData', { defaultMessage: 'Collection data' }),
name: __t('command.collectionData.save', { defaultMessage: 'Save' }),
// keyText: 'CtrlOrCommand+S',
toolbar: true,
isRelatedToTab: true,
icon: 'icon save',
testEnabled: () => getCurrentEditor()?.canSave(),
onClick: () => getCurrentEditor().save(),
});
</script>
<script lang="ts">
import App from '../App.svelte';
import DataGrid from '../datagrid/DataGrid.svelte';
import useGridConfig from '../utility/useGridConfig';
import {
createChangeSet,
createGridCache,
CollectionGridDisplay,
changeSetContainsChanges,
runMacroOnChangeSet,
changeSetChangedCount,
} from 'dbgate-datalib';
import { findEngineDriver } from 'dbgate-tools';
import { writable } from 'svelte/store';
import createUndoReducer from '../utility/createUndoReducer';
import invalidateCommands from '../commands/invalidateCommands';
import CollectionDataGridCore from '../datagrid/CollectionDataGridCore.svelte';
import { useCollectionInfo, useConnectionInfo, useSettings } from '../utility/metadataLoaders';
import { extensions } from '../stores';
import CollectionJsonView from '../formview/CollectionJsonView.svelte';
import createActivator from '../utility/createActivator';
import { showModal } from '../modals/modalTools';
import ErrorMessageModal from '../modals/ErrorMessageModal.svelte';
import ConfirmNoSqlModal from '../modals/ConfirmNoSqlModal.svelte';
import { registerMenu } from '../utility/contextMenu';
import { setContext } from 'svelte';
import _ from 'lodash';
import { apiCall } from '../utility/api';
import { getLocalStorage, setLocalStorage } from '../utility/storageCache';
import ToolStripContainer from '../buttons/ToolStripContainer.svelte';
import ToolStripCommandButton from '../buttons/ToolStripCommandButton.svelte';
import ToolStripExportButton, { createQuickExportHandlerRef } from '../buttons/ToolStripExportButton.svelte';
import { getBoolSettingsValue } from '../settings/settingsTools';
import useEditorData from '../query/useEditorData';
import { markTabSaved, markTabUnsaved } from '../utility/common';
import { getNumberIcon } from '../icons/FontIcon.svelte';
export let tabid;
export let conid;
export let database;
export let schemaName;
export let pureName;
let loadedRows;
export const activator = createActivator('CollectionDataTab', true);
const config = useGridConfig(tabid);
const cache = writable(createGridCache());
const settingsValue = useSettings();
const { editorState, editorValue, setEditorData } = useEditorData({
tabid,
onInitialData: value => {
dispatchChangeSet({ type: 'reset', value });
invalidateCommands();
if (changeSetContainsChanges(value)) {
markTabUnsaved(tabid);
}
},
});
const [changeSetStore, dispatchChangeSet] = createUndoReducer(createChangeSet());
$: {
setEditorData($changeSetStore.value);
if (changeSetContainsChanges($changeSetStore?.value)) {
markTabUnsaved(tabid);
} else {
markTabSaved(tabid);
}
}
$: {
$changeSetStore;
invalidateCommands();
}
$: connection = useConnectionInfo({ conid });
$: collectionInfo = useCollectionInfo({ conid, database, schemaName, pureName });
$: display =
$collectionInfo && $connection
? new CollectionGridDisplay(
$collectionInfo,
findEngineDriver($connection, $extensions),
//@ts-ignore
$config,
config.update,
$cache,
cache.update,
loadedRows,
$changeSetStore?.value,
$connection?.isReadOnly,
$settingsValue
)
: null;
// $: console.log('LOADED ROWS MONGO', loadedRows);
// Persist the accumulated change set to the NoSQL collection via the backend API.
// `undefined` field values would be lost in JSON serialization, so they are
// replaced with the { $$undefined$$: true } sentinel before sending.
async function handleConfirmChange(changeSet) {
const resp = await apiCall('database-connections/update-collection', {
conid,
database,
changeSet: {
...changeSet,
updates: changeSet.updates.map(update => ({
...update,
// undefined -> sentinel; all other values pass through unchanged
fields: _.mapValues(update.fields, (v, k) => (v === undefined ? { $$undefined$$: true } : v)),
})),
},
});
const { errorMessage } = resp || {};
if (errorMessage) {
// Backend reported a failure: show it and keep local edits intact.
showModal(ErrorMessageModal, { title: 'Error when saving', message: errorMessage });
} else {
// Success: clear local edits and reload the display from the server.
dispatchChangeSet({ type: 'reset', value: createChangeSet() });
display?.reload();
}
}
// True when there are unsaved local edits; used to enable the save command.
export function canSave() {
return changeSetContainsChanges($changeSetStore?.value);
}
// Entry point for the collectionTable.save command.
// If the driver can render the change set as a script, show a confirmation
// modal (unless skipped via settings) before saving; otherwise save directly.
export function save() {
const json = $changeSetStore?.value;
const driver = findEngineDriver($connection, $extensions);
// getCollectionUpdateScript is optional on drivers — null means "no preview script".
const script = driver.getCollectionUpdateScript ? driver.getCollectionUpdateScript(json, $collectionInfo) : null;
if (script) {
if (getBoolSettingsValue('skipConfirm.collectionDataSave', false)) {
// User opted out of confirmation — save immediately.
handleConfirmChange(json);
} else {
showModal(ConfirmNoSqlModal, {
script,
onConfirm: () => handleConfirmChange(json),
engine: display.engine,
skipConfirmSettingKey: 'skipConfirm.collectionDataSave',
});
}
} else {
handleConfirmChange(json);
}
}
// Apply a macro to the selected cells; the result (a new change set) replaces
// the current one. A falsy result means the macro produced no change.
function handleRunMacro(macro, params, cells) {
const newChangeSet = runMacroOnChangeSet(macro, params, cells, $changeSetStore?.value, display, false);
if (newChangeSet) {
dispatchChangeSet({ type: 'set', value: newChangeSet });
}
}
registerMenu({ command: 'collectionTable.save', tag: 'save' });
const collapsedLeftColumnStore = writable(getLocalStorage('collection_collapsedLeftColumn', false));
setContext('collapsedLeftColumnStore', collapsedLeftColumnStore);
$: setLocalStorage('collection_collapsedLeftColumn', $collapsedLeftColumnStore);
const quickExportHandlerRef = createQuickExportHandlerRef();
// Callback passed to DataGrid; stores the loaded rows so the reactive
// CollectionGridDisplay in this file is rebuilt with current data.
function handleSetLoadedRows(rows) {
loadedRows = rows;
}
</script>
<ToolStripContainer>
<DataGrid
setLoadedRows={handleSetLoadedRows}
{...$$props}
config={$config}
setConfig={config.update}
cache={$cache}
setCache={cache.update}
changeSetState={$changeSetStore}
focusOnVisible
{display}
{changeSetStore}
{dispatchChangeSet}
gridCoreComponent={CollectionDataGridCore}
jsonViewComponent={CollectionJsonView}
isDynamicStructure
showMacros
macroCondition={macro => macro.type == 'transformValue'}
onRunMacro={handleRunMacro}
/>
<svelte:fragment slot="toolstrip">
<ToolStripCommandButton command="dataGrid.refresh" hideDisabled />
<ToolStripCommandButton command="dataForm.refresh" hideDisabled />
<ToolStripCommandButton
command="collectionTable.save"
iconAfter={getNumberIcon(changeSetChangedCount($changeSetStore?.value))}
/>
<ToolStripCommandButton command="dataGrid.revertAllChanges" hideDisabled />
<ToolStripCommandButton command="dataGrid.insertNewRow" hideDisabled />
<ToolStripCommandButton command="dataGrid.deleteSelectedRows" hideDisabled />
<ToolStripCommandButton command="dataGrid.addNewColumn" hideDisabled />
<ToolStripCommandButton command="dataGrid.switchToJson" hideDisabled />
<ToolStripCommandButton command="dataGrid.switchToTable" hideDisabled />
<ToolStripExportButton {quickExportHandlerRef} command="collectionDataGrid.export" />
<ToolStripCommandButton command="dataGrid.fetchAll" hideDisabled />
<ToolStripCommandButton command="collectionJsonView.expandAll" hideDisabled />
<ToolStripCommandButton command="collectionJsonView.collapseAll" hideDisabled />
<ToolStripCommandButton
command="dataGrid.toggleCellDataView"
hideDisabled
data-testid="CollectionDataTab_toggleCellDataView"
/>
</svelte:fragment>
</ToolStripContainer>
+433 -431
View File
@@ -2,434 +2,436 @@
import { getActiveComponent } from '../utility/createActivator';
import registerCommand from '../commands/registerCommand';
import { __t } from '../translations';
const getCurrentEditor = () => getActiveComponent('TableDataTab');
const INTERVALS = [5, 10, 15, 30, 60];
const INTERVAL_COMMANDS = [
{
time: 5,
name: __t('command.datagrid.setAutoRefresh.5', { defaultMessage: 'Refresh every 5 seconds' }),
},
{
time: 10,
name: __t('command.datagrid.setAutoRefresh.10', { defaultMessage: 'Refresh every 10 seconds' }),
},
{
time: 15,
name: __t('command.datagrid.setAutoRefresh.15', { defaultMessage: 'Refresh every 15 seconds' }),
},
{
time: 30,
name: __t('command.datagrid.setAutoRefresh.30', { defaultMessage: 'Refresh every 30 seconds' }),
},
{
time: 60,
name: __t('command.datagrid.setAutoRefresh.60', { defaultMessage: 'Refresh every 60 seconds' }),
},
];
registerCommand({
id: 'tableData.save',
group: 'save',
category: __t('command.tableData', { defaultMessage: 'Table data' }),
name: __t('command.tableData.save', { defaultMessage: 'Save' }),
// keyText: 'CtrlOrCommand+S',
toolbar: true,
isRelatedToTab: true,
icon: 'icon save',
testEnabled: () => getCurrentEditor()?.canSave(),
onClick: () => getCurrentEditor().save(),
});
registerCommand({
id: 'tableData.setAutoRefresh.1',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name: __t('command.datagrid.setAutoRefresh.1', { defaultMessage: 'Refresh every 1 second' }),
isRelatedToTab: true,
testEnabled: () => !!getCurrentEditor(),
onClick: () => getCurrentEditor().setAutoRefresh(1),
});
for (const { time, name } of INTERVAL_COMMANDS) {
registerCommand({
id: `tableData.setAutoRefresh.${time}`,
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name,
isRelatedToTab: true,
testEnabled: () => !!getCurrentEditor(),
onClick: () => getCurrentEditor().setAutoRefresh(time),
});
}
registerCommand({
id: 'tableData.stopAutoRefresh',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name: __t('command.datagrid.stopAutoRefresh', { defaultMessage: 'Stop auto refresh' }),
isRelatedToTab: true,
keyText: 'CtrlOrCommand+Shift+R',
testEnabled: () => getCurrentEditor()?.isAutoRefresh() === true,
onClick: () => getCurrentEditor().stopAutoRefresh(null),
});
registerCommand({
id: 'tableData.startAutoRefresh',
category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
name: __t('command.datagrid.startAutoRefresh', { defaultMessage: 'Start auto refresh' }),
isRelatedToTab: true,
keyText: 'CtrlOrCommand+Shift+R',
testEnabled: () => getCurrentEditor()?.isAutoRefresh() === false,
onClick: () => getCurrentEditor().startAutoRefresh(),
});
export const matchingProps = ['conid', 'database', 'schemaName', 'pureName', 'isRawMode'];
export const allowAddToFavorites = props => true;
export const allowSwitchDatabase = props => true;
</script>
<script lang="ts">
import _ from 'lodash';
import App from '../App.svelte';
import TableDataGrid from '../datagrid/TableDataGrid.svelte';
import useGridConfig from '../utility/useGridConfig';
import {
changeSetChangedCount,
changeSetContainsChanges,
changeSetToSql,
createChangeSet,
createGridCache,
getDeleteCascades,
} from 'dbgate-datalib';
import { findEngineDriver } from 'dbgate-tools';
import { reloadDataCacheFunc } from 'dbgate-datalib';
import { writable } from 'svelte/store';
import createUndoReducer from '../utility/createUndoReducer';
import invalidateCommands from '../commands/invalidateCommands';
import { showModal } from '../modals/modalTools';
import ErrorMessageModal from '../modals/ErrorMessageModal.svelte';
import { getTableInfo, useConnectionInfo, useDatabaseInfo } from '../utility/metadataLoaders';
import { scriptToSql } from 'dbgate-sqltree';
import { extensions, lastUsedDefaultActions } from '../stores';
import ConfirmSqlModal from '../modals/ConfirmSqlModal.svelte';
import createActivator from '../utility/createActivator'; import { registerMenu } from '../utility/contextMenu';
import { showSnackbarSuccess } from '../utility/snackbar';
import openNewTab from '../utility/openNewTab';
import { onDestroy, setContext } from 'svelte';
import { apiCall } from '../utility/api';
import { getLocalStorage, setLocalStorage } from '../utility/storageCache';
import ToolStripContainer from '../buttons/ToolStripContainer.svelte';
import ToolStripCommandButton from '../buttons/ToolStripCommandButton.svelte';
import ToolStripExportButton, { createQuickExportHandlerRef } from '../buttons/ToolStripExportButton.svelte';
import ToolStripCommandSplitButton from '../buttons/ToolStripCommandSplitButton.svelte';
import { getBoolSettingsValue, getIntSettingsValue } from '../settings/settingsTools';
import useEditorData from '../query/useEditorData';
import { markTabSaved, markTabUnsaved } from '../utility/common';
import ToolStripButton from '../buttons/ToolStripButton.svelte';
import { getNumberIcon } from '../icons/FontIcon.svelte';
import { _t } from '../translations';
export let tabid;
export let conid;
export let database;
export let schemaName;
export let pureName;
export let isRawMode = false;
export let tabPreviewMode;
export const activator = createActivator('TableDataTab', true);
const config = useGridConfig(tabid);
const cache = writable(createGridCache());
const dbinfo = useDatabaseInfo({ conid, database });
let autoRefreshInterval = getIntSettingsValue('dataGrid.defaultAutoRefreshInterval', 10, 1, 3600);
let autoRefreshStarted = false;
let autoRefreshTimer = null;
$: connection = useConnectionInfo({ conid });
const { editorState, editorValue, setEditorData } = useEditorData({
tabid,
onInitialData: value => {
dispatchChangeSet({ type: 'reset', value });
invalidateCommands();
if (changeSetContainsChanges(value)) {
markTabUnsaved(tabid);
}
},
});
const [changeSetStore, dispatchChangeSet] = createUndoReducer(createChangeSet());
$: {
setEditorData($changeSetStore.value);
if (changeSetContainsChanges($changeSetStore?.value)) {
markTabUnsaved(tabid);
} else {
markTabSaved(tabid);
}
}
// Execute the confirmed SQL against the database inside a transaction.
// On success: clear local edits, invalidate the grid cache (forces reload),
// and show a success snackbar. On failure: show the error, keep edits.
async function handleConfirmSql(sql) {
const resp = await apiCall('database-connections/run-script', { conid, database, sql, useTransaction: true });
const { errorMessage } = resp || {};
if (errorMessage) {
showModal(ErrorMessageModal, {
title: _t('tableData.errorWhenSaving', { defaultMessage: 'Error when saving' }),
message: errorMessage,
});
} else {
dispatchChangeSet({ type: 'reset', value: createChangeSet() });
cache.update(reloadDataCacheFunc);
showSnackbarSuccess(_t('tableData.savedToDatabase', { defaultMessage: 'Saved to database' }));
}
}
// Entry point for the tableData.save command.
// Two save paths depending on the table's permission role:
//  - restricted roles ('create_update_delete' / 'update_only'): send the change
//    set to the backend endpoint directly, with no SQL preview;
//  - otherwise: generate SQL from the change set and either run it immediately
//    (when confirmation is skipped and no delete cascades exist) or show the
//    SQL confirmation modal first.
export async function save() {
const driver = findEngineDriver($connection, $extensions);
const tablePermissionRole = (await getTableInfo({ conid, database, schemaName, pureName }))?.tablePermissionRole;
if (tablePermissionRole == 'create_update_delete' || tablePermissionRole == 'update_only') {
const resp = await apiCall('database-connections/save-table-data', {
conid,
database,
changeSet: $changeSetStore?.value,
});
const { errorMessage } = resp || {};
if (errorMessage) {
showModal(ErrorMessageModal, {
title: _t('tableData.errorWhenSaving', { defaultMessage: 'Error when saving' }),
message: errorMessage,
});
} else {
// Success: clear edits, force the grid to reload, notify the user.
dispatchChangeSet({ type: 'reset', value: createChangeSet() });
cache.update(reloadDataCacheFunc);
showSnackbarSuccess(_t('tableData.savedToDatabase', { defaultMessage: 'Saved to database' }));
}
} else {
// Build the SQL script (driver may wrap/extend the plain change-set SQL).
const script = driver.createSaveChangeSetScript($changeSetStore?.value, $dbinfo, () =>
changeSetToSql($changeSetStore?.value, $dbinfo, driver.dialect)
);
const deleteCascades = getDeleteCascades($changeSetStore?.value, $dbinfo);
const sql = scriptToSql(driver, script);
// Render each cascade's commands to SQL for display in the confirm modal.
const deleteCascadesScripts = _.map(deleteCascades, ({ title, commands }) => ({
title,
script: scriptToSql(driver, commands),
}));
// console.log('deleteCascadesScripts', deleteCascadesScripts);
if (getBoolSettingsValue('skipConfirm.tableDataSave', false) && !deleteCascadesScripts?.length) {
handleConfirmSql(sql);
} else {
showModal(ConfirmSqlModal, {
sql,
onConfirm: confirmedSql => handleConfirmSql(confirmedSql),
engine: driver.engine,
deleteCascadesScripts,
// Cascading deletes always require confirmation — disable "don't ask again" then.
skipConfirmSettingKey: deleteCascadesScripts?.length ? null : 'skipConfirm.tableDataSave',
});
}
}
}
// True when there are unsaved local edits; used to enable the save command.
export function canSave() {
return changeSetContainsChanges($changeSetStore?.value);
}
// Set the auto-refresh period (seconds) and (re)start the refresh timer.
export function setAutoRefresh(interval) {
autoRefreshInterval = interval;
startAutoRefresh();
invalidateCommands();
}
// Whether auto refresh is currently running; drives the start/stop command enablement.
export function isAutoRefresh() {
return autoRefreshStarted;
}
// Start (or restart) the periodic refresh timer using the current interval.
// Any existing timer is cleared first so only one interval runs at a time.
export function startAutoRefresh() {
closeRefreshTimer();
autoRefreshTimer = setInterval(() => {
// Invalidating the cache triggers a data reload in the grid.
cache.update(reloadDataCacheFunc);
}, autoRefreshInterval * 1000);
autoRefreshStarted = true;
invalidateCommands();
}
// Stop the periodic refresh timer and update command enablement.
export function stopAutoRefresh() {
closeRefreshTimer();
autoRefreshStarted = false;
invalidateCommands();
}
// Clear the interval timer if one is active; safe to call when no timer runs.
function closeRefreshTimer() {
if (autoRefreshTimer) {
clearInterval(autoRefreshTimer);
autoRefreshTimer = null;
}
}
$: {
$changeSetStore;
invalidateCommands();
}
registerMenu({ command: 'tableData.save', tag: 'save' });
const collapsedLeftColumnStore = writable(getLocalStorage('dataGrid_collapsedLeftColumn', false));
setContext('collapsedLeftColumnStore', collapsedLeftColumnStore);
$: setLocalStorage('dataGrid_collapsedLeftColumn', $collapsedLeftColumnStore);
onDestroy(() => {
closeRefreshTimer();
});
const quickExportHandlerRef = createQuickExportHandlerRef();
// Build the split-button menu entries for auto refresh: deep refresh,
// start/stop toggles, the 1-second command, and one command per INTERVALS entry.
function createAutoRefreshMenu() {
return [
{ divider: true },
{ command: 'dataGrid.deepRefresh', hideDisabled: true },
{ command: 'tableData.stopAutoRefresh', hideDisabled: true },
{ command: 'tableData.startAutoRefresh', hideDisabled: true },
'tableData.setAutoRefresh.1',
...INTERVALS.map(seconds => ({
command: `tableData.setAutoRefresh.${seconds}`,
text: `...${seconds}` + ' ' + _t('command.datagrid.autoRefresh.seconds', { defaultMessage: 'seconds' }),
})),
];
}
</script>
<ToolStripContainer>
<TableDataGrid
{...$$props}
config={$config}
setConfig={config.update}
cache={$cache}
setCache={cache.update}
changeSetState={$changeSetStore}
focusOnVisible
{changeSetStore}
{dispatchChangeSet}
/>
<svelte:fragment slot="toolstrip">
<ToolStripButton
icon="icon structure"
iconAfter="icon arrow-link"
on:click={() => {
if (tabPreviewMode && getBoolSettingsValue('defaultAction.useLastUsedAction', true)) {
lastUsedDefaultActions.update(actions => ({
...actions,
tables: 'openStructure',
}));
}
openNewTab({
title: pureName,
icon: 'img table-structure',
tabComponent: 'TableStructureTab',
tabPreviewMode: true,
props: {
schemaName,
pureName,
conid,
database,
objectTypeField: 'tables',
defaultActionId: 'openStructure',
},
});
}}>{_t('datagrid.structure', { defaultMessage: 'Structure' })}</ToolStripButton
>
<ToolStripButton
icon="img sql-file"
iconAfter="icon arrow-link"
on:click={() => {
if (tabPreviewMode && getBoolSettingsValue('defaultAction.useLastUsedAction', true)) {
lastUsedDefaultActions.update(actions => ({
...actions,
tables: 'showSql',
}));
}
openNewTab({
title: pureName,
icon: 'img sql-file',
tabComponent: 'SqlObjectTab',
tabPreviewMode: true,
props: {
schemaName,
pureName,
conid,
database,
objectTypeField: 'tables',
defaultActionId: 'showSql',
},
});
}}>SQL</ToolStripButton
>
<ToolStripCommandSplitButton
buttonLabel={autoRefreshStarted
? _t('tableData.refreshEvery', {
defaultMessage: 'Refresh (every {autoRefreshInterval}s)',
values: { autoRefreshInterval },
})
: null}
commands={['dataGrid.refresh', ...createAutoRefreshMenu()]}
hideDisabled
data-testid="TableDataTab_refreshGrid"
/>
<ToolStripCommandSplitButton
buttonLabel={autoRefreshStarted
? _t('tableData.refreshEvery', {
defaultMessage: 'Refresh (every {autoRefreshInterval}s)',
values: { autoRefreshInterval },
})
: null}
commands={['dataForm.refresh', ...createAutoRefreshMenu()]}
hideDisabled
data-testid="TableDataTab_refreshForm"
/>
<!-- <ToolStripCommandButton command="dataGrid.refresh" hideDisabled />
<ToolStripCommandButton command="dataForm.refresh" hideDisabled /> -->
<ToolStripCommandButton command="dataForm.goToFirst" hideDisabled data-testid="TableDataTab_goToFirst" />
<ToolStripCommandButton command="dataForm.goToPrevious" hideDisabled data-testid="TableDataTab_goToPrevious" />
<ToolStripCommandButton command="dataForm.goToNext" hideDisabled data-testid="TableDataTab_goToNext" />
<ToolStripCommandButton command="dataForm.goToLast" hideDisabled data-testid="TableDataTab_goToLast" />
<ToolStripCommandButton
command="tableData.save"
iconAfter={getNumberIcon(changeSetChangedCount($changeSetStore?.value))}
data-testid="TableDataTab_save"
/>
<ToolStripCommandButton
command="dataGrid.revertAllChanges"
hideDisabled
data-testid="TableDataTab_revertAllChanges"
/>
<ToolStripCommandButton command="dataGrid.insertNewRow" hideDisabled data-testid="TableDataTab_insertNewRow" />
<ToolStripCommandButton
command="dataGrid.deleteSelectedRows"
hideDisabled
data-testid="TableDataTab_deleteSelectedRows"
/>
<ToolStripCommandButton command="dataGrid.switchToForm" hideDisabled data-testid="TableDataTab_switchToForm" />
<ToolStripCommandButton command="dataGrid.switchToTable" hideDisabled data-testid="TableDataTab_switchToTable" />
<ToolStripExportButton {quickExportHandlerRef} />
<ToolStripButton
icon={$collapsedLeftColumnStore ? 'icon columns-outline' : 'icon columns'}
on:click={() => collapsedLeftColumnStore.update(x => !x)}
>{_t('tableData.viewColumns', { defaultMessage: 'View columns' })}</ToolStripButton
>
<ToolStripCommandButton
command="dataGrid.toggleCellDataView"
hideDisabled
data-testid="TableDataTab_toggleCellDataView"
/>
</svelte:fragment>
</ToolStripContainer>
// Returns the currently active TableDataTab component instance (registered
// through createActivator in the instance script), or undefined when no
// TableDataTab is active. All commands below dispatch through it.
const getCurrentEditor = () => getActiveComponent('TableDataTab');

// Auto-refresh intervals (seconds) offered besides the dedicated 1-second command.
const INTERVALS = [5, 10, 15, 30, 60];

// Pre-translated command descriptors for each interval in INTERVALS; used to
// register one `tableData.setAutoRefresh.<time>` command per interval.
const INTERVAL_COMMANDS = [
  {
    time: 5,
    name: __t('command.datagrid.setAutoRefresh.5', { defaultMessage: 'Refresh every 5 seconds' }),
  },
  {
    time: 10,
    name: __t('command.datagrid.setAutoRefresh.10', { defaultMessage: 'Refresh every 10 seconds' }),
  },
  {
    time: 15,
    name: __t('command.datagrid.setAutoRefresh.15', { defaultMessage: 'Refresh every 15 seconds' }),
  },
  {
    time: 30,
    name: __t('command.datagrid.setAutoRefresh.30', { defaultMessage: 'Refresh every 30 seconds' }),
  },
  {
    time: 60,
    name: __t('command.datagrid.setAutoRefresh.60', { defaultMessage: 'Refresh every 60 seconds' }),
  },
];

// Save command: enabled only while the active tab's changeset has changes.
registerCommand({
  id: 'tableData.save',
  group: 'save',
  category: __t('command.tableData', { defaultMessage: 'Table data' }),
  name: __t('command.tableData.save', { defaultMessage: 'Save' }),
  // keyText: 'CtrlOrCommand+S',
  toolbar: true,
  isRelatedToTab: true,
  icon: 'icon save',
  testEnabled: () => getCurrentEditor()?.canSave(),
  onClick: () => getCurrentEditor().save(),
});

// 1-second refresh is registered separately (singular wording differs from
// the INTERVAL_COMMANDS entries registered in the loop below).
registerCommand({
  id: 'tableData.setAutoRefresh.1',
  category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
  name: __t('command.datagrid.setAutoRefresh.1', { defaultMessage: 'Refresh every 1 second' }),
  isRelatedToTab: true,
  testEnabled: () => !!getCurrentEditor(),
  onClick: () => getCurrentEditor().setAutoRefresh(1),
});

// Register one auto-refresh command per predefined interval.
for (const { time, name } of INTERVAL_COMMANDS) {
  registerCommand({
    id: `tableData.setAutoRefresh.${time}`,
    category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
    name,
    isRelatedToTab: true,
    testEnabled: () => !!getCurrentEditor(),
    onClick: () => getCurrentEditor().setAutoRefresh(time),
  });
}

// Stop/start share the same shortcut; exactly one is enabled at a time
// because their testEnabled predicates check isAutoRefresh() for opposite values.
registerCommand({
  id: 'tableData.stopAutoRefresh',
  category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
  name: __t('command.datagrid.stopAutoRefresh', { defaultMessage: 'Stop auto refresh' }),
  isRelatedToTab: true,
  keyText: 'CtrlOrCommand+Shift+R',
  testEnabled: () => getCurrentEditor()?.isAutoRefresh() === true,
  onClick: () => getCurrentEditor().stopAutoRefresh(null),
});

registerCommand({
  id: 'tableData.startAutoRefresh',
  category: __t('command.datagrid', { defaultMessage: 'Data grid' }),
  name: __t('command.datagrid.startAutoRefresh', { defaultMessage: 'Start auto refresh' }),
  isRelatedToTab: true,
  keyText: 'CtrlOrCommand+Shift+R',
  testEnabled: () => getCurrentEditor()?.isAutoRefresh() === false,
  onClick: () => getCurrentEditor().startAutoRefresh(),
});

// Props used by the tab framework to decide whether an existing tab matches
// a requested one, and whether favorites / database switching are allowed.
export const matchingProps = ['conid', 'database', 'schemaName', 'pureName', 'isRawMode'];
export const allowAddToFavorites = props => true;
export const allowSwitchDatabase = props => true;
</script>
<script lang="ts">
import _ from 'lodash';
import App from '../App.svelte';
import TableDataGrid from '../datagrid/TableDataGrid.svelte';
import useGridConfig from '../utility/useGridConfig';
import {
changeSetChangedCount,
changeSetContainsChanges,
changeSetToSql,
createChangeSet,
createGridCache,
getDeleteCascades,
} from 'dbgate-datalib';
import { findEngineDriver } from 'dbgate-tools';
import { reloadDataCacheFunc } from 'dbgate-datalib';
import { writable } from 'svelte/store';
import createUndoReducer from '../utility/createUndoReducer';
import invalidateCommands from '../commands/invalidateCommands';
import { showModal } from '../modals/modalTools';
import ErrorMessageModal from '../modals/ErrorMessageModal.svelte';
import { getTableInfo, useConnectionInfo, useDatabaseInfo } from '../utility/metadataLoaders';
import { scriptToSql } from 'dbgate-sqltree';
import { extensions, lastUsedDefaultActions } from '../stores';
import ConfirmSqlModal from '../modals/ConfirmSqlModal.svelte';
import createActivator from '../utility/createActivator';
import { registerMenu } from '../utility/contextMenu';
import { showSnackbarSuccess } from '../utility/snackbar';
import openNewTab from '../utility/openNewTab';
import { onDestroy, setContext } from 'svelte';
import { apiCall } from '../utility/api';
import { getLocalStorage, setLocalStorage } from '../utility/storageCache';
import ToolStripContainer from '../buttons/ToolStripContainer.svelte';
import ToolStripCommandButton from '../buttons/ToolStripCommandButton.svelte';
import ToolStripExportButton, { createQuickExportHandlerRef } from '../buttons/ToolStripExportButton.svelte';
import ToolStripCommandSplitButton from '../buttons/ToolStripCommandSplitButton.svelte';
import { getBoolSettingsValue, getIntSettingsValue } from '../settings/settingsTools';
import useEditorData from '../query/useEditorData';
import { markTabSaved, markTabUnsaved } from '../utility/common';
import ToolStripButton from '../buttons/ToolStripButton.svelte';
import { getNumberIcon } from '../icons/FontIcon.svelte';
import { _t } from '../translations';
// --- Tab props supplied by the tab framework ---
export let tabid;
export let conid;
export let database;
export let schemaName;
export let pureName;
export let isRawMode = false;
export let tabPreviewMode;

// Registers this component as 'TableDataTab' so module-level commands can
// reach the active instance via getActiveComponent.
export const activator = createActivator('TableDataTab', true);

// Per-tab grid configuration and data cache.
const config = useGridConfig(tabid);
const cache = writable(createGridCache());
const dbinfo = useDatabaseInfo({ conid, database });

// Auto-refresh state; interval default comes from settings
// ('dataGrid.defaultAutoRefreshInterval', clamped to 1..3600 by the getter's arguments).
let autoRefreshInterval = getIntSettingsValue('dataGrid.defaultAutoRefreshInterval', 10, 1, 3600);
let autoRefreshStarted = false;
let autoRefreshTimer = null;

$: connection = useConnectionInfo({ conid });

// Persisted editor data holds the changeset, so unsaved edits survive tab reloads.
const { editorState, editorValue, setEditorData } = useEditorData({
  tabid,
  onInitialData: value => {
    // Restore the persisted changeset and re-evaluate command enablement.
    dispatchChangeSet({ type: 'reset', value });
    invalidateCommands();
    if (changeSetContainsChanges(value)) {
      markTabUnsaved(tabid);
    }
  },
});

// Undo/redo-capable changeset store; dispatchChangeSet mutates it.
const [changeSetStore, dispatchChangeSet] = createUndoReducer(createChangeSet());

// Keep persisted editor data and the tab's saved/unsaved marker in sync with
// the changeset on every change.
$: {
  setEditorData($changeSetStore.value);
  if (changeSetContainsChanges($changeSetStore?.value)) {
    markTabUnsaved(tabid);
  } else {
    markTabSaved(tabid);
  }
}
// Executes the confirmed SQL script against the current database inside a
// transaction. On failure shows an error modal; on success resets the
// changeset, reloads grid data and shows a success snackbar.
async function handleConfirmSql(sql) {
  const response = await apiCall('database-connections/run-script', { conid, database, sql, useTransaction: true });
  const errorMessage = response?.errorMessage;
  if (errorMessage) {
    showModal(ErrorMessageModal, {
      title: _t('tableData.errorWhenSaving', { defaultMessage: 'Error when saving' }),
      message: errorMessage,
    });
    return;
  }
  dispatchChangeSet({ type: 'reset', value: createChangeSet() });
  cache.update(reloadDataCacheFunc);
  showSnackbarSuccess(_t('tableData.savedToDatabase', { defaultMessage: 'Saved to database' }));
}
export async function save() {
const driver = findEngineDriver($connection, $extensions);
const tablePermissionRole = (await getTableInfo({ conid, database, schemaName, pureName }))?.tablePermissionRole;
if (tablePermissionRole == 'create_update_delete' || tablePermissionRole == 'update_only') {
const resp = await apiCall('database-connections/save-table-data', {
conid,
database,
changeSet: $changeSetStore?.value,
});
const { errorMessage } = resp || {};
if (errorMessage) {
showModal(ErrorMessageModal, {
title: _t('tableData.errorWhenSaving', { defaultMessage: 'Error when saving' }),
message: errorMessage,
});
} else {
dispatchChangeSet({ type: 'reset', value: createChangeSet() });
cache.update(reloadDataCacheFunc);
showSnackbarSuccess(_t('tableData.savedToDatabase', { defaultMessage: 'Saved to database' }));
}
} else {
const script = driver.createSaveChangeSetScript($changeSetStore?.value, $dbinfo, () =>
changeSetToSql($changeSetStore?.value, $dbinfo, driver.dialect)
);
const deleteCascades = getDeleteCascades($changeSetStore?.value, $dbinfo);
const sql = scriptToSql(driver, script);
const deleteCascadesScripts = _.map(deleteCascades, ({ title, commands }) => ({
title,
script: scriptToSql(driver, commands),
}));
// console.log('deleteCascadesScripts', deleteCascadesScripts);
if (getBoolSettingsValue('skipConfirm.tableDataSave', false) && !deleteCascadesScripts?.length) {
handleConfirmSql(sql);
} else {
showModal(ConfirmSqlModal, {
sql,
onConfirm: confirmedSql => handleConfirmSql(confirmedSql),
engine: driver.engine,
deleteCascadesScripts,
skipConfirmSettingKey: deleteCascadesScripts?.length ? null : 'skipConfirm.tableDataSave',
});
}
}
}
// True when the changeset currently holds unsaved modifications; drives the
// enablement of the tableData.save command.
export function canSave() {
  const currentChangeSet = $changeSetStore?.value;
  return changeSetContainsChanges(currentChangeSet);
}
// Switches auto-refresh to the given interval (seconds) and (re)starts the
// timer, then re-evaluates command enablement.
export function setAutoRefresh(seconds) {
  autoRefreshInterval = seconds;
  startAutoRefresh();
  invalidateCommands();
}
// Reports whether auto-refresh is currently running; the start/stop commands
// use this (compared against true/false) to decide which one is enabled.
export function isAutoRefresh() {
  return autoRefreshStarted;
}
// Starts (or restarts) the periodic data reload using the current
// autoRefreshInterval. Any existing timer is cleared first so only one
// interval ever runs.
export function startAutoRefresh() {
  closeRefreshTimer();
  const periodMs = autoRefreshInterval * 1000;
  autoRefreshTimer = setInterval(() => cache.update(reloadDataCacheFunc), periodMs);
  autoRefreshStarted = true;
  invalidateCommands();
}
// Stops the periodic reload and re-evaluates command enablement.
export function stopAutoRefresh() {
  closeRefreshTimer();
  autoRefreshStarted = false;
  invalidateCommands();
}
// Clears the auto-refresh interval timer if one is active; safe to call
// repeatedly (also invoked from onDestroy).
function closeRefreshTimer() {
  if (!autoRefreshTimer) return;
  clearInterval(autoRefreshTimer);
  autoRefreshTimer = null;
}
// Re-evaluate command enablement whenever the changeset store changes
// (the bare $changeSetStore reference only establishes the reactive dependency).
$: {
  $changeSetStore;
  invalidateCommands();
}

registerMenu({ command: 'tableData.save', tag: 'save' });

// Collapsed state of the left column panel, persisted in local storage and
// shared with child components via context.
const collapsedLeftColumnStore = writable(getLocalStorage('dataGrid_collapsedLeftColumn', false));
setContext('collapsedLeftColumnStore', collapsedLeftColumnStore);
$: setLocalStorage('dataGrid_collapsedLeftColumn', $collapsedLeftColumnStore);

// Stop the auto-refresh timer when the tab is destroyed.
onDestroy(() => {
  closeRefreshTimer();
});

const quickExportHandlerRef = createQuickExportHandlerRef();
// Builds the auto-refresh section of the refresh split-button menu: deep
// refresh, start/stop toggles, the dedicated 1-second command, and one entry
// per predefined interval.
function createAutoRefreshMenu() {
  const intervalItems = INTERVALS.map(seconds => ({
    command: `tableData.setAutoRefresh.${seconds}`,
    text: `...${seconds}` + ' ' + _t('command.datagrid.autoRefresh.seconds', { defaultMessage: 'seconds' }),
  }));
  return [
    { divider: true },
    { command: 'dataGrid.deepRefresh', hideDisabled: true },
    { command: 'tableData.stopAutoRefresh', hideDisabled: true },
    { command: 'tableData.startAutoRefresh', hideDisabled: true },
    'tableData.setAutoRefresh.1',
    ...intervalItems,
  ];
}
</script>
<ToolStripContainer>
<TableDataGrid
{...$$props}
config={$config}
setConfig={config.update}
cache={$cache}
setCache={cache.update}
changeSetState={$changeSetStore}
focusOnVisible
{changeSetStore}
{dispatchChangeSet}
/>
<svelte:fragment slot="toolstrip">
<ToolStripButton
icon="icon structure"
iconAfter="icon arrow-link"
on:click={() => {
if (tabPreviewMode && getBoolSettingsValue('defaultAction.useLastUsedAction', true)) {
lastUsedDefaultActions.update(actions => ({
...actions,
tables: 'openStructure',
}));
}
openNewTab({
title: pureName,
icon: 'img table-structure',
tabComponent: 'TableStructureTab',
tabPreviewMode: true,
props: {
schemaName,
pureName,
conid,
database,
objectTypeField: 'tables',
defaultActionId: 'openStructure',
},
});
}}>{_t('datagrid.structure', { defaultMessage: 'Structure' })}</ToolStripButton
>
<ToolStripButton
icon="img sql-file"
iconAfter="icon arrow-link"
on:click={() => {
if (tabPreviewMode && getBoolSettingsValue('defaultAction.useLastUsedAction', true)) {
lastUsedDefaultActions.update(actions => ({
...actions,
tables: 'showSql',
}));
}
openNewTab({
title: pureName,
icon: 'img sql-file',
tabComponent: 'SqlObjectTab',
tabPreviewMode: true,
props: {
schemaName,
pureName,
conid,
database,
objectTypeField: 'tables',
defaultActionId: 'showSql',
},
});
}}>SQL</ToolStripButton
>
<ToolStripCommandSplitButton
buttonLabel={autoRefreshStarted
? _t('tableData.refreshEvery', {
defaultMessage: 'Refresh (every {autoRefreshInterval}s)',
values: { autoRefreshInterval },
})
: null}
commands={['dataGrid.refresh', ...createAutoRefreshMenu()]}
hideDisabled
data-testid="TableDataTab_refreshGrid"
/>
<ToolStripCommandSplitButton
buttonLabel={autoRefreshStarted
? _t('tableData.refreshEvery', {
defaultMessage: 'Refresh (every {autoRefreshInterval}s)',
values: { autoRefreshInterval },
})
: null}
commands={['dataForm.refresh', ...createAutoRefreshMenu()]}
hideDisabled
data-testid="TableDataTab_refreshForm"
/>
<!-- <ToolStripCommandButton command="dataGrid.refresh" hideDisabled />
<ToolStripCommandButton command="dataForm.refresh" hideDisabled /> -->
<ToolStripCommandButton command="dataForm.goToFirst" hideDisabled data-testid="TableDataTab_goToFirst" />
<ToolStripCommandButton command="dataForm.goToPrevious" hideDisabled data-testid="TableDataTab_goToPrevious" />
<ToolStripCommandButton command="dataForm.goToNext" hideDisabled data-testid="TableDataTab_goToNext" />
<ToolStripCommandButton command="dataForm.goToLast" hideDisabled data-testid="TableDataTab_goToLast" />
<ToolStripCommandButton
command="tableData.save"
iconAfter={getNumberIcon(changeSetChangedCount($changeSetStore?.value))}
data-testid="TableDataTab_save"
/>
<ToolStripCommandButton
command="dataGrid.revertAllChanges"
hideDisabled
data-testid="TableDataTab_revertAllChanges"
/>
<ToolStripCommandButton command="dataGrid.insertNewRow" hideDisabled data-testid="TableDataTab_insertNewRow" />
<ToolStripCommandButton
command="dataGrid.deleteSelectedRows"
hideDisabled
data-testid="TableDataTab_deleteSelectedRows"
/>
<ToolStripCommandButton command="dataGrid.switchToForm" hideDisabled data-testid="TableDataTab_switchToForm" />
<ToolStripCommandButton command="dataGrid.switchToTable" hideDisabled data-testid="TableDataTab_switchToTable" />
<ToolStripExportButton {quickExportHandlerRef} />
<ToolStripCommandButton command="dataGrid.fetchAll" hideDisabled data-testid="TableDataTab_fetchAll" />
<ToolStripButton
icon={$collapsedLeftColumnStore ? 'icon columns-outline' : 'icon columns'}
on:click={() => collapsedLeftColumnStore.update(x => !x)}
>{_t('tableData.viewColumns', { defaultMessage: 'View columns' })}</ToolStripButton
>
<ToolStripCommandButton
command="dataGrid.toggleCellDataView"
hideDisabled
data-testid="TableDataTab_toggleCellDataView"
/>
</svelte:fragment>
</ToolStripContainer>
+1
View File
@@ -129,6 +129,7 @@
<ToolStripCommandButton command="dataGrid.refresh" />
<ToolStripExportButton {quickExportHandlerRef} />
<ToolStripCommandButton command="dataGrid.fetchAll" hideDisabled />
<ToolStripCommandButton command="dataGrid.toggleCellDataView" hideDisabled />
</svelte:fragment>
</ToolStripContainer>
+11 -7
View File
@@ -98,29 +98,31 @@ const clipboardTextFormatter = (delimiter, headers) => (columns, rows, options)
const clipboardJsonFormatter = () => (columns, rows) => {
return JSON.stringify(
rows.map(row => _.pick(row, columns)),
rows.map(row => _.omitBy(_.pick(row, columns), _.isUndefined)),
undefined,
2
);
};
const clipboardYamlFormatter = () => (columns, rows) => {
return yaml.dump(rows.map(row => _.pick(row, columns)));
return yaml.dump(rows.map(row => _.omitBy(_.pick(row, columns), _.isUndefined)));
};
const clipboardJsonLinesFormatter = () => (columns, rows) => {
return rows.map(row => JSON.stringify(_.pick(row, columns))).join('\r\n');
return rows.map(row => JSON.stringify(_.omitBy(_.pick(row, columns), _.isUndefined))).join('\r\n');
};
const clipboardInsertsFormatter = () => (columns, rows, options) => {
const { schemaName, pureName, driver } = options;
const dmp = driver.createDumper();
for (const row of rows) {
const definedColumns = columns.filter(col => row[col] !== undefined);
if (definedColumns.length === 0) continue;
dmp.putCmd(
'^insert ^into %f (%,i) ^values (%,v)',
{ schemaName, pureName },
columns,
columns.map(col => row[col])
definedColumns,
definedColumns.map(col => row[col])
);
}
return dmp.s;
@@ -130,8 +132,10 @@ const clipboardUpdatesFormatter = () => (columns, rows, options) => {
const { schemaName, pureName, driver, keyColumns } = options;
const dmp = driver.createDumper();
for (const row of rows) {
const definedColumns = columns.filter(col => row[col] !== undefined);
if (definedColumns.length === 0) continue;
dmp.put('^update %f ^set ', { schemaName, pureName });
dmp.putCollection(', ', columns, col => dmp.put('%i=%v', col, row[col]));
dmp.putCollection(', ', definedColumns, col => dmp.put('%i=%v', col, row[col]));
dmp.put(' ^where ');
dmp.putCollection(' ^and ', keyColumns, col => dmp.put('%i=%v', col, row[col]));
dmp.endCommand();
@@ -141,7 +145,7 @@ const clipboardUpdatesFormatter = () => (columns, rows, options) => {
const clipboardMongoInsertFormatter = () => (columns, rows, options) => {
const { pureName } = options;
return rows.map(row => `db.${pureName}.insert(${JSON.stringify(_.pick(row, columns), undefined, 2)});`).join('\n');
return rows.map(row => `db.${pureName}.insert(${JSON.stringify(_.omitBy(_.pick(row, columns), _.isUndefined), undefined, 2)});`).join('\n');
};
export function formatClipboardRows(format, columns, rows, options) {
+353
View File
@@ -0,0 +1,353 @@
import { apiCall, apiOff, apiOn } from './api';
import getElectron from './getElectron';
import resolveApi, { resolveApiHeaders } from './resolveApi';
export interface FetchAllCallbacks {
/** Called with each page of rows as they arrive. */
onPage(rows: object[]): void;
/** Called once when all data has been received. */
onFinished(): void;
/** Called if an error occurs. */
onError(message: string): void;
}
export interface FetchAllHandle {
/** Signal the loader to stop fetching. */
cancel(): void;
}
const STREAM_BATCH_SIZE = 1000;
const WEB_PAGE_SIZE = 5000;
/**
* Fetches all rows from a JSONL source.
*
* Electron: uses paginated `jsldata/get-rows` via IPC (already fast).
* Web: waits for source to finish, then streams the entire JSONL file in a
* single HTTP request via `jsldata/stream-rows`, parsing lines
* progressively with ReadableStream. Falls back to paginated reads
* with larger page sizes if streaming is unavailable.
*/
/**
 * Fetches all rows from a JSONL source, choosing the strategy by platform:
 * Electron uses paginated IPC-backed reads; the web build streams the whole
 * file over HTTP (with a paginated fallback inside fetchAllWeb).
 *
 * @param jslid JSONL source identifier.
 * @param loadDataPage Loader for a single page of rows (offset/limit).
 * @param callbacks Receives pages, completion, and errors.
 * @param pageSize Page size for the paginated (Electron) strategy.
 * @returns Handle whose cancel() stops fetching.
 */
export function fetchAll(
  jslid: string,
  loadDataPage: (offset: number, limit: number) => Promise<any>,
  callbacks: FetchAllCallbacks,
  pageSize: number = 100
): FetchAllHandle {
  return getElectron()
    ? fetchAllPaginated(jslid, loadDataPage, callbacks, pageSize)
    : fetchAllWeb(jslid, loadDataPage, callbacks);
}
/**
* Web strategy: listen to SSE stats for progress, once source is finished
* stream the entire JSONL in one HTTP request.
*/
// Web strategy: listens to SSE stats events for progress; once the source is
// finished, streams the entire JSONL file in a single HTTP request and parses
// it line-by-line with ReadableStream. Falls back to paginated reads when the
// streaming endpoint is unavailable.
function fetchAllWeb(
  jslid: string,
  loadDataPage: (offset: number, limit: number) => Promise<any>,
  callbacks: FetchAllCallbacks
): FetchAllHandle {
  // Shared cancellation/stream state, captured by the closures below.
  let cancelled = false;
  let streamStarted = false;
  let abortController: AbortController | null = null;
  let streamReader: ReadableStreamDefaultReader<Uint8Array> | null = null;

  // Initialize cancelFn before registering the SSE handler to avoid TDZ errors
  // if an immediate stats event triggers fallbackToPaginated() before initialization.
  // cancelFn is reassigned by fallbackToPaginated() to the fallback's cancel.
  let cancelFn = () => {
    cancelled = true;
    if (streamReader) {
      // Best-effort reader cancellation; errors are deliberately swallowed.
      streamReader.cancel().catch(() => {});
      streamReader = null;
    }
    if (abortController) {
      abortController.abort();
      abortController = null;
    }
    cleanup();
  };

  // SSE stats handler: reports progress while the source is still writing,
  // and kicks off the one-shot stream once the source is finished.
  const handleStats = (stats: { rowCount: number; changeIndex: number; isFinished: boolean }) => {
    if (cancelled || streamStarted) return;
    // Report progress while source is still writing
    if (!stats.isFinished) {
      callbacks.onPage([]); // trigger UI update with count info
      return;
    }
    // Source finished — stream all rows at once
    streamStarted = true;
    startStream();
  };
  apiOn(`jsldata-stats-${jslid}`, handleStats);

  // Streams the whole JSONL file once, batching parsed rows into onPage calls
  // of up to STREAM_BATCH_SIZE rows.
  async function startStream() {
    abortController = new AbortController();
    try {
      const resp = await fetch(`${resolveApi()}/jsldata/stream-rows?jslid=${encodeURIComponent(jslid)}`, {
        method: 'GET',
        cache: 'no-cache',
        signal: abortController.signal,
        headers: {
          ...resolveApiHeaders(),
        },
      });
      if (!resp.body || resp.status === 404 || resp.status === 405) {
        // Streaming endpoint not available in this environment — fall back to paginated reads
        cleanup();
        fallbackToPaginated();
        return;
      }
      if (!resp.ok) {
        // Non-recoverable server error (e.g. 403 security rejection, 5xx) — surface it
        callbacks.onError(`HTTP ${resp.status}: ${resp.statusText}`);
        cleanup();
        return;
      }
      streamReader = resp.body.getReader();
      const decoder = new TextDecoder();
      let buffer = '';          // holds the trailing partial line between chunks
      let isFirstLine = true;   // first line may be a stream header, not data
      let batch: any[] = [];
      while (!cancelled) {
        const { done, value } = await streamReader.read();
        if (done) break;
        // stream:true keeps partial multi-byte characters inside the decoder
        buffer += decoder.decode(value, { stream: true });
        const lines = buffer.split('\n');
        buffer = lines.pop() || '';
        for (const line of lines) {
          if (cancelled) break;
          const trimmed = line.trim();
          if (!trimmed) continue;
          if (isFirstLine) {
            isFirstLine = false;
            // Check if first line is a header
            try {
              const parsed = JSON.parse(trimmed);
              if (parsed.__isStreamHeader) continue;
              // Not a header — it's a data row
              batch.push(parsed);
            } catch {
              continue;
            }
            continue;
          }
          try {
            batch.push(JSON.parse(trimmed));
          } catch {
            // skip malformed lines
          }
          if (batch.length >= STREAM_BATCH_SIZE) {
            if (cancelled) break;
            callbacks.onPage(batch);
            batch = [];
          }
        }
      }
      // Flush the decoder — any bytes held for multi-byte char completion are released
      const flushed = decoder.decode();
      if (flushed) buffer += flushed;
      // Process remaining buffer
      const remainingBuffer = buffer.trim();
      if (remainingBuffer && !cancelled) {
        try {
          const parsed = JSON.parse(remainingBuffer);
          if (!parsed.__isStreamHeader) {
            batch.push(parsed);
          }
        } catch {
          // ignore
        }
      }
      if (batch.length > 0 && !cancelled) {
        callbacks.onPage(batch);
      }
      if (!cancelled) {
        callbacks.onFinished();
      }
    } catch (err) {
      if (!cancelled) {
        callbacks.onError(err?.message ?? String(err));
      }
    } finally {
      streamReader = null;
      abortController = null;
      cleanup();
    }
  }

  // Swaps this loader for the paginated strategy; rewires cancelFn so the
  // caller's handle cancels the fallback instead.
  function fallbackToPaginated() {
    const handle = fetchAllPaginated(jslid, loadDataPage, callbacks, WEB_PAGE_SIZE);
    cancelFn = handle.cancel;
  }

  // Unsubscribes the SSE stats listener.
  function cleanup() {
    apiOff(`jsldata-stats-${jslid}`, handleStats);
  }

  // Check if data is already finished
  checkInitialState();
  async function checkInitialState() {
    try {
      const stats = await apiCall('jsldata/get-stats', { jslid });
      if (stats && stats.isFinished && stats.rowCount > 0) {
        streamStarted = true;
        startStream();
      } else if (stats && stats.isFinished && stats.rowCount === 0) {
        // Source finished with zero rows — no SSE event will follow, finish immediately
        cleanup();
        callbacks.onFinished();
      }
      // Source still writing or no stats yet — SSE events will trigger stream when done
    } catch {
      // Stats not available yet — SSE events will arrive
    }
  }

  return {
    cancel() {
      // Delegates through cancelFn so a paginated fallback is cancelled too.
      cancelFn();
    },
  };
}
/**
* Paginated strategy (Electron / fallback): uses `jsldata/get-rows` with
* SSE stats events to know when new data is available.
*/
// Paginated strategy (Electron / web fallback): repeatedly calls loadDataPage
// to drain available rows, using SSE stats events to learn when more data has
// arrived and when the source has finished writing.
function fetchAllPaginated(
  jslid: string,
  loadDataPage: (offset: number, limit: number) => Promise<any>,
  callbacks: FetchAllCallbacks,
  pageSize: number
): FetchAllHandle {
  let cancelled = false;
  let finished = false;          // guards against double onFinished
  let offset = 0;                // rows consumed so far
  let isRunning = false;         // true while drain() is in flight
  let isSourceFinished = false;  // source reported isFinished via stats
  let drainRequested = false;    // a stats event arrived while draining

  // Fires onFinished exactly once and unsubscribes.
  function finish() {
    if (finished) return;
    finished = true;
    callbacks.onFinished();
    cleanup();
  }

  // Stats handler: drain when rows beyond our offset exist; finish when the
  // source is done and we have consumed exactly all rows.
  const handleStats = (stats: { rowCount: number; changeIndex: number; isFinished: boolean }) => {
    isSourceFinished = stats.isFinished;
    if (stats.rowCount > offset) {
      scheduleDrain();
    } else if (stats.isFinished && stats.rowCount === offset) {
      finish();
    }
  };

  // Starts a drain now, or flags one to run right after the current drain ends.
  function scheduleDrain() {
    if (isRunning) {
      drainRequested = true;
      return;
    }
    drain();
  }
  apiOn(`jsldata-stats-${jslid}`, handleStats);

  // Reads pages until a short page signals no more data is currently available.
  async function drain() {
    if (isRunning || cancelled) return;
    isRunning = true;
    drainRequested = false;
    try {
      while (!cancelled) {
        const rows = await loadDataPage(offset, pageSize);
        if (cancelled) break;
        // loadDataPage may return an error object instead of an array.
        if (rows.errorMessage) {
          callbacks.onError(rows.errorMessage);
          cleanup();
          return;
        }
        if (rows.length > 0) {
          offset += rows.length;
          callbacks.onPage(rows);
        }
        if (rows.length < pageSize) {
          // Short page: either the source is done, or we must wait for more stats.
          if (isSourceFinished) {
            finish();
            return;
          }
          break;
        }
        // Yield to the event loop between full pages to keep the UI responsive.
        await new Promise(resolve => setTimeout(resolve, 0));
      }
    } catch (err) {
      if (!cancelled) {
        const msg = err?.message ?? String(err);
        if (msg.includes('ENOENT')) {
          // File not ready yet
        } else {
          callbacks.onError(msg);
          cleanup();
        }
      }
    } finally {
      isRunning = false;
      // A stats event arrived mid-drain — pick up the new rows.
      if (drainRequested && !cancelled) {
        scheduleDrain();
      }
    }
  }

  // Unsubscribes the SSE stats listener.
  function cleanup() {
    apiOff(`jsldata-stats-${jslid}`, handleStats);
  }

  checkInitialState();
  async function checkInitialState() {
    try {
      const stats = await apiCall('jsldata/get-stats', { jslid });
      if (stats) {
        isSourceFinished = stats.isFinished;
        if (stats.rowCount > 0) {
          scheduleDrain();
        } else if (stats.isFinished && !cancelled) {
          // rowCount === 0: source finished empty — no SSE event will follow
          finish();
        }
      }
    } catch {
      // Stats not available yet
    }
  }

  return {
    cancel() {
      cancelled = true;
      cleanup();
    },
  };
}
+21 -2
View File
@@ -8,7 +8,7 @@ import {
unsubscribeCachePeek,
} from './cache';
import stableStringify from 'json-stable-stringify';
import { derived } from 'svelte/store';
import { derived, writable } from 'svelte/store';
import { extendDatabaseInfo } from 'dbgate-tools';
import { setLocalStorage } from '../utility/storageCache';
import { apiCall, apiOff, apiOn } from './api';
@@ -175,11 +175,13 @@ const filesLoader = ({ folder, parseFrontMatter = false }) => ({
url: 'files/list',
params: parseFrontMatter ? { folder, parseFrontMatter: true } : { folder },
reloadTrigger: { key: `files-changed`, folder },
errorValue: [],
});
const allFilesLoader = () => ({
url: 'files/list-all',
params: {},
reloadTrigger: { key: `all-files-changed` },
errorValue: [],
});
const authTypesLoader = ({ engine }) => ({
url: 'plugins/auth-types',
@@ -188,25 +190,34 @@ const authTypesLoader = ({ engine }) => ({
errorValue: null,
});
const publicCloudErrorStore = writable(false);
const cloudContentErrorStore = writable(false);
const publicCloudFilesLoader = () => ({
url: 'cloud/public-files',
params: {},
reloadTrigger: { key: `public-cloud-changed` },
errorValue: [],
onError: err => publicCloudErrorStore.set(!!err),
});
const cloudContentListLoader = () => ({
url: 'cloud/content-list',
params: {},
reloadTrigger: { key: `cloud-content-changed` },
errorValue: [],
onError: err => cloudContentErrorStore.set(!!err),
});
const teamFilesLoader = () => ({
url: 'team-files/list',
params: {},
reloadTrigger: { key: `team-files-changed` },
errorValue: [],
});
const teamFoldersLoader = () => ({
url: 'team-files/list-folders',
params: {},
reloadTrigger: { key: `team-folders-changed` },
errorValue: [],
});
const promoWidgetLoader = () => ({
url: 'cloud/premium-promo-widget',
@@ -220,15 +231,17 @@ const fileThemesLoader = () => ({
});
async function getCore(loader, args) {
const { url, params, reloadTrigger, transform, onLoaded, errorValue } = loader(args);
const { url, params, reloadTrigger, transform, onLoaded, onError, errorValue } = loader(args);
const key = stableStringify({ url, ...params });
async function doLoad() {
const resp = await apiCall(url, params);
if (resp?.errorMessage && errorValue !== undefined) {
if (onError) onError(resp.errorMessage);
if (onLoaded) onLoaded(errorValue);
return errorValue;
}
if (onError) onError(null);
const res = (transform || (x => x))(resp);
if (onLoaded) onLoaded(res);
return res;
@@ -551,6 +564,9 @@ export function getPublicCloudFiles(args) {
export function usePublicCloudFiles(args = {}) {
return useCore(publicCloudFilesLoader, args);
}
export function usePublicCloudError() {
return publicCloudErrorStore;
}
export function getCloudContentList(args) {
return getCore(cloudContentListLoader, args);
@@ -558,6 +574,9 @@ export function getCloudContentList(args) {
export function useCloudContentList(args = {}) {
return useCore(cloudContentListLoader, args);
}
export function useCloudContentError() {
return cloudContentErrorStore;
}
export function getTeamFiles(args) {
return getCore(teamFilesLoader, args);
@@ -4,7 +4,12 @@
import AppObjectList from '../appobj/AppObjectList.svelte';
import * as cloudContentAppObject from '../appobj/CloudContentAppObject.svelte';
import { useCloudContentList, usePublicCloudFiles, useServerStatus } from '../utility/metadataLoaders';
import {
useCloudContentList,
usePublicCloudFiles,
useServerStatus,
useCloudContentError,
} from '../utility/metadataLoaders';
import { _t } from '../translations';
import WidgetsInnerContainer from './WidgetsInnerContainer.svelte';
@@ -47,6 +52,7 @@
const cloudContentList = useCloudContentList();
const serverStatus = useServerStatus();
const cloudContentError = useCloudContentError();
const cloudContentColorFactory = useCloudContentColorFactory();
const connectionColorFactory = useConnectionColorFactory();
@@ -259,16 +265,19 @@
icon="icon plus-thick"
menu={createAddItemMenu}
title={_t('privateCloudWidget.addNewConnectionOrFile', { defaultMessage: 'Add new connection or file' })}
disabled={$cloudContentError}
/>
<DropDownButton
icon="icon add-folder"
menu={createAddFolderMenu}
title={_t('privateCloudWidget.addNewFolder', { defaultMessage: 'Add new folder' })}
disabled={$cloudContentError}
/>
<InlineButton
on:click={handleRefreshContent}
title={_t('privateCloudWidget.refreshFiles', { defaultMessage: 'Refresh files' })}
data-testid="CloudItemsWidget_buttonRefreshContent"
disabled={$cloudContentError}
>
<FontIcon icon="icon refresh" />
</InlineButton>
@@ -300,7 +309,14 @@
groupContextMenu={createGroupContextMenu}
/>
{#if !cloudContentFlat?.length}
{#if $cloudContentError}
<ErrorInfo
message={_t('privateCloudWidget.cloudUnavailable', {
defaultMessage: 'DbGate Cloud is temporarily unavailable',
})}
icon="img warn"
/>
{:else if !cloudContentFlat?.length}
<ErrorInfo
message={_t('privateCloudWidget.noContent', { defaultMessage: 'You have no content on DbGate cloud' })}
icon="img info"
@@ -4,7 +4,7 @@
import AppObjectList from '../appobj/AppObjectList.svelte';
import * as publicCloudFileAppObject from '../appobj/PublicCloudFileAppObject.svelte';
import { usePublicCloudFiles } from '../utility/metadataLoaders';
import { usePublicCloudFiles, usePublicCloudError } from '../utility/metadataLoaders';
import { _t } from '../translations';
import WidgetsInnerContainer from './WidgetsInnerContainer.svelte';
@@ -20,6 +20,7 @@
let filter = '';
const publicFiles = usePublicCloudFiles();
const publicCloudError = usePublicCloudError();
function handleRefreshPublic() {
refreshPublicCloudFiles(true);
@@ -42,6 +43,7 @@
on:click={handleRefreshPublic}
title={_t('publicCloudWidget.refreshFiles', { defaultMessage: 'Refresh files' })}
data-testid="CloudItemsWidget_buttonRefreshPublic"
disabled={$publicCloudError}
>
<FontIcon icon="icon refresh" />
</InlineButton>
@@ -54,7 +56,14 @@
{filter}
/>
{#if !$publicFiles?.length}
{#if $publicCloudError}
<ErrorInfo
message={_t('publicCloudWidget.cloudUnavailable', {
defaultMessage: 'DbGate Cloud is temporarily unavailable',
})}
icon="img warn"
/>
{:else if !$publicFiles?.length}
<ErrorInfo
message={_t('publicCloudWidget.noFilesFound', { defaultMessage: 'No files found for your configuration' })}
/>
@@ -6,7 +6,7 @@ const Analyser = require('./Analyser');
const isPromise = require('is-promise');
const mongodb = require('mongodb');
const { ObjectId } = require('mongodb');
const { EJSON } = require('bson');
const { EJSON, Binary } = require('bson');
const { serializeJsTypesForJsonStringify, deserializeJsTypesFromJsonParse, getLogger } = require('dbgate-tools');
const createBulkInsertStream = require('./createBulkInsertStream');
const {
@@ -53,8 +53,18 @@ function findArrayResult(resValue) {
return null;
}
function BinData(_subType, base64) {
return Buffer.from(base64, 'base64');
// Mongo-shell-compatible BinData(subType, base64) helper. Accepts the subtype
// either as a number or as a hex string (optionally 0x-prefixed), validates it
// is an integer in 0..255, and wraps the decoded bytes in a BSON Binary so the
// subtype is preserved.
function BinData(subType, base64) {
  let resolved = subType;
  if (typeof subType === 'string') {
    // Strip an optional 0x/0X prefix, then interpret the rest as hex digits.
    const hexDigits = /^0x/i.test(subType) ? subType.slice(2) : subType;
    resolved = parseInt(hexDigits, 16);
  }
  // Number.isInteger also rejects NaN from an unparsable hex string.
  const isValidSubType = Number.isInteger(resolved) && resolved >= 0 && resolved <= 255;
  if (!isValidSubType) {
    throw new TypeError(`BinData subType must be an integer between 0 and 255, got: ${subType}`);
  }
  return new Binary(Buffer.from(base64, 'base64'), resolved);
}
async function getScriptableDb(dbhan) {
@@ -72,6 +72,8 @@ class Analyser extends DatabaseAnalyser {
...replacements,
$typeAggFunc: this.driver.dialect.stringAgg ? 'string_agg' : 'max',
$typeAggParam: this.driver.dialect.stringAgg ? ", '|'" : '',
$hashColumnAggTail: this.driver.dialect.stringAgg ? ", ',' ORDER BY a.attnum" : '',
$hashConstraintAggTail: this.driver.dialect.stringAgg ? ", ',' ORDER BY con.conname" : '',
$md5Function: this.dialect?.isFipsComplianceOn ? 'LENGTH' : 'MD5',
});
return query;
@@ -83,131 +85,92 @@ class Analyser extends DatabaseAnalyser {
}
async _runAnalysis() {
this.feedback({ analysingMessage: 'DBGM-00241 Loading tables' });
const tables = await this.analyserQuery('tableList', ['tables']);
const useInfoSchema = this.driver.__analyserInternals.useInfoSchemaRoutines;
const routinesQueryName = useInfoSchema ? 'routinesInfoSchema' : 'routines';
const proceduresParametersQueryName = useInfoSchema ? 'proceduresParametersInfoSchema' : 'proceduresParameters';
this.feedback({ analysingMessage: 'DBGM-00242 Loading columns' });
const columns = await this.analyserQuery('columns', ['tables', 'views']);
this.feedback({ analysingMessage: 'DBGM-00243 Loading primary keys' });
const pkColumns = await this.analyserQuery('primaryKeys', ['tables']);
let fkColumns = null;
this.feedback({ analysingMessage: 'DBGM-00244 Loading foreign key constraints' });
// const fk_tableConstraints = await this.analyserQuery('fk_tableConstraints', ['tables']);
this.feedback({ analysingMessage: 'DBGM-00245 Loading foreign key refs' });
const foreignKeys = await this.analyserQuery('foreignKeys', ['tables']);
this.feedback({ analysingMessage: 'DBGM-00246 Loading foreign key columns' });
const fk_keyColumnUsage = await this.analyserQuery('fk_keyColumnUsage', ['tables']);
// const cntKey = x => `${x.constraint_name}|${x.constraint_schema}`;
const fkRows = [];
// const fkConstraintDct = _.keyBy(fk_tableConstraints.rows, cntKey);
for (const fkRef of foreignKeys.rows) {
// const cntBase = fkConstraintDct[cntKey(fkRef)];
// const cntRef = fkConstraintDct[`${fkRef.unique_constraint_name}|${fkRef.unique_constraint_schema}`];
// if (!cntBase || !cntRef) continue;
const baseCols = _.sortBy(
fk_keyColumnUsage.rows.filter(
x =>
x.table_name == fkRef.table_name &&
x.constraint_name == fkRef.constraint_name &&
x.table_schema == fkRef.table_schema
),
'ordinal_position'
);
const refCols = _.sortBy(
fk_keyColumnUsage.rows.filter(
x =>
x.table_name == fkRef.ref_table_name &&
x.constraint_name == fkRef.unique_constraint_name &&
x.table_schema == fkRef.ref_table_schema
),
'ordinal_position'
);
if (baseCols.length != refCols.length) continue;
for (let i = 0; i < baseCols.length; i++) {
const baseCol = baseCols[i];
const refCol = refCols[i];
fkRows.push({
...fkRef,
pure_name: fkRef.table_name,
schema_name: fkRef.table_schema,
ref_table_name: fkRef.ref_table_name,
ref_schema_name: fkRef.ref_table_schema,
column_name: baseCol.column_name,
ref_column_name: refCol.column_name,
update_action: fkRef.update_action,
delete_action: fkRef.delete_action,
});
}
}
fkColumns = { rows: fkRows };
this.feedback({ analysingMessage: 'DBGM-00247 Loading views' });
const views = await this.analyserQuery('views', ['views']);
this.feedback({ analysingMessage: 'DBGM-00248 Loading materialized views' });
const matviews = this.driver.dialect.materializedViews ? await this.analyserQuery('matviews', ['matviews']) : null;
this.feedback({ analysingMessage: 'DBGM-00249 Loading materialized view columns' });
const matviewColumns = this.driver.dialect.materializedViews
? await this.analyserQuery('matviewColumns', ['matviews'])
: null;
this.feedback({ analysingMessage: 'DBGM-00250 Loading routines' });
const routines = await this.analyserQuery('routines', ['procedures', 'functions']);
this.feedback({ analysingMessage: 'DBGM-00251 Loading routine parameters' });
const routineParametersRows = await this.analyserQuery('proceduresParameters');
this.feedback({ analysingMessage: 'DBGM-00252 Loading indexes' });
const indexes = this.driver.__analyserInternals.skipIndexes
? { rows: [] }
: await this.analyserQuery('indexes', ['tables']);
this.feedback({ analysingMessage: 'DBGM-00253 Loading index columns' });
const indexcols = this.driver.__analyserInternals.skipIndexes
? { rows: [] }
: await this.analyserQuery('indexcols', ['tables']);
this.feedback({ analysingMessage: 'DBGM-00254 Loading unique names' });
const uniqueNames = await this.analyserQuery('uniqueNames', ['tables']);
// Run all independent queries in parallel
this.feedback({ analysingMessage: 'DBGM-00241 Loading database structure' });
const [
tables,
views,
columns,
pkColumns,
foreignKeys,
uniqueNames,
routines,
routineParametersRows,
indexes,
indexcols,
matviews,
matviewColumns,
triggers,
] = await Promise.all([
this.analyserQuery('tableList', ['tables']),
this.analyserQuery('views', ['views']),
this.analyserQuery('columns', ['tables', 'views']),
this.analyserQuery('primaryKeys', ['tables']),
this.analyserQuery('foreignKeys', ['tables']),
this.analyserQuery('uniqueNames', ['tables']),
this.analyserQuery(routinesQueryName, ['procedures', 'functions']),
this.analyserQuery(proceduresParametersQueryName),
this.driver.__analyserInternals.skipIndexes
? Promise.resolve({ rows: [] })
: this.analyserQuery('indexes', ['tables']),
this.driver.__analyserInternals.skipIndexes
? Promise.resolve({ rows: [] })
: this.analyserQuery('indexcols', ['tables']),
this.driver.dialect.materializedViews
? this.analyserQuery('matviews', ['matviews'])
: Promise.resolve(null),
this.driver.dialect.materializedViews
? this.analyserQuery('matviewColumns', ['matviews'])
: Promise.resolve(null),
this.analyserQuery('triggers'),
]);
// Load geometry/geography columns if the views exist (these are rare, so run after views are loaded)
let geometryColumns = { rows: [] };
if (views.rows.find(x => x.pure_name == 'geometry_columns' && x.schema_name == 'public')) {
this.feedback({ analysingMessage: 'DBGM-00255 Loading geometry columns' });
geometryColumns = await this.analyserQuery('geometryColumns', ['tables']);
}
let geographyColumns = { rows: [] };
if (views.rows.find(x => x.pure_name == 'geography_columns' && x.schema_name == 'public')) {
this.feedback({ analysingMessage: 'DBGM-00256 Loading geography columns' });
geographyColumns = await this.analyserQuery('geographyColumns', ['tables']);
const hasGeometry = views.rows.find(x => x.pure_name == 'geometry_columns' && x.schema_name == 'public');
const hasGeography = views.rows.find(x => x.pure_name == 'geography_columns' && x.schema_name == 'public');
if (hasGeometry || hasGeography) {
const [geomCols, geogCols] = await Promise.all([
hasGeometry
? this.analyserQuery('geometryColumns', ['tables'])
: Promise.resolve({ rows: [] }),
hasGeography
? this.analyserQuery('geographyColumns', ['tables'])
: Promise.resolve({ rows: [] }),
]);
geometryColumns = geomCols;
geographyColumns = geogCols;
}
this.feedback({ analysingMessage: 'DBGM-00257 Loading triggers' });
const triggers = await this.analyserQuery('triggers');
this.feedback({ analysingMessage: 'DBGM-00258 Finalizing DB structure' });
const columnColumnsMapped = fkColumns.rows.map(x => ({
pureName: x.pure_name,
schemaName: x.schema_name,
constraintSchema: x.constraint_schema,
// Pre-build lookup maps for O(1) access instead of O(n) scanning per table/view
const columnsByTable = _.groupBy(columns.rows, x => `${x.schema_name}.${x.pure_name}`);
const indexcolsByOidAttnum = _.keyBy(indexcols.rows, x => `${x.oid}_${x.attnum}`);
const uniqueNameSet = new Set(uniqueNames.rows.map(x => x.constraint_name));
const indexesByTable = _.groupBy(indexes.rows, x => `${x.schema_name}.${x.table_name}`);
const matviewColumnsByTable = matviewColumns
? _.groupBy(matviewColumns.rows, x => `${x.schema_name}.${x.pure_name}`)
: {};
const columnColumnsMapped = foreignKeys.rows.map(x => ({
pureName: x.table_name,
schemaName: x.table_schema,
constraintName: x.constraint_name,
columnName: x.column_name,
refColumnName: x.ref_column_name,
updateAction: x.update_action,
deleteAction: x.delete_action,
refTableName: x.ref_table_name,
refSchemaName: x.ref_schema_name,
refSchemaName: x.ref_table_schema,
}));
const fkByTable = _.groupBy(columnColumnsMapped, x => `${x.schemaName}.${x.pureName}`);
const pkColumnsMapped = pkColumns.rows.map(x => ({
pureName: x.pure_name,
schemaName: x.schema_name,
@@ -215,6 +178,7 @@ class Analyser extends DatabaseAnalyser {
constraintName: x.constraint_name,
columnName: x.column_name,
}));
const pkByTable = _.groupBy(pkColumnsMapped, x => `${x.schemaName}.${x.pureName}`);
const procedureParameters = routineParametersRows.rows
.filter(i => i.routine_type == 'PROCEDURE')
@@ -252,6 +216,7 @@ class Analyser extends DatabaseAnalyser {
const res = {
tables: tables.rows.map(table => {
const tableKey = `${table.schema_name}.${table.pure_name}`;
const newTable = {
pureName: table.pure_name,
schemaName: table.schema_name,
@@ -259,20 +224,16 @@ class Analyser extends DatabaseAnalyser {
objectId: `tables:${table.schema_name}.${table.pure_name}`,
contentHash: table.hash_code_columns ? `${table.hash_code_columns}-${table.hash_code_constraints}` : null,
};
const tableIndexes = indexesByTable[tableKey] || [];
return {
...newTable,
columns: columns.rows
.filter(col => col.pure_name == table.pure_name && col.schema_name == table.schema_name)
.map(col => getColumnInfo(col, newTable, geometryColumns, geographyColumns)),
primaryKey: DatabaseAnalyser.extractPrimaryKeys(newTable, pkColumnsMapped),
foreignKeys: DatabaseAnalyser.extractForeignKeys(newTable, columnColumnsMapped),
indexes: indexes.rows
.filter(
x =>
x.table_name == table.pure_name &&
x.schema_name == table.schema_name &&
!uniqueNames.rows.find(y => y.constraint_name == x.index_name)
)
columns: (columnsByTable[tableKey] || []).map(col =>
getColumnInfo(col, newTable, geometryColumns, geographyColumns)
),
primaryKey: DatabaseAnalyser.extractPrimaryKeys(newTable, pkByTable[tableKey] || []),
foreignKeys: DatabaseAnalyser.extractForeignKeys(newTable, fkByTable[tableKey] || []),
indexes: tableIndexes
.filter(x => !uniqueNameSet.has(x.index_name))
.map(idx => {
const indOptionSplit = idx.indoption.split(' ');
return {
@@ -281,7 +242,7 @@ class Analyser extends DatabaseAnalyser {
columns: _.compact(
idx.indkey
.split(' ')
.map(colid => indexcols.rows.find(col => col.oid == idx.oid && col.attnum == colid))
.map(colid => indexcolsByOidAttnum[`${idx.oid}_${colid}`])
.filter(col => col != null)
.map((col, colIndex) => ({
columnName: col.column_name,
@@ -290,19 +251,14 @@ class Analyser extends DatabaseAnalyser {
),
};
}),
uniques: indexes.rows
.filter(
x =>
x.table_name == table.pure_name &&
x.schema_name == table.schema_name &&
uniqueNames.rows.find(y => y.constraint_name == x.index_name)
)
uniques: tableIndexes
.filter(x => uniqueNameSet.has(x.index_name))
.map(idx => ({
constraintName: idx.index_name,
columns: _.compact(
idx.indkey
.split(' ')
.map(colid => indexcols.rows.find(col => col.oid == idx.oid && col.attnum == colid))
.map(colid => indexcolsByOidAttnum[`${idx.oid}_${colid}`])
.filter(col => col != null)
.map(col => ({
columnName: col.column_name,
@@ -317,9 +273,7 @@ class Analyser extends DatabaseAnalyser {
schemaName: view.schema_name,
contentHash: view.hash_code,
createSql: `CREATE VIEW "${view.schema_name}"."${view.pure_name}"\nAS\n${view.create_sql}`,
columns: columns.rows
.filter(col => col.pure_name == view.pure_name && col.schema_name == view.schema_name)
.map(col => getColumnInfo(col)),
columns: (columnsByTable[`${view.schema_name}.${view.pure_name}`] || []).map(col => getColumnInfo(col)),
})),
matviews: matviews
? matviews.rows.map(matview => ({
@@ -328,8 +282,7 @@ class Analyser extends DatabaseAnalyser {
schemaName: matview.schema_name,
contentHash: matview.hash_code,
createSql: `CREATE MATERIALIZED VIEW "${matview.schema_name}"."${matview.pure_name}"\nAS\n${matview.definition}`,
columns: matviewColumns.rows
.filter(col => col.pure_name == matview.pure_name && col.schema_name == matview.schema_name)
columns: (matviewColumnsByTable[`${matview.schema_name}.${matview.pure_name}`] || [])
.map(col => getColumnInfo(col)),
}))
: undefined,
@@ -396,14 +349,31 @@ class Analyser extends DatabaseAnalyser {
}
async _getFastSnapshot() {
const viewModificationsQueryData = await this.analyserQuery('viewModifications');
const matviewModificationsQueryData = this.driver.dialect.materializedViews
? await this.analyserQuery('matviewModifications')
: null;
const routineModificationsQueryData = await this.analyserQuery('routineModifications');
const useInfoSchema = this.driver.__analyserInternals.useInfoSchemaRoutines;
const routineModificationsQueryName = useInfoSchema ? 'routineModificationsInfoSchema' : 'routineModifications';
// Run all modification queries in parallel
const [
tableModificationsQueryData,
viewModificationsQueryData,
matviewModificationsQueryData,
routineModificationsQueryData,
] = await Promise.all([
this.analyserQuery('tableModifications'),
this.analyserQuery('viewModifications'),
this.driver.dialect.materializedViews
? this.analyserQuery('matviewModifications')
: Promise.resolve(null),
this.analyserQuery(routineModificationsQueryName),
]);
return {
tables: null,
tables: tableModificationsQueryData.rows.map(x => ({
objectId: `tables:${x.schema_name}.${x.pure_name}`,
pureName: x.pure_name,
schemaName: x.schema_name,
contentHash: `${x.hash_code_columns}-${x.hash_code_constraints}`,
})),
views: viewModificationsQueryData.rows.map(x => ({
objectId: `views:${x.schema_name}.${x.pure_name}`,
pureName: x.pure_name,
@@ -1,22 +1,38 @@
module.exports = `
select
table_schema as "schema_name",
table_name as "pure_name",
column_name as "column_name",
is_nullable as "is_nullable",
data_type as "data_type",
character_maximum_length as "char_max_length",
numeric_precision as "numeric_precision",
numeric_scale as "numeric_scale",
column_default as "default_value"
from information_schema.columns
where
table_schema !~ '^_timescaledb_'
and (
('tables:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION
or
('views:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION
)
and table_schema =SCHEMA_NAME_CONDITION
order by ordinal_position
SELECT
n.nspname AS "schema_name",
c.relname AS "pure_name",
a.attname AS "column_name",
CASE WHEN a.attnotnull THEN 'NO' ELSE 'YES' END AS "is_nullable",
format_type(a.atttypid, NULL) AS "data_type",
CASE
WHEN a.atttypmod > 0 AND t.typname IN ('varchar', 'bpchar', 'char') THEN a.atttypmod - 4
WHEN a.atttypmod > 0 AND t.typname IN ('bit', 'varbit') THEN a.atttypmod
ELSE NULL
END AS "char_max_length",
CASE
WHEN a.atttypmod > 0 AND t.typname = 'numeric' THEN ((a.atttypmod - 4) >> 16) & 65535
ELSE NULL
END AS "numeric_precision",
CASE
WHEN a.atttypmod > 0 AND t.typname = 'numeric' THEN (a.atttypmod - 4) & 65535
ELSE NULL
END AS "numeric_scale",
pg_get_expr(d.adbin, d.adrelid) AS "default_value"
FROM pg_catalog.pg_attribute a
JOIN pg_catalog.pg_class c ON c.oid = a.attrelid
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
JOIN pg_catalog.pg_type t ON t.oid = a.atttypid
LEFT JOIN pg_catalog.pg_attrdef d ON d.adrelid = a.attrelid AND d.adnum = a.attnum
WHERE a.attnum > 0
AND NOT a.attisdropped
AND c.relkind IN ('r', 'v', 'p', 'f')
AND n.nspname !~ '^_timescaledb_'
AND (
('tables:' || n.nspname || '.' || c.relname) =OBJECT_ID_CONDITION
OR
('views:' || n.nspname || '.' || c.relname) =OBJECT_ID_CONDITION
)
AND n.nspname =SCHEMA_NAME_CONDITION
ORDER BY a.attnum
`;
@@ -5,7 +5,8 @@ SELECT
con.conname AS constraint_name,
nsp2.nspname AS ref_table_schema,
rel2.relname AS ref_table_name,
conpk.conname AS unique_constraint_name,
att.attname AS column_name,
att2.attname AS ref_column_name,
CASE con.confupdtype
WHEN 'a' THEN 'NO ACTION'
WHEN 'r' THEN 'RESTRICT'
@@ -13,26 +14,26 @@ SELECT
WHEN 'n' THEN 'SET NULL'
WHEN 'd' THEN 'SET DEFAULT'
ELSE con.confupdtype::text
END AS update_action,
CASE con.confdeltype
END AS update_action,
CASE con.confdeltype
WHEN 'a' THEN 'NO ACTION'
WHEN 'r' THEN 'RESTRICT'
WHEN 'c' THEN 'CASCADE'
WHEN 'n' THEN 'SET NULL'
WHEN 'd' THEN 'SET DEFAULT'
ELSE con.confdeltype::text
END AS delete_action
END AS delete_action
FROM pg_constraint con
JOIN pg_class rel ON rel.oid = con.conrelid
JOIN pg_namespace nsp ON nsp.oid = rel.relnamespace
JOIN pg_class rel2 ON rel2.oid = con.confrelid
JOIN pg_namespace nsp2 ON nsp2.oid = rel2.relnamespace
JOIN pg_constraint conpk
ON conpk.conrelid = con.confrelid
AND conpk.conkey = con.confkey
AND conpk.contype IN ('p','u') -- 'p' = primary key, 'u' = unique constraint
WHERE con.contype = 'f' AND ('tables:' || nsp.nspname || '.' || rel.relname) =OBJECT_ID_CONDITION AND nsp.nspname =SCHEMA_NAME_CONDITION
JOIN LATERAL unnest(con.conkey, con.confkey) WITH ORDINALITY AS cols(attnum, ref_attnum, ordinal_position) ON TRUE
JOIN pg_attribute att ON att.attrelid = con.conrelid AND att.attnum = cols.attnum
JOIN pg_attribute att2 ON att2.attrelid = con.confrelid AND att2.attnum = cols.ref_attnum
WHERE con.contype = 'f'
AND ('tables:' || nsp.nspname || '.' || rel.relname) =OBJECT_ID_CONDITION
AND nsp.nspname =SCHEMA_NAME_CONDITION
ORDER BY con.conname, cols.ordinal_position
;
`;
@@ -19,15 +19,16 @@ const triggers = require('./triggers');
const listDatabases = require('./listDatabases');
const listVariables = require('./listVariables');
const listProcesses = require('./listProcesses');
const fk_keyColumnUsage = require('./fk_key_column_usage');
const routinesInfoSchema = require('./routinesInfoSchema');
const proceduresParametersInfoSchema = require('./proceduresParametersInfoSchema');
const routineModificationsInfoSchema = require('./routineModificationsInfoSchema');
const tableModifications = require('./tableModifications');
module.exports = {
columns,
tableList,
viewModifications,
primaryKeys,
fk_keyColumnUsage,
foreignKeys,
views,
routines,
@@ -45,4 +46,8 @@ module.exports = {
listDatabases,
listVariables,
listProcesses,
routinesInfoSchema,
proceduresParametersInfoSchema,
routineModificationsInfoSchema,
tableModifications,
};
@@ -1,31 +1,34 @@
module.exports = `
SELECT
proc.specific_schema AS schema_name,
proc.routine_name AS pure_name,
proc.routine_type as routine_type,
args.parameter_name AS parameter_name,
args.parameter_mode,
args.data_type AS data_type,
args.ordinal_position AS parameter_index,
args.parameter_mode AS parameter_mode
FROM
information_schema.routines proc
LEFT JOIN
information_schema.parameters args
ON proc.specific_schema = args.specific_schema
AND proc.specific_name = args.specific_name
WHERE
proc.specific_schema NOT IN ('pg_catalog', 'information_schema') -- Exclude system schemas
AND args.parameter_name IS NOT NULL
AND proc.routine_type IN ('PROCEDURE', 'FUNCTION') -- Filter for procedures
AND proc.specific_schema !~ '^_timescaledb_'
AND proc.specific_schema =SCHEMA_NAME_CONDITION
SELECT
n.nspname AS "schema_name",
p.proname AS "pure_name",
CASE p.prokind WHEN 'p' THEN 'PROCEDURE' ELSE 'FUNCTION' END AS "routine_type",
a.parameter_name AS "parameter_name",
CASE (p.proargmodes::text[])[a.ordinal_position]
WHEN 'o' THEN 'OUT'
WHEN 'b' THEN 'INOUT'
WHEN 'v' THEN 'VARIADIC'
WHEN 't' THEN 'TABLE'
ELSE 'IN'
END AS "parameter_mode",
pg_catalog.format_type(a.parameter_type, NULL) AS "data_type",
a.ordinal_position AS "parameter_index"
FROM pg_catalog.pg_proc p
JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace
CROSS JOIN LATERAL unnest(
COALESCE(p.proallargtypes, p.proargtypes::oid[]),
p.proargnames
) WITH ORDINALITY AS a(parameter_type, parameter_name, ordinal_position)
WHERE p.prokind IN ('f', 'p')
AND p.proargnames IS NOT NULL
AND a.parameter_name IS NOT NULL
AND n.nspname !~ '^_timescaledb_'
AND n.nspname NOT IN ('pg_catalog', 'information_schema')
AND n.nspname =SCHEMA_NAME_CONDITION
AND (
(routine_type = 'PROCEDURE' AND ('procedures:' || proc.specific_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
OR
(routine_type = 'FUNCTION' AND ('functions:' || proc.specific_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
(p.prokind = 'p' AND ('procedures:' || n.nspname || '.' || p.proname) =OBJECT_ID_CONDITION)
OR
(p.prokind != 'p' AND ('functions:' || n.nspname || '.' || p.proname) =OBJECT_ID_CONDITION)
)
ORDER BY
schema_name,
args.ordinal_position;
ORDER BY n.nspname, a.ordinal_position
`;
@@ -0,0 +1,31 @@
// Lists parameters of stored procedures and functions via information_schema
// (portable fallback used when __analyserInternals.useInfoSchemaRoutines is set,
// e.g. for Amazon Redshift, instead of the pg_catalog-based query).
// The tokens =SCHEMA_NAME_CONDITION and =OBJECT_ID_CONDITION are presumably
// placeholder markers substituted by the analyser before execution — confirm
// against analyserQuery's replacement logic.
// NOTE(review): the LEFT JOIN on information_schema.parameters is effectively
// an INNER JOIN because of the `args.parameter_name IS NOT NULL` filter —
// routines without named parameters are excluded; verify this is intended.
module.exports = `
SELECT
  proc.specific_schema AS schema_name,
  proc.routine_name AS pure_name,
  proc.routine_type as routine_type,
  args.parameter_name AS parameter_name,
  args.parameter_mode,
  args.data_type AS data_type,
  args.ordinal_position AS parameter_index,
  args.parameter_mode AS parameter_mode
FROM
  information_schema.routines proc
LEFT JOIN
  information_schema.parameters args
  ON proc.specific_schema = args.specific_schema
  AND proc.specific_name = args.specific_name
WHERE
  proc.specific_schema NOT IN ('pg_catalog', 'information_schema')
  AND args.parameter_name IS NOT NULL
  AND proc.routine_type IN ('PROCEDURE', 'FUNCTION')
  AND proc.specific_schema !~ '^_timescaledb_'
  AND proc.specific_schema =SCHEMA_NAME_CONDITION
  AND (
    (routine_type = 'PROCEDURE' AND ('procedures:' || proc.specific_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
    OR
    (routine_type = 'FUNCTION' AND ('functions:' || proc.specific_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
  )
ORDER BY
  schema_name,
  args.ordinal_position;
`;
@@ -1,10 +1,13 @@
module.exports = `
select
routine_name as "pure_name",
routine_schema as "schema_name",
$md5Function(routine_definition) as "hash_code",
routine_type as "object_type"
from
information_schema.routines where routine_schema !~ '^_timescaledb_'
and routine_type in ('PROCEDURE', 'FUNCTION') and routine_schema =SCHEMA_NAME_CONDITION
SELECT
p.proname AS "pure_name",
n.nspname AS "schema_name",
$md5Function(p.prosrc) AS "hash_code",
CASE p.prokind WHEN 'p' THEN 'PROCEDURE' ELSE 'FUNCTION' END AS "object_type"
FROM pg_catalog.pg_proc p
JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace
WHERE p.prokind IN ('f', 'p')
AND n.nspname !~ '^_timescaledb_'
AND n.nspname NOT IN ('pg_catalog', 'information_schema')
AND n.nspname =SCHEMA_NAME_CONDITION
`;
@@ -0,0 +1,10 @@
// Fast-snapshot hash of routine definitions via information_schema
// (fallback used when __analyserInternals.useInfoSchemaRoutines is set).
// $md5Function is substituted by the analyser (MD5, or LENGTH when FIPS
// compliance is on); =SCHEMA_NAME_CONDITION is presumably a placeholder
// replaced before execution — confirm against analyserQuery.
module.exports = `
select
routine_name as "pure_name",
routine_schema as "schema_name",
$md5Function(routine_definition) as "hash_code",
routine_type as "object_type"
from
information_schema.routines where routine_schema !~ '^_timescaledb_'
and routine_type in ('PROCEDURE', 'FUNCTION') and routine_schema =SCHEMA_NAME_CONDITION
`;
@@ -1,19 +1,23 @@
module.exports = `
select
routine_name as "pure_name",
routine_schema as "schema_name",
max(routine_definition) as "definition",
max($md5Function(routine_definition)) as "hash_code",
routine_type as "object_type",
$typeAggFunc(data_type $typeAggParam) as "data_type",
max(external_language) as "language"
from
information_schema.routines where routine_schema !~ '^_timescaledb_'
and routine_schema =SCHEMA_NAME_CONDITION
and (
(routine_type = 'PROCEDURE' and ('procedures:' || routine_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
or
(routine_type = 'FUNCTION' and ('functions:' || routine_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
)
group by routine_name, routine_schema, routine_type
SELECT
p.proname AS "pure_name",
n.nspname AS "schema_name",
max(p.prosrc) AS "definition",
max($md5Function(p.prosrc)) AS "hash_code",
CASE max(p.prokind) WHEN 'p' THEN 'PROCEDURE' ELSE 'FUNCTION' END AS "object_type",
$typeAggFunc(pg_catalog.format_type(p.prorettype, NULL) $typeAggParam) AS "data_type",
max(l.lanname) AS "language"
FROM pg_catalog.pg_proc p
JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace
JOIN pg_catalog.pg_language l ON l.oid = p.prolang
WHERE p.prokind IN ('f', 'p')
AND n.nspname !~ '^_timescaledb_'
AND n.nspname NOT IN ('pg_catalog', 'information_schema')
AND n.nspname =SCHEMA_NAME_CONDITION
AND (
(p.prokind = 'p' AND ('procedures:' || n.nspname || '.' || p.proname) =OBJECT_ID_CONDITION)
OR
(p.prokind != 'p' AND ('functions:' || n.nspname || '.' || p.proname) =OBJECT_ID_CONDITION)
)
GROUP BY p.proname, n.nspname, p.prokind
`;
@@ -0,0 +1,19 @@
// Lists procedures and functions with definition, hash and return type via
// information_schema (fallback used when __analyserInternals.useInfoSchemaRoutines
// is set, e.g. for Amazon Redshift).
// $md5Function / $typeAggFunc / $typeAggParam are substituted by the analyser
// ($typeAggFunc is string_agg when the dialect supports it, otherwise max);
// =SCHEMA_NAME_CONDITION and =OBJECT_ID_CONDITION are presumably placeholder
// markers replaced before execution — confirm against analyserQuery.
// The group by collapses overloaded routines sharing a name into one row.
module.exports = `
select
routine_name as "pure_name",
routine_schema as "schema_name",
max(routine_definition) as "definition",
max($md5Function(routine_definition)) as "hash_code",
routine_type as "object_type",
$typeAggFunc(data_type $typeAggParam) as "data_type",
max(external_language) as "language"
from
information_schema.routines where routine_schema !~ '^_timescaledb_'
and routine_schema =SCHEMA_NAME_CONDITION
and (
(routine_type = 'PROCEDURE' and ('procedures:' || routine_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
or
(routine_type = 'FUNCTION' and ('functions:' || routine_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
)
group by routine_name, routine_schema, routine_type
`;
@@ -1,10 +1,35 @@
module.exports = `
select infoTables.table_schema as "schema_name", infoTables.table_name as "pure_name",
pg_relation_size('"'||infoTables.table_schema||'"."'||infoTables.table_name||'"') as "size_bytes"
from information_schema.tables infoTables
where infoTables.table_type not like '%VIEW%'
and ('tables:' || infoTables.table_schema || '.' || infoTables.table_name) =OBJECT_ID_CONDITION
and infoTables.table_schema <> 'pg_internal'
and infoTables.table_schema !~ '^_timescaledb_'
and infoTables.table_schema =SCHEMA_NAME_CONDITION
SELECT
n.nspname AS "schema_name",
c.relname AS "pure_name",
pg_relation_size(c.oid) AS "size_bytes",
$md5Function(
COALESCE(
(SELECT $typeAggFunc(
a.attname || ':' || pg_catalog.format_type(a.atttypid, a.atttypmod) || ':' || a.attnotnull::text
$hashColumnAggTail
)
FROM pg_catalog.pg_attribute a
WHERE a.attrelid = c.oid AND a.attnum > 0 AND NOT a.attisdropped),
''
)
) AS "hash_code_columns",
$md5Function(
COALESCE(
(SELECT $typeAggFunc(
con.conname || ':' || con.contype::text
$hashConstraintAggTail
)
FROM pg_catalog.pg_constraint con
WHERE con.conrelid = c.oid),
''
)
) AS "hash_code_constraints"
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind IN ('r', 'p', 'f')
AND ('tables:' || n.nspname || '.' || c.relname) =OBJECT_ID_CONDITION
AND n.nspname <> 'pg_internal'
AND n.nspname !~ '^_timescaledb_'
AND n.nspname =SCHEMA_NAME_CONDITION
`;
@@ -0,0 +1,34 @@
// Fast-snapshot change-detection query for tables: computes per-table hash
// codes over column definitions and over constraints, so _getFastSnapshot can
// build contentHash ("<hash_code_columns>-<hash_code_constraints>") without a
// full re-analysis.
// relkind filter: 'r' = ordinary table, 'p' = partitioned table, 'f' = foreign table.
// $md5Function / $typeAggFunc / $hashColumnAggTail / $hashConstraintAggTail are
// substituted by the analyser (the *AggTail tokens add a ',' delimiter and a
// deterministic ORDER BY when the dialect supports string_agg);
// =SCHEMA_NAME_CONDITION is presumably a placeholder replaced before execution
// — confirm against analyserQuery.
// COALESCE(..., '') keeps the hash well-defined for tables with no columns or
// no constraints (the aggregate subquery would otherwise yield NULL).
module.exports = `
SELECT
  n.nspname AS "schema_name",
  c.relname AS "pure_name",
  $md5Function(
    COALESCE(
      (SELECT $typeAggFunc(
          a.attname || ':' || pg_catalog.format_type(a.atttypid, a.atttypmod) || ':' || a.attnotnull::text
          $hashColumnAggTail
        )
        FROM pg_catalog.pg_attribute a
        WHERE a.attrelid = c.oid AND a.attnum > 0 AND NOT a.attisdropped),
      ''
    )
  ) AS "hash_code_columns",
  $md5Function(
    COALESCE(
      (SELECT $typeAggFunc(
          con.conname || ':' || con.contype::text
          $hashConstraintAggTail
        )
        FROM pg_catalog.pg_constraint con
        WHERE con.conrelid = c.oid),
      ''
    )
  ) AS "hash_code_constraints"
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind IN ('r', 'p', 'f')
  AND n.nspname <> 'pg_internal'
  AND n.nspname !~ '^_timescaledb_'
  AND n.nspname NOT IN ('pg_catalog', 'information_schema')
  AND n.nspname =SCHEMA_NAME_CONDITION
`;
@@ -1,8 +1,13 @@
module.exports = `
select
table_name as "pure_name",
table_schema as "schema_name",
$md5Function(view_definition) as "hash_code"
from
information_schema.views where table_schema != 'information_schema' and table_schema != 'pg_catalog' and table_schema !~ '^_timescaledb_' and table_schema =SCHEMA_NAME_CONDITION
SELECT
c.relname AS "pure_name",
n.nspname AS "schema_name",
$md5Function(pg_get_viewdef(c.oid, true)) AS "hash_code"
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind = 'v'
AND n.nspname != 'information_schema'
AND n.nspname != 'pg_catalog'
AND n.nspname !~ '^_timescaledb_'
AND n.nspname =SCHEMA_NAME_CONDITION
`;
@@ -1,11 +1,20 @@
module.exports = `
select
table_name as "pure_name",
table_schema as "schema_name",
view_definition as "create_sql",
$md5Function(view_definition) as "hash_code"
from
information_schema.views
where table_schema !~ '^_timescaledb_' and table_schema =SCHEMA_NAME_CONDITION
and ('views:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION
WITH view_defs AS (
SELECT
c.relname AS pure_name,
n.nspname AS schema_name,
pg_get_viewdef(c.oid, true) AS viewdef
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind = 'v'
AND n.nspname !~ '^_timescaledb_'
AND n.nspname =SCHEMA_NAME_CONDITION
AND ('views:' || n.nspname || '.' || c.relname) =OBJECT_ID_CONDITION
)
SELECT
pure_name AS "pure_name",
schema_name AS "schema_name",
viewdef AS "create_sql",
$md5Function(viewdef) AS "hash_code"
FROM view_defs
`;
@@ -418,6 +418,7 @@ const redshiftDriver = {
},
__analyserInternals: {
skipIndexes: true,
useInfoSchemaRoutines: true,
},
engine: 'redshift@dbgate-plugin-postgres',
title: 'Amazon Redshift',
+2
View File
@@ -1195,6 +1195,7 @@
"privateCloudWidget.addNewConnectionOrFile": "Přidat nové připojení nebo soubor",
"privateCloudWidget.addNewFolder": "Přidat novou složku",
"privateCloudWidget.administrateAccess": "Spravovat přístup",
"privateCloudWidget.cloudUnavailable": "DbGate Cloud je dočasně nedostupný",
"privateCloudWidget.createConnection": "Vytvořit připojení na DbGate Cloud",
"privateCloudWidget.createSharedFolder": "Vytvořit sdílenou složku",
"privateCloudWidget.deleteFolder": "Smazat složku",
@@ -1207,6 +1208,7 @@
"privateCloudWidget.renameFolder": "Přejmenovat složku",
"privateCloudWidget.searchPlaceholder": "Hledat cloudová připojení a soubory",
"privateCloudWidget.yourInviteLink": "Váš pozvánkový odkaz (ve tvaru dbgate://folder/xxx)",
"publicCloudWidget.cloudUnavailable": "DbGate Cloud je dočasně nedostupný",
"publicCloudWidget.noFilesFound": "Pro vaši konfiguraci nebyly nalezeny žádné soubory",
"publicCloudWidget.onlyRelevantFilesListed": "Jsou zobrazeny pouze soubory relevantní pro vaše připojení, platformu a edici DbGate. Nejprve prosím definujte připojení.",
"publicCloudWidget.publicKnowledgeBase": "Veřejná znalostní báze",
+2
View File
@@ -1195,6 +1195,7 @@
"privateCloudWidget.addNewConnectionOrFile": "Neue Verbindung oder Datei hinzufügen",
"privateCloudWidget.addNewFolder": "Neuen Ordner hinzufügen",
"privateCloudWidget.administrateAccess": "Zugriff verwalten",
"privateCloudWidget.cloudUnavailable": "DbGate Cloud ist vorübergehend nicht verfügbar",
"privateCloudWidget.createConnection": "Verbindung in DbGate Cloud erstellen",
"privateCloudWidget.createSharedFolder": "Freigegebenen Ordner erstellen",
"privateCloudWidget.deleteFolder": "Ordner löschen",
@@ -1207,6 +1208,7 @@
"privateCloudWidget.renameFolder": "Ordner umbenennen",
"privateCloudWidget.searchPlaceholder": "Cloud-Verbindungen und Dateien suchen",
"privateCloudWidget.yourInviteLink": "Ihr Einladungslink (im Format dbgate://folder/xxx)",
"publicCloudWidget.cloudUnavailable": "DbGate Cloud ist vorübergehend nicht verfügbar",
"publicCloudWidget.noFilesFound": "Keine Dateien für Ihre Konfiguration gefunden",
"publicCloudWidget.onlyRelevantFilesListed": "Es werden nur Dateien aufgelistet, die für Ihre Verbindungen, Plattform und DbGate-Edition relevant sind. Bitte definieren Sie zuerst Verbindungen.",
"publicCloudWidget.publicKnowledgeBase": "Öffentliche Wissensdatenbank",
+2
View File
@@ -1195,6 +1195,7 @@
"privateCloudWidget.addNewConnectionOrFile": "Add new connection or file",
"privateCloudWidget.addNewFolder": "Add new folder",
"privateCloudWidget.administrateAccess": "Administrate access",
"privateCloudWidget.cloudUnavailable": "DbGate Cloud is temporarily unavailable",
"privateCloudWidget.createConnection": "Create connection on DbGate Cloud",
"privateCloudWidget.createSharedFolder": "Create shared folder",
"privateCloudWidget.deleteFolder": "Delete folder",
@@ -1207,6 +1208,7 @@
"privateCloudWidget.renameFolder": "Rename folder",
"privateCloudWidget.searchPlaceholder": "Search cloud connections and files",
"privateCloudWidget.yourInviteLink": "Your invite link (in form dbgate://folder/xxx)",
"publicCloudWidget.cloudUnavailable": "DbGate Cloud is temporarily unavailable",
"publicCloudWidget.noFilesFound": "No files found for your configuration",
"publicCloudWidget.onlyRelevantFilesListed": "Only files relevant for your connections, platform and DbGate edition are listed. Please define connections at first.",
"publicCloudWidget.publicKnowledgeBase": "Public Knowledge Base",
+2
View File
@@ -1195,6 +1195,7 @@
"privateCloudWidget.addNewConnectionOrFile": "Agregar nueva conexión o archivo",
"privateCloudWidget.addNewFolder": "Agregar nueva carpeta",
"privateCloudWidget.administrateAccess": "Administrar acceso",
"privateCloudWidget.cloudUnavailable": "DbGate Cloud está temporalmente no disponible",
"privateCloudWidget.createConnection": "Crear conexión en DbGate Cloud",
"privateCloudWidget.createSharedFolder": "Crear carpeta compartida",
"privateCloudWidget.deleteFolder": "Eliminar carpeta",
@@ -1207,6 +1208,7 @@
"privateCloudWidget.renameFolder": "Renombrar carpeta",
"privateCloudWidget.searchPlaceholder": "Buscar conexiones y archivos en la nube",
"privateCloudWidget.yourInviteLink": "Su enlace de invitación (en forma dbgate://folder/xxx)",
"publicCloudWidget.cloudUnavailable": "DbGate Cloud está temporalmente no disponible",
"publicCloudWidget.noFilesFound": "No se encontraron archivos para su configuración",
"publicCloudWidget.onlyRelevantFilesListed": "Solo se listan archivos relevantes para sus conexiones, plataforma y edición de DbGate. Defina primero las conexiones.",
"publicCloudWidget.publicKnowledgeBase": "Base de conocimiento pública",
+2
View File
@@ -1195,6 +1195,7 @@
"privateCloudWidget.addNewConnectionOrFile": "Ajouter une nouvelle connexion ou un fichier",
"privateCloudWidget.addNewFolder": "Ajouter un nouveau dossier",
"privateCloudWidget.administrateAccess": "Gérer les accès",
"privateCloudWidget.cloudUnavailable": "DbGate Cloud est temporairement indisponible",
"privateCloudWidget.createConnection": "Créer une connexion sur DbGate Cloud",
"privateCloudWidget.createSharedFolder": "Créer un dossier partagé",
"privateCloudWidget.deleteFolder": "Supprimer le dossier",
@@ -1207,6 +1208,7 @@
"privateCloudWidget.renameFolder": "Renommer le dossier",
"privateCloudWidget.searchPlaceholder": "Rechercher des connexions et fichiers cloud",
"privateCloudWidget.yourInviteLink": "Votre lien d'invitation (sous la forme dbgate://folder/xxx)",
"publicCloudWidget.cloudUnavailable": "DbGate Cloud est temporairement indisponible",
"publicCloudWidget.noFilesFound": "Aucun fichier trouvé pour votre configuration",
"publicCloudWidget.onlyRelevantFilesListed": "Seuls les fichiers pertinents pour vos connexions, votre plateforme et votre édition de DbGate sont listés. Veuillez d'abord définir des connexions.",
"publicCloudWidget.publicKnowledgeBase": "Base de connaissances publique",
+2
View File
@@ -1195,6 +1195,7 @@
"privateCloudWidget.addNewConnectionOrFile": "Aggiungi nuova connessione o file",
"privateCloudWidget.addNewFolder": "Aggiungi nuova cartella",
"privateCloudWidget.administrateAccess": "Amministra accesso",
"privateCloudWidget.cloudUnavailable": "DbGate Cloud è temporaneamente non disponibile",
"privateCloudWidget.createConnection": "Crea connessione su DbGate Cloud",
"privateCloudWidget.createSharedFolder": "Crea cartella condivisa",
"privateCloudWidget.deleteFolder": "Elimina cartella",
@@ -1207,6 +1208,7 @@
"privateCloudWidget.renameFolder": "Rinomina cartella",
"privateCloudWidget.searchPlaceholder": "Cerca connessioni e file cloud",
"privateCloudWidget.yourInviteLink": "Il tuo link di invito (nel formato dbgate://folder/xxx)",
"publicCloudWidget.cloudUnavailable": "DbGate Cloud è temporaneamente non disponibile",
"publicCloudWidget.noFilesFound": "Nessun file trovato per la tua configurazione",
"publicCloudWidget.onlyRelevantFilesListed": "Sono elencati solo i file rilevanti per le tue connessioni, piattaforma ed edizione DbGate. Definisci prima le connessioni.",
"publicCloudWidget.publicKnowledgeBase": "Knowledge Base pubblica",
+2
View File
@@ -1195,6 +1195,7 @@
"privateCloudWidget.addNewConnectionOrFile": "新しい接続またはファイルを追加",
"privateCloudWidget.addNewFolder": "新しいフォルダーを追加",
"privateCloudWidget.administrateAccess": "アクセスを管理",
"privateCloudWidget.cloudUnavailable": "DbGateクラウドは一時的に利用できません",
"privateCloudWidget.createConnection": "DbGateクラウドに接続を作成",
"privateCloudWidget.createSharedFolder": "共有フォルダーを作成",
"privateCloudWidget.deleteFolder": "フォルダーを削除",
@@ -1207,6 +1208,7 @@
"privateCloudWidget.renameFolder": "フォルダー名を変更",
"privateCloudWidget.searchPlaceholder": "クラウド接続とファイルを検索",
"privateCloudWidget.yourInviteLink": "招待リンク(dbgate://folder/xxx の形式)",
"publicCloudWidget.cloudUnavailable": "DbGateクラウドは一時的に利用できません",
"publicCloudWidget.noFilesFound": "設定に該当するファイルが見つかりません",
"publicCloudWidget.onlyRelevantFilesListed": "接続、プラットフォーム、DbGateエディションに関連するファイルのみが一覧表示されます。まず接続を定義してください。",
"publicCloudWidget.publicKnowledgeBase": "公開ナレッジベース",
+2
View File
@@ -1195,6 +1195,7 @@
"privateCloudWidget.addNewConnectionOrFile": "새 연결 또는 파일 추가",
"privateCloudWidget.addNewFolder": "새 폴더 추가",
"privateCloudWidget.administrateAccess": "접근 권한 관리",
"privateCloudWidget.cloudUnavailable": "DbGate 클라우드는 일시적으로 사용할 수 없습니다",
"privateCloudWidget.createConnection": "DbGate 클라우드에 연결 생성",
"privateCloudWidget.createSharedFolder": "공유 폴더 생성",
"privateCloudWidget.deleteFolder": "폴더 삭제",
@@ -1207,6 +1208,7 @@
"privateCloudWidget.renameFolder": "폴더 이름 변경",
"privateCloudWidget.searchPlaceholder": "클라우드 연결 및 파일 검색",
"privateCloudWidget.yourInviteLink": "초대 링크(dbgate://folder/xxx 형식)",
"publicCloudWidget.cloudUnavailable": "DbGate 클라우드는 일시적으로 사용할 수 없습니다",
"publicCloudWidget.noFilesFound": "구성에 해당하는 파일을 찾을 수 없습니다",
"publicCloudWidget.onlyRelevantFilesListed": "연결, 플랫폼 및 DbGate 에디션과 관련된 파일만 표시됩니다. 먼저 연결을 정의하세요.",
"publicCloudWidget.publicKnowledgeBase": "공개 지식 베이스",
+2
View File
@@ -1195,6 +1195,7 @@
"privateCloudWidget.addNewConnectionOrFile": "Adicionar nova conexão ou arquivo",
"privateCloudWidget.addNewFolder": "Adicionar nova pasta",
"privateCloudWidget.administrateAccess": "Administrar acesso",
"privateCloudWidget.cloudUnavailable": "DbGate Cloud está temporariamente indisponível",
"privateCloudWidget.createConnection": "Criar conexão no DbGate Cloud",
"privateCloudWidget.createSharedFolder": "Criar pasta compartilhada",
"privateCloudWidget.deleteFolder": "Excluir pasta",
@@ -1207,6 +1208,7 @@
"privateCloudWidget.renameFolder": "Renomear pasta",
"privateCloudWidget.searchPlaceholder": "Pesquisar conexões e arquivos na nuvem",
"privateCloudWidget.yourInviteLink": "Seu link de convite (no formato dbgate://folder/xxx)",
"publicCloudWidget.cloudUnavailable": "DbGate Cloud está temporariamente indisponível",
"publicCloudWidget.noFilesFound": "Nenhum arquivo encontrado para sua configuração",
"publicCloudWidget.onlyRelevantFilesListed": "Apenas arquivos relevantes para suas conexões, plataforma e edição do DbGate são listados. Defina conexões primeiro.",
"publicCloudWidget.publicKnowledgeBase": "Base de Conhecimento Pública",
+2
View File
@@ -1195,6 +1195,7 @@
"privateCloudWidget.addNewConnectionOrFile": "Pridať nové pripojenie alebo súbor",
"privateCloudWidget.addNewFolder": "Pridať nový priečinok",
"privateCloudWidget.administrateAccess": "Spravovať prístup",
"privateCloudWidget.cloudUnavailable": "DbGate Cloud je dočasne nedostupný",
"privateCloudWidget.createConnection": "Vytvoriť pripojenie na DbGate Cloud",
"privateCloudWidget.createSharedFolder": "Vytvoriť zdieľaný priečinok",
"privateCloudWidget.deleteFolder": "Odstrániť priečinok",
@@ -1207,6 +1208,7 @@
"privateCloudWidget.renameFolder": "Premenovať priečinok",
"privateCloudWidget.searchPlaceholder": "Hľadať cloud pripojenia a súbory",
"privateCloudWidget.yourInviteLink": "Váš pozývací odkaz (vo forme dbgate://folder/xxx)",
"publicCloudWidget.cloudUnavailable": "DbGate Cloud je dočasne nedostupný",
"publicCloudWidget.noFilesFound": "Pre vašu konfiguráciu neboli nájdené žiadne súbory",
"publicCloudWidget.onlyRelevantFilesListed": "Sú zobrazené iba súbory relevantné pre vaše pripojenia, platformu a edíciu DbGate. Najprv definujte pripojenia.",
"publicCloudWidget.publicKnowledgeBase": "Verejná znalostná báza",
+2
View File
@@ -1195,6 +1195,7 @@
"privateCloudWidget.addNewConnectionOrFile": "添加新连接或文件",
"privateCloudWidget.addNewFolder": "添加新文件夹",
"privateCloudWidget.administrateAccess": "管理访问权限",
"privateCloudWidget.cloudUnavailable": "DbGate 云暂时不可用",
"privateCloudWidget.createConnection": "在 DbGate 云上创建连接",
"privateCloudWidget.createSharedFolder": "创建共享文件夹",
"privateCloudWidget.deleteFolder": "删除文件夹",
@@ -1207,6 +1208,7 @@
"privateCloudWidget.renameFolder": "重命名文件夹",
"privateCloudWidget.searchPlaceholder": "搜索云连接和文件",
"privateCloudWidget.yourInviteLink": "您的邀请链接(格式为 dbgate://folder/xxx",
"publicCloudWidget.cloudUnavailable": "DbGate 云暂时不可用",
"publicCloudWidget.noFilesFound": "未找到适合您配置的文件",
"publicCloudWidget.onlyRelevantFilesListed": "仅列出与您的连接、平台和 DbGate 版本相关的文件。请先定义连接。",
"publicCloudWidget.publicKnowledgeBase": "公共知识库",
+7 -3
View File
@@ -19,7 +19,6 @@ permissions:
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
@@ -52,7 +51,7 @@ jobs:
- name: yarn install
run: |
yarn install
- name: setCurrentVersion
run: |
yarn setCurrentVersion
@@ -79,7 +78,12 @@ jobs:
working-directory: packages/sqltree
run: |
npm publish --tag "$NPM_TAG"
- name: Publish rest
working-directory: packages/rest
run: |
npm publish --tag "$NPM_TAG"
- name: Publish api
working-directory: packages/api
run: |
+1 -1
View File
@@ -7,7 +7,7 @@ checkout-and-merge-pro:
repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro
ref: 5baa88d0ad253537298e911868579bae0835888d
ref: 6b5e2ff831db9baedb2a43862daa4247810b15de
- name: Merge dbgate/dbgate-pro
run: |
mkdir ../dbgate-pro
+20
View File
@@ -26,30 +26,37 @@ jobs:
with:
fetch-depth: 1
- _include: checkout-and-merge-pro
- name: yarn install
run: |
cd ../dbgate-merged
yarn install
- name: Integration tests
run: |
cd ../dbgate-merged
cd integration-tests
yarn test:ci
- name: Filter parser tests
if: always()
run: |
cd ../dbgate-merged
cd packages/filterparser
yarn test:ci
- name: Datalib (perspective) tests
if: always()
run: |
cd ../dbgate-merged
cd packages/datalib
yarn test:ci
- name: Tools tests
if: always()
run: |
cd ../dbgate-merged
cd packages/tools
yarn test:ci
@@ -138,3 +145,16 @@ jobs:
FIREBIRD_USE_LEGACY_AUTH: true
ports:
- '3050:3050'
mongodb:
image: mongo:4.0.12
ports:
- '27017:27017'
volumes:
- mongo-data:/data/db
- mongo-config:/data/configdb
dynamodb:
image: amazon/dynamodb-local
ports:
- '8000:8000'
+65 -1675
View File
File diff suppressed because it is too large Load Diff