Compare commits
246 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f2d15e2f84 | ||
|
|
c4ab06a14b | ||
|
|
6729317fcb | ||
|
|
3533683a32 | ||
|
|
d7ceb297e9 | ||
|
|
06e98cff9f | ||
|
|
c96cb08cfd | ||
|
|
424aff5d93 | ||
|
|
644a35d8c3 | ||
|
|
fb3af22302 | ||
|
|
556a35f4ba | ||
|
|
5862a2cdc4 | ||
|
|
eef3195ee1 | ||
|
|
ea2996c9b3 | ||
|
|
88cf6d35ed | ||
|
|
d3cfc44fd9 | ||
|
|
994195667d | ||
|
|
a22320e141 | ||
|
|
e23e749cc5 | ||
|
|
3cdba4339f | ||
|
|
7d1c0c5c18 | ||
|
|
286cac066c | ||
|
|
6ed3eaa896 | ||
|
|
5ec39054a3 | ||
|
|
3009724a82 | ||
|
|
8ab8034060 | ||
|
|
e14165c403 | ||
|
|
1f4a93f1d5 | ||
|
|
3771134b1c | ||
|
|
e2ee1f7561 | ||
|
|
b92e28695e | ||
|
|
4a7d45e4d0 | ||
|
|
9a2520a10a | ||
|
|
682f53881d | ||
|
|
03451c6897 | ||
|
|
7ccb1d9c90 | ||
|
|
0cd3e393e8 | ||
|
|
da805db44b | ||
|
|
6324fd1de4 | ||
|
|
64362cdf13 | ||
|
|
ac049f43a3 | ||
|
|
ef3c96f574 | ||
|
|
d0acbb4054 | ||
|
|
03c8c8c21f | ||
|
|
f8bc990f81 | ||
|
|
ea2709086c | ||
|
|
f98f4414e4 | ||
|
|
0486e9cc8c | ||
|
|
a3a4923397 | ||
|
|
b51a343323 | ||
|
|
61cb445237 | ||
|
|
3a83418fe4 | ||
|
|
b849e5f81c | ||
|
|
f7ebcd9537 | ||
|
|
09b26319f0 | ||
|
|
2a032dfc33 | ||
|
|
5384677c6c | ||
|
|
7572cd273f | ||
|
|
aee0a0fe2e | ||
|
|
e1732d83fb | ||
|
|
86325701b3 | ||
|
|
72776f3297 | ||
|
|
801bf05a31 | ||
|
|
eaf45d8768 | ||
|
|
075146403a | ||
|
|
844ebf129a | ||
|
|
b1ce2f7b90 | ||
|
|
17c4d21347 | ||
|
|
7f7d39cfc2 | ||
|
|
08efc787c7 | ||
|
|
8b610cdf32 | ||
|
|
2eca08944f | ||
|
|
5eace3e332 | ||
|
|
f30e7da503 | ||
|
|
a8d88d05db | ||
|
|
a49f429f13 | ||
|
|
aa0501a729 | ||
|
|
7658a2838a | ||
|
|
c7b693cfb6 | ||
|
|
4ef7f275e6 | ||
|
|
98d7b3c6b9 | ||
|
|
c8f7dc3d2c | ||
|
|
1c2dedfef3 | ||
|
|
1169e23997 | ||
|
|
1de4294a4e | ||
|
|
f890edacea | ||
|
|
00d693e9e4 | ||
|
|
ca6d552f5b | ||
|
|
a7fcf1d3a3 | ||
|
|
4bf797c27d | ||
|
|
7819cc9541 | ||
|
|
caf773bd64 | ||
|
|
1379ba5026 | ||
|
|
593e919e32 | ||
|
|
21c26067ef | ||
|
|
fd12eef0fc | ||
|
|
6fb314c414 | ||
|
|
c65806fd89 | ||
|
|
307aaa2801 | ||
|
|
19dadcd4ae | ||
|
|
d234226750 | ||
|
|
c57ec68916 | ||
|
|
dfc8c75d76 | ||
|
|
399d194771 | ||
|
|
7b64e33e92 | ||
|
|
42ffd49f6e | ||
|
|
3982a28549 | ||
|
|
f5e243a77f | ||
|
|
7888cf6714 | ||
|
|
fd9fa0c95a | ||
|
|
0d2120e96b | ||
|
|
229f0ea9c1 | ||
|
|
c9308255a7 | ||
|
|
8ff44e41b1 | ||
|
|
ab2fb3bf97 | ||
|
|
d5b8433c17 | ||
|
|
cb0aee6476 | ||
|
|
4efa87c3c8 | ||
|
|
20180fe4c4 | ||
|
|
80e17eff39 | ||
|
|
9bed46fe01 | ||
|
|
44059f1215 | ||
|
|
4593ab7c46 | ||
|
|
68cf397473 | ||
|
|
cc385c12ec | ||
|
|
d243e8cee5 | ||
|
|
5f56aa2cf6 | ||
|
|
ce38f7da4c | ||
|
|
3f14fec678 | ||
|
|
b39af32426 | ||
|
|
f81cefa8cb | ||
|
|
8a2b6f3f37 | ||
|
|
2ba0c2cc46 | ||
|
|
6e4a53a2ab | ||
|
|
c80510c37b | ||
|
|
857f3fb4f7 | ||
|
|
22a263a598 | ||
|
|
f9f2a501ab | ||
|
|
45d172d0b1 | ||
|
|
00453ae379 | ||
|
|
abc007753a | ||
|
|
b314e363cd | ||
|
|
b439c7bb70 | ||
|
|
7704e9b305 | ||
|
|
9adf7a6ae2 | ||
|
|
7681f9e1ec | ||
|
|
541f064ddb | ||
|
|
61b4bf91b0 | ||
|
|
da1617729b | ||
|
|
c4914429ce | ||
|
|
1a54d6bab0 | ||
|
|
a7ed6bf62b | ||
|
|
6792b652fb | ||
|
|
e833853d3f | ||
|
|
69ea8010d2 | ||
|
|
b0f0710a75 | ||
|
|
39a4c39b6d | ||
|
|
74d3407048 | ||
|
|
a07d99c731 | ||
|
|
598c48069a | ||
|
|
7aec8ccf99 | ||
|
|
5638706252 | ||
|
|
474d3962e2 | ||
|
|
3819bf9bd7 | ||
|
|
360a4ef1bc | ||
|
|
34fff77f66 | ||
|
|
c13f8b4786 | ||
|
|
73b5b86ace | ||
|
|
c2316c7006 | ||
|
|
6548400b96 | ||
|
|
29a7b68b59 | ||
|
|
5359f850dd | ||
|
|
36dffe0a0f | ||
|
|
a88e38dcf7 | ||
|
|
fbd963bfb1 | ||
|
|
154a4fc7d9 | ||
|
|
5fa36c40f2 | ||
|
|
eb3aec0978 | ||
|
|
f9b3691a58 | ||
|
|
55eda6c1b0 | ||
|
|
6b04593343 | ||
|
|
e9cbd72100 | ||
|
|
dacb810768 | ||
|
|
61e881d4d9 | ||
|
|
c00ccd61bb | ||
|
|
36dfa7c740 | ||
|
|
71861779e8 | ||
|
|
3c1be39976 | ||
|
|
eaaa7beaa1 | ||
|
|
abf7ad478d | ||
|
|
0c3a2fb047 | ||
|
|
a85ad0a1f0 | ||
|
|
4e1ee72d4d | ||
|
|
8e9b6d5ea2 | ||
|
|
536ee6678f | ||
|
|
dc4fbe21de | ||
|
|
f78b1adefa | ||
|
|
d754896f88 | ||
|
|
c38aac4015 | ||
|
|
bf24796899 | ||
|
|
5e0563c42c | ||
|
|
ca2d182e83 | ||
|
|
387867b1ae | ||
|
|
ae2ee7c6e2 | ||
|
|
da4370a420 | ||
|
|
8131df6a2e | ||
|
|
b01fec4adc | ||
|
|
bd56587517 | ||
|
|
72d38e4b8c | ||
|
|
9cd2e68f0b | ||
|
|
e1eb8ffd56 | ||
|
|
0e1e3b9ed7 | ||
|
|
425e58627f | ||
|
|
bfc6f2a8a8 | ||
|
|
cecb88f024 | ||
|
|
ac9bd62ecf | ||
|
|
0d755fa8fc | ||
|
|
10d8a40d1c | ||
|
|
217be698af | ||
|
|
061921fd6c | ||
|
|
07287e5f7f | ||
|
|
ec8b034541 | ||
|
|
694e76b654 | ||
|
|
8425fc46a7 | ||
|
|
967c5860c9 | ||
|
|
3f40996d2d | ||
|
|
a9ce93cd67 | ||
|
|
759754c437 | ||
|
|
b873dd75d3 | ||
|
|
b520501d1f | ||
|
|
41ee6e9b91 | ||
|
|
5c1920d60d | ||
|
|
bab14883a2 | ||
|
|
dda614165f | ||
|
|
38b6350ef8 | ||
|
|
a86f7e96ca | ||
|
|
dc7c44b797 | ||
|
|
f68bdafd9f | ||
|
|
ca079d5dce | ||
|
|
1695fb2fd8 | ||
|
|
26120969de | ||
|
|
eb7c65dc95 | ||
|
|
235a9ff92d | ||
|
|
63b95c6793 | ||
|
|
b91bf13281 | ||
|
|
594837573e |
9
.github/workflows/build-docker.yaml
vendored
9
.github/workflows/build-docker.yaml
vendored
@@ -4,8 +4,13 @@ name: Docker image
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- production
|
||||
tags:
|
||||
- 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10
|
||||
|
||||
# on:
|
||||
# push:
|
||||
# branches:
|
||||
# - production
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
67
README.md
67
README.md
@@ -1,40 +1,32 @@
|
||||
[](https://github.com/prettier/prettier)
|
||||
[](https://paypal.me/JanProchazkaCz/30eur)
|
||||
[](https://www.npmjs.com/package/dbgate-api)
|
||||
|
||||
# DbGate - database administration tool
|
||||
|
||||
DbGate is fast and efficient database administration tool. It is focused to work with data (filtering, editing, master/detail views etc.)
|
||||
|
||||
**Try it online** - https://dbgate.org
|
||||
**Try it online** - https://demo.dbgate.org - online demo application
|
||||
|
||||
## Currently implemented features
|
||||
## Features
|
||||
* Support for Microsoft SQL Server, Postgre SQL, MySQL
|
||||
* Table data browsing - filtering, sorting, adding related columns using foreign keys
|
||||
* Table data browsing - filtering, sorting, related columns using foreign keys
|
||||
* Master/detail views
|
||||
* Browsing objects - tables, views, procedures, functions
|
||||
* Table data editing, with SQL change script preview
|
||||
* SQL editor, execute SQL script, SQL code formatter
|
||||
* SQL editor, execute SQL script, SQL code formatter, SQL code completion, SQL join wizard
|
||||
* Runs as application for Windows, Linux and Mac. Or in Docker container on server and in web Browser on client.
|
||||
* Import, export from/to CSV, Excel, JSON
|
||||
* Free table editor - quick table data editing (cleanup data after import/before export, prototype tables etc.)
|
||||
* Archives - backup your data in JSON files on local filesystem (or on DbGate server, when using web application)
|
||||
* Light and dark theme
|
||||
* For detailed info, how to run DbGate in docker container, visit [docker hub](https://hub.docker.com/r/dbgate/dbgate)
|
||||
|
||||
## Current support for database engines
|
||||
| Action | Microsoft SQL Server | Postgre SQL | MySQL |
|
||||
|---|---|---|---|
|
||||
| Tables | yes | yes | yes |
|
||||
| Columns | yes | yes | yes |
|
||||
| Primary keys | yes | yes | yes |
|
||||
| Foreign keys | yes | yes | yes |
|
||||
| Run query | yes | yes | yes |
|
||||
| Data types | yes | no | no |
|
||||
| Views | yes | no | no |
|
||||
| Stored procedures | yes | no | no |
|
||||
| Functions | yes | no | no |
|
||||
|
||||

|
||||
|
||||
## Design goals
|
||||
* Application simplicity - DbGate takes the best and only the best from old [DbGate](http://www.jenasoft.com/dbgate), [DatAdmin](http://www.jenasoft.com/datadmin) and [DbMouse](http://www.jenasoft.com/dbmouse) . First will be implemented the most used features from this software.
|
||||
* Minimal dependencies - so that the software can be developed in future without problems with obsolete libraries
|
||||
* Application simplicity - DbGate takes the best and only the best from old [DbGate](http://www.jenasoft.com/dbgate), [DatAdmin](http://www.jenasoft.com/datadmin) and [DbMouse](http://www.jenasoft.com/dbmouse) .
|
||||
* Minimal dependencies
|
||||
* Frontend - React, styled-components, socket.io
|
||||
* Backend - NodeJs, ExpressJs, socket.io, database connection drivers
|
||||
* JavaScript + TypeScript
|
||||
@@ -43,17 +35,29 @@ DbGate is fast and efficient database administration tool. It is focused to work
|
||||
* Platform independed - will run as web application in single docker container on server, or as application using Electron platform on Linux, Windows and Mac
|
||||
|
||||
## How Can I Contribute?
|
||||
You're welcome to contribute to this project! Especially with these topics:
|
||||
You're welcome to contribute to this project! Below are some ideas, how to contribute:
|
||||
|
||||
* Bug fixing
|
||||
* Test Mac edition
|
||||
* Styles, graphics
|
||||
* Better MySQL, Postgre SQL support
|
||||
* Improve linux package build, add to APT repository
|
||||
* Auto-upgrade of electron application
|
||||
* Support for new import/export formats
|
||||
|
||||
Any help is appreciated!
|
||||
|
||||
Feel free to report issues and open merge requests.
|
||||
|
||||
## Roadmap
|
||||
|
||||
| Feature | Complexity | Schedule |
|
||||
|---|---|---|
|
||||
| Query designer | medium | december 2020 |
|
||||
| Table designer (structure editor) | big | january 2021 |
|
||||
| Filter SQL result sets | small | november 2020 |
|
||||
| Filtering, sorting in free table editor | small | november 2020 |
|
||||
| Using tedious driver instead of mssql | small | january 2021 |
|
||||
| Support for SQLite | big | 2021 |
|
||||
|
||||
## How to run development environment
|
||||
|
||||
```sh
|
||||
@@ -90,11 +94,14 @@ yarn start:app:local
|
||||
```
|
||||
|
||||
## Packages
|
||||
* api - backend, Javascript, ExpressJS
|
||||
* datalib - TypeScript library for utility classes
|
||||
* electron - application (JavaScript)
|
||||
* engines - drivers for database engine (mssql, mysql, postgres), analysing database structure, creating specific queries (JavaScript)
|
||||
* filterparser - TypeScript library for parsing data filter expressions using parsimmon
|
||||
* sqltree - JSON representation of SQL query, functions converting to SQL (TypeScript)
|
||||
* types - common TypeScript definitions
|
||||
* web - frontend in React (JavaScript)
|
||||
Some dbgate packages can be used also without DbGate. You can find them on [NPM repository](https://www.npmjs.com/search?q=keywords:dbgate)
|
||||
|
||||
* [api](https://github.com/dbshell/dbgate/tree/master/packages/api) - backend, Javascript, ExpressJS [](https://www.npmjs.com/package/dbgate-api)
|
||||
* [datalib](https://github.com/dbshell/dbgate/tree/master/packages/datalib) - TypeScript library for utility classes
|
||||
* [app](https://github.com/dbshell/dbgate/tree/master/app) - application (JavaScript)
|
||||
* [engines](https://github.com/dbshell/dbgate/tree/master/packages/engines) - drivers for database engine (mssql, mysql, postgres), analysing database structure, creating specific queries (JavaScript) [](https://www.npmjs.com/package/dbgate-engines)
|
||||
* [filterparser](https://github.com/dbshell/dbgate/tree/master/packages/filterparser) - TypeScript library for parsing data filter expressions using parsimmon
|
||||
* [sqltree](https://github.com/dbshell/dbgate/tree/master/packages/sqltree) - JSON representation of SQL query, functions converting to SQL (TypeScript) [](https://www.npmjs.com/package/dbgate-sqltree)
|
||||
* [types](https://github.com/dbshell/dbgate/tree/master/packages/types) - common TypeScript definitions [](https://www.npmjs.com/package/dbgate-types)
|
||||
* [web](https://github.com/dbshell/dbgate/tree/master/packages/web) - frontend in React (JavaScript)
|
||||
* [tools](https://github.com/dbshell/dbgate/tree/master/packages/tools) - various tools [](https://www.npmjs.com/package/dbgate-tools)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "dbgate",
|
||||
"version": "3.7.6",
|
||||
"version": "3.7.33",
|
||||
"private": true,
|
||||
"author": "Jan Prochazka <jenasoft.database@gmail.com>",
|
||||
"dependencies": {
|
||||
|
||||
34
package.json
34
package.json
@@ -1,23 +1,25 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "@dbgate/all",
|
||||
"name": "dbgate-all",
|
||||
"workspaces": [
|
||||
"packages/*"
|
||||
],
|
||||
"scripts": {
|
||||
"start:api": "yarn workspace @dbgate/api start",
|
||||
"start:api:portal": "yarn workspace @dbgate/api start:portal",
|
||||
"start:web": "yarn workspace @dbgate/web start",
|
||||
"start:sqltree": "yarn workspace @dbgate/sqltree start",
|
||||
"start:datalib": "yarn workspace @dbgate/datalib start",
|
||||
"start:filterparser": "yarn workspace @dbgate/filterparser start",
|
||||
"build:sqltree": "yarn workspace @dbgate/sqltree build",
|
||||
"build:datalib": "yarn workspace @dbgate/datalib build",
|
||||
"build:filterparser": "yarn workspace @dbgate/filterparser build",
|
||||
"build:lib": "yarn build:sqltree && yarn build:filterparser && yarn build:datalib",
|
||||
"start:api": "yarn workspace dbgate-api start",
|
||||
"start:api:portal": "yarn workspace dbgate-api start:portal",
|
||||
"start:web": "yarn workspace dbgate-web start",
|
||||
"start:sqltree": "yarn workspace dbgate-sqltree start",
|
||||
"start:tools": "yarn workspace dbgate-tools start",
|
||||
"start:datalib": "yarn workspace dbgate-datalib start",
|
||||
"start:filterparser": "yarn workspace dbgate-filterparser start",
|
||||
"build:sqltree": "yarn workspace dbgate-sqltree build",
|
||||
"build:datalib": "yarn workspace dbgate-datalib build",
|
||||
"build:filterparser": "yarn workspace dbgate-filterparser build",
|
||||
"build:tools": "yarn workspace dbgate-tools build",
|
||||
"build:lib": "yarn build:tools && yarn build:sqltree && yarn build:filterparser && yarn build:datalib",
|
||||
"build:app": "cd app && yarn install && yarn build",
|
||||
"build:api": "yarn workspace @dbgate/api build",
|
||||
"build:web:docker": "yarn workspace @dbgate/web build:docker",
|
||||
"build:api": "yarn workspace dbgate-api build",
|
||||
"build:web:docker": "yarn workspace dbgate-web build:docker",
|
||||
"build:app:local": "cd app && yarn build:local",
|
||||
"start:app:local": "cd app && yarn start:local",
|
||||
|
||||
@@ -26,9 +28,9 @@
|
||||
|
||||
"prepare": "yarn build:lib",
|
||||
"start": "concurrently --kill-others-on-fail \"yarn start:api\" \"yarn start:web\"",
|
||||
"lib": "concurrently --kill-others-on-fail \"yarn start:sqltree\" \"yarn start:filterparser\" \"yarn start:datalib\"",
|
||||
"ts:api": "yarn workspace @dbgate/api ts",
|
||||
"ts:web": "yarn workspace @dbgate/web ts",
|
||||
"lib": "concurrently --kill-others-on-fail \"yarn start:sqltree\" \"yarn start:filterparser\" \"yarn start:datalib\" \"yarn start:tools\"",
|
||||
"ts:api": "yarn workspace dbgate-api ts",
|
||||
"ts:web": "yarn workspace dbgate-web ts",
|
||||
"ts": "yarn ts:api && yarn ts:web",
|
||||
"postinstall": "patch-package"
|
||||
},
|
||||
|
||||
@@ -15,7 +15,7 @@ PORT_postgres=5433
|
||||
ENGINE_postgres=postgres
|
||||
|
||||
TOOLBAR=home
|
||||
ICON_home=fas fa-home
|
||||
ICON_home=mdi mdi-home
|
||||
TITLE_home=Home
|
||||
PAGE_home=home.html
|
||||
STARTUP_PAGES=home
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
module.exports = {
|
||||
env: {
|
||||
node: true,
|
||||
commonjs: true,
|
||||
es6: true,
|
||||
jquery: false,
|
||||
jest: true,
|
||||
jasmine: true,
|
||||
},
|
||||
extends: 'eslint:recommended',
|
||||
globals: {
|
||||
Atomics: 'readonly',
|
||||
SharedArrayBuffer: 'readonly',
|
||||
},
|
||||
parserOptions: {
|
||||
ecmaVersion: 2018,
|
||||
},
|
||||
rules: {
|
||||
'no-unused-vars': 'warn',
|
||||
},
|
||||
};
|
||||
2
packages/api/.npmignore
Normal file
2
packages/api/.npmignore
Normal file
@@ -0,0 +1,2 @@
|
||||
dist
|
||||
.vscode
|
||||
146
packages/api/README.md
Normal file
146
packages/api/README.md
Normal file
@@ -0,0 +1,146 @@
|
||||
# dbgate-api
|
||||
|
||||
Allows run DbGate data-manipulation scripts.
|
||||
|
||||
## Installation
|
||||
|
||||
yarn add dbgate-api
|
||||
|
||||
## Usage
|
||||
|
||||
This example exports table Customer info CSV file.
|
||||
|
||||
```javascript
|
||||
|
||||
const dbgateApi = require('dbgate-api');
|
||||
async function run() {
|
||||
const reader = await dbgateApi.tableReader({
|
||||
connection: { server: 'localhost', engine: 'mssql', user: 'sa', password: 'xxxx', database: 'Chinook' },
|
||||
schemaName: 'dbo',
|
||||
pureName: 'Customer',
|
||||
});
|
||||
const writer = await dbgateApi.csvWriter({ fileName: 'Customer.csv' });
|
||||
await dbgateApi.copyStream(reader, writer);
|
||||
|
||||
console.log('Finished job script');
|
||||
}
|
||||
dbgateApi.runScript(run);
|
||||
|
||||
```
|
||||
|
||||
Silly example, runs without any dependencies. Copy [fakeObjectReader](https://github.com/dbshell/dbgate/blob/master/packages/api/src/shell/fakeObjectReader.js) to [consoleObjectWriter](https://github.com/dbshell/dbgate/blob/master/packages/api/src/shell/consoleObjectWriter.js) .
|
||||
|
||||
```javascript
|
||||
|
||||
const dbgateApi = require('dbgate-api');
|
||||
async function run() {
|
||||
const reader = await dbgateApi.fakeObjectReader();
|
||||
const writer = await dbgateApi.consoleObjectWriter();
|
||||
await dbgateApi.copyStream(reader, writer);
|
||||
console.log('Finished job script');
|
||||
}
|
||||
dbgateApi.runScript(run);
|
||||
|
||||
```
|
||||
|
||||
## dbgateApi functions
|
||||
|
||||
### dbgateApi.copyStream
|
||||
Copies data from reader into writer. Reader and writer should be created from functions listed below.
|
||||
```js
|
||||
await dbgateApi.copyStream(reader, writer);
|
||||
```
|
||||
|
||||
### dbgateApi.tableReader
|
||||
Reads table or view.
|
||||
```js
|
||||
const reader = await dbgateApi.tableReader({
|
||||
connection: { server: 'localhost', engine: 'mssql' | 'postgres' | 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
|
||||
schemaName: 'dbo',
|
||||
pureName: 'Customer',
|
||||
});
|
||||
```
|
||||
|
||||
### dbgateApi.queryReader
|
||||
Executes query and reads its result.
|
||||
```js
|
||||
const reader = await dbgateApi.tableReader({
|
||||
connection: { server: 'localhost', engine: 'mssql' | 'postgres' | 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
|
||||
sql: 'SELECT * FROM Album',
|
||||
});
|
||||
```
|
||||
|
||||
### dbgateApi.tableWriter
|
||||
Imports data into table. Options are optional, default values are false.
|
||||
- dropIfExists - if table already exists, it is dropped before import
|
||||
- truncate - delete table content before import
|
||||
- createIfNotExists - create table, if not exists
|
||||
```js
|
||||
const reader = await dbgateApi.tableWriter({
|
||||
connection: { server: 'localhost', engine: 'mssql' | 'postgres' | 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
|
||||
schemaName: 'dbo',
|
||||
pureName: 'Customer',
|
||||
options: {
|
||||
dropIfExists: false,
|
||||
truncate: false,
|
||||
createIfNotExists: false,
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
### dbgateApi.csvReader
|
||||
Reads CSV file
|
||||
```js
|
||||
const reader = await dbgateApi.csvReader({
|
||||
fileName: '/home/root/test.csv',
|
||||
encoding: 'utf-8',
|
||||
header: true,
|
||||
delimiter: ',',
|
||||
quoted: false,
|
||||
limitRows: null
|
||||
});
|
||||
```
|
||||
|
||||
### dbgateApi.csvWriter
|
||||
Writes CSV file
|
||||
```js
|
||||
const reader = await dbgateApi.csvWriter({
|
||||
fileName: '/home/root/test.csv',
|
||||
encoding: 'utf-8',
|
||||
header: true,
|
||||
delimiter: ',',
|
||||
quoted: false
|
||||
});
|
||||
```
|
||||
|
||||
### dbgateApi.jsonLinesReader
|
||||
Reads JSON lines data file. On first line could be structure. Every line contains one row as JSON serialized object.
|
||||
```js
|
||||
const reader = await dbgateApi.jsonLinesReader({
|
||||
fileName: '/home/root/test.jsonl',
|
||||
encoding: 'utf-8',
|
||||
header: true,
|
||||
limitRows: null
|
||||
});
|
||||
```
|
||||
|
||||
### dbgateApi.jsonLinesWriter
|
||||
Writes JSON lines data file. On first line could be structure. Every line contains one row as JSON serialized object.
|
||||
```js
|
||||
const reader = await dbgateApi.jsonLinesWriter({
|
||||
fileName: '/home/root/test.jsonl',
|
||||
encoding: 'utf-8',
|
||||
header: true
|
||||
});
|
||||
```
|
||||
|
||||
### dbgateApi.excelSheetReader
|
||||
Reads tabular data from one sheet in MS Excel file.
|
||||
```js
|
||||
const reader = await dbgateApi.excelSheetReader({
|
||||
fileName: '/home/root/test.xlsx',
|
||||
sheetName: 'Album',
|
||||
limitRows: null
|
||||
});
|
||||
```
|
||||
|
||||
@@ -1,28 +1,44 @@
|
||||
{
|
||||
"name": "@dbgate/api",
|
||||
"name": "dbgate-api",
|
||||
"main": "src/index.js",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"version": "1.0.5",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dbshell/dbgate.git"
|
||||
},
|
||||
"funding": "https://www.paypal.com/paypalme/JanProchazkaCz/30eur",
|
||||
"author": "Jan Prochazka",
|
||||
"license": "GPL",
|
||||
"keywords": [
|
||||
"sql",
|
||||
"json",
|
||||
"import",
|
||||
"export",
|
||||
"dbgate"
|
||||
],
|
||||
"dependencies": {
|
||||
"@dbgate/engines": "^0.1.0",
|
||||
"@dbgate/sqltree": "^0.1.0",
|
||||
"async-lock": "^1.2.4",
|
||||
"axios": "^0.19.0",
|
||||
"body-parser": "^1.19.0",
|
||||
"bufferutil": "^4.0.1",
|
||||
"byline": "^5.0.0",
|
||||
"cors": "^2.8.5",
|
||||
"cross-env": "^6.0.3",
|
||||
"csv": "^5.3.2",
|
||||
"dbgate-sqltree": "^1.0.0",
|
||||
"dbgate-tools": "^1.0.0",
|
||||
"eslint": "^6.8.0",
|
||||
"express": "^4.17.1",
|
||||
"express-basic-auth": "^1.2.0",
|
||||
"express-fileupload": "^1.2.0",
|
||||
"find-free-port": "^2.0.0",
|
||||
"fs-extra": "^8.1.0",
|
||||
"http": "^0.0.0",
|
||||
"line-reader": "^0.4.0",
|
||||
"mssql": "^6.0.1",
|
||||
"mysql": "^2.17.1",
|
||||
"lodash": "^4.17.15",
|
||||
"ncp": "^2.0.0",
|
||||
"nedb-promises": "^4.0.1",
|
||||
"pg": "^7.17.0",
|
||||
"pg-query-stream": "^3.1.1"
|
||||
"tar": "^6.0.5"
|
||||
},
|
||||
"scripts": {
|
||||
"start": "nodemon src/index.js",
|
||||
@@ -31,8 +47,8 @@
|
||||
"build": "webpack"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@dbgate/types": "^0.1.0",
|
||||
"@types/lodash": "^4.14.149",
|
||||
"dbgate-types": "^1.0.0",
|
||||
"env-cmd": "^10.1.0",
|
||||
"nodemon": "^2.0.2",
|
||||
"typescript": "^3.7.4",
|
||||
|
||||
98
packages/api/src/controllers/archive.js
Normal file
98
packages/api/src/controllers/archive.js
Normal file
@@ -0,0 +1,98 @@
|
||||
const fs = require('fs-extra');
|
||||
const stream = require('stream');
|
||||
const readline = require('readline');
|
||||
const path = require('path');
|
||||
const { formatWithOptions } = require('util');
|
||||
const { archivedir } = require('../utility/directories');
|
||||
const socket = require('../utility/socket');
|
||||
const JsonLinesDatastore = require('../utility/JsonLinesDatastore');
|
||||
const { saveFreeTableData } = require('../utility/freeTableStorage');
|
||||
|
||||
module.exports = {
|
||||
folders_meta: 'get',
|
||||
async folders() {
|
||||
const folders = await fs.readdir(archivedir());
|
||||
return [
|
||||
{
|
||||
name: 'default',
|
||||
type: 'jsonl',
|
||||
},
|
||||
...folders
|
||||
.filter((x) => x != 'default')
|
||||
.map((name) => ({
|
||||
name,
|
||||
type: 'jsonl',
|
||||
})),
|
||||
];
|
||||
},
|
||||
|
||||
createFolder_meta: 'post',
|
||||
async createFolder({ folder }) {
|
||||
await fs.mkdir(path.join(archivedir(), folder));
|
||||
socket.emitChanged('archive-folders-changed');
|
||||
return true;
|
||||
},
|
||||
|
||||
files_meta: 'get',
|
||||
async files({ folder }) {
|
||||
const dir = path.join(archivedir(), folder);
|
||||
if (!(await fs.exists(dir))) return [];
|
||||
const files = await fs.readdir(dir);
|
||||
return files
|
||||
.filter((name) => name.endsWith('.jsonl'))
|
||||
.map((name) => ({
|
||||
name: name.slice(0, -'.jsonl'.length),
|
||||
type: 'jsonl',
|
||||
}));
|
||||
},
|
||||
|
||||
refreshFiles_meta: 'post',
|
||||
async refreshFiles({ folder }) {
|
||||
socket.emitChanged(`archive-files-changed-${folder}`);
|
||||
},
|
||||
|
||||
refreshFolders_meta: 'post',
|
||||
async refreshFolders() {
|
||||
socket.emitChanged(`archive-folders-changed`);
|
||||
},
|
||||
|
||||
deleteFile_meta: 'post',
|
||||
async deleteFile({ folder, file }) {
|
||||
await fs.unlink(path.join(archivedir(), folder, `${file}.jsonl`));
|
||||
socket.emitChanged(`archive-files-changed-${folder}`);
|
||||
},
|
||||
|
||||
deleteFolder_meta: 'post',
|
||||
async deleteFolder({ folder }) {
|
||||
if (!folder) throw new Error('Missing folder parameter');
|
||||
await fs.rmdir(path.join(archivedir(), folder), { recursive: true });
|
||||
socket.emitChanged(`archive-folders-changed`);
|
||||
},
|
||||
|
||||
saveFreeTable_meta: 'post',
|
||||
async saveFreeTable({ folder, file, data }) {
|
||||
saveFreeTableData(path.join(archivedir(), folder, `${file}.jsonl`), data);
|
||||
return true;
|
||||
},
|
||||
|
||||
loadFreeTable_meta: 'post',
|
||||
async loadFreeTable({ folder, file }) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const fileStream = fs.createReadStream(path.join(archivedir(), folder, `${file}.jsonl`));
|
||||
const liner = readline.createInterface({
|
||||
input: fileStream,
|
||||
});
|
||||
let structure = null;
|
||||
const rows = [];
|
||||
liner.on('line', (line) => {
|
||||
const data = JSON.parse(line);
|
||||
if (structure) rows.push(data);
|
||||
else structure = data;
|
||||
});
|
||||
liner.on('close', () => {
|
||||
resolve({ structure, rows });
|
||||
fileStream.close();
|
||||
});
|
||||
});
|
||||
},
|
||||
};
|
||||
@@ -2,10 +2,10 @@ const uuidv1 = require('uuid/v1');
|
||||
const connections = require('./connections');
|
||||
const socket = require('../utility/socket');
|
||||
const { fork } = require('child_process');
|
||||
const DatabaseAnalyser = require('@dbgate/engines/default/DatabaseAnalyser');
|
||||
const { DatabaseAnalyser } = require('dbgate-tools');
|
||||
|
||||
module.exports = {
|
||||
/** @type {import('@dbgate/types').OpenedDatabaseConnection[]} */
|
||||
/** @type {import('dbgate-types').OpenedDatabaseConnection[]} */
|
||||
opened: [],
|
||||
closed: [],
|
||||
requests: {},
|
||||
@@ -67,7 +67,7 @@ module.exports = {
|
||||
return newOpened;
|
||||
},
|
||||
|
||||
/** @param {import('@dbgate/types').OpenedDatabaseConnection} conn */
|
||||
/** @param {import('dbgate-types').OpenedDatabaseConnection} conn */
|
||||
sendRequest(conn, message) {
|
||||
const msgid = uuidv1();
|
||||
const promise = new Promise((resolve, reject) => {
|
||||
|
||||
@@ -1,95 +1,148 @@
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const lineReader = require('line-reader');
|
||||
const { jsldir } = require('../utility/directories');
|
||||
const _ = require('lodash');
|
||||
const DatastoreProxy = require('../utility/DatastoreProxy');
|
||||
const { saveFreeTableData } = require('../utility/freeTableStorage');
|
||||
const getJslFileName = require('../utility/getJslFileName');
|
||||
const JsonLinesDatastore = require('../utility/JsonLinesDatastore');
|
||||
const socket = require('../utility/socket');
|
||||
|
||||
module.exports = {
|
||||
openedReaders: {},
|
||||
|
||||
closeReader(jslid) {
|
||||
// console.log('CLOSING READER');
|
||||
if (!this.openedReaders[jslid]) return Promise.resolve();
|
||||
return new Promise((resolve, reject) => {
|
||||
this.openedReaders[jslid].reader.close((err) => {
|
||||
if (err) reject(err);
|
||||
delete this.openedReaders[jslid];
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
readLine(jslid) {
|
||||
if (!this.openedReaders[jslid]) return Promise.reject();
|
||||
return new Promise((resolve, reject) => {
|
||||
const { reader } = this.openedReaders[jslid];
|
||||
if (!reader.hasNextLine()) {
|
||||
function readFirstLine(file) {
|
||||
return new Promise((resolve, reject) => {
|
||||
lineReader.open(file, (err, reader) => {
|
||||
if (err) reject(err);
|
||||
if (reader.hasNextLine()) {
|
||||
reader.nextLine((err, line) => {
|
||||
if (err) reject(err);
|
||||
resolve(line);
|
||||
});
|
||||
} else {
|
||||
resolve(null);
|
||||
return;
|
||||
}
|
||||
reader.nextLine((err, line) => {
|
||||
this.openedReaders[jslid].readedCount += 1;
|
||||
if (err) reject(err);
|
||||
resolve(line);
|
||||
});
|
||||
});
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
openReader(jslid) {
|
||||
// console.log('OPENING READER');
|
||||
console.log('OPENING READER, LINES=', fs.readFileSync(path.join(jsldir(), `${jslid}.jsonl`), 'utf-8').split('\n').length);
|
||||
const file = path.join(jsldir(), `${jslid}.jsonl`);
|
||||
return new Promise((resolve, reject) =>
|
||||
lineReader.open(file, (err, reader) => {
|
||||
if (err) reject(err);
|
||||
resolve();
|
||||
this.openedReaders[jslid] = {
|
||||
reader,
|
||||
readedCount: 0,
|
||||
};
|
||||
})
|
||||
);
|
||||
},
|
||||
module.exports = {
|
||||
datastores: {},
|
||||
|
||||
async ensureReader(jslid, offset) {
|
||||
if (this.openedReaders[jslid] && this.openedReaders[jslid].readedCount > offset) {
|
||||
await this.closeReader(jslid);
|
||||
}
|
||||
if (!this.openedReaders[jslid]) {
|
||||
await this.openReader(jslid);
|
||||
}
|
||||
while (this.openedReaders[jslid].readedCount < offset) {
|
||||
await this.readLine(jslid);
|
||||
// closeReader(jslid) {
|
||||
// // console.log('CLOSING READER');
|
||||
// if (!this.openedReaders[jslid]) return Promise.resolve();
|
||||
// return new Promise((resolve, reject) => {
|
||||
// this.openedReaders[jslid].reader.close((err) => {
|
||||
// if (err) reject(err);
|
||||
// delete this.openedReaders[jslid];
|
||||
// resolve();
|
||||
// });
|
||||
// });
|
||||
// },
|
||||
|
||||
// readLine(readerInfo) {
|
||||
// return new Promise((resolve, reject) => {
|
||||
// const { reader } = readerInfo;
|
||||
// if (!reader.hasNextLine()) {
|
||||
// resolve(null);
|
||||
// return;
|
||||
// }
|
||||
// reader.nextLine((err, line) => {
|
||||
// if (readerInfo.readedSchemaRow) readerInfo.readedDataRowCount += 1;
|
||||
// else readerInfo.readedSchemaRow = true;
|
||||
// if (err) reject(err);
|
||||
// resolve(line);
|
||||
// });
|
||||
// });
|
||||
// },
|
||||
|
||||
// openReader(jslid) {
|
||||
// // console.log('OPENING READER');
|
||||
// // console.log(
|
||||
// // 'OPENING READER, LINES=',
|
||||
// // fs.readFileSync(path.join(jsldir(), `${jslid}.jsonl`), 'utf-8').split('\n').length
|
||||
// // );
|
||||
// const file = getJslFileName(jslid);
|
||||
// return new Promise((resolve, reject) =>
|
||||
// lineReader.open(file, (err, reader) => {
|
||||
// if (err) reject(err);
|
||||
// const readerInfo = {
|
||||
// reader,
|
||||
// readedDataRowCount: 0,
|
||||
// readedSchemaRow: false,
|
||||
// isReading: true,
|
||||
// };
|
||||
// this.openedReaders[jslid] = readerInfo;
|
||||
// resolve(readerInfo);
|
||||
// })
|
||||
// );
|
||||
// },
|
||||
|
||||
// async ensureReader(jslid, offset) {
|
||||
// if (this.openedReaders[jslid] && this.openedReaders[jslid].readedDataRowCount > offset) {
|
||||
// await this.closeReader(jslid);
|
||||
// }
|
||||
// let readerInfo = this.openedReaders[jslid];
|
||||
// if (!this.openedReaders[jslid]) {
|
||||
// readerInfo = await this.openReader(jslid);
|
||||
// }
|
||||
// readerInfo.isReading = true;
|
||||
// if (!readerInfo.readedSchemaRow) {
|
||||
// await this.readLine(readerInfo); // skip structure
|
||||
// }
|
||||
// while (readerInfo.readedDataRowCount < offset) {
|
||||
// await this.readLine(readerInfo);
|
||||
// }
|
||||
// return readerInfo;
|
||||
// },
|
||||
|
||||
async ensureDatastore(jslid) {
|
||||
let datastore = this.datastores[jslid];
|
||||
if (!datastore) {
|
||||
datastore = new JsonLinesDatastore(getJslFileName(jslid));
|
||||
// datastore = new DatastoreProxy(getJslFileName(jslid));
|
||||
this.datastores[jslid] = datastore;
|
||||
}
|
||||
return datastore;
|
||||
},
|
||||
|
||||
getInfo_meta: 'get',
|
||||
getInfo({ jslid }) {
|
||||
const file = path.join(jsldir(), `${jslid}.jsonl.info`);
|
||||
return JSON.parse(fs.readFileSync(file, 'utf-8'));
|
||||
async getInfo({ jslid }) {
|
||||
const file = getJslFileName(jslid);
|
||||
const firstLine = await readFirstLine(file);
|
||||
if (firstLine) return JSON.parse(firstLine);
|
||||
return null;
|
||||
},
|
||||
|
||||
getRows_meta: 'get',
|
||||
async getRows({ jslid, offset, limit }) {
|
||||
await this.ensureReader(jslid, offset);
|
||||
const res = [];
|
||||
for (let i = 0; i < limit; i += 1) {
|
||||
const line = await this.readLine(jslid);
|
||||
if (line == null) break;
|
||||
res.push(JSON.parse(line));
|
||||
}
|
||||
return res;
|
||||
getRows_meta: 'post',
|
||||
async getRows({ jslid, offset, limit, filters }) {
|
||||
const datastore = await this.ensureDatastore(jslid);
|
||||
return datastore.getRows(offset, limit, _.isEmpty(filters) ? null : filters);
|
||||
},
|
||||
|
||||
getStats_meta: 'get',
|
||||
getStats({ jslid }) {
|
||||
const file = path.join(jsldir(), `${jslid}.jsonl.stats`);
|
||||
return JSON.parse(fs.readFileSync(file, 'utf-8'));
|
||||
const file = `${getJslFileName(jslid)}.stats`;
|
||||
if (fs.existsSync(file)) return JSON.parse(fs.readFileSync(file, 'utf-8'));
|
||||
return {};
|
||||
},
|
||||
|
||||
async notifyChangedStats(stats) {
|
||||
console.log('SENDING STATS', JSON.stringify(stats));
|
||||
await this.closeReader(stats.jslid);
|
||||
const datastore = this.datastores[stats.jslid];
|
||||
if (datastore) await datastore.notifyChanged();
|
||||
socket.emit(`jsldata-stats-${stats.jslid}`, stats);
|
||||
|
||||
// const readerInfo = this.openedReaders[stats.jslid];
|
||||
// if (readerInfo && readerInfo.isReading) {
|
||||
// readerInfo.closeAfterReadAndSendStats = stats;
|
||||
// } else {
|
||||
// await this.closeReader(stats.jslid);
|
||||
// socket.emit(`jsldata-stats-${stats.jslid}`, stats);
|
||||
// }
|
||||
},
|
||||
|
||||
saveFreeTable_meta: 'post',
|
||||
async saveFreeTable({ jslid, data }) {
|
||||
saveFreeTableData(getJslFileName(jslid), data);
|
||||
return true;
|
||||
},
|
||||
};
|
||||
|
||||
150
packages/api/src/controllers/plugins.js
Normal file
150
packages/api/src/controllers/plugins.js
Normal file
@@ -0,0 +1,150 @@
|
||||
const fs = require('fs-extra');
|
||||
const axios = require('axios');
|
||||
const path = require('path');
|
||||
const { pluginsdir, datadir } = require('../utility/directories');
|
||||
const socket = require('../utility/socket');
|
||||
const requirePlugin = require('../shell/requirePlugin');
|
||||
const downloadPackage = require('../utility/downloadPackage');
|
||||
|
||||
// async function loadPackageInfo(dir) {
|
||||
// const readmeFile = path.join(dir, 'README.md');
|
||||
// const packageFile = path.join(dir, 'package.json');
|
||||
|
||||
// if (!(await fs.exists(packageFile))) {
|
||||
// return null;
|
||||
// }
|
||||
|
||||
// let readme = null;
|
||||
// let manifest = null;
|
||||
// if (await fs.exists(readmeFile)) readme = await fs.readFile(readmeFile, { encoding: 'utf-8' });
|
||||
// if (await fs.exists(packageFile)) manifest = JSON.parse(await fs.readFile(packageFile, { encoding: 'utf-8' }));
|
||||
// return {
|
||||
// readme,
|
||||
// manifest,
|
||||
// };
|
||||
// }
|
||||
|
||||
const preinstallPlugins = [
|
||||
'dbgate-plugin-mssql',
|
||||
'dbgate-plugin-mysql',
|
||||
'dbgate-plugin-postgres',
|
||||
'dbgate-plugin-csv',
|
||||
'dbgate-plugin-excel',
|
||||
];
|
||||
|
||||
module.exports = {
|
||||
script_meta: 'get',
|
||||
async script({ packageName }) {
|
||||
const file = path.join(pluginsdir(), packageName, 'dist', 'frontend.js');
|
||||
const data = await fs.readFile(file, {
|
||||
encoding: 'utf-8',
|
||||
});
|
||||
return data;
|
||||
},
|
||||
|
||||
search_meta: 'get',
|
||||
async search({ filter }) {
|
||||
// DOCS: https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md#get-v1search
|
||||
const resp = await axios.default.get(
|
||||
`http://registry.npmjs.com/-/v1/search?text=${encodeURIComponent(filter)}+keywords:dbgateplugin&size=25&from=0`
|
||||
);
|
||||
const { objects } = resp.data || {};
|
||||
return (objects || []).map((x) => x.package);
|
||||
},
|
||||
|
||||
info_meta: 'get',
|
||||
async info({ packageName }) {
|
||||
try {
|
||||
const infoResp = await axios.default.get(`https://registry.npmjs.org/${packageName}`);
|
||||
const { latest } = infoResp.data['dist-tags'];
|
||||
const manifest = infoResp.data.versions[latest];
|
||||
const { readme } = infoResp.data;
|
||||
|
||||
return {
|
||||
readme,
|
||||
manifest,
|
||||
};
|
||||
} catch (err) {
|
||||
return {
|
||||
state: 'error',
|
||||
error: err.message,
|
||||
};
|
||||
}
|
||||
|
||||
// const dir = path.join(pluginstmpdir(), packageName);
|
||||
// if (!(await fs.exists(dir))) {
|
||||
// await downloadPackage(packageName, dir);
|
||||
// }
|
||||
// return await loadPackageInfo(dir);
|
||||
// return await {
|
||||
// ...loadPackageInfo(dir),
|
||||
// installed: loadPackageInfo(path.join(pluginsdir(), packageName)),
|
||||
// };
|
||||
},
|
||||
|
||||
installed_meta: 'get',
|
||||
async installed() {
|
||||
const files = await fs.readdir(pluginsdir());
|
||||
const res = [];
|
||||
for (const packageName of files) {
|
||||
const manifest = await fs
|
||||
.readFile(path.join(pluginsdir(), packageName, 'package.json'))
|
||||
.then((x) => JSON.parse(x));
|
||||
const readmeFile = path.join(pluginsdir(), packageName, 'README.md');
|
||||
if (await fs.exists(readmeFile)) {
|
||||
manifest.readme = await fs.readFile(readmeFile, { encoding: 'utf-8' });
|
||||
}
|
||||
res.push(manifest);
|
||||
}
|
||||
return res;
|
||||
// const res = await Promise.all(
|
||||
// files.map((packageName) =>
|
||||
// fs.readFile(path.join(pluginsdir(), packageName, 'package.json')).then((x) => JSON.parse(x))
|
||||
// )
|
||||
// );
|
||||
},
|
||||
|
||||
install_meta: 'post',
|
||||
async install({ packageName }) {
|
||||
const dir = path.join(pluginsdir(), packageName);
|
||||
if (!(await fs.exists(dir))) {
|
||||
await downloadPackage(packageName, dir);
|
||||
}
|
||||
socket.emitChanged(`installed-plugins-changed`);
|
||||
},
|
||||
|
||||
uninstall_meta: 'post',
|
||||
async uninstall({ packageName }) {
|
||||
const dir = path.join(pluginsdir(), packageName);
|
||||
await fs.rmdir(dir, { recursive: true });
|
||||
socket.emitChanged(`installed-plugins-changed`);
|
||||
this.removedPlugins.push(packageName);
|
||||
await fs.writeFile(path.join(datadir(), 'removed-plugins'), this.removedPlugins.join('\n'));
|
||||
},
|
||||
|
||||
command_meta: 'post',
|
||||
async command({ packageName, command, args }) {
|
||||
const content = requirePlugin(packageName);
|
||||
return content.commands[command](args);
|
||||
},
|
||||
|
||||
async _init() {
|
||||
const installed = await this.installed();
|
||||
try {
|
||||
this.removedPlugins = (await fs.readFile(path.join(datadir(), 'removed-plugins'), { encoding: 'utf-8' })).split(
|
||||
'\n'
|
||||
);
|
||||
} catch (err) {
|
||||
this.removedPlugins = [];
|
||||
}
|
||||
for (const packageName of preinstallPlugins) {
|
||||
if (this.removedPlugins.includes(packageName)) continue;
|
||||
try {
|
||||
console.log('Preinstalling plugin', packageName);
|
||||
await this.install({ packageName });
|
||||
} catch (err) {
|
||||
console.error('Error preinstalling plugin', packageName, err);
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
@@ -2,25 +2,56 @@ const _ = require('lodash');
|
||||
const path = require('path');
|
||||
const fs = require('fs-extra');
|
||||
const uuidv1 = require('uuid/v1');
|
||||
const byline = require('byline');
|
||||
const socket = require('../utility/socket');
|
||||
const { fork } = require('child_process');
|
||||
const { rundir, uploadsdir } = require('../utility/directories');
|
||||
const { rundir, uploadsdir, pluginsdir } = require('../utility/directories');
|
||||
const { extractShellApiPlugins, extractShellApiFunctionName } = require('dbgate-tools');
|
||||
|
||||
function extractPlugins(script) {
|
||||
const requireRegex = /\s*\/\/\s*@require\s+([^\s]+)\s*\n/g;
|
||||
const matches = [...script.matchAll(requireRegex)];
|
||||
return matches.map((x) => x[1]);
|
||||
}
|
||||
|
||||
const requirePluginsTemplate = (plugins) =>
|
||||
plugins
|
||||
.map(
|
||||
(packageName) => `const ${_.camelCase(packageName)} = require(process.env.PLUGIN_${_.camelCase(packageName)});\n`
|
||||
)
|
||||
.join('') + `dbgateApi.registerPlugins(${plugins.map((x) => _.camelCase(x)).join(',')});\n`;
|
||||
|
||||
const scriptTemplate = (script) => `
|
||||
const dbgateApi = require(process.env.DBGATE_API || "@dbgate/api");
|
||||
const dbgateApi = require(process.env.DBGATE_API);
|
||||
${requirePluginsTemplate(extractPlugins(script))}
|
||||
require=null;
|
||||
async function run() {
|
||||
${script}
|
||||
await dbgateApi.finalizer.run();
|
||||
console.log('Finished job script');
|
||||
}
|
||||
dbgateApi.runScript(run);
|
||||
`;
|
||||
|
||||
const loaderScriptTemplate = (functionName, props, runid) => `
|
||||
const dbgateApi = require(process.env.DBGATE_API);
|
||||
${requirePluginsTemplate(extractShellApiPlugins(functionName, props))}
|
||||
require=null;
|
||||
async function run() {
|
||||
const reader=await ${extractShellApiFunctionName(functionName)}(${JSON.stringify(props)});
|
||||
const writer=await dbgateApi.collectorWriter({runid: '${runid}'});
|
||||
await dbgateApi.copyStream(reader, writer);
|
||||
}
|
||||
dbgateApi.runScript(run);
|
||||
`;
|
||||
|
||||
module.exports = {
|
||||
/** @type {import('@dbgate/types').OpenedRunner[]} */
|
||||
/** @type {import('dbgate-types').OpenedRunner[]} */
|
||||
opened: [],
|
||||
requests: {},
|
||||
|
||||
dispatchMessage(runid, message) {
|
||||
console.log('DISPATCHING', message);
|
||||
if (message) console.log('...', message.message);
|
||||
if (_.isString(message)) {
|
||||
socket.emit(`runner-info-${runid}`, {
|
||||
message,
|
||||
@@ -39,35 +70,48 @@ module.exports = {
|
||||
|
||||
handle_ping() {},
|
||||
|
||||
start_meta: 'post',
|
||||
async start({ script }) {
|
||||
const runid = uuidv1();
|
||||
handle_freeData(runid, { freeData }) {
|
||||
const [resolve, reject] = this.requests[runid];
|
||||
resolve(freeData);
|
||||
delete this.requests[runid];
|
||||
},
|
||||
|
||||
rejectRequest(runid, error) {
|
||||
if (this.requests[runid]) {
|
||||
const [resolve, reject] = this.requests[runid];
|
||||
reject(error);
|
||||
delete this.requests[runid];
|
||||
}
|
||||
},
|
||||
|
||||
startCore(runid, scriptText) {
|
||||
const directory = path.join(rundir(), runid);
|
||||
const scriptFile = path.join(uploadsdir(), runid + '.js');
|
||||
fs.writeFileSync(`${scriptFile}`, scriptTemplate(script));
|
||||
fs.writeFileSync(`${scriptFile}`, scriptText);
|
||||
fs.mkdirSync(directory);
|
||||
const pluginNames = fs.readdirSync(pluginsdir());
|
||||
console.log(`RUNNING SCRIPT ${scriptFile}`);
|
||||
const subprocess = fork(scriptFile, ['--checkParent'], {
|
||||
cwd: directory,
|
||||
stdio: ['ignore', 'pipe', 'pipe', 'ipc'],
|
||||
env: {
|
||||
DBGATE_API: process.argv[1],
|
||||
..._.fromPairs(pluginNames.map((name) => [`PLUGIN_${_.camelCase(name)}`, path.join(pluginsdir(), name)])),
|
||||
},
|
||||
});
|
||||
const pipeDispatcher = (severity) => (data) =>
|
||||
data
|
||||
.toString()
|
||||
.split('\n')
|
||||
.forEach((message) => {
|
||||
if (message.trim()) this.dispatchMessage(runid, { severity, message: message.trim() });
|
||||
});
|
||||
this.dispatchMessage(runid, { severity, message: data.toString().trim() });
|
||||
|
||||
subprocess.stdout.on('data', pipeDispatcher('info'));
|
||||
subprocess.stderr.on('data', pipeDispatcher('error'));
|
||||
byline(subprocess.stdout).on('data', pipeDispatcher('info'));
|
||||
byline(subprocess.stderr).on('data', pipeDispatcher('error'));
|
||||
subprocess.on('exit', (code) => {
|
||||
this.rejectRequest(runid, { message: 'No data retured, maybe input data source is too big' });
|
||||
console.log('... EXIT process', code);
|
||||
socket.emit(`runner-done-${runid}`, code);
|
||||
});
|
||||
subprocess.on('error', (error) => {
|
||||
this.rejectRequest(runid, { message: error && (error.message || error.toString()) });
|
||||
console.error('... ERROR subprocess', error);
|
||||
this.dispatchMessage({
|
||||
severity: 'error',
|
||||
message: error.toString(),
|
||||
@@ -85,6 +129,12 @@ module.exports = {
|
||||
return newOpened;
|
||||
},
|
||||
|
||||
start_meta: 'post',
|
||||
async start({ script }) {
|
||||
const runid = uuidv1();
|
||||
return this.startCore(runid, scriptTemplate(script));
|
||||
},
|
||||
|
||||
cancel_meta: 'post',
|
||||
async cancel({ runid }) {
|
||||
const runner = this.opened.find((x) => x.runid == runid);
|
||||
@@ -110,4 +160,14 @@ module.exports = {
|
||||
}
|
||||
return res;
|
||||
},
|
||||
|
||||
loadReader_meta: 'post',
|
||||
async loadReader({ functionName, props }) {
|
||||
const promise = new Promise((resolve, reject) => {
|
||||
const runid = uuidv1();
|
||||
this.requests[runid] = [resolve, reject];
|
||||
this.startCore(runid, loaderScriptTemplate(functionName, props, runid));
|
||||
});
|
||||
return promise;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -99,4 +99,11 @@ module.exports = {
|
||||
await this.ensureOpened(conid);
|
||||
return { status: 'ok' };
|
||||
},
|
||||
|
||||
createDatabase_meta: 'post',
|
||||
async createDatabase({ conid, name }) {
|
||||
const opened = await this.ensureOpened(conid);
|
||||
opened.subprocess.send({ msgtype: 'createDatabase', name });
|
||||
return { status: 'ok' };
|
||||
},
|
||||
};
|
||||
|
||||
@@ -6,7 +6,7 @@ const { fork } = require('child_process');
|
||||
const jsldata = require('./jsldata');
|
||||
|
||||
module.exports = {
|
||||
/** @type {import('@dbgate/types').OpenedSession[]} */
|
||||
/** @type {import('dbgate-types').OpenedSession[]} */
|
||||
opened: [],
|
||||
|
||||
// handle_error(sesid, props) {
|
||||
@@ -50,8 +50,8 @@ module.exports = {
|
||||
},
|
||||
|
||||
handle_recordset(sesid, props) {
|
||||
const { jslid } = props;
|
||||
socket.emit(`session-recordset-${sesid}`, { jslid });
|
||||
const { jslid, resultIndex } = props;
|
||||
socket.emit(`session-recordset-${sesid}`, { jslid, resultIndex });
|
||||
},
|
||||
|
||||
handle_stats(sesid, stats) {
|
||||
@@ -105,6 +105,16 @@ module.exports = {
|
||||
return { state: 'ok' };
|
||||
},
|
||||
|
||||
kill_meta: 'post',
|
||||
async kill({ sesid }) {
|
||||
const session = this.opened.find((x) => x.sesid == sesid);
|
||||
if (!session) {
|
||||
throw new Error('Invalid session');
|
||||
}
|
||||
session.subprocess.kill();
|
||||
return { state: 'ok' };
|
||||
},
|
||||
|
||||
// runCommand_meta: 'post',
|
||||
// async runCommand({ conid, database, sql }) {
|
||||
// console.log(`Running SQL command , conid=${conid}, database=${database}, sql=${sql}`);
|
||||
|
||||
28
packages/api/src/controllers/uploads.js
Normal file
28
packages/api/src/controllers/uploads.js
Normal file
@@ -0,0 +1,28 @@
|
||||
const path = require('path');
|
||||
const { uploadsdir } = require('../utility/directories');
|
||||
const uuidv1 = require('uuid/v1');
|
||||
|
||||
module.exports = {
|
||||
upload_meta: {
|
||||
method: 'post',
|
||||
raw: true,
|
||||
},
|
||||
upload(req, res) {
|
||||
const { data } = req.files || {};
|
||||
if (!data) {
|
||||
res.json(null);
|
||||
return;
|
||||
}
|
||||
const uploadName = uuidv1();
|
||||
const filePath = path.join(uploadsdir(), uploadName);
|
||||
console.log(`Uploading file ${data.name}, size=${data.size}`);
|
||||
|
||||
data.mv(filePath, () => {
|
||||
res.json({
|
||||
originalName: data.name,
|
||||
uploadName,
|
||||
filePath,
|
||||
});
|
||||
});
|
||||
},
|
||||
};
|
||||
@@ -12,7 +12,7 @@ if (argument && argument.endsWith('Process')) {
|
||||
|
||||
const module = proc[argument];
|
||||
module.start();
|
||||
} else if (!module['parent']) {
|
||||
} else if (!module['parent'] && !process.argv.includes('--checkParent')) {
|
||||
const main = require('./main');
|
||||
|
||||
main.start(argument);
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
const express = require('express');
|
||||
const basicAuth = require('express-basic-auth');
|
||||
const bodyParser = require('body-parser');
|
||||
const fileUpload = require('express-fileupload');
|
||||
const http = require('http');
|
||||
const cors = require('cors');
|
||||
const io = require('socket.io');
|
||||
@@ -18,6 +20,9 @@ const sessions = require('./controllers/sessions');
|
||||
const runners = require('./controllers/runners');
|
||||
const jsldata = require('./controllers/jsldata');
|
||||
const config = require('./controllers/config');
|
||||
const archive = require('./controllers/archive');
|
||||
const uploads = require('./controllers/uploads');
|
||||
const plugins = require('./controllers/plugins');
|
||||
|
||||
const { rundir } = require('./utility/directories');
|
||||
|
||||
@@ -29,8 +34,27 @@ function start(argument = null) {
|
||||
const server = http.createServer(app);
|
||||
socket.set(io(server));
|
||||
|
||||
if (process.env.LOGIN && process.env.PASSWORD) {
|
||||
app.use(
|
||||
basicAuth({
|
||||
users: {
|
||||
[process.env.LOGIN]: process.env.PASSWORD,
|
||||
},
|
||||
challenge: true,
|
||||
realm: 'DbGate Web App',
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
app.use(cors());
|
||||
app.use(bodyParser.json());
|
||||
app.use(bodyParser.json({ limit: '50mb' }));
|
||||
|
||||
app.use(
|
||||
'/uploads',
|
||||
fileUpload({
|
||||
limits: { fileSize: 4 * 1024 * 1024 },
|
||||
})
|
||||
);
|
||||
|
||||
useController(app, '/connections', connections);
|
||||
useController(app, '/server-connections', serverConnections);
|
||||
@@ -40,6 +64,9 @@ function start(argument = null) {
|
||||
useController(app, '/runners', runners);
|
||||
useController(app, '/jsldata', jsldata);
|
||||
useController(app, '/config', config);
|
||||
useController(app, '/archive', archive);
|
||||
useController(app, '/uploads', uploads);
|
||||
useController(app, '/plugins', plugins);
|
||||
|
||||
if (process.env.PAGES_DIRECTORY) {
|
||||
app.use('/pages', express.static(process.env.PAGES_DIRECTORY));
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
const engines = require('@dbgate/engines');
|
||||
const driverConnect = require('../utility/driverConnect');
|
||||
const childProcessChecker = require('../utility/childProcessChecker');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
|
||||
function start() {
|
||||
childProcessChecker();
|
||||
process.on('message', async (connection) => {
|
||||
try {
|
||||
const driver = engines(connection);
|
||||
const conn = await driverConnect(driver, connection);
|
||||
const driver = requireEngineDriver(connection);
|
||||
const conn = await driver.connect(connection);
|
||||
const res = await driver.getVersion(conn);
|
||||
process.send({ msgtype: 'connected', ...res });
|
||||
} catch (e) {
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
const engines = require('@dbgate/engines');
|
||||
const stableStringify = require('json-stable-stringify');
|
||||
const driverConnect = require('../utility/driverConnect');
|
||||
const childProcessChecker = require('../utility/childProcessChecker');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
|
||||
let systemConnection;
|
||||
let storedConnection;
|
||||
@@ -26,14 +25,14 @@ async function checkedAsyncCall(promise) {
|
||||
}
|
||||
|
||||
async function handleFullRefresh() {
|
||||
const driver = engines(storedConnection);
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
analysedStructure = await checkedAsyncCall(driver.analyseFull(systemConnection));
|
||||
process.send({ msgtype: 'structure', structure: analysedStructure });
|
||||
setStatusName('ok');
|
||||
}
|
||||
|
||||
async function handleIncrementalRefresh() {
|
||||
const driver = engines(storedConnection);
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
const newStructure = await checkedAsyncCall(driver.analyseIncremental(systemConnection, analysedStructure));
|
||||
if (newStructure != null) {
|
||||
analysedStructure = newStructure;
|
||||
@@ -58,8 +57,8 @@ async function handleConnect({ connection, structure }) {
|
||||
lastPing = new Date().getTime();
|
||||
|
||||
if (!structure) setStatusName('pending');
|
||||
const driver = engines(storedConnection);
|
||||
systemConnection = await checkedAsyncCall(driverConnect(driver, storedConnection));
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
systemConnection = await checkedAsyncCall(driver.connect(storedConnection));
|
||||
if (structure) {
|
||||
analysedStructure = structure;
|
||||
handleIncrementalRefresh();
|
||||
@@ -82,7 +81,7 @@ function waitConnected() {
|
||||
|
||||
async function handleQueryData({ msgid, sql }) {
|
||||
await waitConnected();
|
||||
const driver = engines(storedConnection);
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
const res = await driver.query(systemConnection, sql);
|
||||
process.send({ msgtype: 'response', msgid, ...res });
|
||||
|
||||
@@ -2,10 +2,12 @@ const connectProcess = require('./connectProcess');
|
||||
const databaseConnectionProcess = require('./databaseConnectionProcess');
|
||||
const serverConnectionProcess = require('./serverConnectionProcess');
|
||||
const sessionProcess = require('./sessionProcess');
|
||||
const jslDatastoreProcess = require('./jslDatastoreProcess');
|
||||
|
||||
module.exports = {
|
||||
connectProcess,
|
||||
databaseConnectionProcess,
|
||||
serverConnectionProcess,
|
||||
sessionProcess,
|
||||
jslDatastoreProcess,
|
||||
};
|
||||
|
||||
58
packages/api/src/proc/jslDatastoreProcess.js
Normal file
58
packages/api/src/proc/jslDatastoreProcess.js
Normal file
@@ -0,0 +1,58 @@
|
||||
const childProcessChecker = require('../utility/childProcessChecker');
|
||||
const JsonLinesDatastore = require('../utility/JsonLinesDatastore');
|
||||
|
||||
let lastPing = null;
|
||||
let datastore = new JsonLinesDatastore();
|
||||
|
||||
function handlePing() {
|
||||
lastPing = new Date().getTime();
|
||||
}
|
||||
|
||||
function handleOpen({ file }) {
|
||||
handlePing();
|
||||
datastore = new JsonLinesDatastore(file);
|
||||
}
|
||||
|
||||
async function handleRead({ msgid, offset, limit }) {
|
||||
handlePing();
|
||||
const rows = await datastore.getRows(offset, limit);
|
||||
process.send({ msgtype: 'response', msgid, rows });
|
||||
}
|
||||
|
||||
async function handleNotify({ msgid }) {
|
||||
await datastore.notifyChanged();
|
||||
process.send({ msgtype: 'notify', msgid });
|
||||
}
|
||||
|
||||
const messageHandlers = {
|
||||
open: handleOpen,
|
||||
read: handleRead,
|
||||
ping: handlePing,
|
||||
notify: handleNotify,
|
||||
};
|
||||
|
||||
async function handleMessage({ msgtype, ...other }) {
|
||||
const handler = messageHandlers[msgtype];
|
||||
await handler(other);
|
||||
}
|
||||
|
||||
function start() {
|
||||
childProcessChecker();
|
||||
|
||||
setInterval(() => {
|
||||
const time = new Date().getTime();
|
||||
if (time - lastPing > 60 * 1000) {
|
||||
process.exit(0);
|
||||
}
|
||||
}, 60 * 1000);
|
||||
|
||||
process.on('message', async (message) => {
|
||||
try {
|
||||
await handleMessage(message);
|
||||
} catch (e) {
|
||||
process.send({ msgtype: 'error', error: e.message });
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = { start };
|
||||
@@ -1,7 +1,6 @@
|
||||
const engines = require('@dbgate/engines');
|
||||
const stableStringify = require('json-stable-stringify');
|
||||
const driverConnect = require('../utility/driverConnect');
|
||||
const childProcessChecker = require('../utility/childProcessChecker');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
|
||||
let systemConnection;
|
||||
let storedConnection;
|
||||
@@ -10,7 +9,7 @@ let lastStatus = null;
|
||||
let lastPing = null;
|
||||
|
||||
async function handleRefresh() {
|
||||
const driver = engines(storedConnection);
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
const databases = await driver.listDatabases(systemConnection);
|
||||
setStatusName('ok');
|
||||
@@ -46,9 +45,9 @@ async function handleConnect(connection) {
|
||||
setStatusName('pending');
|
||||
lastPing = new Date().getTime();
|
||||
|
||||
const driver = engines(storedConnection);
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
systemConnection = await driverConnect(driver, storedConnection);
|
||||
systemConnection = await driver.connect(storedConnection);
|
||||
handleRefresh();
|
||||
setInterval(handleRefresh, 30 * 1000);
|
||||
} catch (err) {
|
||||
@@ -65,9 +64,18 @@ function handlePing() {
|
||||
lastPing = new Date().getTime();
|
||||
}
|
||||
|
||||
async function handleCreateDatabase({ name }) {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
systemConnection = await driver.connect(storedConnection);
|
||||
console.log(`RUNNING SCRIPT: CREATE DATABASE ${driver.dialect.quoteIdentifier(name)}`);
|
||||
await driver.query(systemConnection, `CREATE DATABASE ${driver.dialect.quoteIdentifier(name)}`);
|
||||
await handleRefresh();
|
||||
}
|
||||
|
||||
const messageHandlers = {
|
||||
connect: handleConnect,
|
||||
ping: handlePing,
|
||||
createDatabase: handleCreateDatabase,
|
||||
};
|
||||
|
||||
async function handleMessage({ msgtype, ...other }) {
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
const engines = require('@dbgate/engines');
|
||||
const uuidv1 = require('uuid/v1');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const _ = require('lodash');
|
||||
const childProcessChecker = require('../utility/childProcessChecker');
|
||||
const goSplit = require('../utility/goSplit');
|
||||
|
||||
const driverConnect = require('../utility/driverConnect');
|
||||
const { jsldir } = require('../utility/directories');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
|
||||
let systemConnection;
|
||||
let storedConnection;
|
||||
@@ -14,15 +14,16 @@ let afterConnectCallbacks = [];
|
||||
let currentHandlers = [];
|
||||
|
||||
class TableWriter {
|
||||
constructor(columns) {
|
||||
constructor(columns, resultIndex) {
|
||||
this.jslid = uuidv1();
|
||||
this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
|
||||
this.currentStream = fs.createWriteStream(this.currentFile);
|
||||
this.currentRowCount = 0;
|
||||
this.currentChangeIndex = 0;
|
||||
fs.writeFileSync(`${this.currentFile}.info`, JSON.stringify(columns));
|
||||
this.currentChangeIndex = 1;
|
||||
fs.writeFileSync(this.currentFile, JSON.stringify({ columns }) + '\n');
|
||||
this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
|
||||
this.writeCurrentStats(false, false);
|
||||
process.send({ msgtype: 'recordset', jslid: this.jslid });
|
||||
this.resultIndex = resultIndex;
|
||||
process.send({ msgtype: 'recordset', jslid: this.jslid, resultIndex });
|
||||
}
|
||||
|
||||
row(row) {
|
||||
@@ -63,7 +64,7 @@ class TableWriter {
|
||||
}
|
||||
|
||||
class StreamHandler {
|
||||
constructor() {
|
||||
constructor(resultIndex) {
|
||||
this.recordset = this.recordset.bind(this);
|
||||
this.row = this.row.bind(this);
|
||||
// this.error = this.error.bind(this);
|
||||
@@ -72,6 +73,7 @@ class StreamHandler {
|
||||
// use this for cancelling
|
||||
this.stream = null;
|
||||
this.plannedStats = false;
|
||||
this.resultIndex = resultIndex;
|
||||
currentHandlers = [...currentHandlers, this];
|
||||
}
|
||||
|
||||
@@ -84,7 +86,7 @@ class StreamHandler {
|
||||
|
||||
recordset(columns) {
|
||||
this.closeCurrentWriter();
|
||||
this.currentWriter = new TableWriter(columns);
|
||||
this.currentWriter = new TableWriter(columns, this.resultIndex);
|
||||
|
||||
// this.writeCurrentStats();
|
||||
|
||||
@@ -96,7 +98,8 @@ class StreamHandler {
|
||||
}
|
||||
row(row) {
|
||||
// console.log('ACCEPT ROW', row);
|
||||
this.currentWriter.row(row);
|
||||
if (this.currentWriter) this.currentWriter.row(row);
|
||||
else if (row.message) process.send({ msgtype: 'info', info: { message: row.message } });
|
||||
// this.onRow(this.jslid);
|
||||
}
|
||||
// error(error) {
|
||||
@@ -115,8 +118,8 @@ class StreamHandler {
|
||||
async function handleConnect(connection) {
|
||||
storedConnection = connection;
|
||||
|
||||
const driver = engines(storedConnection);
|
||||
systemConnection = await driverConnect(driver, storedConnection);
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
systemConnection = await driver.connect(storedConnection);
|
||||
for (const [resolve] of afterConnectCallbacks) {
|
||||
resolve();
|
||||
}
|
||||
@@ -138,11 +141,15 @@ function waitConnected() {
|
||||
|
||||
async function handleExecuteQuery({ sql }) {
|
||||
await waitConnected();
|
||||
const driver = engines(storedConnection);
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
|
||||
const handler = new StreamHandler();
|
||||
const stream = await driver.stream(systemConnection, sql, handler);
|
||||
handler.stream = stream;
|
||||
let resultIndex = 0;
|
||||
for (const sqlItem of goSplit(sql)) {
|
||||
const handler = new StreamHandler(resultIndex);
|
||||
const stream = await driver.stream(systemConnection, sqlItem, handler);
|
||||
handler.stream = stream;
|
||||
resultIndex += 1;
|
||||
}
|
||||
}
|
||||
|
||||
const messageHandlers = {
|
||||
|
||||
11
packages/api/src/shell/archiveReader.js
Normal file
11
packages/api/src/shell/archiveReader.js
Normal file
@@ -0,0 +1,11 @@
|
||||
const path = require('path');
|
||||
const { archivedir } = require('../utility/directories');
|
||||
const jsonLinesReader = require('./jsonLinesReader');
|
||||
|
||||
function archiveReader({ folderName, fileName, ...other }) {
|
||||
const jsonlFile = path.join(archivedir(), folderName, `${fileName}.jsonl`);
|
||||
const res = jsonLinesReader({ fileName: jsonlFile, ...other });
|
||||
return res;
|
||||
}
|
||||
|
||||
module.exports = archiveReader;
|
||||
19
packages/api/src/shell/archiveWriter.js
Normal file
19
packages/api/src/shell/archiveWriter.js
Normal file
@@ -0,0 +1,19 @@
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const { archivedir } = require('../utility/directories');
|
||||
// const socket = require('../utility/socket');
|
||||
const jsonLinesWriter = require('./jsonLinesWriter');
|
||||
|
||||
function archiveWriter({ folderName, fileName }) {
|
||||
const dir = path.join(archivedir(), folderName);
|
||||
if (!fs.existsSync(dir)) {
|
||||
console.log(`Creating directory ${dir}`);
|
||||
fs.mkdirSync(dir);
|
||||
}
|
||||
const jsonlFile = path.join(dir, `${fileName}.jsonl`);
|
||||
const res = jsonLinesWriter({ fileName: jsonlFile });
|
||||
// socket.emitChanged(`archive-files-changed-${folderName}`);
|
||||
return res;
|
||||
}
|
||||
|
||||
module.exports = archiveWriter;
|
||||
33
packages/api/src/shell/collectorWriter.js
Normal file
33
packages/api/src/shell/collectorWriter.js
Normal file
@@ -0,0 +1,33 @@
|
||||
const stream = require('stream');
|
||||
|
||||
class CollectorWriterStream extends stream.Writable {
|
||||
constructor(options) {
|
||||
super(options);
|
||||
this.rows = [];
|
||||
this.structure = null;
|
||||
this.runid = options.runid;
|
||||
}
|
||||
_write(chunk, enc, next) {
|
||||
if (!this.structure) this.structure = chunk;
|
||||
else this.rows.push(chunk);
|
||||
next();
|
||||
}
|
||||
|
||||
_final(callback) {
|
||||
process.send({
|
||||
msgtype: 'freeData',
|
||||
runid: this.runid,
|
||||
freeData: { rows: this.rows, structure: this.structure },
|
||||
});
|
||||
callback();
|
||||
}
|
||||
}
|
||||
|
||||
// Factory for a CollectorWriterStream bound to the given run id.
// Declared async so its signature matches the other writer factories.
async function collectorWriter({ runid }) {
  const options = { objectMode: true, runid };
  return new CollectorWriterStream(options);
}

module.exports = collectorWriter;
|
||||
16
packages/api/src/shell/consoleObjectWriter.js
Normal file
16
packages/api/src/shell/consoleObjectWriter.js
Normal file
@@ -0,0 +1,16 @@
|
||||
const stream = require('stream');
|
||||
|
||||
class ObjectWriterStream extends stream.Writable {
|
||||
_write(chunk, enc, next) {
|
||||
console.log(JSON.stringify(chunk));
|
||||
next();
|
||||
}
|
||||
}
|
||||
|
||||
// Factory for a console-logging object writer; async so it matches the
// signature of the other writer factories in this folder.
async function consoleObjectWriter() {
  const options = { objectMode: true };
  return new ObjectWriterStream(options);
}

module.exports = consoleObjectWriter;
|
||||
@@ -1,13 +0,0 @@
|
||||
const csv = require('csv');
const fs = require('fs');

// Creates a CSV stringifier piped into a file. The returned stream is the
// stringifier; the underlying file stream is exposed on its 'finisher'
// property so callers can wait for the file to be fully flushed.
async function csvWriter({ fileName, encoding = 'utf-8', ...options }) {
  console.log(`Writing file ${fileName}`);
  const stringifier = csv.stringify(options);
  const output = fs.createWriteStream(fileName, encoding);
  stringifier.pipe(output);
  stringifier['finisher'] = output;
  return stringifier;
}

module.exports = csvWriter;
|
||||
@@ -5,10 +5,13 @@ async function fakeObjectReader({ delay = 0 } = {}) {
|
||||
objectMode: true,
|
||||
});
|
||||
function doWrite() {
|
||||
pass.write({ columns: [{ columnName: 'id' }, { columnName: 'country' }] });
|
||||
pass.write({ id: 1, country: 'Czechia' });
|
||||
pass.write({ id: 2, country: 'Austria' });
|
||||
pass.write({ id: 3, country: 'Germany' });
|
||||
pass.write({ id: 4, country: 'Romania' });
|
||||
pass.write({ country: 'Germany', id: 3 });
|
||||
pass.write({ country: 'Romania', id: 4 });
|
||||
pass.write({ country: 'Great Britain', id: 5 });
|
||||
pass.write({ country: 'Bosna, Hecegovina', id: 6 });
|
||||
pass.end();
|
||||
}
|
||||
|
||||
|
||||
12
packages/api/src/shell/finalizer.js
Normal file
12
packages/api/src/shell/finalizer.js
Normal file
@@ -0,0 +1,12 @@
|
||||
const finalizers = [];
|
||||
|
||||
module.exports = {
|
||||
async run() {
|
||||
for (const func of finalizers) {
|
||||
await func();
|
||||
}
|
||||
},
|
||||
register(func) {
|
||||
finalizers.push(func);
|
||||
},
|
||||
};
|
||||
@@ -1,13 +1,38 @@
|
||||
const queryReader = require('./queryReader');
|
||||
const csvWriter = require('./csvWriter');
|
||||
const runScript = require('./runScript');
|
||||
const tableWriter = require('./tableWriter');
|
||||
const tableReader = require('./tableReader');
|
||||
const copyStream = require('./copyStream');
|
||||
const fakeObjectReader = require('./fakeObjectReader');
|
||||
const consoleObjectWriter = require('./consoleObjectWriter');
|
||||
const jsonLinesWriter = require('./jsonLinesWriter');
|
||||
const jsonLinesReader = require('./jsonLinesReader');
|
||||
const jslDataReader = require('./jslDataReader');
|
||||
const archiveWriter = require('./archiveWriter');
|
||||
const archiveReader = require('./archiveReader');
|
||||
const collectorWriter = require('./collectorWriter');
|
||||
const finalizer = require('./finalizer');
|
||||
const registerPlugins = require('./registerPlugins');
|
||||
const requirePlugin = require('./requirePlugin');
|
||||
|
||||
module.exports = {
|
||||
const dbgateApi = {
|
||||
queryReader,
|
||||
csvWriter,
|
||||
runScript,
|
||||
tableWriter,
|
||||
tableReader,
|
||||
copyStream,
|
||||
jsonLinesWriter,
|
||||
jsonLinesReader,
|
||||
fakeObjectReader,
|
||||
consoleObjectWriter,
|
||||
jslDataReader,
|
||||
archiveWriter,
|
||||
archiveReader,
|
||||
collectorWriter,
|
||||
finalizer,
|
||||
registerPlugins,
|
||||
};
|
||||
|
||||
requirePlugin.initialize(dbgateApi);
|
||||
|
||||
module.exports = dbgateApi;
|
||||
|
||||
9
packages/api/src/shell/jslDataReader.js
Normal file
9
packages/api/src/shell/jslDataReader.js
Normal file
@@ -0,0 +1,9 @@
|
||||
const getJslFileName = require('../utility/getJslFileName');
const jsonLinesReader = require('./jsonLinesReader');

// Reads a stored JSL dataset (identified by jslid) as an object stream,
// resolving the jslid to its backing JSONL file first. All other options
// pass straight through to jsonLinesReader.
function jslDataReader({ jslid, ...readerOptions }) {
  return jsonLinesReader({ fileName: getJslFileName(jslid), ...readerOptions });
}

module.exports = jslDataReader;
|
||||
37
packages/api/src/shell/jsonLinesReader.js
Normal file
37
packages/api/src/shell/jsonLinesReader.js
Normal file
@@ -0,0 +1,37 @@
|
||||
const fs = require('fs');
|
||||
const stream = require('stream');
|
||||
const byline = require('byline');
|
||||
|
||||
class ParseStream extends stream.Transform {
|
||||
constructor({ header, limitRows }) {
|
||||
super({ objectMode: true });
|
||||
this.header = header;
|
||||
this.wasHeader = false;
|
||||
this.limitRows = limitRows;
|
||||
this.rowsWritten = 0;
|
||||
}
|
||||
_transform(chunk, encoding, done) {
|
||||
const obj = JSON.parse(chunk);
|
||||
if (!this.wasHeader) {
|
||||
if (!this.header) this.push({ columns: Object.keys(obj).map((columnName) => ({ columnName })) });
|
||||
this.wasHeader = true;
|
||||
}
|
||||
if (!this.limitRows || this.rowsWritten < this.limitRows) {
|
||||
this.push(obj);
|
||||
this.rowsWritten += 1;
|
||||
}
|
||||
done();
|
||||
}
|
||||
}
|
||||
|
||||
// Opens fileName as a JSONL object stream: the file is read, split into
// lines (byline), and each line is parsed into an object by ParseStream.
// Returns the parser end of the pipeline.
async function jsonLinesReader({ fileName, encoding = 'utf-8', header = true, limitRows = undefined }) {
  console.log(`Reading file ${fileName}`);

  const input = fs.createReadStream(fileName, encoding);
  const lines = byline(input);
  const parsed = new ParseStream({ header, limitRows });
  lines.pipe(parsed);
  return parsed;
}

module.exports = jsonLinesReader;
|
||||
30
packages/api/src/shell/jsonLinesWriter.js
Normal file
30
packages/api/src/shell/jsonLinesWriter.js
Normal file
@@ -0,0 +1,30 @@
|
||||
const fs = require('fs');
|
||||
const stream = require('stream');
|
||||
|
||||
class StringifyStream extends stream.Transform {
|
||||
constructor({ header }) {
|
||||
super({ objectMode: true });
|
||||
this.header = header;
|
||||
this.wasHeader = false;
|
||||
}
|
||||
_transform(chunk, encoding, done) {
|
||||
if (!this.wasHeader) {
|
||||
if (this.header) this.push(JSON.stringify(chunk) + '\n');
|
||||
this.wasHeader = true;
|
||||
} else {
|
||||
this.push(JSON.stringify(chunk) + '\n');
|
||||
}
|
||||
done();
|
||||
}
|
||||
}
|
||||
|
||||
// Creates a writer that serializes an object stream into a JSONL file.
// The returned stream is the stringifier; the underlying file stream is
// exposed on its 'finisher' property so callers can await the final flush.
async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true }) {
  console.log(`Writing file ${fileName}`);
  const stringifier = new StringifyStream({ header });
  const output = fs.createWriteStream(fileName, encoding);
  stringifier.pipe(output);
  stringifier['finisher'] = output;
  return stringifier;
}

module.exports = jsonLinesWriter;
|
||||
@@ -1,14 +1,12 @@
|
||||
const driverConnect = require('../utility/driverConnect');
|
||||
|
||||
const engines = require('@dbgate/engines');
|
||||
const requireEngineDriver = require("../utility/requireEngineDriver");
|
||||
|
||||
async function queryReader({ connection, sql }) {
|
||||
console.log(`Reading query ${sql}`);
|
||||
|
||||
const driver = engines(connection);
|
||||
const pool = await driverConnect(driver, connection);
|
||||
const driver = requireEngineDriver(connection);
|
||||
const pool = await driver.connect(connection);
|
||||
console.log(`Connected.`);
|
||||
return await driver.readableStream(pool, sql);
|
||||
return await driver.readQuery(pool, sql);
|
||||
}
|
||||
|
||||
module.exports = queryReader;
|
||||
|
||||
9
packages/api/src/shell/registerPlugins.js
Normal file
9
packages/api/src/shell/registerPlugins.js
Normal file
@@ -0,0 +1,9 @@
|
||||
const requirePlugin = require('./requirePlugin');

// Pre-registers already-loaded plugin modules (each carrying a packageName
// property) so later requirePlugin(packageName) calls resolve from cache
// instead of loading from disk.
function registerPlugins(...plugins) {
  plugins.forEach((plugin) => requirePlugin(plugin.packageName, plugin));
}

module.exports = registerPlugins;
|
||||
37
packages/api/src/shell/requirePlugin.js
Normal file
37
packages/api/src/shell/requirePlugin.js
Normal file
@@ -0,0 +1,37 @@
|
||||
const path = require('path');
const { pluginsdir } = require('../utility/directories');

// Cache of plugins already loaded in this process, keyed by package name.
const loadedPlugins = {};

// Environment object handed to every plugin's initialize() hook.
// dbgateApi is filled in later via requirePlugin.initialize (this avoids a
// require cycle between this module and the shell index).
const dbgateEnv = {
  dbgateApi: null,
};

// Loads (or returns the cached) plugin identified by npm package name.
// When `requiredPlugin` is provided, it is registered directly instead of
// being loaded from disk (used by registerPlugins for bundled plugins).
// Throws when packageName is missing.
function requirePlugin(packageName, requiredPlugin = null) {
  if (!packageName) throw new Error('Missing packageName in plugin');
  if (loadedPlugins[packageName]) return loadedPlugins[packageName];

  if (requiredPlugin == null) {
    let module;
    const modulePath = path.join(pluginsdir(), packageName, 'dist', 'backend.js');
    console.log(`Loading module ${packageName} from ${modulePath}`);
    try {
      // @ts-ignore
      // __non_webpack_require__ exists in the webpacked build; it bypasses
      // webpack's bundled require so the plugin is loaded from disk
      module = __non_webpack_require__(modulePath);
    } catch (err) {
      // not running the webpacked build (or load failed) — fall back to
      // the plain node require
      console.error('Failed load webpacked module', err);
      module = require(modulePath);
    }
    // unwrap the ES-module default export if the plugin was transpiled
    requiredPlugin = module.__esModule ? module.default : module;
  }
  loadedPlugins[packageName] = requiredPlugin;
  // give the plugin access to the shared environment (dbgateApi)
  if (requiredPlugin.initialize) requiredPlugin.initialize(dbgateEnv);

  return requiredPlugin;
}

// Called once from the shell index to expose dbgateApi to plugins.
requirePlugin.initialize = (value) => {
  dbgateEnv.dbgateApi = value;
};

module.exports = requirePlugin;
|
||||
28
packages/api/src/shell/tableReader.js
Normal file
28
packages/api/src/shell/tableReader.js
Normal file
@@ -0,0 +1,28 @@
|
||||
const { quoteFullName } = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');

// Streams the contents of a table or view as objects. The object structure
// is analysed first (table, then view) so the driver can emit a proper
// header; when neither is found, the select is still attempted and the
// driver infers the structure itself.
async function tableReader({ connection, pureName, schemaName }) {
  const driver = requireEngineDriver(connection);
  const pool = await driver.connect(connection);
  console.log(`Connected.`);

  const fullName = { pureName, schemaName };

  const table = await driver.analyseSingleObject(pool, fullName, 'tables');
  const query = `select * from ${quoteFullName(driver.dialect, fullName)}`;
  if (table) {
    console.log(`Reading table ${table.pureName}`);
    // @ts-ignore
    return await driver.readQuery(pool, query, table);
  }

  const view = await driver.analyseSingleObject(pool, fullName, 'views');
  if (view) {
    console.log(`Reading view ${view.pureName}`);
    // @ts-ignore
    return await driver.readQuery(pool, query, view);
  }

  return await driver.readQuery(pool, query);
}

module.exports = tableReader;
|
||||
12
packages/api/src/shell/tableWriter.js
Normal file
12
packages/api/src/shell/tableWriter.js
Normal file
@@ -0,0 +1,12 @@
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');

// Opens a writable stream that inserts incoming rows into the given table.
// Options beyond the table identity are forwarded to driver.writeTable.
async function tableWriter({ connection, schemaName, pureName, ...options }) {
  console.log(`Write table ${schemaName}.${pureName}`);

  const driver = requireEngineDriver(connection);
  const pool = await driver.connect(connection);
  console.log(`Connected.`);
  return await driver.writeTable(pool, { schemaName, pureName }, options);
}

module.exports = tableWriter;
|
||||
75
packages/api/src/utility/DatastoreProxy.js
Normal file
75
packages/api/src/utility/DatastoreProxy.js
Normal file
@@ -0,0 +1,75 @@
|
||||
const { fork } = require('child_process');
const uuidv1 = require('uuid/v1');

// Proxy serving rows from a JSONL file via a forked worker process.
// Requests are correlated with responses through uuid msgid keys; pending
// promises live in this.requests as [resolve, reject] pairs, keyed by msgid.
class DatastoreProxy {
  constructor(file) {
    this.subprocess = null; // forked worker, created lazily
    this.disconnected = false; // only referenced by commented-out guards below
    this.file = file; // JSONL file the worker serves
    this.requests = {}; // msgid -> [resolve, reject]
    this.handle_response = this.handle_response.bind(this);
    this.handle_ping = this.handle_ping.bind(this);
    this.notifyChangedCallback = null;
  }

  // Resolves the pending getRows() promise identified by msgid.
  handle_response({ msgid, rows }) {
    const [resolve, reject] = this.requests[msgid];
    resolve(rows);
    delete this.requests[msgid];
  }

  // Keep-alive message from the worker; intentionally ignored.
  handle_ping() {}

  // Resolves the pending notifyChangedCore() promise identified by msgid.
  handle_notify({ msgid }) {
    const [resolve, reject] = this.requests[msgid];
    resolve();
    delete this.requests[msgid];
  }

  // Forks the worker on first use and wires message dispatch: every incoming
  // message is routed to handle_<msgtype>. The worker is told to open
  // this.file immediately after the fork.
  async ensureSubprocess() {
    if (!this.subprocess) {
      // re-runs this same executable with a marker argument
      this.subprocess = fork(process.argv[1], ['jslDatastoreProcess']);

      // @ts-ignore
      this.subprocess.on('message', ({ msgtype, ...message }) => {
        // if (this.disconnected) return;
        this[`handle_${msgtype}`](message);
      });
      this.subprocess.on('exit', () => {
        // if (this.disconnected) return;
        // drop the reference so the next call re-forks a fresh worker
        this.subprocess = null;
      });
      this.subprocess.send({ msgtype: 'open', file: this.file });
    }
    return this.subprocess;
  }

  // Asks the worker for `limit` rows starting at `offset`; resolved by
  // handle_response when the worker answers with the same msgid.
  async getRows(offset, limit) {
    await this.ensureSubprocess();
    const msgid = uuidv1();
    const promise = new Promise((resolve, reject) => {
      this.requests[msgid] = [resolve, reject];
      this.subprocess.send({ msgtype: 'read', msgid, offset, limit });
    });
    return promise;
  }

  // Sends a 'notify' message and waits for the worker acknowledgement
  // (handle_notify). NOTE(review): assumes the subprocess already exists —
  // ensureSubprocess() is not called here; confirm callers always fetch
  // rows before notifying.
  async notifyChangedCore() {
    const msgid = uuidv1();
    const promise = new Promise((resolve, reject) => {
      this.requests[msgid] = [resolve, reject];
      this.subprocess.send({ msgtype: 'notify', msgid });
    });
    return promise;
  }

  // Registers the callback, waits for the worker to acknowledge the change,
  // then invokes the callback once and clears it.
  async notifyChanged(callback) {
    this.notifyChangedCallback = callback;
    await this.notifyChangedCore();
    const call = this.notifyChangedCallback;
    this.notifyChangedCallback = null;
    if (call) call();
  }
}

module.exports = DatastoreProxy;
|
||||
156
packages/api/src/utility/JsonLinesDatastore.js
Normal file
156
packages/api/src/utility/JsonLinesDatastore.js
Normal file
@@ -0,0 +1,156 @@
|
||||
const lineReader = require('line-reader');
|
||||
const AsyncLock = require('async-lock');
|
||||
const lock = new AsyncLock();
|
||||
const stableStringify = require('json-stable-stringify');
|
||||
const { evaluateCondition } = require('dbgate-sqltree');
|
||||
|
||||
// Promisified wrapper around line-reader's callback API.
// Resolves with the next line, or with null when the reader is exhausted.
async function fetchNextLine(reader) {
  return new Promise((resolve, reject) => {
    if (!reader.hasNextLine()) {
      resolve(null);
      return;
    }

    reader.nextLine((err, line) => {
      if (err) reject(err);
      else resolve(line);
    });
  });
}
|
||||
|
||||
// Serves rows from a JSONL file (first line = structure row, the rest =
// data rows) with sequential access, optional row filtering and
// offset-based paging. All reader access is serialized through the
// module-level 'reader' async lock.
class JsonLinesDatastore {
  constructor(file) {
    this.file = file; // path of the backing JSONL file
    this.reader = null; // line-reader handle, opened lazily
    this.readedDataRowCount = 0; // data rows consumed since the reader was opened
    this.readedSchemaRow = false; // whether the leading structure row was consumed
    this.notifyChangedCallback = null;
    this.currentFilter = null; // filter the open reader was positioned with
  }

  // Drops the current reader and resets all read-position state so the
  // next access reopens the file from the beginning.
  _closeReader() {
    if (!this.reader) return;
    const reader = this.reader;
    this.reader = null;
    this.readedDataRowCount = 0;
    this.readedSchemaRow = false;
    this.currentFilter = null;
    reader.close(() => {});
  }

  // Called when the underlying file changed: closes the reader under the
  // lock, then fires the supplied callback exactly once.
  async notifyChanged(callback) {
    this.notifyChangedCallback = callback;
    await lock.acquire('reader', async () => {
      this._closeReader();
    });
    const call = this.notifyChangedCallback;
    this.notifyChangedCallback = null;
    if (call) call();
  }

  // Opens a line-reader over this.file.
  // NOTE(review): on error both reject and resolve run (resolve with
  // undefined); the second settle is a no-op, but an else branch would be
  // cleaner.
  async _openReader() {
    return new Promise((resolve, reject) =>
      lineReader.open(this.file, (err, reader) => {
        if (err) reject(err);
        resolve(reader);
      })
    );
  }

  // Reads the next relevant line. Returns:
  //   null          — end of file;
  //   true          — a line was consumed (structure row, or data row when
  //                   parse is false);
  //   parsed object — a matching data row, when parse is true.
  // Rows failing this.currentFilter are skipped (the loop continues).
  // NOTE(review): an empty line ('') is falsy and therefore also treated as
  // EOF — confirm the files never contain blank lines.
  async _readLine(parse) {
    for (;;) {
      const line = await fetchNextLine(this.reader);
      if (!line) {
        // EOF
        return null;
      }

      if (!this.readedSchemaRow) {
        // first line of the file is the structure row; consume it unparsed
        this.readedSchemaRow = true;
        return true;
      }
      if (this.currentFilter) {
        const parsedLine = JSON.parse(line);
        if (evaluateCondition(this.currentFilter, parsedLine)) {
          this.readedDataRowCount += 1;
          return parse ? parsedLine : true;
        }
      } else {
        this.readedDataRowCount += 1;
        return parse ? JSON.parse(line) : true;
      }
    }
  }

  // Positions the reader so the next _readLine(true) yields row `offset`:
  // reopens the file when we already read past offset or the filter changed
  // (filters compared via stable stringify), skips the structure row, then
  // skips data rows up to offset.
  async _ensureReader(offset, filter) {
    if (this.readedDataRowCount > offset || stableStringify(filter) != stableStringify(this.currentFilter)) {
      this._closeReader();
    }
    if (!this.reader) {
      const reader = await this._openReader();
      this.reader = reader;
      this.currentFilter = filter;
    }
    if (!this.readedSchemaRow) {
      await this._readLine(false); // skip structure
    }
    while (this.readedDataRowCount < offset) {
      await this._readLine(false);
    }
  }

  // Returns up to `limit` parsed rows starting at data-row `offset`,
  // applying `filter` (a sqltree condition) when given. Serialized via the
  // 'reader' lock so concurrent calls cannot interleave reads.
  async getRows(offset, limit, filter) {
    const res = [];
    await lock.acquire('reader', async () => {
      await this._ensureReader(offset, filter);
      for (let i = 0; i < limit; i += 1) {
        const line = await this._readLine(true);
        if (line == null) break;
        res.push(line);
      }
    });
    return res;
  }
}

module.exports = JsonLinesDatastore;
|
||||
24
packages/api/src/utility/cleanDirectory.js
Normal file
24
packages/api/src/utility/cleanDirectory.js
Normal file
@@ -0,0 +1,24 @@
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
const ageSeconds = 3600;
|
||||
|
||||
async function cleanDirectory(directory) {
|
||||
const files = await fs.readdir(directory);
|
||||
const now = new Date().getTime();
|
||||
|
||||
for (const file of files) {
|
||||
const full = path.join(directory, file);
|
||||
const stat = await fs.stat(full);
|
||||
const mtime = stat.mtime.getTime();
|
||||
const expirationTime = mtime + ageSeconds * 1000;
|
||||
if (now > expirationTime) {
|
||||
if (stat.isDirectory()) {
|
||||
await fs.rmdir(full, { recursive: true });
|
||||
} else {
|
||||
await fs.unlink(full);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = cleanDirectory;
|
||||
@@ -1,43 +1,49 @@
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const cleanDirectory = require('./cleanDirectory');
|
||||
|
||||
let createdDatadir = false;
|
||||
const createDirectories = {};
|
||||
const ensureDirectory = (dir, clean) => {
|
||||
if (!createDirectories[dir]) {
|
||||
if (clean && fs.existsSync(dir)) {
|
||||
console.log(`Cleaning directory ${dir}`);
|
||||
cleanDirectory(dir);
|
||||
}
|
||||
if (!fs.existsSync(dir)) {
|
||||
console.log(`Creating directory ${dir}`);
|
||||
fs.mkdirSync(dir);
|
||||
}
|
||||
createDirectories[dir] = true;
|
||||
}
|
||||
};
|
||||
|
||||
function datadir() {
|
||||
const dir = path.join(os.homedir(), 'dbgate-data');
|
||||
if (!createdDatadir) {
|
||||
if (!fs.existsSync(dir)) {
|
||||
console.log(`Creating data directory ${dir}`);
|
||||
fs.mkdirSync(dir);
|
||||
}
|
||||
createdDatadir = true;
|
||||
}
|
||||
ensureDirectory(dir);
|
||||
|
||||
return dir;
|
||||
}
|
||||
|
||||
const dirFunc = (dirname) => () => {
|
||||
const dirFunc = (dirname, clean = false) => () => {
|
||||
const dir = path.join(datadir(), dirname);
|
||||
if (!createDirectories[dirname]) {
|
||||
if (!fs.existsSync(dir)) {
|
||||
console.log(`Creating jsl directory ${dir}`);
|
||||
fs.mkdirSync(dir);
|
||||
}
|
||||
createDirectories[dirname] = true;
|
||||
}
|
||||
ensureDirectory(dir, clean);
|
||||
|
||||
return dir;
|
||||
};
|
||||
|
||||
const jsldir = dirFunc('jsl');
|
||||
const rundir = dirFunc('run');
|
||||
const uploadsdir = dirFunc('uploads');
|
||||
const jsldir = dirFunc('jsl', true);
|
||||
const rundir = dirFunc('run', true);
|
||||
const uploadsdir = dirFunc('uploads', true);
|
||||
const pluginsdir = dirFunc('plugins');
|
||||
const archivedir = dirFunc('archive');
|
||||
|
||||
module.exports = {
|
||||
datadir,
|
||||
jsldir,
|
||||
rundir,
|
||||
uploadsdir,
|
||||
archivedir,
|
||||
ensureDirectory,
|
||||
pluginsdir,
|
||||
};
|
||||
|
||||
64
packages/api/src/utility/downloadPackage.js
Normal file
64
packages/api/src/utility/downloadPackage.js
Normal file
@@ -0,0 +1,64 @@
|
||||
// const pacote = require('pacote');
|
||||
const axios = require('axios');
|
||||
// const tarballExtract = require('tarball-extract');
|
||||
const uuidv1 = require('uuid/v1');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const zlib = require('zlib');
|
||||
const tar = require('tar');
|
||||
const ncp = require('ncp').ncp;
|
||||
const { uploadsdir } = require('./directories');
|
||||
|
||||
// Extracts a .tgz archive into `destination`.
// Errors from every stage of the pipeline (file read, gunzip, untar) reject
// the returned promise; the original only listened on the tar stage, so a
// corrupt gzip or a missing file crashed the process or hung the promise.
function extractTarball(tmpFile, destination) {
  return new Promise((resolve, reject) => {
    const source = fs.createReadStream(tmpFile);
    const gunzip = zlib.createGunzip();
    source.on('error', (err) => reject(err));
    gunzip.on('error', (err) => reject(err));
    source
      .pipe(gunzip)
      .pipe(tar.extract({ cwd: destination }))
      .on('error', (err) => reject(err))
      .on('end', () => resolve());
  });
}
|
||||
|
||||
// Pipes `pipedStream` into a newly created file and resolves once the file
// stream closes. Errors on either stream reject the promise — the original
// declared `reject` but never wired it, so any I/O error left the promise
// pending forever.
function saveStreamToFile(pipedStream, fileName) {
  return new Promise((resolve, reject) => {
    const fileStream = fs.createWriteStream(fileName);
    fileStream.on('close', () => resolve());
    fileStream.on('error', (err) => reject(err));
    pipedStream.on('error', (err) => reject(err));
    pipedStream.pipe(fileStream);
  });
}
|
||||
|
||||
// Recursively copies `source` into `target` using ncp.
// Resolves on success, rejects on error. The original called resolve()
// unconditionally after reject(err) — harmless (the second settle is a
// no-op) but misleading; the else branch makes the intent explicit.
function copyDirectory(source, target) {
  return new Promise((resolve, reject) => {
    ncp(source, target, (err) => {
      if (err) reject(err);
      else resolve();
    });
  });
}
|
||||
|
||||
// Downloads the latest published version of an npm package and copies its
// contents into `directory`. Returns false when the registry reports no
// 'latest' dist-tag, true on success.
async function downloadPackage(packageName, directory) {
  // await pacote.extract(packageName, directory);
  // fetch the registry metadata to discover the latest version's tarball URL
  const infoResp = await axios.default.get(`https://registry.npmjs.org/${packageName}`);

  const { latest } = infoResp.data['dist-tags'] || {};
  if (!latest) return false;

  const tarball = infoResp.data.versions[latest].dist.tarball;

  // stream the tarball into a uniquely named temp file under uploads
  const tmpFile = path.join(uploadsdir(), uuidv1() + '.tgz');
  console.log(`Downloading tarball ${tarball} into ${tmpFile}`);
  const tarballResp = await axios.default({
    method: 'get',
    url: tarball,
    responseType: 'stream',
  });
  await saveStreamToFile(tarballResp.data, tmpFile);
  // extract into a temp dir, then copy the 'package/' subfolder (the npm
  // tarball layout) into the requested target directory
  const tmpDir = path.join(uploadsdir(), uuidv1());
  fs.mkdirSync(tmpDir);
  await extractTarball(tmpFile, tmpDir);
  await copyDirectory(path.join(tmpDir, 'package'), directory);

  return true;
}

module.exports = downloadPackage;
|
||||
@@ -1,21 +0,0 @@
|
||||
const mssql = require('mssql');
|
||||
const mysql = require('mysql');
|
||||
const pg = require('pg');
|
||||
const pgQueryStream = require('pg-query-stream');
|
||||
const fs = require('fs');
|
||||
const stream = require('stream');
|
||||
|
||||
// Native database drivers and node builtins handed to engine drivers, so
// the driver packages do not have to require them on their own.
const nativeModules = {
  mssql,
  mysql,
  pg,
  pgQueryStream,
  fs,
  stream,
};

// Connects the given engine driver, injecting the native module bag as the
// first argument of driver.connect.
function driverConnect(driver, connection) {
  return driver.connect(nativeModules, connection);
}

module.exports = driverConnect;
|
||||
15
packages/api/src/utility/freeTableStorage.js
Normal file
15
packages/api/src/utility/freeTableStorage.js
Normal file
@@ -0,0 +1,15 @@
|
||||
const fs = require('fs-extra');
|
||||
|
||||
async function saveFreeTableData(file, data) {
|
||||
const { structure, rows } = data;
|
||||
const fileStream = fs.createWriteStream(file);
|
||||
await fileStream.write(JSON.stringify(structure) + '\n');
|
||||
for (const row of rows) {
|
||||
await fileStream.write(JSON.stringify(row) + '\n');
|
||||
}
|
||||
await fileStream.close();
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
saveFreeTableData,
|
||||
};
|
||||
12
packages/api/src/utility/getJslFileName.js
Normal file
12
packages/api/src/utility/getJslFileName.js
Normal file
@@ -0,0 +1,12 @@
|
||||
const path = require('path');
|
||||
const { jsldir, archivedir } = require('./directories');
|
||||
|
||||
function getJslFileName(jslid) {
|
||||
const archiveMatch = jslid.match(/^archive:\/\/([^/]+)\/(.*)$/);
|
||||
if (archiveMatch) {
|
||||
return path.join(archivedir(), archiveMatch[1], `${archiveMatch[2]}.jsonl`);
|
||||
}
|
||||
return path.join(jsldir(), `${jslid}.jsonl`);
|
||||
}
|
||||
|
||||
module.exports = getJslFileName;
|
||||
18
packages/api/src/utility/goSplit.js
Normal file
18
packages/api/src/utility/goSplit.js
Normal file
@@ -0,0 +1,18 @@
|
||||
// Splits an SQL script on lines containing only the word GO (the T-SQL
// batch separator, matched case-insensitively with optional surrounding
// whitespace). Whitespace-only batches are dropped; each returned batch
// keeps its trailing newline.
function goSplit(sql) {
  if (!sql) return [];

  const batches = [];
  let current = '';
  const flush = () => {
    if (current.trim()) batches.push(current);
    current = '';
  };

  for (const line of sql.split('\n')) {
    if (/^\s*go\s*$/i.test(line)) {
      flush();
    } else {
      current += line + '\n';
    }
  }
  flush();
  return batches;
}

module.exports = goSplit;
|
||||
24
packages/api/src/utility/requireEngineDriver.js
Normal file
24
packages/api/src/utility/requireEngineDriver.js
Normal file
@@ -0,0 +1,24 @@
|
||||
const _ = require('lodash');
const requirePlugin = require('../shell/requirePlugin');

// Resolves the engine driver for a connection. `connection` is either an
// engine id string or a connection object carrying an `engine` property.
// Engine ids have the form '<shortName>@<packageName>'; the driver is taken
// from the plugin package's exported `driver`.
// Throws when no engine can be determined or the id has no package part.
/** @returns {import('dbgate-types').EngineDriver} */
function requireEngineDriver(connection) {
  let engine = null;
  if (_.isString(connection)) {
    engine = connection;
  } else if (_.isPlainObject(connection)) {
    engine = connection.engine;
  }
  if (!engine) {
    throw new Error('Could not get driver from connection');
  }
  if (engine.includes('@')) {
    // engine id format: shortName@packageName — only the package part is
    // needed to load the plugin (the original destructured an unused
    // shortName variable)
    const packageName = engine.split('@')[1];
    const plugin = requirePlugin(packageName);
    return plugin.driver;
  }
  // message typo fixed: "Could not found" -> "Could not find"
  throw new Error(`Could not find engine driver ${engine}`);
}

module.exports = requireEngineDriver;
|
||||
@@ -8,11 +8,11 @@ module.exports = {
|
||||
return socket;
|
||||
},
|
||||
emit(message, data) {
|
||||
console.log('EMIT:', message, data);
|
||||
// console.log('EMIT:', message, data);
|
||||
socket.emit(message, data);
|
||||
},
|
||||
emitChanged(key) {
|
||||
console.log('EMIT_CHANGED:', key);
|
||||
// console.log('EMIT_CHANGED:', key);
|
||||
socket.emit('clean-cache', key);
|
||||
socket.emit(key);
|
||||
},
|
||||
|
||||
@@ -11,12 +11,13 @@ var config = {
|
||||
output: {
|
||||
path: path.resolve(__dirname, 'dist'),
|
||||
filename: 'bundle.js',
|
||||
libraryTarget: 'commonjs2',
|
||||
},
|
||||
|
||||
optimization: {
|
||||
minimize: false
|
||||
},
|
||||
|
||||
// optimization: {
|
||||
// minimize: false,
|
||||
// },
|
||||
|
||||
// module: {
|
||||
// rules: [
|
||||
// {
|
||||
@@ -28,7 +29,7 @@ var config = {
|
||||
plugins: [
|
||||
new webpack.IgnorePlugin({
|
||||
checkResource(resource) {
|
||||
const lazyImports = ['pg-native', 'uws'];
|
||||
const lazyImports = ['uws'];
|
||||
if (!lazyImports.includes(resource)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"version": "0.1.0",
|
||||
"name": "@dbgate/datalib",
|
||||
"version": "1.0.0",
|
||||
"name": "dbgate-datalib",
|
||||
"main": "lib/index.js",
|
||||
"typings": "lib/index.d.ts",
|
||||
"scripts": {
|
||||
@@ -12,11 +12,11 @@
|
||||
"lib"
|
||||
],
|
||||
"dependencies": {
|
||||
"@dbgate/sqltree": "^0.1.0",
|
||||
"@dbgate/filterparser": "^0.1.0"
|
||||
"dbgate-sqltree": "^1.0.0",
|
||||
"dbgate-filterparser": "^1.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@dbgate/types": "^0.1.0",
|
||||
"dbgate-types": "^1.0.0",
|
||||
"@types/node": "^13.7.0",
|
||||
"typescript": "^3.7.5"
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import _ from 'lodash';
|
||||
import { Command, Insert, Update, Delete, UpdateField, Condition, AllowIdentityInsert } from '@dbgate/sqltree';
|
||||
import { NamedObjectInfo, DatabaseInfo } from '@dbgate/types';
|
||||
import { Command, Insert, Update, Delete, UpdateField, Condition, AllowIdentityInsert } from 'dbgate-sqltree';
|
||||
import { NamedObjectInfo, DatabaseInfo } from 'dbgate-types';
|
||||
|
||||
export interface ChangeSetItem {
|
||||
pureName: string;
|
||||
schemaName: string;
|
||||
schemaName?: string;
|
||||
insertedRowIndex?: number;
|
||||
condition?: { [column: string]: string };
|
||||
fields?: { [column: string]: string };
|
||||
|
||||
44
packages/datalib/src/FreeTableGridDisplay.ts
Normal file
44
packages/datalib/src/FreeTableGridDisplay.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import _ from 'lodash';
|
||||
import { EngineDriver, ViewInfo, ColumnInfo } from 'dbgate-types';
|
||||
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
|
||||
import { GridConfig, GridCache } from './GridConfig';
|
||||
import { FreeTableModel } from './FreeTableModel';
|
||||
|
||||
export class FreeTableGridDisplay extends GridDisplay {
|
||||
constructor(
|
||||
public model: FreeTableModel,
|
||||
config: GridConfig,
|
||||
setConfig: ChangeConfigFunc,
|
||||
cache: GridCache,
|
||||
setCache: ChangeCacheFunc
|
||||
) {
|
||||
super(config, setConfig, cache, setCache);
|
||||
this.columns = this.getDisplayColumns(model);
|
||||
this.filterable = false;
|
||||
this.sortable = false;
|
||||
}
|
||||
|
||||
getDisplayColumns(model: FreeTableModel) {
|
||||
return (
|
||||
model?.structure?.columns
|
||||
?.map((col) => this.getDisplayColumn(col))
|
||||
?.map((col) => ({
|
||||
...col,
|
||||
isChecked: this.isColumnChecked(col),
|
||||
})) || []
|
||||
);
|
||||
}
|
||||
|
||||
getDisplayColumn( col: ColumnInfo) {
|
||||
const uniquePath = [col.columnName];
|
||||
const uniqueName = uniquePath.join('.');
|
||||
return {
|
||||
...col,
|
||||
pureName: 'data',
|
||||
schemaName: '',
|
||||
headerText: col.columnName,
|
||||
uniqueName,
|
||||
uniquePath,
|
||||
};
|
||||
}
|
||||
}
|
||||
27
packages/datalib/src/FreeTableModel.ts
Normal file
27
packages/datalib/src/FreeTableModel.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { TableInfo } from 'dbgate-types';
|
||||
|
||||
// In-memory table: a TableInfo structure describing the columns plus the
// data rows (arbitrary objects keyed by column name).
export interface FreeTableModel {
  structure: TableInfo;
  rows: any[];
}
||||
|
||||
export function createFreeTableModel() {
|
||||
return {
|
||||
structure: {
|
||||
columns: [
|
||||
{
|
||||
columnName: 'col1',
|
||||
},
|
||||
],
|
||||
foreignKeys: [],
|
||||
},
|
||||
rows: [
|
||||
{
|
||||
col1: 'val1',
|
||||
},
|
||||
{
|
||||
col1: 'val2',
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
import { DisplayColumn } from './GridDisplay';
|
||||
import { TableInfo } from '@dbgate/types';
|
||||
import { TableInfo } from 'dbgate-types';
|
||||
|
||||
export interface GridConfigColumns {
|
||||
hiddenColumns: string[];
|
||||
@@ -16,6 +16,8 @@ export interface GridReferenceDefinition {
|
||||
}[];
|
||||
}
|
||||
|
||||
export type GroupFunc = 'GROUP' | 'MAX' | 'MIN' | 'SUM' | 'AVG' | 'COUNT' | 'COUNT DISTINCT' | 'NULL';
|
||||
|
||||
export interface GridConfig extends GridConfigColumns {
|
||||
filters: { [uniqueName: string]: string };
|
||||
focusedColumn?: string;
|
||||
@@ -24,6 +26,9 @@ export interface GridConfig extends GridConfigColumns {
|
||||
uniqueName: string;
|
||||
order: 'ASC' | 'DESC';
|
||||
}[];
|
||||
grouping: { [uniqueName: string]: GroupFunc };
|
||||
childConfig?: GridConfig;
|
||||
reference?: GridReferenceDefinition;
|
||||
}
|
||||
|
||||
export interface GridCache {
|
||||
@@ -39,6 +44,7 @@ export function createGridConfig(): GridConfig {
|
||||
columnWidths: {},
|
||||
sort: [],
|
||||
focusedColumn: null,
|
||||
grouping: {},
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import _ from 'lodash';
|
||||
import { GridConfig, GridCache, GridConfigColumns, createGridCache } from './GridConfig';
|
||||
import { ForeignKeyInfo, TableInfo, ColumnInfo, DbType, EngineDriver, NamedObjectInfo, DatabaseInfo } from '@dbgate/types';
|
||||
import { parseFilter, getFilterType } from '@dbgate/filterparser';
|
||||
import { GridConfig, GridCache, GridConfigColumns, createGridCache, GroupFunc } from './GridConfig';
|
||||
import { ForeignKeyInfo, TableInfo, ColumnInfo, EngineDriver, NamedObjectInfo, DatabaseInfo } from 'dbgate-types';
|
||||
import { parseFilter, getFilterType } from 'dbgate-filterparser';
|
||||
import { filterName } from './filterName';
|
||||
import { ChangeSetFieldDefinition, ChangeSetRowDefinition } from './ChangeSet';
|
||||
import { Expression, Select, treeToSql, dumpSqlSelect } from '@dbgate/sqltree';
|
||||
import { Expression, Select, treeToSql, dumpSqlSelect, Condition } from 'dbgate-sqltree';
|
||||
import { isTypeLogical } from 'dbgate-tools';
|
||||
|
||||
export interface DisplayColumn {
|
||||
schemaName: string;
|
||||
@@ -19,7 +20,7 @@ export interface DisplayColumn {
|
||||
foreignKey?: ForeignKeyInfo;
|
||||
isChecked?: boolean;
|
||||
hintColumnName?: string;
|
||||
commonType?: DbType;
|
||||
dataType?: string;
|
||||
}
|
||||
|
||||
export interface DisplayedColumnEx extends DisplayColumn {
|
||||
@@ -57,6 +58,7 @@ export abstract class GridDisplay {
|
||||
filterable = false;
|
||||
editable = false;
|
||||
isLoadedCorrectly = true;
|
||||
supportsReload = false;
|
||||
|
||||
setColumnVisibility(uniquePath: string[], isVisible: boolean) {
|
||||
const uniqueName = uniquePath.join('.');
|
||||
@@ -75,6 +77,10 @@ export abstract class GridDisplay {
|
||||
}));
|
||||
}
|
||||
|
||||
get hasReferences() {
|
||||
return false;
|
||||
}
|
||||
|
||||
get focusedColumn() {
|
||||
return this.config.focusedColumn;
|
||||
}
|
||||
@@ -143,7 +149,7 @@ export abstract class GridDisplay {
|
||||
const column = displayedColumnInfo[uniqueName];
|
||||
if (!column) continue;
|
||||
try {
|
||||
const condition = parseFilter(filter, getFilterType(column.commonType?.typeCode));
|
||||
const condition = parseFilter(filter, getFilterType(column.dataType));
|
||||
if (condition) {
|
||||
conditions.push(
|
||||
_.cloneDeepWith(condition, (expr: Expression) => {
|
||||
@@ -174,16 +180,87 @@ export abstract class GridDisplay {
|
||||
if (this.config.sort?.length > 0) {
|
||||
select.orderBy = this.config.sort
|
||||
.map((col) => ({ ...col, dispInfo: displayedColumnInfo[col.uniqueName] }))
|
||||
.filter((col) => col.dispInfo)
|
||||
.map((col) => ({ ...col, expr: select.columns.find((x) => x.alias == col.uniqueName) }))
|
||||
.filter((col) => col.dispInfo && col.expr)
|
||||
.map((col) => ({
|
||||
exprType: 'column',
|
||||
columnName: col.dispInfo.columnName,
|
||||
...col.expr,
|
||||
direction: col.order,
|
||||
source: { alias: col.dispInfo.sourceAlias },
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
get isGrouped() {
|
||||
return !_.isEmpty(this.config.grouping);
|
||||
}
|
||||
|
||||
get groupColumns() {
|
||||
return this.isGrouped
|
||||
? _.keys(_.pickBy(this.config.grouping, (v) => v == 'GROUP' || v.startsWith('GROUP:')))
|
||||
: null;
|
||||
}
|
||||
|
||||
applyGroupOnSelect(select: Select, displayedColumnInfo: DisplayedColumnInfo) {
|
||||
const groupColumns = this.groupColumns;
|
||||
if (groupColumns && groupColumns.length > 0) {
|
||||
// @ts-ignore
|
||||
select.groupBy = groupColumns.map((col) => {
|
||||
const colExpr: Expression = {
|
||||
exprType: 'column',
|
||||
columnName: displayedColumnInfo[col].columnName,
|
||||
source: { alias: displayedColumnInfo[col].sourceAlias },
|
||||
};
|
||||
const grouping = this.config.grouping[col];
|
||||
if (grouping.startsWith('GROUP:')) {
|
||||
return {
|
||||
exprType: 'transform',
|
||||
transform: grouping,
|
||||
expr: colExpr,
|
||||
};
|
||||
} else {
|
||||
return colExpr;
|
||||
}
|
||||
});
|
||||
}
|
||||
if (!_.isEmpty(this.config.grouping)) {
|
||||
for (let i = 0; i < select.columns.length; i++) {
|
||||
const uniqueName = select.columns[i].alias;
|
||||
// if (groupColumns && groupColumns.includes(uniqueName)) continue;
|
||||
const grouping = this.getGrouping(uniqueName);
|
||||
if (grouping == 'GROUP') {
|
||||
continue;
|
||||
} else if (grouping == 'NULL') {
|
||||
select.columns[i].alias = null;
|
||||
} else if (grouping && grouping.startsWith('GROUP:')) {
|
||||
select.columns[i] = {
|
||||
exprType: 'transform',
|
||||
transform: grouping as any,
|
||||
expr: select.columns[i],
|
||||
alias: select.columns[i].alias,
|
||||
};
|
||||
} else {
|
||||
let func = 'MAX';
|
||||
let argsPrefix = '';
|
||||
if (grouping) {
|
||||
if (grouping == 'COUNT DISTINCT') {
|
||||
func = 'COUNT';
|
||||
argsPrefix = 'DISTINCT ';
|
||||
} else {
|
||||
func = grouping;
|
||||
}
|
||||
}
|
||||
select.columns[i] = {
|
||||
alias: select.columns[i].alias,
|
||||
exprType: 'call',
|
||||
func,
|
||||
argsPrefix,
|
||||
args: [select.columns[i]],
|
||||
};
|
||||
}
|
||||
}
|
||||
select.columns = select.columns.filter((x) => x.alias);
|
||||
}
|
||||
}
|
||||
|
||||
getColumns(columnFilter) {
|
||||
return this.columns.filter((col) => filterName(columnFilter, col.columnName));
|
||||
}
|
||||
@@ -215,6 +292,17 @@ export abstract class GridDisplay {
|
||||
this.reload();
|
||||
}
|
||||
|
||||
setFilters(dct) {
|
||||
this.setConfig((cfg) => ({
|
||||
...cfg,
|
||||
filters: {
|
||||
...cfg.filters,
|
||||
...dct,
|
||||
},
|
||||
}));
|
||||
this.reload();
|
||||
}
|
||||
|
||||
setSort(uniqueName, order) {
|
||||
this.setConfig((cfg) => ({
|
||||
...cfg,
|
||||
@@ -223,6 +311,38 @@ export abstract class GridDisplay {
|
||||
this.reload();
|
||||
}
|
||||
|
||||
setGrouping(uniqueName, groupFunc: GroupFunc) {
|
||||
this.setConfig((cfg) => ({
|
||||
...cfg,
|
||||
grouping: groupFunc
|
||||
? {
|
||||
...cfg.grouping,
|
||||
[uniqueName]: groupFunc,
|
||||
}
|
||||
: _.omitBy(cfg.grouping, (v, k) => k == uniqueName),
|
||||
}));
|
||||
this.reload();
|
||||
}
|
||||
|
||||
getGrouping(uniqueName): GroupFunc {
|
||||
if (this.isGrouped) {
|
||||
if (this.config.grouping[uniqueName]) return this.config.grouping[uniqueName];
|
||||
const column = this.baseTable.columns.find((x) => x.columnName == uniqueName);
|
||||
if (isTypeLogical(column?.dataType)) return 'COUNT DISTINCT';
|
||||
if (column?.autoIncrement) return 'COUNT';
|
||||
return 'MAX';
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
clearGrouping() {
|
||||
this.setConfig((cfg) => ({
|
||||
...cfg,
|
||||
grouping: {},
|
||||
}));
|
||||
this.reload();
|
||||
}
|
||||
|
||||
getSortOrder(uniqueName) {
|
||||
return this.config.sort.find((x) => x.uniqueName == uniqueName)?.order;
|
||||
}
|
||||
@@ -266,13 +386,13 @@ export abstract class GridDisplay {
|
||||
};
|
||||
}
|
||||
|
||||
createSelect(): Select {
|
||||
createSelect(options = {}): Select {
|
||||
return null;
|
||||
}
|
||||
|
||||
processReferences(select: Select, displayedColumnInfo: DisplayedColumnInfo) {}
|
||||
processReferences(select: Select, displayedColumnInfo: DisplayedColumnInfo, options) {}
|
||||
|
||||
createSelectBase(name: NamedObjectInfo, columns: ColumnInfo[]) {
|
||||
createSelectBase(name: NamedObjectInfo, columns: ColumnInfo[], options) {
|
||||
if (!columns) return null;
|
||||
const orderColumnName = columns[0].columnName;
|
||||
const select: Select = {
|
||||
@@ -296,13 +416,15 @@ export abstract class GridDisplay {
|
||||
this.columns.map((col) => ({ ...col, sourceAlias: 'basetbl' })),
|
||||
'uniqueName'
|
||||
);
|
||||
this.processReferences(select, displayedColumnInfo);
|
||||
this.processReferences(select, displayedColumnInfo, options);
|
||||
this.applyFilterOnSelect(select, displayedColumnInfo);
|
||||
this.applyGroupOnSelect(select, displayedColumnInfo);
|
||||
this.applySortOnSelect(select, displayedColumnInfo);
|
||||
return select;
|
||||
}
|
||||
|
||||
getPageQuery(offset: number, count: number) {
|
||||
if (!this.driver) return null;
|
||||
const select = this.createSelect();
|
||||
if (!select) return null;
|
||||
if (this.driver.dialect.rangeSelect) select.range = { offset: offset, limit: count };
|
||||
@@ -311,6 +433,13 @@ export abstract class GridDisplay {
|
||||
return sql;
|
||||
}
|
||||
|
||||
getExportQuery() {
|
||||
const select = this.createSelect({ isExport: true });
|
||||
if (!select) return null;
|
||||
const sql = treeToSql(this.driver, select, dumpSqlSelect);
|
||||
return sql;
|
||||
}
|
||||
|
||||
resizeColumn(uniqueName: string, computedSize: number, diff: number) {
|
||||
this.setConfig((cfg) => {
|
||||
const columnWidths = {
|
||||
@@ -329,16 +458,63 @@ export abstract class GridDisplay {
|
||||
}
|
||||
|
||||
getCountQuery() {
|
||||
const select = this.createSelect();
|
||||
select.columns = [
|
||||
{
|
||||
exprType: 'raw',
|
||||
sql: 'COUNT(*)',
|
||||
alias: 'count',
|
||||
},
|
||||
];
|
||||
let select = this.createSelect();
|
||||
select.orderBy = null;
|
||||
|
||||
if (this.isGrouped) {
|
||||
select = {
|
||||
commandType: 'select',
|
||||
from: {
|
||||
subQuery: select,
|
||||
alias: 'subq',
|
||||
},
|
||||
columns: [
|
||||
{
|
||||
exprType: 'raw',
|
||||
sql: 'COUNT(*)',
|
||||
alias: 'count',
|
||||
},
|
||||
],
|
||||
};
|
||||
} else {
|
||||
select.columns = [
|
||||
{
|
||||
exprType: 'raw',
|
||||
sql: 'COUNT(*)',
|
||||
alias: 'count',
|
||||
},
|
||||
];
|
||||
}
|
||||
const sql = treeToSql(this.driver, select, dumpSqlSelect);
|
||||
return sql;
|
||||
}
|
||||
|
||||
compileFilters(): Condition {
|
||||
const filters = this.config && this.config.filters;
|
||||
if (!filters) return null;
|
||||
const conditions = [];
|
||||
for (const name in filters) {
|
||||
const column = this.columns.find((x) => (x.columnName = name));
|
||||
if (!column) continue;
|
||||
const filterType = getFilterType(column.dataType);
|
||||
try {
|
||||
const condition = parseFilter(filters[name], filterType);
|
||||
const replaced = _.cloneDeepWith(condition, (expr: Expression) => {
|
||||
if (expr.exprType == 'placeholder')
|
||||
return {
|
||||
exprType: 'column',
|
||||
columnName: column.columnName,
|
||||
};
|
||||
});
|
||||
conditions.push(replaced);
|
||||
} catch (err) {
|
||||
// filter parse error - ignore filter
|
||||
}
|
||||
}
|
||||
if (conditions.length == 0) return null;
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
|
||||
import { QueryResultColumn } from '@dbgate/types';
|
||||
import { QueryResultColumn } from 'dbgate-types';
|
||||
import { GridConfig, GridCache } from './GridConfig';
|
||||
|
||||
export class JslGridDisplay extends GridDisplay {
|
||||
@@ -13,6 +13,8 @@ export class JslGridDisplay extends GridDisplay {
|
||||
) {
|
||||
super(config, setConfig, cache, setCache, null);
|
||||
|
||||
this.filterable = true;
|
||||
|
||||
this.columns = columns
|
||||
.map((col) => ({
|
||||
columnName: col.columnName,
|
||||
|
||||
22
packages/datalib/src/MacroDefinition.ts
Normal file
22
packages/datalib/src/MacroDefinition.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
import _ from 'lodash';
|
||||
|
||||
export interface MacroArgument {
|
||||
type: 'text' | 'select';
|
||||
label: string;
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface MacroDefinition {
|
||||
title: string;
|
||||
name: string;
|
||||
group: string;
|
||||
description?: string;
|
||||
type: 'transformValue';
|
||||
code: string;
|
||||
args?: MacroArgument[];
|
||||
}
|
||||
|
||||
export interface MacroSelectedCell {
|
||||
column: string;
|
||||
row: number;
|
||||
}
|
||||
@@ -1,8 +1,8 @@
|
||||
import _ from 'lodash';
|
||||
import { GridDisplay, ChangeCacheFunc, DisplayColumn, DisplayedColumnInfo, ChangeConfigFunc } from './GridDisplay';
|
||||
import { TableInfo, EngineDriver, ViewInfo, ColumnInfo, NamedObjectInfo, DatabaseInfo } from '@dbgate/types';
|
||||
import { TableInfo, EngineDriver, ViewInfo, ColumnInfo, NamedObjectInfo, DatabaseInfo } from 'dbgate-types';
|
||||
import { GridConfig, GridCache, createGridCache } from './GridConfig';
|
||||
import { Expression, Select, treeToSql, dumpSqlSelect } from '@dbgate/sqltree';
|
||||
import { Expression, Select, treeToSql, dumpSqlSelect } from 'dbgate-sqltree';
|
||||
import { filterName } from './filterName';
|
||||
|
||||
export class TableGridDisplay extends GridDisplay {
|
||||
@@ -32,6 +32,7 @@ export class TableGridDisplay extends GridDisplay {
|
||||
this.filterable = true;
|
||||
this.sortable = true;
|
||||
this.editable = true;
|
||||
this.supportsReload = true;
|
||||
this.baseTable = this.table;
|
||||
if (this.table && this.table.columns) {
|
||||
this.changeSetKeyFields = this.table.primaryKey
|
||||
@@ -40,7 +41,7 @@ export class TableGridDisplay extends GridDisplay {
|
||||
}
|
||||
}
|
||||
|
||||
findTable({ schemaName, pureName }) {
|
||||
findTable({ schemaName = undefined, pureName }) {
|
||||
return (
|
||||
this.dbinfo &&
|
||||
this.dbinfo.tables &&
|
||||
@@ -70,8 +71,8 @@ export class TableGridDisplay extends GridDisplay {
|
||||
|
||||
this.addReferenceToSelect(select, parentAlias, column);
|
||||
|
||||
this.addJoinsFromExpandedColumns(select, subcolumns, childAlias, columnSources)
|
||||
this.addAddedColumnsToSelect(select, subcolumns, childAlias, columnSources)
|
||||
this.addJoinsFromExpandedColumns(select, subcolumns, childAlias, columnSources);
|
||||
this.addAddedColumnsToSelect(select, subcolumns, childAlias, columnSources);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -81,7 +82,7 @@ export class TableGridDisplay extends GridDisplay {
|
||||
const childAlias = `${column.uniqueName}_ref`;
|
||||
if ((select.from.relations || []).find((x) => x.alias == childAlias)) return;
|
||||
const table = this.getFkTarget(column);
|
||||
if (table) {
|
||||
if (table && table.primaryKey) {
|
||||
select.from.relations = [
|
||||
...(select.from.relations || []),
|
||||
{
|
||||
@@ -111,10 +112,14 @@ export class TableGridDisplay extends GridDisplay {
|
||||
|
||||
addHintsToSelect(select: Select): boolean {
|
||||
let res = false;
|
||||
const groupColumns = this.groupColumns;
|
||||
for (const column of this.getGridColumns()) {
|
||||
if (column.foreignKey) {
|
||||
if (groupColumns && !groupColumns.includes(column.uniqueName)) {
|
||||
continue;
|
||||
}
|
||||
const table = this.getFkTarget(column);
|
||||
if (table && table.columns && table.columns.length > 0) {
|
||||
if (table && table.columns && table.columns.length > 0 && table.primaryKey) {
|
||||
const hintColumn = table.columns.find((x) => x?.dataType?.toLowerCase()?.includes('char'));
|
||||
if (hintColumn) {
|
||||
const parentUniqueName = column.uniquePath.slice(0, -1).join('.');
|
||||
@@ -158,14 +163,16 @@ export class TableGridDisplay extends GridDisplay {
|
||||
return this.findTable({ schemaName, pureName });
|
||||
}
|
||||
|
||||
processReferences(select: Select, displayedColumnInfo: DisplayedColumnInfo) {
|
||||
processReferences(select: Select, displayedColumnInfo: DisplayedColumnInfo, options) {
|
||||
this.addJoinsFromExpandedColumns(select, this.columns, 'basetbl', displayedColumnInfo);
|
||||
this.addHintsToSelect(select);
|
||||
if (!options.isExport) {
|
||||
this.addHintsToSelect(select);
|
||||
}
|
||||
}
|
||||
|
||||
createSelect() {
|
||||
createSelect(options = {}) {
|
||||
if (!this.table) return null;
|
||||
const select = this.createSelectBase(this.table, this.table.columns);
|
||||
const select = this.createSelectBase(this.table, this.table.columns, options);
|
||||
return select;
|
||||
}
|
||||
|
||||
@@ -212,4 +219,11 @@ export class TableGridDisplay extends GridDisplay {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
get hasReferences() {
|
||||
if (!this.table) return false;
|
||||
if (this.table.foreignKeys && this.table.foreignKeys.length > 0) return true;
|
||||
if (this.table.dependencies && this.table.dependencies.length > 0) return true;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import _ from 'lodash';
|
||||
import { GridDisplay, ChangeCacheFunc, ChangeConfigFunc } from './GridDisplay';
|
||||
import { EngineDriver, ViewInfo, ColumnInfo } from '@dbgate/types';
|
||||
import { EngineDriver, ViewInfo, ColumnInfo } from 'dbgate-types';
|
||||
import { GridConfig, GridCache } from './GridConfig';
|
||||
|
||||
export class ViewGridDisplay extends GridDisplay {
|
||||
@@ -16,7 +16,8 @@ export class ViewGridDisplay extends GridDisplay {
|
||||
this.columns = this.getDisplayColumns(view);
|
||||
this.filterable = true;
|
||||
this.sortable = true;
|
||||
this.editable = true;
|
||||
this.editable = false;
|
||||
this.supportsReload = true;
|
||||
}
|
||||
|
||||
getDisplayColumns(view: ViewInfo) {
|
||||
@@ -43,8 +44,8 @@ export class ViewGridDisplay extends GridDisplay {
|
||||
};
|
||||
}
|
||||
|
||||
createSelect() {
|
||||
const select = this.createSelectBase(this.view, this.view.columns);
|
||||
createSelect(options = {}) {
|
||||
const select = this.createSelectBase(this.view, this.view.columns, options);
|
||||
return select;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,4 +5,7 @@ export * from "./ViewGridDisplay";
|
||||
export * from "./JslGridDisplay";
|
||||
export * from "./ChangeSet";
|
||||
export * from "./filterName";
|
||||
export * from "./nameTools";
|
||||
export * from "./FreeTableGridDisplay";
|
||||
export * from "./FreeTableModel";
|
||||
export * from "./MacroDefinition";
|
||||
export * from "./runMacro";
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
export function fullNameFromString(name) {
|
||||
const m = name.match(/\[([^\]]+)\]\.\[([^\]]+)\]/);
|
||||
if (m) {
|
||||
return {
|
||||
schemaName: m[1],
|
||||
pureName: m[2],
|
||||
};
|
||||
}
|
||||
return {
|
||||
schemaName: null,
|
||||
pureName: name,
|
||||
};
|
||||
}
|
||||
|
||||
export function fullNameToString({ schemaName, pureName }) {
|
||||
if (schemaName) {
|
||||
return `[${schemaName}].[${pureName}]`;
|
||||
}
|
||||
return pureName;
|
||||
}
|
||||
|
||||
export function quoteFullName(dialect, { schemaName, pureName }) {
|
||||
if (schemaName) return `${dialect.quoteIdentifier(schemaName)}.${dialect.quoteIdentifier(pureName)}`;
|
||||
return `${dialect.quoteIdentifier(pureName)}`;
|
||||
}
|
||||
185
packages/datalib/src/runMacro.ts
Normal file
185
packages/datalib/src/runMacro.ts
Normal file
@@ -0,0 +1,185 @@
|
||||
import { FreeTableModel } from './FreeTableModel';
|
||||
import _ from 'lodash';
|
||||
import uuidv1 from 'uuid/v1';
|
||||
import uuidv4 from 'uuid/v4';
|
||||
import moment from 'moment';
|
||||
import { MacroDefinition, MacroSelectedCell } from './MacroDefinition';
|
||||
|
||||
const getMacroFunction = {
|
||||
transformValue: (code) => `
|
||||
(value, args, modules, rowIndex, row, columnName) => {
|
||||
${code}
|
||||
}
|
||||
`,
|
||||
transformRows: (code) => `
|
||||
(rows, args, modules, selectedCells, cols, columns) => {
|
||||
${code}
|
||||
}
|
||||
`,
|
||||
transformData: (code) => `
|
||||
(rows, args, modules, selectedCells, cols, columns) => {
|
||||
${code}
|
||||
}
|
||||
`,
|
||||
};
|
||||
|
||||
const modules = {
|
||||
lodash: _,
|
||||
uuidv1,
|
||||
uuidv4,
|
||||
moment,
|
||||
};
|
||||
|
||||
function runTramsformValue(
|
||||
func,
|
||||
macroArgs: {},
|
||||
data: FreeTableModel,
|
||||
preview: boolean,
|
||||
selectedCells: MacroSelectedCell[],
|
||||
errors: string[] = []
|
||||
) {
|
||||
const selectedRows = _.groupBy(selectedCells, 'row');
|
||||
const rows = data.rows.map((row, rowIndex) => {
|
||||
const selectedRow = selectedRows[rowIndex];
|
||||
if (selectedRow) {
|
||||
const modifiedFields = [];
|
||||
let res = null;
|
||||
for (const cell of selectedRow) {
|
||||
const { column } = cell;
|
||||
const oldValue = row[column];
|
||||
let newValue = oldValue;
|
||||
try {
|
||||
newValue = func(oldValue, macroArgs, modules, rowIndex, row, column);
|
||||
} catch (err) {
|
||||
errors.push(`Error processing column ${column} on row ${rowIndex}: ${err.message}`);
|
||||
}
|
||||
if (newValue != oldValue) {
|
||||
if (res == null) {
|
||||
res = { ...row };
|
||||
}
|
||||
res[column] = newValue;
|
||||
if (preview) modifiedFields.push(column);
|
||||
}
|
||||
}
|
||||
if (res) {
|
||||
if (modifiedFields.length > 0) {
|
||||
return {
|
||||
...res,
|
||||
__modifiedFields: new Set(modifiedFields),
|
||||
};
|
||||
}
|
||||
return res;
|
||||
}
|
||||
return row;
|
||||
} else {
|
||||
return row;
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
structure: data.structure,
|
||||
rows,
|
||||
};
|
||||
}
|
||||
|
||||
function removePreviewRowFlags(rows) {
|
||||
rows = rows.filter((row) => row.__rowStatus != 'deleted');
|
||||
rows = rows.map((row) => {
|
||||
if (row.__rowStatus || row.__modifiedFields || row.__insertedFields || row.__deletedFields)
|
||||
return _.omit(row, ['__rowStatus', '__modifiedFields', '__insertedFields', '__deletedFields']);
|
||||
return row;
|
||||
});
|
||||
return rows;
|
||||
}
|
||||
|
||||
function runTramsformRows(
|
||||
func,
|
||||
macroArgs: {},
|
||||
data: FreeTableModel,
|
||||
preview: boolean,
|
||||
selectedCells: MacroSelectedCell[],
|
||||
errors: string[] = []
|
||||
) {
|
||||
let rows = data.rows;
|
||||
try {
|
||||
rows = func(
|
||||
data.rows,
|
||||
macroArgs,
|
||||
modules,
|
||||
selectedCells,
|
||||
data.structure.columns.map((x) => x.columnName),
|
||||
data.structure.columns
|
||||
);
|
||||
if (!preview) {
|
||||
rows = removePreviewRowFlags(rows);
|
||||
}
|
||||
} catch (err) {
|
||||
errors.push(`Error processing rows: ${err.message}`);
|
||||
}
|
||||
return {
|
||||
structure: data.structure,
|
||||
rows,
|
||||
};
|
||||
}
|
||||
|
||||
function runTramsformData(
|
||||
func,
|
||||
macroArgs: {},
|
||||
data: FreeTableModel,
|
||||
preview: boolean,
|
||||
selectedCells: MacroSelectedCell[],
|
||||
errors: string[] = []
|
||||
) {
|
||||
try {
|
||||
let { rows, columns, cols } = func(
|
||||
data.rows,
|
||||
macroArgs,
|
||||
modules,
|
||||
selectedCells,
|
||||
data.structure.columns.map((x) => x.columnName),
|
||||
data.structure.columns
|
||||
);
|
||||
if (cols && !columns) {
|
||||
columns = cols.map((columnName) => ({ columnName }));
|
||||
}
|
||||
columns = _.uniqBy(columns, 'columnName');
|
||||
if (!preview) {
|
||||
rows = removePreviewRowFlags(rows);
|
||||
}
|
||||
return {
|
||||
structure: { columns },
|
||||
rows,
|
||||
};
|
||||
} catch (err) {
|
||||
errors.push(`Error processing data: ${err.message}`);
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
export function runMacro(
|
||||
macro: MacroDefinition,
|
||||
macroArgs: {},
|
||||
data: FreeTableModel,
|
||||
preview: boolean,
|
||||
selectedCells: MacroSelectedCell[],
|
||||
errors: string[] = []
|
||||
): FreeTableModel {
|
||||
let func;
|
||||
try {
|
||||
func = eval(getMacroFunction[macro.type](macro.code));
|
||||
} catch (err) {
|
||||
errors.push(`Error compiling macro ${macro.name}: ${err.message}`);
|
||||
return data;
|
||||
}
|
||||
if (macro.type == 'transformValue') {
|
||||
return runTramsformValue(func, macroArgs, data, preview, selectedCells, errors);
|
||||
}
|
||||
if (macro.type == 'transformRows') {
|
||||
return runTramsformRows(func, macroArgs, data, preview, selectedCells, errors);
|
||||
}
|
||||
if (macro.type == 'transformData') {
|
||||
// @ts-ignore
|
||||
return runTramsformData(func, macroArgs, data, preview, selectedCells, errors);
|
||||
}
|
||||
return data;
|
||||
}
|
||||
@@ -1,123 +0,0 @@
|
||||
const _ = require('lodash');
|
||||
const fp = require('lodash/fp');
|
||||
|
||||
class DatabaseAnalyser {
|
||||
/**
|
||||
*
|
||||
* @param {import('@dbgate/types').EngineDriver} driver
|
||||
*/
|
||||
constructor(pool, driver) {
|
||||
this.pool = pool;
|
||||
this.driver = driver;
|
||||
// this.result = DatabaseAnalyser.createEmptyStructure();
|
||||
/** @type {import('@dbgate/types').DatabaseInfo} */
|
||||
this.structure = null;
|
||||
/** import('@dbgate/types').DatabaseModification[]) */
|
||||
this.modifications = null;
|
||||
}
|
||||
|
||||
async _runAnalysis() {
|
||||
return DatabaseAnalyser.createEmptyStructure();
|
||||
}
|
||||
|
||||
/** @returns {Promise<import('@dbgate/types').DatabaseModification[]>} */
|
||||
async getModifications() {
|
||||
if (this.structure == null) throw new Error('DatabaseAnalyse.getModifications - structure must be filled');
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async fullAnalysis() {
|
||||
return this._runAnalysis();
|
||||
}
|
||||
|
||||
async incrementalAnalysis(structure) {
|
||||
this.structure = structure;
|
||||
|
||||
this.modifications = await this.getModifications();
|
||||
if (this.modifications == null) {
|
||||
// modifications not implemented, perform full analysis
|
||||
this.structure = null;
|
||||
return this._runAnalysis();
|
||||
}
|
||||
if (this.modifications.length == 0) return null;
|
||||
console.log('DB modifications detected:', this.modifications);
|
||||
return this._runAnalysis();
|
||||
}
|
||||
|
||||
mergeAnalyseResult(newlyAnalysed) {
|
||||
if (this.structure == null) {
|
||||
return {
|
||||
...DatabaseAnalyser.createEmptyStructure(),
|
||||
...newlyAnalysed,
|
||||
};
|
||||
}
|
||||
|
||||
const res = {};
|
||||
for (const field of ['tables', 'views', 'functions', 'procedures', 'triggers']) {
|
||||
const removedIds = this.modifications
|
||||
.filter((x) => x.action == 'remove' && x.objectTypeField == field)
|
||||
.map((x) => x.objectId);
|
||||
const newArray = newlyAnalysed[field] || [];
|
||||
const addedChangedIds = newArray.map((x) => x.objectId);
|
||||
const removeAllIds = [...removedIds, ...addedChangedIds];
|
||||
res[field] = _.sortBy(
|
||||
[...this.structure[field].filter((x) => !removeAllIds.includes(x.objectId)), ...newArray],
|
||||
(x) => x.pureName
|
||||
);
|
||||
}
|
||||
|
||||
return res;
|
||||
|
||||
// const {tables,views, functions, procedures, triggers} = this.structure;
|
||||
|
||||
// return {
|
||||
// tables:
|
||||
// }
|
||||
}
|
||||
|
||||
// findObjectById(id) {
|
||||
// return this.structure.tables.find((x) => x.objectId == id);
|
||||
// }
|
||||
}
|
||||
|
||||
/** @returns {import('@dbgate/types').DatabaseInfo} */
|
||||
DatabaseAnalyser.createEmptyStructure = () => ({
|
||||
tables: [],
|
||||
views: [],
|
||||
functions: [],
|
||||
procedures: [],
|
||||
triggers: [],
|
||||
});
|
||||
|
||||
DatabaseAnalyser.byTableFilter = (table) => (x) => x.pureName == table.pureName && x.schemaName == x.schemaName;
|
||||
|
||||
DatabaseAnalyser.extractPrimaryKeys = (table, pkColumns) => {
|
||||
const filtered = pkColumns.filter(DatabaseAnalyser.byTableFilter(table));
|
||||
if (filtered.length == 0) return undefined;
|
||||
return {
|
||||
..._.pick(filtered[0], ['constraintName', 'schemaName', 'pureName']),
|
||||
constraintType: 'primaryKey',
|
||||
columns: filtered.map(fp.pick('columnName')),
|
||||
};
|
||||
};
|
||||
|
||||
DatabaseAnalyser.extractForeignKeys = (table, fkColumns) => {
|
||||
const grouped = _.groupBy(fkColumns.filter(DatabaseAnalyser.byTableFilter(table)), 'constraintName');
|
||||
return _.keys(grouped).map((constraintName) => ({
|
||||
constraintName,
|
||||
constraintType: 'foreignKey',
|
||||
..._.pick(grouped[constraintName][0], [
|
||||
'constraintName',
|
||||
'schemaName',
|
||||
'pureName',
|
||||
'refSchemaName',
|
||||
'refTableName',
|
||||
'updateAction',
|
||||
'deleteAction',
|
||||
]),
|
||||
columns: grouped[constraintName].map(fp.pick(['columnName', 'refColumnName'])),
|
||||
}));
|
||||
};
|
||||
|
||||
module.exports = DatabaseAnalyser;
|
||||
7
packages/engines/index.d.ts
vendored
7
packages/engines/index.d.ts
vendored
@@ -1,7 +0,0 @@
|
||||
import types from "@dbgate/types";
|
||||
|
||||
declare function getDriver(
|
||||
connection: string | { engine: string }
|
||||
): types.EngineDriver;
|
||||
|
||||
export = getDriver;
|
||||
@@ -1,24 +0,0 @@
|
||||
const _ = require("lodash");
|
||||
const mssql = require("./mssql");
|
||||
const mysql = require("./mysql");
|
||||
const postgres = require("./postgres");
|
||||
|
||||
const drivers = {
|
||||
mssql,
|
||||
mysql,
|
||||
postgres
|
||||
};
|
||||
|
||||
function getDriver(connection) {
|
||||
if (_.isString(connection)) {
|
||||
return drivers[connection];
|
||||
}
|
||||
if (_.isPlainObject(connection)) {
|
||||
const { engine } = connection;
|
||||
if (engine) {
|
||||
return drivers[engine];
|
||||
}
|
||||
}
|
||||
throw new Error(`Cannot extract engine from ${connection}`);
|
||||
}
|
||||
module.exports = getDriver;
|
||||
@@ -1,311 +0,0 @@
|
||||
const fp = require('lodash/fp');
|
||||
const _ = require('lodash');
|
||||
const sql = require('./sql');
|
||||
|
||||
const DatabaseAnalyser = require('../default/DatabaseAnalyser');
|
||||
|
||||
function objectTypeToField(type) {
|
||||
switch (type.trim()) {
|
||||
case 'U':
|
||||
return 'tables';
|
||||
case 'V':
|
||||
return 'views';
|
||||
case 'P':
|
||||
return 'procedures';
|
||||
case 'IF':
|
||||
case 'FN':
|
||||
case 'TF':
|
||||
return 'functions';
|
||||
case 'TR':
|
||||
return 'triggers';
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/** @returns {import('@dbgate/types').DbType} */
|
||||
function detectType(col) {
|
||||
switch (col.dataType) {
|
||||
case 'binary':
|
||||
return {
|
||||
typeCode: 'string',
|
||||
isBinary: true,
|
||||
};
|
||||
|
||||
case 'image':
|
||||
return {
|
||||
typeCode: 'string',
|
||||
isBinary: true,
|
||||
isBlob: true,
|
||||
};
|
||||
|
||||
case 'timestamp':
|
||||
return {
|
||||
typeCode: 'string',
|
||||
};
|
||||
case 'varbinary':
|
||||
return {
|
||||
typeCode: 'string',
|
||||
length: col.maxLength,
|
||||
isBinary: true,
|
||||
isVarLength: true,
|
||||
};
|
||||
case 'bit':
|
||||
return {
|
||||
typeCode: 'logical',
|
||||
};
|
||||
|
||||
case 'tinyint':
|
||||
return {
|
||||
typeCode: 'int',
|
||||
bytes: 1,
|
||||
};
|
||||
case 'mediumint':
|
||||
return {
|
||||
typeCode: 'int',
|
||||
bytes: 3,
|
||||
};
|
||||
case 'datetime':
|
||||
return {
|
||||
typeCode: 'datetime',
|
||||
subType: 'datetime',
|
||||
};
|
||||
case 'time':
|
||||
return {
|
||||
typeCode: 'datetime',
|
||||
subType: 'time',
|
||||
};
|
||||
case 'year':
|
||||
return {
|
||||
typeCode: 'datetime',
|
||||
subType: 'year',
|
||||
};
|
||||
case 'date':
|
||||
return {
|
||||
typeCode: 'datetime',
|
||||
subType: 'date',
|
||||
};
|
||||
case 'decimal':
|
||||
case 'numeric':
|
||||
return {
|
||||
typeCode: 'numeric',
|
||||
precision: col.precision,
|
||||
scale: col.scale,
|
||||
};
|
||||
case 'float':
|
||||
return { typeCode: 'float' };
|
||||
case 'uniqueidentifier':
|
||||
return { typeCode: 'string' };
|
||||
case 'smallint':
|
||||
return {
|
||||
typeCode: 'int',
|
||||
bytes: 2,
|
||||
};
|
||||
case 'int':
|
||||
return {
|
||||
typeCode: 'int',
|
||||
bytes: 4,
|
||||
};
|
||||
case 'bigint':
|
||||
return {
|
||||
typeCode: 'int',
|
||||
bytes: 8,
|
||||
};
|
||||
case 'real':
|
||||
return { typeCode: 'float' };
|
||||
case 'char':
|
||||
return {
|
||||
typeCode: 'string',
|
||||
length: col.maxLength,
|
||||
};
|
||||
case 'nchar':
|
||||
return { typeCode: 'string', length: col.maxLength, isUnicode: true };
|
||||
case 'varchar':
|
||||
return {
|
||||
typeCode: 'string',
|
||||
length: col.maxLength,
|
||||
isVarLength: true,
|
||||
};
|
||||
case 'nvarchar':
|
||||
return {
|
||||
typeCode: 'string',
|
||||
length: col.maxLength,
|
||||
isVarLength: true,
|
||||
isUnicode: true,
|
||||
};
|
||||
case 'text':
|
||||
return {
|
||||
typeCode: 'blob',
|
||||
isText: true,
|
||||
};
|
||||
case 'ntext':
|
||||
return {
|
||||
typeCode: 'blob',
|
||||
isText: true,
|
||||
isUnicode: true,
|
||||
};
|
||||
case 'xml':
|
||||
return {
|
||||
typeCode: 'blob',
|
||||
isXml: true,
|
||||
};
|
||||
}
|
||||
return {
|
||||
typeCode: 'generic',
|
||||
sql: col.dataType,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Schema analyser for Microsoft SQL Server.
 *
 * Runs the query templates from ./sql against the connected database and
 * converts the raw rows into dbgate structure objects (tables, views,
 * procedures, functions). Incremental analysis is driven by the
 * `this.modifications` list produced by getModifications().
 */
class MsSqlAnalyser extends DatabaseAnalyser {
  constructor(pool, driver) {
    super(pool, driver);
  }

  /**
   * Loads a query template from ./sql and substitutes its
   * '=[OBJECT_ID_CONDITION]' placeholder.
   *
   * @param resFileName key into the ./sql module ('tables', 'columns', ...)
   * @param filterIdObjects objectTypeField names whose added/changed object ids
   *   should restrict the query; when absent (full analysis) every object matches.
   */
  createQuery(resFileName, filterIdObjects) {
    let res = sql[resFileName];
    if (!this.modifications || !filterIdObjects || this.modifications.length == 0) {
      // Full analysis: the condition matches every object.
      res = res.replace('=[OBJECT_ID_CONDITION]', ' is not null');
    } else {
      const filterIds = this.modifications
        .filter((x) => filterIdObjects.includes(x.objectTypeField) && (x.action == 'add' || x.action == 'change'))
        .map((x) => x.objectId);
      if (filterIds.length == 0) {
        // Nothing relevant changed: produce a condition matching no object.
        res = res.replace('=[OBJECT_ID_CONDITION]', ' = 0');
      } else {
        res = res.replace('=[OBJECT_ID_CONDITION]', ` in (${filterIds.join(',')})`);
      }
    }
    return res;
  }

  /**
   * Loads tables, views, procedures and functions (restricted by the current
   * modification set) and merges them into the analyser result.
   */
  async _runAnalysis() {
    const tablesRows = await this.driver.query(this.pool, this.createQuery('tables', ['tables']));
    const columnsRows = await this.driver.query(this.pool, this.createQuery('columns', ['tables']));
    const pkColumnsRows = await this.driver.query(this.pool, this.createQuery('primaryKeys', ['tables']));
    const fkColumnsRows = await this.driver.query(this.pool, this.createQuery('foreignKeys', ['tables']));

    const sqlCodeRows = await this.driver.query(
      this.pool,
      this.createQuery('loadSqlCode', ['views', 'procedures', 'functions', 'triggers'])
    );
    // Source text of an object may be split over several syscomments rows;
    // concatenate all fragments for the matching schema+name.
    const getCreateSql = (row) =>
      sqlCodeRows.rows
        .filter((x) => x.pureName == row.pureName && x.schemaName == row.schemaName)
        .map((x) => x.codeText)
        .join('');
    const viewsRows = await this.driver.query(this.pool, this.createQuery('views', ['views']));
    const programmableRows = await this.driver.query(
      this.pool,
      this.createQuery('programmables', ['procedures', 'functions'])
    );
    const viewColumnRows = await this.driver.query(this.pool, this.createQuery('viewColumns', ['views']));

    const tables = tablesRows.rows.map((row) => ({
      ...row,
      columns: columnsRows.rows
        .filter((col) => col.objectId == row.objectId)
        .map(({ isNullable, isIdentity, ...col }) => ({
          ...col,
          notNull: !isNullable,
          autoIncrement: !!isIdentity,
          commonType: detectType(col),
        })),
      primaryKey: DatabaseAnalyser.extractPrimaryKeys(row, pkColumnsRows.rows),
      foreignKeys: DatabaseAnalyser.extractForeignKeys(row, fkColumnsRows.rows),
    }));

    const views = viewsRows.rows.map((row) => ({
      ...row,
      createSql: getCreateSql(row),
      columns: viewColumnRows.rows
        .filter((col) => col.objectId == row.objectId)
        .map(({ isNullable, isIdentity, ...col }) => ({
          ...col,
          notNull: !isNullable,
          autoIncrement: !!isIdentity,
          commonType: detectType(col),
        })),
    }));

    // sys.objects type 'P' = stored procedure.
    const procedures = programmableRows.rows
      .filter((x) => x.sqlObjectType.trim() == 'P')
      .map((row) => ({
        ...row,
        createSql: getCreateSql(row),
      }));

    // 'FN' scalar, 'IF' inline table-valued, 'TF' table-valued functions.
    const functions = programmableRows.rows
      .filter((x) => ['FN', 'IF', 'TF'].includes(x.sqlObjectType.trim()))
      .map((row) => ({
        ...row,
        createSql: getCreateSql(row),
      }));

    return this.mergeAnalyseResult({
      tables,
      views,
      procedures,
      functions,
    });
  }

  /**
   * Returns 'remove' modifications for every object of the given type that is
   * present in the cached structure but missing from idArray.
   */
  getDeletedObjectsForField(idArray, objectTypeField) {
    return this.structure[objectTypeField]
      .filter((x) => !idArray.includes(x.objectId))
      .map((x) => ({
        oldName: _.pick(x, ['schemaName', 'pureName']),
        objectId: x.objectId,
        action: 'remove',
        objectTypeField,
      }));
  }

  /** Collects 'remove' modifications across all supported object types. */
  getDeletedObjects(idArray) {
    return [
      ...this.getDeletedObjectsForField(idArray, 'tables'),
      ...this.getDeletedObjectsForField(idArray, 'views'),
      ...this.getDeletedObjectsForField(idArray, 'procedures'),
      ...this.getDeletedObjectsForField(idArray, 'functions'),
      ...this.getDeletedObjectsForField(idArray, 'triggers'),
    ];
  }

  /**
   * Compares sys.objects modify dates against the cached structure and
   * returns the list of add/change/remove modifications.
   */
  async getModifications() {
    const modificationsQueryData = await this.driver.query(this.pool, this.createQuery('modifications'));
    const modifications = modificationsQueryData.rows.map((x) => {
      const { type, objectId, modifyDate, schemaName, pureName } = x;
      const field = objectTypeToField(type);
      if (!this.structure[field]) return null;
      // @ts-ignore
      const obj = this.structure[field].find((x) => x.objectId == objectId);

      // Object not modified: modify dates agree within a 1-second tolerance.
      if (obj && Math.abs(new Date(modifyDate).getTime() - new Date(obj.modifyDate).getTime()) < 1000) return null;

      /** @type {import('@dbgate/types').DatabaseModification} */
      const action = obj
        ? {
            newName: { schemaName, pureName },
            oldName: _.pick(obj, ['schemaName', 'pureName']),
            action: 'change',
            objectTypeField: field,
            objectId,
          }
        : {
            newName: { schemaName, pureName },
            action: 'add',
            objectTypeField: field,
            objectId,
          };
      return action;
    });

    // Deleted objects are those in the cached structure whose ids no longer
    // appear in the modifications query result.
    return [..._.compact(modifications), ...this.getDeletedObjects(modificationsQueryData.rows.map((x) => x.objectId))];
  }
}
|
||||
|
||||
module.exports = MsSqlAnalyser;
|
||||
@@ -1,20 +0,0 @@
|
||||
const SqlDumper = require('../default/SqlDumper');
|
||||
|
||||
/**
 * SQL dumper specialization for Microsoft SQL Server.
 */
class MsSqlDumper extends SqlDumper {
  /** Emits the MSSQL auto-increment keyword (IDENTITY). */
  autoIncrement() {
    this.put(' ^identity');
  }

  /**
   * Writes a string literal; values containing characters outside the
   * Latin-1 range get the N prefix so SQL Server treats them as Unicode.
   */
  putStringValue(value) {
    const needsUnicodePrefix = /[^\u0000-\u00ff]/.test(value);
    if (needsUnicodePrefix) this.putRaw('N');
    super.putStringValue(value);
  }

  /** Emits SET IDENTITY_INSERT <table> ON/OFF. */
  allowIdentityInsert(table, allow) {
    const state = allow ? 'on' : 'off';
    this.putCmd('^set ^identity_insert %f %k;&n', table, state);
  }
}
|
||||
|
||||
module.exports = MsSqlDumper;
|
||||
@@ -1,201 +0,0 @@
|
||||
const _ = require('lodash');
|
||||
const MsSqlAnalyser = require('./MsSqlAnalyser');
|
||||
const MsSqlDumper = require('./MsSqlDumper');
|
||||
|
||||
/**
 * SQL Server dialect description.
 * @type {import('@dbgate/types').SqlDialect}
 */
const dialect = {
  limitSelect: true,
  rangeSelect: true,
  // MSSQL uses OFFSET ... FETCH for range queries.
  offsetFetchRangeSyntax: true,
  stringEscapeChar: "'",
  // MSSQL quotes identifiers with square brackets.
  quoteIdentifier(s) {
    return `[${s}]`;
  },
};
|
||||
|
||||
/**
 * Converts the mssql `recordset.columns` metadata object into an ordered
 * array of dbgate column descriptors.
 *
 * Columns without a usable name (e.g. unnamed expressions, reported by the
 * driver with an empty name) are renamed to a generated, unique `colN` name.
 * The renames are reported in the returned mapper (original name -> generated
 * name) so streamed rows can be re-keyed to the generated names.
 *
 * @param columns mssql column metadata, keyed by column name
 * @returns {[Array, Object]} tuple of [column descriptors, rename mapper]
 */
function extractColumns(columns) {
  const mapper = {};
  const res = _.sortBy(_.values(columns), 'index').map((col) => ({
    ...col,
    columnName: col.name,
    notNull: !col.nullable,
    autoIncrement: !!col.identity,
  }));

  // Smallest `colN` not already taken by an existing column name.
  const generateName = () => {
    let index = 1;
    while (res.find((x) => x.columnName == `col${index}`)) index += 1;
    return `col${index}`;
  };

  for (const col of res) {
    if (!col.columnName) {
      // Key the mapper by the pre-rename name so row re-keying can look up
      // the original (typically empty) property name in streamed rows.
      const originalName = col.columnName;
      const newName = generateName();
      mapper[originalName] = newName;
      col.columnName = newName;
    }
  }

  return [res, mapper];
}
|
||||
|
||||
/**
 * Microsoft SQL Server engine driver (backed by the `mssql` package).
 * @type {import('@dbgate/types').EngineDriver}
 */
const driver = {
  /** Opens a connection pool; native modules are stashed on the pool for later use. */
  async connect(nativeModules, { server, port, user, password, database }) {
    const pool = await nativeModules.mssql.connect({
      server,
      port,
      user,
      password,
      database,
      // Long timeout (1 hour) so large analyser queries are not cut off.
      requestTimeout: 1000 * 3600,
      options: {
        enableArithAbort: true,
      },
    });
    pool._nativeModules = nativeModules;
    return pool;
  },
  // @ts-ignore
  /** Runs a query and returns { rows, columns, rowsAffected } as available. */
  async query(pool, sql) {
    const resp = await pool.request().query(sql);
    const res = {};

    if (resp.recordset) {
      // Only the column descriptors are needed here; the rename mapper is
      // relevant for streaming, where rows are re-keyed one by one.
      const [columns] = extractColumns(resp.recordset.columns);
      res.columns = columns;
      res.rows = resp.recordset;
    }
    if (resp.rowsAffected) {
      // resp.rowsAffected is an array (one entry per statement); report the total.
      res.rowsAffected = _.sum(resp.rowsAffected);
    }
    return res;
  },
  /**
   * Runs a query in streaming mode, forwarding recordset/row/info/error/done
   * events to the options callbacks.
   */
  async stream(pool, sql, options) {
    const request = await pool.request();
    // Rename mapper of the recordset currently being streamed (see extractColumns).
    let currentMapper = null;

    const handleInfo = (info) => {
      const { message, lineNumber, procName } = info;
      options.info({
        message,
        line: lineNumber,
        procedure: procName,
        time: new Date(),
        severity: 'info',
      });
    };

    const handleDone = (result) => {
      options.done(result);
    };

    const handleRow = (row) => {
      if (currentMapper) {
        // Copy before mutating: re-key renamed columns to their generated names.
        row = { ...row };
        for (const colname of _.keys(currentMapper)) {
          const newcolname = currentMapper[colname];
          row[newcolname] = row[colname];
          // mssql reports duplicate column names as arrays; flatten to CSV text.
          if (_.isArray(row[newcolname])) row[newcolname] = row[newcolname].join(',');
          delete row[colname];
        }
      }

      options.row(row);
    };

    const handleRecordset = (columns) => {
      const [extractedColumns, mapper] = extractColumns(columns);
      currentMapper = mapper;
      options.recordset(extractedColumns);
    };

    const handleError = (error) => {
      const { message, lineNumber, procName } = error;
      // Errors are reported through the info channel with 'error' severity.
      options.info({
        message,
        line: lineNumber,
        procedure: procName,
        time: new Date(),
        severity: 'error',
      });
    };

    request.stream = true;
    request.on('recordset', handleRecordset);
    request.on('row', handleRow);
    request.on('error', handleError);
    request.on('done', handleDone);
    request.on('info', handleInfo);
    request.query(sql);

    return request;
  },
  /** Returns a Node object-mode readable stream of result rows. */
  async readableStream(pool, sql) {
    const request = await pool.request();
    const { stream } = pool._nativeModules;

    const pass = new stream.PassThrough({
      objectMode: true,
      highWaterMark: 100,
    });

    request.stream = true;
    request.on('row', (row) => pass.write(row));
    request.on('error', (err) => {
      console.error(err);
      pass.end();
    });
    request.on('done', () => pass.end());

    request.query(sql);

    return pass;
  },
  /** Reads the server version string via @@VERSION. */
  async getVersion(pool) {
    const { version } = (await this.query(pool, 'SELECT @@VERSION AS version')).rows[0];
    return { version };
  },
  /** Lists databases on the server, ordered by name. */
  async listDatabases(pool) {
    const { rows } = await this.query(pool, 'SELECT name FROM sys.databases order by name');
    return rows;
  },
  async analyseFull(pool) {
    const analyser = new MsSqlAnalyser(pool, this);
    return analyser.fullAnalysis();
  },
  async analyseIncremental(pool, structure) {
    const analyser = new MsSqlAnalyser(pool, this);
    return analyser.incrementalAnalysis(structure);
  },
  createDumper() {
    return new MsSqlDumper(this);
  },
  dialect,
  engine: 'mssql',
};
|
||||
|
||||
module.exports = driver;
|
||||
@@ -1,15 +0,0 @@
|
||||
module.exports = `
|
||||
select c.name as columnName, t.name as dataType, c.object_id as objectId, c.is_identity as isIdentity,
|
||||
c.max_length as maxLength, c.precision, c.scale, c.is_nullable as isNullable,
|
||||
d.definition as defaultValue, d.name as defaultConstraint,
|
||||
m.definition as computedExpression, m.is_persisted as isPersisted, c.column_id as columnId,
|
||||
-- TODO only if version >= 2008
|
||||
c.is_sparse as isSparse
|
||||
from sys.columns c
|
||||
inner join sys.types t on c.system_type_id = t.system_type_id and c.user_type_id = t.user_type_id
|
||||
inner join sys.objects o on c.object_id = o.object_id
|
||||
left join sys.default_constraints d on c.default_object_id = d.object_id
|
||||
left join sys.computed_columns m on m.object_id = c.object_id and m.column_id = c.column_id
|
||||
where o.type = 'U' and o.object_id =[OBJECT_ID_CONDITION]
|
||||
order by c.column_id
|
||||
`;
|
||||
@@ -1,40 +0,0 @@
|
||||
module.exports = `
|
||||
SELECT
|
||||
schemaName = FK.TABLE_SCHEMA,
|
||||
pureName = FK.TABLE_NAME,
|
||||
columnName = CU.COLUMN_NAME,
|
||||
|
||||
refSchemaName = ISNULL(IXS.name, PK.TABLE_SCHEMA),
|
||||
refTableName = ISNULL(IXT.name, PK.TABLE_NAME),
|
||||
refColumnName = IXCC.name,
|
||||
|
||||
constraintName = C.CONSTRAINT_NAME,
|
||||
updateAction = rc.UPDATE_RULE,
|
||||
deleteAction = rc.DELETE_RULE,
|
||||
|
||||
objectId = o.object_id
|
||||
|
||||
FROM INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS C
|
||||
INNER JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS FK ON C.CONSTRAINT_NAME = FK.CONSTRAINT_NAME
|
||||
|
||||
LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS PK ON C.UNIQUE_CONSTRAINT_NAME = PK.CONSTRAINT_NAME
|
||||
LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE CU ON C.CONSTRAINT_NAME = CU.CONSTRAINT_NAME
|
||||
--LEFT JOIN (
|
||||
--SELECT i1.TABLE_NAME, i2.COLUMN_NAME
|
||||
--FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS i1
|
||||
--INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE i2 ON i1.CONSTRAINT_NAME = i2.CONSTRAINT_NAME
|
||||
--WHERE i1.CONSTRAINT_TYPE = 'PRIMARY KEY'
|
||||
--) PT ON PT.TABLE_NAME = PK.TABLE_NAME
|
||||
INNER JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc ON FK.CONSTRAINT_NAME = rc.CONSTRAINT_NAME
|
||||
|
||||
LEFT JOIN sys.indexes IX ON IX.name = C.UNIQUE_CONSTRAINT_NAME
|
||||
LEFT JOIN sys.objects IXT ON IXT.object_id = IX.object_id
|
||||
LEFT JOIN sys.index_columns IXC ON IX.index_id = IXC.index_id and IX.object_id = IXC.object_id
|
||||
LEFT JOIN sys.columns IXCC ON IXCC.column_id = IXC.column_id AND IXCC.object_id = IXC.object_id
|
||||
LEFT JOIN sys.schemas IXS ON IXT.schema_id = IXS.schema_id
|
||||
|
||||
inner join sys.objects o on FK.TABLE_NAME = o.name
|
||||
inner join sys.schemas s on o.schema_id = s.schema_id and FK.TABLE_SCHEMA = s.name
|
||||
|
||||
where o.object_id =[OBJECT_ID_CONDITION]
|
||||
`;
|
||||
@@ -1,21 +0,0 @@
|
||||
// Aggregates the SQL Server analyser query snippets into one lookup object,
// keyed by the names used in MsSqlAnalyser.createQuery().
const columns = require('./columns');
const foreignKeys = require('./foreignKeys');
const primaryKeys = require('./primaryKeys');
const tables = require('./tables');
const modifications = require('./modifications');
const loadSqlCode = require('./loadSqlCode');
const views = require('./views');
const programmables = require('./programmables');
const viewColumns = require('./viewColumns');

module.exports = {
  columns,
  tables,
  foreignKeys,
  primaryKeys,
  modifications,
  loadSqlCode,
  views,
  programmables,
  viewColumns,
};
|
||||
@@ -1,8 +0,0 @@
|
||||
module.exports = `
|
||||
select s.name as pureName, u.name as schemaName, c.text AS codeText
|
||||
from sys.objects s
|
||||
inner join sys.syscomments c on s.object_id = c.id
|
||||
inner join sys.schemas u on u.schema_id = s.schema_id
|
||||
where (s.object_id =[OBJECT_ID_CONDITION])
|
||||
order by u.name, s.name, c.colid
|
||||
`;
|
||||
@@ -1,6 +0,0 @@
|
||||
module.exports = `
|
||||
select o.object_id as objectId, o.modify_date as modifyDate, o.type, o.name as pureName, s.name as schemaName
|
||||
from sys.objects o
|
||||
inner join sys.schemas s on o.schema_id = s.schema_id
|
||||
where o.type in ('U', 'V', 'P', 'IF', 'FN', 'TF') -- , 'TR' - triggers disabled
|
||||
`;
|
||||
@@ -1,14 +0,0 @@
|
||||
module.exports = `
|
||||
select o.object_id, pureName = t.Table_Name, schemaName = t.Table_Schema, columnName = c.Column_Name, constraintName=t.constraint_name from
|
||||
INFORMATION_SCHEMA.TABLE_CONSTRAINTS t,
|
||||
sys.objects o,
|
||||
sys.schemas s,
|
||||
INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE c
|
||||
where
|
||||
c.Constraint_Name = t.Constraint_Name
|
||||
and t.table_name = o.name
|
||||
and o.schema_id = s.schema_id and t.Table_Schema = s.name
|
||||
and c.Table_Name = t.Table_Name
|
||||
and Constraint_Type = 'PRIMARY KEY'
|
||||
and o.object_id =[OBJECT_ID_CONDITION]
|
||||
`;
|
||||
@@ -1,6 +0,0 @@
|
||||
module.exports = `
|
||||
select o.name as pureName, s.name as schemaName, o.object_id as objectId, o.create_date as createDate, o.modify_date as modifyDate, o.type as sqlObjectType
|
||||
from sys.objects o
|
||||
inner join sys.schemas s on o.schema_id = s.schema_id
|
||||
where o.type in ('P', 'IF', 'FN', 'TF') and o.object_id =[OBJECT_ID_CONDITION]
|
||||
`;
|
||||
@@ -1,8 +0,0 @@
|
||||
module.exports = `
|
||||
select
|
||||
o.name as pureName, s.name as schemaName, o.object_id as objectId,
|
||||
o.create_date as createDate, o.modify_date as modifyDate
|
||||
from sys.tables o
|
||||
inner join sys.schemas s on o.schema_id = s.schema_id
|
||||
where o.object_id =[OBJECT_ID_CONDITION]
|
||||
`;
|
||||
@@ -1,18 +0,0 @@
|
||||
module.exports = `
|
||||
select
|
||||
o.object_id AS objectId,
|
||||
col.TABLE_SCHEMA as schemaName,
|
||||
col.TABLE_NAME as pureName,
|
||||
col.COLUMN_NAME as columnName,
|
||||
col.IS_NULLABLE as isNullable,
|
||||
col.DATA_TYPE as dataType,
|
||||
col.CHARACTER_MAXIMUM_LENGTH,
|
||||
col.NUMERIC_PRECISION as precision,
|
||||
col.NUMERIC_SCALE as scale,
|
||||
col.COLUMN_DEFAULT
|
||||
FROM sys.objects o
|
||||
INNER JOIN sys.schemas u ON u.schema_id=o.schema_id
|
||||
INNER JOIN INFORMATION_SCHEMA.COLUMNS col ON col.TABLE_NAME = o.name AND col.TABLE_SCHEMA = u.name
|
||||
WHERE o.type in ('V') and o.object_id =[OBJECT_ID_CONDITION]
|
||||
order by col.ORDINAL_POSITION
|
||||
`;
|
||||
@@ -1,10 +0,0 @@
|
||||
module.exports = `
|
||||
SELECT
|
||||
o.name as pureName,
|
||||
u.name as schemaName,
|
||||
o.object_id as objectId,
|
||||
o.create_date as createDate,
|
||||
o.modify_date as modifyDate
|
||||
FROM sys.objects o INNER JOIN sys.schemas u ON u.schema_id=o.schema_id
|
||||
WHERE type in ('V') and o.object_id =[OBJECT_ID_CONDITION]
|
||||
`;
|
||||
@@ -1,41 +0,0 @@
|
||||
const fp = require('lodash/fp');
|
||||
const _ = require('lodash');
|
||||
const sql = require('./sql');
|
||||
|
||||
const DatabaseAnalayser = require('../default/DatabaseAnalyser');
|
||||
|
||||
/**
 * Schema analyser for MySQL.
 *
 * Loads tables, columns, primary keys and foreign keys from
 * INFORMATION_SCHEMA and merges them into the dbgate structure format.
 */
class MySqlAnalyser extends DatabaseAnalayser {
  constructor(pool, driver) {
    super(pool, driver);
  }

  /**
   * Loads a query template from ./sql and substitutes its placeholders.
   * The object-type flags are currently unused; every object is matched.
   * '#DATABASE#' is replaced with the connected database name.
   */
  createQuery(resFileName, tables = false, views = false, procedures = false, functions = false, triggers = false) {
    let res = sql[resFileName];
    res = res.replace('=[OBJECT_NAME_CONDITION]', ' is not null');
    res = res.replace('#DATABASE#', this.pool._database_name);
    return res;
  }

  /** Runs the analyser queries and merges the result. */
  async _runAnalysis() {
    const tables = await this.driver.query(this.pool, this.createQuery('tables'));
    const columns = await this.driver.query(this.pool, this.createQuery('columns'));
    const pkColumns = await this.driver.query(this.pool, this.createQuery('primaryKeys'));
    const fkColumns = await this.driver.query(this.pool, this.createQuery('foreignKeys'));

    return this.mergeAnalyseResult({
      tables: tables.rows.map((table) => ({
        ...table,
        columns: columns.rows
          .filter((col) => col.pureName == table.pureName)
          .map(({ isNullable, extra, ...col }) => ({
            ...col,
            notNull: !isNullable,
            // MySQL reports auto-increment in the EXTRA column.
            autoIncrement: extra && extra.toLowerCase().includes('auto_increment'),
          })),
        primaryKey: DatabaseAnalayser.extractPrimaryKeys(table, pkColumns.rows),
        foreignKeys: DatabaseAnalayser.extractForeignKeys(table, fkColumns.rows),
      })),
    });
  }
}
|
||||
|
||||
module.exports = MySqlAnalyser;
|
||||
@@ -1,5 +0,0 @@
|
||||
const SqlDumper = require('../default/SqlDumper');
|
||||
|
||||
/** MySQL flavor of SqlDumper; no MySQL-specific overrides are needed yet. */
class MySqlDumper extends SqlDumper {}
|
||||
|
||||
module.exports = MySqlDumper;
|
||||
@@ -1,115 +0,0 @@
|
||||
const MySqlAnalyser = require('./MySqlAnalyser');
|
||||
const MySqlDumper = require('./MySqlDumper');
|
||||
|
||||
/**
 * MySQL dialect description.
 * @type {import('@dbgate/types').SqlDialect}
 */
const dialect = {
  rangeSelect: true,
  // MySQL escapes string characters with a backslash.
  stringEscapeChar: '\\',
  // MySQL quotes identifiers with backticks.
  quoteIdentifier(s) {
    return '`' + s + '`';
  },
};
|
||||
|
||||
/**
 * Maps a mysql driver field-metadata list to dbgate column descriptors.
 * Returns null when no field metadata is available (e.g. non-SELECT results).
 */
function extractColumns(fields) {
  if (!fields) return null;
  const toColumn = (field) => ({ columnName: field.name });
  return fields.map(toColumn);
}
|
||||
|
||||
/**
 * MySQL engine driver (backed by the `mysql` package).
 * @type {import('@dbgate/types').EngineDriver}
 */
const driver = {
  /** Opens a connection; database name and native modules are stashed on it. */
  async connect(nativeModules, { server, port, user, password, database }) {
    const connection = nativeModules.mysql.createConnection({
      host: server,
      port,
      user,
      password,
      database,
    });
    // Remembered for createQuery() '#DATABASE#' substitution in the analyser.
    connection._database_name = database;
    connection._nativeModules = nativeModules;
    return connection;
  },
  /** Runs a query and resolves with { rows, columns }; rejects on error. */
  async query(connection, sql) {
    return new Promise((resolve, reject) => {
      connection.query(sql, function (error, results, fields) {
        if (error) {
          reject(error);
          // Fix: return after reject so we don't fall through and call
          // resolve() with undefined results on a failed query.
          return;
        }
        resolve({ rows: results, columns: extractColumns(fields) });
      });
    });
  },
  /**
   * Runs a query in streaming mode, forwarding fields/row/error/end events
   * to the options callbacks.
   */
  async stream(connection, sql, options) {
    const query = connection.query(sql);

    const handleEnd = (result) => {
      options.done(result);
    };

    const handleRow = (row) => {
      options.row(row);
    };

    const handleFields = (columns) => {
      // Fix: removed leftover debug console.log(columns[0].name), which threw
      // a TypeError when a statement produced an empty field list.
      options.recordset(extractColumns(columns));
    };

    const handleError = (error) => {
      const { message, lineNumber, procName } = error;
      // Errors are reported through the info channel with 'error' severity.
      options.info({
        message,
        line: lineNumber,
        procedure: procName,
        time: new Date(),
        severity: 'error',
      });
    };

    query.on('error', handleError).on('fields', handleFields).on('result', handleRow).on('end', handleEnd);

    return query;
  },
  /** Returns a Node object-mode readable stream of result rows. */
  async readableStream(connection, sql) {
    const query = connection.query(sql);
    return query.stream({ highWaterMark: 100 });
  },
  /** Reads the server version from the 'version' system variable. */
  async getVersion(connection) {
    const { rows } = await this.query(connection, "show variables like 'version'");
    const version = rows[0].Value;
    return { version };
  },
  async analyseFull(pool) {
    const analyser = new MySqlAnalyser(pool, this);
    return analyser.fullAnalysis();
  },
  async analyseIncremental(pool, structure) {
    const analyser = new MySqlAnalyser(pool, this);
    return analyser.incrementalAnalysis(structure);
  },
  /** Lists databases on the server. */
  async listDatabases(connection) {
    const { rows } = await this.query(connection, 'show databases');
    return rows.map((x) => ({ name: x.Database }));
  },
  createDumper() {
    return new MySqlDumper(this);
  },
  dialect,
  engine: 'mysql',
};
|
||||
|
||||
module.exports = driver;
|
||||
@@ -1,15 +0,0 @@
|
||||
module.exports = `
|
||||
select
|
||||
TABLE_NAME as pureName,
|
||||
COLUMN_NAME as columnName,
|
||||
IS_NULLABLE as isNullable,
|
||||
DATA_TYPE as dataType,
|
||||
CHARACTER_MAXIMUM_LENGTH,
|
||||
NUMERIC_PRECISION,
|
||||
NUMERIC_SCALE,
|
||||
COLUMN_DEFAULT,
|
||||
EXTRA as extra
|
||||
from INFORMATION_SCHEMA.COLUMNS
|
||||
where TABLE_SCHEMA = '#DATABASE#' and TABLE_NAME =[OBJECT_NAME_CONDITION]
|
||||
order by ORDINAL_POSITION
|
||||
`;
|
||||
@@ -1,17 +0,0 @@
|
||||
module.exports = `
|
||||
select
|
||||
REFERENTIAL_CONSTRAINTS.CONSTRAINT_NAME as constraintName,
|
||||
REFERENTIAL_CONSTRAINTS.TABLE_NAME as pureName,
|
||||
REFERENTIAL_CONSTRAINTS.UPDATE_RULE as updateAction,
|
||||
REFERENTIAL_CONSTRAINTS.DELETE_RULE as deleteAction,
|
||||
REFERENTIAL_CONSTRAINTS.REFERENCED_TABLE_NAME as refTableName,
|
||||
KEY_COLUMN_USAGE.COLUMN_NAME as columnName,
|
||||
KEY_COLUMN_USAGE.REFERENCED_COLUMN_NAME as refColumnName
|
||||
from INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS
|
||||
inner join INFORMATION_SCHEMA.KEY_COLUMN_USAGE
|
||||
on REFERENTIAL_CONSTRAINTS.TABLE_NAME = KEY_COLUMN_USAGE.TABLE_NAME
|
||||
and REFERENTIAL_CONSTRAINTS.CONSTRAINT_NAME = KEY_COLUMN_USAGE.CONSTRAINT_NAME
|
||||
and REFERENTIAL_CONSTRAINTS.CONSTRAINT_SCHEMA = KEY_COLUMN_USAGE.CONSTRAINT_SCHEMA
|
||||
where REFERENTIAL_CONSTRAINTS.CONSTRAINT_SCHEMA = '#DATABASE#' and REFERENTIAL_CONSTRAINTS.TABLE_NAME =[OBJECT_NAME_CONDITION]
|
||||
order by KEY_COLUMN_USAGE.ORDINAL_POSITION
|
||||
`;
|
||||
@@ -1,13 +0,0 @@
|
||||
// Aggregates the MySQL analyser query snippets into one lookup object,
// keyed by the names used in MySqlAnalyser.createQuery().
const columns = require('./columns');
const tables = require('./tables');
const primaryKeys = require('./primaryKeys');
const foreignKeys = require('./foreignKeys');
const tableModifications = require('./tableModifications');

module.exports = {
  columns,
  tables,
  primaryKeys,
  foreignKeys,
  tableModifications,
};
|
||||
@@ -1,12 +0,0 @@
|
||||
module.exports = `select
|
||||
TABLE_CONSTRAINTS.CONSTRAINT_NAME as constraintName,
|
||||
TABLE_CONSTRAINTS.TABLE_NAME as pureName,
|
||||
KEY_COLUMN_USAGE.COLUMN_NAME as columnName
|
||||
from INFORMATION_SCHEMA.TABLE_CONSTRAINTS
|
||||
inner join INFORMATION_SCHEMA.KEY_COLUMN_USAGE
|
||||
on TABLE_CONSTRAINTS.TABLE_NAME = KEY_COLUMN_USAGE.TABLE_NAME
|
||||
and TABLE_CONSTRAINTS.CONSTRAINT_NAME = KEY_COLUMN_USAGE.CONSTRAINT_NAME
|
||||
and TABLE_CONSTRAINTS.CONSTRAINT_SCHEMA = KEY_COLUMN_USAGE.CONSTRAINT_SCHEMA
|
||||
where TABLE_CONSTRAINTS.CONSTRAINT_SCHEMA = '#DATABASE#' and TABLE_CONSTRAINTS.TABLE_NAME =[OBJECT_NAME_CONDITION] AND TABLE_CONSTRAINTS.CONSTRAINT_TYPE = 'PRIMARY KEY'
|
||||
order by KEY_COLUMN_USAGE.ORDINAL_POSITION
|
||||
`;
|
||||
@@ -1,7 +0,0 @@
|
||||
module.exports = `
|
||||
select
|
||||
TABLE_NAME,
|
||||
case when ENGINE='InnoDB' then CREATE_TIME else coalesce(UPDATE_TIME, CREATE_TIME) end as ALTER_TIME
|
||||
from information_schema.tables
|
||||
where TABLE_SCHEMA = '#DATABASE#'
|
||||
`;
|
||||
@@ -1,7 +0,0 @@
|
||||
module.exports = `
|
||||
select
|
||||
TABLE_NAME as pureName,
|
||||
case when ENGINE='InnoDB' then CREATE_TIME else coalesce(UPDATE_TIME, CREATE_TIME) end as alterTime
|
||||
from information_schema.tables
|
||||
where TABLE_SCHEMA = '#DATABASE#' and TABLE_NAME =[OBJECT_NAME_CONDITION];
|
||||
`;
|
||||
@@ -1,16 +0,0 @@
|
||||
{
|
||||
"name": "@dbgate/engines",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"main": "index.js",
|
||||
"typings": "./index.d.ts",
|
||||
"devDependencies": {
|
||||
"@dbgate/types": "^0.1.0",
|
||||
"@types/lodash": "^4.14.149",
|
||||
"nodemon": "^2.0.2",
|
||||
"typescript": "^3.7.5"
|
||||
},
|
||||
"dependencies": {
|
||||
"lodash": "^4.17.15"
|
||||
}
|
||||
}
|
||||
@@ -1,40 +0,0 @@
|
||||
const fp = require('lodash/fp');
|
||||
const _ = require('lodash');
|
||||
const sql = require('./sql');
|
||||
|
||||
const DatabaseAnalayser = require('../default/DatabaseAnalyser');
|
||||
|
||||
/**
 * Schema analyser for PostgreSQL.
 *
 * Loads tables, columns, primary keys and foreign keys via the queries in
 * ./sql and merges them into the dbgate structure format.
 */
class PostgreAnalyser extends DatabaseAnalayser {
  constructor(pool, driver) {
    super(pool, driver);
  }

  /**
   * Loads a query template from ./sql and substitutes its
   * '=[OBJECT_ID_CONDITION]' placeholder. The object-type flags are
   * currently unused; every object is matched.
   */
  createQuery(resFileName, tables = false, views = false, procedures = false, functions = false, triggers = false) {
    let res = sql[resFileName];
    res = res.replace('=[OBJECT_ID_CONDITION]', ' is not null');
    return res;
  }

  /** Runs the analyser queries and merges the result. */
  async _runAnalysis() {
    const tables = await this.driver.query(this.pool, this.createQuery('tableModifications'));
    const columns = await this.driver.query(this.pool, this.createQuery('columns'));
    const pkColumns = await this.driver.query(this.pool, this.createQuery('primaryKeys'));
    const fkColumns = await this.driver.query(this.pool, this.createQuery('foreignKeys'));

    return this.mergeAnalyseResult({
      tables: tables.rows.map((table) => ({
        ...table,
        columns: columns.rows
          .filter((col) => col.pureName == table.pureName && col.schemaName == table.schemaName)
          .map(({ isNullable, ...col }) => ({
            ...col,
            notNull: !isNullable,
          })),
        primaryKey: DatabaseAnalayser.extractPrimaryKeys(table, pkColumns.rows),
        foreignKeys: DatabaseAnalayser.extractForeignKeys(table, fkColumns.rows),
      })),
    });
  }
}
|
||||
|
||||
module.exports = PostgreAnalyser;
|
||||
@@ -1,5 +0,0 @@
|
||||
const SqlDumper = require('../default/SqlDumper');
|
||||
|
||||
/** PostgreSQL flavor of SqlDumper; no Postgres-specific overrides are needed yet. */
class PostgreDumper extends SqlDumper {}
|
||||
|
||||
module.exports = PostgreDumper;
|
||||
@@ -1,117 +0,0 @@
|
||||
const _ = require('lodash');
|
||||
const PostgreAnalyser = require('./PostgreAnalyser');
|
||||
const PostgreDumper = require('./PostgreDumper');
|
||||
|
||||
/**
 * PostgreSQL dialect description.
 * @type {import('@dbgate/types').SqlDialect}
 */
const dialect = {
  rangeSelect: true,
  // Note: ANSI Postgres escapes quotes by doubling; backslash applies to
  // E'' strings — presumably matches the dumper's quoting; verify.
  stringEscapeChar: '\\',
  // Postgres quotes identifiers with double quotes.
  quoteIdentifier(s) {
    return '"' + s + '"';
  },
};
|
||||
|
||||
/**
 * PostgreSQL engine driver (backed by the `pg` package).
 * @type {import('@dbgate/types').EngineDriver}
 */
const driver = {
  /** Opens a client connection; defaults to the 'postgres' maintenance DB. */
  async connect(nativeModules, { server, port, user, password, database }) {
    const client = new nativeModules.pg.Client({
      host: server,
      port,
      user,
      password,
      database: database || 'postgres',
    });
    await client.connect();
    client._nativeModules = nativeModules;
    return client;
  },
  /** Runs a query and returns { rows, columns } (columns = pg field descriptors). */
  async query(client, sql) {
    const res = await client.query(sql);
    return { rows: res.rows, columns: res.fields };
  },
  /**
   * Runs a query through pg-query-stream, forwarding recordset/row/error/done
   * events to the options callbacks. Column names for the recordset event are
   * derived from the keys of the first row (pg-query-stream exposes no field
   * metadata before the first row arrives).
   */
  async stream(client, sql, options) {
    const query = new client._nativeModules.pgQueryStream(sql);
    const stream = client.query(query);

    // Whether the column header has already been emitted for this result.
    let wasHeader = false;

    const handleEnd = (result) => {
      options.done(result);
    };

    const handleReadable = () => {
      let row = stream.read();
      if (!wasHeader && row) {
        // Emit column descriptors once, based on the first row's keys.
        options.recordset(_.keys(row).map((columnName) => ({ columnName })));
        wasHeader = true;
      }

      // Drain everything currently buffered in the stream.
      while (row) {
        options.row(row);
        row = stream.read();
      }
    };

    const handleError = (error) => {
      console.log('ERROR', error);
      // NOTE(review): pg errors expose `position`, not lineNumber/procName;
      // these fields are likely undefined here — verify against pg docs.
      const { message, lineNumber, procName } = error;
      options.info({
        message,
        line: lineNumber,
        procedure: procName,
        time: new Date(),
        severity: 'error',
      });
    };

    stream.on('error', handleError);
    stream.on('readable', handleReadable);
    stream.on('end', handleEnd);

    return stream;
  },
  /** Reads the server version string via version(). */
  async getVersion(client) {
    const { rows } = await this.query(client, 'SELECT version()');
    const { version } = rows[0];
    return { version };
  },
  async analyseFull(pool) {
    const analyser = new PostgreAnalyser(pool, this);
    return analyser.fullAnalysis();
  },
  async analyseIncremental(pool, structure) {
    const analyser = new PostgreAnalyser(pool, this);
    return analyser.incrementalAnalysis(structure);
  },
  createDumper() {
    return new PostgreDumper(this);
  },
  /** Lists non-template databases on the server. */
  async listDatabases(client) {
    const { rows } = await this.query(client, 'SELECT datname AS name FROM pg_database WHERE datistemplate = false');
    return rows;
  },
  dialect,
  engine: 'postgres',
};
|
||||
|
||||
module.exports = driver;
|
||||
@@ -1,19 +0,0 @@
|
||||
module.exports = `
|
||||
select
|
||||
table_schema as "schemaName",
|
||||
table_name as "pureName",
|
||||
column_name as "columnName",
|
||||
is_nullable as "isNullable",
|
||||
data_type as dataType,
|
||||
character_maximum_length,
|
||||
numeric_precision,
|
||||
numeric_scale,
|
||||
column_default
|
||||
from information_schema.columns
|
||||
where
|
||||
table_schema <> 'information_schema'
|
||||
and table_schema <> 'pg_catalog'
|
||||
and table_schema !~ '^pg_toast'
|
||||
and 'table:' || table_schema || '.' || table_name =[OBJECT_ID_CONDITION]
|
||||
order by ordinal_position
|
||||
`;
|
||||
@@ -1,24 +0,0 @@
|
||||
module.exports = `
|
||||
select
|
||||
fk.constraint_name as "constraintName",
|
||||
fk.constraint_schema as "constraintSchema",
|
||||
base.table_name as "pureName",
|
||||
base.table_schema as "schemaName",
|
||||
fk.update_rule as "updateAction",
|
||||
fk.delete_rule as "deleteAction",
|
||||
ref.table_name as "refTableName",
|
||||
ref.table_schema as "refSchemaName",
|
||||
basecol.column_name as "columnName",
|
||||
refcol.column_name as "refColumnName"
|
||||
from information_schema.referential_constraints fk
|
||||
inner join information_schema.table_constraints base on fk.constraint_name = base.constraint_name and fk.constraint_schema = base.constraint_schema
|
||||
inner join information_schema.table_constraints ref on fk.unique_constraint_name = ref.constraint_name and fk.unique_constraint_schema = ref.constraint_schema
|
||||
inner join information_schema.key_column_usage basecol on base.table_name = basecol.table_name and base.constraint_name = basecol.constraint_name
|
||||
inner join information_schema.key_column_usage refcol on ref.table_name = refcol.table_name and ref.constraint_name = refcol.constraint_name and basecol.ordinal_position = refcol.ordinal_position
|
||||
where
|
||||
base.table_schema <> 'information_schema'
|
||||
and base.table_schema <> 'pg_catalog'
|
||||
and base.table_schema !~ '^pg_toast'
|
||||
and 'table:' || base.table_schema || '.' || base.table_name =[OBJECT_ID_CONDITION]
|
||||
order by basecol.ordinal_position
|
||||
`;
|
||||
@@ -1,11 +0,0 @@
|
||||
const columns = require('./columns');
|
||||
const tableModifications = require('./tableModifications');
|
||||
const primaryKeys = require('./primaryKeys');
|
||||
const foreignKeys = require('./foreignKeys');
|
||||
|
||||
module.exports = {
|
||||
columns,
|
||||
tableModifications,
|
||||
primaryKeys,
|
||||
foreignKeys,
|
||||
};
|
||||
@@ -1,17 +0,0 @@
|
||||
module.exports = `
|
||||
select
|
||||
table_constraints.constraint_schema as "constraintSchema",
|
||||
table_constraints.constraint_name as "constraintName",
|
||||
table_constraints.table_schema as "schemaName",
|
||||
table_constraints.table_name as "pureName",
|
||||
key_column_usage.column_name as "columnName"
|
||||
from information_schema.table_constraints
|
||||
inner join information_schema.key_column_usage on table_constraints.table_name = key_column_usage.table_name and table_constraints.constraint_name = key_column_usage.constraint_name
|
||||
where
|
||||
table_constraints.table_schema <> 'information_schema'
|
||||
and table_constraints.table_schema <> 'pg_catalog'
|
||||
and table_constraints.table_schema !~ '^pg_toast'
|
||||
and table_constraints.constraint_type = 'PRIMARY KEY'
|
||||
and 'table:' || table_constraints.table_schema || '.' || table_constraints.table_name =[OBJECT_ID_CONDITION]
|
||||
order by key_column_usage.ordinal_position
|
||||
`;
|
||||
@@ -1,52 +0,0 @@
|
||||
module.exports = `
|
||||
with pkey as
|
||||
(
|
||||
select cc.conrelid, format(E'create constraint %I primary key(%s);\\n', cc.conname,
|
||||
string_agg(a.attname, ', '
|
||||
order by array_position(cc.conkey, a.attnum))) pkey
|
||||
from pg_catalog.pg_constraint cc
|
||||
join pg_catalog.pg_class c on c.oid = cc.conrelid
|
||||
join pg_catalog.pg_attribute a on a.attrelid = cc.conrelid
|
||||
and a.attnum = any(cc.conkey)
|
||||
where cc.contype = 'p'
|
||||
group by cc.conrelid, cc.conname
|
||||
)
|
||||
|
||||
|
||||
SELECT oid as "objectId", nspname as "schemaName", relname as "pureName",
|
||||
md5('CREATE TABLE ' || nspname || '.' || relname || E'\\n(\\n' ||
|
||||
array_to_string(
|
||||
array_agg(
|
||||
' ' || column_name || ' ' || type || ' '|| not_null
|
||||
)
|
||||
, E',\\n'
|
||||
) || E'\\n);\\n' || (select pkey from pkey where pkey.conrelid = oid)) as "hash"
|
||||
from
|
||||
(
|
||||
SELECT
|
||||
c.relname, a.attname AS column_name, c.oid,
|
||||
n.nspname,
|
||||
pg_catalog.format_type(a.atttypid, a.atttypmod) as type,
|
||||
case
|
||||
when a.attnotnull
|
||||
then 'NOT NULL'
|
||||
else 'NULL'
|
||||
END as not_null
|
||||
FROM pg_class c,
|
||||
pg_namespace n,
|
||||
pg_attribute a,
|
||||
pg_type t
|
||||
|
||||
WHERE c.relkind = 'r'
|
||||
AND a.attnum > 0
|
||||
AND a.attrelid = c.oid
|
||||
AND a.atttypid = t.oid
|
||||
AND n.oid = c.relnamespace
|
||||
AND n.nspname <> 'pg_catalog'
|
||||
AND n.nspname <> 'information_schema'
|
||||
AND n.nspname !~ '^pg_toast'
|
||||
ORDER BY a.attnum
|
||||
) as tabledefinition
|
||||
where 'table:' || nspname || '.' || relname =[OBJECT_ID_CONDITION]
|
||||
group by relname, nspname, oid
|
||||
`;
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user