Compare commits
1039 Commits
v5.3.2-bet
...
v6.0.0-bet
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ff94e46179 | ||
|
|
921bd4613a | ||
|
|
6f4a49ea97 | ||
|
|
9029fccad4 | ||
|
|
edf3a072c5 | ||
|
|
0fde8c49a7 | ||
|
|
767c835a8e | ||
|
|
38f0223dc0 | ||
|
|
ec707b5af3 | ||
|
|
462d5e3187 | ||
|
|
5cc4c07941 | ||
|
|
20de78f88a | ||
|
|
a63d70ca7e | ||
|
|
672d6b88b2 | ||
|
|
add1612c92 | ||
|
|
96b964e609 | ||
|
|
6b40190097 | ||
|
|
f5b0bc5605 | ||
|
|
64a58252e5 | ||
|
|
46571684f6 | ||
|
|
710022539b | ||
|
|
bc75d559b0 | ||
|
|
a82ee5cc65 | ||
|
|
6647dd16f8 | ||
|
|
6f4e1e07f7 | ||
|
|
5cd951a9c1 | ||
|
|
f492a215b4 | ||
|
|
bbd2d74a28 | ||
|
|
2b697e21ba | ||
|
|
3a4e4ecbdc | ||
|
|
05cbb915d6 | ||
|
|
26a2bb75fa | ||
|
|
71b0bb78ec | ||
|
|
4e4eb39a19 | ||
|
|
b6399c8271 | ||
|
|
eb4a764407 | ||
|
|
27188eb2c5 | ||
|
|
57a997adc3 | ||
|
|
a72a03cc3a | ||
|
|
bb185d9e9f | ||
|
|
0298660714 | ||
|
|
8bf1dbb10d | ||
|
|
8e5ef98a7c | ||
|
|
72bd536aec | ||
|
|
1a4009a6b2 | ||
|
|
e40357c052 | ||
|
|
222ea07cf2 | ||
|
|
6b3f398de3 | ||
|
|
d796fa7ff4 | ||
|
|
6fdfd8717f | ||
|
|
81cea4c0f2 | ||
|
|
6a64633650 | ||
|
|
21702f1593 | ||
|
|
32ddb9c4c7 | ||
|
|
10fc62ceb7 | ||
|
|
d3018a3136 | ||
|
|
f9bcbd588b | ||
|
|
5c7d2bfd85 | ||
|
|
788f0ebf77 | ||
|
|
0eca5dd95d | ||
|
|
47322b0bbb | ||
|
|
518a05a6f0 | ||
|
|
352e426e17 | ||
|
|
666122f265 | ||
|
|
61e32f6d95 | ||
|
|
7aabc8f0be | ||
|
|
a66dc03b99 | ||
|
|
cf5ecb3150 | ||
|
|
46c365c5cd | ||
|
|
ea76751e4a | ||
|
|
0bef3f8e71 | ||
|
|
c26c9fae12 | ||
|
|
446c615bb8 | ||
|
|
c516873541 | ||
|
|
d4326de087 | ||
|
|
eca966bb90 | ||
|
|
262b4732e3 | ||
|
|
7f9a30f568 | ||
|
|
a89d2e1365 | ||
|
|
fcd6f6c8fc | ||
|
|
8313d7f9f1 | ||
|
|
3a12601103 | ||
|
|
926949dc89 | ||
|
|
6b155083ef | ||
|
|
2b2ecac3ab | ||
|
|
35e9ff607d | ||
|
|
ae037834f2 | ||
|
|
3ac24436ba | ||
|
|
2ca17e826c | ||
|
|
4fb6128499 | ||
|
|
c359332746 | ||
|
|
1cd8e8e376 | ||
|
|
48ec2bdac8 | ||
|
|
2283e91532 | ||
|
|
647894ad60 | ||
|
|
574573abbb | ||
|
|
a735a03cd7 | ||
|
|
83881a0dac | ||
|
|
c04c6bbd2c | ||
|
|
42bbbc7ff4 | ||
|
|
1ecffeda71 | ||
|
|
92992d1e95 | ||
|
|
bc9df9750f | ||
|
|
27e70e8031 | ||
|
|
c823b8d19a | ||
|
|
170ff77eec | ||
|
|
c241f5c562 | ||
|
|
e06d964de4 | ||
|
|
dfdb86de6f | ||
|
|
a37b74f693 | ||
|
|
398d9f15df | ||
|
|
ab7c2d7a31 | ||
|
|
090549ff91 | ||
|
|
10330c6597 | ||
|
|
da2fe6a891 | ||
|
|
01b88221c5 | ||
|
|
46d25710b8 | ||
|
|
a40ec7e66b | ||
|
|
c8d2031d24 | ||
|
|
4f838e0ae3 | ||
|
|
c7cc1b7611 | ||
|
|
bf67a5f13d | ||
|
|
e6bbe66873 | ||
|
|
1a930acf0a | ||
|
|
a497467137 | ||
|
|
b463416633 | ||
|
|
ccf6240d65 | ||
|
|
5ccc12019d | ||
|
|
f57fa9aee9 | ||
|
|
80e841a43d | ||
|
|
e1d759041d | ||
|
|
fd6df055c0 | ||
|
|
669d0b9dac | ||
|
|
b9f9501e67 | ||
|
|
4b1c021871 | ||
|
|
1f79627dbe | ||
|
|
dc18be07ce | ||
|
|
83ac45f8cf | ||
|
|
c6cd865663 | ||
|
|
477636e0d7 | ||
|
|
77414ba934 | ||
|
|
86186072ed | ||
|
|
1216bcf9bf | ||
|
|
788ea70d32 | ||
|
|
18de37c4e4 | ||
|
|
aeb81bd97f | ||
|
|
a68660f1ab | ||
|
|
5abfa85a0e | ||
|
|
794f43d9ae | ||
|
|
4b2f762200 | ||
|
|
fc3664571b | ||
|
|
5db8f11fd6 | ||
|
|
598674a7e0 | ||
|
|
af17eceb27 | ||
|
|
90946c582d | ||
|
|
d619e0f961 | ||
|
|
08311145c8 | ||
|
|
a80e37a208 | ||
|
|
c88114cabe | ||
|
|
e33f3a1492 | ||
|
|
8328fdad33 | ||
|
|
8035380e7b | ||
|
|
b4ea528643 | ||
|
|
b0012872fa | ||
|
|
274fb595a2 | ||
|
|
c7ef4b9231 | ||
|
|
e64cfce423 | ||
|
|
c0c9c7be20 | ||
|
|
2ae98d0c2d | ||
|
|
a129834c16 | ||
|
|
71a9d6c5c0 | ||
|
|
1ce8f6bd1f | ||
|
|
85c4821606 | ||
|
|
06ed9d7dfc | ||
|
|
90de5edc99 | ||
|
|
72786e5dbb | ||
|
|
d92c08548b | ||
|
|
b1893234c7 | ||
|
|
534deff274 | ||
|
|
bdeffea79c | ||
|
|
ab2fdf26d2 | ||
|
|
0c902f037b | ||
|
|
c0595aec0a | ||
|
|
6fdb20fc34 | ||
|
|
8e74031fb1 | ||
|
|
81e4b947b6 | ||
|
|
c5d23410f4 | ||
|
|
f23575c405 | ||
|
|
8cb98cf643 | ||
|
|
b09a558670 | ||
|
|
abc3c5f880 | ||
|
|
1c0d966c3c | ||
|
|
20d7264aab | ||
|
|
4cf987b89a | ||
|
|
5bcd3f807d | ||
|
|
9e04f2b9c6 | ||
|
|
a7ceb8951c | ||
|
|
7392b223f4 | ||
|
|
51ce4f1bb5 | ||
|
|
059c310aaf | ||
|
|
11dad8ced3 | ||
|
|
18ed0fc020 | ||
|
|
eaa1b73851 | ||
|
|
5c2c24e009 | ||
|
|
1957531600 | ||
|
|
e47a165e11 | ||
|
|
3ca1adcb48 | ||
|
|
66bf1c847a | ||
|
|
47eea9b9b3 | ||
|
|
5c5a5f3b53 | ||
|
|
1c7729a797 | ||
|
|
cd06f13fcb | ||
|
|
632870d448 | ||
|
|
762055379a | ||
|
|
7374749340 | ||
|
|
e379be0107 | ||
|
|
2784053b83 | ||
|
|
12c4a0d498 | ||
|
|
35b5ea138d | ||
|
|
d75e1fc660 | ||
|
|
3ab6df5da2 | ||
|
|
8d4fc391a4 | ||
|
|
35f9fc3741 | ||
|
|
b465f3eb99 | ||
|
|
c4e6a90722 | ||
|
|
9cc4af2b56 | ||
|
|
5ccdd7633b | ||
|
|
880f4403cb | ||
|
|
cba391904a | ||
|
|
0fc397ace5 | ||
|
|
56a241b7f4 | ||
|
|
97eb999e4c | ||
|
|
9deaa89f21 | ||
|
|
74bae65e32 | ||
|
|
7091917578 | ||
|
|
bbf72d9ed7 | ||
|
|
552d10ef48 | ||
|
|
7c5479157a | ||
|
|
2463dba380 | ||
|
|
2f9209a92d | ||
|
|
370bd92518 | ||
|
|
ec083924fc | ||
|
|
22b450f7e0 | ||
|
|
1dd73e7319 | ||
|
|
c4fe4b40dd | ||
|
|
251137ac60 | ||
|
|
0ad7c99274 | ||
|
|
f53142d98a | ||
|
|
1f868523b0 | ||
|
|
94db02db2e | ||
|
|
9f0e06e663 | ||
|
|
c6dab85fc2 | ||
|
|
59aa2e3f33 | ||
|
|
21b26773e6 | ||
|
|
f308c5f6b0 | ||
|
|
2763b6028a | ||
|
|
a3df6d6e7d | ||
|
|
473bfcbec5 | ||
|
|
8976c9e653 | ||
|
|
86795dcc63 | ||
|
|
9b90f15621 | ||
|
|
7d0d9d3e22 | ||
|
|
17f0248a3e | ||
|
|
25d3dcad59 | ||
|
|
cbd857422f | ||
|
|
e65b4d0c2a | ||
|
|
6bcebb63e4 | ||
|
|
ac7708138c | ||
|
|
9d8ec9cc6b | ||
|
|
1b8a2cb923 | ||
|
|
a97ab9c09e | ||
|
|
9a73eb3620 | ||
|
|
f50e460335 | ||
|
|
fa72d9a39f | ||
|
|
75b4f49e31 | ||
|
|
a069093f6b | ||
|
|
62c741198a | ||
|
|
0266d912e0 | ||
|
|
55bc0fc93f | ||
|
|
47c00d7eb0 | ||
|
|
ad9fac861e | ||
|
|
14afd08fcb | ||
|
|
319580554f | ||
|
|
c750bd04ad | ||
|
|
bdd55d8432 | ||
|
|
98464e414b | ||
|
|
2f2d9c45a3 | ||
|
|
3665a0d064 | ||
|
|
c19c69266a | ||
|
|
bafa2c2fff | ||
|
|
2d823140b9 | ||
|
|
1fb4a06092 | ||
|
|
cb450a0313 | ||
|
|
7aaf6bb024 | ||
|
|
6a02ba3220 | ||
|
|
83610783e0 | ||
|
|
cec26b0614 | ||
|
|
fbf288198b | ||
|
|
193940fd63 | ||
|
|
bd169c316a | ||
|
|
5315f65cfb | ||
|
|
5a859d81d3 | ||
|
|
904fc4d500 | ||
|
|
634fe18127 | ||
|
|
89a9cc4380 | ||
|
|
57c62fbe27 | ||
|
|
590eff1e3b | ||
|
|
a71309a604 | ||
|
|
343e983b64 | ||
|
|
e31a52b659 | ||
|
|
41162ee2c3 | ||
|
|
55745c18e9 | ||
|
|
7d6b77ad2a | ||
|
|
90813b23d8 | ||
|
|
a999d29b1d | ||
|
|
f6f9b0a61a | ||
|
|
724edf44cb | ||
|
|
07248ca49f | ||
|
|
3a068c37b5 | ||
|
|
df44e5f6e9 | ||
|
|
9328d966ba | ||
|
|
1a293deec7 | ||
|
|
665a70ba3d | ||
|
|
967587c8e4 | ||
|
|
4927c13e55 | ||
|
|
1f9f997748 | ||
|
|
521e4ea3a2 | ||
|
|
941843e4c0 | ||
|
|
1434a42421 | ||
|
|
8f57d3a316 | ||
|
|
a74b789a8c | ||
|
|
ac4dd37249 | ||
|
|
75b3b4e012 | ||
|
|
188ab4c483 | ||
|
|
f9a562808d | ||
|
|
4ea763124b | ||
|
|
836d15c68f | ||
|
|
8ce4c0a7ce | ||
|
|
9613c2c410 | ||
|
|
e5d4bbadc1 | ||
|
|
5d4d2a447a | ||
|
|
d905962298 | ||
|
|
4ab9ad6881 | ||
|
|
2ce20b5fac | ||
|
|
81297383cb | ||
|
|
2aed60390c | ||
|
|
67386da136 | ||
|
|
bdc40c2c02 | ||
|
|
1d916e43d5 | ||
|
|
98bff4925a | ||
|
|
9af2c80b05 | ||
|
|
3a03c82f8d | ||
|
|
de66e75eb2 | ||
|
|
10d79dca4d | ||
|
|
460f511bf6 | ||
|
|
81207f95d8 | ||
|
|
3d3aca3290 | ||
|
|
d661b9f6a4 | ||
|
|
7deeb78d69 | ||
|
|
4dcf47b81f | ||
|
|
a674b9b3a1 | ||
|
|
b2aa4d9377 | ||
|
|
bc4a595815 | ||
|
|
2704825d03 | ||
|
|
456d3ba42e | ||
|
|
803a272331 | ||
|
|
89e4bfe3e1 | ||
|
|
697440693e | ||
|
|
c0cd04a96f | ||
|
|
c92f02bdda | ||
|
|
c4766d163c | ||
|
|
18889092aa | ||
|
|
adafa3c5c4 | ||
|
|
91994016a7 | ||
|
|
dfcf253217 | ||
|
|
bd3503912f | ||
|
|
b062c5fd66 | ||
|
|
b1cd60d0dd | ||
|
|
67ac1a1c8d | ||
|
|
c166eab2e8 | ||
|
|
c6b3ced493 | ||
|
|
4e922e806d | ||
|
|
0e087565b3 | ||
|
|
104b25d898 | ||
|
|
9bdff41ec1 | ||
|
|
d27b0a7be3 | ||
|
|
076b20ef6d | ||
|
|
d93d107039 | ||
|
|
708dcfd088 | ||
|
|
14501a70b9 | ||
|
|
3b82679c2d | ||
|
|
48d4fb4fec | ||
|
|
a03ca73d93 | ||
|
|
a46e592cfb | ||
|
|
634bea1bda | ||
|
|
3dfa23a30c | ||
|
|
24408dd7c2 | ||
|
|
32c7919885 | ||
|
|
967615b6e5 | ||
|
|
aee3a28465 | ||
|
|
3fdf27f820 | ||
|
|
e9302c7d6f | ||
|
|
c38fe83e48 | ||
|
|
adfc427d25 | ||
|
|
3912a58127 | ||
|
|
720d25e838 | ||
|
|
bc1d77a6f8 | ||
|
|
00a8d472ff | ||
|
|
5076ee0463 | ||
|
|
4a5ddd65f4 | ||
|
|
ec3c224f44 | ||
|
|
39b99ecf8f | ||
|
|
50232f9b90 | ||
|
|
66e8cc6e1d | ||
|
|
f6b4c94d00 | ||
|
|
f3ce240bcd | ||
|
|
7be9bf1bab | ||
|
|
d39871be70 | ||
|
|
3324eec011 | ||
|
|
dd9790fca5 | ||
|
|
41a3769c5f | ||
|
|
42601ff960 | ||
|
|
e54cffbaf3 | ||
|
|
f8dbad362c | ||
|
|
925db50418 | ||
|
|
f40db68579 | ||
|
|
3afde5f1fa | ||
|
|
b631519009 | ||
|
|
f05c60c628 | ||
|
|
e235de6d56 | ||
|
|
da47c437e0 | ||
|
|
f0048bc6cf | ||
|
|
f80ae284fa | ||
|
|
06753ff312 | ||
|
|
1e07614306 | ||
|
|
2d716ba43a | ||
|
|
c39dc6295d | ||
|
|
7557666135 | ||
|
|
5ee65124cb | ||
|
|
ab79617377 | ||
|
|
3b2a47a4ef | ||
|
|
7b4d9d8717 | ||
|
|
4c9734ac7f | ||
|
|
152e8a80ab | ||
|
|
98a348b091 | ||
|
|
e448f63ec3 | ||
|
|
0b13850eca | ||
|
|
de97404602 | ||
|
|
2c0b76fb3f | ||
|
|
cbd6ce7872 | ||
|
|
3c479eb33c | ||
|
|
beaff158cc | ||
|
|
6806620d90 | ||
|
|
4459347169 | ||
|
|
98e01497e9 | ||
|
|
4ed4c8c32c | ||
|
|
620acecdff | ||
|
|
2d214cfdb3 | ||
|
|
cd36259739 | ||
|
|
d049d8c571 | ||
|
|
7c51fcad96 | ||
|
|
1948c8ef89 | ||
|
|
f2b2ac6fd0 | ||
|
|
4098f63ce2 | ||
|
|
4d903abd85 | ||
|
|
f00eb2d3ef | ||
|
|
6752dcfd39 | ||
|
|
c3022eb80a | ||
|
|
2f6cbf25df | ||
|
|
8dfc2e7bcd | ||
|
|
fa0ad477cc | ||
|
|
ac6a68c38d | ||
|
|
25223471e7 | ||
|
|
56535b1e6f | ||
|
|
131c51f7c4 | ||
|
|
a27a2077ed | ||
|
|
7758fabc89 | ||
|
|
c9da9bdd23 | ||
|
|
9520b053af | ||
|
|
8a1e717c1b | ||
|
|
21127f661a | ||
|
|
7d614a2395 | ||
|
|
669ae024f9 | ||
|
|
9d8dd558e2 | ||
|
|
67f58a8dfe | ||
|
|
52d230b9e2 | ||
|
|
fd2a35fb4a | ||
|
|
d61b5e135f | ||
|
|
ef23b786ac | ||
|
|
29a66bfcb0 | ||
|
|
ef5e30df3d | ||
|
|
ab28a06bef | ||
|
|
9910c54aa6 | ||
|
|
6ec431f471 | ||
|
|
3ec7f651c1 | ||
|
|
87aa60bc3e | ||
|
|
73874aa5a1 | ||
|
|
976438f860 | ||
|
|
eb095b7c44 | ||
|
|
3ca745e74b | ||
|
|
040de84d93 | ||
|
|
4f1d63440e | ||
|
|
7c3cf1bb67 | ||
|
|
cbf1b0a3cc | ||
|
|
5287a86397 | ||
|
|
ae599ac6f6 | ||
|
|
a08a8ef208 | ||
|
|
19a4d97765 | ||
|
|
6f1f5f84c6 | ||
|
|
e2f352149d | ||
|
|
1fa39b20d2 | ||
|
|
53dc2e6f03 | ||
|
|
555f30c0b3 | ||
|
|
7549d37a04 | ||
|
|
29072eb71b | ||
|
|
48e9e77be5 | ||
|
|
4dd3f15ba3 | ||
|
|
3603501ae2 | ||
|
|
338180a21a | ||
|
|
28193ed6f3 | ||
|
|
0509710602 | ||
|
|
a4872b4159 | ||
|
|
888f5c6260 | ||
|
|
7a5abb5f47 | ||
|
|
61a9f02899 | ||
|
|
354c4201f7 | ||
|
|
d8340087c5 | ||
|
|
e3249c6d79 | ||
|
|
58e65608e4 | ||
|
|
2b6fdf5a6a | ||
|
|
967d7849ee | ||
|
|
7c476ab2f0 | ||
|
|
caaf35e45a | ||
|
|
6ddc9ee6c5 | ||
|
|
39aa250223 | ||
|
|
031a92db8e | ||
|
|
0c2579897f | ||
|
|
b63479bf45 | ||
|
|
4c89552265 | ||
|
|
517002e079 | ||
|
|
85bfb1986d | ||
|
|
632421eb73 | ||
|
|
21365be411 | ||
|
|
eaa54022fc | ||
|
|
55f7f39efd | ||
|
|
71e709b346 | ||
|
|
1d2d295a45 | ||
|
|
5ed23beff0 | ||
|
|
43a8db55a2 | ||
|
|
0a9cba7bf7 | ||
|
|
02af761bf7 | ||
|
|
6882a146e7 | ||
|
|
c9834f9792 | ||
|
|
21b4baf700 | ||
|
|
d3a24627dd | ||
|
|
8aac9cf59d | ||
|
|
ce70b2e71a | ||
|
|
62a5ef60f6 | ||
|
|
95430e9c11 | ||
|
|
1cee36cc9b | ||
|
|
aa475f81a0 | ||
|
|
1173d5db1d | ||
|
|
f34d0cbb90 | ||
|
|
780d187911 | ||
|
|
48d4374346 | ||
|
|
6b8b511d0d | ||
|
|
c6be115634 | ||
|
|
dadde225f1 | ||
|
|
31dfc1dc28 | ||
|
|
1de163af44 | ||
|
|
2181eada53 | ||
|
|
75e63d2710 | ||
|
|
fbfcdcbc40 | ||
|
|
0238e6a7f1 | ||
|
|
be17301c91 | ||
|
|
b1118c7f43 | ||
|
|
24bf5e5b0c | ||
|
|
122471f81f | ||
|
|
a6136cee25 | ||
|
|
83357ba2cc | ||
|
|
4fe10b26b0 | ||
|
|
485f6c9759 | ||
|
|
732c5b763b | ||
|
|
4431d08a88 | ||
|
|
cb7224ac94 | ||
|
|
66b39c1f80 | ||
|
|
b30f139b5d | ||
|
|
f39ec26c29 | ||
|
|
8c3c32aeba | ||
|
|
9eb27f5e92 | ||
|
|
3e5b45de8f | ||
|
|
e7b4a6ffcc | ||
|
|
e7ec75138d | ||
|
|
6c4679d83b | ||
|
|
5f23b29c4e | ||
|
|
55db98fe1b | ||
|
|
f7c5ffa0ce | ||
|
|
d1e98e5640 | ||
|
|
e785fdf9b7 | ||
|
|
fc0db925c5 | ||
|
|
5ab686b721 | ||
|
|
327d43096f | ||
|
|
1f7b632553 | ||
|
|
592d7987ab | ||
|
|
c429424fda | ||
|
|
0b4709d383 | ||
|
|
336929ff3f | ||
|
|
677f83cc4b | ||
|
|
5c58c35a64 | ||
|
|
b346a458a6 | ||
|
|
226512a4ca | ||
|
|
a0527d78e9 | ||
|
|
3357295d98 | ||
|
|
fc6a43b4fe | ||
|
|
260b2e4b12 | ||
|
|
f080b18d3f | ||
|
|
56f015ffd5 | ||
|
|
fd8a28831e | ||
|
|
503e09ddd1 | ||
|
|
880912806a | ||
|
|
665ce22741 | ||
|
|
e5c9ec7681 | ||
|
|
74fceeec78 | ||
|
|
77d60ccfa5 | ||
|
|
0c2b25f79a | ||
|
|
4065e05013 | ||
|
|
319a7fd003 | ||
|
|
26c01f43f9 | ||
|
|
88d7e07bea | ||
|
|
a9a5a3491e | ||
|
|
d255273368 | ||
|
|
a7846b4adf | ||
|
|
ce431e6e21 | ||
|
|
f8e39a2a5d | ||
|
|
e5135b1a9d | ||
|
|
c45a6f1299 | ||
|
|
873e60c26a | ||
|
|
b0134b221b | ||
|
|
f4bb13f617 | ||
|
|
c32955a7c9 | ||
|
|
f8fe444f29 | ||
|
|
08dd2ae38f | ||
|
|
a88a64710b | ||
|
|
c410a7bb07 | ||
|
|
0ba7b5fb39 | ||
|
|
334440f691 | ||
|
|
89c9d5e792 | ||
|
|
0d1b6702a7 | ||
|
|
b366a7d451 | ||
|
|
c1106c1b01 | ||
|
|
9c48608588 | ||
|
|
b32a6daeab | ||
|
|
b1f018905b | ||
|
|
17537e592f | ||
|
|
0211cf59af | ||
|
|
2728d60422 | ||
|
|
e5079f6dbf | ||
|
|
487ac94034 | ||
|
|
fda350c05b | ||
|
|
6f32e27eec | ||
|
|
3c4fad108b | ||
|
|
b232263708 | ||
|
|
086bc0d9f3 | ||
|
|
e21c6d4872 | ||
|
|
d2e49967e4 | ||
|
|
2f1cbbd75e | ||
|
|
670cfb9dc0 | ||
|
|
e54bd1da3f | ||
|
|
fb2b47615f | ||
|
|
00a6c19f09 | ||
|
|
51c8169232 | ||
|
|
8ab814cb8b | ||
|
|
577517e043 | ||
|
|
d17a667cf4 | ||
|
|
575f8f23a7 | ||
|
|
33eed816aa | ||
|
|
08fce96691 | ||
|
|
f74533b42f | ||
|
|
fb39cd1302 | ||
|
|
7ad1950777 | ||
|
|
b0165c14e9 | ||
|
|
4f429c27c0 | ||
|
|
ff33ec668b | ||
|
|
f6e0b634f0 | ||
|
|
36a65ea13a | ||
|
|
ae9ffe1aef | ||
|
|
15c400747e | ||
|
|
448c15c308 | ||
|
|
293ef047d0 | ||
|
|
5c50faa0a2 | ||
|
|
18e6200c3b | ||
|
|
8d865ab3b3 | ||
|
|
ceb51a2597 | ||
|
|
f2d29f97dc | ||
|
|
d75f533b76 | ||
|
|
7e74ce8366 | ||
|
|
c2f41e51da | ||
|
|
9158b69b1e | ||
|
|
f9ce6ed8f4 | ||
|
|
2f90106e32 | ||
|
|
a74f6db1e0 | ||
|
|
f1ad4e190a | ||
|
|
52e774f2cc | ||
|
|
14331501ba | ||
|
|
49e00a8a0f | ||
|
|
69bc9d6111 | ||
|
|
64ab1bb111 | ||
|
|
818f4eaa10 | ||
|
|
6e6699f60a | ||
|
|
ba665931dd | ||
|
|
5b010bcc53 | ||
|
|
fdb5fdfadd | ||
|
|
628d8eb5dc | ||
|
|
a78c375b90 | ||
|
|
f5f653965f | ||
|
|
0ea84fe034 | ||
|
|
11e8cff77e | ||
|
|
2db3f14509 | ||
|
|
db1d4aa555 | ||
|
|
1fcaf08644 | ||
|
|
703a4bdb57 | ||
|
|
3303fd1ee9 | ||
|
|
5b796a4d88 | ||
|
|
e5ab354d15 | ||
|
|
8fc8bc19d4 | ||
|
|
590bd166fd | ||
|
|
4f360eec96 | ||
|
|
d9c16e6d01 | ||
|
|
2a94e5da27 | ||
|
|
cb32d2152e | ||
|
|
a5d482ad18 | ||
|
|
017366f3aa | ||
|
|
582c982a9c | ||
|
|
ad6a93bfb5 | ||
|
|
bc92a63111 | ||
|
|
5ff1009c22 | ||
|
|
c1e6a01b63 | ||
|
|
5daf64360c | ||
|
|
3fd887d6cf | ||
|
|
486d7a946d | ||
|
|
22a81ed2ee | ||
|
|
77b6bddd87 | ||
|
|
0085505b7d | ||
|
|
880b07a328 | ||
|
|
f0f9be3051 | ||
|
|
176f28a178 | ||
|
|
e31c377d4e | ||
|
|
0f247450c7 | ||
|
|
2e67769491 | ||
|
|
b80c428224 | ||
|
|
6940bb4556 | ||
|
|
44142e8b25 | ||
|
|
ccb22be8bf | ||
|
|
64ff5d61a4 | ||
|
|
32ac4c1f28 | ||
|
|
365e697121 | ||
|
|
2bf717a2eb | ||
|
|
9d47ea61c7 | ||
|
|
b04b0afa03 | ||
|
|
6ed18c2dbb | ||
|
|
a68c04b355 | ||
|
|
25f8cb2dce | ||
|
|
a7509f511b | ||
|
|
6b31d728a8 | ||
|
|
787d6596bf | ||
|
|
a256acb203 | ||
|
|
d19c30d0b2 | ||
|
|
faa186c1e4 | ||
|
|
d8467b5ae1 | ||
|
|
2c096486f5 | ||
|
|
17e31270ae | ||
|
|
29debe0f80 | ||
|
|
60bbc45cb2 | ||
|
|
7c04dc00b1 | ||
|
|
eb56b6eab8 | ||
|
|
d0d226a9e1 | ||
|
|
cbdda06456 | ||
|
|
00ee4979fb | ||
|
|
3a0a3a2ddb | ||
|
|
90dfe889f7 | ||
|
|
43c3a4181c | ||
|
|
4838c29873 | ||
|
|
a3b6a7446d | ||
|
|
f015906347 | ||
|
|
40a4536d0b | ||
|
|
906ed3d237 | ||
|
|
416ed14a9d | ||
|
|
014d1a4572 | ||
|
|
eec4aba2f0 | ||
|
|
80a619bc85 | ||
|
|
0a2a43d12b | ||
|
|
b671816004 | ||
|
|
e9c8d86937 | ||
|
|
7362799a34 | ||
|
|
80106f82a9 | ||
|
|
891329de29 | ||
|
|
34e11b351e | ||
|
|
2d64d37f58 | ||
|
|
915c6f42ec | ||
|
|
875f1adb3d | ||
|
|
311bf3f706 | ||
|
|
2e9daba3aa | ||
|
|
106a09162b | ||
|
|
21f7623c29 | ||
|
|
31162ef175 | ||
|
|
50583f928a | ||
|
|
b87e53b704 | ||
|
|
2f42319d2b | ||
|
|
ff8a5f1658 | ||
|
|
be0be4d0a0 | ||
|
|
9a39fee663 | ||
|
|
075f92ac31 | ||
|
|
0c4ad146b8 | ||
|
|
3fa688c9cb | ||
|
|
fe9394103f | ||
|
|
b747c750e8 | ||
|
|
967daf3bb6 | ||
|
|
c097e78dd0 | ||
|
|
e982e8cd9b | ||
|
|
5559d51dfb | ||
|
|
791a2e8cd4 | ||
|
|
d243af323e | ||
|
|
73ec42a9c8 | ||
|
|
e71d278b20 | ||
|
|
61c3ff423a | ||
|
|
1afa9000f8 | ||
|
|
75ef8ec801 | ||
|
|
94dc292dc9 | ||
|
|
74adf1dd3f | ||
|
|
db6d5f498b | ||
|
|
b9737533bd | ||
|
|
93f64a6bab | ||
|
|
8367cc4b59 | ||
|
|
e97787113c | ||
|
|
6fb9c4b14f | ||
|
|
4436ff95a8 | ||
|
|
d54b47f713 | ||
|
|
62de736bce | ||
|
|
2232a7bab1 | ||
|
|
32ebd86171 | ||
|
|
8e17516d54 | ||
|
|
3bfa7d54d0 | ||
|
|
60bf682449 | ||
|
|
24c6205d81 | ||
|
|
018b97b197 | ||
|
|
4cbfa7c937 | ||
|
|
db7f3e5619 | ||
|
|
bcafd9a078 | ||
|
|
eaa943a39d | ||
|
|
3b813e93e7 | ||
|
|
23a52dc79e | ||
|
|
88e245da7d | ||
|
|
5ee9e5098c | ||
|
|
4ea55644c4 | ||
|
|
a13ca9f96a | ||
|
|
ba4826559b | ||
|
|
9d4803edc7 | ||
|
|
71850f8497 | ||
|
|
ccb28783a2 | ||
|
|
7ad8edcdae | ||
|
|
77b42e6a04 | ||
|
|
869e837ee5 | ||
|
|
b27f58be9f | ||
|
|
a51bd70e80 | ||
|
|
95f580d51c | ||
|
|
2b9fa9a70f | ||
|
|
1cbeeac7cd | ||
|
|
d131287ca0 | ||
|
|
9f553ef52a | ||
|
|
781d6f1585 | ||
|
|
76c8f8ef62 | ||
|
|
49e338bbbc | ||
|
|
968e69c7f2 | ||
|
|
8a69e94d79 | ||
|
|
80a4d3f238 | ||
|
|
30e3bc6eeb | ||
|
|
9bc654cd38 | ||
|
|
b9ad63c926 | ||
|
|
4bdcf219f2 | ||
|
|
303bd659ad | ||
|
|
9fedfcbb0e | ||
|
|
8cffeaa767 | ||
|
|
9e28f6f3aa | ||
|
|
19377bbeed | ||
|
|
12d60c7ed9 | ||
|
|
64e770f51e | ||
|
|
17cf9d5007 | ||
|
|
c3609e8c7b | ||
|
|
2a48e0c4a0 | ||
|
|
d0fa565704 | ||
|
|
b30286cd11 | ||
|
|
4b5c136589 | ||
|
|
84cd9d53b5 | ||
|
|
2ef4b534e3 | ||
|
|
b7c7e41375 | ||
|
|
c0d664d399 | ||
|
|
a89cb607b4 | ||
|
|
ecde2da2af | ||
|
|
7193a4d26c | ||
|
|
38d8a471b3 | ||
|
|
a9f9085daa | ||
|
|
83ce5710ae | ||
|
|
ddf385caac | ||
|
|
c582902902 | ||
|
|
e9cd1906bc | ||
|
|
2706297142 | ||
|
|
75465bf415 | ||
|
|
42a79b2557 | ||
|
|
838bc34a4f | ||
|
|
63cdb4e507 | ||
|
|
ff3c39ccad | ||
|
|
49597b4b01 | ||
|
|
a3b7490849 | ||
|
|
45a1c58dc5 | ||
|
|
61b9fd9210 | ||
|
|
7c8156fbb9 | ||
|
|
7a0635234a | ||
|
|
7e5364d400 | ||
|
|
cfa08286de | ||
|
|
9132bfb656 | ||
|
|
a9352f2a93 | ||
|
|
47729d8cc3 | ||
|
|
e537b43563 | ||
|
|
5f14da3844 | ||
|
|
e179b0f20b | ||
|
|
35532b718a | ||
|
|
42c71c1204 | ||
|
|
591945dc93 | ||
|
|
ecfaa7198b | ||
|
|
27e714111b | ||
|
|
c086eaa510 | ||
|
|
a7444a1475 | ||
|
|
399298d3bb | ||
|
|
196c0b8a3e | ||
|
|
5d6d827044 | ||
|
|
2440d6b75f | ||
|
|
623456b0a7 | ||
|
|
9bfb37ab94 | ||
|
|
630d909b73 | ||
|
|
33552e30b7 | ||
|
|
a64504ba02 | ||
|
|
04b195f4c6 | ||
|
|
17fd9035ee | ||
|
|
f867cc5a1e | ||
|
|
97aa563fe7 | ||
|
|
fb2e261a08 | ||
|
|
aad4df419c | ||
|
|
de60f1b335 | ||
|
|
c0c06a2099 | ||
|
|
bcfb54b7c7 | ||
|
|
8b56ebfb39 | ||
|
|
1128fe6c8f | ||
|
|
db5f5a9153 | ||
|
|
25fb3b71ca | ||
|
|
a6822dd293 | ||
|
|
112513a569 | ||
|
|
fc448ed578 | ||
|
|
f777530b1c | ||
|
|
7fcebedcdd | ||
|
|
cf39fd59f9 | ||
|
|
6beecd157f | ||
|
|
57b26a2729 | ||
|
|
6ee8ca5f86 | ||
|
|
1adf1da0eb | ||
|
|
d537a75d83 | ||
|
|
2e847eee9b | ||
|
|
07cb4defe6 | ||
|
|
74a597164e | ||
|
|
f7f4a0ed3f | ||
|
|
dc45b1e75f | ||
|
|
5e68ce3218 | ||
|
|
faf6339b41 | ||
|
|
33cd3b0647 | ||
|
|
4c5da50a04 | ||
|
|
2c805b3357 | ||
|
|
f345c80144 | ||
|
|
4346147bfc | ||
|
|
b0405855aa | ||
|
|
53ee6eacb2 | ||
|
|
f39b3dd347 | ||
|
|
385f8ff5fd | ||
|
|
fad8e91c7e | ||
|
|
74b0216714 | ||
|
|
af3529e5e7 | ||
|
|
d3936ae3ec | ||
|
|
0afee6e3fe | ||
|
|
f1920549a8 | ||
|
|
b5661afdcf | ||
|
|
38a80ec695 | ||
|
|
f697ba03f8 | ||
|
|
feaaa35590 | ||
|
|
74c04cf113 | ||
|
|
83e15ede5c | ||
|
|
6a942a5058 | ||
|
|
8864c3489d | ||
|
|
a4cb65b7b1 | ||
|
|
c3fe20b6f9 | ||
|
|
8db941dc06 | ||
|
|
05329951f9 | ||
|
|
dd964273cd | ||
|
|
c3c9ad1aed | ||
|
|
cd8fe5d691 | ||
|
|
15d99f98f8 | ||
|
|
be6e0f3bc8 | ||
|
|
3867b7f5ba | ||
|
|
1b347c7e0b | ||
|
|
10664b16fe | ||
|
|
e10e8ca161 | ||
|
|
227d81a01a | ||
|
|
bacb9510d7 | ||
|
|
48209509ae | ||
|
|
c2a01e4822 | ||
|
|
3e44fd823c | ||
|
|
47b98041c9 | ||
|
|
739205c192 | ||
|
|
8f0b44ade9 | ||
|
|
cb0a11fda9 | ||
|
|
befada8b87 | ||
|
|
85a43c7a5b | ||
|
|
50b64cf0c6 | ||
|
|
5c080568d8 | ||
|
|
9d5c7e6df2 | ||
|
|
4864a376c6 | ||
|
|
ef77dbf768 | ||
|
|
7999148f3c | ||
|
|
ed134d787b | ||
|
|
f04a3bdbd5 | ||
|
|
2a56b562eb | ||
|
|
2199a49126 | ||
|
|
14db7b1a98 | ||
|
|
314b72f148 | ||
|
|
db8b8feb3e | ||
|
|
6cdbfd1a89 | ||
|
|
49c90b9be9 | ||
|
|
8043869332 | ||
|
|
9f9c4d82da | ||
|
|
297b321bc8 | ||
|
|
a8999855bf | ||
|
|
0c12dcaf16 |
12
.github/workflows/build-app-beta.yaml
vendored
@@ -30,6 +30,9 @@ jobs:
|
||||
- name: yarn adjustPackageJson
|
||||
run: |
|
||||
yarn adjustPackageJson
|
||||
- name: setUpdaterChannel beta
|
||||
run: |
|
||||
node setUpdaterChannel beta
|
||||
- name: yarn set timeout
|
||||
run: |
|
||||
yarn config set network-timeout 100000
|
||||
@@ -44,9 +47,6 @@ jobs:
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
- name: fillNativeModulesElectron
|
||||
run: |
|
||||
yarn fillNativeModulesElectron
|
||||
- name: fillPackagedPlugins
|
||||
run: |
|
||||
yarn fillPackagedPlugins
|
||||
@@ -99,9 +99,13 @@ jobs:
|
||||
mv app/dist/*.deb artifacts/ || true
|
||||
mv app/dist/*.snap artifacts/ || true
|
||||
mv app/dist/*.dmg artifacts/ || true
|
||||
mv app/dist/*.blockmap artifacts/ || true
|
||||
|
||||
mv app/dist/*.yml artifacts/ || true
|
||||
rm artifacts/builder-debug.yml
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v1
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.os }}
|
||||
path: artifacts
|
||||
|
||||
148
.github/workflows/build-app-pro-beta.yaml
vendored
Normal file
@@ -0,0 +1,148 @@
|
||||
name: Electron app PREMIUM BETA
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# os: [windows-2022]
|
||||
# os: [ubuntu-22.04]
|
||||
# os: [windows-2022, ubuntu-22.04]
|
||||
os: [macos-12, windows-2022, ubuntu-22.04]
|
||||
# os: [macOS-10.15]
|
||||
|
||||
steps:
|
||||
- name: Context
|
||||
env:
|
||||
GITHUB_CONTEXT: ${{ toJson(github) }}
|
||||
run: echo "$GITHUB_CONTEXT"
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
- name: Use Node.js 18.x
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 18.x
|
||||
|
||||
- name: Checkout dbgate/dbgate-pro
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
mv dbgate-pro/* ../dbgate-pro/
|
||||
cd ..
|
||||
mkdir dbgate-merged
|
||||
cd dbgate-pro
|
||||
cd sync
|
||||
yarn
|
||||
node sync.js --nowatch
|
||||
cd ..
|
||||
|
||||
- name: yarn adjustPackageJson
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn adjustPackageJson
|
||||
- name: adjustAppPackageJsonPremium
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
node adjustAppPackageJsonPremium
|
||||
- name: setUpdaterChannel premium-beta
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
node setUpdaterChannel premium-beta
|
||||
- name: yarn set timeout
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn config set network-timeout 100000
|
||||
- name: yarn install
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn install
|
||||
- name: setCurrentVersion
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn setCurrentVersion
|
||||
- name: printSecrets
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
- name: fillPackagedPlugins
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn fillPackagedPlugins
|
||||
- name: Publish
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn run build:app
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GH_TOKEN }} # token for electron publish
|
||||
|
||||
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
|
||||
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
|
||||
# WIN_CSC_LINK: ${{ secrets.WINCERT_CERTIFICATE }}
|
||||
# WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_PASSWORD }}
|
||||
|
||||
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
|
||||
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
|
||||
|
||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
|
||||
|
||||
- name: Copy artifacts
|
||||
run: |
|
||||
mkdir artifacts
|
||||
|
||||
cp ../dbgate-merged/app/dist/*x86*.AppImage artifacts/dbgate-premium-beta.AppImage || true
|
||||
cp ../dbgate-merged/app/dist/*win*.exe artifacts/dbgate-premium-beta.exe || true
|
||||
cp ../dbgate-merged/app/dist/*-mac_x64.dmg artifacts/dbgate-premium-beta.dmg || true
|
||||
|
||||
mv ../dbgate-merged/app/dist/*.exe artifacts/ || true
|
||||
mv ../dbgate-merged/app/dist/*.zip artifacts/ || true
|
||||
mv ../dbgate-merged/app/dist/*.tar.gz artifacts/ || true
|
||||
mv ../dbgate-merged/app/dist/*.AppImage artifacts/ || true
|
||||
mv ../dbgate-merged/app/dist/*.deb artifacts/ || true
|
||||
mv ../dbgate-merged/app/dist/*.snap artifacts/ || true
|
||||
mv ../dbgate-merged/app/dist/*.dmg artifacts/ || true
|
||||
mv ../dbgate-merged/app/dist/*.blockmap artifacts/ || true
|
||||
|
||||
mv ../dbgate-merged/app/dist/*.yml artifacts/ || true
|
||||
rm artifacts/builder-debug.yml
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.os }}
|
||||
path: artifacts
|
||||
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
with:
|
||||
files: 'artifacts/**'
|
||||
prerelease: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
150
.github/workflows/build-app-pro.yaml
vendored
Normal file
@@ -0,0 +1,150 @@
|
||||
name: Electron app PREMIUM
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+'
|
||||
# - 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10
|
||||
|
||||
# branches:
|
||||
# - production
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# os: [ubuntu-22.04, windows-2016]
|
||||
os: [macos-12, windows-2022, ubuntu-22.04]
|
||||
|
||||
steps:
|
||||
- name: Context
|
||||
env:
|
||||
GITHUB_CONTEXT: ${{ toJson(github) }}
|
||||
run: echo "$GITHUB_CONTEXT"
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
- name: Use Node.js 18.x
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 18.x
|
||||
|
||||
- name: Checkout dbgate/dbgate-pro
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
mv dbgate-pro/* ../dbgate-pro/
|
||||
cd ..
|
||||
mkdir dbgate-merged
|
||||
cd dbgate-pro
|
||||
cd sync
|
||||
yarn
|
||||
node sync.js --nowatch
|
||||
cd ..
|
||||
|
||||
- name: yarn adjustPackageJson
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn adjustPackageJson
|
||||
- name: adjustAppPackageJsonPremium
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
node adjustAppPackageJsonPremium
|
||||
- name: setUpdaterChannel premium
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
node setUpdaterChannel premium
|
||||
- name: yarn set timeout
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn config set network-timeout 100000
|
||||
- name: yarn install
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn install
|
||||
- name: setCurrentVersion
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn setCurrentVersion
|
||||
- name: printSecrets
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
- name: fillPackagedPlugins
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn fillPackagedPlugins
|
||||
- name: Publish
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn run build:app
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GH_TOKEN }} # token for electron publish
|
||||
|
||||
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
|
||||
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
|
||||
# WIN_CSC_LINK: ${{ secrets.WINCERT_CERTIFICATE }}
|
||||
# WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_PASSWORD }}
|
||||
|
||||
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
|
||||
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
|
||||
|
||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
|
||||
|
||||
- name: Copy artifacts
|
||||
run: |
|
||||
mkdir artifacts
|
||||
|
||||
cp ../dbgate-merged/app/dist/*x86*.AppImage artifacts/dbgate-premium-latest.AppImage || true
|
||||
cp ../dbgate-merged/app/dist/*.exe artifacts/dbgate-premium-latest.exe || true
|
||||
cp ../dbgate-merged/app/dist/*win_x64.zip artifacts/dbgate-premium-windows-latest.zip || true
|
||||
cp ../dbgate-merged/app/dist/*win_arm64.zip artifacts/dbgate-premium-windows-latest-arm64.zip || true
|
||||
cp ../dbgate-merged/app/dist/*-mac_universal.dmg artifacts/dbgate-premium-latest.dmg || true
|
||||
cp ../dbgate-merged/app/dist/*-mac_x64.dmg artifacts/dbgate-premium-latest-x64.dmg || true
|
||||
|
||||
mv ../dbgate-merged/app/dist/*.exe artifacts/ || true
|
||||
mv ../dbgate-merged/app/dist/*.zip artifacts/ || true
|
||||
mv ../dbgate-merged/app/dist/*.tar.gz artifacts/ || true
|
||||
mv ../dbgate-merged/app/dist/*.AppImage artifacts/ || true
|
||||
mv ../dbgate-merged/app/dist/*.deb artifacts/ || true
|
||||
mv ../dbgate-merged/app/dist/*.dmg artifacts/ || true
|
||||
mv ../dbgate-merged/app/dist/*.blockmap artifacts/ || true
|
||||
|
||||
mv ../dbgate-merged/app/dist/*.yml artifacts/ || true
|
||||
rm artifacts/builder-debug.yml
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.os }}
|
||||
path: artifacts
|
||||
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
with:
|
||||
files: 'artifacts/**'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
22
.github/workflows/build-app.yaml
vendored
@@ -50,9 +50,6 @@ jobs:
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
- name: fillNativeModulesElectron
|
||||
run: |
|
||||
yarn fillNativeModulesElectron
|
||||
- name: fillPackagedPlugins
|
||||
run: |
|
||||
yarn fillPackagedPlugins
|
||||
@@ -109,6 +106,10 @@ jobs:
|
||||
mv app/dist/*.deb artifacts/ || true
|
||||
mv app/dist/*.dmg artifacts/ || true
|
||||
mv app/dist/*.snap artifacts/dbgate-latest.snap || true
|
||||
mv app/dist/*.blockmap artifacts/ || true
|
||||
|
||||
mv app/dist/*.yml artifacts/ || true
|
||||
rm artifacts/builder-debug.yml
|
||||
|
||||
# - name: Copy artifacts Linux, MacOs
|
||||
# if: matrix.os != 'windows-2016'
|
||||
@@ -134,24 +135,13 @@ jobs:
|
||||
|
||||
# mv app/dist/latest.yml artifacts/latest.yml || true
|
||||
|
||||
- name: Copy latest.yml (windows)
|
||||
- name: Copy PAD file
|
||||
if: matrix.os == 'windows-2022'
|
||||
run: |
|
||||
mv app/dist/latest.yml artifacts/latest.yml || true
|
||||
mv app/dist/dbgate-pad.xml artifacts/ || true
|
||||
|
||||
- name: Copy latest-linux.yml
|
||||
if: matrix.os == 'ubuntu-22.04'
|
||||
run: |
|
||||
mv app/dist/latest-linux.yml artifacts/latest-linux.yml || true
|
||||
|
||||
- name: Copy latest-mac.yml
|
||||
if: matrix.os == 'macos-12'
|
||||
run: |
|
||||
mv app/dist/latest-mac.yml artifacts/latest-mac.yml || true
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v1
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.os }}
|
||||
path: artifacts
|
||||
|
||||
148
.github/workflows/build-aws-pro-beta.yaml
vendored
Normal file
@@ -0,0 +1,148 @@
|
||||
name: AWS image PREMIUM
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+'
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-packer-beta.[0-9]+'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-22.04]
|
||||
|
||||
steps:
|
||||
- name: Context
|
||||
env:
|
||||
GITHUB_CONTEXT: ${{ toJson(github) }}
|
||||
run: echo "$GITHUB_CONTEXT"
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Use Node.js 18.x
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 18.x
|
||||
|
||||
- name: Setup `packer`
|
||||
uses: hashicorp/setup-packer@main
|
||||
with:
|
||||
version: latest
|
||||
|
||||
- name: Checkout dbgate/dbgate-pro
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
mv dbgate-pro/* ../dbgate-pro/
|
||||
cd ..
|
||||
mkdir dbgate-merged
|
||||
cd dbgate-pro
|
||||
cd sync
|
||||
yarn
|
||||
node sync.js --nowatch
|
||||
cd ..
|
||||
|
||||
- name: yarn adjustPackageJson
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn adjustPackageJson
|
||||
|
||||
- name: yarn install
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn install
|
||||
|
||||
- name: setCurrentVersion
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn setCurrentVersion
|
||||
|
||||
- name: printSecrets
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
|
||||
- name: Prepare packer build
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn run prepare:packer
|
||||
cd packer
|
||||
zip -r cloud-build.zip build
|
||||
|
||||
- name: Copy artifacts
|
||||
run: |
|
||||
mkdir artifacts
|
||||
cp ../dbgate-merged/packer/cloud-build.zip artifacts/cloud-build.zip || true
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.os }}
|
||||
path: artifacts
|
||||
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
with:
|
||||
files: 'artifacts/**'
|
||||
prerelease: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Run `packer init`
|
||||
run: |
|
||||
cd ../dbgate-merged/packer
|
||||
packer init ./aws-ubuntu.pkr.hcl
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
|
||||
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}}
|
||||
|
||||
- name: Run `packer build`
|
||||
run: |
|
||||
cd ../dbgate-merged/packer
|
||||
packer build ./aws-ubuntu.pkr.hcl
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
|
||||
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}}
|
||||
|
||||
# - name: Install AWS CLI
|
||||
# run: |
|
||||
# curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
|
||||
# unzip awscliv2.zip
|
||||
# sudo ./aws/install
|
||||
# sudo apt-get install jq -y
|
||||
|
||||
- name: Install jq
|
||||
run: |
|
||||
sudo apt-get install jq -y
|
||||
|
||||
- name: Delete old AMIs
|
||||
run: |
|
||||
cd ../dbgate-merged/packer
|
||||
chmod +x delete-old-amis.sh
|
||||
./delete-old-amis.sh
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
|
||||
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}}
|
||||
110
.github/workflows/build-docker-pro.yaml
vendored
Normal file
@@ -0,0 +1,110 @@
|
||||
name: Docker image PREMIUM
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+'
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-premium-beta.[0-9]+'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-22.04]
|
||||
|
||||
steps:
|
||||
- name: Context
|
||||
env:
|
||||
GITHUB_CONTEXT: ${{ toJson(github) }}
|
||||
run: echo "$GITHUB_CONTEXT"
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: |
|
||||
dbgate/dbgate-premium
|
||||
flavor: |
|
||||
latest=false
|
||||
tags: |
|
||||
type=raw,value=beta,enable=${{ contains(github.ref_name, '-docker.') || contains(github.ref_name, '-beta.') }}
|
||||
|
||||
type=match,pattern=\d+.\d+.\d+,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
|
||||
type=raw,value=latest,enable=${{ !contains(github.ref_name, '-docker.') && !contains(github.ref_name, '-beta.') }}
|
||||
|
||||
- name: Use Node.js 18.x
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 18.x
|
||||
|
||||
- name: Checkout dbgate/dbgate-pro
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
mv dbgate-pro/* ../dbgate-pro/
|
||||
cd ..
|
||||
mkdir dbgate-merged
|
||||
cd dbgate-pro
|
||||
cd sync
|
||||
yarn
|
||||
node sync.js --nowatch
|
||||
cd ..
|
||||
|
||||
- name: yarn adjustPackageJson
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn adjustPackageJson
|
||||
|
||||
- name: yarn install
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn install
|
||||
|
||||
- name: setCurrentVersion
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn setCurrentVersion
|
||||
|
||||
- name: printSecrets
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
- name: Prepare docker image
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn run prepare:docker
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
push: true
|
||||
context: ../dbgate-merged/docker
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
6
.github/workflows/build-docker.yaml
vendored
@@ -5,7 +5,6 @@ on:
|
||||
tags:
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+'
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-beta.[0-9]+'
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-docker.[0-9]+'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
@@ -57,6 +56,11 @@ jobs:
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 18.x
|
||||
|
||||
- name: yarn adjustPackageJson
|
||||
run: |
|
||||
yarn adjustPackageJson
|
||||
|
||||
- name: yarn install
|
||||
run: |
|
||||
# yarn --version
|
||||
|
||||
119
.github/workflows/build-npm-pro.yaml
vendored
Normal file
@@ -0,0 +1,119 @@
|
||||
name: NPM packages PREMIUM
|
||||
|
||||
# on: [push]
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+'
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-alpha.[0-9]+'
|
||||
|
||||
# on:
|
||||
# push:
|
||||
# branches:
|
||||
# - production
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-22.04]
|
||||
|
||||
steps:
|
||||
- name: Context
|
||||
env:
|
||||
GITHUB_CONTEXT: ${{ toJson(github) }}
|
||||
run: echo "$GITHUB_CONTEXT"
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
- name: Use Node.js 18.x
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 18.x
|
||||
|
||||
- name: Checkout dbgate/dbgate-pro
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
mv dbgate-pro/* ../dbgate-pro/
|
||||
cd ..
|
||||
mkdir dbgate-merged
|
||||
cd dbgate-pro
|
||||
cd sync
|
||||
yarn
|
||||
node sync.js --nowatch
|
||||
cd ..
|
||||
|
||||
- name: adjustNpmPackageJsonPremium
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
node adjustNpmPackageJsonPremium
|
||||
|
||||
- name: Configure NPM token
|
||||
env:
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
npm config set '//registry.npmjs.org/:_authToken' "${NPM_TOKEN}"
|
||||
|
||||
- name: Remove dbmodel - should be not published
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
rm -rf packages/dbmodel
|
||||
|
||||
- name: yarn install
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn install
|
||||
|
||||
- name: setCurrentVersion
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn setCurrentVersion
|
||||
|
||||
- name: printSecrets
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET : ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
|
||||
- name: Publish dbgate-api-premium
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged/packages/api
|
||||
npm publish
|
||||
|
||||
- name: Publish dbgate-web-premium
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged/packages/web
|
||||
npm publish
|
||||
|
||||
- name: Publish dbgate-serve-premium
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged/packages/serve
|
||||
npm publish
|
||||
|
||||
- name: Publish dbgate-plugin-cosmosdb
|
||||
run: |
|
||||
cd ..
|
||||
cd dbgate-merged/plugins/dbgate-plugin-cosmosdb
|
||||
npm publish
|
||||
10
.github/workflows/build-npm.yaml
vendored
@@ -90,11 +90,6 @@ jobs:
|
||||
run: |
|
||||
npm publish
|
||||
|
||||
- name: Publish dbgate (obsolete)
|
||||
working-directory: packages/dbgate
|
||||
run: |
|
||||
npm publish
|
||||
|
||||
- name: Publish dbgate-serve
|
||||
working-directory: packages/serve
|
||||
run: |
|
||||
@@ -154,3 +149,8 @@ jobs:
|
||||
working-directory: plugins/dbgate-plugin-oracle
|
||||
run: |
|
||||
npm publish
|
||||
|
||||
- name: Publish dbgate-plugin-clickhouse
|
||||
working-directory: plugins/dbgate-plugin-clickhouse
|
||||
run: |
|
||||
npm publish
|
||||
|
||||
8
.github/workflows/run-tests.yaml
vendored
@@ -4,6 +4,7 @@ on:
|
||||
branches:
|
||||
- master
|
||||
- develop
|
||||
- 'feature/**'
|
||||
|
||||
jobs:
|
||||
test-runner:
|
||||
@@ -77,6 +78,11 @@ jobs:
|
||||
ACCEPT_EULA: Y
|
||||
SA_PASSWORD: Pwd2020Db
|
||||
MSSQL_PID: Express
|
||||
|
||||
|
||||
clickhouse:
|
||||
image: bitnami/clickhouse:24.8.4
|
||||
env:
|
||||
CLICKHOUSE_ADMIN_PASSWORD: Pwd2020Db
|
||||
|
||||
# cockroachdb:
|
||||
# image: cockroachdb/cockroach
|
||||
|
||||
6
.gitignore
vendored
@@ -28,7 +28,7 @@ docker/plugins
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
app/src/nativeModulesContent.js
|
||||
packages/api/src/nativeModulesContent.js
|
||||
packages/api/src/packagedPluginsContent.js
|
||||
.VSCodeCounter
|
||||
.VSCodeCounter
|
||||
|
||||
packages/web/public/*.html
|
||||
142
CHANGELOG.md
@@ -8,6 +8,148 @@ Builds:
|
||||
- linux - application for linux
|
||||
- win - application for Windows
|
||||
|
||||
### Not published
|
||||
- ADDED: Order or filter the indexes for huge tables #922
|
||||
- ADDED: Empty string filters
|
||||
- CHANGED: (Premium) Workflow for new installation (used in Docker and AWS distribution)
|
||||
- ADDED: Show stored procedure and function parameters (MySQL, PostgreSQL, SQL Server, MariaDB) #348
|
||||
- FIXED: Selected database has changed when closing database grouped tab #983
|
||||
- ADDED: Add line break option to editor #823
|
||||
- ADDED: Order or filter the indexes for huge tables #922
|
||||
- ADDED: Preview mode for the top bar tab like vscode #767
|
||||
- ADDED: Keyboard navigation between connections, databases and tables
|
||||
- FIXED: Fixed some issues in connection search
|
||||
- FIXED: Schema selection in Export does not provide all schemas #924
|
||||
- CHANGED: Standardized Window menu in MacOS app
|
||||
- FIXED: Typecast ::date is treated as a parameter #925
|
||||
- FIXED: App crashes when trying to 'Open Structure' in a readonly connection #926
|
||||
- FIXED: Selected database has changed when closing database grouped tab #938
|
||||
- CHANGED: (Premium) Query designer and Query perspective designer moved to Premium edition
|
||||
- CHANGED: (Premium) Compare database tool - many improvements, moved to Premium edition
|
||||
- ADDED: (Premium) Export DB model - exporting model to YAML folder, JSON or SQL folder
|
||||
- CHANGED: Model deployer - many improvements, support of rename missing objects
|
||||
- ADDED: (Premium) Premium NPM distribution
|
||||
- CHANGED: (Premium) Amazon Redshift driver moved to Premium edition
|
||||
- ADDED: Generated API documentation https://dbgate.org/docs/apidoc.html
|
||||
- ADDED: NPM distribution now supports all dbgate database connectors, many improvements NPM packages
|
||||
- CHANGED: Optimized size of NPM plugins (eg. dbgate-plugin-mssql from 1.34 MB to 71 kB)
|
||||
- CHANGED: Unsaved connections are now shown in "Recent and unsaved" folder after disconnect
|
||||
- FIXED: Correctly show focused control, as defined by UX standards
|
||||
- ADDED: Data duplicator - weak references
|
||||
- ADDED: View JSON detail of log messages from export/import jobs and query executions
|
||||
- ADDED: Rename procedure/function context menu
|
||||
- ADDED: Show SQL quick view
|
||||
|
||||
### 5.5.6
|
||||
- FIXED: DbGate process consumes 100% after UI closed - Mac, Linux (#917, #915)
|
||||
- FIXED: Correctly closing connection behind SSH tunnel (#920)
|
||||
- FIXED: Updating MongoDB documents on MongoDB 4 (#916)
|
||||
- FIXED: (Premium) DbGate container correctly waits for underlying storage database, if database container is started after dbgate container is started
|
||||
- FIXED: (Premium) Better handling of connection storage errors
|
||||
|
||||
### 5.5.5
|
||||
- ADDED: AWS IAM authentication for MySQL, MariaDB, PostgreSQL (Premium)
|
||||
- FIXED: Datetime filtering #912
|
||||
- FIXED: Load redis keys
|
||||
- ADDED: Query parameters #913
|
||||
- FIXED: Data grid with hidden columns #911
|
||||
- ADDED: Added buttons for one-click authentication methods (Anonymous, OAuth) (Team Premium)
|
||||
- ADDED: Link for switching Admin/user login (Team Premium)
|
||||
- FIXED: Save connection params in administration for MS SQL and Postgres storages (Team Premium)
|
||||
|
||||
### 5.5.4
|
||||
- FIXED: correct handling when use LOGIN and PASSWORD env variables #903
|
||||
- FIXED: fixed problems in dbmodel commandline tool
|
||||
- ADDED: dbmodel - allow connection defined in environment variables
|
||||
- FIXED: Load postgres schema on Azure #906
|
||||
- FIXED: Oauth2 in combination with Google doesn't log payload #727
|
||||
- CHANGED: Improved error reporting for unhandled errors
|
||||
- CHANGED: Don't restart docker container in case of unhandled error
|
||||
- FIXED: Crash when displaying specific data values from MongoDB #908
|
||||
- ADDED: (Premium) Show purchase button after trial license is expired
|
||||
|
||||
### 5.5.3
|
||||
- FIXED: Separate schema mode #894 - for databases with many schemas
|
||||
- FIXED: Sort by UUID column in PostgreSQL #895
|
||||
- ADDED: Load pg_dump outputs #893
|
||||
- ADDED: Improved column mapping in import/export #330
|
||||
- FIXED: Fixed some errors in create-table workflow
|
||||
- CHANGED: Show single schema by default only if all objects are from default schema
|
||||
- FIXED: MS Entra authentication for Azure SQL
|
||||
|
||||
### 5.5.2
|
||||
- FIXED: MySQL, PostgreSQL readonly connections #900
|
||||
|
||||
### 5.5.1
|
||||
- ADDED: Clickhouse support (#532)
|
||||
- ADDED: MySQL - specify table engine, show table engine in table list
|
||||
- FIXED: Hidden primary key name in PK editor for DB engines with anonymous PK (MySQL)
|
||||
- CHANGED: Import/export dialog is now a tab instead of modal
|
||||
- ADDED: Saving import/export job
|
||||
- REMOVED: Ability to reopen export/import wizard from generated script. This was a bit hack, now you could save import/export job instead
|
||||
- ADDED: Autodetect CSV delimiter
|
||||
- FIXED: Import CSV files with spaces around quotes
|
||||
- ADDED: JSON file import
|
||||
- ADDED: JSON export can export objects with ID field used as object key
|
||||
- ADDED: JSON and JSON lines imports supports importing from web URL
|
||||
- FIXED: Editing imported URL in job editor
|
||||
- ADDED: Quick export from table data grid (#892)
|
||||
- CHANGED: Create table workflow is reworked, you can specify schema and table name in table editor
|
||||
- FIXED: After saving new table, table editor is reset to empty state
|
||||
- ADDED: (PostgreSQL, SQL Server) - ability to filter objects by schema
|
||||
- ADDED: (PostgreSQL, SQL Server) - Use separate schemas option - for databases with lot of schemas, only selected schema is loaded
|
||||
- FIXED: Internal refactor of drivers, client objects are not more messed up with auxiliary fields
|
||||
- ADDED: Copy connection error to clipboard after clicking on error icon
|
||||
- FIXED: (MySQL) Fixed importing SQL dump exported from mysqldump (#702)
|
||||
- FIXED: (PostgreSQL) Fixed filtering JSONB fields (#889)
|
||||
- FIXED: OIDC authentication not working anymore (#891)
|
||||
- ADDED: Added tests for import from CSV and JSON
|
||||
- FIXED: multiple shortcuts handling #898
|
||||
- ADDED: (Premium) MS Entra authentication for Azure SQL databases
|
||||
|
||||
### 5.4.4
|
||||
- CHANGED: Improved autoupdate, notification is now in app
|
||||
- CHANGED: Default behaviour of autoupdate, new version is downloaded after click of "Download" button
|
||||
- ADDED: Ability to configure autoupdate (check only, check+download, don't check)
|
||||
- ADDED: Option to run check for new version manually
|
||||
- FIXED: Fixed autoupgrade channel for premium edition
|
||||
- FIXED: Fixes following issues: #886, #865, #782, #375
|
||||
|
||||
### 5.4.2
|
||||
- FIXED: DbGate now works correctly with Oracle 10g
|
||||
- FIXED: Fixed update channel for premium edition
|
||||
|
||||
### 5.4.1
|
||||
- FIXED: Broken older plugins #881
|
||||
- ADDED: Premium edition - "Start trial" button
|
||||
|
||||
### 5.4.0
|
||||
- ADDED: Support for CosmosDB (Premium only)
|
||||
- ADDED: Administration UI (Premium only)
|
||||
- ADDED: New application icon
|
||||
- ADDED: MongoDB type support in data editing
|
||||
- ADDED: MongoDB - possibility to remove field
|
||||
- ADDED: Oracle - possibility to connect via SID
|
||||
- FIXED: Many improvements in MongoDB filtering
|
||||
- FIXED: Switch to form and back to table rows missing #343
|
||||
- ADDED: Possibility to deactivate MongoDB Profiler #745
|
||||
- ADDED: Ability to use Oracle thick driver - necessary for connecting older Oracle servers #843
|
||||
- FIXED: Connection permissions configuration is broken #860
|
||||
- ADDED: ssh key file authentication option missing #876
|
||||
- ADDED: Ability to reset layout #878
|
||||
- FIXED: Script with escaped backslash causes error #880
|
||||
|
||||
### 5.3.4
|
||||
- FIXED: On blank system does not start (window does not appear) #862
|
||||
- FIXED: Missing Execute, Export bar #861
|
||||
|
||||
### 5.3.3
|
||||
- FIXED: The application Window is not visible when opening after changing monitor configuration. #856
|
||||
- FIXED: Multi column filter is broken for Postgresql #855
|
||||
- ADDED: Do not display internal timescaledb objects in postgres databases #839
|
||||
- FIXED: When in splitview mode and Clicking "Refresh" button on the right side, will refresh the left side, and not the right side #810
|
||||
- FIXED: Cannot filter by uuid field in psql #538
|
||||
|
||||
### 5.3.1
|
||||
- FIXED: Column sorting on query tab not working #819
|
||||
- FIXED: Postgres Connection stays in "Loading database structure" until reloading the page #826
|
||||
|
||||
@@ -17,6 +17,8 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
|
||||
* Try it online - [demo.dbgate.org](https://demo.dbgate.org) - online demo application
|
||||
* **Download** application for Windows, Linux or Mac from [dbgate.org](https://dbgate.org/download/)
|
||||
* Run web version as [NPM package](https://www.npmjs.com/package/dbgate-serve) or as [docker image](https://hub.docker.com/r/dbgate/dbgate)
|
||||
* Use nodeJs [scripting interface](https://dbgate.org/docs/scripting.html) ([API documentation](https://dbgate.org/docs/apidoc.html))
|
||||
* [Recommend DbGate](https://testimonial.to/dbgate) | [Rate on G2](https://www.g2.com/products/dbgate/reviews)
|
||||
|
||||
## Supported databases
|
||||
* MySQL
|
||||
@@ -26,9 +28,11 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
|
||||
* MongoDB
|
||||
* Redis
|
||||
* SQLite
|
||||
* Amazon Redshift
|
||||
* Amazon Redshift (Premium)
|
||||
* CockroachDB
|
||||
* MariaDB
|
||||
* CosmosDB (Premium)
|
||||
* ClickHouse
|
||||
|
||||
<!-- Learn more about DbGate features at the [DbGate website](https://dbgate.org/), or try our online [demo application](https://demo.dbgate.org) -->
|
||||
|
||||
@@ -68,8 +72,8 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
|
||||
* Redis tree view, generate script from keys, run Redis script
|
||||
* Runs as application for Windows, Linux and Mac. Or in Docker container on server and in web Browser on client.
|
||||
* Import, export from/to CSV, Excel, JSON, NDJSON, XML
|
||||
* Free table editor - quick table data editing (cleanup data after import/before export, prototype tables etc.)
|
||||
* Archives - backup your data in NDJSON files on local filesystem (or on DbGate server, when using web application)
|
||||
* NDJSON data viewer and editor - browse NDJSON data, edit data and structure directly on NDJSON files. Works also for big NDJSON files
|
||||
* Charts, export chart to HTML page
|
||||
* For detailed info, how to run DbGate in docker container, visit [docker hub](https://hub.docker.com/r/dbgate/dbgate)
|
||||
* Extensible plugin architecture
|
||||
|
||||
@@ -1,12 +1,50 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const volatilePackages = require('./common/volatilePackages');
|
||||
|
||||
function adjustFile(file) {
|
||||
function adjustFile(file, isApp = false) {
|
||||
const json = JSON.parse(fs.readFileSync(file, { encoding: 'utf-8' }));
|
||||
|
||||
function processPackageFile(packageFile) {
|
||||
const pluginJson = JSON.parse(fs.readFileSync(packageFile, { encoding: 'utf-8' }));
|
||||
for (const depkey of ['dependencies', 'optionalDependencies']) {
|
||||
for (const dependency of Object.keys(pluginJson[depkey] || {})) {
|
||||
if (!volatilePackages.includes(dependency)) {
|
||||
// add only voletile packages
|
||||
continue;
|
||||
}
|
||||
if (!json[depkey]) {
|
||||
json[depkey] = {};
|
||||
}
|
||||
if (json[depkey][dependency]) {
|
||||
if (json[depkey][dependency] != pluginJson[depkey][dependency]) {
|
||||
console.log(`Dependency ${dependency} in ${packageName} is different from ${file}`);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
json[depkey][dependency] = pluginJson[depkey][dependency];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const packageName of fs.readdirSync('plugins')) {
|
||||
if (!packageName.startsWith('dbgate-plugin-')) continue;
|
||||
processPackageFile(path.join('plugins', packageName, 'package.json'));
|
||||
}
|
||||
|
||||
if (isApp) {
|
||||
// add volatile dependencies from api to app
|
||||
processPackageFile(path.join('packages', 'api', 'package.json'));
|
||||
}
|
||||
|
||||
if (process.platform != 'win32') {
|
||||
delete json.optionalDependencies.msnodesqlv8;
|
||||
}
|
||||
|
||||
fs.writeFileSync(file, JSON.stringify(json, null, 2), 'utf-8');
|
||||
}
|
||||
|
||||
adjustFile('packages/api/package.json');
|
||||
adjustFile('app/package.json');
|
||||
adjustFile('app/package.json', true);
|
||||
|
||||
fs.writeFileSync('common/useBundleExternals.js', "module.exports = 'true';", 'utf-8');
|
||||
|
||||
674
app/LICENSE
Normal file
@@ -0,0 +1,674 @@
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<https://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
||||
25
app/LICENSE-OLD
Normal file
@@ -0,0 +1,25 @@
|
||||
This project is licensed under the GPLv3 License. See the LICENSE file for full text of the GPLv3 license.
|
||||
|
||||
The original project was licensed under the MIT License, and the following notice applies to the original code:
|
||||
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2021 Jan Prochazka
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
BIN
app/icon.ico
|
Before Width: | Height: | Size: 192 KiB After Width: | Height: | Size: 202 KiB |
BIN
app/icon.png
|
Before Width: | Height: | Size: 57 KiB After Width: | Height: | Size: 68 KiB |
BIN
app/icon32.png
|
Before Width: | Height: | Size: 5.0 KiB After Width: | Height: | Size: 5.5 KiB |
|
Before Width: | Height: | Size: 89 KiB After Width: | Height: | Size: 64 KiB |
BIN
app/icon512.png
|
Before Width: | Height: | Size: 123 KiB After Width: | Height: | Size: 143 KiB |
|
Before Width: | Height: | Size: 27 KiB After Width: | Height: | Size: 32 KiB |
|
Before Width: | Height: | Size: 1.9 KiB After Width: | Height: | Size: 2.0 KiB |
|
Before Width: | Height: | Size: 57 KiB After Width: | Height: | Size: 68 KiB |
|
Before Width: | Height: | Size: 5.0 KiB After Width: | Height: | Size: 5.5 KiB |
|
Before Width: | Height: | Size: 8.4 KiB After Width: | Height: | Size: 9.6 KiB |
|
Before Width: | Height: | Size: 123 KiB After Width: | Height: | Size: 143 KiB |
|
Before Width: | Height: | Size: 12 KiB After Width: | Height: | Size: 14 KiB |
@@ -1,12 +1,13 @@
|
||||
{
|
||||
"name": "dbgate",
|
||||
"version": "5.0.0-alpha.1",
|
||||
"version": "6.0.0-alpha.1",
|
||||
"private": true,
|
||||
"author": "Jan Prochazka <jenasoft.database@gmail.com>",
|
||||
"description": "Opensource database administration tool",
|
||||
"dependencies": {
|
||||
"electron-log": "^4.4.1",
|
||||
"electron-updater": "^4.6.1",
|
||||
"electron-updater": "^6.3.4",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"lodash.clonedeepwith": "^4.5.0",
|
||||
"patch-package": "^6.4.7"
|
||||
},
|
||||
@@ -27,7 +28,11 @@
|
||||
"entitlements": "entitlements.mac.plist",
|
||||
"entitlementsInherit": "entitlements.mac.plist",
|
||||
"publish": [
|
||||
"github"
|
||||
{
|
||||
"provider": "github",
|
||||
"owner": "dbgate",
|
||||
"repo": "dbgate"
|
||||
}
|
||||
],
|
||||
"target": {
|
||||
"target": "default",
|
||||
@@ -55,7 +60,11 @@
|
||||
"category": "Development",
|
||||
"synopsis": "Database manager for SQL Server, MySQL, PostgreSQL, MongoDB and SQLite",
|
||||
"publish": [
|
||||
"github"
|
||||
{
|
||||
"provider": "github",
|
||||
"owner": "dbgate",
|
||||
"repo": "dbgate"
|
||||
}
|
||||
]
|
||||
},
|
||||
"appImage": {
|
||||
@@ -90,14 +99,18 @@
|
||||
],
|
||||
"icon": "icon.ico",
|
||||
"publish": [
|
||||
"github"
|
||||
],
|
||||
"rfc3161TimeStampServer": "http://sha256timestamp.ws.symantec.com/sha256/timestamp"
|
||||
{
|
||||
"provider": "github",
|
||||
"owner": "dbgate",
|
||||
"repo": "dbgate"
|
||||
}
|
||||
]
|
||||
},
|
||||
"files": [
|
||||
"packages",
|
||||
"src",
|
||||
"icon.png"
|
||||
"icon.png",
|
||||
"!node_modules/cpu-features/build/**"
|
||||
]
|
||||
},
|
||||
"homepage": "./",
|
||||
@@ -118,9 +131,5 @@
|
||||
"electron": "30.0.2",
|
||||
"electron-builder": "23.1.0",
|
||||
"electron-builder-notarize": "^1.5.2"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"better-sqlite3": "9.6.0",
|
||||
"msnodesqlv8": "^4.2.1"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,17 +16,20 @@ const BrowserWindow = electron.BrowserWindow;
|
||||
const path = require('path');
|
||||
const url = require('url');
|
||||
const mainMenuDefinition = require('./mainMenuDefinition');
|
||||
const { settings } = require('cluster');
|
||||
let disableAutoUpgrade = false;
|
||||
const { isProApp } = require('./proTools');
|
||||
const updaterChannel = require('./updaterChannel');
|
||||
|
||||
// require('@electron/remote/main').initialize();
|
||||
|
||||
const configRootPath = path.join(app.getPath('userData'), 'config-root.json');
|
||||
let saveConfigOnExit = true;
|
||||
let initialConfig = {};
|
||||
let apiLoaded = false;
|
||||
let mainModule;
|
||||
// let getLogger;
|
||||
// let loadLogsContent;
|
||||
let appUpdateStatus = '';
|
||||
let settingsJson = {};
|
||||
|
||||
process.on('uncaughtException', function (error) {
|
||||
console.error('uncaughtException', error);
|
||||
@@ -50,21 +53,11 @@ const isMac = () => os.platform() == 'darwin';
|
||||
|
||||
try {
|
||||
initialConfig = JSON.parse(fs.readFileSync(configRootPath, { encoding: 'utf-8' }));
|
||||
disableAutoUpgrade = initialConfig['disableAutoUpgrade'] || false;
|
||||
} catch (err) {
|
||||
console.log('Error loading config-root:', err.message);
|
||||
initialConfig = {};
|
||||
}
|
||||
|
||||
if (process.argv.includes('--disable-auto-upgrade')) {
|
||||
console.log('Disabling auto-upgrade');
|
||||
disableAutoUpgrade = true;
|
||||
}
|
||||
if (process.argv.includes('--enable-auto-upgrade')) {
|
||||
console.log('Enabling auto-upgrade');
|
||||
disableAutoUpgrade = false;
|
||||
}
|
||||
|
||||
// Keep a global reference of the window object, if you don't, the window will
|
||||
// be closed automatically when the JavaScript object is garbage collected.
|
||||
let mainWindow;
|
||||
@@ -73,6 +66,10 @@ let runCommandOnLoad = null;
|
||||
|
||||
log.transports.file.level = 'debug';
|
||||
autoUpdater.logger = log;
|
||||
if (updaterChannel) {
|
||||
autoUpdater.channel = updaterChannel;
|
||||
autoUpdater.allowPrerelease = updaterChannel.includes('beta');
|
||||
}
|
||||
// TODO - create settings for this
|
||||
// appUpdater.channel = 'beta';
|
||||
|
||||
@@ -111,7 +108,7 @@ function commandItem(item) {
|
||||
}
|
||||
|
||||
function buildMenu() {
|
||||
let template = _cloneDeepWith(mainMenuDefinition({ editMenu: true }), item => {
|
||||
let template = _cloneDeepWith(mainMenuDefinition({ editMenu: true, isMac: isMac() }), item => {
|
||||
if (item.divider) {
|
||||
return { type: 'separator' };
|
||||
}
|
||||
@@ -173,6 +170,21 @@ ipcMain.on('quit-app', async (event, arg) => {
|
||||
mainWindow.close();
|
||||
}
|
||||
});
|
||||
ipcMain.on('reset-settings', async (event, arg) => {
|
||||
try {
|
||||
saveConfigOnExit = false;
|
||||
fs.unlinkSync(configRootPath);
|
||||
console.log('Deleted file:', configRootPath);
|
||||
} catch (err) {
|
||||
console.log('Error deleting config-root:', err.message);
|
||||
}
|
||||
|
||||
if (isMac()) {
|
||||
app.quit();
|
||||
} else {
|
||||
mainWindow.close();
|
||||
}
|
||||
});
|
||||
ipcMain.on('set-title', async (event, arg) => {
|
||||
mainWindow.setTitle(arg);
|
||||
});
|
||||
@@ -191,6 +203,15 @@ ipcMain.on('app-started', async (event, arg) => {
|
||||
if (initialConfig['winIsMaximized']) {
|
||||
mainWindow.webContents.send('setIsMaximized', true);
|
||||
}
|
||||
if (autoUpdater.isUpdaterActive()) {
|
||||
mainWindow.webContents.send('setAppUpdaterActive');
|
||||
}
|
||||
if (!process.env.DEVMODE) {
|
||||
if (settingsJson['app.autoUpdateMode'] != 'skip') {
|
||||
autoUpdater.autoDownload = settingsJson['app.autoUpdateMode'] == 'download';
|
||||
autoUpdater.checkForUpdates();
|
||||
}
|
||||
}
|
||||
});
|
||||
ipcMain.on('window-action', async (event, arg) => {
|
||||
if (!mainWindow) {
|
||||
@@ -264,6 +285,20 @@ ipcMain.handle('showItemInFolder', async (event, path) => {
|
||||
ipcMain.handle('openExternal', async (event, url) => {
|
||||
electron.shell.openExternal(url);
|
||||
});
|
||||
ipcMain.on('downloadUpdate', async (event, url) => {
|
||||
autoUpdater.downloadUpdate();
|
||||
changeAppUpdateStatus({
|
||||
icon: 'icon loading',
|
||||
message: `Downloading update...`,
|
||||
});
|
||||
});
|
||||
ipcMain.on('applyUpdate', async (event, url) => {
|
||||
autoUpdater.quitAndInstall(false, true);
|
||||
});
|
||||
ipcMain.on('check-for-updates', async (event, url) => {
|
||||
autoUpdater.autoDownload = false;
|
||||
autoUpdater.checkForUpdates();
|
||||
});
|
||||
|
||||
function fillMissingSettings(value) {
|
||||
const res = {
|
||||
@@ -299,9 +334,9 @@ function ensureBoundsVisible(bounds) {
|
||||
}
|
||||
|
||||
function createWindow() {
|
||||
let settingsJson = {};
|
||||
const datadir = path.join(os.homedir(), '.dbgate');
|
||||
|
||||
try {
|
||||
const datadir = path.join(os.homedir(), '.dbgate');
|
||||
settingsJson = fillMissingSettings(
|
||||
JSON.parse(fs.readFileSync(path.join(datadir, 'settings.json'), { encoding: 'utf-8' }))
|
||||
);
|
||||
@@ -311,18 +346,20 @@ function createWindow() {
|
||||
}
|
||||
|
||||
let bounds = initialConfig['winBounds'];
|
||||
bounds = ensureBoundsVisible(bounds);
|
||||
if (bounds) {
|
||||
bounds = ensureBoundsVisible(bounds);
|
||||
}
|
||||
useNativeMenu = settingsJson['app.useNativeMenu'];
|
||||
|
||||
mainWindow = new BrowserWindow({
|
||||
width: 1200,
|
||||
height: 800,
|
||||
title: 'DbGate',
|
||||
title: isProApp() ? 'DbGate Premium' : 'DbGate',
|
||||
frame: useNativeMenu,
|
||||
titleBarStyle: useNativeMenu ? undefined : 'hidden',
|
||||
...bounds,
|
||||
icon: os.platform() == 'win32' ? 'icon.ico' : path.resolve(__dirname, '../icon.png'),
|
||||
partition: 'persist:dbgate',
|
||||
partition: isProApp() ? 'persist:dbgate-premium' : 'persist:dbgate',
|
||||
webPreferences: {
|
||||
nodeIntegration: true,
|
||||
contextIsolation: false,
|
||||
@@ -350,24 +387,27 @@ function createWindow() {
|
||||
});
|
||||
mainWindow.on('close', () => {
|
||||
try {
|
||||
fs.writeFileSync(
|
||||
configRootPath,
|
||||
JSON.stringify({
|
||||
winBounds: mainWindow.getBounds(),
|
||||
winIsMaximized: mainWindow.isMaximized(),
|
||||
disableAutoUpgrade,
|
||||
}),
|
||||
'utf-8'
|
||||
);
|
||||
if (saveConfigOnExit) {
|
||||
fs.writeFileSync(
|
||||
configRootPath,
|
||||
JSON.stringify({
|
||||
winBounds: mainWindow.getBounds(),
|
||||
winIsMaximized: mainWindow.isMaximized(),
|
||||
}),
|
||||
'utf-8'
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
console.log('Error saving config-root:', err.message);
|
||||
}
|
||||
});
|
||||
|
||||
// mainWindow.webContents.toggleDevTools();
|
||||
|
||||
mainWindow.loadURL(startUrl);
|
||||
if (os.platform() == 'linux') {
|
||||
mainWindow.setIcon(path.resolve(__dirname, '../icon.png'));
|
||||
}
|
||||
// mainWindow.webContents.toggleDevTools();
|
||||
|
||||
mainWindow.on('maximize', () => {
|
||||
mainWindow.webContents.send('setIsMaximized', true);
|
||||
@@ -390,7 +430,6 @@ function createWindow() {
|
||||
);
|
||||
|
||||
global.API_PACKAGE = apiPackage;
|
||||
global.NATIVE_MODULES = path.join(__dirname, 'nativeModules');
|
||||
|
||||
// console.log('global.API_PACKAGE', global.API_PACKAGE);
|
||||
const api = require(apiPackage);
|
||||
@@ -422,13 +461,61 @@ function createWindow() {
|
||||
});
|
||||
}
|
||||
|
||||
function changeAppUpdateStatus(status) {
|
||||
appUpdateStatus = status;
|
||||
mainWindow.webContents.send('app-update-status', appUpdateStatus);
|
||||
}
|
||||
|
||||
autoUpdater.on('checking-for-update', () => {
|
||||
console.log('Checking for updates');
|
||||
changeAppUpdateStatus({
|
||||
icon: 'icon loading',
|
||||
message: 'Checking for updates...',
|
||||
});
|
||||
});
|
||||
|
||||
autoUpdater.on('update-available', info => {
|
||||
console.log('Update available', info);
|
||||
if (autoUpdater.autoDownload) {
|
||||
changeAppUpdateStatus({
|
||||
icon: 'icon loading',
|
||||
message: `Downloading update...`,
|
||||
});
|
||||
} else {
|
||||
mainWindow.webContents.send('update-available', info.version);
|
||||
changeAppUpdateStatus({
|
||||
icon: 'icon download',
|
||||
message: `Update available`,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
autoUpdater.on('update-not-available', info => {
|
||||
console.log('Update not available', info);
|
||||
changeAppUpdateStatus({
|
||||
icon: 'icon check',
|
||||
message: `No new updates`,
|
||||
});
|
||||
});
|
||||
|
||||
autoUpdater.on('update-downloaded', info => {
|
||||
console.log('Update downloaded from', info);
|
||||
changeAppUpdateStatus({
|
||||
icon: 'icon download',
|
||||
message: `Downloaded ${info.version}`,
|
||||
});
|
||||
mainWindow.webContents.send('downloaded-new-version', info.version);
|
||||
});
|
||||
|
||||
autoUpdater.on('error', error => {
|
||||
changeAppUpdateStatus({
|
||||
icon: 'icon error',
|
||||
message: `Autoupdate error`,
|
||||
});
|
||||
console.error('Update error', error);
|
||||
});
|
||||
|
||||
function onAppReady() {
|
||||
if (disableAutoUpgrade) {
|
||||
console.log('Auto-upgrade is disabled, run dbgate --enable-auto-upgrade to enable');
|
||||
}
|
||||
if (!process.env.DEVMODE && !disableAutoUpgrade) {
|
||||
autoUpdater.checkForUpdatesAndNotify();
|
||||
}
|
||||
createWindow();
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
module.exports = ({ editMenu }) => [
|
||||
module.exports = ({ editMenu, isMac }) => [
|
||||
{
|
||||
label: 'File',
|
||||
submenu: [
|
||||
@@ -9,9 +9,9 @@ module.exports = ({ editMenu }) => [
|
||||
{ command: 'new.queryDesign', hideDisabled: true },
|
||||
{ command: 'new.diagram', hideDisabled: true },
|
||||
{ command: 'new.perspective', hideDisabled: true },
|
||||
{ command: 'new.freetable', hideDisabled: true },
|
||||
{ command: 'new.shell', hideDisabled: true },
|
||||
{ command: 'new.jsonl', hideDisabled: true },
|
||||
{ command: 'new.modelTransform', hideDisabled: true },
|
||||
{ divider: true },
|
||||
{ command: 'file.open', hideDisabled: true },
|
||||
{ command: 'file.openArchive', hideDisabled: true },
|
||||
@@ -24,20 +24,6 @@ module.exports = ({ editMenu }) => [
|
||||
{ command: 'app.disconnect', hideDisabled: true, skipInApp: true },
|
||||
],
|
||||
},
|
||||
{
|
||||
label: 'Window',
|
||||
submenu: [
|
||||
{ command: 'tabs.closeTab', hideDisabled: false },
|
||||
{ command: 'tabs.closeAll', hideDisabled: false },
|
||||
{ command: 'tabs.closeTabsWithCurrentDb', hideDisabled: false },
|
||||
{ command: 'tabs.closeTabsButCurrentDb', hideDisabled: false },
|
||||
{ divider: true },
|
||||
{ command: 'app.zoomIn', hideDisabled: true },
|
||||
{ command: 'app.zoomOut', hideDisabled: true },
|
||||
{ command: 'app.zoomReset', hideDisabled: true },
|
||||
],
|
||||
},
|
||||
|
||||
editMenu
|
||||
? {
|
||||
label: 'Edit',
|
||||
@@ -75,6 +61,15 @@ module.exports = ({ editMenu }) => [
|
||||
{ divider: true },
|
||||
{ command: 'theme.changeTheme', hideDisabled: true },
|
||||
{ command: 'settings.show' },
|
||||
{ divider: true },
|
||||
{ command: 'tabs.closeTab', hideDisabled: false },
|
||||
{ command: 'tabs.closeAll', hideDisabled: false },
|
||||
{ command: 'tabs.closeTabsWithCurrentDb', hideDisabled: false },
|
||||
{ command: 'tabs.closeTabsButCurrentDb', hideDisabled: false },
|
||||
{ divider: true },
|
||||
{ command: 'app.zoomIn', hideDisabled: true },
|
||||
{ command: 'app.zoomOut', hideDisabled: true },
|
||||
{ command: 'app.zoomReset', hideDisabled: true },
|
||||
],
|
||||
},
|
||||
{
|
||||
@@ -91,8 +86,17 @@ module.exports = ({ editMenu }) => [
|
||||
{ command: 'folder.showLogs', hideDisabled: true },
|
||||
{ command: 'folder.showData', hideDisabled: true },
|
||||
{ command: 'new.gist', hideDisabled: true },
|
||||
{ command: 'app.resetSettings', hideDisabled: true },
|
||||
],
|
||||
},
|
||||
...(isMac
|
||||
? [
|
||||
{
|
||||
role: 'window',
|
||||
submenu: [{ role: 'minimize' }, { role: 'zoom' }, { type: 'separator' }, { role: 'front' }],
|
||||
},
|
||||
]
|
||||
: []),
|
||||
{
|
||||
label: 'Help',
|
||||
submenu: [
|
||||
@@ -104,6 +108,8 @@ module.exports = ({ editMenu }) => [
|
||||
{ command: 'settings.commands', hideDisabled: true },
|
||||
{ command: 'tabs.changelog', hideDisabled: true },
|
||||
{ command: 'about.show', hideDisabled: true },
|
||||
{ divider: true },
|
||||
{ command: 'file.checkForUpdates', hideDisabled: true },
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
const content = require('./nativeModulesContent');
|
||||
|
||||
module.exports = content;
|
||||
9
app/src/nativeModulesContent.js
Normal file
@@ -0,0 +1,9 @@
|
||||
|
||||
// this file is generated automatically by script fillNativeModules.js, do not edit it manually
|
||||
const content = {};
|
||||
|
||||
content['better-sqlite3'] = () => require('better-sqlite3');
|
||||
content['oracledb'] = () => require('oracledb');
|
||||
|
||||
|
||||
module.exports = content;
|
||||
12
app/src/proTools.js
Normal file
@@ -0,0 +1,12 @@
|
||||
function isProApp() {
|
||||
return false;
|
||||
}
|
||||
|
||||
function checkLicense(license) {
|
||||
return null;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
isProApp,
|
||||
checkLicense,
|
||||
};
|
||||
1
app/src/updaterChannel.js
Normal file
@@ -0,0 +1 @@
|
||||
module.exports = null;
|
||||
139
app/yarn.lock
@@ -193,11 +193,6 @@
|
||||
dependencies:
|
||||
"@types/node" "*"
|
||||
|
||||
"@types/semver@^7.3.6":
|
||||
version "7.5.8"
|
||||
resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.8.tgz#8268a8c57a3e4abd25c165ecd36237db7948a55e"
|
||||
integrity sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==
|
||||
|
||||
"@types/verror@^1.10.3":
|
||||
version "1.10.10"
|
||||
resolved "https://registry.yarnpkg.com/@types/verror/-/verror-1.10.10.tgz#d5a4b56abac169bfbc8b23d291363a682e6fa087"
|
||||
@@ -476,6 +471,11 @@ buffer-crc32@~0.2.3:
|
||||
resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242"
|
||||
integrity sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==
|
||||
|
||||
buffer-equal-constant-time@1.0.1:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819"
|
||||
integrity sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==
|
||||
|
||||
buffer-equal@1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/buffer-equal/-/buffer-equal-1.0.0.tgz#59616b498304d556abd466966b22eeda3eca5fbe"
|
||||
@@ -499,14 +499,6 @@ buffer@^5.1.0, buffer@^5.5.0:
|
||||
base64-js "^1.3.1"
|
||||
ieee754 "^1.1.13"
|
||||
|
||||
builder-util-runtime@8.9.2:
|
||||
version "8.9.2"
|
||||
resolved "https://registry.yarnpkg.com/builder-util-runtime/-/builder-util-runtime-8.9.2.tgz#a9669ae5b5dcabfe411ded26678e7ae997246c28"
|
||||
integrity sha512-rhuKm5vh7E0aAmT6i8aoSfEjxzdYEFX7zDApK+eNgOhjofnWb74d9SRJv0H/8nsgOkos0TZ4zxW0P8J4N7xQ2A==
|
||||
dependencies:
|
||||
debug "^4.3.2"
|
||||
sax "^1.2.4"
|
||||
|
||||
builder-util-runtime@9.0.2:
|
||||
version "9.0.2"
|
||||
resolved "https://registry.yarnpkg.com/builder-util-runtime/-/builder-util-runtime-9.0.2.tgz#dc54f8581bbcf1e0428da4483fa46d09524be857"
|
||||
@@ -515,6 +507,14 @@ builder-util-runtime@9.0.2:
|
||||
debug "^4.3.4"
|
||||
sax "^1.2.4"
|
||||
|
||||
builder-util-runtime@9.2.5:
|
||||
version "9.2.5"
|
||||
resolved "https://registry.yarnpkg.com/builder-util-runtime/-/builder-util-runtime-9.2.5.tgz#0afdffa0adb5c84c14926c7dd2cf3c6e96e9be83"
|
||||
integrity sha512-HjIDfhvqx/8B3TDN4GbABQcgpewTU4LMRTQPkVpKYV3lsuxEJoIfvg09GyWTNmfVNSUAYf+fbTN//JX4TH20pg==
|
||||
dependencies:
|
||||
debug "^4.3.4"
|
||||
sax "^1.2.4"
|
||||
|
||||
builder-util@23.0.9:
|
||||
version "23.0.9"
|
||||
resolved "https://registry.yarnpkg.com/builder-util/-/builder-util-23.0.9.tgz#8b1aeeeee679060e39ad2bd0f50f5b3f3cb53a59"
|
||||
@@ -786,7 +786,7 @@ crypto-random-string@^2.0.0:
|
||||
resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5"
|
||||
integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==
|
||||
|
||||
debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4:
|
||||
debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.4:
|
||||
version "4.3.4"
|
||||
resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
|
||||
integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
|
||||
@@ -927,6 +927,13 @@ duplexer3@^0.1.4:
|
||||
resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.5.tgz#0b5e4d7bad5de8901ea4440624c8e1d20099217e"
|
||||
integrity sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA==
|
||||
|
||||
ecdsa-sig-formatter@1.0.11:
|
||||
version "1.0.11"
|
||||
resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf"
|
||||
integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==
|
||||
dependencies:
|
||||
safe-buffer "^5.0.1"
|
||||
|
||||
ejs@^3.1.7:
|
||||
version "3.1.10"
|
||||
resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.10.tgz#69ab8358b14e896f80cc39e62087b88500c3ac3b"
|
||||
@@ -1000,19 +1007,19 @@ electron-publish@23.0.9:
|
||||
lazy-val "^1.0.5"
|
||||
mime "^2.5.2"
|
||||
|
||||
electron-updater@^4.6.1:
|
||||
version "4.6.5"
|
||||
resolved "https://registry.yarnpkg.com/electron-updater/-/electron-updater-4.6.5.tgz#e9a75458bbfd6bb41a58a829839e150ad2eb2d3d"
|
||||
integrity sha512-kdTly8O9mSZfm9fslc1mnCY+mYOeaYRy7ERa2Fed240u01BKll3aiupzkd07qKw69KvhBSzuHroIW3mF0D8DWA==
|
||||
electron-updater@^6.3.4:
|
||||
version "6.3.4"
|
||||
resolved "https://registry.yarnpkg.com/electron-updater/-/electron-updater-6.3.4.tgz#3934bc89875bb524c2cbbd11041114e97c0c2496"
|
||||
integrity sha512-uZUo7p1Y53G4tl6Cgw07X1yF8Jlz6zhaL7CQJDZ1fVVkOaBfE2cWtx80avwDVi8jHp+I/FWawrMgTAeCCNIfAg==
|
||||
dependencies:
|
||||
"@types/semver" "^7.3.6"
|
||||
builder-util-runtime "8.9.2"
|
||||
fs-extra "^10.0.0"
|
||||
builder-util-runtime "9.2.5"
|
||||
fs-extra "^10.1.0"
|
||||
js-yaml "^4.1.0"
|
||||
lazy-val "^1.0.5"
|
||||
lodash.escaperegexp "^4.1.2"
|
||||
lodash.isequal "^4.5.0"
|
||||
semver "^7.3.5"
|
||||
semver "^7.6.3"
|
||||
tiny-typed-emitter "^2.1.0"
|
||||
|
||||
electron@30.0.2:
|
||||
version "30.0.2"
|
||||
@@ -1663,6 +1670,39 @@ jsonfile@^6.0.1:
|
||||
optionalDependencies:
|
||||
graceful-fs "^4.1.6"
|
||||
|
||||
jsonwebtoken@^9.0.2:
|
||||
version "9.0.2"
|
||||
resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz#65ff91f4abef1784697d40952bb1998c504caaf3"
|
||||
integrity sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==
|
||||
dependencies:
|
||||
jws "^3.2.2"
|
||||
lodash.includes "^4.3.0"
|
||||
lodash.isboolean "^3.0.3"
|
||||
lodash.isinteger "^4.0.4"
|
||||
lodash.isnumber "^3.0.3"
|
||||
lodash.isplainobject "^4.0.6"
|
||||
lodash.isstring "^4.0.1"
|
||||
lodash.once "^4.0.0"
|
||||
ms "^2.1.1"
|
||||
semver "^7.5.4"
|
||||
|
||||
jwa@^1.4.1:
|
||||
version "1.4.1"
|
||||
resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a"
|
||||
integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==
|
||||
dependencies:
|
||||
buffer-equal-constant-time "1.0.1"
|
||||
ecdsa-sig-formatter "1.0.11"
|
||||
safe-buffer "^5.0.1"
|
||||
|
||||
jws@^3.2.2:
|
||||
version "3.2.2"
|
||||
resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304"
|
||||
integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==
|
||||
dependencies:
|
||||
jwa "^1.4.1"
|
||||
safe-buffer "^5.0.1"
|
||||
|
||||
keyv@^3.0.0:
|
||||
version "3.1.0"
|
||||
resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9"
|
||||
@@ -1718,11 +1758,46 @@ lodash.escaperegexp@^4.1.2:
|
||||
resolved "https://registry.yarnpkg.com/lodash.escaperegexp/-/lodash.escaperegexp-4.1.2.tgz#64762c48618082518ac3df4ccf5d5886dae20347"
|
||||
integrity sha512-TM9YBvyC84ZxE3rgfefxUWiQKLilstD6k7PTGt6wfbtXF8ixIJLOL3VYyV/z+ZiPLsVxAsKAFVwWlWeb2Y8Yyw==
|
||||
|
||||
lodash.includes@^4.3.0:
|
||||
version "4.3.0"
|
||||
resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f"
|
||||
integrity sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==
|
||||
|
||||
lodash.isboolean@^3.0.3:
|
||||
version "3.0.3"
|
||||
resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6"
|
||||
integrity sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==
|
||||
|
||||
lodash.isequal@^4.5.0:
|
||||
version "4.5.0"
|
||||
resolved "https://registry.yarnpkg.com/lodash.isequal/-/lodash.isequal-4.5.0.tgz#415c4478f2bcc30120c22ce10ed3226f7d3e18e0"
|
||||
integrity sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==
|
||||
|
||||
lodash.isinteger@^4.0.4:
|
||||
version "4.0.4"
|
||||
resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343"
|
||||
integrity sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==
|
||||
|
||||
lodash.isnumber@^3.0.3:
|
||||
version "3.0.3"
|
||||
resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc"
|
||||
integrity sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==
|
||||
|
||||
lodash.isplainobject@^4.0.6:
|
||||
version "4.0.6"
|
||||
resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb"
|
||||
integrity sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==
|
||||
|
||||
lodash.isstring@^4.0.1:
|
||||
version "4.0.1"
|
||||
resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451"
|
||||
integrity sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==
|
||||
|
||||
lodash.once@^4.0.0:
|
||||
version "4.1.1"
|
||||
resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac"
|
||||
integrity sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==
|
||||
|
||||
lodash@^4.17.15:
|
||||
version "4.17.21"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
|
||||
@@ -1860,6 +1935,11 @@ ms@2.1.2:
|
||||
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
|
||||
integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
|
||||
|
||||
ms@^2.1.1:
|
||||
version "2.1.3"
|
||||
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
|
||||
integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
|
||||
|
||||
msnodesqlv8@^4.2.1:
|
||||
version "4.2.1"
|
||||
resolved "https://registry.yarnpkg.com/msnodesqlv8/-/msnodesqlv8-4.2.1.tgz#59f2930e7f3b9b201d7288425a6ffa923ea1a573"
|
||||
@@ -1944,6 +2024,11 @@ open@^7.4.2:
|
||||
is-docker "^2.0.0"
|
||||
is-wsl "^2.1.1"
|
||||
|
||||
oracledb@^6.6.0:
|
||||
version "6.6.0"
|
||||
resolved "https://registry.yarnpkg.com/oracledb/-/oracledb-6.6.0.tgz#bb40adbe81a84a1e544c48af9f120c61f030e936"
|
||||
integrity sha512-T3dx+o3j+tVN53wQyr4yGTmoPHLy+a2V8yb1T2PmWrsj3ZlSt2Yu1BgV2yTDqnmBZYpRi/I3yJXRCOHHD7PiyA==
|
||||
|
||||
os-tmpdir@~1.0.2:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
|
||||
@@ -2317,6 +2402,11 @@ semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7:
|
||||
resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.1.tgz#60bfe090bf907a25aa8119a72b9f90ef7ca281b2"
|
||||
integrity sha512-f/vbBsu+fOiYt+lmwZV0rVwJScl46HppnOA1ZvIuBWKOTlllpyJ3bfVax76/OrhCH38dyxoDIA8K7uB963IYgA==
|
||||
|
||||
semver@^7.5.4, semver@^7.6.3:
|
||||
version "7.6.3"
|
||||
resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143"
|
||||
integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==
|
||||
|
||||
serialize-error@^7.0.1:
|
||||
version "7.0.1"
|
||||
resolved "https://registry.yarnpkg.com/serialize-error/-/serialize-error-7.0.1.tgz#f1360b0447f61ffb483ec4157c737fab7d778e18"
|
||||
@@ -2555,6 +2645,11 @@ through2@^2.0.1:
|
||||
readable-stream "~2.3.6"
|
||||
xtend "~4.0.1"
|
||||
|
||||
tiny-typed-emitter@^2.1.0:
|
||||
version "2.1.0"
|
||||
resolved "https://registry.yarnpkg.com/tiny-typed-emitter/-/tiny-typed-emitter-2.1.0.tgz#b3b027fdd389ff81a152c8e847ee2f5be9fad7b5"
|
||||
integrity sha512-qVtvMxeXbVej0cQWKqVSSAHmKZEHAvxdF8HEUBFWts8h+xEo5m/lEiPakuyZ3BnCBjOD8i24kzNOiOLLgsSxhA==
|
||||
|
||||
tmp-promise@^3.0.2:
|
||||
version "3.0.3"
|
||||
resolved "https://registry.yarnpkg.com/tmp-promise/-/tmp-promise-3.0.3.tgz#60a1a1cc98c988674fcbfd23b6e3367bdeac4ce7"
|
||||
|
||||
19
common/buildPluginExternals.js
Normal file
@@ -0,0 +1,19 @@
|
||||
const useBundleExternals = require('./useBundleExternals');
const getBundleExternals = require('./getBundleExternals');

/**
 * Builds a webpack "externals" map for a plugin from its package.json:
 * every (optional) dependency is mapped to `commonjs <name>` so webpack
 * leaves it to be require()d at runtime instead of bundling it.
 *
 * When the useBundleExternals flag (a string, 'true'/'false') is enabled,
 * the fixed list of volatile packages is used instead of the package.json.
 *
 * @param {object} packageJson - parsed package.json of the plugin
 * @returns {Record<string, string>} webpack externals configuration
 */
function buildExternalsFromDependencies(packageJson) {
  // Feature switch shared across build scripts; note it is a string flag.
  if (useBundleExternals === 'true') {
    return getBundleExternals();
  }

  const { dependencies = {}, optionalDependencies = {} } = packageJson;
  const externals = {};
  for (const dep of [...Object.keys(dependencies), ...Object.keys(optionalDependencies)]) {
    externals[dep] = `commonjs ${dep}`;
  }
  return externals;
}

module.exports = buildExternalsFromDependencies;
|
||||
33
common/defineVolatileDependencies.js
Normal file
@@ -0,0 +1,33 @@
|
||||
// Generates a minimal package.json (dependencies + optionalDependencies only)
// in the directory given as the first CLI argument, containing just the
// "volatile" packages — taken with their versions from packages/api/package.json.
// Usage: node common/defineVolatileDependencies.js <targetDirectory>
const directory = process.argv[2];
const fs = require('fs');

const volatilePackages = require('./volatilePackages');
// packages/api/package.json is the source of truth for volatile package versions
const apiPackageJson = JSON.parse(fs.readFileSync(`packages/api/package.json`, { encoding: 'utf-8' }));

const dependencies = {};
const optionalDependencies = {};
for (const pkg of volatilePackages) {
  // msnodesqlv8 is a Windows-only native SQL Server driver; skip elsewhere
  if (pkg === 'msnodesqlv8' && process.platform !== 'win32') {
    continue;
  }

  // Optional chaining: either section may be absent from the api package.json
  if (apiPackageJson.dependencies?.[pkg]) {
    dependencies[pkg] = apiPackageJson.dependencies[pkg];
  }
  if (apiPackageJson.optionalDependencies?.[pkg]) {
    optionalDependencies[pkg] = apiPackageJson.optionalDependencies[pkg];
  }
}

fs.writeFileSync(
  `${directory}/package.json`,
  JSON.stringify(
    {
      dependencies,
      optionalDependencies,
    },
    null,
    2
  ),
  'utf-8'
);
|
||||
10
common/getBundleExternals.js
Normal file
@@ -0,0 +1,10 @@
|
||||
const volatilePackages = require('./volatilePackages');

/**
 * Builds the webpack externals map for the fixed list of volatile packages:
 * each package name maps to `commonjs <name>`, so webpack never bundles it
 * and it is resolved with require() at runtime instead.
 *
 * @returns {Record<string, string>} externals configuration object
 */
function getBundleExternals() {
  const externals = {};
  for (const pkg of volatilePackages) {
    externals[pkg] = `commonjs ${pkg}`;
  }
  return externals;
}

module.exports = getBundleExternals;
|
||||
1
common/useBundleExternals.js
Normal file
@@ -0,0 +1 @@
|
||||
// Build-time feature flag consumed by buildPluginExternals.js. Kept as the
// string 'true'/'false' (not a boolean) — callers compare against 'true'.
// When 'true', webpack externals come from the fixed volatilePackages list
// instead of being derived from each plugin's package.json.
module.exports = 'false';
|
||||
24
common/volatilePackages.js
Normal file
@@ -0,0 +1,24 @@
|
||||
// Packages that are never bundled by webpack; they are marked as commonjs
// externals (see getBundleExternals.js) and resolved with require() at runtime.
// defineVolatileDependencies.js copies their versions from packages/api/package.json.

const volatilePackages = [
  '@clickhouse/client',
  'bson', // already bundled elsewhere and used by mongodb
  'mongodb',
  'mongodb-client-encryption',
  'tedious',
  'msnodesqlv8', // Windows-only native driver — skipped on other platforms by consumers
  'mysql2',
  'oracledb',
  'pg-copy-streams',
  'pg',
  'ioredis',
  'node-redis-dump2',
  'better-sqlite3',
  '@azure/cosmos',
  '@aws-sdk/rds-signer',
  'activedirectory2',
  'axios',
  'ssh2',
];

module.exports = volatilePackages;
|
||||
@@ -1,23 +0,0 @@
|
||||
const fs = require('fs');
|
||||
|
||||
let fillContent = '';
|
||||
|
||||
if (process.platform == 'win32') {
|
||||
fillContent += `content.msnodesqlv8 = () => require('msnodesqlv8');`;
|
||||
}
|
||||
fillContent += `content['better-sqlite3'] = () => require('better-sqlite3');`;
|
||||
|
||||
const getContent = empty => `
|
||||
// this file is generated automatically by script fillNativeModules.js, do not edit it manually
|
||||
const content = {};
|
||||
|
||||
${empty ? '' : fillContent}
|
||||
|
||||
module.exports = content;
|
||||
`;
|
||||
|
||||
fs.writeFileSync(
|
||||
'packages/api/src/nativeModulesContent.js',
|
||||
getContent(process.argv.includes('--electron') ? true : false)
|
||||
);
|
||||
fs.writeFileSync('app/src/nativeModulesContent.js', getContent(false));
|
||||
@@ -5,9 +5,13 @@ const { testWrapper } = require('../tools');
|
||||
const engines = require('../engines');
|
||||
const { getAlterDatabaseScript, extendDatabaseInfo, generateDbPairingId } = require('dbgate-tools');
|
||||
|
||||
function flatSource() {
|
||||
const initSql = ['CREATE TABLE t1 (id int primary key)', 'CREATE TABLE t2 (id int primary key)'];
|
||||
|
||||
function flatSource(engineCond = x => !x.skipReferences) {
|
||||
return _.flatten(
|
||||
engines.map(engine => (engine.objects || []).map(object => [engine.label, object.type, object, engine]))
|
||||
engines
|
||||
.filter(engineCond)
|
||||
.map(engine => (engine.objects || []).map(object => [engine.label, object.type, object, engine]))
|
||||
);
|
||||
}
|
||||
|
||||
@@ -41,7 +45,7 @@ async function testDatabaseDiff(conn, driver, mangle, createObject = null) {
|
||||
}
|
||||
|
||||
describe('Alter database', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
|
||||
'Drop referenced table - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDiff(conn, driver, db => {
|
||||
@@ -64,4 +68,24 @@ describe('Alter database', () => {
|
||||
expect(db[type].length).toEqual(0);
|
||||
})
|
||||
);
|
||||
|
||||
test.each(flatSource(x => x.supportRenameSqlObject))(
|
||||
'Rename object - %s - %s',
|
||||
testWrapper(async (conn, driver, type, object, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
|
||||
await driver.query(conn, object.create1, { discardResult: true });
|
||||
|
||||
const structure = extendDatabaseInfo(await driver.analyseFull(conn));
|
||||
|
||||
const dmp = driver.createDumper();
|
||||
dmp.renameSqlObject(structure[type][0], 'renamed1');
|
||||
|
||||
await driver.query(conn, dmp.s);
|
||||
|
||||
const structure2 = await driver.analyseFull(conn);
|
||||
expect(structure2[type].length).toEqual(1);
|
||||
expect(structure2[type][0].pureName).toEqual('renamed1');
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
@@ -6,39 +6,47 @@ const engines = require('../engines');
|
||||
const crypto = require('crypto');
|
||||
const { getAlterTableScript, extendDatabaseInfo, generateDbPairingId } = require('dbgate-tools');
|
||||
|
||||
function pickImportantTableInfo(table) {
|
||||
function pickImportantTableInfo(engine, table) {
|
||||
const props = ['columnName', 'defaultValue'];
|
||||
if (!engine.skipNullability) props.push('notNull');
|
||||
if (!engine.skipAutoIncrement) props.push('autoIncrement');
|
||||
return {
|
||||
pureName: table.pureName,
|
||||
columns: table.columns
|
||||
.filter(x => x.columnName != 'rowid')
|
||||
.map(fp.pick(['columnName', 'notNull', 'autoIncrement'])),
|
||||
.map(fp.pick(props))
|
||||
.map(props => _.omitBy(props, x => x == null)),
|
||||
};
|
||||
}
|
||||
|
||||
function checkTableStructure(t1, t2) {
|
||||
function checkTableStructure(engine, t1, t2) {
|
||||
// expect(t1.pureName).toEqual(t2.pureName)
|
||||
expect(pickImportantTableInfo(t1)).toEqual(pickImportantTableInfo(t2));
|
||||
expect(pickImportantTableInfo(engine, t1)).toEqual(pickImportantTableInfo(engine, t2));
|
||||
}
|
||||
|
||||
async function testTableDiff(conn, driver, mangle) {
|
||||
async function testTableDiff(engine, conn, driver, mangle) {
|
||||
await driver.query(conn, `create table t0 (id int not null primary key)`);
|
||||
|
||||
await driver.query(
|
||||
conn,
|
||||
`create table t1 (
|
||||
col_pk int not null primary key,
|
||||
col_std int null,
|
||||
col_def int null default 12,
|
||||
col_fk int null references t0(id),
|
||||
col_idx int null,
|
||||
col_uq int null unique,
|
||||
col_ref int null unique
|
||||
col_std int,
|
||||
col_def int default 12,
|
||||
${engine.skipReferences ? '' : 'col_fk int references t0(id),'}
|
||||
col_idx int,
|
||||
col_uq int ${engine.skipUnique ? '' : 'unique'} ,
|
||||
col_ref int ${engine.skipUnique ? '' : 'unique'}
|
||||
)`
|
||||
);
|
||||
|
||||
await driver.query(conn, `create index idx1 on t1(col_idx)`);
|
||||
if (!engine.skipIndexes) {
|
||||
await driver.query(conn, `create index idx1 on t1(col_idx)`);
|
||||
}
|
||||
|
||||
await driver.query(conn, `create table t2 (id int not null primary key, fkval int null references t1(col_ref))`);
|
||||
if (!engine.skipReferences) {
|
||||
await driver.query(conn, `create table t2 (id int not null primary key, fkval int null references t1(col_ref))`);
|
||||
}
|
||||
|
||||
const tget = x => x.tables.find(y => y.pureName == 't1');
|
||||
const structure1 = generateDbPairingId(extendDatabaseInfo(await driver.analyseFull(conn)));
|
||||
@@ -53,7 +61,7 @@ async function testTableDiff(conn, driver, mangle) {
|
||||
|
||||
const structure2Real = extendDatabaseInfo(await driver.analyseFull(conn));
|
||||
|
||||
checkTableStructure(tget(structure2Real), tget(structure2));
|
||||
checkTableStructure(engine, tget(structure2Real), tget(structure2));
|
||||
// expect(stableStringify(structure2)).toEqual(stableStringify(structure2Real));
|
||||
}
|
||||
|
||||
@@ -65,14 +73,22 @@ const TESTED_COLUMNS = ['col_pk', 'col_std', 'col_def', 'col_fk', 'col_ref', 'co
|
||||
// const TESTED_COLUMNS = ['col_ref'];
|
||||
|
||||
function engines_columns_source() {
|
||||
return _.flatten(engines.map(engine => TESTED_COLUMNS.map(column => [engine.label, column, engine])));
|
||||
return _.flatten(
|
||||
engines.map(engine =>
|
||||
TESTED_COLUMNS.filter(col => !col.endsWith('_pk') || !engine.skipPkColumnTesting).map(column => [
|
||||
engine.label,
|
||||
column,
|
||||
engine,
|
||||
])
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
describe('Alter table', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Add column - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testTableDiff(conn, driver, tbl => {
|
||||
await testTableDiff(engine, conn, driver, tbl => {
|
||||
tbl.columns.push({
|
||||
columnName: 'added',
|
||||
dataType: 'int',
|
||||
@@ -87,7 +103,7 @@ describe('Alter table', () => {
|
||||
test.each(engines_columns_source())(
|
||||
'Drop column - %s - %s',
|
||||
testWrapper(async (conn, driver, column, engine) => {
|
||||
await testTableDiff(conn, driver, tbl => (tbl.columns = tbl.columns.filter(x => x.columnName != column)));
|
||||
await testTableDiff(engine, conn, driver, tbl => (tbl.columns = tbl.columns.filter(x => x.columnName != column)));
|
||||
})
|
||||
);
|
||||
|
||||
@@ -95,6 +111,7 @@ describe('Alter table', () => {
|
||||
'Change nullability - %s - %s',
|
||||
testWrapper(async (conn, driver, column, engine) => {
|
||||
await testTableDiff(
|
||||
engine,
|
||||
conn,
|
||||
driver,
|
||||
tbl => (tbl.columns = tbl.columns.map(x => (x.columnName == column ? { ...x, notNull: true } : x)))
|
||||
@@ -106,6 +123,7 @@ describe('Alter table', () => {
|
||||
'Rename column - %s - %s',
|
||||
testWrapper(async (conn, driver, column, engine) => {
|
||||
await testTableDiff(
|
||||
engine,
|
||||
conn,
|
||||
driver,
|
||||
tbl => (tbl.columns = tbl.columns.map(x => (x.columnName == column ? { ...x, columnName: 'col_renamed' } : x)))
|
||||
@@ -116,9 +134,46 @@ describe('Alter table', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Drop index - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testTableDiff(conn, driver, tbl => {
|
||||
await testTableDiff(engine, conn, driver, tbl => {
|
||||
tbl.indexes = [];
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Add default value - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testTableDiff(engine, conn, driver, tbl => {
|
||||
tbl.columns.find(x => x.columnName == 'col_std').defaultValue = '123';
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Unset default value - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testTableDiff(engine, conn, driver, tbl => {
|
||||
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = undefined;
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Change default value - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testTableDiff(engine, conn, driver, tbl => {
|
||||
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = '567';
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
// test.each(engines.map(engine => [engine.label, engine]))(
|
||||
// 'Change autoincrement - %s',
|
||||
// testWrapper(async (conn, driver, engine) => {
|
||||
// await testTableDiff(engine, conn, driver, tbl => {
|
||||
// tbl.columns.find(x => x.columnName == 'col_pk').autoIncrement = true;
|
||||
// });
|
||||
// })
|
||||
// );
|
||||
|
||||
});
|
||||
|
||||
@@ -5,7 +5,7 @@ const dataDuplicator = require('dbgate-api/src/shell/dataDuplicator');
|
||||
const { runCommandOnDriver } = require('dbgate-tools');
|
||||
|
||||
describe('Data duplicator', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
|
||||
'Insert simple data - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
@@ -91,4 +91,68 @@ describe('Data duplicator', () => {
|
||||
expect(res2.rows[0].cnt.toString()).toEqual('6');
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
|
||||
'Skip nullable weak refs - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't1',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't2',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
{ columnName: 'valfk', dataType: 'int', notNull: false },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp => dmp.put("insert into ~t1 (~id, ~val) values (1, 'first')"));
|
||||
|
||||
const gett2 = () =>
|
||||
stream.Readable.from([
|
||||
{ __isStreamHeader: true, __isDynamicStructure: true },
|
||||
{ id: 1, val: 'v1', valfk: 1 },
|
||||
{ id: 2, val: 'v2', valfk: 2 },
|
||||
]);
|
||||
|
||||
await dataDuplicator({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 't2',
|
||||
operation: 'copy',
|
||||
openStream: gett2,
|
||||
},
|
||||
],
|
||||
options: {
|
||||
setNullForUnresolvedNullableRefs: true,
|
||||
},
|
||||
});
|
||||
|
||||
const res1 = await driver.query(conn, `select count(*) as cnt from t1`);
|
||||
expect(res1.rows[0].cnt.toString()).toEqual('1');
|
||||
|
||||
const res2 = await driver.query(conn, `select count(*) as cnt from t2`);
|
||||
expect(res2.rows[0].cnt.toString()).toEqual('2');
|
||||
|
||||
const res3 = await driver.query(conn, `select count(*) as cnt from t2 where valfk is not null`);
|
||||
expect(res3.rows[0].cnt.toString()).toEqual('1');
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
@@ -3,6 +3,7 @@ const stream = require('stream');
|
||||
const { testWrapper } = require('../tools');
|
||||
const tableWriter = require('dbgate-api/src/shell/tableWriter');
|
||||
const copyStream = require('dbgate-api/src/shell/copyStream');
|
||||
const importDatabase = require('dbgate-api/src/shell/importDatabase');
|
||||
const fakeObjectReader = require('dbgate-api/src/shell/fakeObjectReader');
|
||||
|
||||
function createImportStream() {
|
||||
@@ -72,4 +73,29 @@ describe('DB Import', () => {
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => x.dumpFile).map(engine => [engine.label, engine]))(
|
||||
'Import SQL dump - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
// const reader = await fakeObjectReader({ delay: 10 });
|
||||
// const reader = await fakeObjectReader();
|
||||
await importDatabase({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
inputFile: engine.dumpFile,
|
||||
});
|
||||
|
||||
const structure = await driver.analyseFull(conn);
|
||||
|
||||
for (const check of engine.dumpChecks || []) {
|
||||
const res = await driver.query(conn, check.sql);
|
||||
expect(res.rows[0].res.toString()).toEqual(check.res);
|
||||
}
|
||||
|
||||
// const res1 = await driver.query(conn, `select count(*) as cnt from t1`);
|
||||
// expect(res1.rows[0].cnt.toString()).toEqual('6');
|
||||
|
||||
// const res2 = await driver.query(conn, `select count(*) as cnt from t2`);
|
||||
// expect(res2.rows[0].cnt.toString()).toEqual('6');
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1,58 +1,148 @@
|
||||
/// TODO
|
||||
|
||||
const { testWrapper } = require('../tools');
|
||||
const { testWrapper, testWrapperPrepareOnly } = require('../tools');
|
||||
const _ = require('lodash');
|
||||
const engines = require('../engines');
|
||||
const deployDb = require('dbgate-api/src/shell/deployDb');
|
||||
const { databaseInfoFromYamlModel } = require('dbgate-tools');
|
||||
const generateDeploySql = require('dbgate-api/src/shell/generateDeploySql');
|
||||
const connectUtility = require('dbgate-api/src/utility/connectUtility');
|
||||
|
||||
function checkStructure(structure, model) {
|
||||
function checkStructure(
|
||||
engine,
|
||||
structure,
|
||||
model,
|
||||
{ checkRenameDeletedObjects = false, disallowExtraObjects = false } = {}
|
||||
) {
|
||||
const expected = databaseInfoFromYamlModel(model);
|
||||
expect(structure.tables.length).toEqual(expected.tables.length);
|
||||
|
||||
for (const [realTable, expectedTable] of _.zip(
|
||||
_.sortBy(structure.tables, 'pureName'),
|
||||
_.sortBy(expected.tables, 'pureName')
|
||||
)) {
|
||||
expect(realTable.columns.length).toBeGreaterThanOrEqual(expectedTable.columns.length);
|
||||
for (const expectedTable of expected.tables) {
|
||||
const realTable = structure.tables.find(x => x.pureName == expectedTable.pureName);
|
||||
|
||||
for (const column of expectedTable.columns) {
|
||||
const realColumn = realTable.columns.find(x => x.columnName == column.columnName);
|
||||
expect(realColumn).toBeTruthy();
|
||||
if (!engine.skipNullability) {
|
||||
expect(realColumn.notNull).toEqual(column.notNull);
|
||||
}
|
||||
}
|
||||
|
||||
for (const realColumn of realTable.columns) {
|
||||
const column = expectedTable.columns.find(x => x.columnName == realColumn.columnName);
|
||||
if (!column) {
|
||||
if (checkRenameDeletedObjects) {
|
||||
expect(realColumn.columnName).toMatch(/^_deleted_/);
|
||||
}
|
||||
|
||||
if (disallowExtraObjects) {
|
||||
expect(realColumn).toBeFalsy();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const realTable of structure.tables) {
|
||||
const expectedTable = expected.tables.find(x => x.pureName == realTable.pureName);
|
||||
if (!expectedTable) {
|
||||
if (checkRenameDeletedObjects) {
|
||||
expect(realTable.pureName).toMatch(/^_deleted_/);
|
||||
}
|
||||
|
||||
if (disallowExtraObjects) {
|
||||
expect(realTable).toBeFalsy();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const expectedView of expected.views) {
|
||||
const realView = structure.views.find(x => x.pureName == expectedView.pureName);
|
||||
expect(realView).toBeTruthy();
|
||||
}
|
||||
|
||||
for (const realView of structure.views) {
|
||||
const expectedView = expected.views.find(x => x.pureName == realView.pureName);
|
||||
if (!expectedView) {
|
||||
if (disallowExtraObjects) {
|
||||
expect(realView).toBeFalsy();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function testDatabaseDeploy(conn, driver, dbModelsYaml, testEmptyLastScript) {
|
||||
async function testDatabaseDeploy(engine, conn, driver, dbModelsYaml, options) {
|
||||
const { testEmptyLastScript, finalCheckAgainstModel, markDeleted, allowDropStatements } = options || {};
|
||||
let index = 0;
|
||||
const dbdiffOptionsExtra = markDeleted
|
||||
? {
|
||||
deletedTablePrefix: '_deleted_',
|
||||
deletedColumnPrefix: '_deleted_',
|
||||
deletedSqlObjectPrefix: '_deleted_',
|
||||
}
|
||||
: {};
|
||||
dbdiffOptionsExtra.schemaMode = 'ignore';
|
||||
|
||||
for (const loadedDbModel of dbModelsYaml) {
|
||||
const { sql, isEmpty } = await generateDeploySql({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
loadedDbModel,
|
||||
});
|
||||
console.debug('Generated deploy script:', sql);
|
||||
expect(sql.toUpperCase().includes('DROP ')).toBeFalsy();
|
||||
if (_.isString(loadedDbModel)) {
|
||||
await driver.script(conn, loadedDbModel);
|
||||
} else {
|
||||
const { sql, isEmpty } = await generateDeploySql({
|
||||
systemConnection: conn.isPreparedOnly ? undefined : conn,
|
||||
connection: conn.isPreparedOnly ? conn : undefined,
|
||||
driver,
|
||||
loadedDbModel,
|
||||
dbdiffOptionsExtra,
|
||||
});
|
||||
console.debug('Generated deploy script:', sql);
|
||||
if (!allowDropStatements) {
|
||||
expect(sql.toUpperCase().includes('DROP ')).toBeFalsy();
|
||||
}
|
||||
|
||||
console.log('dbModelsYaml.length', dbModelsYaml.length, index);
|
||||
if (testEmptyLastScript && index == dbModelsYaml.length - 1) {
|
||||
expect(isEmpty).toBeTruthy();
|
||||
console.log('dbModelsYaml.length', dbModelsYaml.length, index);
|
||||
if (testEmptyLastScript && index == dbModelsYaml.length - 1) {
|
||||
expect(isEmpty).toBeTruthy();
|
||||
}
|
||||
|
||||
await deployDb({
|
||||
systemConnection: conn.isPreparedOnly ? undefined : conn,
|
||||
connection: conn.isPreparedOnly ? conn : undefined,
|
||||
driver,
|
||||
loadedDbModel,
|
||||
dbdiffOptionsExtra,
|
||||
});
|
||||
}
|
||||
|
||||
await deployDb({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
loadedDbModel,
|
||||
});
|
||||
|
||||
index++;
|
||||
}
|
||||
|
||||
const structure = await driver.analyseFull(conn);
|
||||
checkStructure(structure, dbModelsYaml[dbModelsYaml.length - 1]);
|
||||
const dbhan = conn.isPreparedOnly ? await connectUtility(driver, conn, 'read') : conn;
|
||||
const structure = await driver.analyseFull(dbhan);
|
||||
if (conn.isPreparedOnly) await driver.close(dbhan);
|
||||
checkStructure(engine, structure, finalCheckAgainstModel ?? _.findLast(dbModelsYaml, x => _.isArray(x)), options);
|
||||
}
|
||||
|
||||
describe('Deploy database', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Deploy database simple - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(conn, driver, [
|
||||
await testDatabaseDeploy(engine, conn, driver, [
|
||||
[
|
||||
{
|
||||
name: 't1.table.yaml',
|
||||
json: {
|
||||
name: 't1',
|
||||
columns: [{ name: 'id', type: 'int' }],
|
||||
primaryKey: ['id'],
|
||||
},
|
||||
},
|
||||
],
|
||||
]);
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Deploy database simple - %s - not connected',
|
||||
testWrapperPrepareOnly(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [
|
||||
[
|
||||
{
|
||||
name: 't1.table.yaml',
|
||||
@@ -71,6 +161,7 @@ describe('Deploy database', () => {
|
||||
'Deploy database simple twice - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(
|
||||
engine,
|
||||
conn,
|
||||
driver,
|
||||
[
|
||||
@@ -95,7 +186,7 @@ describe('Deploy database', () => {
|
||||
},
|
||||
],
|
||||
],
|
||||
true
|
||||
{ testEmptyLastScript: true }
|
||||
);
|
||||
})
|
||||
);
|
||||
@@ -103,7 +194,7 @@ describe('Deploy database', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Add column - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(conn, driver, [
|
||||
await testDatabaseDeploy(engine, conn, driver, [
|
||||
[
|
||||
{
|
||||
name: 't1.table.yaml',
|
||||
@@ -135,6 +226,7 @@ describe('Deploy database', () => {
|
||||
'Dont drop column - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(
|
||||
engine,
|
||||
conn,
|
||||
driver,
|
||||
[
|
||||
@@ -162,15 +254,16 @@ describe('Deploy database', () => {
|
||||
},
|
||||
],
|
||||
],
|
||||
true
|
||||
{ testEmptyLastScript: true }
|
||||
);
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
|
||||
'Foreign keys - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(
|
||||
engine,
|
||||
conn,
|
||||
driver,
|
||||
[
|
||||
@@ -217,15 +310,15 @@ describe('Deploy database', () => {
|
||||
},
|
||||
],
|
||||
],
|
||||
true
|
||||
{ testEmptyLastScript: true }
|
||||
);
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
|
||||
'Deploy preloaded data - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(conn, driver, [
|
||||
await testDatabaseDeploy(engine, conn, driver, [
|
||||
[
|
||||
{
|
||||
name: 't1.table.yaml',
|
||||
@@ -251,10 +344,10 @@ describe('Deploy database', () => {
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
|
||||
'Deploy preloaded data - update - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(conn, driver, [
|
||||
await testDatabaseDeploy(engine, conn, driver, [
|
||||
[
|
||||
{
|
||||
name: 't1.table.yaml',
|
||||
@@ -301,7 +394,7 @@ describe('Deploy database', () => {
|
||||
test.each(engines.enginesPostgre.map(engine => [engine.label, engine]))(
|
||||
'Current timestamp default value - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(conn, driver, [
|
||||
await testDatabaseDeploy(engine, conn, driver, [
|
||||
[
|
||||
{
|
||||
name: 't1.table.yaml',
|
||||
@@ -326,4 +419,344 @@ describe('Deploy database', () => {
|
||||
expect(res.rows[0].val.toString().substring(0, 2)).toEqual('20');
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipChangeColumn).map(engine => [engine.label, engine]))(
|
||||
'Change column to NOT NULL column with default - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [
|
||||
[
|
||||
{
|
||||
name: 't1.table.yaml',
|
||||
json: {
|
||||
name: 't1',
|
||||
columns: [
|
||||
{ name: 'id', type: 'int', notNull: true },
|
||||
{ name: 'val', type: 'int' },
|
||||
],
|
||||
|
||||
primaryKey: ['id'],
|
||||
},
|
||||
},
|
||||
],
|
||||
'insert into t1 (id, val) values (1, 1); insert into t1 (id) values (2)',
|
||||
[
|
||||
{
|
||||
name: 't1.table.yaml',
|
||||
json: {
|
||||
name: 't1',
|
||||
columns: [
|
||||
{ name: 'id', type: 'int', notNull: true },
|
||||
{ name: 'val', type: 'int', notNull: true, default: '20' },
|
||||
],
|
||||
primaryKey: ['id'],
|
||||
},
|
||||
},
|
||||
],
|
||||
'insert into t1 (id) values (3);',
|
||||
]);
|
||||
|
||||
const res1 = await driver.query(conn, `select val from t1 where id = 1`);
|
||||
expect(res1.rows[0].val).toEqual(1);
|
||||
|
||||
const res2 = await driver.query(conn, `select val from t1 where id = 2`);
|
||||
expect(res2.rows[0].val).toEqual(20);
|
||||
|
||||
const res3 = await driver.query(conn, `select val from t1 where id = 3`);
|
||||
expect(res2.rows[0].val).toEqual(20);
|
||||
})
|
||||
);
|
||||
|
||||
const T1 = {
|
||||
name: 't1.table.yaml',
|
||||
json: {
|
||||
name: 't1',
|
||||
columns: [
|
||||
{ name: 'id', type: 'int' },
|
||||
{ name: 'val', type: 'int' },
|
||||
],
|
||||
primaryKey: ['id'],
|
||||
},
|
||||
};
|
||||
|
||||
const T2 = {
|
||||
name: 't2.table.yaml',
|
||||
json: {
|
||||
name: 't2',
|
||||
columns: [
|
||||
{ name: 'id', type: 'int' },
|
||||
{ name: 'val', type: 'int' },
|
||||
],
|
||||
primaryKey: ['id'],
|
||||
},
|
||||
};
|
||||
|
||||
const T1_DELETED = {
|
||||
name: '_deleted_t1.table.yaml',
|
||||
json: {
|
||||
name: '_deleted_t1',
|
||||
columns: [
|
||||
{ name: 'id', type: 'int' },
|
||||
{ name: 'val', type: 'int' },
|
||||
],
|
||||
primaryKey: ['id'],
|
||||
},
|
||||
};
|
||||
|
||||
const T1_NO_VAL = {
|
||||
name: 't1.table.yaml',
|
||||
json: {
|
||||
name: 't1',
|
||||
columns: [{ name: 'id', type: 'int' }],
|
||||
primaryKey: ['id'],
|
||||
},
|
||||
};
|
||||
|
||||
const T1_DELETED_VAL = {
|
||||
name: 't1.table.yaml',
|
||||
json: {
|
||||
name: 't1',
|
||||
columns: [
|
||||
{ name: 'id', type: 'int' },
|
||||
{ name: '_deleted_val', type: 'int' },
|
||||
],
|
||||
primaryKey: ['id'],
|
||||
},
|
||||
};
|
||||
|
||||
const V1 = {
|
||||
name: 'v1.view.sql',
|
||||
text: 'create view v1 as select * from t1',
|
||||
};
|
||||
|
||||
const V1_VARIANT2 = {
|
||||
name: 'v1.view.sql',
|
||||
text: 'create view v1 as select 1 as c1',
|
||||
};
|
||||
|
||||
const V1_DELETED = {
|
||||
name: '_deleted_v1.view.sql',
|
||||
text: 'create view _deleted_v1 as select * from t1',
|
||||
};
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Dont remove column - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [[T1], [T1_NO_VAL]], {
|
||||
finalCheckAgainstModel: [T1],
|
||||
disallowExtraObjects: true,
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Dont remove table - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [[T1], []], {
|
||||
finalCheckAgainstModel: [T1],
|
||||
disallowExtraObjects: true,
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Mark table removed - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [[T1], [], []], {
|
||||
markDeleted: true,
|
||||
disallowExtraObjects: true,
|
||||
finalCheckAgainstModel: [T1_DELETED],
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(engine => engine.supportRenameSqlObject).map(engine => [engine.label, engine]))(
|
||||
'Mark view removed - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [[T1, V1], [T1], [T1]], {
|
||||
markDeleted: true,
|
||||
disallowExtraObjects: true,
|
||||
finalCheckAgainstModel: [T1, V1_DELETED],
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Mark column removed - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [[T1], [T1_NO_VAL]], {
|
||||
markDeleted: true,
|
||||
disallowExtraObjects: true,
|
||||
finalCheckAgainstModel: [T1_DELETED_VAL],
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Undelete table - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(
|
||||
engine,
|
||||
conn,
|
||||
driver,
|
||||
[
|
||||
[T1],
|
||||
// delete table
|
||||
[],
|
||||
// undelete table
|
||||
[T1],
|
||||
],
|
||||
{
|
||||
markDeleted: true,
|
||||
disallowExtraObjects: true,
|
||||
}
|
||||
);
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(engine => engine.supportRenameSqlObject).map(engine => [engine.label, engine]))(
|
||||
'Undelete view - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [[T1, V1], [T1], [T1, V1]], {
|
||||
markDeleted: true,
|
||||
disallowExtraObjects: true,
|
||||
allowDropStatements: true,
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Undelete column - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [[T1], [T1_NO_VAL], [T1]], {
|
||||
markDeleted: true,
|
||||
disallowExtraObjects: true,
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'View redeploy - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(
|
||||
engine,
|
||||
conn,
|
||||
driver,
|
||||
[
|
||||
[T1, V1],
|
||||
[T1, V1],
|
||||
[T1, V1],
|
||||
],
|
||||
{
|
||||
markDeleted: true,
|
||||
disallowExtraObjects: true,
|
||||
allowDropStatements: true,
|
||||
}
|
||||
);
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Change view - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(
|
||||
engine,
|
||||
conn,
|
||||
driver,
|
||||
[
|
||||
[T1, V1],
|
||||
[T1, V1_VARIANT2],
|
||||
],
|
||||
{
|
||||
markDeleted: true,
|
||||
disallowExtraObjects: true,
|
||||
allowDropStatements: true,
|
||||
}
|
||||
);
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
|
||||
'Script drived deploy - basic predeploy - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [
|
||||
[
|
||||
{
|
||||
name: '1.predeploy.sql',
|
||||
text: 'create table t1 (id int primary key); insert into t1 (id) values (1);',
|
||||
},
|
||||
],
|
||||
]);
|
||||
|
||||
const res1 = await driver.query(conn, 'SELECT COUNT(*) AS cnt FROM t1');
|
||||
expect(res1.rows[0].cnt == 1).toBeTruthy();
|
||||
|
||||
const res2 = await driver.query(conn, 'SELECT COUNT(*) AS cnt FROM dbgate_deploy_journal');
|
||||
expect(res2.rows[0].cnt == 1).toBeTruthy();
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
|
||||
'Script drived deploy - install+uninstall - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [
|
||||
[
|
||||
{
|
||||
name: 't1.uninstall.sql',
|
||||
text: 'drop table t1',
|
||||
},
|
||||
{
|
||||
name: 't1.install.sql',
|
||||
text: 'create table t1 (id int primary key); insert into t1 (id) values (1)',
|
||||
},
|
||||
{
|
||||
name: 't2.once.sql',
|
||||
text: 'create table t2 (id int primary key); insert into t2 (id) values (1)',
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
name: 't1.uninstall.sql',
|
||||
text: 'drop table t1',
|
||||
},
|
||||
{
|
||||
name: 't1.install.sql',
|
||||
text: 'create table t1 (id int primary key, val int); insert into t1 (id, val) values (1, 11)',
|
||||
},
|
||||
{
|
||||
name: 't2.once.sql',
|
||||
text: 'insert into t2 (id) values (2)',
|
||||
},
|
||||
],
|
||||
]);
|
||||
|
||||
const res1 = await driver.query(conn, 'SELECT val from t1 where id = 1');
|
||||
expect(res1.rows[0].val == 11).toBeTruthy();
|
||||
|
||||
const res2 = await driver.query(conn, 'SELECT COUNT(*) AS cnt FROM t2');
|
||||
expect(res2.rows[0].cnt == 1).toBeTruthy();
|
||||
|
||||
const res3 = await driver.query(conn, 'SELECT COUNT(*) AS cnt FROM dbgate_deploy_journal');
|
||||
expect(res3.rows[0].cnt == 3).toBeTruthy();
|
||||
|
||||
const res4 = await driver.query(conn, "SELECT run_count from dbgate_deploy_journal where name = 't2.once.sql'");
|
||||
expect(res4.rows[0].run_count == 1).toBeTruthy();
|
||||
|
||||
const res5 = await driver.query(
|
||||
conn,
|
||||
"SELECT run_count from dbgate_deploy_journal where name = 't1.install.sql'"
|
||||
);
|
||||
expect(res5.rows[0].run_count == 2).toBeTruthy();
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Mark table removed, one remains - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testDatabaseDeploy(engine, conn, driver, [[T1, T2], [T2], [T2]], {
|
||||
markDeleted: true,
|
||||
disallowExtraObjects: true,
|
||||
finalCheckAgainstModel: [T1_DELETED, T2],
|
||||
});
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
151
integration-tests/__tests__/import-formats.spec.js
Normal file
@@ -0,0 +1,151 @@
|
||||
const dbgateApi = require('dbgate-api/src/shell');
|
||||
// const jsonLinesWriter = require('dbgate-api/src/shell/jsonLinesWriter');
|
||||
const tmp = require('tmp');
|
||||
// const dbgatePluginCsv = require('dbgate-plugin-csv/src/backend');
|
||||
const fs = require('fs');
|
||||
const requirePlugin = require('dbgate-api/src/shell/requirePlugin');
|
||||
|
||||
const CSV_DATA = `Issue Number; Title; Github URL; Labels; State; Created At; Updated At; Reporter; Assignee
|
||||
801; "Does it 'burst' the database on startup or first lUI load ? "; https://github.com/dbgate/dbgate/issues/801; ""; open; 05/23/2024; 05/23/2024; rgarrigue;
|
||||
799; "BUG: latest AppImage crashes on opening in Fedora 39"; https://github.com/dbgate/dbgate/issues/799; ""; open; 05/21/2024; 05/24/2024; BenGraham-Git;
|
||||
798; "MongoDB write operations fail"; https://github.com/dbgate/dbgate/issues/798; "bug,solved"; open; 05/21/2024; 05/24/2024; mahmed0715;
|
||||
797; "BUG: Unable to open SQL files"; https://github.com/dbgate/dbgate/issues/797; "bug"; open; 05/20/2024; 05/21/2024; cesarValdivia;
|
||||
795; "BUG: MS SQL Server connection error (KEY_USAGE_BIT_INCORRECT)"; https://github.com/dbgate/dbgate/issues/795; ""; open; 05/20/2024; 05/20/2024; keskinonur;
|
||||
794; "GLIBC_2.29' not found and i have 2.31"; https://github.com/dbgate/dbgate/issues/794; ""; closed; 05/20/2024; 05/21/2024; MFdanGM;
|
||||
793; "BUG: PostgresSQL doesn't show tables when connected"; https://github.com/dbgate/dbgate/issues/793; ""; open; 05/20/2024; 05/22/2024; stomper013;
|
||||
792; "FEAT: Wayland support"; https://github.com/dbgate/dbgate/issues/792; ""; closed; 05/19/2024; 05/21/2024; VosaXalo;
|
||||
`;
|
||||
|
||||
async function getReaderRows(reader) {
|
||||
const jsonLinesFileName = tmp.tmpNameSync();
|
||||
|
||||
const writer = await dbgateApi.jsonLinesWriter({
|
||||
fileName: jsonLinesFileName,
|
||||
});
|
||||
await dbgateApi.copyStream(reader, writer);
|
||||
|
||||
const jsonData = fs.readFileSync(jsonLinesFileName, 'utf-8');
|
||||
const rows = jsonData
|
||||
.split('\n')
|
||||
.filter(x => x.trim() !== '')
|
||||
.map(x => JSON.parse(x));
|
||||
|
||||
return rows;
|
||||
}
|
||||
|
||||
test('csv import test', async () => {
|
||||
const dbgatePluginCsv = requirePlugin('dbgate-plugin-csv');
|
||||
|
||||
const csvFileName = tmp.tmpNameSync();
|
||||
|
||||
fs.writeFileSync(csvFileName, CSV_DATA);
|
||||
|
||||
const reader = await dbgatePluginCsv.shellApi.reader({
|
||||
fileName: csvFileName,
|
||||
});
|
||||
|
||||
const rows = await getReaderRows(reader);
|
||||
|
||||
expect(rows[0].columns).toEqual([
|
||||
{ columnName: 'Issue Number' },
|
||||
{ columnName: 'Title' },
|
||||
{ columnName: 'Github URL' },
|
||||
{ columnName: 'Labels' },
|
||||
{ columnName: 'State' },
|
||||
{ columnName: 'Created At' },
|
||||
{ columnName: 'Updated At' },
|
||||
{ columnName: 'Reporter' },
|
||||
{ columnName: 'Assignee' },
|
||||
]);
|
||||
expect(rows.length).toEqual(9);
|
||||
expect(rows[1]).toEqual({
|
||||
'Issue Number': '801',
|
||||
Title: "Does it 'burst' the database on startup or first lUI load ? ",
|
||||
'Github URL': 'https://github.com/dbgate/dbgate/issues/801',
|
||||
Labels: '',
|
||||
State: 'open',
|
||||
'Created At': '05/23/2024',
|
||||
'Updated At': '05/23/2024',
|
||||
Reporter: 'rgarrigue',
|
||||
Assignee: '',
|
||||
});
|
||||
});
|
||||
|
||||
test('JSON array import test', async () => {
|
||||
const jsonFileName = tmp.tmpNameSync();
|
||||
|
||||
fs.writeFileSync(
|
||||
jsonFileName,
|
||||
JSON.stringify([
|
||||
{ id: 1, val: 'v1' },
|
||||
{ id: 2, val: 'v2' },
|
||||
])
|
||||
);
|
||||
|
||||
const reader = await dbgateApi.jsonReader({
|
||||
fileName: jsonFileName,
|
||||
});
|
||||
|
||||
const rows = await getReaderRows(reader);
|
||||
|
||||
expect(rows.length).toEqual(2);
|
||||
expect(rows).toEqual([
|
||||
{ id: 1, val: 'v1' },
|
||||
{ id: 2, val: 'v2' },
|
||||
]);
|
||||
});
|
||||
|
||||
test('JSON object import test', async () => {
|
||||
const jsonFileName = tmp.tmpNameSync();
|
||||
|
||||
fs.writeFileSync(
|
||||
jsonFileName,
|
||||
JSON.stringify({
|
||||
k1: { id: 1, val: 'v1' },
|
||||
k2: { id: 2, val: 'v2' },
|
||||
})
|
||||
);
|
||||
|
||||
const reader = await dbgateApi.jsonReader({
|
||||
fileName: jsonFileName,
|
||||
jsonStyle: 'object',
|
||||
keyField: 'mykey',
|
||||
});
|
||||
|
||||
const rows = await getReaderRows(reader);
|
||||
|
||||
expect(rows.length).toEqual(2);
|
||||
expect(rows).toEqual([
|
||||
{ mykey: 'k1', id: 1, val: 'v1' },
|
||||
{ mykey: 'k2', id: 2, val: 'v2' },
|
||||
]);
|
||||
});
|
||||
|
||||
test('JSON filtered object import test', async () => {
|
||||
const jsonFileName = tmp.tmpNameSync();
|
||||
|
||||
fs.writeFileSync(
|
||||
jsonFileName,
|
||||
JSON.stringify({
|
||||
filtered: {
|
||||
k1: { id: 1, val: 'v1' },
|
||||
k2: { id: 2, val: 'v2' },
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
const reader = await dbgateApi.jsonReader({
|
||||
fileName: jsonFileName,
|
||||
jsonStyle: 'object',
|
||||
keyField: 'mykey',
|
||||
rootField: 'filtered',
|
||||
});
|
||||
|
||||
const rows = await getReaderRows(reader);
|
||||
|
||||
expect(rows.length).toEqual(2);
|
||||
expect(rows).toEqual([
|
||||
{ mykey: 'k1', id: 1, val: 'v1' },
|
||||
{ mykey: 'k2', id: 2, val: 'v2' },
|
||||
]);
|
||||
});
|
||||
@@ -2,7 +2,7 @@ const { testWrapper } = require('../tools');
|
||||
const engines = require('../engines');
|
||||
const _ = require('lodash');
|
||||
|
||||
const initSql = ['CREATE TABLE t1 (id int)', 'CREATE TABLE t2 (id int)'];
|
||||
const initSql = ['CREATE TABLE t1 (id int primary key)', 'CREATE TABLE t2 (id int primary key)'];
|
||||
|
||||
function flatSource() {
|
||||
return _.flatten(
|
||||
@@ -10,6 +10,14 @@ function flatSource() {
|
||||
);
|
||||
}
|
||||
|
||||
function flatSourceParameters() {
|
||||
return _.flatten(
|
||||
engines.map(engine =>
|
||||
(engine.parameters || []).map(parameter => [engine.label, parameter.testName, parameter, engine])
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
const obj1Match = expect.objectContaining({
|
||||
pureName: 'obj1',
|
||||
});
|
||||
@@ -26,9 +34,9 @@ describe('Object analyse', () => {
|
||||
test.each(flatSource())(
|
||||
'Full analysis - %s - %s',
|
||||
testWrapper(async (conn, driver, type, object, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql);
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
|
||||
await driver.query(conn, object.create1);
|
||||
await driver.query(conn, object.create1, { discardResult: true });
|
||||
const structure = await driver.analyseFull(conn);
|
||||
|
||||
expect(structure[type].length).toEqual(1);
|
||||
@@ -39,11 +47,11 @@ describe('Object analyse', () => {
|
||||
test.each(flatSource())(
|
||||
'Incremental analysis - add - %s - %s',
|
||||
testWrapper(async (conn, driver, type, object, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql);
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
|
||||
await driver.query(conn, object.create2);
|
||||
await driver.query(conn, object.create2, { discardResult: true });
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
await driver.query(conn, object.create1);
|
||||
await driver.query(conn, object.create1, { discardResult: true });
|
||||
const structure2 = await driver.analyseIncremental(conn, structure1);
|
||||
|
||||
expect(structure2[type].length).toEqual(2);
|
||||
@@ -54,12 +62,12 @@ describe('Object analyse', () => {
|
||||
test.each(flatSource())(
|
||||
'Incremental analysis - drop - %s - %s',
|
||||
testWrapper(async (conn, driver, type, object, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql);
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
|
||||
await driver.query(conn, object.create1);
|
||||
await driver.query(conn, object.create2);
|
||||
await driver.query(conn, object.create1, { discardResult: true });
|
||||
await driver.query(conn, object.create2, { discardResult: true });
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
await driver.query(conn, object.drop2);
|
||||
await driver.query(conn, object.drop2, { discardResult: true });
|
||||
const structure2 = await driver.analyseIncremental(conn, structure1);
|
||||
|
||||
expect(structure2[type].length).toEqual(1);
|
||||
@@ -70,15 +78,15 @@ describe('Object analyse', () => {
|
||||
test.each(flatSource())(
|
||||
'Create SQL - add - %s - %s',
|
||||
testWrapper(async (conn, driver, type, object, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql);
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
|
||||
await driver.query(conn, object.create1);
|
||||
await driver.query(conn, object.create1, { discardResult: true });
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
await driver.query(conn, object.drop1);
|
||||
await driver.query(conn, object.drop1, { discardResult: true });
|
||||
const structure2 = await driver.analyseIncremental(conn, structure1);
|
||||
expect(structure2[type].length).toEqual(0);
|
||||
|
||||
await driver.query(conn, structure1[type][0].createSql);
|
||||
await driver.script(conn, structure1[type][0].createSql);
|
||||
|
||||
const structure3 = await driver.analyseIncremental(conn, structure2);
|
||||
|
||||
@@ -86,4 +94,45 @@ describe('Object analyse', () => {
|
||||
expect(structure3[type][0]).toEqual(type.includes('views') ? view1Match : obj1Match);
|
||||
})
|
||||
);
|
||||
|
||||
test.each(flatSourceParameters())(
|
||||
'Test parameters simple analyse - %s - %s',
|
||||
testWrapper(async (conn, driver, testName, parameter, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
for (const sql of engine.parametersOtherSql) await driver.query(conn, sql, { discardResult: true });
|
||||
|
||||
await driver.query(conn, parameter.create, { discardResult: true });
|
||||
const structure = await driver.analyseFull(conn);
|
||||
|
||||
const parameters = structure[parameter.objectTypeField].find(x => x.pureName == 'obj1').parameters;
|
||||
|
||||
expect(parameters.length).toEqual(parameter.list.length);
|
||||
for (let i = 0; i < parameters.length; i += 1) {
|
||||
expect(parameters[i]).toEqual(expect.objectContaining(parameter.list[i]));
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
test.each(flatSourceParameters())(
|
||||
'Test parameters create SQL - %s - %s',
|
||||
testWrapper(async (conn, driver, testName, parameter, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
for (const sql of engine.parametersOtherSql) await driver.query(conn, sql, { discardResult: true });
|
||||
|
||||
await driver.query(conn, parameter.create, { discardResult: true });
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
await driver.query(conn, parameter.drop, { discardResult: true });
|
||||
|
||||
const obj = structure1[parameter.objectTypeField].find(x => x.pureName == 'obj1');
|
||||
await driver.script(conn, obj.createSql);
|
||||
|
||||
const structure2 = await driver.analyseFull(conn);
|
||||
const parameters = structure2[parameter.objectTypeField].find(x => x.pureName == 'obj1').parameters;
|
||||
|
||||
expect(parameters.length).toEqual(parameter.list.length);
|
||||
for (let i = 0; i < parameters.length; i += 1) {
|
||||
expect(parameters[i]).toEqual(expect.objectContaining(parameter.list[i]));
|
||||
}
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
@@ -2,7 +2,11 @@ const engines = require('../engines');
|
||||
const { splitQuery } = require('dbgate-query-splitter');
|
||||
const { testWrapper } = require('../tools');
|
||||
|
||||
const initSql = ['CREATE TABLE t1 (id int)', 'INSERT INTO t1 (id) VALUES (1)', 'INSERT INTO t1 (id) VALUES (2)'];
|
||||
const initSql = [
|
||||
'CREATE TABLE t1 (id int primary key)',
|
||||
'INSERT INTO t1 (id) VALUES (1)',
|
||||
'INSERT INTO t1 (id) VALUES (2)',
|
||||
];
|
||||
|
||||
expect.extend({
|
||||
dataRow(row, expected) {
|
||||
@@ -64,7 +68,7 @@ describe('Query', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Simple query - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql);
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
|
||||
const res = await driver.query(conn, 'SELECT id FROM t1 ORDER BY id');
|
||||
expect(res.columns).toEqual([
|
||||
@@ -87,7 +91,7 @@ describe('Query', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Simple stream query - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql);
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
const results = await executeStream(driver, conn, 'SELECT id FROM t1 ORDER BY id');
|
||||
expect(results.length).toEqual(1);
|
||||
const res = results[0];
|
||||
@@ -100,7 +104,7 @@ describe('Query', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'More queries - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql);
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
const results = await executeStream(
|
||||
driver,
|
||||
conn,
|
||||
@@ -124,7 +128,7 @@ describe('Query', () => {
|
||||
const results = await executeStream(
|
||||
driver,
|
||||
conn,
|
||||
'CREATE TABLE t1 (id int); INSERT INTO t1 (id) VALUES (1); INSERT INTO t1 (id) VALUES (2); SELECT id FROM t1 ORDER BY id; '
|
||||
'CREATE TABLE t1 (id int primary key); INSERT INTO t1 (id) VALUES (1); INSERT INTO t1 (id) VALUES (2); SELECT id FROM t1 ORDER BY id; '
|
||||
);
|
||||
expect(results.length).toEqual(1);
|
||||
|
||||
@@ -146,14 +150,15 @@ describe('Query', () => {
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
|
||||
'Save data query - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
for (const sql of initSql) await driver.query(conn, sql);
|
||||
for (const sql of initSql) await driver.query(conn, sql, { discardResult: true });
|
||||
|
||||
await driver.script(
|
||||
conn,
|
||||
'INSERT INTO t1 (id) VALUES (3);INSERT INTO t1 (id) VALUES (4);UPDATE t1 SET id=10 WHERE id=1;DELETE FROM t1 WHERE id=2;'
|
||||
'INSERT INTO t1 (id) VALUES (3);INSERT INTO t1 (id) VALUES (4);UPDATE t1 SET id=10 WHERE id=1;DELETE FROM t1 WHERE id=2;',
|
||||
{ discardResult: true }
|
||||
);
|
||||
const res = await driver.query(conn, 'SELECT COUNT(*) AS cnt FROM t1');
|
||||
// console.log(res);
|
||||
|
||||
90
integration-tests/__tests__/schema-tests.spec.js
Normal file
@@ -0,0 +1,90 @@
|
||||
const stableStringify = require('json-stable-stringify');
|
||||
const _ = require('lodash');
|
||||
const fp = require('lodash/fp');
|
||||
const { testWrapper, extractConnection } = require('../tools');
|
||||
const engines = require('../engines');
|
||||
const { runCommandOnDriver } = require('dbgate-tools');
|
||||
|
||||
async function baseStructure(conn, driver) {
|
||||
await driver.query(conn, `create table t1 (id int not null primary key)`);
|
||||
|
||||
await driver.query(
|
||||
conn,
|
||||
`create table t2 (
|
||||
id int not null primary key,
|
||||
t1_id int
|
||||
)`
|
||||
);
|
||||
}
|
||||
|
||||
describe('Schema tests', () => {
|
||||
test.each(engines.filter(x => x.supportSchemas).map(engine => [engine.label, engine]))(
|
||||
'Create schema - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await baseStructure(conn, driver);
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
const schemas1 = await driver.listSchemas(conn);
|
||||
expect(schemas1.find(x => x.schemaName == 'myschema')).toBeFalsy();
|
||||
const count = schemas1.length;
|
||||
expect(structure1.tables.length).toEqual(2);
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.createSchema('myschema'));
|
||||
const structure2 = await driver.analyseIncremental(conn, structure1);
|
||||
const schemas2 = await driver.listSchemas(conn);
|
||||
expect(schemas2.find(x => x.schemaName == 'myschema')).toBeTruthy();
|
||||
expect(schemas2.length).toEqual(count + 1);
|
||||
expect(schemas2.find(x => x.isDefault).schemaName).toEqual(engine.defaultSchemaName);
|
||||
expect(structure2).toBeNull();
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => x.supportSchemas).map(engine => [engine.label, engine]))(
|
||||
'Drop schema - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await baseStructure(conn, driver);
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.createSchema('myschema'));
|
||||
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
const schemas1 = await driver.listSchemas(conn);
|
||||
expect(schemas1.find(x => x.schemaName == 'myschema')).toBeTruthy();
|
||||
expect(structure1.tables.length).toEqual(2);
|
||||
await runCommandOnDriver(conn, driver, dmp => dmp.dropSchema('myschema'));
|
||||
const structure2 = await driver.analyseIncremental(conn, structure1);
|
||||
const schemas2 = await driver.listSchemas(conn);
|
||||
expect(schemas2.find(x => x.schemaName == 'myschema')).toBeFalsy();
|
||||
expect(structure2).toBeNull();
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => x.supportSchemas && !x.skipSeparateSchemas).map(engine => [engine.label, engine]))(
|
||||
'Table inside schema - %s',
|
||||
testWrapper(async (handle, driver, engine) => {
|
||||
await baseStructure(handle, driver);
|
||||
await runCommandOnDriver(handle, driver, dmp => dmp.createSchema('myschema'));
|
||||
|
||||
const schemaConnDef = {
|
||||
...extractConnection(engine),
|
||||
database: `${handle.database}::myschema`,
|
||||
};
|
||||
|
||||
const schemaConn = await driver.connect(schemaConnDef);
|
||||
await driver.query(schemaConn, `create table myschema.myt1 (id int not null primary key)`);
|
||||
const structure1 = await driver.analyseFull(schemaConn);
|
||||
expect(structure1.tables.length).toEqual(1);
|
||||
expect(structure1.tables[0].pureName).toEqual('myt1');
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
describe('Base analyser test', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Structure without change - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await baseStructure(conn, driver);
|
||||
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
expect(structure1.tables.length).toEqual(2);
|
||||
const structure2 = await driver.analyseIncremental(conn, structure1);
|
||||
expect(structure2).toBeNull();
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -1,32 +1,42 @@
|
||||
const engines = require('../engines');
|
||||
const { testWrapper } = require('../tools');
|
||||
|
||||
const t1Sql = 'CREATE TABLE t1 (id int not null primary key, val1 varchar(50) null)';
|
||||
const t1Sql = 'CREATE TABLE t1 (id int not null primary key, val1 varchar(50))';
|
||||
const ix1Sql = 'CREATE index ix1 ON t1(val1, id)';
|
||||
const t2Sql = 'CREATE TABLE t2 (id int not null primary key, val2 varchar(50) null unique)';
|
||||
const t2Sql = engine =>
|
||||
`CREATE TABLE t2 (id int not null primary key, val2 varchar(50) ${engine.skipUnique ? '' : 'unique'})`;
|
||||
const t3Sql = 'CREATE TABLE t3 (id int not null primary key, valfk int, foreign key (valfk) references t2(id))';
|
||||
const t4Sql = 'CREATE TABLE t4 (id int not null primary key, valdef int not null default 12)';
|
||||
// const fkSql = 'ALTER TABLE t3 ADD FOREIGN KEY (valfk) REFERENCES t2(id)'
|
||||
|
||||
const txMatch = (tname, vcolname, nextcol) =>
|
||||
const txMatch = (engine, tname, vcolname, nextcol, defaultValue) =>
|
||||
expect.objectContaining({
|
||||
pureName: tname,
|
||||
columns: [
|
||||
expect.objectContaining({
|
||||
columnName: 'id',
|
||||
notNull: true,
|
||||
dataType: expect.stringMatching(/int/i),
|
||||
dataType: expect.stringMatching(/int.*/i),
|
||||
...(engine.skipNullability ? {} : { notNull: true }),
|
||||
}),
|
||||
expect.objectContaining({
|
||||
columnName: vcolname,
|
||||
notNull: false,
|
||||
dataType: expect.stringMatching(/.*char.*\(50\)/),
|
||||
...(engine.skipNullability ? {} : { notNull: !!defaultValue }),
|
||||
...(defaultValue
|
||||
? { defaultValue }
|
||||
: {
|
||||
dataType: engine.skipStringLength
|
||||
? expect.stringMatching(/.*string|char.*/i)
|
||||
: expect.stringMatching(/.*char.*\(50\)/i),
|
||||
}),
|
||||
}),
|
||||
...(nextcol
|
||||
? [
|
||||
expect.objectContaining({
|
||||
columnName: 'nextcol',
|
||||
notNull: false,
|
||||
dataType: expect.stringMatching(/.*char.*\(50\)/),
|
||||
...(engine.skipNullability ? {} : { notNull: false }),
|
||||
dataType: engine.skipStringLength
|
||||
? expect.stringMatching(/.*string.*|char.*/i)
|
||||
: expect.stringMatching(/.*char.*\(50\).*/i),
|
||||
}),
|
||||
]
|
||||
: []),
|
||||
@@ -40,9 +50,10 @@ const txMatch = (tname, vcolname, nextcol) =>
|
||||
}),
|
||||
});
|
||||
|
||||
const t1Match = txMatch('t1', 'val1');
|
||||
const t2Match = txMatch('t2', 'val2');
|
||||
const t2NextColMatch = txMatch('t2', 'val2', true);
|
||||
const t1Match = engine => txMatch(engine, 't1', 'val1');
|
||||
const t2Match = engine => txMatch(engine, 't2', 'val2');
|
||||
const t2NextColMatch = engine => txMatch(engine, 't2', 'val2', true);
|
||||
const t4Match = engine => txMatch(engine, 't4', 'valdef', null, '12');
|
||||
|
||||
describe('Table analyse', () => {
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
@@ -53,25 +64,25 @@ describe('Table analyse', () => {
|
||||
const structure = await driver.analyseFull(conn);
|
||||
|
||||
expect(structure.tables.length).toEqual(1);
|
||||
expect(structure.tables[0]).toEqual(t1Match);
|
||||
expect(structure.tables[0]).toEqual(t1Match(engine));
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Table add - incremental analysis - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await driver.query(conn, t2Sql);
|
||||
await driver.query(conn, t2Sql(engine));
|
||||
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
expect(structure1.tables.length).toEqual(1);
|
||||
expect(structure1.tables[0]).toEqual(t2Match);
|
||||
expect(structure1.tables[0]).toEqual(t2Match(engine));
|
||||
|
||||
await driver.query(conn, t1Sql);
|
||||
const structure2 = await driver.analyseIncremental(conn, structure1);
|
||||
|
||||
expect(structure2.tables.length).toEqual(2);
|
||||
expect(structure2.tables.find(x => x.pureName == 't1')).toEqual(t1Match);
|
||||
expect(structure2.tables.find(x => x.pureName == 't2')).toEqual(t2Match);
|
||||
expect(structure2.tables.find(x => x.pureName == 't1')).toEqual(t1Match(engine));
|
||||
expect(structure2.tables.find(x => x.pureName == 't2')).toEqual(t2Match(engine));
|
||||
})
|
||||
);
|
||||
|
||||
@@ -79,17 +90,17 @@ describe('Table analyse', () => {
|
||||
'Table remove - incremental analysis - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await driver.query(conn, t1Sql);
|
||||
await driver.query(conn, t2Sql);
|
||||
await driver.query(conn, t2Sql(engine));
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
expect(structure1.tables.length).toEqual(2);
|
||||
expect(structure1.tables.find(x => x.pureName == 't1')).toEqual(t1Match);
|
||||
expect(structure1.tables.find(x => x.pureName == 't2')).toEqual(t2Match);
|
||||
expect(structure1.tables.find(x => x.pureName == 't1')).toEqual(t1Match(engine));
|
||||
expect(structure1.tables.find(x => x.pureName == 't2')).toEqual(t2Match(engine));
|
||||
|
||||
await driver.query(conn, 'DROP TABLE t2');
|
||||
const structure2 = await driver.analyseIncremental(conn, structure1);
|
||||
|
||||
expect(structure2.tables.length).toEqual(1);
|
||||
expect(structure2.tables[0]).toEqual(t1Match);
|
||||
expect(structure2.tables[0]).toEqual(t1Match(engine));
|
||||
})
|
||||
);
|
||||
|
||||
@@ -97,23 +108,26 @@ describe('Table analyse', () => {
|
||||
'Table change - incremental analysis - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await driver.query(conn, t1Sql);
|
||||
await driver.query(conn, t2Sql);
|
||||
await driver.query(conn, t2Sql(engine));
|
||||
const structure1 = await driver.analyseFull(conn);
|
||||
|
||||
if (engine.dbSnapshotBySeconds) await new Promise(resolve => setTimeout(resolve, 1100));
|
||||
|
||||
await driver.query(conn, 'ALTER TABLE t2 ADD nextcol varchar(50)');
|
||||
await driver.query(
|
||||
conn,
|
||||
`ALTER TABLE t2 ADD ${engine.alterTableAddColumnSyntax ? 'COLUMN' : ''} nextcol varchar(50)`
|
||||
);
|
||||
const structure2 = await driver.analyseIncremental(conn, structure1);
|
||||
|
||||
expect(structure2).toBeTruthy(); // if falsy, no modification is detected
|
||||
|
||||
expect(structure2.tables.length).toEqual(2);
|
||||
expect(structure2.tables.find(x => x.pureName == 't1')).toEqual(t1Match);
|
||||
expect(structure2.tables.find(x => x.pureName == 't2')).toEqual(t2NextColMatch);
|
||||
expect(structure2.tables.find(x => x.pureName == 't1')).toEqual(t1Match(engine));
|
||||
expect(structure2.tables.find(x => x.pureName == 't2')).toEqual(t2NextColMatch(engine));
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipIndexes).map(engine => [engine.label, engine]))(
|
||||
'Index - full analysis - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await driver.query(conn, t1Sql);
|
||||
@@ -128,10 +142,10 @@ describe('Table analyse', () => {
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipUnique).map(engine => [engine.label, engine]))(
|
||||
'Unique - full analysis - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await driver.query(conn, t2Sql);
|
||||
await driver.query(conn, t2Sql(engine));
|
||||
const structure = await driver.analyseFull(conn);
|
||||
|
||||
const t2 = structure.tables.find(x => x.pureName == 't2');
|
||||
@@ -142,10 +156,10 @@ describe('Table analyse', () => {
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
|
||||
'Foreign key - full analysis - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await driver.query(conn, t2Sql);
|
||||
await driver.query(conn, t2Sql(engine));
|
||||
await driver.query(conn, t3Sql);
|
||||
// await driver.query(conn, fkSql);
|
||||
|
||||
@@ -161,4 +175,16 @@ describe('Table analyse', () => {
|
||||
);
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
'Table structure - default value - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await driver.query(conn, t4Sql);
|
||||
|
||||
const structure = await driver.analyseFull(conn);
|
||||
|
||||
expect(structure.tables.length).toEqual(1);
|
||||
expect(structure.tables[0]).toEqual(t4Match(engine));
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
@@ -62,7 +62,7 @@ describe('Table create', () => {
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipIndexes).map(engine => [engine.label, engine]))(
|
||||
'Table with index - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testTableCreate(conn, driver, {
|
||||
@@ -92,7 +92,7 @@ describe('Table create', () => {
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
|
||||
'Table with foreign key - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testTableCreate(conn, driver, {
|
||||
@@ -122,7 +122,7 @@ describe('Table create', () => {
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipUnique).map(engine => [engine.label, engine]))(
|
||||
'Table with unique - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await testTableCreate(conn, driver, {
|
||||
|
||||
15963
integration-tests/data/chinook-mysql.sql
Normal file
16173
integration-tests/data/chinook-postgre.sql
Normal file
@@ -1,21 +1,21 @@
|
||||
version: '3'
|
||||
services:
|
||||
# postgres:
|
||||
# image: postgres
|
||||
# restart: always
|
||||
# environment:
|
||||
# POSTGRES_PASSWORD: Pwd2020Db
|
||||
# ports:
|
||||
# - 15000:5432
|
||||
postgres:
|
||||
image: postgres
|
||||
restart: always
|
||||
environment:
|
||||
POSTGRES_PASSWORD: Pwd2020Db
|
||||
ports:
|
||||
- 15000:5432
|
||||
|
||||
# mariadb:
|
||||
# image: mariadb
|
||||
# command: --default-authentication-plugin=mysql_native_password
|
||||
# restart: always
|
||||
# ports:
|
||||
# - 15004:3306
|
||||
# environment:
|
||||
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
|
||||
mariadb:
|
||||
image: mariadb
|
||||
command: --default-authentication-plugin=mysql_native_password
|
||||
restart: always
|
||||
ports:
|
||||
- 15004:3306
|
||||
environment:
|
||||
- MYSQL_ROOT_PASSWORD=Pwd2020Db
|
||||
|
||||
# mysql:
|
||||
# image: mysql:8.0.18
|
||||
@@ -26,15 +26,23 @@ services:
|
||||
# environment:
|
||||
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
|
||||
|
||||
mssql:
|
||||
image: mcr.microsoft.com/mssql/server
|
||||
restart: always
|
||||
ports:
|
||||
- 15002:1433
|
||||
environment:
|
||||
- ACCEPT_EULA=Y
|
||||
- SA_PASSWORD=Pwd2020Db
|
||||
- MSSQL_PID=Express
|
||||
# clickhouse:
|
||||
# image: bitnami/clickhouse:24.8.4
|
||||
# restart: always
|
||||
# ports:
|
||||
# - 15005:8123
|
||||
# environment:
|
||||
# - CLICKHOUSE_ADMIN_PASSWORD=Pwd2020Db
|
||||
|
||||
# mssql:
|
||||
# image: mcr.microsoft.com/mssql/server
|
||||
# restart: always
|
||||
# ports:
|
||||
# - 15002:1433
|
||||
# environment:
|
||||
# - ACCEPT_EULA=Y
|
||||
# - SA_PASSWORD=Pwd2020Db
|
||||
# - MSSQL_PID=Express
|
||||
|
||||
# cockroachdb:
|
||||
# image: cockroachdb/cockroach
|
||||
|
||||
@@ -28,8 +28,86 @@ const engines = [
|
||||
port: 15001,
|
||||
},
|
||||
// skipOnCI: true,
|
||||
objects: [views],
|
||||
objects: [
|
||||
views,
|
||||
{
|
||||
type: 'procedures',
|
||||
create1: 'CREATE PROCEDURE obj1() BEGIN SELECT * FROM t1; END',
|
||||
create2: 'CREATE PROCEDURE obj2() BEGIN SELECT * FROM t2; END',
|
||||
drop1: 'DROP PROCEDURE obj1',
|
||||
drop2: 'DROP PROCEDURE obj2',
|
||||
},
|
||||
],
|
||||
dbSnapshotBySeconds: true,
|
||||
dumpFile: 'data/chinook-mysql.sql',
|
||||
dumpChecks: [
|
||||
{
|
||||
sql: 'select count(*) as res from genre',
|
||||
res: '25',
|
||||
},
|
||||
],
|
||||
parametersOtherSql: ['CREATE PROCEDURE obj2(a int, b int) BEGIN SELECT * FROM t1; END'],
|
||||
parameters: [
|
||||
{
|
||||
testName: 'simple',
|
||||
create: 'CREATE PROCEDURE obj1(a int) BEGIN SELECT * FROM t1; END',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: 'a',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'int',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
testName: 'paramTypes',
|
||||
create: 'CREATE PROCEDURE obj1(a int, b varchar(50), c numeric(10,2)) BEGIN SELECT * FROM t1; END',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: 'a',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'int',
|
||||
},
|
||||
{
|
||||
parameterName: 'b',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'varchar(50)',
|
||||
},
|
||||
{
|
||||
parameterName: 'c',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'decimal(10,2)',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
testName: 'paramModes',
|
||||
create: 'CREATE PROCEDURE obj1(IN a int, OUT b int, INOUT c int) BEGIN SELECT * FROM t1; END',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: 'a',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'int',
|
||||
},
|
||||
{
|
||||
parameterName: 'b',
|
||||
parameterMode: 'OUT',
|
||||
dataType: 'int',
|
||||
},
|
||||
{
|
||||
parameterName: 'c',
|
||||
parameterMode: 'INOUT',
|
||||
dataType: 'int',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
label: 'MariaDB',
|
||||
@@ -47,6 +125,13 @@ const engines = [
|
||||
skipOnCI: true,
|
||||
objects: [views],
|
||||
dbSnapshotBySeconds: true,
|
||||
dumpFile: 'data/chinook-mysql.sql',
|
||||
dumpChecks: [
|
||||
{
|
||||
sql: 'select count(*) as res from genre',
|
||||
res: '25',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
label: 'PostgreSQL',
|
||||
@@ -81,6 +166,104 @@ const engines = [
|
||||
drop2: 'DROP FUNCTION obj2',
|
||||
},
|
||||
],
|
||||
supportSchemas: true,
|
||||
supportRenameSqlObject: true,
|
||||
defaultSchemaName: 'public',
|
||||
dumpFile: 'data/chinook-postgre.sql',
|
||||
dumpChecks: [
|
||||
{
|
||||
sql: 'select count(*) as res from "public"."Genre"',
|
||||
res: '25',
|
||||
},
|
||||
],
|
||||
|
||||
parametersOtherSql: ['CREATE PROCEDURE obj2(a integer, b integer) LANGUAGE SQL AS $$ select * from t1 $$'],
|
||||
parameters: [
|
||||
{
|
||||
testName: 'simple',
|
||||
create: 'CREATE PROCEDURE obj1(a integer) LANGUAGE SQL AS $$ select * from t1 $$',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: 'a',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'integer',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
testName: 'dataTypes',
|
||||
create:
|
||||
'CREATE PROCEDURE obj1(a integer, b varchar(20), c numeric(18,2)) LANGUAGE SQL AS $$ select * from t1 $$',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: 'a',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'integer',
|
||||
},
|
||||
{
|
||||
parameterName: 'b',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'varchar',
|
||||
},
|
||||
{
|
||||
parameterName: 'c',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'numeric',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
testName: 'paramModes',
|
||||
create: 'CREATE PROCEDURE obj1(IN a integer, INOUT b integer) LANGUAGE SQL AS $$ select * from t1 $$',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: 'a',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'integer',
|
||||
},
|
||||
{
|
||||
parameterName: 'b',
|
||||
parameterMode: 'INOUT',
|
||||
dataType: 'integer',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
testName: 'paramModesFunction',
|
||||
objectTypeField: 'functions',
|
||||
create: `
|
||||
create or replace function obj1(
|
||||
out min_len int,
|
||||
out max_len int)
|
||||
language plpgsql
|
||||
as $$
|
||||
begin
|
||||
select min(id),
|
||||
max(id)
|
||||
into min_len, max_len
|
||||
from t1;
|
||||
end;$$`,
|
||||
drop: 'DROP FUNCTION obj1',
|
||||
list: [
|
||||
{
|
||||
parameterName: 'min_len',
|
||||
parameterMode: 'OUT',
|
||||
dataType: 'integer',
|
||||
},
|
||||
{
|
||||
parameterName: 'max_len',
|
||||
parameterMode: 'OUT',
|
||||
dataType: 'integer',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
label: 'SQL Server',
|
||||
@@ -105,6 +288,67 @@ const engines = [
|
||||
drop2: 'DROP PROCEDURE obj2',
|
||||
},
|
||||
],
|
||||
parametersOtherSql: ['CREATE PROCEDURE obj2 (@p1 int, @p2 int) AS SELECT id from t1'],
|
||||
parameters: [
|
||||
{
|
||||
testName: 'simple',
|
||||
create: 'CREATE PROCEDURE obj1 (@param1 int) AS SELECT id from t1',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: '@param1',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'int',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
testName: 'dataTypes',
|
||||
create: 'CREATE PROCEDURE obj1 (@p1 bit, @p2 nvarchar(20), @p3 decimal(18,2), @p4 float) AS SELECT id from t1',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: '@p1',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'bit',
|
||||
},
|
||||
{
|
||||
parameterName: '@p2',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'nvarchar(20)',
|
||||
},
|
||||
{
|
||||
parameterName: '@p3',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'decimal(18,2)',
|
||||
},
|
||||
{
|
||||
parameterName: '@p4',
|
||||
parameterMode: 'IN',
|
||||
dataType: 'float',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
testName: 'outputParam',
|
||||
create: 'CREATE PROCEDURE obj1 (@p1 int OUTPUT) AS SELECT id from t1',
|
||||
drop: 'DROP PROCEDURE obj1',
|
||||
objectTypeField: 'procedures',
|
||||
list: [
|
||||
{
|
||||
parameterName: '@p1',
|
||||
parameterMode: 'OUT',
|
||||
dataType: 'int',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
supportSchemas: true,
|
||||
supportRenameSqlObject: true,
|
||||
defaultSchemaName: 'dbo',
|
||||
// skipSeparateSchemas: true,
|
||||
},
|
||||
{
|
||||
label: 'SQLite',
|
||||
@@ -113,6 +357,8 @@ const engines = [
|
||||
engine: 'sqlite@dbgate-plugin-sqlite',
|
||||
},
|
||||
objects: [views],
|
||||
skipOnCI: false,
|
||||
skipChangeColumn: true,
|
||||
},
|
||||
{
|
||||
label: 'CockroachDB',
|
||||
@@ -129,16 +375,42 @@ const engines = [
|
||||
skipOnCI: true,
|
||||
objects: [views, matviews],
|
||||
},
|
||||
{
|
||||
label: 'ClickHouse',
|
||||
connection: {
|
||||
engine: 'clickhouse@dbgate-plugin-clickhouse',
|
||||
databaseUrl: 'http://clickhouse:8123',
|
||||
password: 'Pwd2020Db',
|
||||
},
|
||||
local: {
|
||||
databaseUrl: 'http://localhost:15005',
|
||||
},
|
||||
skipOnCI: false,
|
||||
objects: [views],
|
||||
skipDataModifications: true,
|
||||
skipReferences: true,
|
||||
skipIndexes: true,
|
||||
skipNullability: true,
|
||||
skipUnique: true,
|
||||
skipAutoIncrement: true,
|
||||
skipPkColumnTesting: true,
|
||||
skipDataDuplicator: true,
|
||||
skipStringLength: true,
|
||||
alterTableAddColumnSyntax: true,
|
||||
dbSnapshotBySeconds: true,
|
||||
skipChangeColumn: true,
|
||||
},
|
||||
];
|
||||
|
||||
const filterLocal = [
|
||||
// filter local testing
|
||||
'-MySQL',
|
||||
'MySQL',
|
||||
'-MariaDB',
|
||||
'-PostgreSQL',
|
||||
'-SQL Server',
|
||||
'SQLite',
|
||||
'-SQLite',
|
||||
'-CockroachDB',
|
||||
'-ClickHouse',
|
||||
];
|
||||
|
||||
const enginesPostgre = engines.filter(x => x.label == 'PostgreSQL');
|
||||
|
||||
3
integration-tests/jest.config.js
Normal file
@@ -0,0 +1,3 @@
|
||||
module.exports = {
|
||||
setupFilesAfterEnv: ['<rootDir>/setupTests.js'],
|
||||
};
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "dbgate-integration-tests",
|
||||
"version": "5.0.0-alpha.1",
|
||||
"version": "6.0.0-alpha.1",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -11,12 +11,9 @@
|
||||
"scripts": {
|
||||
"wait:local": "cross-env DEVMODE=1 LOCALTEST=1 node wait.js",
|
||||
"wait:ci": "cross-env DEVMODE=1 CITEST=1 node wait.js",
|
||||
|
||||
"test:local": "cross-env DEVMODE=1 LOCALTEST=1 jest",
|
||||
"test:local:path": "cross-env DEVMODE=1 LOCALTEST=1 jest --runTestsByPath __tests__/data-duplicator.spec.js",
|
||||
|
||||
"test:ci": "cross-env DEVMODE=1 CITEST=1 jest --runInBand --json --outputFile=result.json --testLocationInResults",
|
||||
|
||||
"test:ci": "cross-env DEVMODE=1 CITEST=1 jest --runInBand --json --outputFile=result.json --testLocationInResults --detectOpenHandles --forceExit",
|
||||
"run:local": "docker-compose down && docker-compose up -d && yarn wait:local && yarn test:local"
|
||||
},
|
||||
"jest": {
|
||||
@@ -24,7 +21,8 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"cross-env": "^7.0.3",
|
||||
"jest": "^27.0.1"
|
||||
},
|
||||
"dependencies": {}
|
||||
"jest": "^27.0.1",
|
||||
"pino-pretty": "^11.2.2",
|
||||
"tmp": "^0.2.3"
|
||||
}
|
||||
}
|
||||
|
||||
30
integration-tests/setupTests.js
Normal file
@@ -0,0 +1,30 @@
|
||||
global.DBGATE_PACKAGES = {
|
||||
'dbgate-tools': require('dbgate-tools'),
|
||||
'dbgate-sqltree': require('dbgate-sqltree'),
|
||||
};
|
||||
|
||||
const { prettyFactory } = require('pino-pretty');
|
||||
const tmp = require('tmp');
|
||||
|
||||
const pretty = prettyFactory({
|
||||
colorize: true,
|
||||
translateTime: 'SYS:standard',
|
||||
ignore: 'pid,hostname',
|
||||
});
|
||||
|
||||
global.console = {
|
||||
...console,
|
||||
log: (...messages) => {
|
||||
try {
|
||||
const parsedMessage = JSON.parse(messages[0]);
|
||||
process.stdout.write(pretty(parsedMessage));
|
||||
} catch (error) {
|
||||
process.stdout.write(messages.join(' ') + '\n');
|
||||
}
|
||||
},
|
||||
debug: (...messages) => {
|
||||
process.stdout.write(messages.join(' ') + '\n');
|
||||
},
|
||||
};
|
||||
|
||||
tmp.setGracefulCleanup();
|
||||
@@ -1,4 +1,3 @@
|
||||
global.DBGATE_TOOLS = require('dbgate-tools');
|
||||
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
|
||||
const crypto = require('crypto');
|
||||
|
||||
@@ -44,20 +43,55 @@ async function connect(engine, database) {
|
||||
}
|
||||
}
|
||||
|
||||
const testWrapper = body => async (label, ...other) => {
|
||||
const engine = other[other.length - 1];
|
||||
const driver = requireEngineDriver(engine.connection);
|
||||
const conn = await connect(engine, randomDbName());
|
||||
try {
|
||||
await body(conn, driver, ...other);
|
||||
} finally {
|
||||
async function prepareConnection(engine, database) {
|
||||
const connection = extractConnection(engine);
|
||||
const driver = requireEngineDriver(connection);
|
||||
|
||||
if (engine.generateDbFile) {
|
||||
return {
|
||||
...connection,
|
||||
databaseFile: `dbtemp/${database}`,
|
||||
isPreparedOnly: true,
|
||||
};
|
||||
} else {
|
||||
const conn = await driver.connect(connection);
|
||||
await driver.query(conn, `CREATE DATABASE ${database}`);
|
||||
await driver.close(conn);
|
||||
|
||||
return {
|
||||
...connection,
|
||||
database,
|
||||
isPreparedOnly: true,
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
const testWrapper =
|
||||
body =>
|
||||
async (label, ...other) => {
|
||||
const engine = other[other.length - 1];
|
||||
const driver = requireEngineDriver(engine.connection);
|
||||
const conn = await connect(engine, randomDbName());
|
||||
try {
|
||||
await body(conn, driver, ...other);
|
||||
} finally {
|
||||
await driver.close(conn);
|
||||
}
|
||||
};
|
||||
|
||||
const testWrapperPrepareOnly =
|
||||
body =>
|
||||
async (label, ...other) => {
|
||||
const engine = other[other.length - 1];
|
||||
const driver = requireEngineDriver(engine.connection);
|
||||
const conn = await prepareConnection(engine, randomDbName());
|
||||
await body(conn, driver, ...other);
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
randomDbName,
|
||||
connect,
|
||||
extractConnection,
|
||||
testWrapper,
|
||||
testWrapperPrepareOnly,
|
||||
};
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
|
||||
const engines = require('./engines');
|
||||
const { extractConnection } = require('./tools');
|
||||
global.DBGATE_TOOLS = require('dbgate-tools');
|
||||
global.DBGATE_PACKAGES = {
|
||||
'dbgate-tools': require('dbgate-tools'),
|
||||
'dbgate-sqltree': require('dbgate-sqltree'),
|
||||
};
|
||||
|
||||
async function connectEngine(engine) {
|
||||
const connection = extractConnection(engine);
|
||||
|
||||
@@ -35,38 +35,40 @@ RIGHT=850
|
||||
|
||||
|
||||
|
||||
magick \
|
||||
\( \
|
||||
-size 1000x1000 -define gradient:direction=east 'gradient:#0050b3-#1890ff' \
|
||||
\( +clone -fill Black -colorize 100 \
|
||||
-fill White -stroke White -draw "arc $LEFT,750 $RIGHT,950 0,360" -draw "rectangle $LEFT,150 $RIGHT,850" \
|
||||
\) \
|
||||
-alpha off \
|
||||
-compose CopyOpacity -composite \
|
||||
\) \
|
||||
\( \
|
||||
-size 1000x1000 -define gradient:direction=east 'gradient:#096dd9-#40a9ff' \
|
||||
\( +clone -fill Black -colorize 100 \
|
||||
-fill White -draw "arc $LEFT,50 $RIGHT,250 0,360" \
|
||||
\) \
|
||||
-alpha off \
|
||||
-compose CopyOpacity -composite \
|
||||
\) \
|
||||
-compose Over -composite \
|
||||
-strokewidth $STROKE_WIDTH -stroke '#0050b3' -fill transparent \
|
||||
-draw "arc $LEFT,225 $RIGHT,425 0,180" \
|
||||
-draw "arc $LEFT,400 $RIGHT,600 0,180" \
|
||||
-draw "arc $LEFT,575 $RIGHT,775 0,180" \
|
||||
-draw "arc $LEFT,750 $RIGHT,950 0,180" \
|
||||
-draw "arc $LEFT,50 $RIGHT,250 0,360" \
|
||||
-draw "line $LEFT,150 $LEFT,850" \
|
||||
-draw "line $RIGHT,150 $RIGHT,850" \
|
||||
-fill '#fafafa' -stroke '#8c8c8c' -strokewidth 3 \
|
||||
-pointsize 800 -font './Mcbungus-Regular.ttf' \
|
||||
-gravity center \
|
||||
-draw 'text 0,100 "G"' \
|
||||
icon.png
|
||||
# magick \
|
||||
# \( \
|
||||
# -size 1000x1000 -define gradient:direction=east 'gradient:#0050b3-#1890ff' \
|
||||
# \( +clone -fill Black -colorize 100 \
|
||||
# -fill White -stroke White -draw "arc $LEFT,750 $RIGHT,950 0,360" -draw "rectangle $LEFT,150 $RIGHT,850" \
|
||||
# \) \
|
||||
# -alpha off \
|
||||
# -compose CopyOpacity -composite \
|
||||
# \) \
|
||||
# \( \
|
||||
# -size 1000x1000 -define gradient:direction=east 'gradient:#096dd9-#40a9ff' \
|
||||
# \( +clone -fill Black -colorize 100 \
|
||||
# -fill White -draw "arc $LEFT,50 $RIGHT,250 0,360" \
|
||||
# \) \
|
||||
# -alpha off \
|
||||
# -compose CopyOpacity -composite \
|
||||
# \) \
|
||||
# -compose Over -composite \
|
||||
# -strokewidth $STROKE_WIDTH -stroke '#0050b3' -fill transparent \
|
||||
# -draw "arc $LEFT,225 $RIGHT,425 0,180" \
|
||||
# -draw "arc $LEFT,400 $RIGHT,600 0,180" \
|
||||
# -draw "arc $LEFT,575 $RIGHT,775 0,180" \
|
||||
# -draw "arc $LEFT,750 $RIGHT,950 0,180" \
|
||||
# -draw "arc $LEFT,50 $RIGHT,250 0,360" \
|
||||
# -draw "line $LEFT,150 $LEFT,850" \
|
||||
# -draw "line $RIGHT,150 $RIGHT,850" \
|
||||
# -fill '#fafafa' -stroke '#8c8c8c' -strokewidth 3 \
|
||||
# -pointsize 800 -font './Mcbungus-Regular.ttf' \
|
||||
# -gravity center \
|
||||
# -draw 'text 0,100 "G"' \
|
||||
# icon.png
|
||||
|
||||
convert icon-input.png -background white -alpha remove -alpha off icon.png
|
||||
convert -size 1000x1000 xc:none -fill white -draw "circle 500,500 500,0" icon.png -compose SrcIn -composite icon.png
|
||||
|
||||
# magick \
|
||||
# \( \
|
||||
@@ -106,4 +108,6 @@ magick icon.png -resize 512x512! ../packages/web/public/logo512.png
|
||||
magick icon.png -define icon:auto-resize="256,128,96,64,48,32,16" ../packages/web/public/favicon.ico
|
||||
|
||||
convert icon.png -resize 800x800 -background transparent -gravity center -extent 1000x1000 iconmac.png
|
||||
magick composite iconmac.png macbg.png -resize 600x600! ../app/icon512-mac.png
|
||||
|
||||
convert macbg.png icon.png -compose SrcIn -composite -resize 600x600! ../app/icon512-mac.png
|
||||
# magick composite iconmac.png macbg.png -resize 600x600! ../app/icon512-mac.png
|
||||
|
||||
BIN
misc/icon-input.png
Normal file
|
After Width: | Height: | Size: 40 KiB |
BIN
misc/icon.png
|
Before Width: | Height: | Size: 134 KiB After Width: | Height: | Size: 111 KiB |
BIN
misc/iconmac.png
|
Before Width: | Height: | Size: 211 KiB After Width: | Height: | Size: 241 KiB |
20
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"private": true,
|
||||
"version": "5.3.2-beta.1",
|
||||
"version": "6.0.0-beta.3",
|
||||
"name": "dbgate-all",
|
||||
"workspaces": [
|
||||
"packages/*",
|
||||
@@ -20,6 +20,9 @@
|
||||
"start:api:singledb": "yarn workspace dbgate-api start:singledb | pino-pretty",
|
||||
"start:api:auth": "yarn workspace dbgate-api start:auth | pino-pretty",
|
||||
"start:api:dblogin": "yarn workspace dbgate-api start:dblogin | pino-pretty",
|
||||
"start:api:storage": "yarn workspace dbgate-api start:storage | pino-pretty",
|
||||
"start:api:storage:built": "yarn workspace dbgate-api start:storage:built | pino-pretty",
|
||||
"sync:pro": "cd sync && yarn start",
|
||||
"start:web": "yarn workspace dbgate-web dev",
|
||||
"start:sqltree": "yarn workspace dbgate-sqltree start",
|
||||
"start:tools": "yarn workspace dbgate-tools start",
|
||||
@@ -32,9 +35,12 @@
|
||||
"build:lib": "yarn build:sqltree && yarn build:tools && yarn build:filterparser && yarn build:datalib",
|
||||
"build:app": "yarn plugins:copydist && cd app && yarn install && yarn build",
|
||||
"build:api": "yarn workspace dbgate-api build",
|
||||
"build:web:docker": "yarn workspace dbgate-web build",
|
||||
"build:api:doc": "yarn workspace dbgate-api build:doc",
|
||||
"build:web": "yarn workspace dbgate-web build",
|
||||
"build:plugins:frontend": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn build:frontend",
|
||||
"build:plugins:backend": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn build:backend",
|
||||
"build:plugins:frontend:watch": "workspaces-run --parallel --only=\"dbgate-plugin-*\" -- yarn build:frontend:watch",
|
||||
"storage-json": "dbmodel model-to-json storage-db packages/api/src/storageModel.js --commonjs",
|
||||
"plugins:copydist": "workspaces-run --only=\"dbgate-plugin-*\" -- yarn copydist",
|
||||
"build:app:local": "yarn plugins:copydist && cd app && yarn build:local",
|
||||
"start:app:local": "cd app && yarn start:local",
|
||||
@@ -42,20 +48,20 @@
|
||||
"printSecrets": "node printSecrets",
|
||||
"generatePadFile": "node generatePadFile",
|
||||
"adjustPackageJson": "node adjustPackageJson",
|
||||
"fillNativeModules": "node fillNativeModules",
|
||||
"fillNativeModulesElectron": "node fillNativeModules --electron",
|
||||
"fillPackagedPlugins": "node fillPackagedPlugins",
|
||||
"resetPackagedPlugins": "node resetPackagedPlugins",
|
||||
"prettier": "prettier --write packages/api/src && prettier --write packages/datalib/src && prettier --write packages/filterparser/src && prettier --write packages/sqltree/src && prettier --write packages/tools/src && prettier --write packages/types && prettier --write packages/web/src && prettier --write app/src",
|
||||
"copy:docker:build": "copyfiles packages/api/dist/* docker -f && copyfiles packages/web/public/* docker -u 2 && copyfiles \"packages/web/public/**/*\" docker -u 2 && copyfiles \"plugins/dist/**/*\" docker/plugins -u 2",
|
||||
"install:sqlite:docker": "cd docker && yarn init --yes && yarn add better-sqlite3 && cd ..",
|
||||
"prepare:docker": "yarn plugins:copydist && yarn build:web:docker && yarn build:api && yarn copy:docker:build && yarn install:sqlite:docker",
|
||||
"copy:packer:build": "copyfiles packages/api/dist/* packer/build -f && copyfiles packages/web/public/* packer/build -u 2 && copyfiles \"packages/web/public/**/*\" packer/build -u 2 && copyfiles \"plugins/dist/**/*\" packer/build/plugins -u 2 && copyfiles packer/install-packages.sh packer/build -f",
|
||||
"install:drivers:docker": "node common/defineVolatileDependencies.js docker && cd docker && yarn install && cd ..",
|
||||
"prepare:docker": "yarn plugins:copydist && yarn build:web && yarn build:api && yarn copy:docker:build && yarn install:drivers:docker",
|
||||
"prepare:packer": "yarn plugins:copydist && yarn build:web && yarn build:api && yarn copy:packer:build",
|
||||
"start": "concurrently --kill-others-on-fail \"yarn start:api\" \"yarn start:web\"",
|
||||
"lib": "concurrently --kill-others-on-fail \"yarn start:sqltree\" \"yarn start:filterparser\" \"yarn start:datalib\" \"yarn start:tools\" \"yarn build:plugins:frontend:watch\"",
|
||||
"ts:api": "yarn workspace dbgate-api ts",
|
||||
"ts:web": "yarn workspace dbgate-web ts",
|
||||
"ts": "yarn ts:api && yarn ts:web",
|
||||
"postinstall": "yarn resetPackagedPlugins && yarn build:lib && patch-package && yarn fillNativeModules && yarn build:plugins:frontend",
|
||||
"postinstall": "yarn resetPackagedPlugins && yarn build:lib && patch-package && yarn build:plugins:frontend",
|
||||
"dbgate-serve": "node packages/dbgate/bin/dbgate-serve.js"
|
||||
},
|
||||
"dependencies": {
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
DEVMODE=1
|
||||
SHELL_SCRIPTING=1
|
||||
|
||||
# CLOUD_UPGRADE_FILE=c:\test\upg\upgrade.zip
|
||||
|
||||
# PERMISSIONS=~widgets/app,~widgets/plugins
|
||||
# DISABLE_SHELL=1
|
||||
# HIDE_APP_EDITOR=1
|
||||
|
||||
@@ -12,15 +12,14 @@ This example exports table Customer info CSV file.
|
||||
|
||||
```javascript
|
||||
const dbgateApi = require('dbgate-api');
|
||||
const dbgatePluginMssql = require("dbgate-plugin-mssql");
|
||||
const dbgatePluginMysql = require("dbgate-plugin-mysql");
|
||||
const dbgatePluginCsv = require("dbgate-plugin-csv");
|
||||
|
||||
dbgateApi.registerPlugins(dbgatePluginMssql);
|
||||
dbgateApi.registerPlugins(dbgatePluginMysql);
|
||||
|
||||
async function run() {
|
||||
const reader = await dbgateApi.tableReader({
|
||||
connection: { server: 'localhost', engine: 'mssql', user: 'sa', password: 'xxxx', database: 'Chinook' },
|
||||
schemaName: 'dbo',
|
||||
connection: { server: 'localhost', engine: 'mysql@dbgate-plugin-mysql', user: 'root', password: 'xxxx', database: 'Chinook' },
|
||||
pureName: 'Customer',
|
||||
});
|
||||
const writer = await dbgatePluginCsv.shellApi.writer({ fileName: 'Customer.csv' });
|
||||
@@ -59,8 +58,8 @@ Copies data from reader into writer. Reader and writer should be created from fu
|
||||
Reads table or view.
|
||||
```js
|
||||
const reader = await dbgateApi.tableReader({
|
||||
connection: { server: 'localhost', engine: 'mssql' | 'postgres' | 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
|
||||
schemaName: 'dbo',
|
||||
connection: { server: 'localhost', engine: 'postgres@dbgate-plugin-postgres', user: 'root', password: 'xxxx', database: 'DB_NAME' },
|
||||
schemaName: 'public',
|
||||
pureName: 'Customer',
|
||||
});
|
||||
```
|
||||
@@ -69,7 +68,7 @@ Reads table or view.
|
||||
Executes query and reads its result.
|
||||
```js
|
||||
const reader = await dbgateApi.tableReader({
|
||||
connection: { server: 'localhost', engine: 'mssql' | 'postgres' | 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
|
||||
connection: { server: 'localhost', engine: 'mysql@dbgate-plugin-mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
|
||||
sql: 'SELECT * FROM Album',
|
||||
});
|
||||
```
|
||||
@@ -81,8 +80,7 @@ Imports data into table. Options are optional, default values are false.
|
||||
- createIfNotExists - create table, if not exists
|
||||
```js
|
||||
const reader = await dbgateApi.tableWriter({
|
||||
connection: { server: 'localhost', engine: 'mssql' | 'postgres' | 'mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
|
||||
schemaName: 'dbo',
|
||||
connection: { server: 'localhost', engine: 'mysql@dbgate-plugin-mysql', user: 'root', password: 'xxxx', database: 'DB_NAME' },
|
||||
pureName: 'Customer',
|
||||
options: {
|
||||
dropIfExists: false,
|
||||
|
||||
11
packages/api/doctpl.hbs
Normal file
@@ -0,0 +1,11 @@
|
||||
---
|
||||
layout: docs
|
||||
title: API documentation
|
||||
order: 21
|
||||
docs_left: true
|
||||
hide_hero: true
|
||||
---
|
||||
|
||||
# API Documentation
|
||||
|
||||
{{>main}}
|
||||
71
packages/api/env/portal/.env
vendored
@@ -1,60 +1,47 @@
|
||||
DEVMODE=1
|
||||
|
||||
CONNECTIONS=mysql,postgres,postgres1,mongo,mongo2,mysqlssh,sqlite,relational
|
||||
CONNECTIONS=mysql,postgres,mongo,redis,mssql,oracle
|
||||
|
||||
LABEL_mysql=MySql localhost
|
||||
SERVER_mysql=localhost
|
||||
LABEL_mysql=MySql
|
||||
SERVER_mysql=dbgatedckstage1.sprinx.cz
|
||||
USER_mysql=root
|
||||
PASSWORD_mysql=test
|
||||
PORT_mysql=3307
|
||||
PASSWORD_mysql=Pwd2020Db
|
||||
PORT_mysql=3306
|
||||
ENGINE_mysql=mysql@dbgate-plugin-mysql
|
||||
|
||||
LABEL_postgres=Postgres localhost
|
||||
SERVER_postgres=localhost
|
||||
LABEL_postgres=Postgres
|
||||
SERVER_postgres=dbgatedckstage1.sprinx.cz
|
||||
USER_postgres=postgres
|
||||
PASSWORD_postgres=Pwd2020Db
|
||||
PORT_postgres=5432
|
||||
ENGINE_postgres=postgres@dbgate-plugin-postgres
|
||||
|
||||
LABEL_postgres1=Postgres localhost test DB
|
||||
SERVER_postgres1=localhost
|
||||
USER_postgres1=postgres
|
||||
PASSWORD_postgres1=Pwd2020Db
|
||||
PORT_postgres1=5432
|
||||
ENGINE_postgres1=postgres@dbgate-plugin-postgres
|
||||
DATABASE_postgres1=test
|
||||
|
||||
LABEL_mongo=Mongo URL
|
||||
URL_mongo=mongodb://localhost:27017
|
||||
LABEL_mongo=Mongo
|
||||
SERVER_mongo=dbgatedckstage1.sprinx.cz
|
||||
USER_mongo=root
|
||||
PASSWORD_mongo=Pwd2020Db
|
||||
PORT_mongo=27017
|
||||
ENGINE_mongo=mongo@dbgate-plugin-mongo
|
||||
|
||||
LABEL_mongo2=Mongo Server
|
||||
SERVER_mongo2=localhost
|
||||
ENGINE_mongo2=mongo@dbgate-plugin-mongo
|
||||
LABEL_redis=Redis
|
||||
SERVER_redis=dbgatedckstage1.sprinx.cz
|
||||
ENGINE_redis=redis@dbgate-plugin-redis
|
||||
PORT_redis=6379
|
||||
|
||||
LABEL_mysqlssh=MySql SSH
|
||||
SERVER_mysqlssh=localhost
|
||||
USER_mysqlssh=root
|
||||
PASSWORD_mysqlssh=xxx
|
||||
PORT_mysqlssh=3316
|
||||
ENGINE_mysqlssh=mysql@dbgate-plugin-mysql
|
||||
USE_SSH_mysqlssh=1
|
||||
SSH_HOST_mysqlssh=demo.dbgate.org
|
||||
SSH_PORT_mysqlssh=22
|
||||
SSH_MODE_mysqlssh=userPassword
|
||||
SSH_LOGIN_mysqlssh=root
|
||||
SSH_PASSWORD_mysqlssh=xxx
|
||||
LABEL_mssql=SQL Server
|
||||
SERVER_mssql=dbgatedckstage1.sprinx.cz
|
||||
USER_mssql=sa
|
||||
PASSWORD_mssql=Pwd2020Db
|
||||
PORT_mssql=1433
|
||||
ENGINE_mssql=mssql@dbgate-plugin-mssql
|
||||
|
||||
LABEL_sqlite=sqlite
|
||||
FILE_sqlite=/home/jena/.dbgate/files/sqlite/feeds.sqlite
|
||||
ENGINE_sqlite=sqlite@dbgate-plugin-sqlite
|
||||
|
||||
LABEL_relational=Relational dataset repo
|
||||
SERVER_relational=relational.fit.cvut.cz
|
||||
USER_relational=guest
|
||||
PASSWORD_relational=relational
|
||||
ENGINE_relational=mariadb@dbgate-plugin-mysql
|
||||
READONLY_relational=1
|
||||
LABEL_oracle=Oracle
|
||||
SERVER_oracle=dbgatedckstage1.sprinx.cz
|
||||
USER_oracle=system
|
||||
PASSWORD_oracle=Pwd2020Db
|
||||
PORT_oracle=1521
|
||||
ENGINE_oracle=oracle@dbgate-plugin-oracle
|
||||
SERVICE_NAME_oracle=xe
|
||||
|
||||
# SETTINGS_dataGrid.showHintColumns=1
|
||||
|
||||
|
||||
2
packages/api/env/singledb/.env
vendored
@@ -12,6 +12,6 @@ DBCONFIG_mysql=[{"name":"Chinook","connectionColor":"cyan"}]
|
||||
|
||||
|
||||
SINGLE_CONNECTION=mysql
|
||||
SINGLE_DATABASE=Chinook
|
||||
# SINGLE_DATABASE=Chinook
|
||||
|
||||
PERMISSIONS=files/charts/read
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "dbgate-api",
|
||||
"main": "src/index.js",
|
||||
"version": "5.0.0-alpha.1",
|
||||
"version": "6.0.0-alpha.1",
|
||||
"homepage": "https://dbgate.org/",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -16,20 +16,23 @@
|
||||
"export",
|
||||
"dbgate"
|
||||
],
|
||||
"files": [
|
||||
"src"
|
||||
],
|
||||
"dependencies": {
|
||||
"@aws-sdk/rds-signer": "^3.665.0",
|
||||
"activedirectory2": "^2.1.0",
|
||||
"async-lock": "^1.2.4",
|
||||
"async-lock": "^1.2.6",
|
||||
"axios": "^0.21.1",
|
||||
"body-parser": "^1.19.0",
|
||||
"bufferutil": "^4.0.1",
|
||||
"byline": "^5.0.0",
|
||||
"compare-versions": "^3.6.0",
|
||||
"cors": "^2.8.5",
|
||||
"cross-env": "^6.0.3",
|
||||
"dbgate-datalib": "^5.0.0-alpha.1",
|
||||
"dbgate-query-splitter": "^4.10.1",
|
||||
"dbgate-sqltree": "^5.0.0-alpha.1",
|
||||
"dbgate-tools": "^5.0.0-alpha.1",
|
||||
"dbgate-datalib": "^6.0.0-alpha.1",
|
||||
"dbgate-query-splitter": "^4.11.2",
|
||||
"dbgate-sqltree": "^6.0.0-alpha.1",
|
||||
"dbgate-tools": "^6.0.0-alpha.1",
|
||||
"debug": "^4.3.4",
|
||||
"diff": "^5.0.0",
|
||||
"diff2html": "^3.4.13",
|
||||
@@ -55,8 +58,10 @@
|
||||
"pinomin": "^1.0.4",
|
||||
"portfinder": "^1.0.28",
|
||||
"rimraf": "^3.0.0",
|
||||
"semver": "^7.6.3",
|
||||
"simple-encryptor": "^4.0.0",
|
||||
"ssh2": "^1.11.0",
|
||||
"stream-json": "^1.8.0",
|
||||
"tar": "^6.0.5"
|
||||
},
|
||||
"scripts": {
|
||||
@@ -66,23 +71,23 @@
|
||||
"start:auth": "env-cmd -f env/auth/.env node src/index.js --listen-api",
|
||||
"start:dblogin": "env-cmd -f env/dblogin/.env node src/index.js --listen-api",
|
||||
"start:filedb": "env-cmd node src/index.js /home/jena/test/chinook/Chinook.db --listen-api",
|
||||
"start:storage": "env-cmd -f env/storage/.env node src/index.js --listen-api",
|
||||
"start:storage:built": "env-cmd -f env/storage/.env cross-env DEVMODE= BUILTWEBMODE=1 node dist/bundle.js --listen-api",
|
||||
"start:singleconn": "env-cmd node src/index.js --server localhost --user root --port 3307 --engine mysql@dbgate-plugin-mysql --password test --listen-api",
|
||||
"ts": "tsc",
|
||||
"build": "webpack"
|
||||
"build": "webpack",
|
||||
"build:doc": "jsdoc2md --template doctpl.hbs ./src/shell/* > ../../../dbgate.github.io/_docs/apidoc.md"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/fs-extra": "^9.0.11",
|
||||
"@types/lodash": "^4.14.149",
|
||||
"dbgate-types": "^5.0.0-alpha.1",
|
||||
"dbgate-types": "^6.0.0-alpha.1",
|
||||
"env-cmd": "^10.1.0",
|
||||
"jsdoc-to-markdown": "^9.0.5",
|
||||
"node-loader": "^1.0.2",
|
||||
"nodemon": "^2.0.2",
|
||||
"typescript": "^4.4.3",
|
||||
"webpack": "^5.91.0",
|
||||
"webpack-cli": "^5.1.4"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"better-sqlite3": "9.6.0",
|
||||
"msnodesqlv8": "^4.2.1"
|
||||
}
|
||||
}
|
||||
|
||||
16
packages/api/src/auth/authCommon.js
Normal file
@@ -0,0 +1,16 @@
|
||||
const crypto = require('crypto');
|
||||
|
||||
const tokenSecret = crypto.randomUUID();
|
||||
|
||||
function getTokenLifetime() {
|
||||
return process.env.TOKEN_LIFETIME || '1d';
|
||||
}
|
||||
|
||||
function getTokenSecret() {
|
||||
return tokenSecret;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getTokenLifetime,
|
||||
getTokenSecret,
|
||||
};
|
||||
343
packages/api/src/auth/authProvider.js
Normal file
@@ -0,0 +1,343 @@
|
||||
const { getTokenSecret, getTokenLifetime } = require('./authCommon');
|
||||
const _ = require('lodash');
|
||||
const axios = require('axios');
|
||||
const { getLogger, getPredefinedPermissions } = require('dbgate-tools');
|
||||
|
||||
const AD = require('activedirectory2').promiseWrapper;
|
||||
const jwt = require('jsonwebtoken');
|
||||
|
||||
const logger = getLogger('authProvider');
|
||||
|
||||
class AuthProviderBase {
|
||||
amoid = 'none';
|
||||
|
||||
async login(login, password, options = undefined) {
|
||||
return {
|
||||
accessToken: jwt.sign(
|
||||
{
|
||||
amoid: this.amoid,
|
||||
},
|
||||
getTokenSecret(),
|
||||
{ expiresIn: getTokenLifetime() }
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
oauthToken(params) {
|
||||
return {};
|
||||
}
|
||||
|
||||
getCurrentLogin(req) {
|
||||
const login = req?.user?.login ?? req?.auth?.user ?? null;
|
||||
return login;
|
||||
}
|
||||
|
||||
isUserLoggedIn(req) {
|
||||
return !!req?.user || !!req?.auth;
|
||||
}
|
||||
|
||||
getCurrentPermissions(req) {
|
||||
const login = this.getCurrentLogin(req);
|
||||
const permissions = process.env[`LOGIN_PERMISSIONS_${login}`];
|
||||
return permissions || process.env.PERMISSIONS;
|
||||
}
|
||||
|
||||
getLoginPageConnections() {
|
||||
return null;
|
||||
}
|
||||
|
||||
getSingleConnectionId(req) {
|
||||
return null;
|
||||
}
|
||||
|
||||
toJson() {
|
||||
return {
|
||||
amoid: this.amoid,
|
||||
workflowType: 'anonymous',
|
||||
name: 'Anonymous',
|
||||
};
|
||||
}
|
||||
|
||||
async redirect({ state }) {
|
||||
return {
|
||||
status: 'error',
|
||||
};
|
||||
}
|
||||
|
||||
async getLogoutUrl() {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
class OAuthProvider extends AuthProviderBase {
|
||||
amoid = 'oauth';
|
||||
|
||||
async oauthToken(params) {
|
||||
const { redirectUri, code } = params;
|
||||
|
||||
const scopeParam = process.env.OAUTH_SCOPE ? `&scope=${process.env.OAUTH_SCOPE}` : '';
|
||||
const resp = await axios.default.post(
|
||||
`${process.env.OAUTH_TOKEN}`,
|
||||
`grant_type=authorization_code&code=${encodeURIComponent(code)}&redirect_uri=${encodeURIComponent(
|
||||
redirectUri
|
||||
)}&client_id=${process.env.OAUTH_CLIENT_ID}&client_secret=${process.env.OAUTH_CLIENT_SECRET}${scopeParam}`
|
||||
);
|
||||
|
||||
const { access_token, refresh_token, id_token } = resp.data;
|
||||
|
||||
let payload = jwt.decode(access_token);
|
||||
|
||||
// Fallback to id_token in case the access_token is not a JWT
|
||||
// https://www.oauth.com/oauth2-servers/access-tokens/
|
||||
// https://github.com/dbgate/dbgate/issues/727
|
||||
if (!payload && id_token) {
|
||||
payload = jwt.decode(id_token);
|
||||
}
|
||||
|
||||
logger.info({ payload }, 'User payload returned from OAUTH');
|
||||
|
||||
const login =
|
||||
process.env.OAUTH_LOGIN_FIELD && payload && payload[process.env.OAUTH_LOGIN_FIELD]
|
||||
? payload[process.env.OAUTH_LOGIN_FIELD]
|
||||
: 'oauth';
|
||||
|
||||
if (
|
||||
process.env.OAUTH_ALLOWED_LOGINS &&
|
||||
!process.env.OAUTH_ALLOWED_LOGINS.split(',').find(x => x.toLowerCase().trim() == login.toLowerCase().trim())
|
||||
) {
|
||||
return { error: `Username ${login} not allowed to log in` };
|
||||
}
|
||||
|
||||
const groups =
|
||||
process.env.OAUTH_GROUP_FIELD && payload && payload[process.env.OAUTH_GROUP_FIELD]
|
||||
? payload[process.env.OAUTH_GROUP_FIELD]
|
||||
: [];
|
||||
|
||||
const allowedGroups = process.env.OAUTH_ALLOWED_GROUPS
|
||||
? process.env.OAUTH_ALLOWED_GROUPS.split(',').map(group => group.toLowerCase().trim())
|
||||
: [];
|
||||
|
||||
if (process.env.OAUTH_ALLOWED_GROUPS && !groups.some(group => allowedGroups.includes(group.toLowerCase().trim()))) {
|
||||
return { error: `Username ${login} does not belong to an allowed group` };
|
||||
}
|
||||
|
||||
if (access_token) {
|
||||
return {
|
||||
accessToken: jwt.sign({ login }, getTokenSecret(), { expiresIn: getTokenLifetime() }),
|
||||
};
|
||||
}
|
||||
|
||||
return { error: 'Token not found' };
|
||||
}
|
||||
|
||||
async getLogoutUrl() {
|
||||
return process.env.OAUTH_LOGOUT;
|
||||
}
|
||||
|
||||
toJson() {
|
||||
return {
|
||||
...super.toJson(),
|
||||
workflowType: 'redirect',
|
||||
name: 'OAuth 2.0',
|
||||
};
|
||||
}
|
||||
|
||||
redirect({ state, redirectUri }) {
|
||||
const scopeParam = process.env.OAUTH_SCOPE ? `&scope=${process.env.OAUTH_SCOPE}` : '';
|
||||
return {
|
||||
status: 'ok',
|
||||
uri: `${process.env.OAUTH_AUTH}?client_id=${
|
||||
process.env.OAUTH_CLIENT_ID
|
||||
}&response_type=code&redirect_uri=${encodeURIComponent(redirectUri)}&state=${encodeURIComponent(
|
||||
state
|
||||
)}${scopeParam}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
class ADProvider extends AuthProviderBase {
|
||||
amoid = 'ad';
|
||||
|
||||
async login(login, password, options = undefined) {
|
||||
const adConfig = {
|
||||
url: process.env.AD_URL,
|
||||
baseDN: process.env.AD_BASEDN,
|
||||
username: process.env.AD_USERNAME,
|
||||
password: process.env.AD_PASSWORD,
|
||||
};
|
||||
const ad = new AD(adConfig);
|
||||
try {
|
||||
const res = await ad.authenticate(login, password);
|
||||
if (!res) {
|
||||
return { error: 'Login failed' };
|
||||
}
|
||||
if (
|
||||
process.env.AD_ALLOWED_LOGINS &&
|
||||
!process.env.AD_ALLOWED_LOGINS.split(',').find(x => x.toLowerCase().trim() == login.toLowerCase().trim())
|
||||
) {
|
||||
return { error: `Username ${login} not allowed to log in` };
|
||||
}
|
||||
return {
|
||||
accessToken: jwt.sign(
|
||||
{
|
||||
amoid: this.amoid,
|
||||
login,
|
||||
},
|
||||
getTokenSecret(),
|
||||
{ expiresIn: getTokenLifetime() }
|
||||
),
|
||||
};
|
||||
} catch (e) {
|
||||
return { error: 'Login failed' };
|
||||
}
|
||||
}
|
||||
|
||||
toJson() {
|
||||
return {
|
||||
...super.toJson(),
|
||||
workflowType: 'credentials',
|
||||
name: 'Active Directory',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
class LoginsProvider extends AuthProviderBase {
|
||||
amoid = 'logins';
|
||||
|
||||
async login(login, password, options = undefined) {
|
||||
if (login && password && process.env['LOGIN'] == login && process.env['PASSWORD'] == password) {
|
||||
return {
|
||||
accessToken: jwt.sign(
|
||||
{
|
||||
amoid: this.amoid,
|
||||
login,
|
||||
},
|
||||
getTokenSecret(),
|
||||
{ expiresIn: getTokenLifetime() }
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (password == process.env[`LOGIN_PASSWORD_${login}`]) {
|
||||
return {
|
||||
accessToken: jwt.sign(
|
||||
{
|
||||
amoid: this.amoid,
|
||||
login,
|
||||
},
|
||||
getTokenSecret(),
|
||||
{ expiresIn: getTokenLifetime() }
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
return { error: 'Invalid credentials' };
|
||||
}
|
||||
|
||||
toJson() {
|
||||
return {
|
||||
...super.toJson(),
|
||||
workflowType: 'credentials',
|
||||
name: 'Login & Password',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
class DenyAllProvider extends AuthProviderBase {
|
||||
amoid = 'deny';
|
||||
|
||||
async login(login, password, options = undefined) {
|
||||
return { error: 'Login not allowed' };
|
||||
}
|
||||
|
||||
toJson() {
|
||||
return {
|
||||
...super.toJson(),
|
||||
workflowType: 'credentials',
|
||||
name: 'Deny all',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function hasEnvLogins() {
|
||||
if (process.env.LOGIN && process.env.PASSWORD) {
|
||||
return true;
|
||||
}
|
||||
for (const key in process.env) {
|
||||
if (key.startsWith('LOGIN_PASSWORD_')) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function detectEnvAuthProvider() {
|
||||
if (process.env.AUTH_PROVIDER) {
|
||||
return process.env.AUTH_PROVIDER;
|
||||
}
|
||||
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
return 'denyall';
|
||||
}
|
||||
if (process.env.OAUTH_AUTH) {
|
||||
return 'oauth';
|
||||
}
|
||||
if (process.env.AD_URL) {
|
||||
return 'ad';
|
||||
}
|
||||
if (hasEnvLogins()) {
|
||||
return 'logins';
|
||||
}
|
||||
return 'none';
|
||||
}
|
||||
|
||||
function createEnvAuthProvider() {
|
||||
const authProvider = detectEnvAuthProvider();
|
||||
switch (authProvider) {
|
||||
case 'oauth':
|
||||
return new OAuthProvider();
|
||||
case 'ad':
|
||||
return new ADProvider();
|
||||
case 'logins':
|
||||
return new LoginsProvider();
|
||||
case 'denyall':
|
||||
return new DenyAllProvider();
|
||||
default:
|
||||
return new AuthProviderBase();
|
||||
}
|
||||
}
|
||||
|
||||
let defaultAuthProvider = createEnvAuthProvider();
|
||||
let authProviders = [defaultAuthProvider];
|
||||
|
||||
function getAuthProviders() {
|
||||
return authProviders;
|
||||
}
|
||||
|
||||
function getAuthProviderById(amoid) {
|
||||
return authProviders.find(x => x.amoid == amoid);
|
||||
}
|
||||
|
||||
function getDefaultAuthProvider() {
|
||||
return defaultAuthProvider;
|
||||
}
|
||||
|
||||
function getAuthProviderFromReq(req) {
|
||||
const authProviderId = req?.auth?.amoid || req?.user?.amoid;
|
||||
return getAuthProviderById(authProviderId) ?? getDefaultAuthProvider();
|
||||
}
|
||||
|
||||
function setAuthProviders(value, defaultProvider = null) {
|
||||
authProviders = value;
|
||||
defaultAuthProvider = defaultProvider || value[0];
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
AuthProviderBase,
|
||||
detectEnvAuthProvider,
|
||||
getAuthProviders,
|
||||
getDefaultAuthProvider,
|
||||
setAuthProviders,
|
||||
getAuthProviderById,
|
||||
getAuthProviderFromReq,
|
||||
};
|
||||
@@ -6,7 +6,7 @@ const { archivedir, clearArchiveLinksCache, resolveArchiveFolder } = require('..
|
||||
const socket = require('../utility/socket');
|
||||
const loadFilesRecursive = require('../utility/loadFilesRecursive');
|
||||
const getJslFileName = require('../utility/getJslFileName');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const dbgateApi = require('../shell');
|
||||
const jsldata = require('./jsldata');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
@@ -74,7 +74,7 @@ module.exports = {
|
||||
...fileType('.matview.sql', 'matview.sql'),
|
||||
];
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error reading archive files');
|
||||
logger.error(extractErrorLogData(err), 'Error reading archive files');
|
||||
return [];
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,24 +1,20 @@
|
||||
const axios = require('axios');
|
||||
const jwt = require('jsonwebtoken');
|
||||
const getExpressPath = require('../utility/getExpressPath');
|
||||
const { getLogins } = require('../utility/hasPermission');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const AD = require('activedirectory2').promiseWrapper;
|
||||
const crypto = require('crypto');
|
||||
const { getTokenSecret, getTokenLifetime } = require('../auth/authCommon');
|
||||
const {
|
||||
getAuthProviderFromReq,
|
||||
getAuthProviders,
|
||||
getDefaultAuthProvider,
|
||||
getAuthProviderById,
|
||||
} = require('../auth/authProvider');
|
||||
const storage = require('./storage');
|
||||
|
||||
const logger = getLogger('auth');
|
||||
|
||||
const tokenSecret = crypto.randomUUID();
|
||||
|
||||
function shouldAuthorizeApi() {
|
||||
const logins = getLogins();
|
||||
return !!process.env.OAUTH_AUTH || !!process.env.AD_URL || (!!logins && !process.env.BASIC_AUTH);
|
||||
}
|
||||
|
||||
function getTokenLifetime() {
|
||||
return process.env.TOKEN_LIFETIME || '1d';
|
||||
}
|
||||
|
||||
function unauthorizedResponse(req, res, text) {
|
||||
// if (req.path == getExpressPath('/config/get-settings')) {
|
||||
// return res.json({});
|
||||
@@ -26,15 +22,38 @@ function unauthorizedResponse(req, res, text) {
|
||||
// if (req.path == getExpressPath('/connections/list')) {
|
||||
// return res.json([]);
|
||||
// }
|
||||
return res.sendStatus(401).send(text);
|
||||
|
||||
return res.status(401).send(text);
|
||||
}
|
||||
|
||||
function authMiddleware(req, res, next) {
|
||||
const SKIP_AUTH_PATHS = ['/config/get', '/auth/oauth-token', '/auth/login', '/stream'];
|
||||
const SKIP_AUTH_PATHS = [
|
||||
'/config/get',
|
||||
'/config/logout',
|
||||
'/config/get-settings',
|
||||
'/config/save-license-key',
|
||||
'/auth/oauth-token',
|
||||
'/auth/login',
|
||||
'/auth/redirect',
|
||||
'/stream',
|
||||
'/storage/get-connections-for-login-page',
|
||||
'/storage/set-admin-password',
|
||||
'/auth/get-providers',
|
||||
'/connections/dblogin-web',
|
||||
'/connections/dblogin-app',
|
||||
'/connections/dblogin-auth',
|
||||
'/connections/dblogin-auth-token',
|
||||
];
|
||||
|
||||
if (!shouldAuthorizeApi()) {
|
||||
// console.log('********************* getAuthProvider()', getAuthProvider());
|
||||
|
||||
// const isAdminPage = req.headers['x-is-admin-page'] == 'true';
|
||||
|
||||
if (process.env.BASIC_AUTH) {
|
||||
// API is not authorized for basic auth
|
||||
return next();
|
||||
}
|
||||
|
||||
let skipAuth = !!SKIP_AUTH_PATHS.find(x => req.path == getExpressPath(x));
|
||||
|
||||
const authHeader = req.headers.authorization;
|
||||
@@ -46,15 +65,16 @@ function authMiddleware(req, res, next) {
|
||||
}
|
||||
const token = authHeader.split(' ')[1];
|
||||
try {
|
||||
const decoded = jwt.verify(token, tokenSecret);
|
||||
const decoded = jwt.verify(token, getTokenSecret());
|
||||
req.user = decoded;
|
||||
return next();
|
||||
} catch (err) {
|
||||
if (skipAuth) {
|
||||
req.isInvalidToken = true;
|
||||
return next();
|
||||
}
|
||||
|
||||
logger.error({ err }, 'Sending invalid token error');
|
||||
logger.error(extractErrorLogData(err), 'Sending invalid token error');
|
||||
|
||||
return unauthorizedResponse(req, res, 'invalid token');
|
||||
}
|
||||
@@ -63,106 +83,54 @@ function authMiddleware(req, res, next) {
|
||||
module.exports = {
|
||||
oauthToken_meta: true,
|
||||
async oauthToken(params) {
|
||||
const { redirectUri, code } = params;
|
||||
|
||||
const scopeParam = process.env.OAUTH_SCOPE ? `&scope=${process.env.OAUTH_SCOPE}` : '';
|
||||
const resp = await axios.default.post(
|
||||
`${process.env.OAUTH_TOKEN}`,
|
||||
`grant_type=authorization_code&code=${encodeURIComponent(code)}&redirect_uri=${encodeURIComponent(
|
||||
redirectUri
|
||||
)}&client_id=${process.env.OAUTH_CLIENT_ID}&client_secret=${process.env.OAUTH_CLIENT_SECRET}${scopeParam}`
|
||||
);
|
||||
|
||||
const { access_token, refresh_token } = resp.data;
|
||||
|
||||
const payload = jwt.decode(access_token);
|
||||
|
||||
logger.info({ payload }, 'User payload returned from OAUTH');
|
||||
|
||||
const login =
|
||||
process.env.OAUTH_LOGIN_FIELD && payload && payload[process.env.OAUTH_LOGIN_FIELD]
|
||||
? payload[process.env.OAUTH_LOGIN_FIELD]
|
||||
: 'oauth';
|
||||
|
||||
if (
|
||||
process.env.OAUTH_ALLOWED_LOGINS &&
|
||||
!process.env.OAUTH_ALLOWED_LOGINS.split(',').find(x => x.toLowerCase().trim() == login.toLowerCase().trim())
|
||||
) {
|
||||
return { error: `Username ${login} not allowed to log in` };
|
||||
}
|
||||
|
||||
const groups =
|
||||
process.env.OAUTH_GROUP_FIELD && payload && payload[process.env.OAUTH_GROUP_FIELD]
|
||||
? payload[process.env.OAUTH_GROUP_FIELD]
|
||||
: [];
|
||||
|
||||
const allowedGroups =
|
||||
process.env.OAUTH_ALLOWED_GROUPS
|
||||
? process.env.OAUTH_ALLOWED_GROUPS.split(',').map(group => group.toLowerCase().trim())
|
||||
: [];
|
||||
|
||||
if (
|
||||
process.env.OAUTH_ALLOWED_GROUPS &&
|
||||
!groups.some(group => allowedGroups.includes(group.toLowerCase().trim()))
|
||||
) {
|
||||
return { error: `Username ${login} does not belong to an allowed group` };
|
||||
}
|
||||
|
||||
if (access_token) {
|
||||
return {
|
||||
accessToken: jwt.sign({ login }, tokenSecret, { expiresIn: getTokenLifetime() }),
|
||||
};
|
||||
}
|
||||
|
||||
return { error: 'Token not found' };
|
||||
const { amoid } = params;
|
||||
return getAuthProviderById(amoid).oauthToken(params);
|
||||
},
|
||||
login_meta: true,
|
||||
async login(params) {
|
||||
const { login, password } = params;
|
||||
const { amoid, login, password, isAdminPage } = params;
|
||||
|
||||
if (process.env.AD_URL) {
|
||||
const adConfig = {
|
||||
url: process.env.AD_URL,
|
||||
baseDN: process.env.AD_BASEDN,
|
||||
username: process.env.AD_USERNAME,
|
||||
password: process.env.AD_PASSOWRD,
|
||||
};
|
||||
const ad = new AD(adConfig);
|
||||
try {
|
||||
const res = await ad.authenticate(login, password);
|
||||
if (!res) {
|
||||
return { error: 'Login failed' };
|
||||
}
|
||||
if (
|
||||
process.env.AD_ALLOWED_LOGINS &&
|
||||
!process.env.AD_ALLOWED_LOGINS.split(',').find(x => x.toLowerCase().trim() == login.toLowerCase().trim())
|
||||
) {
|
||||
return { error: `Username ${login} not allowed to log in` };
|
||||
}
|
||||
if (isAdminPage) {
|
||||
let adminPassword = process.env.ADMIN_PASSWORD;
|
||||
if (!adminPassword) {
|
||||
const adminConfig = await storage.readConfig({ group: 'admin' });
|
||||
adminPassword = adminConfig?.adminPassword;
|
||||
}
|
||||
if (adminPassword && adminPassword == password) {
|
||||
return {
|
||||
accessToken: jwt.sign({ login }, tokenSecret, { expiresIn: getTokenLifetime() }),
|
||||
};
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Failed active directory authentization');
|
||||
return {
|
||||
error: err.message,
|
||||
accessToken: jwt.sign(
|
||||
{
|
||||
login: 'superadmin',
|
||||
permissions: await storage.loadSuperadminPermissions(),
|
||||
roleId: -3,
|
||||
},
|
||||
getTokenSecret(),
|
||||
{
|
||||
expiresIn: getTokenLifetime(),
|
||||
}
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
return { error: 'Login failed' };
|
||||
}
|
||||
|
||||
const logins = getLogins();
|
||||
if (!logins) {
|
||||
return { error: 'Logins not configured' };
|
||||
}
|
||||
const foundLogin = logins.find(x => x.login == login);
|
||||
if (foundLogin && foundLogin.password && foundLogin.password == password) {
|
||||
return {
|
||||
accessToken: jwt.sign({ login }, tokenSecret, { expiresIn: getTokenLifetime() }),
|
||||
};
|
||||
}
|
||||
return { error: 'Invalid credentials' };
|
||||
return getAuthProviderById(amoid).login(login, password);
|
||||
},
|
||||
|
||||
getProviders_meta: true,
|
||||
getProviders() {
|
||||
return {
|
||||
providers: getAuthProviders().map(x => x.toJson()),
|
||||
default: getDefaultAuthProvider()?.amoid,
|
||||
};
|
||||
},
|
||||
|
||||
redirect_meta: true,
|
||||
async redirect(params) {
|
||||
const { amoid } = params;
|
||||
return getAuthProviderById(amoid).redirect(params);
|
||||
},
|
||||
|
||||
authMiddleware,
|
||||
shouldAuthorizeApi,
|
||||
};
|
||||
|
||||
@@ -3,14 +3,21 @@ const os = require('os');
|
||||
const path = require('path');
|
||||
const axios = require('axios');
|
||||
const { datadir, getLogsFilePath } = require('../utility/directories');
|
||||
const { hasPermission, getLogins } = require('../utility/hasPermission');
|
||||
const { hasPermission } = require('../utility/hasPermission');
|
||||
const socket = require('../utility/socket');
|
||||
const _ = require('lodash');
|
||||
const AsyncLock = require('async-lock');
|
||||
const jwt = require('jsonwebtoken');
|
||||
|
||||
const currentVersion = require('../currentVersion');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const connections = require('../controllers/connections');
|
||||
const { getAuthProviderFromReq } = require('../auth/authProvider');
|
||||
const { checkLicense, checkLicenseKey } = require('../utility/checkLicense');
|
||||
const storage = require('./storage');
|
||||
const { getAuthProxyUrl } = require('../utility/authProxy');
|
||||
const { getPublicHardwareFingerprint } = require('../utility/hardwareFingerprint');
|
||||
const { extractErrorMessage } = require('dbgate-tools');
|
||||
|
||||
const lock = new AsyncLock();
|
||||
|
||||
@@ -27,31 +34,78 @@ module.exports = {
|
||||
|
||||
get_meta: true,
|
||||
async get(_params, req) {
|
||||
const logins = getLogins();
|
||||
const loginName =
|
||||
req && req.user && req.user.login ? req.user.login : req && req.auth && req.auth.user ? req.auth.user : null;
|
||||
const login = logins && loginName ? logins.find(x => x.login == loginName) : null;
|
||||
const permissions = login ? login.permissions : process.env.PERMISSIONS;
|
||||
const authProvider = getAuthProviderFromReq(req);
|
||||
const login = authProvider.getCurrentLogin(req);
|
||||
const permissions = authProvider.getCurrentPermissions(req);
|
||||
const isUserLoggedIn = authProvider.isUserLoggedIn(req);
|
||||
|
||||
return {
|
||||
const singleConid = authProvider.getSingleConnectionId(req);
|
||||
const storageConnectionError = storage.getStorageConnectionError();
|
||||
|
||||
const singleConnection =
|
||||
singleConid && !storageConnectionError
|
||||
? await connections.getCore({ conid: singleConid })
|
||||
: connections.singleConnection;
|
||||
|
||||
let configurationError = null;
|
||||
if (process.env.STORAGE_DATABASE && process.env.BASIC_AUTH) {
|
||||
configurationError =
|
||||
'Basic authentization is not allowed, when using storage. Cannot use both STORAGE_DATABASE and BASIC_AUTH';
|
||||
}
|
||||
|
||||
if (storageConnectionError && !configurationError) {
|
||||
configurationError = extractErrorMessage(storageConnectionError);
|
||||
}
|
||||
|
||||
const checkedLicense = storageConnectionError ? null : await checkLicense();
|
||||
const isLicenseValid = checkedLicense?.status == 'ok';
|
||||
const logoutUrl = storageConnectionError ? null : await authProvider.getLogoutUrl();
|
||||
const adminConfig = storageConnectionError ? null : await storage.readConfig({ group: 'admin' });
|
||||
|
||||
storage.startRefreshLicense();
|
||||
|
||||
const isAdminPasswordMissing = !!(
|
||||
process.env.STORAGE_DATABASE &&
|
||||
!process.env.ADMIN_PASSWORD &&
|
||||
!process.env.BASIC_AUTH &&
|
||||
!adminConfig?.adminPasswordState
|
||||
);
|
||||
|
||||
const configResult = {
|
||||
runAsPortal: !!connections.portalConnections,
|
||||
singleDbConnection: connections.singleDbConnection,
|
||||
singleConnection: connections.singleConnection,
|
||||
singleConnection: singleConnection,
|
||||
isUserLoggedIn,
|
||||
// hideAppEditor: !!process.env.HIDE_APP_EDITOR,
|
||||
allowShellConnection: platformInfo.allowShellConnection,
|
||||
allowShellScripting: platformInfo.allowShellScripting,
|
||||
isDocker: platformInfo.isDocker,
|
||||
isElectron: platformInfo.isElectron,
|
||||
isLicenseValid,
|
||||
isLicenseExpired: checkedLicense?.isExpired,
|
||||
trialDaysLeft: checkedLicense?.licenseTypeObj?.isTrial && !checkedLicense?.isExpired ? checkedLicense?.daysLeft : null,
|
||||
checkedLicense,
|
||||
configurationError,
|
||||
logoutUrl,
|
||||
permissions,
|
||||
login,
|
||||
oauth: process.env.OAUTH_AUTH,
|
||||
oauthClient: process.env.OAUTH_CLIENT_ID,
|
||||
oauthScope: process.env.OAUTH_SCOPE,
|
||||
oauthLogout: process.env.OAUTH_LOGOUT,
|
||||
isLoginForm: !!process.env.AD_URL || (!!logins && !process.env.BASIC_AUTH),
|
||||
// ...additionalConfigProps,
|
||||
isBasicAuth: !!process.env.BASIC_AUTH,
|
||||
isAdminLoginForm: !!(
|
||||
process.env.STORAGE_DATABASE &&
|
||||
(process.env.ADMIN_PASSWORD || adminConfig?.adminPasswordState == 'set') &&
|
||||
!process.env.BASIC_AUTH
|
||||
),
|
||||
isAdminPasswordMissing,
|
||||
isInvalidToken: req?.isInvalidToken,
|
||||
adminPasswordState: adminConfig?.adminPasswordState,
|
||||
storageDatabase: process.env.STORAGE_DATABASE,
|
||||
logsFilePath: getLogsFilePath(),
|
||||
connectionsFilePath: path.join(datadir(), 'connections.jsonl'),
|
||||
...currentVersion,
|
||||
};
|
||||
|
||||
return configResult;
|
||||
},
|
||||
|
||||
logout_meta: {
|
||||
@@ -75,6 +129,12 @@ module.exports = {
|
||||
return res;
|
||||
},
|
||||
|
||||
deleteSettings_meta: true,
|
||||
async deleteSettings() {
|
||||
await fs.unlink(path.join(datadir(), 'settings.json'));
|
||||
return true;
|
||||
},
|
||||
|
||||
fillMissingSettings(value) {
|
||||
const res = {
|
||||
...value,
|
||||
@@ -97,12 +157,80 @@ module.exports = {
|
||||
async loadSettings() {
|
||||
try {
|
||||
const settingsText = await fs.readFile(path.join(datadir(), 'settings.json'), { encoding: 'utf-8' });
|
||||
return this.fillMissingSettings(JSON.parse(settingsText));
|
||||
return {
|
||||
...this.fillMissingSettings(JSON.parse(settingsText)),
|
||||
'other.licenseKey': platformInfo.isElectron ? await this.loadLicenseKey() : undefined,
|
||||
};
|
||||
} catch (err) {
|
||||
return this.fillMissingSettings({});
|
||||
}
|
||||
},
|
||||
|
||||
async loadLicenseKey() {
|
||||
try {
|
||||
const licenseKey = await fs.readFile(path.join(datadir(), 'license.key'), { encoding: 'utf-8' });
|
||||
return licenseKey;
|
||||
} catch (err) {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
|
||||
saveLicenseKey_meta: true,
|
||||
async saveLicenseKey({ licenseKey }) {
|
||||
const decoded = jwt.decode(licenseKey);
|
||||
if (!decoded) {
|
||||
return {
|
||||
status: 'error',
|
||||
errorMessage: 'Invalid license key',
|
||||
};
|
||||
}
|
||||
|
||||
const { exp } = decoded;
|
||||
if (exp * 1000 < Date.now()) {
|
||||
return {
|
||||
status: 'error',
|
||||
errorMessage: 'License key is expired',
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
await storage.writeConfig({ group: 'license', config: { licenseKey } });
|
||||
// await storageWriteConfig('license', { licenseKey });
|
||||
} else {
|
||||
await fs.writeFile(path.join(datadir(), 'license.key'), licenseKey);
|
||||
}
|
||||
socket.emitChanged(`config-changed`);
|
||||
return { status: 'ok' };
|
||||
} catch (err) {
|
||||
return {
|
||||
status: 'error',
|
||||
errorMessage: err.message,
|
||||
};
|
||||
}
|
||||
},
|
||||
|
||||
startTrial_meta: true,
|
||||
async startTrial() {
|
||||
try {
|
||||
const fingerprint = await getPublicHardwareFingerprint();
|
||||
|
||||
const resp = await axios.default.post(`${getAuthProxyUrl()}/trial-license`, {
|
||||
type: 'premium-trial',
|
||||
days: 30,
|
||||
fingerprint,
|
||||
});
|
||||
const { token } = resp.data;
|
||||
|
||||
return await this.saveLicenseKey({ licenseKey: token });
|
||||
} catch (err) {
|
||||
return {
|
||||
status: 'error',
|
||||
errorMessage: err.message,
|
||||
};
|
||||
}
|
||||
},
|
||||
|
||||
updateSettings_meta: true,
|
||||
async updateSettings(values, req) {
|
||||
if (!hasPermission(`settings/change`, req)) return false;
|
||||
@@ -112,10 +240,16 @@ module.exports = {
|
||||
try {
|
||||
const updated = {
|
||||
...currentValue,
|
||||
...values,
|
||||
..._.omit(values, ['other.licenseKey']),
|
||||
};
|
||||
await fs.writeFile(path.join(datadir(), 'settings.json'), JSON.stringify(updated, undefined, 2));
|
||||
// this.settingsValue = updated;
|
||||
|
||||
if (currentValue['other.licenseKey'] != values['other.licenseKey']) {
|
||||
await this.saveLicenseKey({ licenseKey: values['other.licenseKey'] });
|
||||
socket.emitChanged(`config-changed`);
|
||||
}
|
||||
|
||||
socket.emitChanged(`settings-changed`);
|
||||
return updated;
|
||||
} catch (err) {
|
||||
@@ -130,4 +264,10 @@ module.exports = {
|
||||
const resp = await axios.default.get('https://raw.githubusercontent.com/dbgate/dbgate/master/CHANGELOG.md');
|
||||
return resp.data;
|
||||
},
|
||||
|
||||
checkLicense_meta: true,
|
||||
async checkLicense({ licenseKey }) {
|
||||
const resp = await checkLicenseKey(licenseKey);
|
||||
return resp;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -12,10 +12,13 @@ const { pickSafeConnectionInfo } = require('../utility/crypting');
|
||||
const JsonLinesDatabase = require('../utility/JsonLinesDatabase');
|
||||
|
||||
const processArgs = require('../utility/processArgs');
|
||||
const { safeJsonParse, getLogger } = require('dbgate-tools');
|
||||
const { safeJsonParse, getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const { connectionHasPermission, testConnectionPermission } = require('../utility/hasPermission');
|
||||
const pipeForkLogs = require('../utility/pipeForkLogs');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const { getAuthProviderById } = require('../auth/authProvider');
|
||||
const { startTokenChecking } = require('../utility/authProxy');
|
||||
|
||||
const logger = getLogger('connections');
|
||||
|
||||
@@ -70,7 +73,10 @@ function getPortalCollections() {
|
||||
displayName: process.env[`LABEL_${id}`],
|
||||
isReadOnly: process.env[`READONLY_${id}`],
|
||||
databases: process.env[`DBCONFIG_${id}`] ? safeJsonParse(process.env[`DBCONFIG_${id}`]) : null,
|
||||
allowedDatabases: process.env[`ALLOWED_DATABASES_${id}`]?.replace(/\|/g, '\n'),
|
||||
allowedDatabasesRegex: process.env[`ALLOWED_DATABASES_REGEX_${id}`],
|
||||
parent: process.env[`PARENT_${id}`] || undefined,
|
||||
useSeparateSchemas: !!process.env[`USE_SEPARATE_SCHEMAS_${id}`],
|
||||
|
||||
// SSH tunnel
|
||||
useSshTunnel: process.env[`USE_SSH_${id}`],
|
||||
@@ -195,10 +201,17 @@ module.exports = {
|
||||
// @ts-ignore
|
||||
this.datastore = new JsonLinesDatabase(path.join(dir, 'connections.jsonl'));
|
||||
}
|
||||
await this.checkUnsavedConnectionsLimit();
|
||||
},
|
||||
|
||||
list_meta: true,
|
||||
async list(_params, req) {
|
||||
const storage = require('./storage');
|
||||
|
||||
const storageConnections = await storage.connections(req);
|
||||
if (storageConnections) {
|
||||
return storageConnections;
|
||||
}
|
||||
if (portalConnections) {
|
||||
if (platformInfo.allowShellConnection) return portalConnections;
|
||||
return portalConnections.map(maskConnection).filter(x => connectionHasPermission(x, req));
|
||||
@@ -236,14 +249,16 @@ module.exports = {
|
||||
},
|
||||
|
||||
saveVolatile_meta: true,
|
||||
async saveVolatile({ conid, user, password, test }) {
|
||||
async saveVolatile({ conid, user = undefined, password = undefined, accessToken = undefined, test = false }) {
|
||||
const old = await this.getCore({ conid });
|
||||
const res = {
|
||||
...old,
|
||||
_id: crypto.randomUUID(),
|
||||
password,
|
||||
accessToken,
|
||||
passwordMode: undefined,
|
||||
unsaved: true,
|
||||
useRedirectDbLogin: false,
|
||||
};
|
||||
if (old.passwordMode == 'askUser') {
|
||||
res.user = user;
|
||||
@@ -286,6 +301,32 @@ module.exports = {
|
||||
return res;
|
||||
},
|
||||
|
||||
async checkUnsavedConnectionsLimit() {
|
||||
if (!this.datastore) {
|
||||
return;
|
||||
}
|
||||
const MAX_UNSAVED_CONNECTIONS = 5;
|
||||
await this.datastore.transformAll(connections => {
|
||||
const count = connections.filter(x => x.unsaved).length;
|
||||
if (count > MAX_UNSAVED_CONNECTIONS) {
|
||||
const res = [];
|
||||
let unsavedToSkip = count - MAX_UNSAVED_CONNECTIONS;
|
||||
for (const item of connections) {
|
||||
if (item.unsaved) {
|
||||
if (unsavedToSkip > 0) {
|
||||
unsavedToSkip--;
|
||||
} else {
|
||||
res.push(item);
|
||||
}
|
||||
} else {
|
||||
res.push(item);
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
update_meta: true,
|
||||
async update({ _id, values }, req) {
|
||||
if (portalConnections) return;
|
||||
@@ -336,6 +377,14 @@ module.exports = {
|
||||
if (volatile) {
|
||||
return volatile;
|
||||
}
|
||||
|
||||
const storage = require('./storage');
|
||||
|
||||
const storageConnection = await storage.getConnection({ conid });
|
||||
if (storageConnection) {
|
||||
return storageConnection;
|
||||
}
|
||||
|
||||
if (portalConnections) {
|
||||
const res = portalConnections.find(x => x._id == conid) || null;
|
||||
return mask && !platformInfo.allowShellConnection ? maskConnection(res) : res;
|
||||
@@ -346,6 +395,11 @@ module.exports = {
|
||||
|
||||
get_meta: true,
|
||||
async get({ conid }, req) {
|
||||
if (conid == '__model') {
|
||||
return {
|
||||
_id: '__model',
|
||||
};
|
||||
}
|
||||
testConnectionPermission(conid, req);
|
||||
return this.getCore({ conid, mask: true });
|
||||
},
|
||||
@@ -365,4 +419,95 @@ module.exports = {
|
||||
});
|
||||
return res;
|
||||
},
|
||||
|
||||
dbloginWeb_meta: {
|
||||
raw: true,
|
||||
method: 'get',
|
||||
},
|
||||
async dbloginWeb(req, res) {
|
||||
const { conid, state, redirectUri } = req.query;
|
||||
const connection = await this.getCore({ conid });
|
||||
const driver = requireEngineDriver(connection);
|
||||
const authResp = await driver.getRedirectAuthUrl(connection, {
|
||||
redirectUri,
|
||||
state,
|
||||
client: 'web',
|
||||
});
|
||||
res.redirect(authResp.url);
|
||||
},
|
||||
|
||||
dbloginApp_meta: true,
|
||||
async dbloginApp({ conid, state }) {
|
||||
const connection = await this.getCore({ conid });
|
||||
const driver = requireEngineDriver(connection);
|
||||
const resp = await driver.getRedirectAuthUrl(connection, {
|
||||
state,
|
||||
client: 'app',
|
||||
});
|
||||
startTokenChecking(resp.sid, async token => {
|
||||
const volatile = await this.saveVolatile({ conid, accessToken: token });
|
||||
socket.emit('got-volatile-token', { savedConId: conid, volatileConId: volatile._id });
|
||||
});
|
||||
return resp;
|
||||
},
|
||||
|
||||
dbloginToken_meta: true,
|
||||
async dbloginToken({ code, conid, strmid, redirectUri, sid }) {
|
||||
try {
|
||||
const connection = await this.getCore({ conid });
|
||||
const driver = requireEngineDriver(connection);
|
||||
const accessToken = await driver.getAuthTokenFromCode(connection, { sid, code, redirectUri });
|
||||
const volatile = await this.saveVolatile({ conid, accessToken });
|
||||
// console.log('******************************** WE HAVE ACCESS TOKEN', accessToken);
|
||||
socket.emit('got-volatile-token', { strmid, savedConId: conid, volatileConId: volatile._id });
|
||||
return { success: true };
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting DB token');
|
||||
return { error: err.message };
|
||||
}
|
||||
},
|
||||
|
||||
dbloginAuthToken_meta: true,
|
||||
async dbloginAuthToken({ amoid, code, conid, redirectUri, sid }) {
|
||||
try {
|
||||
const connection = await this.getCore({ conid });
|
||||
const driver = requireEngineDriver(connection);
|
||||
const accessToken = await driver.getAuthTokenFromCode(connection, { code, redirectUri, sid });
|
||||
const volatile = await this.saveVolatile({ conid, accessToken });
|
||||
const authProvider = getAuthProviderById(amoid);
|
||||
const resp = await authProvider.login(null, null, { conid: volatile._id });
|
||||
return resp;
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting DB token');
|
||||
return { error: err.message };
|
||||
}
|
||||
},
|
||||
|
||||
dbloginAuth_meta: true,
|
||||
async dbloginAuth({ amoid, conid, user, password }) {
|
||||
if (user || password) {
|
||||
const saveResp = await this.saveVolatile({ conid, user, password, test: true });
|
||||
if (saveResp.msgtype == 'connected') {
|
||||
const loginResp = await getAuthProviderById(amoid).login(user, password, { conid: saveResp._id });
|
||||
return loginResp;
|
||||
}
|
||||
return saveResp;
|
||||
}
|
||||
|
||||
// user and password is stored in connection, volatile connection is not needed
|
||||
const loginResp = await getAuthProviderById(amoid).login(null, null, { conid });
|
||||
return loginResp;
|
||||
},
|
||||
|
||||
volatileDbloginFromAuth_meta: true,
|
||||
async volatileDbloginFromAuth({ conid }, req) {
|
||||
const connection = await this.getCore({ conid });
|
||||
const driver = requireEngineDriver(connection);
|
||||
const accessToken = await driver.getAccessTokenFromAuth(connection, req);
|
||||
if (accessToken) {
|
||||
const volatile = await this.saveVolatile({ conid, accessToken });
|
||||
return volatile;
|
||||
}
|
||||
return null;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -12,6 +12,8 @@ const {
|
||||
extendDatabaseInfo,
|
||||
modelCompareDbDiffOptions,
|
||||
getLogger,
|
||||
extractErrorLogData,
|
||||
filterStructureBySchema,
|
||||
} = require('dbgate-tools');
|
||||
const { html, parse } = require('diff2html');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
@@ -30,6 +32,8 @@ const { testConnectionPermission } = require('../utility/hasPermission');
|
||||
const { MissingCredentialsError } = require('../utility/exceptions');
|
||||
const pipeForkLogs = require('../utility/pipeForkLogs');
|
||||
const crypto = require('crypto');
|
||||
const loadModelTransform = require('../utility/loadModelTransform');
|
||||
const exportDbModelSql = require('../utility/exportDbModelSql');
|
||||
|
||||
const logger = getLogger('databaseConnections');
|
||||
|
||||
@@ -89,6 +93,9 @@ module.exports = {
|
||||
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
|
||||
throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
|
||||
}
|
||||
if (connection.useRedirectDbLogin) {
|
||||
throw new MissingCredentialsError({ conid, redirectToDbLogin: true });
|
||||
}
|
||||
const subprocess = fork(
|
||||
global['API_PACKAGE'] || process.argv[1],
|
||||
[
|
||||
@@ -143,7 +150,7 @@ module.exports = {
|
||||
try {
|
||||
conn.subprocess.send({ msgid, ...message });
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error sending request do process');
|
||||
logger.error(extractErrorLogData(err), 'Error sending request do process');
|
||||
this.close(conn.conid, conn.database);
|
||||
}
|
||||
});
|
||||
@@ -179,6 +186,15 @@ module.exports = {
|
||||
return res;
|
||||
},
|
||||
|
||||
runOperation_meta: true,
|
||||
async runOperation({ conid, database, operation, useTransaction }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
logger.info({ conid, database, operation }, 'Processing operation');
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
const res = await this.sendRequest(opened, { msgtype: 'runOperation', operation, useTransaction });
|
||||
return res;
|
||||
},
|
||||
|
||||
collectionData_meta: true,
|
||||
async collectionData({ conid, database, options }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
@@ -201,6 +217,17 @@ module.exports = {
|
||||
return res.result || null;
|
||||
},
|
||||
|
||||
schemaList_meta: true,
|
||||
async schemaList({ conid, database }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
return this.loadDataCore('schemaList', { conid, database });
|
||||
},
|
||||
|
||||
dispatchDatabaseChangedEvent_meta: true,
|
||||
dispatchDatabaseChangedEvent({ event, conid, database }) {
|
||||
socket.emitChanged(event, { conid, database });
|
||||
},
|
||||
|
||||
loadKeys_meta: true,
|
||||
async loadKeys({ conid, database, root, filter }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
@@ -295,7 +322,7 @@ module.exports = {
|
||||
try {
|
||||
existing.subprocess.send({ msgtype: 'ping' });
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error pinging DB connection');
|
||||
logger.error(extractErrorLogData(err), 'Error pinging DB connection');
|
||||
this.close(conid, database);
|
||||
|
||||
return {
|
||||
@@ -325,6 +352,11 @@ module.exports = {
|
||||
|
||||
syncModel_meta: true,
|
||||
async syncModel({ conid, database, isFullRefresh }, req) {
|
||||
if (conid == '__model') {
|
||||
socket.emitChanged('database-structure-changed', { conid, database });
|
||||
return { status: 'ok' };
|
||||
}
|
||||
|
||||
testConnectionPermission(conid, req);
|
||||
const conn = await this.ensureOpened(conid, database);
|
||||
conn.subprocess.send({ msgtype: 'syncModel', isFullRefresh });
|
||||
@@ -339,7 +371,7 @@ module.exports = {
|
||||
try {
|
||||
existing.subprocess.kill();
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error killing subprocess');
|
||||
logger.error(extractErrorLogData(err), 'Error killing subprocess');
|
||||
}
|
||||
}
|
||||
this.opened = this.opened.filter(x => x.conid != conid || x.database != database);
|
||||
@@ -368,11 +400,12 @@ module.exports = {
|
||||
},
|
||||
|
||||
structure_meta: true,
|
||||
async structure({ conid, database }, req) {
|
||||
async structure({ conid, database, modelTransFile = null }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
if (conid == '__model') {
|
||||
const model = await importDbModel(database);
|
||||
return model;
|
||||
const trans = await loadModelTransform(modelTransFile);
|
||||
return trans ? trans(model) : model;
|
||||
}
|
||||
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
@@ -408,14 +441,35 @@ module.exports = {
|
||||
},
|
||||
|
||||
exportModel_meta: true,
|
||||
async exportModel({ conid, database }, req) {
|
||||
async exportModel({ conid, database, outputFolder, schema }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const archiveFolder = await archive.getNewArchiveFolder({ database });
|
||||
await fs.mkdir(path.join(archivedir(), archiveFolder));
|
||||
|
||||
const realFolder = outputFolder.startsWith('archive:')
|
||||
? resolveArchiveFolder(outputFolder.substring('archive:'.length))
|
||||
: outputFolder;
|
||||
|
||||
const model = await this.structure({ conid, database });
|
||||
await exportDbModel(model, path.join(archivedir(), archiveFolder));
|
||||
socket.emitChanged(`archive-folders-changed`);
|
||||
return { archiveFolder };
|
||||
const filteredModel = schema ? filterStructureBySchema(model, schema) : model;
|
||||
await exportDbModel(extendDatabaseInfo(filteredModel), realFolder);
|
||||
|
||||
if (outputFolder.startsWith('archive:')) {
|
||||
socket.emitChanged(`archive-files-changed`, { folder: outputFolder.substring('archive:'.length) });
|
||||
}
|
||||
return { status: 'ok' };
|
||||
},
|
||||
|
||||
exportModelSql_meta: true,
|
||||
async exportModelSql({ conid, database, outputFolder, outputFile, schema }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
|
||||
const connection = await connections.getCore({ conid });
|
||||
const driver = requireEngineDriver(connection);
|
||||
|
||||
const model = await this.structure({ conid, database });
|
||||
const filteredModel = schema ? filterStructureBySchema(model, schema) : model;
|
||||
await exportDbModelSql(extendDatabaseInfo(filteredModel), driver, outputFolder, outputFile);
|
||||
|
||||
return { status: 'ok' };
|
||||
},
|
||||
|
||||
generateDeploySql_meta: true,
|
||||
|
||||
@@ -18,11 +18,14 @@ function readFirstLine(file) {
|
||||
}
|
||||
if (reader.hasNextLine()) {
|
||||
reader.nextLine((err, line) => {
|
||||
if (err) reject(err);
|
||||
resolve(line);
|
||||
if (err) {
|
||||
reader.close(() => reject(err)); // Ensure reader is closed on error
|
||||
return;
|
||||
}
|
||||
reader.close(() => resolve(line)); // Ensure reader is closed after reading
|
||||
});
|
||||
} else {
|
||||
resolve(null);
|
||||
reader.close(() => resolve(null)); // Properly close if no lines are present
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -42,13 +42,14 @@ module.exports = {
|
||||
|
||||
info_meta: true,
|
||||
async info({ packageName }) {
|
||||
// @ts-ignore
|
||||
const isPackaged = await fs.exists(path.join(packagedPluginsDir(), packageName));
|
||||
|
||||
try {
|
||||
const infoResp = await axios.default.get(`https://registry.npmjs.org/${packageName}`);
|
||||
const { latest } = infoResp.data['dist-tags'];
|
||||
const manifest = infoResp.data.versions[latest];
|
||||
const { readme } = infoResp.data;
|
||||
// @ts-ignore
|
||||
const isPackaged = await fs.exists(path.join(packagedPluginsDir(), packageName));
|
||||
|
||||
return {
|
||||
readme,
|
||||
@@ -57,6 +58,7 @@ module.exports = {
|
||||
};
|
||||
} catch (err) {
|
||||
return {
|
||||
isPackaged,
|
||||
state: 'error',
|
||||
error: err.message,
|
||||
};
|
||||
@@ -92,7 +94,7 @@ module.exports = {
|
||||
if (!manifest.keywords) {
|
||||
continue;
|
||||
}
|
||||
if (!manifest.keywords.includes('dbgateplugin')) {
|
||||
if (!manifest.keywords.includes('dbgateplugin') && !manifest.keywords.includes('dbgatebuiltin')) {
|
||||
continue;
|
||||
}
|
||||
const readmeFile = path.join(isPackaged ? packagedPluginsDir() : pluginsdir(), packageName, 'README.md');
|
||||
|
||||
@@ -12,6 +12,7 @@ const {
|
||||
jsonScriptToJavascript,
|
||||
getLogger,
|
||||
safeJsonParse,
|
||||
pinoLogRecordToMessageRecord,
|
||||
} = require('dbgate-tools');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
const processArgs = require('../utility/processArgs');
|
||||
@@ -68,18 +69,20 @@ module.exports = {
|
||||
|
||||
dispatchMessage(runid, message) {
|
||||
if (message) {
|
||||
const json = safeJsonParse(message.message);
|
||||
if (_.isPlainObject(message)) logger.log(message);
|
||||
else logger.info(message);
|
||||
|
||||
if (json) logger.log(json);
|
||||
else logger.info(message.message);
|
||||
const toEmit = _.isPlainObject(message)
|
||||
? {
|
||||
time: new Date(),
|
||||
...message,
|
||||
}
|
||||
: {
|
||||
message,
|
||||
time: new Date(),
|
||||
};
|
||||
|
||||
const toEmit = {
|
||||
time: new Date(),
|
||||
...message,
|
||||
message: json ? json.msg : message.message,
|
||||
};
|
||||
|
||||
if (json && json.level >= 50) {
|
||||
if (toEmit.level >= 50) {
|
||||
toEmit.severity = 'error';
|
||||
}
|
||||
|
||||
@@ -108,7 +111,7 @@ module.exports = {
|
||||
const scriptFile = path.join(uploadsdir(), runid + '.js');
|
||||
fs.writeFileSync(`${scriptFile}`, scriptText);
|
||||
fs.mkdirSync(directory);
|
||||
const pluginNames = _.union(fs.readdirSync(pluginsdir()), packagedPluginList);
|
||||
const pluginNames = extractPlugins(scriptText);
|
||||
logger.info({ scriptFile }, 'Running script');
|
||||
// const subprocess = fork(scriptFile, ['--checkParent', '--max-old-space-size=8192'], {
|
||||
const subprocess = fork(
|
||||
@@ -131,7 +134,16 @@ module.exports = {
|
||||
}
|
||||
);
|
||||
const pipeDispatcher = severity => data => {
|
||||
return this.dispatchMessage(runid, { severity, message: data.toString().trim() });
|
||||
const json = safeJsonParse(data, null);
|
||||
|
||||
if (json) {
|
||||
return this.dispatchMessage(runid, pinoLogRecordToMessageRecord(json));
|
||||
} else {
|
||||
return this.dispatchMessage(runid, {
|
||||
message: json == null ? data.toString().trim() : null,
|
||||
severity,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
byline(subprocess.stdout).on('data', pipeDispatcher('info'));
|
||||
@@ -165,7 +177,7 @@ module.exports = {
|
||||
|
||||
start_meta: true,
|
||||
async start({ script }) {
|
||||
const runid = crypto.randomUUID()
|
||||
const runid = crypto.randomUUID();
|
||||
|
||||
if (script.type == 'json') {
|
||||
const js = jsonScriptToJavascript(script);
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
const crypto = require('crypto');
|
||||
const connections = require('./connections');
|
||||
const socket = require('../utility/socket');
|
||||
const { fork } = require('child_process');
|
||||
@@ -10,7 +11,7 @@ const processArgs = require('../utility/processArgs');
|
||||
const { testConnectionPermission } = require('../utility/hasPermission');
|
||||
const { MissingCredentialsError } = require('../utility/exceptions');
|
||||
const pipeForkLogs = require('../utility/pipeForkLogs');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
|
||||
const logger = getLogger('serverConnection');
|
||||
|
||||
@@ -51,11 +52,14 @@ module.exports = {
|
||||
if (existing) return existing;
|
||||
const connection = await connections.getCore({ conid });
|
||||
if (!connection) {
|
||||
throw new Error(`Connection with conid="${conid}" not fund`);
|
||||
throw new Error(`Connection with conid="${conid}" not found`);
|
||||
}
|
||||
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
|
||||
throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
|
||||
}
|
||||
if (connection.useRedirectDbLogin) {
|
||||
throw new MissingCredentialsError({ conid, redirectToDbLogin: true });
|
||||
}
|
||||
const subprocess = fork(
|
||||
global['API_PACKAGE'] || process.argv[1],
|
||||
[
|
||||
@@ -108,7 +112,7 @@ module.exports = {
|
||||
try {
|
||||
existing.subprocess.kill();
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error killing subprocess');
|
||||
logger.error(extractErrorLogData(err), 'Error killing subprocess');
|
||||
}
|
||||
}
|
||||
this.opened = this.opened.filter(x => x.conid != conid);
|
||||
@@ -130,6 +134,7 @@ module.exports = {
|
||||
listDatabases_meta: true,
|
||||
async listDatabases({ conid }, req) {
|
||||
if (!conid) return [];
|
||||
if (conid == '__model') return [];
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid);
|
||||
return opened.databases;
|
||||
@@ -163,12 +168,12 @@ module.exports = {
|
||||
try {
|
||||
opened.subprocess.send({ msgtype: 'ping' });
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error pinging server connection');
|
||||
logger.error(extractErrorLogData(err), 'Error pinging server connection');
|
||||
this.close(conid);
|
||||
}
|
||||
})
|
||||
);
|
||||
socket.setStreamIdFilter(strmid, { conid: conidArray });
|
||||
socket.setStreamIdFilter(strmid, { conid: [...(conidArray ?? []), '__model'] });
|
||||
return { status: 'ok' };
|
||||
},
|
||||
|
||||
@@ -181,22 +186,29 @@ module.exports = {
|
||||
return { status: 'ok' };
|
||||
},
|
||||
|
||||
createDatabase_meta: true,
|
||||
async createDatabase({ conid, name }, req) {
|
||||
async sendDatabaseOp({ conid, msgtype, name }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid);
|
||||
if (opened.connection.isReadOnly) return false;
|
||||
opened.subprocess.send({ msgtype: 'createDatabase', name });
|
||||
return { status: 'ok' };
|
||||
const res = await this.sendRequest(opened, { msgtype, name });
|
||||
if (res.errorMessage) {
|
||||
console.error(res.errorMessage);
|
||||
|
||||
return {
|
||||
apiErrorMessage: res.errorMessage,
|
||||
};
|
||||
}
|
||||
return res.result || null;
|
||||
},
|
||||
|
||||
createDatabase_meta: true,
|
||||
async createDatabase({ conid, name }, req) {
|
||||
return this.sendDatabaseOp({ conid, msgtype: 'createDatabase', name }, req);
|
||||
},
|
||||
|
||||
dropDatabase_meta: true,
|
||||
async dropDatabase({ conid, name }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
const opened = await this.ensureOpened(conid);
|
||||
if (opened.connection.isReadOnly) return false;
|
||||
opened.subprocess.send({ msgtype: 'dropDatabase', name });
|
||||
return { status: 'ok' };
|
||||
return this.sendDatabaseOp({ conid, msgtype: 'dropDatabase', name }, req);
|
||||
},
|
||||
|
||||
sendRequest(conn, message) {
|
||||
@@ -206,7 +218,7 @@ module.exports = {
|
||||
try {
|
||||
conn.subprocess.send({ msgid, ...message });
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error sending request');
|
||||
logger.error(extractErrorLogData(err), 'Error sending request');
|
||||
this.close(conn.conid);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -8,7 +8,7 @@ const path = require('path');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
const processArgs = require('../utility/processArgs');
|
||||
const { appdir } = require('../utility/directories');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const pipeForkLogs = require('../utility/pipeForkLogs');
|
||||
const config = require('./config');
|
||||
|
||||
@@ -222,7 +222,7 @@ module.exports = {
|
||||
try {
|
||||
session.subprocess.send({ msgtype: 'ping' });
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error pinging session');
|
||||
logger.error(extractErrorLogData(err), 'Error pinging session');
|
||||
|
||||
return {
|
||||
status: 'error',
|
||||
|
||||
31
packages/api/src/controllers/storage.js
Normal file
@@ -0,0 +1,31 @@
|
||||
module.exports = {
|
||||
connections_meta: true,
|
||||
async connections(req) {
|
||||
return null;
|
||||
},
|
||||
|
||||
getConnection_meta: true,
|
||||
async getConnection({ conid }) {
|
||||
return null;
|
||||
},
|
||||
|
||||
async loadSuperadminPermissions() {
|
||||
return [];
|
||||
},
|
||||
|
||||
getConnectionsForLoginPage_meta: true,
|
||||
async getConnectionsForLoginPage() {
|
||||
return null;
|
||||
},
|
||||
|
||||
getStorageConnectionError() {
|
||||
return null;
|
||||
},
|
||||
|
||||
readConfig_meta: true,
|
||||
async readConfig({ group }) {
|
||||
return {};
|
||||
},
|
||||
|
||||
startRefreshLicense() {},
|
||||
};
|
||||
@@ -1,7 +1,7 @@
|
||||
const crypto = require('crypto');
|
||||
const path = require('path');
|
||||
const { uploadsdir, getLogsFilePath } = require('../utility/directories');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const logger = getLogger('uploads');
|
||||
const axios = require('axios');
|
||||
const os = require('os');
|
||||
@@ -110,7 +110,7 @@ module.exports = {
|
||||
|
||||
return response.data;
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error uploading gist');
|
||||
logger.error(extractErrorLogData(err), 'Error uploading gist');
|
||||
|
||||
return {
|
||||
apiErrorMessage: err.message,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
|
||||
module.exports = {
|
||||
version: '5.0.0-alpha.1',
|
||||
buildTime: '2021-04-17T07:22:49.702Z'
|
||||
version: '6.0.0-alpha.1',
|
||||
buildTime: '2024-12-01T00:00:00Z'
|
||||
};
|
||||
|
||||
@@ -1,10 +1,17 @@
|
||||
const { setLogConfig, getLogger, setLoggerName } = require('dbgate-tools');
|
||||
const { setLogConfig, getLogger, setLoggerName, extractErrorLogData } = require('dbgate-tools');
|
||||
const processArgs = require('./utility/processArgs');
|
||||
const fs = require('fs');
|
||||
const moment = require('moment');
|
||||
const path = require('path');
|
||||
const { logsdir, setLogsFilePath, getLogsFilePath } = require('./utility/directories');
|
||||
const { createLogger } = require('pinomin');
|
||||
const currentVersion = require('./currentVersion');
|
||||
|
||||
const logger = getLogger('apiIndex');
|
||||
|
||||
process.on('uncaughtException', err => {
|
||||
logger.fatal(extractErrorLogData(err), 'Uncaught exception, exiting process');
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
if (processArgs.startProcess) {
|
||||
setLoggerName(processArgs.startProcess.replace(/Process$/, ''));
|
||||
@@ -94,12 +101,22 @@ function configureLogger() {
|
||||
|
||||
if (processArgs.listenApi) {
|
||||
configureLogger();
|
||||
logger.info(`Starting API process version ${currentVersion.version}`);
|
||||
|
||||
if (process.env.DEBUG_PRINT_ENV_VARIABLES) {
|
||||
logger.info('Debug print environment variables:');
|
||||
for (const key of Object.keys(process.env)) {
|
||||
logger.info(` ${key}: ${JSON.stringify(process.env[key])}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const shell = require('./shell/index');
|
||||
const dbgateTools = require('dbgate-tools');
|
||||
|
||||
global['DBGATE_TOOLS'] = dbgateTools;
|
||||
global.DBGATE_PACKAGES = {
|
||||
'dbgate-tools': require('dbgate-tools'),
|
||||
'dbgate-sqltree': require('dbgate-sqltree'),
|
||||
};
|
||||
|
||||
if (processArgs.startProcess) {
|
||||
const proc = require('./proc');
|
||||
@@ -116,6 +133,7 @@ module.exports = {
|
||||
...shell,
|
||||
getLogger,
|
||||
configureLogger,
|
||||
currentVersion,
|
||||
// loadLogsContent,
|
||||
getMainModule: () => require('./main'),
|
||||
};
|
||||
|
||||
@@ -18,6 +18,7 @@ const sessions = require('./controllers/sessions');
|
||||
const runners = require('./controllers/runners');
|
||||
const jsldata = require('./controllers/jsldata');
|
||||
const config = require('./controllers/config');
|
||||
const storage = require('./controllers/storage');
|
||||
const archive = require('./controllers/archive');
|
||||
const apps = require('./controllers/apps');
|
||||
const auth = require('./controllers/auth');
|
||||
@@ -31,9 +32,11 @@ const onFinished = require('on-finished');
|
||||
const { rundir } = require('./utility/directories');
|
||||
const platformInfo = require('./utility/platformInfo');
|
||||
const getExpressPath = require('./utility/getExpressPath');
|
||||
const { getLogins } = require('./utility/hasPermission');
|
||||
const _ = require('lodash');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const { getDefaultAuthProvider } = require('./auth/authProvider');
|
||||
const startCloudUpgradeTimer = require('./utility/cloudUpgrade');
|
||||
const { isProApp } = require('./utility/checkLicense');
|
||||
|
||||
const logger = getLogger('main');
|
||||
|
||||
@@ -44,11 +47,23 @@ function start() {
|
||||
|
||||
const server = http.createServer(app);
|
||||
|
||||
const logins = getLogins();
|
||||
if (logins && process.env.BASIC_AUTH) {
|
||||
if (process.env.BASIC_AUTH && !process.env.STORAGE_DATABASE) {
|
||||
async function authorizer(username, password, cb) {
|
||||
try {
|
||||
const resp = await getDefaultAuthProvider().login(username, password);
|
||||
if (resp.accessToken) {
|
||||
cb(null, true);
|
||||
} else {
|
||||
cb(null, false);
|
||||
}
|
||||
} catch (err) {
|
||||
cb(err, false);
|
||||
}
|
||||
}
|
||||
app.use(
|
||||
basicAuth({
|
||||
users: _.fromPairs(logins.filter(x => x.password).map(x => [x.login, x.password])),
|
||||
authorizer,
|
||||
authorizeAsync: true,
|
||||
challenge: true,
|
||||
realm: 'DbGate Web App',
|
||||
})
|
||||
@@ -60,8 +75,13 @@ function start() {
|
||||
if (platformInfo.isDocker) {
|
||||
// server static files inside docker container
|
||||
app.use(getExpressPath('/'), express.static('/home/dbgate-docker/public'));
|
||||
} else if (platformInfo.isAwsUbuntuLayout) {
|
||||
app.use(getExpressPath('/'), express.static('/home/ubuntu/build/public'));
|
||||
} else if (platformInfo.isNpmDist) {
|
||||
app.use(getExpressPath('/'), express.static(path.join(__dirname, '../../dbgate-web/public')));
|
||||
app.use(
|
||||
getExpressPath('/'),
|
||||
express.static(path.join(__dirname, isProApp() ? '../../dbgate-web-premium/public' : '../../dbgate-web/public'))
|
||||
);
|
||||
} else if (process.env.DEVWEB) {
|
||||
// console.log('__dirname', __dirname);
|
||||
// console.log(path.join(__dirname, '../../web/public/build'));
|
||||
@@ -72,9 +92,7 @@ function start() {
|
||||
});
|
||||
}
|
||||
|
||||
if (auth.shouldAuthorizeApi()) {
|
||||
app.use(auth.authMiddleware);
|
||||
}
|
||||
app.use(auth.authMiddleware);
|
||||
|
||||
app.get(getExpressPath('/stream'), async function (req, res) {
|
||||
const strmid = req.query.strmid;
|
||||
@@ -115,6 +133,10 @@ function start() {
|
||||
const port = process.env.PORT || 3000;
|
||||
logger.info(`DbGate API listening on port ${port} (docker build)`);
|
||||
server.listen(port);
|
||||
} else if (platformInfo.isAwsUbuntuLayout) {
|
||||
const port = process.env.PORT || 3000;
|
||||
logger.info(`DbGate API listening on port ${port} (AWS AMI build)`);
|
||||
server.listen(port);
|
||||
} else if (platformInfo.isNpmDist) {
|
||||
getPort({
|
||||
port: parseInt(
|
||||
@@ -151,6 +173,10 @@ function start() {
|
||||
process.on('SIGINT', shutdown);
|
||||
process.on('SIGTERM', shutdown);
|
||||
process.on('SIGBREAK', shutdown);
|
||||
|
||||
if (process.env.CLOUD_UPGRADE_FILE) {
|
||||
startCloudUpgradeTimer();
|
||||
}
|
||||
}
|
||||
|
||||
function useAllControllers(app, electron) {
|
||||
@@ -162,6 +188,7 @@ function useAllControllers(app, electron) {
|
||||
useController(app, electron, '/runners', runners);
|
||||
useController(app, electron, '/jsldata', jsldata);
|
||||
useController(app, electron, '/config', config);
|
||||
useController(app, electron, '/storage', storage);
|
||||
useController(app, electron, '/archive', archive);
|
||||
useController(app, electron, '/uploads', uploads);
|
||||
useController(app, electron, '/plugins', plugins);
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
const argIndex = process.argv.indexOf('--native-modules');
|
||||
const redirectFile = global['NATIVE_MODULES'] || (argIndex > 0 ? process.argv[argIndex + 1] : null);
|
||||
|
||||
function requireDynamic(file) {
|
||||
try {
|
||||
// @ts-ignore
|
||||
return __non_webpack_require__(redirectFile);
|
||||
} catch (err) {
|
||||
return require(redirectFile);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = redirectFile ? requireDynamic(redirectFile) : require('./nativeModulesContent');
|
||||
9
packages/api/src/nativeModulesContent.js
Normal file
@@ -0,0 +1,9 @@
|
||||
|
||||
// this file is generated automatically by script fillNativeModules.js, do not edit it manually
|
||||
const content = {};
|
||||
|
||||
content['better-sqlite3'] = () => require('better-sqlite3');
|
||||
content['oracledb'] = () => require('oracledb');
|
||||
|
||||
|
||||
module.exports = content;
|
||||
@@ -20,9 +20,10 @@ function start() {
|
||||
if (handleProcessCommunication(connection)) return;
|
||||
try {
|
||||
const driver = requireEngineDriver(connection);
|
||||
const conn = await connectUtility(driver, connection, 'app');
|
||||
const res = await driver.getVersion(conn);
|
||||
const dbhan = await connectUtility(driver, connection, 'app');
|
||||
const res = await driver.getVersion(dbhan);
|
||||
process.send({ msgtype: 'connected', ...res });
|
||||
await driver.close(dbhan);
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
process.send({
|
||||
|
||||
@@ -1,7 +1,15 @@
|
||||
const stableStringify = require('json-stable-stringify');
|
||||
const { splitQuery } = require('dbgate-query-splitter');
|
||||
const childProcessChecker = require('../utility/childProcessChecker');
|
||||
const { extractBoolSettingsValue, extractIntSettingsValue, getLogger } = require('dbgate-tools');
|
||||
const {
|
||||
extractBoolSettingsValue,
|
||||
extractIntSettingsValue,
|
||||
getLogger,
|
||||
isCompositeDbName,
|
||||
dbNameLogCategory,
|
||||
extractErrorMessage,
|
||||
extractErrorLogData,
|
||||
} = require('dbgate-tools');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const connectUtility = require('../utility/connectUtility');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
@@ -11,7 +19,7 @@ const { dumpSqlSelect } = require('dbgate-sqltree');
|
||||
|
||||
const logger = getLogger('dbconnProcess');
|
||||
|
||||
let systemConnection;
|
||||
let dbhan;
|
||||
let storedConnection;
|
||||
let afterConnectCallbacks = [];
|
||||
let afterAnalyseCallbacks = [];
|
||||
@@ -35,7 +43,7 @@ async function checkedAsyncCall(promise) {
|
||||
} catch (err) {
|
||||
setStatus({
|
||||
name: 'error',
|
||||
message: err.message,
|
||||
message: extractErrorMessage(err, 'Checked call error'),
|
||||
});
|
||||
// console.error(err);
|
||||
setTimeout(() => process.exit(1), 1000);
|
||||
@@ -46,10 +54,16 @@ async function checkedAsyncCall(promise) {
|
||||
let loadingModel = false;
|
||||
|
||||
async function handleFullRefresh() {
|
||||
if (storedConnection.useSeparateSchemas && !isCompositeDbName(dbhan?.database)) {
|
||||
resolveAnalysedPromises();
|
||||
// skip loading DB structure
|
||||
return;
|
||||
}
|
||||
|
||||
loadingModel = true;
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
setStatusName('loadStructure');
|
||||
analysedStructure = await checkedAsyncCall(driver.analyseFull(systemConnection, serverVersion));
|
||||
analysedStructure = await checkedAsyncCall(driver.analyseFull(dbhan, serverVersion));
|
||||
analysedTime = new Date().getTime();
|
||||
process.send({ msgtype: 'structure', structure: analysedStructure });
|
||||
process.send({ msgtype: 'structureTime', analysedTime });
|
||||
@@ -60,12 +74,15 @@ async function handleFullRefresh() {
|
||||
}
|
||||
|
||||
async function handleIncrementalRefresh(forceSend) {
|
||||
if (storedConnection.useSeparateSchemas && !isCompositeDbName(dbhan?.database)) {
|
||||
resolveAnalysedPromises();
|
||||
// skip loading DB structure
|
||||
return;
|
||||
}
|
||||
loadingModel = true;
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
setStatusName('checkStructure');
|
||||
const newStructure = await checkedAsyncCall(
|
||||
driver.analyseIncremental(systemConnection, analysedStructure, serverVersion)
|
||||
);
|
||||
const newStructure = await checkedAsyncCall(driver.analyseIncremental(dbhan, analysedStructure, serverVersion));
|
||||
analysedTime = new Date().getTime();
|
||||
if (newStructure != null) {
|
||||
analysedStructure = newStructure;
|
||||
@@ -103,7 +120,8 @@ function setStatusName(name) {
|
||||
|
||||
async function readVersion() {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
const version = await driver.getVersion(systemConnection);
|
||||
const version = await driver.getVersion(dbhan);
|
||||
logger.debug(`Got server version: ${version.version}`);
|
||||
process.send({ msgtype: 'version', version });
|
||||
serverVersion = version;
|
||||
}
|
||||
@@ -114,8 +132,13 @@ async function handleConnect({ connection, structure, globalSettings }) {
|
||||
|
||||
if (!structure) setStatusName('pending');
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
systemConnection = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app'));
|
||||
systemConnection.feedback = feedback => setStatus({ feedback });
|
||||
dbhan = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app'));
|
||||
logger.debug(
|
||||
`Connected to database, driver: ${storedConnection.engine}, separate schemas: ${
|
||||
storedConnection.useSeparateSchemas ? 'YES' : 'NO'
|
||||
}, 'DB: ${dbNameLogCategory(dbhan.database)} }`
|
||||
);
|
||||
dbhan.feedback = feedback => setStatus({ feedback });
|
||||
await checkedAsyncCall(readVersion());
|
||||
if (structure) {
|
||||
analysedStructure = structure;
|
||||
@@ -138,7 +161,7 @@ async function handleConnect({ connection, structure, globalSettings }) {
|
||||
}
|
||||
|
||||
function waitConnected() {
|
||||
if (systemConnection) return Promise.resolve();
|
||||
if (dbhan) return Promise.resolve();
|
||||
return new Promise((resolve, reject) => {
|
||||
afterConnectCallbacks.push([resolve, reject]);
|
||||
});
|
||||
@@ -163,10 +186,30 @@ async function handleRunScript({ msgid, sql, useTransaction }, skipReadonlyCheck
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
|
||||
await driver.script(systemConnection, sql, { useTransaction });
|
||||
await driver.script(dbhan, sql, { useTransaction });
|
||||
process.send({ msgtype: 'response', msgid });
|
||||
} catch (err) {
|
||||
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
|
||||
process.send({
|
||||
msgtype: 'response',
|
||||
msgid,
|
||||
errorMessage: extractErrorMessage(err, 'Error executing SQL script'),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async function handleRunOperation({ msgid, operation, useTransaction }, skipReadonlyCheck = false) {
|
||||
await waitConnected();
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
|
||||
await driver.operation(dbhan, operation, { useTransaction });
|
||||
process.send({ msgtype: 'response', msgid });
|
||||
} catch (err) {
|
||||
process.send({
|
||||
msgtype: 'response',
|
||||
msgid,
|
||||
errorMessage: extractErrorMessage(err, 'Error executing DB operation'),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -176,10 +219,14 @@ async function handleQueryData({ msgid, sql }, skipReadonlyCheck = false) {
|
||||
try {
|
||||
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
|
||||
// console.log(sql);
|
||||
const res = await driver.query(systemConnection, sql);
|
||||
const res = await driver.query(dbhan, sql);
|
||||
process.send({ msgtype: 'response', msgid, ...res });
|
||||
} catch (err) {
|
||||
process.send({ msgtype: 'response', msgid, errorMessage: err.message || 'Error executing SQL script' });
|
||||
process.send({
|
||||
msgtype: 'response',
|
||||
msgid,
|
||||
errorMessage: extractErrorMessage(err, 'Error executing SQL script'),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -190,52 +237,64 @@ async function handleSqlSelect({ msgid, select }) {
|
||||
return handleQueryData({ msgid, sql: dmp.s }, true);
|
||||
}
|
||||
|
||||
async function handleDriverDataCore(msgid, callMethod) {
|
||||
async function handleDriverDataCore(msgid, callMethod, { logName }) {
|
||||
await waitConnected();
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
const result = await callMethod(driver);
|
||||
process.send({ msgtype: 'response', msgid, result });
|
||||
} catch (err) {
|
||||
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
|
||||
logger.error(extractErrorLogData(err, { logName }), `Error when handling message ${logName}`);
|
||||
process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error executing DB data') });
|
||||
}
|
||||
}
|
||||
|
||||
async function handleSchemaList({ msgid }) {
|
||||
logger.debug('Loading schema list');
|
||||
return handleDriverDataCore(msgid, driver => driver.listSchemas(dbhan), { logName: 'listSchemas' });
|
||||
}
|
||||
|
||||
async function handleCollectionData({ msgid, options }) {
|
||||
return handleDriverDataCore(msgid, driver => driver.readCollection(systemConnection, options));
|
||||
return handleDriverDataCore(msgid, driver => driver.readCollection(dbhan, options), { logName: 'readCollection' });
|
||||
}
|
||||
|
||||
async function handleLoadKeys({ msgid, root, filter }) {
|
||||
return handleDriverDataCore(msgid, driver => driver.loadKeys(systemConnection, root, filter));
|
||||
return handleDriverDataCore(msgid, driver => driver.loadKeys(dbhan, root, filter), { logName: 'loadKeys' });
|
||||
}
|
||||
|
||||
async function handleExportKeys({ msgid, options }) {
|
||||
return handleDriverDataCore(msgid, driver => driver.exportKeys(systemConnection, options));
|
||||
return handleDriverDataCore(msgid, driver => driver.exportKeys(dbhan, options), { logName: 'exportKeys' });
|
||||
}
|
||||
|
||||
async function handleLoadKeyInfo({ msgid, key }) {
|
||||
return handleDriverDataCore(msgid, driver => driver.loadKeyInfo(systemConnection, key));
|
||||
return handleDriverDataCore(msgid, driver => driver.loadKeyInfo(dbhan, key), { logName: 'loadKeyInfo' });
|
||||
}
|
||||
|
||||
async function handleCallMethod({ msgid, method, args }) {
|
||||
return handleDriverDataCore(msgid, driver => {
|
||||
if (storedConnection.isReadOnly) {
|
||||
throw new Error('Connection is read only, cannot call custom methods');
|
||||
}
|
||||
return handleDriverDataCore(
|
||||
msgid,
|
||||
driver => {
|
||||
if (storedConnection.isReadOnly) {
|
||||
throw new Error('Connection is read only, cannot call custom methods');
|
||||
}
|
||||
|
||||
ensureExecuteCustomScript(driver);
|
||||
return driver.callMethod(systemConnection, method, args);
|
||||
});
|
||||
ensureExecuteCustomScript(driver);
|
||||
return driver.callMethod(dbhan, method, args);
|
||||
},
|
||||
{ logName: `callMethod:${method}` }
|
||||
);
|
||||
}
|
||||
|
||||
async function handleLoadKeyTableRange({ msgid, key, cursor, count }) {
|
||||
return handleDriverDataCore(msgid, driver => driver.loadKeyTableRange(systemConnection, key, cursor, count));
|
||||
return handleDriverDataCore(msgid, driver => driver.loadKeyTableRange(dbhan, key, cursor, count), {
|
||||
logName: 'loadKeyTableRange',
|
||||
});
|
||||
}
|
||||
|
||||
async function handleLoadFieldValues({ msgid, schemaName, pureName, field, search }) {
|
||||
return handleDriverDataCore(msgid, driver =>
|
||||
driver.loadFieldValues(systemConnection, { schemaName, pureName }, field, search)
|
||||
);
|
||||
return handleDriverDataCore(msgid, driver => driver.loadFieldValues(dbhan, { schemaName, pureName }, field, search), {
|
||||
logName: 'loadFieldValues',
|
||||
});
|
||||
}
|
||||
|
||||
function ensureExecuteCustomScript(driver) {
|
||||
@@ -252,10 +311,10 @@ async function handleUpdateCollection({ msgid, changeSet }) {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
ensureExecuteCustomScript(driver);
|
||||
const result = await driver.updateCollection(systemConnection, changeSet);
|
||||
const result = await driver.updateCollection(dbhan, changeSet);
|
||||
process.send({ msgtype: 'response', msgid, result });
|
||||
} catch (err) {
|
||||
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
|
||||
process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error updating collection') });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -265,18 +324,24 @@ async function handleSqlPreview({ msgid, objects, options }) {
|
||||
|
||||
try {
|
||||
const dmp = driver.createDumper();
|
||||
const generator = new SqlGenerator(analysedStructure, options, objects, dmp, driver, systemConnection);
|
||||
const generator = new SqlGenerator(analysedStructure, options, objects, dmp, driver, dbhan);
|
||||
|
||||
await generator.dump();
|
||||
process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated });
|
||||
if (generator.isUnhandledException) {
|
||||
setTimeout(() => {
|
||||
setTimeout(async () => {
|
||||
logger.error('Exiting because of unhandled exception');
|
||||
await driver.close(dbhan);
|
||||
process.exit(0);
|
||||
}, 500);
|
||||
}
|
||||
} catch (err) {
|
||||
process.send({ msgtype: 'response', msgid, isError: true, errorMessage: err.message });
|
||||
process.send({
|
||||
msgtype: 'response',
|
||||
msgid,
|
||||
isError: true,
|
||||
errorMessage: extractErrorMessage(err, 'Error generating SQL preview'),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -285,14 +350,19 @@ async function handleGenerateDeploySql({ msgid, modelFolder }) {
|
||||
|
||||
try {
|
||||
const res = await generateDeploySql({
|
||||
systemConnection,
|
||||
systemConnection: dbhan,
|
||||
connection: storedConnection,
|
||||
analysedStructure,
|
||||
modelFolder,
|
||||
});
|
||||
process.send({ ...res, msgtype: 'response', msgid });
|
||||
} catch (err) {
|
||||
process.send({ msgtype: 'response', msgid, isError: true, errorMessage: err.message });
|
||||
process.send({
|
||||
msgtype: 'response',
|
||||
msgid,
|
||||
isError: true,
|
||||
errorMessage: extractErrorMessage(err, 'Error generating deploy SQL'),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -311,6 +381,7 @@ const messageHandlers = {
|
||||
connect: handleConnect,
|
||||
queryData: handleQueryData,
|
||||
runScript: handleRunScript,
|
||||
runOperation: handleRunOperation,
|
||||
updateCollection: handleUpdateCollection,
|
||||
collectionData: handleCollectionData,
|
||||
loadKeys: handleLoadKeys,
|
||||
@@ -324,6 +395,7 @@ const messageHandlers = {
|
||||
loadFieldValues: handleLoadFieldValues,
|
||||
sqlSelect: handleSqlSelect,
|
||||
exportKeys: handleExportKeys,
|
||||
schemaList: handleSchemaList,
|
||||
// runCommand: handleRunCommand,
|
||||
};
|
||||
|
||||
@@ -335,10 +407,12 @@ async function handleMessage({ msgtype, ...other }) {
|
||||
function start() {
|
||||
childProcessChecker();
|
||||
|
||||
setInterval(() => {
|
||||
setInterval(async () => {
|
||||
const time = new Date().getTime();
|
||||
if (time - lastPing > 40 * 1000) {
|
||||
logger.info('Database connection not alive, exiting');
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
await driver.close(dbhan);
|
||||
process.exit(0);
|
||||
}
|
||||
}, 10 * 1000);
|
||||
@@ -348,8 +422,8 @@ function start() {
|
||||
try {
|
||||
await handleMessage(message);
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error in DB connection');
|
||||
process.send({ msgtype: 'error', error: err.message });
|
||||
logger.error(extractErrorLogData(err), 'Error in DB connection');
|
||||
process.send({ msgtype: 'error', error: extractErrorMessage(err, 'Error processing message') });
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
const stableStringify = require('json-stable-stringify');
|
||||
const { extractBoolSettingsValue, extractIntSettingsValue, getLogger } = require('dbgate-tools');
|
||||
const { extractBoolSettingsValue, extractIntSettingsValue, getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const childProcessChecker = require('../utility/childProcessChecker');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const connectUtility = require('../utility/connectUtility');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
const logger = getLogger('srvconnProcess');
|
||||
|
||||
let systemConnection;
|
||||
let dbhan;
|
||||
let storedConnection;
|
||||
let lastDatabases = null;
|
||||
let lastStatus = null;
|
||||
@@ -16,7 +16,18 @@ let afterConnectCallbacks = [];
|
||||
async function handleRefresh() {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
const databases = await driver.listDatabases(systemConnection);
|
||||
let databases = await driver.listDatabases(dbhan);
|
||||
if (storedConnection?.allowedDatabases?.trim()) {
|
||||
const allowedDatabaseList = storedConnection.allowedDatabases
|
||||
.split('\n')
|
||||
.map(x => x.trim().toLowerCase())
|
||||
.filter(x => x);
|
||||
databases = databases.filter(x => allowedDatabaseList.includes(x.name.toLocaleLowerCase()));
|
||||
}
|
||||
if (storedConnection?.allowedDatabasesRegex?.trim()) {
|
||||
const regex = new RegExp(storedConnection.allowedDatabasesRegex, 'i');
|
||||
databases = databases.filter(x => regex.test(x.name));
|
||||
}
|
||||
setStatusName('ok');
|
||||
const databasesString = stableStringify(databases);
|
||||
if (lastDatabases != databasesString) {
|
||||
@@ -28,14 +39,14 @@ async function handleRefresh() {
|
||||
name: 'error',
|
||||
message: err.message,
|
||||
});
|
||||
// console.error(err);
|
||||
logger.error(extractErrorLogData(err), 'Error refreshing server databases');
|
||||
setTimeout(() => process.exit(1), 1000);
|
||||
}
|
||||
}
|
||||
|
||||
async function readVersion() {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
const version = await driver.getVersion(systemConnection);
|
||||
const version = await driver.getVersion(dbhan);
|
||||
process.send({ msgtype: 'version', version });
|
||||
}
|
||||
|
||||
@@ -59,7 +70,7 @@ async function handleConnect(connection) {
|
||||
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
systemConnection = await connectUtility(driver, storedConnection, 'app');
|
||||
dbhan = await connectUtility(driver, storedConnection, 'app');
|
||||
readVersion();
|
||||
handleRefresh();
|
||||
if (extractBoolSettingsValue(globalSettings, 'connection.autoRefresh', false)) {
|
||||
@@ -73,7 +84,7 @@ async function handleConnect(connection) {
|
||||
name: 'error',
|
||||
message: err.message,
|
||||
});
|
||||
// console.error(err);
|
||||
logger.error(extractErrorLogData(err), 'Error connecting to server');
|
||||
setTimeout(() => process.exit(1), 1000);
|
||||
}
|
||||
|
||||
@@ -84,7 +95,7 @@ async function handleConnect(connection) {
|
||||
}
|
||||
|
||||
function waitConnected() {
|
||||
if (systemConnection) return Promise.resolve();
|
||||
if (dbhan) return Promise.resolve();
|
||||
return new Promise((resolve, reject) => {
|
||||
afterConnectCallbacks.push([resolve, reject]);
|
||||
});
|
||||
@@ -94,18 +105,24 @@ function handlePing() {
|
||||
lastPing = new Date().getTime();
|
||||
}
|
||||
|
||||
async function handleDatabaseOp(op, { name }) {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
systemConnection = await connectUtility(driver, storedConnection, 'app');
|
||||
if (driver[op]) {
|
||||
await driver[op](systemConnection, name);
|
||||
} else {
|
||||
const dmp = driver.createDumper();
|
||||
dmp[op](name);
|
||||
logger.info({ sql: dmp.s }, 'Running script');
|
||||
await driver.query(systemConnection, dmp.s);
|
||||
async function handleDatabaseOp(op, { msgid, name }) {
|
||||
try {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
dbhan = await connectUtility(driver, storedConnection, 'app');
|
||||
if (driver[op]) {
|
||||
await driver[op](dbhan, name);
|
||||
} else {
|
||||
const dmp = driver.createDumper();
|
||||
dmp[op](name);
|
||||
logger.info({ sql: dmp.s }, 'Running script');
|
||||
await driver.query(dbhan, dmp.s, { discardResult: true });
|
||||
}
|
||||
await handleRefresh();
|
||||
|
||||
process.send({ msgtype: 'response', msgid, status: 'ok' });
|
||||
} catch (err) {
|
||||
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
|
||||
}
|
||||
await handleRefresh();
|
||||
}
|
||||
|
||||
async function handleDriverDataCore(msgid, callMethod) {
|
||||
@@ -120,11 +137,11 @@ async function handleDriverDataCore(msgid, callMethod) {
|
||||
}
|
||||
|
||||
async function handleServerSummary({ msgid }) {
|
||||
return handleDriverDataCore(msgid, driver => driver.serverSummary(systemConnection));
|
||||
return handleDriverDataCore(msgid, driver => driver.serverSummary(dbhan));
|
||||
}
|
||||
|
||||
async function handleSummaryCommand({ msgid, command, row }) {
|
||||
return handleDriverDataCore(msgid, driver => driver.summaryCommand(systemConnection, command, row));
|
||||
return handleDriverDataCore(msgid, driver => driver.summaryCommand(dbhan, command, row));
|
||||
}
|
||||
|
||||
const messageHandlers = {
|
||||
@@ -144,10 +161,12 @@ async function handleMessage({ msgtype, ...other }) {
|
||||
function start() {
|
||||
childProcessChecker();
|
||||
|
||||
setInterval(() => {
|
||||
setInterval(async () => {
|
||||
const time = new Date().getTime();
|
||||
if (time - lastPing > 40 * 1000) {
|
||||
logger.info('Server connection not alive, exiting');
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
await driver.close(dbhan);
|
||||
process.exit(0);
|
||||
}
|
||||
}, 10 * 1000);
|
||||
@@ -161,6 +180,7 @@ function start() {
|
||||
name: 'error',
|
||||
message: err.message,
|
||||
});
|
||||
logger.error(extractErrorLogData(err), `Error processing message ${message?.['msgtype']}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ const { getLogger, extractIntSettingsValue, extractBoolSettingsValue } = require
|
||||
|
||||
const logger = getLogger('sessionProcess');
|
||||
|
||||
let systemConnection;
|
||||
let dbhan;
|
||||
let storedConnection;
|
||||
let afterConnectCallbacks = [];
|
||||
// let currentHandlers = [];
|
||||
@@ -177,7 +177,7 @@ function handleStream(driver, resultIndexHolder, sqlItem) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const start = sqlItem.trimStart || sqlItem.start;
|
||||
const handler = new StreamHandler(resultIndexHolder, resolve, start && start.line);
|
||||
driver.stream(systemConnection, sqlItem.text, handler);
|
||||
driver.stream(dbhan, sqlItem.text, handler);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -196,7 +196,7 @@ async function handleConnect(connection) {
|
||||
storedConnection = connection;
|
||||
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
systemConnection = await connectUtility(driver, storedConnection, 'app');
|
||||
dbhan = await connectUtility(driver, storedConnection, 'app');
|
||||
for (const [resolve] of afterConnectCallbacks) {
|
||||
resolve();
|
||||
}
|
||||
@@ -210,7 +210,7 @@ async function handleConnect(connection) {
|
||||
// }
|
||||
|
||||
function waitConnected() {
|
||||
if (systemConnection) return Promise.resolve();
|
||||
if (dbhan) return Promise.resolve();
|
||||
return new Promise((resolve, reject) => {
|
||||
afterConnectCallbacks.push([resolve, reject]);
|
||||
});
|
||||
@@ -230,7 +230,7 @@ async function handleStartProfiler({ jslid }) {
|
||||
const writer = new TableWriter();
|
||||
writer.initializeFromReader(jslid);
|
||||
|
||||
currentProfiler = await driver.startProfiler(systemConnection, {
|
||||
currentProfiler = await driver.startProfiler(dbhan, {
|
||||
row: data => writer.rowFromReader(data),
|
||||
});
|
||||
currentProfiler.writer = writer;
|
||||
@@ -241,7 +241,7 @@ async function handleStopProfiler({ jslid }) {
|
||||
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
currentProfiler.writer.close();
|
||||
driver.stopProfiler(systemConnection, currentProfiler);
|
||||
driver.stopProfiler(dbhan, currentProfiler);
|
||||
currentProfiler = null;
|
||||
}
|
||||
|
||||
@@ -304,7 +304,7 @@ async function handleExecuteReader({ jslid, sql, fileName }) {
|
||||
const writer = new TableWriter();
|
||||
writer.initializeFromReader(jslid);
|
||||
|
||||
const reader = await driver.readQuery(systemConnection, sql);
|
||||
const reader = await driver.readQuery(dbhan, sql);
|
||||
|
||||
reader.on('data', data => {
|
||||
writer.rowFromReader(data);
|
||||
@@ -340,10 +340,12 @@ function start() {
|
||||
|
||||
lastPing = new Date().getTime();
|
||||
|
||||
setInterval(() => {
|
||||
setInterval(async () => {
|
||||
const time = new Date().getTime();
|
||||
if (time - lastPing > 25 * 1000) {
|
||||
logger.info('Session not alive, exiting');
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
await driver.close(dbhan);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
@@ -362,6 +364,8 @@ function start() {
|
||||
executingScripts == 0
|
||||
) {
|
||||
logger.info('Session not active, exiting');
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
await driver.close(dbhan);
|
||||
process.exit(0);
|
||||
}
|
||||
}, 10 * 1000);
|
||||
|
||||
@@ -3,7 +3,7 @@ const platformInfo = require('../utility/platformInfo');
|
||||
const childProcessChecker = require('../utility/childProcessChecker');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
const { SSHConnection } = require('../utility/SSHConnection');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const { getLogger, extractErrorLogData, extractErrorMessage } = require('dbgate-tools');
|
||||
|
||||
const logger = getLogger('sshProcess');
|
||||
|
||||
@@ -15,10 +15,12 @@ async function getSshConnection(connection) {
|
||||
agentForward: connection.sshMode == 'agent',
|
||||
passphrase: connection.sshMode == 'keyFile' ? connection.sshKeyfilePassword : undefined,
|
||||
username: connection.sshLogin,
|
||||
password: connection.sshMode == 'userPassword' ? connection.sshPassword : undefined,
|
||||
password: (connection.sshMode || 'userPassword') == 'userPassword' ? connection.sshPassword : undefined,
|
||||
agentSocket: connection.sshMode == 'agent' ? platformInfo.sshAuthSock : undefined,
|
||||
privateKey:
|
||||
connection.sshMode == 'keyFile' && connection.sshKeyfile ? await fs.readFile(connection.sshKeyfile) : undefined,
|
||||
connection.sshMode == 'keyFile' && (connection.sshKeyfile || platformInfo?.defaultKeyfile)
|
||||
? await fs.readFile(connection.sshKeyfile || platformInfo?.defaultKeyfile)
|
||||
: undefined,
|
||||
skipAutoPrivateKey: true,
|
||||
noReadline: true,
|
||||
};
|
||||
@@ -38,13 +40,13 @@ async function handleStart({ connection, tunnelConfig }) {
|
||||
tunnelConfig,
|
||||
});
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error creating SSH tunnel connection:');
|
||||
logger.error(extractErrorLogData(err), 'Error creating SSH tunnel connection:');
|
||||
|
||||
process.send({
|
||||
msgtype: 'error',
|
||||
connection,
|
||||
tunnelConfig,
|
||||
errorMessage: err.message,
|
||||
errorMessage: extractErrorMessage(err.message),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
19
packages/api/src/shell/autoIndexForeignKeysTransform.js
Normal file
@@ -0,0 +1,19 @@
|
||||
const autoIndexForeignKeysTransform = () => database => {
|
||||
return {
|
||||
...database,
|
||||
tables: database.tables.map(table => {
|
||||
return {
|
||||
...table,
|
||||
indexes: [
|
||||
...(table.indexes || []),
|
||||
...table.foreignKeys.map(fk => ({
|
||||
constraintName: `IX_${fk.constraintName}`,
|
||||
columns: fk.columns.map(x => ({ columnName: x.columnName })),
|
||||
})),
|
||||
],
|
||||
};
|
||||
}),
|
||||
};
|
||||
};
|
||||
|
||||
module.exports = autoIndexForeignKeysTransform;
|
||||
@@ -2,6 +2,13 @@ const EnsureStreamHeaderStream = require('../utility/EnsureStreamHeaderStream');
|
||||
const Stream = require('stream');
|
||||
const ColumnMapTransformStream = require('../utility/ColumnMapTransformStream');
|
||||
|
||||
/**
|
||||
* Copies reader to writer. Used for import, export tables and transfer data between tables
|
||||
* @param {readerType} input - reader object
|
||||
* @param {writerType} output - writer object
|
||||
* @param {object} options - options
|
||||
* @returns {Promise}
|
||||
*/
|
||||
function copyStream(input, output, options) {
|
||||
const { columns } = options || {};
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ const { resolveArchiveFolder } = require('../utility/directories');
|
||||
async function dataDuplicator({
|
||||
connection,
|
||||
archive,
|
||||
folder,
|
||||
items,
|
||||
options,
|
||||
analysedStructure = null,
|
||||
@@ -19,32 +20,44 @@ async function dataDuplicator({
|
||||
systemConnection,
|
||||
}) {
|
||||
if (!driver) driver = requireEngineDriver(connection);
|
||||
const pool = systemConnection || (await connectUtility(driver, connection, 'write'));
|
||||
|
||||
logger.info(`Connected.`);
|
||||
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
|
||||
|
||||
if (!analysedStructure) {
|
||||
analysedStructure = await driver.analyseFull(pool);
|
||||
try {
|
||||
logger.info(`Connected.`);
|
||||
|
||||
if (!analysedStructure) {
|
||||
analysedStructure = await driver.analyseFull(dbhan);
|
||||
}
|
||||
|
||||
const sourceDir = archive
|
||||
? resolveArchiveFolder(archive)
|
||||
: folder?.startsWith('archive:')
|
||||
? resolveArchiveFolder(folder.substring('archive:'.length))
|
||||
: folder;
|
||||
|
||||
const dupl = new DataDuplicator(
|
||||
dbhan,
|
||||
driver,
|
||||
analysedStructure,
|
||||
items.map(item => ({
|
||||
name: item.name,
|
||||
operation: item.operation,
|
||||
matchColumns: item.matchColumns,
|
||||
openStream:
|
||||
item.openStream || (() => jsonLinesReader({ fileName: path.join(sourceDir, `${item.name}.jsonl`) })),
|
||||
})),
|
||||
stream,
|
||||
copyStream,
|
||||
options
|
||||
);
|
||||
|
||||
await dupl.run();
|
||||
} finally {
|
||||
if (!systemConnection) {
|
||||
await driver.close(dbhan);
|
||||
}
|
||||
}
|
||||
|
||||
const dupl = new DataDuplicator(
|
||||
pool,
|
||||
driver,
|
||||
analysedStructure,
|
||||
items.map(item => ({
|
||||
name: item.name,
|
||||
operation: item.operation,
|
||||
matchColumns: item.matchColumns,
|
||||
openStream:
|
||||
item.openStream ||
|
||||
(() => jsonLinesReader({ fileName: path.join(resolveArchiveFolder(archive), `${item.name}.jsonl`) })),
|
||||
})),
|
||||
stream,
|
||||
copyStream,
|
||||
options
|
||||
);
|
||||
|
||||
await dupl.run();
|
||||
}
|
||||
|
||||
module.exports = dataDuplicator;
|
||||
|
||||