Merge pull request #1423 from dbgate/feature/remote-code-execution

Add validation for JavaScript identifiers and shell API function names
This commit is contained in:
Stela Augustinova
2026-04-14 09:51:20 +02:00
committed by GitHub
5 changed files with 213 additions and 24 deletions

View File

@@ -19,6 +19,26 @@ const unzipDirectory = require('../shell/unzipDirectory');
const logger = getLogger('archive'); const logger = getLogger('archive');
/**
* Rejects any archive name (folder or file) that contains path-traversal
* sequences, directory separators, or null bytes. These values are used
* directly in path.join() calls; allowing traversal would let callers read
* or write arbitrary files outside the archive directory.
*/
function assertSafeArchiveName(name, label) {
if (typeof name !== 'string' || name.length === 0) {
throw new Error(`DBGM-00000 Invalid ${label}: must be a non-empty string`);
}
if (name.includes('\0') || name.includes('..') || name.includes('/') || name.includes('\\')) {
throw new Error(`DBGM-00000 Invalid ${label}: path traversal not allowed`);
}
// Reject names that resolve to the archive root itself (e.g. '.')
const resolved = path.resolve(archivedir(), name);
if (resolved === path.resolve(archivedir())) {
throw new Error(`DBGM-00000 Invalid ${label}: must not resolve to the archive root`);
}
}
module.exports = { module.exports = {
folders_meta: true, folders_meta: true,
async folders() { async folders() {
@@ -39,6 +59,7 @@ module.exports = {
createFolder_meta: true, createFolder_meta: true,
async createFolder({ folder }) { async createFolder({ folder }) {
assertSafeArchiveName(folder, 'folder');
await fs.mkdir(path.join(archivedir(), folder)); await fs.mkdir(path.join(archivedir(), folder));
socket.emitChanged('archive-folders-changed'); socket.emitChanged('archive-folders-changed');
return true; return true;
@@ -46,8 +67,12 @@ module.exports = {
createLink_meta: true, createLink_meta: true,
async createLink({ linkedFolder }) { async createLink({ linkedFolder }) {
if ( typeof linkedFolder !== 'string' || linkedFolder.length === 0) {
throw new Error(`DBGM-00000 Invalid linkedFolder: must be a non-empty string`);
}
assertSafeArchiveName(path.parse(linkedFolder).name, 'linkedFolder');
const folder = await this.getNewArchiveFolder({ database: path.parse(linkedFolder).name + '.link' }); const folder = await this.getNewArchiveFolder({ database: path.parse(linkedFolder).name + '.link' });
fs.writeFile(path.join(archivedir(), folder), linkedFolder); await fs.writeFile(path.join(archivedir(), folder), linkedFolder);
clearArchiveLinksCache(); clearArchiveLinksCache();
socket.emitChanged('archive-folders-changed'); socket.emitChanged('archive-folders-changed');
return folder; return folder;
@@ -71,6 +96,7 @@ module.exports = {
files_meta: true, files_meta: true,
async files({ folder }) { async files({ folder }) {
assertSafeArchiveName(folder, 'folder');
try { try {
if (folder.endsWith('.zip')) { if (folder.endsWith('.zip')) {
if (await fs.exists(path.join(archivedir(), folder))) { if (await fs.exists(path.join(archivedir(), folder))) {
@@ -121,6 +147,9 @@ module.exports = {
createFile_meta: true, createFile_meta: true,
async createFile({ folder, file, fileType, tableInfo }) { async createFile({ folder, file, fileType, tableInfo }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
assertSafeArchiveName(fileType, 'fileType');
await fs.writeFile( await fs.writeFile(
path.join(resolveArchiveFolder(folder), `${file}.${fileType}`), path.join(resolveArchiveFolder(folder), `${file}.${fileType}`),
tableInfo ? JSON.stringify({ __isStreamHeader: true, tableInfo }) : '' tableInfo ? JSON.stringify({ __isStreamHeader: true, tableInfo }) : ''
@@ -131,6 +160,9 @@ module.exports = {
deleteFile_meta: true, deleteFile_meta: true,
async deleteFile({ folder, file, fileType }) { async deleteFile({ folder, file, fileType }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
assertSafeArchiveName(fileType, 'fileType');
await fs.unlink(path.join(resolveArchiveFolder(folder), `${file}.${fileType}`)); await fs.unlink(path.join(resolveArchiveFolder(folder), `${file}.${fileType}`));
socket.emitChanged(`archive-files-changed`, { folder }); socket.emitChanged(`archive-files-changed`, { folder });
return true; return true;
@@ -138,6 +170,10 @@ module.exports = {
renameFile_meta: true, renameFile_meta: true,
async renameFile({ folder, file, newFile, fileType }) { async renameFile({ folder, file, newFile, fileType }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
assertSafeArchiveName(newFile, 'newFile');
assertSafeArchiveName(fileType, 'fileType');
await fs.rename( await fs.rename(
path.join(resolveArchiveFolder(folder), `${file}.${fileType}`), path.join(resolveArchiveFolder(folder), `${file}.${fileType}`),
path.join(resolveArchiveFolder(folder), `${newFile}.${fileType}`) path.join(resolveArchiveFolder(folder), `${newFile}.${fileType}`)
@@ -148,6 +184,8 @@ module.exports = {
modifyFile_meta: true, modifyFile_meta: true,
async modifyFile({ folder, file, changeSet, mergedRows, mergeKey, mergeMode }) { async modifyFile({ folder, file, changeSet, mergedRows, mergeKey, mergeMode }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
await jsldata.closeDataStore(`archive://${folder}/${file}`); await jsldata.closeDataStore(`archive://${folder}/${file}`);
const changedFilePath = path.join(resolveArchiveFolder(folder), `${file}.jsonl`); const changedFilePath = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
@@ -187,6 +225,8 @@ module.exports = {
renameFolder_meta: true, renameFolder_meta: true,
async renameFolder({ folder, newFolder }) { async renameFolder({ folder, newFolder }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(newFolder, 'newFolder');
const uniqueName = await this.getNewArchiveFolder({ database: newFolder }); const uniqueName = await this.getNewArchiveFolder({ database: newFolder });
await fs.rename(path.join(archivedir(), folder), path.join(archivedir(), uniqueName)); await fs.rename(path.join(archivedir(), folder), path.join(archivedir(), uniqueName));
socket.emitChanged(`archive-folders-changed`); socket.emitChanged(`archive-folders-changed`);
@@ -196,6 +236,7 @@ module.exports = {
deleteFolder_meta: true, deleteFolder_meta: true,
async deleteFolder({ folder }) { async deleteFolder({ folder }) {
if (!folder) throw new Error('Missing folder parameter'); if (!folder) throw new Error('Missing folder parameter');
assertSafeArchiveName(folder, 'folder');
if (folder.endsWith('.link') || folder.endsWith('.zip')) { if (folder.endsWith('.link') || folder.endsWith('.zip')) {
await fs.unlink(path.join(archivedir(), folder)); await fs.unlink(path.join(archivedir(), folder));
} else { } else {
@@ -207,6 +248,8 @@ module.exports = {
saveText_meta: true, saveText_meta: true,
async saveText({ folder, file, text }) { async saveText({ folder, file, text }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
await fs.writeFile(path.join(resolveArchiveFolder(folder), `${file}.jsonl`), text); await fs.writeFile(path.join(resolveArchiveFolder(folder), `${file}.jsonl`), text);
socket.emitChanged(`archive-files-changed`, { folder }); socket.emitChanged(`archive-files-changed`, { folder });
return true; return true;
@@ -214,6 +257,8 @@ module.exports = {
saveJslData_meta: true, saveJslData_meta: true,
async saveJslData({ folder, file, jslid, changeSet }) { async saveJslData({ folder, file, jslid, changeSet }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
const source = getJslFileName(jslid); const source = getJslFileName(jslid);
const target = path.join(resolveArchiveFolder(folder), `${file}.jsonl`); const target = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
if (changeSet) { if (changeSet) {
@@ -232,11 +277,20 @@ module.exports = {
saveRows_meta: true, saveRows_meta: true,
async saveRows({ folder, file, rows }) { async saveRows({ folder, file, rows }) {
const fileStream = fs.createWriteStream(path.join(resolveArchiveFolder(folder), `${file}.jsonl`)); assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
const filePath = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
const fileStream = fs.createWriteStream(filePath);
for (const row of rows) { for (const row of rows) {
await fileStream.write(JSON.stringify(row) + '\n'); const ok = fileStream.write(JSON.stringify(row) + '\n');
if (!ok) {
await new Promise(resolve => fileStream.once('drain', resolve));
}
} }
await fileStream.close(); await new Promise((resolve, reject) => {
fileStream.end(() => resolve());
fileStream.on('error', reject);
});
socket.emitChanged(`archive-files-changed`, { folder }); socket.emitChanged(`archive-files-changed`, { folder });
return true; return true;
}, },
@@ -256,6 +310,8 @@ module.exports = {
getArchiveData_meta: true, getArchiveData_meta: true,
async getArchiveData({ folder, file }) { async getArchiveData({ folder, file }) {
assertSafeArchiveName(folder, 'folder');
assertSafeArchiveName(file, 'file');
let rows; let rows;
if (folder.endsWith('.zip')) { if (folder.endsWith('.zip')) {
rows = await unzipJsonLinesFile(path.join(archivedir(), folder), `${file}.jsonl`); rows = await unzipJsonLinesFile(path.join(archivedir(), folder), `${file}.jsonl`);
@@ -270,7 +326,7 @@ module.exports = {
if (!fileName?.endsWith('.zip')) { if (!fileName?.endsWith('.zip')) {
throw new Error(`${fileName} is not a ZIP file`); throw new Error(`${fileName} is not a ZIP file`);
} }
assertSafeArchiveName(fileName.slice(0, -4), 'fileName');
const folder = await this.getNewArchiveFolder({ database: fileName }); const folder = await this.getNewArchiveFolder({ database: fileName });
await fs.copyFile(filePath, path.join(archivedir(), folder)); await fs.copyFile(filePath, path.join(archivedir(), folder));
socket.emitChanged(`archive-folders-changed`); socket.emitChanged(`archive-folders-changed`);
@@ -280,6 +336,7 @@ module.exports = {
zip_meta: true, zip_meta: true,
async zip({ folder }) { async zip({ folder }) {
assertSafeArchiveName(folder, 'folder');
const newFolder = await this.getNewArchiveFolder({ database: folder + '.zip' }); const newFolder = await this.getNewArchiveFolder({ database: folder + '.zip' });
await zipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder)); await zipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
socket.emitChanged(`archive-folders-changed`); socket.emitChanged(`archive-folders-changed`);
@@ -289,6 +346,7 @@ module.exports = {
unzip_meta: true, unzip_meta: true,
async unzip({ folder }) { async unzip({ folder }) {
assertSafeArchiveName(folder, 'folder');
const newFolder = await this.getNewArchiveFolder({ database: folder.slice(0, -4) }); const newFolder = await this.getNewArchiveFolder({ database: folder.slice(0, -4) });
await unzipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder)); await unzipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
socket.emitChanged(`archive-folders-changed`); socket.emitChanged(`archive-folders-changed`);
@@ -298,6 +356,7 @@ module.exports = {
getZippedPath_meta: true, getZippedPath_meta: true,
async getZippedPath({ folder }) { async getZippedPath({ folder }) {
assertSafeArchiveName(folder, 'folder');
if (folder.endsWith('.zip')) { if (folder.endsWith('.zip')) {
return { filePath: path.join(archivedir(), folder) }; return { filePath: path.join(archivedir(), folder) };
} }

View File

@@ -10,6 +10,7 @@ const {
extractShellApiPlugins, extractShellApiPlugins,
compileShellApiFunctionName, compileShellApiFunctionName,
jsonScriptToJavascript, jsonScriptToJavascript,
assertValidShellApiFunctionName,
getLogger, getLogger,
safeJsonParse, safeJsonParse,
pinoLogRecordToMessageRecord, pinoLogRecordToMessageRecord,
@@ -54,19 +55,23 @@ logger.info('DBGM-00014 Finished job script');
dbgateApi.runScript(run); dbgateApi.runScript(run);
`; `;
const loaderScriptTemplate = (prefix, functionName, props, runid) => ` const loaderScriptTemplate = (functionName, props, runid) => {
const plugins = extractShellApiPlugins(functionName, props);
const prefix = plugins.map(packageName => `// @require ${packageName}\n`).join('');
return `
${prefix} ${prefix}
const dbgateApi = require(process.env.DBGATE_API); const dbgateApi = require(process.env.DBGATE_API);
dbgateApi.initializeApiEnvironment(); dbgateApi.initializeApiEnvironment();
${requirePluginsTemplate(extractShellApiPlugins(functionName, props))} ${requirePluginsTemplate(plugins)}
require=null; require=null;
async function run() { async function run() {
const reader=await ${compileShellApiFunctionName(functionName)}(${JSON.stringify(props)}); const reader=await ${compileShellApiFunctionName(functionName)}(${JSON.stringify(props)});
const writer=await dbgateApi.collectorWriter({runid: '${runid}'}); const writer=await dbgateApi.collectorWriter({runid: ${JSON.stringify(runid)}});
await dbgateApi.copyStream(reader, writer); await dbgateApi.copyStream(reader, writer);
} }
dbgateApi.runScript(run); dbgateApi.runScript(run);
`; `;
};
module.exports = { module.exports = {
/** @type {import('dbgate-types').OpenedRunner[]} */ /** @type {import('dbgate-types').OpenedRunner[]} */
@@ -377,14 +382,12 @@ module.exports = {
return { errorMessage: 'DBGM-00289 Unallowed file' }; return { errorMessage: 'DBGM-00289 Unallowed file' };
} }
} }
const prefix = extractShellApiPlugins(functionName)
.map(packageName => `// @require ${packageName}\n`)
.join('');
const promise = new Promise((resolve, reject) => { const promise = new Promise((resolve, reject) => {
assertValidShellApiFunctionName(functionName);
const runid = crypto.randomUUID(); const runid = crypto.randomUUID();
this.requests[runid] = { resolve, reject, exitOnStreamError: true }; this.requests[runid] = { resolve, reject, exitOnStreamError: true };
this.startCore(runid, loaderScriptTemplate(prefix, functionName, props, runid)); this.startCore(runid, loaderScriptTemplate(functionName, props, runid));
}); });
return promise; return promise;
}, },

View File

@@ -16,23 +16,53 @@ function unzipDirectory(zipPath, outputDirectory) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => { yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
if (err) return reject(err); if (err) return reject(err);
let settled = false;
/** Track active streams so we can destroy them on early abort */
const activeStreams = new Set();
const safeReject = rejectErr => {
if (settled) return;
settled = true;
for (const s of activeStreams) {
s.destroy();
}
activeStreams.clear();
zipFile.close();
reject(rejectErr);
};
/** Pending per-file extractions — we resolve the main promise after they're all done */ /** Pending per-file extractions — we resolve the main promise after they're all done */
const pending = []; const pending = [];
// Resolved output boundary used for zip-slip checks on every entry
const resolvedOutputDir = path.resolve(outputDirectory);
// kick things off // kick things off
zipFile.readEntry(); zipFile.readEntry();
zipFile.on('entry', entry => { zipFile.on('entry', entry => {
// Null-byte poison check
if (entry.fileName.includes('\0')) {
return safeReject(new Error(`DBGM-00000 ZIP entry with null byte in filename rejected`));
}
const destPath = path.join(outputDirectory, entry.fileName); const destPath = path.join(outputDirectory, entry.fileName);
const resolvedDest = path.resolve(destPath);
// Zip-slip protection: every extracted path must stay inside outputDirectory
if (resolvedDest !== resolvedOutputDir && !resolvedDest.startsWith(resolvedOutputDir + path.sep)) {
return safeReject(
new Error(`DBGM-00000 ZIP slip detected: entry "${entry.fileName}" would escape output directory`)
);
}
// Handle directories (their names always end with “/” in ZIPs) // Handle directories (their names always end with “/” in ZIPs)
if (/\/$/.test(entry.fileName)) { if (/\/$/.test(entry.fileName)) {
// Ensure directory exists, then continue to next entry // Ensure directory exists, then continue to next entry
fs.promises fs.promises
.mkdir(destPath, { recursive: true }) .mkdir(destPath, { recursive: true })
.then(() => zipFile.readEntry()) .then(() => {
.catch(reject); if (!settled) zipFile.readEntry();
})
.catch(safeReject);
return; return;
} }
@@ -46,17 +76,29 @@ function unzipDirectory(zipPath, outputDirectory) {
if (err) return rej(err); if (err) return rej(err);
const writeStream = fs.createWriteStream(destPath); const writeStream = fs.createWriteStream(destPath);
activeStreams.add(readStream);
activeStreams.add(writeStream);
readStream.pipe(writeStream); readStream.pipe(writeStream);
// proceed to next entry once weve consumed *this* one // proceed to next entry once we've consumed *this* one
readStream.on('end', () => zipFile.readEntry()); readStream.on('end', () => {
activeStreams.delete(readStream);
if (!settled) zipFile.readEntry();
});
readStream.on('error', readErr => {
activeStreams.delete(readStream);
rej(readErr);
});
writeStream.on('finish', () => { writeStream.on('finish', () => {
activeStreams.delete(writeStream);
logger.info(`DBGM-00068 Extracted "${entry.fileName}" → "${destPath}".`); logger.info(`DBGM-00068 Extracted "${entry.fileName}" → "${destPath}".`);
res(); res();
}); });
writeStream.on('error', writeErr => { writeStream.on('error', writeErr => {
activeStreams.delete(writeStream);
logger.error( logger.error(
extractErrorLogData(writeErr), extractErrorLogData(writeErr),
`DBGM-00069 Error extracting "${entry.fileName}" from "${zipPath}".` `DBGM-00069 Error extracting "${entry.fileName}" from "${zipPath}".`
@@ -67,22 +109,29 @@ function unzipDirectory(zipPath, outputDirectory) {
}) })
); );
// Immediately abort the whole unzip if this file fails; otherwise the
// zip would never emit 'end' (lazyEntries won't advance without readEntry).
filePromise.catch(safeReject);
pending.push(filePromise); pending.push(filePromise);
}); });
// Entire archive enumerated; wait for all streams to finish // Entire archive enumerated; wait for all streams to finish
zipFile.on('end', () => { zipFile.on('end', () => {
if (settled) return;
Promise.all(pending) Promise.all(pending)
.then(() => { .then(() => {
if (settled) return;
settled = true;
zipFile.close();
logger.info(`DBGM-00070 Archive "${zipPath}" fully extracted to "${outputDirectory}".`); logger.info(`DBGM-00070 Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
resolve(true); resolve(true);
}) })
.catch(reject); .catch(safeReject);
}); });
zipFile.on('error', err => { zipFile.on('error', err => {
logger.error(extractErrorLogData(err), `DBGM-00071 ZIP file error in ${zipPath}.`); logger.error(extractErrorLogData(err), `DBGM-00071 ZIP file error in ${zipPath}.`);
reject(err); safeReject(err);
}); });
}); });
}); });

View File

@@ -1,6 +1,6 @@
import _uniq from 'lodash/uniq'; import _uniq from 'lodash/uniq';
import _cloneDeepWith from 'lodash/cloneDeepWith'; import _cloneDeepWith from 'lodash/cloneDeepWith';
import { evalShellApiFunctionName, compileShellApiFunctionName, extractShellApiPlugins } from './packageTools'; import { evalShellApiFunctionName, compileShellApiFunctionName, extractShellApiPlugins, assertValidJsIdentifier, assertValidShellApiFunctionName } from './packageTools';
export interface ScriptWriterGeneric { export interface ScriptWriterGeneric {
allocVariable(prefix?: string); allocVariable(prefix?: string);
@@ -40,6 +40,7 @@ export class ScriptWriterJavaScript implements ScriptWriterGeneric {
} }
assignCore(variableName, functionName, props) { assignCore(variableName, functionName, props) {
assertValidJsIdentifier(variableName, 'variableName');
this._put(`const ${variableName} = await ${functionName}(${JSON.stringify(props)});`); this._put(`const ${variableName} = await ${functionName}(${JSON.stringify(props)});`);
} }
@@ -49,6 +50,7 @@ export class ScriptWriterJavaScript implements ScriptWriterGeneric {
} }
assignValue(variableName, jsonValue) { assignValue(variableName, jsonValue) {
assertValidJsIdentifier(variableName, 'variableName');
this._put(`const ${variableName} = ${JSON.stringify(jsonValue)};`); this._put(`const ${variableName} = ${JSON.stringify(jsonValue)};`);
} }
@@ -57,8 +59,13 @@ export class ScriptWriterJavaScript implements ScriptWriterGeneric {
} }
copyStream(sourceVar, targetVar, colmapVar = null, progressName?: string | { name: string; runid: string }) { copyStream(sourceVar, targetVar, colmapVar = null, progressName?: string | { name: string; runid: string }) {
assertValidJsIdentifier(sourceVar, 'sourceVar');
assertValidJsIdentifier(targetVar, 'targetVar');
let opts = '{'; let opts = '{';
if (colmapVar) opts += `columns: ${colmapVar}, `; if (colmapVar) {
assertValidJsIdentifier(colmapVar, 'colmapVar');
opts += `columns: ${colmapVar}, `;
}
if (progressName) opts += `progressName: ${JSON.stringify(progressName)}, `; if (progressName) opts += `progressName: ${JSON.stringify(progressName)}, `;
opts += '}'; opts += '}';
@@ -89,7 +96,7 @@ export class ScriptWriterJavaScript implements ScriptWriterGeneric {
} }
zipDirectory(inputDirectory, outputFile) { zipDirectory(inputDirectory, outputFile) {
this._put(`await dbgateApi.zipDirectory('${inputDirectory}', '${outputFile}');`); this._put(`await dbgateApi.zipDirectory(${JSON.stringify(inputDirectory)}, ${JSON.stringify(outputFile)});`);
} }
} }
@@ -214,6 +221,8 @@ export class ScriptWriterEval implements ScriptWriterGeneric {
requirePackage(packageName) {} requirePackage(packageName) {}
async assign(variableName, functionName, props) { async assign(variableName, functionName, props) {
assertValidJsIdentifier(variableName, 'variableName');
assertValidShellApiFunctionName(functionName);
const func = evalShellApiFunctionName(functionName, this.dbgateApi, this.requirePlugin); const func = evalShellApiFunctionName(functionName, this.dbgateApi, this.requirePlugin);
this.variables[variableName] = await func( this.variables[variableName] = await func(
@@ -226,10 +235,14 @@ export class ScriptWriterEval implements ScriptWriterGeneric {
} }
assignValue(variableName, jsonValue) { assignValue(variableName, jsonValue) {
assertValidJsIdentifier(variableName, 'variableName');
this.variables[variableName] = jsonValue; this.variables[variableName] = jsonValue;
} }
async copyStream(sourceVar, targetVar, colmapVar = null, progressName?: string | { name: string; runid: string }) { async copyStream(sourceVar, targetVar, colmapVar = null, progressName?: string | { name: string; runid: string }) {
assertValidJsIdentifier(sourceVar, 'sourceVar');
assertValidJsIdentifier(targetVar, 'targetVar');
if (colmapVar != null) assertValidJsIdentifier(colmapVar, 'colmapVar');
await this.dbgateApi.copyStream(this.variables[sourceVar], this.variables[targetVar], { await this.dbgateApi.copyStream(this.variables[sourceVar], this.variables[targetVar], {
progressName: _cloneDeepWith(progressName, node => { progressName: _cloneDeepWith(progressName, node => {
if (node?.$runid) { if (node?.$runid) {

View File

@@ -3,6 +3,64 @@ import _camelCase from 'lodash/camelCase';
import _isString from 'lodash/isString'; import _isString from 'lodash/isString';
import _isPlainObject from 'lodash/isPlainObject'; import _isPlainObject from 'lodash/isPlainObject';
const JS_IDENTIFIER_RE = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/;
// ECMAScript reserved words, strict-mode keywords, and async-context keywords
// that cannot be used as variable or function names in the generated scripts.
// Sources: ECMA-262 §12.7.2 (reserved words), §12.7.3 (strict mode), §14 (contextual).
const JS_RESERVED_WORDS = new Set([
// Keywords
'break', 'case', 'catch', 'class', 'const', 'continue', 'debugger', 'default',
'delete', 'do', 'else', 'export', 'extends', 'false', 'finally', 'for',
'function', 'if', 'import', 'in', 'instanceof', 'let', 'new', 'null', 'return',
'static', 'super', 'switch', 'this', 'throw', 'true', 'try', 'typeof', 'var',
'void', 'while', 'with', 'yield',
// Strict-mode reserved words
'implements', 'interface', 'package', 'private', 'protected', 'public',
// Async context keywords
'async', 'await',
// Future reserved
'enum',
'eval', 'arguments',
]);
export function isValidJsIdentifier(name: string): boolean {
return typeof name === 'string' && JS_IDENTIFIER_RE.test(name) && !JS_RESERVED_WORDS.has(name);
}
export function assertValidJsIdentifier(name: string, label: string): void {
if (!isValidJsIdentifier(name)) {
throw new Error(`DBGM-00000 Invalid ${label}: ${String(name).substring(0, 100)}`);
}
}
/**
* Validates a shell API function name.
* Allowed forms:
* - "someFunctionName" (plain identifier, resolved as dbgateApi.someFunctionName)
* - "funcName@dbgate-plugin-xxx" (namespaced, resolved as plugin.shellApi.funcName)
*/
export function assertValidShellApiFunctionName(functionName: string): void {
if (typeof functionName !== 'string') {
throw new Error('DBGM-00000 functionName must be a string');
}
const nsMatch = functionName.match(/^([^@]+)@([^@]+)$/);
if (nsMatch) {
if (!isValidJsIdentifier(nsMatch[1])) {
throw new Error(`DBGM-00000 Invalid function part in functionName: ${nsMatch[1].substring(0, 100)}`);
}
if (!/^dbgate-plugin-[a-zA-Z0-9_-]+$/.test(nsMatch[2])) {
throw new Error(`DBGM-00000 Invalid plugin package in functionName: ${nsMatch[2].substring(0, 100)}`);
}
} else {
if (!isValidJsIdentifier(functionName)) {
throw new Error(`DBGM-00000 Invalid functionName: ${functionName.substring(0, 100)}`);
}
}
}
const VALID_PLUGIN_NAME_RE = /^dbgate-plugin-[a-zA-Z0-9_-]+$/;
export function extractShellApiPlugins(functionName, props): string[] { export function extractShellApiPlugins(functionName, props): string[] {
const res = []; const res = [];
const nsMatch = functionName.match(/^([^@]+)@([^@]+)/); const nsMatch = functionName.match(/^([^@]+)@([^@]+)/);
@@ -15,6 +73,11 @@ export function extractShellApiPlugins(functionName, props): string[] {
res.push(nsMatchEngine[2]); res.push(nsMatchEngine[2]);
} }
} }
for (const plugin of res) {
if (!VALID_PLUGIN_NAME_RE.test(plugin)) {
throw new Error(`DBGM-00000 Invalid plugin name: ${String(plugin).substring(0, 100)}`);
}
}
return res; return res;
} }
@@ -28,7 +91,8 @@ export function extractPackageName(name): string {
} }
export function compileShellApiFunctionName(functionName) { export function compileShellApiFunctionName(functionName) {
const nsMatch = functionName.match(/^([^@]+)@([^@]+)/); assertValidShellApiFunctionName(functionName);
const nsMatch = functionName.match(/^([^@]+)@([^@]+)$/);
if (nsMatch) { if (nsMatch) {
return `${_camelCase(nsMatch[2])}.shellApi.${nsMatch[1]}`; return `${_camelCase(nsMatch[2])}.shellApi.${nsMatch[1]}`;
} }
@@ -36,7 +100,8 @@ export function compileShellApiFunctionName(functionName) {
} }
export function evalShellApiFunctionName(functionName, dbgateApi, requirePlugin) { export function evalShellApiFunctionName(functionName, dbgateApi, requirePlugin) {
const nsMatch = functionName.match(/^([^@]+)@([^@]+)/); assertValidShellApiFunctionName(functionName);
const nsMatch = functionName.match(/^([^@]+)@([^@]+)$/);
if (nsMatch) { if (nsMatch) {
return requirePlugin(nsMatch[2]).shellApi[nsMatch[1]]; return requirePlugin(nsMatch[2]).shellApi[nsMatch[1]];
} }