Переработано окно эндпоинта, добавлены элементы дебага, добавлена возможность сохранять и загружать конфигурацию эндпоинта, добавлено отображение ошибок при загрузке конфигурации. Исправлены мелкие баги.

This commit is contained in:
2026-03-01 16:00:26 +03:00
parent 7e2e0103fe
commit 6766cd81a1
15 changed files with 1677 additions and 172 deletions

View File

@@ -2,6 +2,8 @@ import { Response } from 'express';
import { AuthRequest } from '../middleware/auth';
import { mainPool } from '../config/database';
import { v4 as uuidv4 } from 'uuid';
import { ExportedEndpoint, ExportedScriptQuery } from '../types';
import { encryptEndpointData, decryptEndpointData } from '../services/endpointCrypto';
export const getEndpoints = async (req: AuthRequest, res: Response) => {
try {
@@ -314,6 +316,11 @@ export const testEndpoint = async (req: AuthRequest, res: Response) => {
data: result.rows,
rowCount: result.rowCount,
executionTime: result.executionTime,
logs: [
{ type: 'info', message: `Query executed in ${result.executionTime}ms, returned ${result.rowCount} rows`, timestamp: Date.now() },
],
queries: [],
processedQuery,
});
} else if (execType === 'script') {
if (!script_language || !script_code) {
@@ -338,7 +345,9 @@ export const testEndpoint = async (req: AuthRequest, res: Response) => {
res.json({
success: true,
data: scriptResult,
data: scriptResult.result,
logs: scriptResult.logs,
queries: scriptResult.queries,
});
} else if (execType === 'aql') {
if (!database_id) {
@@ -370,6 +379,10 @@ export const testEndpoint = async (req: AuthRequest, res: Response) => {
data: result.rows,
rowCount: result.rowCount,
executionTime: result.executionTime,
logs: [
{ type: 'info', message: `AQL ${aql_method} ${aql_endpoint} executed in ${result.executionTime}ms`, timestamp: Date.now() },
],
queries: [],
});
} else {
return res.status(400).json({ error: 'Invalid execution_type' });
@@ -378,6 +391,347 @@ export const testEndpoint = async (req: AuthRequest, res: Response) => {
res.status(400).json({
success: false,
error: error.message,
detail: error.detail || undefined,
hint: error.hint || undefined,
logs: [],
queries: [],
});
}
};
/**
 * GET /api/endpoints/:id/export
 * Exports an endpoint as an encrypted .kabe file. Local database and folder
 * ids are replaced with their names so the file can be imported on another
 * installation where ids differ.
 */
export const exportEndpoint = async (req: AuthRequest, res: Response) => {
  try {
    const { id } = req.params;

    const endpointResult = await mainPool.query(
      'SELECT * FROM endpoints WHERE id = $1',
      [id]
    );
    if (endpointResult.rows.length === 0) {
      return res.status(404).json({ error: 'Endpoint not found' });
    }
    const endpoint = endpointResult.rows[0];

    // Cache database lookups: the main database and the script queries often
    // share database_ids, so avoid re-issuing the same SELECT (N+1 pattern).
    const dbInfoCache = new Map<string, { name: string; type: string } | null>();
    const lookupDatabase = async (dbId: string): Promise<{ name: string; type: string } | null> => {
      if (!dbInfoCache.has(dbId)) {
        const dbResult = await mainPool.query(
          'SELECT name, type FROM databases WHERE id = $1',
          [dbId]
        );
        dbInfoCache.set(dbId, dbResult.rows.length > 0 ? dbResult.rows[0] : null);
      }
      return dbInfoCache.get(dbId)!;
    };

    // Resolve main database_id -> portable name & type
    let databaseName: string | null = null;
    let databaseType: string | null = null;
    if (endpoint.database_id) {
      const db = await lookupDatabase(endpoint.database_id);
      if (db) {
        databaseName = db.name;
        databaseType = db.type;
      }
    }

    // Resolve folder_id -> name
    let folderName: string | null = null;
    if (endpoint.folder_id) {
      const folderResult = await mainPool.query(
        'SELECT name FROM folders WHERE id = $1',
        [endpoint.folder_id]
      );
      if (folderResult.rows.length > 0) {
        folderName = folderResult.rows[0].name;
      }
    }

    // Resolve database_ids referenced inside script_queries
    const scriptQueries = endpoint.script_queries || [];
    const exportedScriptQueries: ExportedScriptQuery[] = [];
    for (const sq of scriptQueries) {
      let sqDbName: string | undefined;
      let sqDbType: string | undefined;
      if (sq.database_id) {
        const sqDb = await lookupDatabase(sq.database_id);
        if (sqDb) {
          sqDbName = sqDb.name;
          sqDbType = sqDb.type;
        }
      }
      exportedScriptQueries.push({
        name: sq.name,
        sql: sq.sql,
        database_name: sqDbName,
        database_type: sqDbType,
        aql_method: sq.aql_method,
        aql_endpoint: sq.aql_endpoint,
        aql_body: sq.aql_body,
        aql_query_params: sq.aql_query_params,
      });
    }

    const exportData: ExportedEndpoint = {
      _format: 'kabe_v1',
      name: endpoint.name,
      description: endpoint.description || '',
      method: endpoint.method,
      path: endpoint.path,
      execution_type: endpoint.execution_type || 'sql',
      database_name: databaseName,
      database_type: databaseType,
      sql_query: endpoint.sql_query || '',
      parameters: endpoint.parameters || [],
      script_language: endpoint.script_language || null,
      script_code: endpoint.script_code || null,
      script_queries: exportedScriptQueries,
      aql_method: endpoint.aql_method || null,
      aql_endpoint: endpoint.aql_endpoint || null,
      aql_body: endpoint.aql_body || null,
      aql_query_params: endpoint.aql_query_params || null,
      is_public: endpoint.is_public || false,
      enable_logging: endpoint.enable_logging || false,
      detailed_response: endpoint.detailed_response || false,
      folder_name: folderName,
    };

    const encrypted = encryptEndpointData(exportData);

    // BUG FIX: HTTP header values must be Latin-1. The sanitizer deliberately
    // keeps Cyrillic letters, and putting them straight into
    // Content-Disposition makes res.setHeader throw ERR_INVALID_CHAR.
    // Send an ASCII fallback in `filename` plus an RFC 5987 percent-encoded
    // `filename*` so browsers still show the original (sanitized) name.
    const safeFileName = endpoint.name.replace(/[^a-zA-Z0-9_\-а-яА-ЯёЁ]/g, '_');
    const asciiFileName = safeFileName.replace(/[^\x20-\x7E]/g, '_');
    res.setHeader('Content-Type', 'application/octet-stream');
    res.setHeader(
      'Content-Disposition',
      `attachment; filename="${asciiFileName}.kabe"; filename*=UTF-8''${encodeURIComponent(safeFileName)}.kabe`
    );
    res.send(encrypted);
  } catch (error) {
    console.error('Export endpoint error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};
/**
 * POST /api/endpoints/import/preview
 * Decrypts an uploaded .kabe file (raw request body) and reports what an
 * import would require: which referenced databases and folder exist locally,
 * and whether the endpoint path is already taken. Nothing is persisted.
 */
export const importPreview = async (req: AuthRequest, res: Response) => {
  try {
    const rawBody = req.body as Buffer;
    if (!rawBody || rawBody.length === 0) {
      return res.status(400).json({ error: 'No file uploaded' });
    }

    let exportData: ExportedEndpoint;
    try {
      exportData = decryptEndpointData(rawBody) as ExportedEndpoint;
    } catch (err) {
      return res.status(400).json({ error: 'Invalid or corrupted .kabe file' });
    }
    if (exportData._format !== 'kabe_v1') {
      return res.status(400).json({ error: 'Unsupported file format version' });
    }

    // Collect referenced databases, de-duplicated by name: the endpoint's
    // main database first, then script-query databases in order of appearance.
    const referencedByName = new Map<string, { name: string; type: string }>();
    if (exportData.database_name) {
      referencedByName.set(exportData.database_name, {
        name: exportData.database_name,
        type: exportData.database_type || 'unknown',
      });
    }
    for (const sq of exportData.script_queries || []) {
      if (sq.database_name && !referencedByName.has(sq.database_name)) {
        referencedByName.set(sq.database_name, {
          name: sq.database_name,
          type: sq.database_type || 'unknown',
        });
      }
    }

    // Match each referenced database against active local databases
    // (by name AND type).
    const localDatabases = await mainPool.query(
      'SELECT id, name, type FROM databases WHERE is_active = true'
    );
    const databaseMapping = [...referencedByName.values()].map(ref => {
      const match = localDatabases.rows.find(
        (db: any) => db.name === ref.name && db.type === ref.type
      );
      return {
        name: ref.name,
        type: ref.type,
        found: !!match,
        local_id: match?.id || null,
      };
    });

    // Look up the exported folder by name, if one was recorded.
    let folder: { name: string; found: boolean; local_id: string | null } | null = null;
    if (exportData.folder_name) {
      const folderResult = await mainPool.query(
        'SELECT id FROM folders WHERE name = $1',
        [exportData.folder_name]
      );
      const folderRow = folderResult.rows[0];
      folder = {
        name: exportData.folder_name,
        found: folderResult.rows.length > 0,
        local_id: folderRow ? folderRow.id : null,
      };
    }

    // Let the client warn about a path collision before importing.
    const pathCheck = await mainPool.query(
      'SELECT id FROM endpoints WHERE path = $1',
      [exportData.path]
    );

    res.json({
      endpoint: {
        name: exportData.name,
        description: exportData.description,
        method: exportData.method,
        path: exportData.path,
        execution_type: exportData.execution_type,
        is_public: exportData.is_public,
        enable_logging: exportData.enable_logging,
        detailed_response: exportData.detailed_response,
        folder_name: exportData.folder_name,
      },
      databases: databaseMapping,
      all_databases_found: databaseMapping.every(d => d.found),
      local_databases: localDatabases.rows.map((db: any) => ({
        id: db.id,
        name: db.name,
        type: db.type,
      })),
      folder,
      path_exists: pathCheck.rows.length > 0,
    });
  } catch (error) {
    console.error('Import preview error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};
/**
 * POST /api/endpoints/import
 * Imports an endpoint from a base64-encoded .kabe file. Database and folder
 * names stored in the file are resolved back to local ids — either through
 * the client-supplied `database_mapping` / `folder_id`, or by name lookup
 * against active local databases.
 */
export const importEndpoint = async (req: AuthRequest, res: Response) => {
  try {
    const {
      file_data,        // base64-encoded .kabe file contents
      database_mapping, // optional map: exported database name -> local database id
      folder_id,        // optional explicit target folder id
      override_path,    // optional path to use instead of the exported one
    } = req.body;
    if (!file_data) {
      return res.status(400).json({ error: 'No file data provided' });
    }

    const buffer = Buffer.from(file_data, 'base64');
    let exportData: ExportedEndpoint;
    try {
      exportData = decryptEndpointData(buffer) as ExportedEndpoint;
    } catch (err) {
      // Wrong key, tampered data (GCM auth failure) or malformed payload.
      return res.status(400).json({ error: 'Invalid or corrupted .kabe file' });
    }

    // Resolve the main database id: an explicit mapping entry wins, otherwise
    // look the exported name up among active local databases. Unresolvable
    // databases abort the import with a 400.
    let databaseId: string | null = null;
    if (exportData.database_name) {
      const mappedId = database_mapping?.[exportData.database_name];
      if (mappedId) {
        databaseId = mappedId;
      } else {
        const dbResult = await mainPool.query(
          'SELECT id FROM databases WHERE name = $1 AND is_active = true',
          [exportData.database_name]
        );
        if (dbResult.rows.length > 0) {
          databaseId = dbResult.rows[0].id;
        } else {
          return res.status(400).json({
            error: `Database "${exportData.database_name}" not found and no mapping provided`
          });
        }
      }
    }

    // Resolve each script query's database the same way; any failure aborts
    // the whole import before anything is written.
    const resolvedScriptQueries = [];
    for (const sq of exportData.script_queries || []) {
      let sqDatabaseId: string | undefined;
      if (sq.database_name) {
        const mappedId = database_mapping?.[sq.database_name];
        if (mappedId) {
          sqDatabaseId = mappedId;
        } else {
          const sqDbResult = await mainPool.query(
            'SELECT id FROM databases WHERE name = $1 AND is_active = true',
            [sq.database_name]
          );
          if (sqDbResult.rows.length > 0) {
            sqDatabaseId = sqDbResult.rows[0].id;
          } else {
            return res.status(400).json({
              error: `Database "${sq.database_name}" (script query "${sq.name}") not found and no mapping provided`
            });
          }
        }
      }
      resolvedScriptQueries.push({
        name: sq.name,
        sql: sq.sql,
        database_id: sqDatabaseId,
        aql_method: sq.aql_method,
        aql_endpoint: sq.aql_endpoint,
        aql_body: sq.aql_body,
        aql_query_params: sq.aql_query_params,
      });
    }

    // Resolve folder: explicit folder_id wins; otherwise try to match the
    // exported folder name. A missing folder is not an error — the endpoint
    // is simply imported without one.
    let resolvedFolderId: string | null = folder_id || null;
    if (!resolvedFolderId && exportData.folder_name) {
      const folderResult = await mainPool.query(
        'SELECT id FROM folders WHERE name = $1',
        [exportData.folder_name]
      );
      if (folderResult.rows.length > 0) {
        resolvedFolderId = folderResult.rows[0].id;
      }
    }

    const finalPath = override_path || exportData.path;

    // The imported endpoint is owned by the importing user, not the exporter.
    const result = await mainPool.query(
      `INSERT INTO endpoints (
        name, description, method, path, database_id, sql_query, parameters,
        folder_id, user_id, is_public, enable_logging,
        execution_type, script_language, script_code, script_queries,
        aql_method, aql_endpoint, aql_body, aql_query_params, detailed_response
      )
      VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20)
      RETURNING *`,
      [
        exportData.name,
        exportData.description || '',
        exportData.method,
        finalPath,
        databaseId,
        exportData.sql_query || '',
        JSON.stringify(exportData.parameters || []),
        resolvedFolderId,
        req.user!.id,
        exportData.is_public || false,
        exportData.enable_logging || false,
        exportData.execution_type || 'sql',
        exportData.script_language || null,
        exportData.script_code || null,
        JSON.stringify(resolvedScriptQueries),
        exportData.aql_method || null,
        exportData.aql_endpoint || null,
        exportData.aql_body || null,
        JSON.stringify(exportData.aql_query_params || {}),
        exportData.detailed_response || false,
      ]
    );

    res.status(201).json(result.rows[0]);
  } catch (error: any) {
    console.error('Import endpoint error:', error);
    // 23505 = Postgres unique_violation (endpoint path already taken).
    if (error.code === '23505') {
      return res.status(400).json({ error: 'Endpoint path already exists' });
    }
    res.status(500).json({ error: 'Internal server error' });
  }
};

View File

@@ -7,6 +7,9 @@ import {
updateEndpoint,
deleteEndpoint,
testEndpoint,
exportEndpoint,
importPreview,
importEndpoint,
} from '../controllers/endpointController';
const router = express.Router();
@@ -36,6 +39,44 @@ router.use(authMiddleware);
*/
router.get('/', getEndpoints);
// Import routes must be before /:id to avoid "import" being treated as an id
router.post('/import/preview', express.raw({ type: 'application/octet-stream', limit: '10mb' }), importPreview);
router.post('/import', importEndpoint);
/**
* @swagger
* /api/endpoints/test:
* post:
* tags: [Endpoints]
* summary: Test SQL query
* security:
* - bearerAuth: []
* responses:
* 200:
* description: Query test result
*/
router.post('/test', testEndpoint);
/**
* @swagger
* /api/endpoints:
* post:
* tags: [Endpoints]
* summary: Create new endpoint
* security:
* - bearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* responses:
* 201:
* description: Endpoint created
*/
router.post('/', createEndpoint);
/**
* @swagger
* /api/endpoints/{id}:
@@ -58,23 +99,23 @@ router.get('/:id', getEndpoint);
/**
* @swagger
* /api/endpoints:
* post:
* /api/endpoints/{id}/export:
* get:
* tags: [Endpoints]
* summary: Create new endpoint
* summary: Export endpoint as .kabe file
* security:
* - bearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* parameters:
* - in: path
* name: id
* required: true
* schema:
* type: string
* responses:
* 201:
* description: Endpoint created
* 200:
* description: Encrypted .kabe file
*/
router.post('/', createEndpoint);
router.get('/:id/export', exportEndpoint);
/**
* @swagger
@@ -116,18 +157,4 @@ router.put('/:id', updateEndpoint);
*/
router.delete('/:id', deleteEndpoint);
/**
* @swagger
* /api/endpoints/test:
* post:
* tags: [Endpoints]
* summary: Test SQL query
* security:
* - bearerAuth: []
* responses:
* 200:
* description: Query test result
*/
router.post('/test', testEndpoint);
export default router;

View File

@@ -0,0 +1,247 @@
import * as vm from 'vm';
import { sqlExecutor } from './SqlExecutor';
import { aqlExecutor } from './AqlExecutor';
import { ScriptQuery, EndpointParameter, LogEntry, QueryExecution, IsolatedExecutionResult } from '../types';
import { databasePoolManager } from './DatabasePoolManager';
/** Arguments needed to run a user script in isolation. */
interface IsolatedScriptContext {
  databaseId: string;                      // default database for execQuery
  scriptQueries: ScriptQuery[];            // named queries available to the script
  requestParams: Record<string, any>;      // request parameters, exposed as `params`
  endpointParameters: EndpointParameter[]; // declared endpoint parameters
}

/**
 * Runs user-supplied JavaScript inside a `vm` context with a curated set of
 * globals, a log-capturing console, and an `execQuery` bridge to SQL/AQL
 * execution. Returns the script's value plus captured logs and a per-query
 * execution trace for debugging.
 *
 * NOTE(review): Node's `vm` module is NOT a security boundary — the exposed
 * constructors can be used to escape the sandbox. Treat scripts as trusted
 * (admin-authored) code; the sandbox exists to capture output and limit
 * accidents, not to contain attackers.
 */
export class IsolatedScriptExecutor {
  private readonly TIMEOUT_MS = 600000; // 10 minutes

  /**
   * Executes `code` (wrapped in an async IIFE so `return`/`await` work at the
   * top level) and resolves with { result, logs, queries }.
   * @throws Error when the script throws or exceeds TIMEOUT_MS.
   */
  async execute(code: string, context: IsolatedScriptContext): Promise<IsolatedExecutionResult> {
    const logs: LogEntry[] = [];
    const queries: QueryExecution[] = [];

    // console proxy that records entries instead of writing to stdout
    const capturedConsole = {
      log: (...args: any[]) => {
        logs.push({ type: 'log', message: args.map(a => this.stringify(a)).join(' '), timestamp: Date.now() });
      },
      error: (...args: any[]) => {
        logs.push({ type: 'error', message: args.map(a => this.stringify(a)).join(' '), timestamp: Date.now() });
      },
      warn: (...args: any[]) => {
        logs.push({ type: 'warn', message: args.map(a => this.stringify(a)).join(' '), timestamp: Date.now() });
      },
      info: (...args: any[]) => {
        logs.push({ type: 'info', message: args.map(a => this.stringify(a)).join(' '), timestamp: Date.now() });
      },
    };

    // execQuery(name, extraParams): runs a configured script query against
    // its database (SQL or AQL) and records an entry in `queries` either way.
    // Execution errors are returned as { success: false, ... } objects;
    // configuration errors (unknown query, missing db) throw.
    const execQuery = async (queryName: string, additionalParams: Record<string, any> = {}) => {
      const startTime = Date.now();
      const query = context.scriptQueries.find(q => q.name === queryName);
      if (!query) {
        const entry: QueryExecution = {
          name: queryName,
          executionTime: Date.now() - startTime,
          success: false,
          error: `Query '${queryName}' not found`,
        };
        queries.push(entry);
        throw new Error(`Query '${queryName}' not found`);
      }

      // Per-call params override request params on name collision.
      const allParams = { ...context.requestParams, ...additionalParams };
      const dbId = (query as any).database_id || context.databaseId;
      if (!dbId) {
        const errMsg = `Database ID not found for query '${queryName}'. Please specify database_id in the Script Queries configuration.`;
        queries.push({ name: queryName, executionTime: Date.now() - startTime, success: false, error: errMsg });
        throw new Error(errMsg);
      }

      const dbConfig = await databasePoolManager.getDatabaseConfig(dbId);
      if (!dbConfig) {
        const errMsg = `Database configuration not found for ID: ${dbId}`;
        queries.push({ name: queryName, executionTime: Date.now() - startTime, success: false, error: errMsg });
        throw new Error(errMsg);
      }

      if (dbConfig.type === 'aql') {
        try {
          const result = await aqlExecutor.executeAqlQuery(dbId, {
            method: query.aql_method || 'GET',
            endpoint: query.aql_endpoint || '',
            body: query.aql_body || '',
            queryParams: query.aql_query_params || {},
            parameters: allParams,
          });
          queries.push({
            name: queryName,
            executionTime: Date.now() - startTime,
            rowCount: result.rowCount,
            success: true,
          });
          return {
            success: true,
            data: result.rows,
            rowCount: result.rowCount,
            executionTime: result.executionTime,
          };
        } catch (error: any) {
          queries.push({
            name: queryName,
            executionTime: Date.now() - startTime,
            success: false,
            error: error.message,
          });
          return { success: false, error: error.message, data: [], rowCount: 0 };
        }
      } else {
        if (!query.sql) {
          const errMsg = `SQL query is required for database '${dbConfig.name}' (type: ${dbConfig.type})`;
          queries.push({ name: queryName, executionTime: Date.now() - startTime, success: false, error: errMsg });
          throw new Error(errMsg);
        }
        try {
          // Rewrite named $placeholders into positional $1..$n and collect
          // their values (missing params become NULL).
          let processedQuery = query.sql;
          const paramValues: any[] = [];
          const paramMatches = query.sql.match(/\$\w+/g) || [];
          const uniqueParams = [...new Set(paramMatches.map(p => p.substring(1)))];
          uniqueParams.forEach((paramName, index) => {
            const regex = new RegExp(`\\$${paramName}\\b`, 'g');
            processedQuery = processedQuery.replace(regex, `$${index + 1}`);
            const value = allParams[paramName];
            paramValues.push(value !== undefined ? value : null);
          });
          const result = await sqlExecutor.executeQuery(dbId, processedQuery, paramValues);
          queries.push({
            name: queryName,
            sql: query.sql,
            executionTime: Date.now() - startTime,
            rowCount: result.rowCount,
            success: true,
          });
          return {
            success: true,
            data: result.rows,
            rowCount: result.rowCount,
            executionTime: result.executionTime,
          };
        } catch (error: any) {
          queries.push({
            name: queryName,
            sql: query.sql,
            executionTime: Date.now() - startTime,
            success: false,
            error: error.message,
          });
          return { success: false, error: error.message, data: [], rowCount: 0 };
        }
      }
    };

    // Sandbox with a null-prototype base so scripts don't inherit host globals.
    const sandbox = Object.create(null);
    sandbox.params = context.requestParams;
    sandbox.console = capturedConsole;
    sandbox.execQuery = execQuery;

    // Safe globals
    sandbox.JSON = JSON;
    sandbox.Date = Date;
    sandbox.Math = Math;
    sandbox.parseInt = parseInt;
    sandbox.parseFloat = parseFloat;
    sandbox.Array = Array;
    sandbox.Object = Object;
    sandbox.String = String;
    sandbox.Number = Number;
    sandbox.Boolean = Boolean;
    sandbox.RegExp = RegExp;
    sandbox.Map = Map;
    sandbox.Set = Set;
    sandbox.Promise = Promise;
    sandbox.Error = Error;
    sandbox.TypeError = TypeError;
    sandbox.RangeError = RangeError;
    sandbox.SyntaxError = SyntaxError;
    sandbox.isNaN = isNaN;
    sandbox.isFinite = isFinite;
    sandbox.undefined = undefined;
    sandbox.NaN = NaN;
    sandbox.Infinity = Infinity;
    sandbox.encodeURIComponent = encodeURIComponent;
    sandbox.decodeURIComponent = decodeURIComponent;
    sandbox.encodeURI = encodeURI;
    sandbox.decodeURI = decodeURI;

    // setTimeout capped at 30s per call; ids are tracked so leftover timers
    // can be cancelled when the script finishes.
    const timerIds = new Set<ReturnType<typeof setTimeout>>();
    sandbox.setTimeout = (fn: Function, ms: number, ...args: any[]) => {
      const cappedMs = Math.min(ms || 0, 30000);
      const id = setTimeout(() => {
        timerIds.delete(id);
        fn(...args);
      }, cappedMs);
      timerIds.add(id);
      return id;
    };
    sandbox.clearTimeout = (id: ReturnType<typeof setTimeout>) => {
      timerIds.delete(id);
      clearTimeout(id);
    };

    const vmContext = vm.createContext(sandbox);

    // Wrap user code in an async IIFE
    const wrappedCode = `(async function() { ${code} })()`;

    // BUG FIX: the race's timeout timer was never cleared, so every execution
    // left a pending 10-minute timer whose late rejection surfaced as an
    // unhandled promise rejection. Keep its id and clear it in `finally`,
    // along with any timers the script scheduled but never fired.
    let raceTimeoutId: ReturnType<typeof setTimeout> | undefined;
    try {
      const script = new vm.Script(wrappedCode, { filename: 'user-script.js' });
      const resultPromise = script.runInContext(vmContext);

      const timeoutPromise = new Promise((_, reject) => {
        raceTimeoutId = setTimeout(() => reject(new Error('Script execution timeout (10min)')), this.TIMEOUT_MS);
      });

      const result = await Promise.race([resultPromise, timeoutPromise]);
      return { result, logs, queries };
    } catch (error: any) {
      throw new Error(`JavaScript execution error: ${error.message}`);
    } finally {
      if (raceTimeoutId !== undefined) {
        clearTimeout(raceTimeoutId);
      }
      for (const id of timerIds) {
        clearTimeout(id);
      }
      timerIds.clear();
    }
  }

  /** Converts any value to a log-friendly string (JSON where possible). */
  private stringify(value: any): string {
    if (value === null) return 'null';
    if (value === undefined) return 'undefined';
    if (typeof value === 'string') return value;
    try {
      return JSON.stringify(value);
    } catch {
      return String(value);
    }
  }
}

export const isolatedScriptExecutor = new IsolatedScriptExecutor();

View File

@@ -1,8 +1,9 @@
import { spawn } from 'child_process';
import { sqlExecutor } from './SqlExecutor';
import { aqlExecutor } from './AqlExecutor';
import { ScriptQuery, EndpointParameter } from '../types';
import { ScriptQuery, EndpointParameter, LogEntry, QueryExecution, IsolatedExecutionResult } from '../types';
import { databasePoolManager } from './DatabasePoolManager';
import { isolatedScriptExecutor } from './IsolatedScriptExecutor';
interface ScriptContext {
databaseId: string;
@@ -13,122 +14,19 @@ interface ScriptContext {
export class ScriptExecutor {
/**
* Выполняет JavaScript скрипт
* Выполняет JavaScript скрипт через изолированный VM контекст
*/
async executeJavaScript(code: string, context: ScriptContext): Promise<any> {
try {
// Создаем функцию execQuery, доступную в скрипте
const execQuery = async (queryName: string, additionalParams: Record<string, any> = {}) => {
const query = context.scriptQueries.find(q => q.name === queryName);
if (!query) {
throw new Error(`Query '${queryName}' not found`);
}
const allParams = { ...context.requestParams, ...additionalParams };
const dbId = (query as any).database_id || context.databaseId;
if (!dbId) {
throw new Error(`Database ID not found for query '${queryName}'. Query database_id: ${(query as any).database_id}, Context databaseId: ${context.databaseId}. Please specify database_id in the Script Queries configuration for query '${queryName}'.`);
}
// Получаем конфигурацию базы данных для определения типа
const dbConfig = await databasePoolManager.getDatabaseConfig(dbId);
if (!dbConfig) {
throw new Error(`Database configuration not found for ID: ${dbId}`);
}
// Проверяем тип базы данных и выполняем соответствующий запрос
if (dbConfig.type === 'aql') {
// AQL запрос
try {
const result = await aqlExecutor.executeAqlQuery(dbId, {
method: query.aql_method || 'GET',
endpoint: query.aql_endpoint || '',
body: query.aql_body || '',
queryParams: query.aql_query_params || {},
parameters: allParams,
});
return {
success: true,
data: result.rows,
rowCount: result.rowCount,
executionTime: result.executionTime,
};
} catch (error: any) {
// Возвращаем ошибку как объект, а не бросаем исключение
return {
success: false,
error: error.message,
data: [],
rowCount: 0,
};
}
} else {
// SQL запрос
if (!query.sql) {
throw new Error(`SQL query is required for database '${dbConfig.name}' (type: ${dbConfig.type})`);
}
try {
let processedQuery = query.sql;
const paramValues: any[] = [];
const paramMatches = query.sql.match(/\$\w+/g) || [];
const uniqueParams = [...new Set(paramMatches.map(p => p.substring(1)))];
uniqueParams.forEach((paramName, index) => {
const regex = new RegExp(`\\$${paramName}\\b`, 'g');
processedQuery = processedQuery.replace(regex, `$${index + 1}`);
const value = allParams[paramName];
paramValues.push(value !== undefined ? value : null);
});
const result = await sqlExecutor.executeQuery(dbId, processedQuery, paramValues);
console.log(`[execQuery ${queryName}] success, rowCount:`, result.rowCount);
return {
success: true,
data: result.rows,
rowCount: result.rowCount,
executionTime: result.executionTime,
};
} catch (error: any) {
// Возвращаем ошибку как объект, а не бросаем исключение
return {
success: false,
error: error.message,
data: [],
rowCount: 0,
};
}
}
};
// Создаем асинхронную функцию из кода пользователя
const AsyncFunction = Object.getPrototypeOf(async function(){}).constructor;
const userFunction = new AsyncFunction('params', 'execQuery', code);
// Устанавливаем таймаут (10 минут)
const timeoutPromise = new Promise((_, reject) => {
setTimeout(() => reject(new Error('Script execution timeout (10min)')), 600000);
});
// Выполняем скрипт с таймаутом
const result = await Promise.race([
userFunction(context.requestParams, execQuery),
timeoutPromise
]);
return result;
} catch (error: any) {
throw new Error(`JavaScript execution error: ${error.message}`);
}
async executeJavaScript(code: string, context: ScriptContext): Promise<IsolatedExecutionResult> {
return isolatedScriptExecutor.execute(code, context);
}
/**
* Выполняет Python скрипт в отдельном процессе
*/
async executePython(code: string, context: ScriptContext): Promise<any> {
async executePython(code: string, context: ScriptContext): Promise<IsolatedExecutionResult> {
const logs: LogEntry[] = [];
const queries: QueryExecution[] = [];
return new Promise((resolve, reject) => {
// Сериализуем параметры в JSON строку
const paramsJson = JSON.stringify(context.requestParams);
@@ -179,7 +77,6 @@ print(json.dumps(result))
const python = spawn(pythonCommand, ['-c', wrapperCode]);
let output = '';
let errorOutput = '';
let queryRequests: any[] = [];
python.stdout.on('data', (data) => {
output += data.toString();
@@ -192,12 +89,19 @@ print(json.dumps(result))
// Проверяем на запросы к БД
const requestMatches = text.matchAll(/__QUERY_REQUEST__(.*?)__END_REQUEST__/g);
for (const match of requestMatches) {
const queryStartTime = Date.now();
try {
const request = JSON.parse(match[1]);
// Выполняем запрос
const query = context.scriptQueries.find(q => q.name === request.query_name);
if (!query) {
queries.push({
name: request.query_name,
executionTime: Date.now() - queryStartTime,
success: false,
error: `Query '${request.query_name}' not found`,
});
python.stdin.write(JSON.stringify({ error: `Query '${request.query_name}' not found` }) + '\n');
continue;
}
@@ -206,18 +110,18 @@ print(json.dumps(result))
const dbId = (query as any).database_id || context.databaseId;
if (!dbId) {
python.stdin.write(JSON.stringify({
error: `Database ID not found for query '${request.query_name}'. Query database_id: ${(query as any).database_id}, Context databaseId: ${context.databaseId}. Please specify database_id in the Script Queries configuration for query '${request.query_name}'.`
}) + '\n');
const errMsg = `Database ID not found for query '${request.query_name}'.`;
queries.push({ name: request.query_name, executionTime: Date.now() - queryStartTime, success: false, error: errMsg });
python.stdin.write(JSON.stringify({ error: errMsg }) + '\n');
continue;
}
// Получаем конфигурацию базы данных для определения типа
const dbConfig = await databasePoolManager.getDatabaseConfig(dbId);
if (!dbConfig) {
python.stdin.write(JSON.stringify({
error: `Database configuration not found for ID: ${dbId}`
}) + '\n');
const errMsg = `Database configuration not found for ID: ${dbId}`;
queries.push({ name: request.query_name, executionTime: Date.now() - queryStartTime, success: false, error: errMsg });
python.stdin.write(JSON.stringify({ error: errMsg }) + '\n');
continue;
}
@@ -233,6 +137,13 @@ print(json.dumps(result))
parameters: allParams,
});
queries.push({
name: request.query_name,
executionTime: Date.now() - queryStartTime,
rowCount: result.rowCount,
success: true,
});
python.stdin.write(JSON.stringify({
success: true,
data: result.rows,
@@ -240,7 +151,12 @@ print(json.dumps(result))
executionTime: result.executionTime,
}) + '\n');
} catch (error: any) {
// Отправляем ошибку как объект, а не через поле error
queries.push({
name: request.query_name,
executionTime: Date.now() - queryStartTime,
success: false,
error: error.message,
});
python.stdin.write(JSON.stringify({
success: false,
error: error.message,
@@ -251,9 +167,11 @@ print(json.dumps(result))
} else {
// SQL запрос
if (!query.sql) {
const errMsg = `SQL query is required for database '${dbConfig.name}' (type: ${dbConfig.type})`;
queries.push({ name: request.query_name, sql: query.sql, executionTime: Date.now() - queryStartTime, success: false, error: errMsg });
python.stdin.write(JSON.stringify({
success: false,
error: `SQL query is required for database '${dbConfig.name}' (type: ${dbConfig.type})`,
error: errMsg,
data: [],
rowCount: 0,
}) + '\n');
@@ -280,6 +198,14 @@ print(json.dumps(result))
paramValues
);
queries.push({
name: request.query_name,
sql: query.sql,
executionTime: Date.now() - queryStartTime,
rowCount: result.rowCount,
success: true,
});
python.stdin.write(JSON.stringify({
success: true,
data: result.rows,
@@ -287,6 +213,13 @@ print(json.dumps(result))
executionTime: result.executionTime,
}) + '\n');
} catch (error: any) {
queries.push({
name: request.query_name,
sql: query.sql,
executionTime: Date.now() - queryStartTime,
success: false,
error: error.message,
});
python.stdin.write(JSON.stringify({
success: false,
error: error.message,
@@ -296,6 +229,12 @@ print(json.dumps(result))
}
}
} catch (error: any) {
queries.push({
name: 'unknown',
executionTime: Date.now() - queryStartTime,
success: false,
error: error.message,
});
python.stdin.write(JSON.stringify({
success: false,
error: error.message,
@@ -304,18 +243,38 @@ print(json.dumps(result))
}) + '\n');
}
}
// Capture non-query stderr output as log entries
const nonQueryLines = text.replace(/__QUERY_REQUEST__.*?__END_REQUEST__/g, '').trim();
if (nonQueryLines) {
nonQueryLines.split('\n').forEach((line: string) => {
const trimmed = line.trim();
if (trimmed) {
logs.push({ type: 'log', message: trimmed, timestamp: Date.now() });
}
});
}
});
python.on('close', (code) => {
if (code !== 0) {
python.on('close', (exitCode) => {
if (exitCode !== 0) {
reject(new Error(`Python execution error: ${errorOutput}`));
} else {
try {
// Последняя строка вывода - результат
// Последняя строка вывода - результат, остальные - логи
const lines = output.trim().split('\n');
const resultLine = lines[lines.length - 1];
// Capture print() output lines (everything except the last JSON result)
for (let i = 0; i < lines.length - 1; i++) {
const trimmed = lines[i].trim();
if (trimmed) {
logs.push({ type: 'log', message: trimmed, timestamp: Date.now() });
}
}
const result = JSON.parse(resultLine);
resolve(result);
resolve({ result, logs, queries });
} catch (error) {
reject(new Error(`Failed to parse Python output: ${output}`));
}
@@ -337,7 +296,7 @@ print(json.dumps(result))
language: 'javascript' | 'python',
code: string,
context: ScriptContext
): Promise<any> {
): Promise<IsolatedExecutionResult> {
if (language === 'javascript') {
return this.executeJavaScript(code, context);
} else if (language === 'python') {

View File

@@ -0,0 +1,35 @@
import crypto from 'crypto';
// Key for AES-256-GCM encryption of exported .kabe files. Overridable via the
// ENDPOINT_EXPORT_KEY env var; defaults to the built-in key so files exported
// by older builds keep decrypting.
// NOTE(review): with the built-in key, every installation can decrypt every
// export — this provides obfuscation and tamper detection (GCM auth tag),
// not confidentiality against anyone with the source code.
const ENCRYPTION_KEY = process.env.ENDPOINT_EXPORT_KEY || 'kis-api-builder-endpoint-key-32b';
const ALGORITHM = 'aes-256-gcm';
const KEY = Buffer.from(ENCRYPTION_KEY, 'utf-8');
if (KEY.length !== 32) {
  // aes-256 requires exactly a 32-byte key; fail fast on misconfiguration.
  throw new Error('Endpoint export encryption key must be exactly 32 bytes');
}

// File layout: [16 bytes IV][16 bytes GCM auth tag][ciphertext]
const IV_LENGTH = 16;
const AUTH_TAG_LENGTH = 16;
const HEADER_LENGTH = IV_LENGTH + AUTH_TAG_LENGTH;

/**
 * Serializes `data` to JSON and encrypts it with AES-256-GCM.
 * Returns a Buffer laid out as [IV][auth tag][ciphertext].
 */
export function encryptEndpointData(data: object): Buffer {
  const json = JSON.stringify(data);
  const iv = crypto.randomBytes(IV_LENGTH);
  const cipher = crypto.createCipheriv(ALGORITHM, KEY, iv);
  const encrypted = Buffer.concat([
    cipher.update(json, 'utf8'),
    cipher.final(),
  ]);
  const authTag = cipher.getAuthTag();
  return Buffer.concat([iv, authTag, encrypted]);
}

/**
 * Decrypts a buffer produced by {@link encryptEndpointData} and parses the
 * JSON payload.
 * @throws Error if the buffer is too short to contain the IV and auth tag,
 *         if the auth tag does not verify (tampered data or wrong key), or
 *         if the plaintext is not valid JSON.
 */
export function decryptEndpointData(buffer: Buffer): object {
  if (!buffer || buffer.length <= HEADER_LENGTH) {
    // Guard against truncated uploads so callers get a clear error instead
    // of an opaque crypto failure on an empty/short slice.
    throw new Error('Invalid encrypted data: buffer too short');
  }
  const iv = buffer.subarray(0, IV_LENGTH);
  const authTag = buffer.subarray(IV_LENGTH, HEADER_LENGTH);
  const encrypted = buffer.subarray(HEADER_LENGTH);
  const decipher = crypto.createDecipheriv(ALGORITHM, KEY, iv);
  decipher.setAuthTag(authTag);
  const decrypted = Buffer.concat([
    decipher.update(encrypted),
    decipher.final(),
  ]);
  return JSON.parse(decrypted.toString('utf8'));
}

View File

@@ -101,6 +101,27 @@ export interface QueryResult {
executionTime: number;
}
/** One captured console entry produced while a user script runs. */
export interface LogEntry {
  type: 'log' | 'error' | 'warn' | 'info'; // console method that produced it
  message: string;                          // stringified, space-joined arguments
  timestamp: number;                        // ms since epoch (Date.now())
}

/** Execution trace for a single named query issued via execQuery. */
export interface QueryExecution {
  name: string;          // script-query name as configured on the endpoint
  sql?: string;          // present for SQL queries only (absent for AQL)
  executionTime: number; // wall-clock ms, measured from query lookup to completion
  rowCount?: number;     // rows returned on success
  success: boolean;
  error?: string;        // failure reason when success === false
}

/** Combined outcome of an isolated script run: value plus debug channels. */
export interface IsolatedExecutionResult {
  result: any;               // whatever the user script returned
  logs: LogEntry[];          // captured console output, in order
  queries: QueryExecution[]; // per-query trace, in execution order
}
export interface SwaggerEndpoint {
tags: string[];
summary: string;
@@ -109,3 +130,38 @@ export interface SwaggerEndpoint {
responses: any;
security?: any[];
}
/**
 * Portable form of a script query inside an exported .kabe file: the database
 * is referenced by name/type instead of a local id so it can be re-linked
 * on import.
 */
export interface ExportedScriptQuery {
  name: string;
  sql?: string;
  database_name?: string; // resolved back to a local database id on import
  database_type?: string;
  aql_method?: string;
  aql_endpoint?: string;
  aql_body?: string;
  aql_query_params?: Record<string, string>;
}

/**
 * Payload of an exported endpoint (.kabe file). All local ids (database,
 * folder) are replaced with names so the endpoint can be re-linked when
 * imported into another installation.
 */
export interface ExportedEndpoint {
  _format: 'kabe_v1'; // format discriminator, checked on import
  name: string;
  description: string;
  method: 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH';
  path: string;
  execution_type: 'sql' | 'script' | 'aql';
  database_name: string | null; // main database, referenced by name
  database_type: string | null;
  sql_query: string;
  parameters: EndpointParameter[];
  script_language: string | null;
  script_code: string | null;
  script_queries: ExportedScriptQuery[];
  aql_method: string | null;
  aql_endpoint: string | null;
  aql_body: string | null;
  aql_query_params: Record<string, string> | null;
  is_public: boolean;
  enable_logging: boolean;
  detailed_response: boolean;
  folder_name: string | null; // matched against local folders by name on import
}