new file: .claude/settings.local.json

new file:   .gitignore
	new file:   backend/.env.example
	new file:   backend/.gitignore
	new file:   backend/ecosystem.config.js
	new file:   backend/nodemon.json
	new file:   backend/package-lock.json
	new file:   backend/package.json
	new file:   backend/src/config/database.ts
	new file:   backend/src/config/dynamicSwagger.ts
	new file:   backend/src/config/environment.ts
	new file:   backend/src/config/swagger.ts
	new file:   backend/src/controllers/apiKeyController.ts
	new file:   backend/src/controllers/authController.ts
	new file:   backend/src/controllers/databaseController.ts
	new file:   backend/src/controllers/databaseManagementController.ts
	new file:   backend/src/controllers/dynamicApiController.ts
	new file:   backend/src/controllers/endpointController.ts
	new file:   backend/src/controllers/folderController.ts
	new file:   backend/src/controllers/logsController.ts
	new file:   backend/src/controllers/userController.ts
	new file:   backend/src/middleware/apiKey.ts
	new file:   backend/src/middleware/auth.ts
	new file:   backend/src/middleware/logging.ts
	new file:   backend/src/migrations/001_initial_schema.sql
	new file:   backend/src/migrations/002_add_logging.sql
	new file:   backend/src/migrations/003_add_scripting.sql
	new file:   backend/src/migrations/004_add_superadmin.sql
	new file:   backend/src/migrations/run.ts
	new file:   backend/src/migrations/seed.ts
	new file:   backend/src/routes/apiKeys.ts
	new file:   backend/src/routes/auth.ts
	new file:   backend/src/routes/databaseManagement.ts
	new file:   backend/src/routes/databases.ts
	new file:   backend/src/routes/dynamic.ts
	new file:   backend/src/routes/endpoints.ts
	new file:   backend/src/routes/folders.ts
	new file:   backend/src/routes/logs.ts
	new file:   backend/src/routes/users.ts
	new file:   backend/src/server.ts
	new file:   backend/src/services/DatabasePoolManager.ts
	new file:   backend/src/services/ScriptExecutor.ts
	new file:   backend/src/services/SqlExecutor.ts
	new file:   backend/src/types/index.ts
	new file:   backend/tsconfig.json
	new file:   frontend/.gitignore
	new file:   frontend/index.html
	new file:   frontend/nginx.conf
	new file:   frontend/package-lock.json
	new file:   frontend/package.json
	new file:   frontend/postcss.config.js
	new file:   frontend/src/App.tsx
	new file:   frontend/src/components/CodeEditor.tsx
This commit is contained in:
GEgorov
2025-10-07 00:04:04 +03:00
commit 8943f5a070
79 changed files with 17032 additions and 0 deletions

View File

@@ -0,0 +1,30 @@
import { Pool } from 'pg';
import { config } from './environment';
// Main database pool for KIS API Builder metadata
// Shared connection pool for the KIS API Builder metadata database
// (users, endpoints, folders, api_keys, request logs). Target databases
// queried by dynamic endpoints use separate pools elsewhere.
export const mainPool = new Pool({
  host: config.mainDatabase.host,
  port: config.mainDatabase.port,
  database: config.mainDatabase.database,
  user: config.mainDatabase.user,
  password: config.mainDatabase.password,
  max: 20,                        // at most 20 concurrent clients
  idleTimeoutMillis: 30000,       // recycle clients idle for 30s
  connectionTimeoutMillis: 2000,  // fail fast if no connection within 2s
});

// An error on an idle pooled client is treated as unrecoverable: the
// process exits and is expected to be restarted by the supervisor
// (pm2 via ecosystem.config.js — TODO confirm).
mainPool.on('error', (err) => {
  console.error('Unexpected error on idle client', err);
  process.exit(-1);
});

// Startup connectivity check: acquire and immediately release one client.
// Logs and rethrows when the metadata database is unreachable.
export const initializeDatabase = async () => {
  try {
    const client = await mainPool.connect();
    console.log('✅ Connected to main database successfully');
    client.release();
  } catch (error) {
    console.error('❌ Failed to connect to database:', error);
    throw error;
  }
};

View File

@@ -0,0 +1,237 @@
import { mainPool } from './database';
// One OpenAPI "path item": operation objects keyed by lower-case HTTP
// method (get/post/...), in the shape emitted by generateDynamicSwagger.
interface SwaggerPath {
  [method: string]: {
    tags: string[];
    summary: string;
    description: string;
    // Empty array = public endpoint; [{ apiKey: [] }] = key required.
    security: { apiKey: [] }[];
    parameters: any[];
    responses: any;
  };
}

// Minimal OpenAPI 3.0 document shape produced for the dynamic endpoints.
interface SwaggerSpec {
  openapi: string;
  info: any;
  servers: any[];
  components: any;
  paths: { [path: string]: SwaggerPath };
  tags: any[];
}
/**
 * Builds an OpenAPI 3.0 document describing every user-created endpoint
 * stored in the `endpoints` table, so Swagger UI always reflects the
 * current database state.
 *
 * Swagger tags (the grouping in the UI) come from each endpoint's folder,
 * resolved to its full "Parent / Child" path via a recursive CTE over
 * `folders`; endpoints without a folder fall under 'Без категории'.
 */
export async function generateDynamicSwagger(): Promise<SwaggerSpec> {
  // Load all endpoints with the full path of their containing folder
  const endpointsResult = await mainPool.query(`
    WITH RECURSIVE folder_path AS (
      -- Базовый случай: папки без родителя
      SELECT id, name, parent_id, name::text as full_path
      FROM folders
      WHERE parent_id IS NULL

      UNION ALL

      -- Рекурсивный случай: добавляем дочерние папки
      SELECT f.id, f.name, f.parent_id, (fp.full_path || ' / ' || f.name)::text as full_path
      FROM folders f
      INNER JOIN folder_path fp ON f.parent_id = fp.id
    )
    SELECT
      e.id,
      e.name,
      e.description,
      e.method,
      e.path,
      e.parameters,
      e.is_public,
      fp.full_path as folder_name
    FROM endpoints e
    LEFT JOIN folder_path fp ON e.folder_id = fp.id
    ORDER BY fp.full_path, e.name
  `);

  const endpoints = endpointsResult.rows;

  // One Swagger tag per distinct folder path
  const tags: any[] = [];
  const folderSet = new Set<string>();
  endpoints.forEach((endpoint: any) => {
    const folderName = endpoint.folder_name || 'Без категории';
    if (!folderSet.has(folderName)) {
      folderSet.add(folderName);
      tags.push({
        name: folderName,
        description: `Эндпоинты в папке "${folderName}"`,
      });
    }
  });

  // Build the paths object
  const paths: { [path: string]: SwaggerPath } = {};
  endpoints.forEach((endpoint: any) => {
    const method = endpoint.method.toLowerCase();
    // Stored paths include the /api/v1 prefix; strip it because the
    // server entry below already carries url '/api/v1'.
    const swaggerPath = endpoint.path.replace(/\/api\/v1/, '');
    const folderName = endpoint.folder_name || 'Без категории';

    // Parse parameters — PostgreSQL may return either a JSON string or
    // an already-parsed value depending on the column type
    const parameters: any[] = [];
    const bodyParams: any = {};
    const bodyRequired: string[] = [];

    let params: any[] = [];
    if (endpoint.parameters) {
      if (typeof endpoint.parameters === 'string') {
        try {
          params = JSON.parse(endpoint.parameters);
        } catch (e) {
          params = []; // malformed JSON — treat as "no parameters"
        }
      } else if (Array.isArray(endpoint.parameters)) {
        params = endpoint.parameters;
      }
    }

    if (params.length > 0) {
      params.forEach((param: any) => {
        if (param.in === 'body') {
          // Body parameters go into requestBody instead of `parameters`
          bodyParams[param.name] = {
            type: param.type || 'string',
            description: param.description || '',
            default: param.default_value,
          };
          if (param.required) {
            bodyRequired.push(param.name);
          }
        } else {
          // Query and path parameters
          parameters.push({
            name: param.name,
            in: param.in || 'query',
            required: param.required || false,
            description: param.description || '',
            schema: {
              type: param.type || 'string',
              default: param.default_value,
            },
          });
        }
      });
    }

    if (!paths[swaggerPath]) {
      paths[swaggerPath] = {} as SwaggerPath;
    }

    const endpointSpec: any = {
      tags: [folderName],
      summary: endpoint.name,
      description: endpoint.description || '',
      // Public endpoints need no security entry; others require the API key
      security: endpoint.is_public ? [] : [{ apiKey: [] }],
      parameters,
      responses: {
        '200': {
          description: 'Успешный ответ',
          content: {
            'application/json': {
              schema: {
                type: 'object',
                properties: {
                  success: { type: 'boolean' },
                  data: { type: 'array', items: { type: 'object' } },
                  rowCount: { type: 'number' },
                  executionTime: { type: 'number' },
                },
              },
            },
          },
        },
        '400': {
          description: 'Ошибка в запросе',
          content: {
            'application/json': {
              schema: {
                type: 'object',
                properties: {
                  error: { type: 'string' },
                },
              },
            },
          },
        },
        '401': {
          description: 'Не авторизован (неверный или отсутствующий API ключ)',
        },
        '403': {
          description: 'Доступ запрещен (нет прав на этот эндпоинт)',
        },
        '500': {
          description: 'Внутренняя ошибка сервера',
        },
      },
    };

    // Attach requestBody only when at least one body parameter exists
    if (Object.keys(bodyParams).length > 0) {
      endpointSpec.requestBody = {
        required: bodyRequired.length > 0,
        content: {
          'application/json': {
            schema: {
              type: 'object',
              properties: bodyParams,
              required: bodyRequired,
            },
          },
        },
      };
    }

    paths[swaggerPath][method] = endpointSpec;
  });

  return {
    openapi: '3.0.0',
    info: {
      title: 'KIS API Builder - Созданные API эндпоинты',
      version: '1.0.0',
      description: `
# KIS API Builder - Документация

## Авторизация

Для доступа к эндпоинтам используйте API ключ, полученный от администратора системы.

1. Нажмите кнопку **Authorize** справа вверху
2. Введите ваш API ключ в поле **x-api-key**
3. Нажмите **Authorize**

Теперь вы можете тестировать доступные вам эндпоинты прямо в этой документации.

## Примечание

Публичные эндпоинты доступны без API ключа.
      `.trim(),
    },
    servers: [
      {
        url: '/api/v1',
        description: 'API эндпоинты',
      },
    ],
    components: {
      securitySchemes: {
        apiKey: {
          type: 'apiKey',
          in: 'header',
          name: 'x-api-key',
          description: 'API ключ для доступа к эндпоинтам',
        },
      },
    },
    paths,
    tags,
  };
}

View File

@@ -0,0 +1,47 @@
import dotenv from 'dotenv';
import { DatabaseConfig } from '../types';
dotenv.config();
// Application configuration, resolved once at module load from the
// process environment (with development-friendly fallbacks).
export const config = {
  port: parseInt(process.env.PORT || '3000'),
  nodeEnv: process.env.NODE_ENV || 'development',

  // Main database (for KIS API Builder metadata)
  mainDatabase: {
    host: process.env.DB_HOST || 'localhost',
    port: parseInt(process.env.DB_PORT || '5432'),
    database: process.env.DB_NAME || 'api_builder',
    user: process.env.DB_USER || 'postgres',
    password: process.env.DB_PASSWORD || 'postgres',
  },

  // JWT
  // NOTE(review): the fallback secret must never reach production —
  // consider failing fast when JWT_SECRET is unset outside development.
  jwt: {
    secret: (process.env.JWT_SECRET || 'default-secret-change-in-production') as string,
    expiresIn: (process.env.JWT_EXPIRES_IN || '24h') as string,
  },

  // Rate limiting
  rateLimit: {
    windowMs: parseInt(process.env.RATE_LIMIT_WINDOW_MS || '900000'), // 15 minutes
    maxRequests: parseInt(process.env.RATE_LIMIT_MAX_REQUESTS || '100'),
  },

  // Target databases (where API queries will execute)
  targetDatabases: parseTargetDatabases(),
};
/**
 * Parses the TARGET_DATABASES environment variable (expected to be a
 * JSON array of DatabaseConfig objects) into a connection-config list.
 *
 * Returns an empty array — with a console warning/error — when the
 * variable is unset, is not valid JSON, or does not hold a JSON array,
 * so a misconfigured environment degrades gracefully instead of leaking
 * a non-array value typed as DatabaseConfig[].
 */
function parseTargetDatabases(): DatabaseConfig[] {
  try {
    const dbConfig = process.env.TARGET_DATABASES;
    if (!dbConfig) {
      console.warn('No TARGET_DATABASES configured. Using empty array.');
      return [];
    }
    const parsed = JSON.parse(dbConfig);
    // JSON.parse accepts any JSON value; only an array is a valid config.
    if (!Array.isArray(parsed)) {
      console.error('TARGET_DATABASES must be a JSON array. Using empty array.');
      return [];
    }
    return parsed;
  } catch (error) {
    console.error('Error parsing TARGET_DATABASES:', error);
    return [];
  }
}

View File

@@ -0,0 +1,96 @@
import swaggerJsdoc from 'swagger-jsdoc';
import { config } from './environment';
// swagger-jsdoc configuration for the STATIC management API docs
// (auth, endpoint CRUD, folders, API keys, databases). The dynamic,
// user-created endpoints are documented separately by dynamicSwagger.ts.
const options: swaggerJsdoc.Options = {
  definition: {
    openapi: '3.0.0',
    info: {
      title: 'KIS API Builder - Dynamic API System',
      version: '1.0.0',
      description: 'System for constructing and managing dynamic API endpoints with SQL queries',
      contact: {
        name: 'KIS API Builder Support',
      },
    },
    servers: [
      {
        url: `http://localhost:${config.port}`,
        description: 'Development server',
      },
    ],
    components: {
      // Two auth modes: JWT bearer for the management UI,
      // x-api-key header for consuming dynamic endpoints.
      securitySchemes: {
        bearerAuth: {
          type: 'http',
          scheme: 'bearer',
          bearerFormat: 'JWT',
        },
        apiKey: {
          type: 'apiKey',
          in: 'header',
          name: 'x-api-key',
        },
      },
      // Reusable schemas mirroring the main metadata tables.
      schemas: {
        User: {
          type: 'object',
          properties: {
            id: { type: 'string', format: 'uuid' },
            username: { type: 'string' },
            email: { type: 'string', format: 'email' },
            role: { type: 'string', enum: ['admin', 'user'] },
            created_at: { type: 'string', format: 'date-time' },
          },
        },
        Endpoint: {
          type: 'object',
          properties: {
            id: { type: 'string', format: 'uuid' },
            name: { type: 'string' },
            description: { type: 'string' },
            method: { type: 'string', enum: ['GET', 'POST', 'PUT', 'DELETE', 'PATCH'] },
            path: { type: 'string' },
            database_id: { type: 'string' },
            sql_query: { type: 'string' },
            parameters: { type: 'array' },
            folder_id: { type: 'string', format: 'uuid', nullable: true },
            is_public: { type: 'boolean' },
            created_at: { type: 'string', format: 'date-time' },
          },
        },
        Folder: {
          type: 'object',
          properties: {
            id: { type: 'string', format: 'uuid' },
            name: { type: 'string' },
            parent_id: { type: 'string', format: 'uuid', nullable: true },
            created_at: { type: 'string', format: 'date-time' },
          },
        },
        ApiKey: {
          type: 'object',
          properties: {
            id: { type: 'string', format: 'uuid' },
            name: { type: 'string' },
            key: { type: 'string' },
            permissions: { type: 'array', items: { type: 'string' } },
            is_active: { type: 'boolean' },
            created_at: { type: 'string', format: 'date-time' },
            expires_at: { type: 'string', format: 'date-time', nullable: true },
          },
        },
      },
    },
    tags: [
      { name: 'Authentication', description: 'Authentication endpoints' },
      { name: 'Endpoints', description: 'API endpoint management' },
      { name: 'Folders', description: 'Folder management for organizing endpoints' },
      { name: 'API Keys', description: 'API key management' },
      { name: 'Databases', description: 'Database connection information' },
      { name: 'Dynamic API', description: 'Dynamically created API endpoints' },
    ],
  },
  // JSDoc annotations in route files are merged into this definition.
  apis: ['./src/routes/*.ts'],
};

export const swaggerSpec = swaggerJsdoc(options);

View File

@@ -0,0 +1,107 @@
import { Response } from 'express';
import { AuthRequest } from '../middleware/auth';
import { mainPool } from '../config/database';
import { v4 as uuidv4 } from 'uuid';
import crypto from 'crypto';
/**
 * GET /api-keys — lists every API key, newest first.
 * NOTE(review): the raw `key` column is returned to the client here —
 * confirm that exposing full key material on list is intended.
 */
export const getApiKeys = async (req: AuthRequest, res: Response) => {
  const listSql = `SELECT id, name, key, permissions, is_active, enable_logging, created_at, expires_at
       FROM api_keys
       ORDER BY created_at DESC`;
  try {
    const { rows } = await mainPool.query(listSql);
    res.json(rows);
  } catch (error) {
    console.error('Get API keys error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};
/**
 * POST /api-keys — creates a new API key owned by the authenticated user.
 * Key material is 32 random bytes, hex-encoded with a "kb_" prefix.
 * Responds 201 with the full inserted row (including the plaintext key)
 * so the caller can copy it.
 */
export const createApiKey = async (req: AuthRequest, res: Response) => {
  try {
    const { name, permissions, expires_at, enable_logging } = req.body;
    if (!name) {
      return res.status(400).json({ error: 'API key name is required' });
    }

    // Generate a secure API key
    const apiKey = `kb_${crypto.randomBytes(32).toString('hex')}`;

    const insertValues = [
      name,
      apiKey,
      req.user!.id,
      JSON.stringify(permissions || []),
      expires_at || null,
      enable_logging || false,
    ];
    const result = await mainPool.query(
      `INSERT INTO api_keys (name, key, user_id, permissions, expires_at, enable_logging)
       VALUES ($1, $2, $3, $4, $5, $6)
       RETURNING *`,
      insertValues
    );

    res.status(201).json(result.rows[0]);
  } catch (error) {
    console.error('Create API key error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};
/**
 * PUT /api-keys/:id — partial update of an API key. Any field omitted
 * from the body is bound as NULL and keeps its stored value through
 * COALESCE. Responds 404 when the id does not exist.
 */
export const updateApiKey = async (req: AuthRequest, res: Response) => {
  try {
    const { id } = req.params;
    const { name, permissions, is_active, expires_at, enable_logging } = req.body;

    const bindings = [
      name,
      permissions ? JSON.stringify(permissions) : null,
      is_active,
      expires_at,
      enable_logging,
      id,
    ];
    const { rows } = await mainPool.query(
      `UPDATE api_keys
       SET name = COALESCE($1, name),
           permissions = COALESCE($2, permissions),
           is_active = COALESCE($3, is_active),
           expires_at = COALESCE($4, expires_at),
           enable_logging = COALESCE($5, enable_logging)
       WHERE id = $6
       RETURNING *`,
      bindings
    );

    if (rows.length === 0) {
      return res.status(404).json({ error: 'API key not found' });
    }
    res.json(rows[0]);
  } catch (error) {
    console.error('Update API key error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};
/**
 * DELETE /api-keys/:id — permanently removes an API key.
 * Responds 404 when no key with the given id exists.
 */
export const deleteApiKey = async (req: AuthRequest, res: Response) => {
  const { id } = req.params;
  try {
    const { rows } = await mainPool.query(
      'DELETE FROM api_keys WHERE id = $1 RETURNING id',
      [id]
    );
    if (rows.length === 0) {
      return res.status(404).json({ error: 'API key not found' });
    }
    res.json({ message: 'API key deleted successfully' });
  } catch (error) {
    console.error('Delete API key error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};

View File

@@ -0,0 +1,72 @@
import { Request, Response } from 'express';
import bcrypt from 'bcrypt';
import jwt, { SignOptions } from 'jsonwebtoken';
import { mainPool } from '../config/database';
import { config } from '../config/environment';
/**
 * POST /auth/login — verifies username/password against the users table
 * and issues a JWT carrying the user id. Uses the same 401 message for
 * "unknown user" and "wrong password" so callers can't probe usernames.
 */
export const login = async (req: Request, res: Response) => {
  try {
    const { username, password } = req.body;
    if (!username || !password) {
      return res.status(400).json({ error: 'Не заполнены обязательные поля' });
    }

    // Look the user up by username
    const userResult = await mainPool.query(
      'SELECT id, username, password_hash, role, is_superadmin FROM users WHERE username = $1',
      [username]
    );
    const user = userResult.rows[0];
    if (!user) {
      return res.status(401).json({ error: 'Неверные учетные данные' });
    }

    // Check the password against the stored bcrypt hash
    const passwordOk = await bcrypt.compare(password, user.password_hash);
    if (!passwordOk) {
      return res.status(401).json({ error: 'Неверные учетные данные' });
    }

    // Issue the session token
    const token = jwt.sign(
      { userId: user.id },
      config.jwt.secret,
      { expiresIn: config.jwt.expiresIn as any }
    );

    res.json({
      user: {
        id: user.id,
        username: user.username,
        role: user.role,
        is_superadmin: user.is_superadmin,
      },
      token,
    });
  } catch (error) {
    console.error('Login error:', error);
    res.status(500).json({ error: 'Ошибка сервера' });
  }
};
/**
 * GET /auth/me — returns the authenticated user's profile.
 * NOTE(review): `req` is typed `any`; presumably it is the AuthRequest
 * populated by the auth middleware — confirm and tighten the type.
 */
export const getMe = async (req: any, res: Response) => {
  try {
    const { rows } = await mainPool.query(
      'SELECT id, username, role, is_superadmin, created_at FROM users WHERE id = $1',
      [req.user.id]
    );
    const profile = rows[0];
    if (!profile) {
      return res.status(404).json({ error: 'Пользователь не найден' });
    }
    res.json(profile);
  } catch (error) {
    console.error('Get me error:', error);
    res.status(500).json({ error: 'Ошибка сервера' });
  }
};

View File

@@ -0,0 +1,66 @@
import { Request, Response } from 'express';
import { databasePoolManager } from '../services/DatabasePoolManager';
import { sqlExecutor } from '../services/SqlExecutor';
/**
 * GET /databases — lists configured target databases with credentials
 * stripped, so passwords never leave the server.
 */
export const getDatabases = async (req: Request, res: Response) => {
  try {
    const configs = await databasePoolManager.getAllDatabaseConfigs();
    // Expose only non-sensitive connection details
    const sanitized = configs.map((db) => ({
      id: db.id,
      name: db.name,
      type: db.type,
      host: db.host,
      port: db.port,
      database: db.database_name,
    }));
    res.json(sanitized);
  } catch (error) {
    console.error('Get databases error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};
/**
 * GET /databases/:databaseId/test — probes connectivity of one target
 * database and reports success/failure without raising to the client.
 */
export const testDatabaseConnection = async (req: Request, res: Response) => {
  const { databaseId } = req.params;
  try {
    const ok = await databasePoolManager.testConnection(databaseId);
    res.json({
      success: ok,
      message: ok ? 'Connection successful' : 'Connection failed',
    });
  } catch (error) {
    console.error('Test connection error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};
/**
 * GET /databases/:databaseId/tables — lists the tables visible in one
 * target database. On failure the executor's error message is surfaced.
 */
export const getDatabaseTables = async (req: Request, res: Response) => {
  const { databaseId } = req.params;
  try {
    const tables = await sqlExecutor.getAllTables(databaseId);
    res.json({ tables });
  } catch (error: any) {
    console.error('Get tables error:', error);
    res.status(500).json({ error: error.message });
  }
};
/**
 * GET /databases/:databaseId/tables/:tableName/schema — returns column
 * metadata for one table. On failure the executor's message is surfaced.
 */
export const getTableSchema = async (req: Request, res: Response) => {
  const { databaseId, tableName } = req.params;
  try {
    const schema = await sqlExecutor.getTableSchema(databaseId, tableName);
    res.json({ schema });
  } catch (error: any) {
    console.error('Get table schema error:', error);
    res.status(500).json({ error: error.message });
  }
};

View File

@@ -0,0 +1,185 @@
import { Response } from 'express';
import { AuthRequest } from '../middleware/auth';
import { mainPool } from '../config/database';
import { databasePoolManager } from '../services/DatabasePoolManager';
// Только админы могут управлять базами данных
/**
 * Admin: GET /database-management — lists registered databases sorted by
 * name. The password column is deliberately excluded from the SELECT.
 */
export const getDatabases = async (req: AuthRequest, res: Response) => {
  try {
    const { rows } = await mainPool.query(
      'SELECT id, name, type, host, port, database_name, username, ssl, is_active, created_at, updated_at FROM databases ORDER BY name'
    );
    res.json(rows);
  } catch (error) {
    console.error('Get databases error:', error);
    res.status(500).json({ error: 'Ошибка получения списка баз данных' });
  }
};
/**
 * Admin: GET /database-management/:id — fetches one database config
 * (password excluded). Responds 404 when the id is unknown.
 */
export const getDatabase = async (req: AuthRequest, res: Response) => {
  const { id } = req.params;
  try {
    const { rows } = await mainPool.query(
      'SELECT id, name, type, host, port, database_name, username, ssl, is_active, created_at, updated_at FROM databases WHERE id = $1',
      [id]
    );
    const db = rows[0];
    if (!db) {
      return res.status(404).json({ error: 'База данных не найдена' });
    }
    res.json(db);
  } catch (error) {
    console.error('Get database error:', error);
    res.status(500).json({ error: 'Ошибка получения базы данных' });
  }
};
/**
 * Admin: POST /database-management — registers a new target database and
 * spins up its connection pool. 23505 (unique violation) maps to a 400.
 * NOTE(review): the password appears to be stored as provided — confirm
 * whether it is encrypted at rest elsewhere.
 */
export const createDatabase = async (req: AuthRequest, res: Response) => {
  try {
    const { name, type, host, port, database_name, username, password, ssl } = req.body;
    if (!name || !host || !port || !database_name || !username || !password) {
      return res.status(400).json({ error: 'Не заполнены обязательные поля' });
    }

    const insertValues = [name, type || 'postgresql', host, port, database_name, username, password, ssl || false];
    const { rows } = await mainPool.query(
      `INSERT INTO databases (name, type, host, port, database_name, username, password, ssl, is_active)
       VALUES ($1, $2, $3, $4, $5, $6, $7, $8, true)
       RETURNING *`,
      insertValues
    );
    const newDb = rows[0];

    // Register a connection pool for the new database
    await databasePoolManager.reloadPool(newDb.id);

    // Never echo the password back
    delete newDb.password;
    res.status(201).json(newDb);
  } catch (error: any) {
    console.error('Create database error:', error);
    if (error.code === '23505') {
      return res.status(400).json({ error: 'База данных с таким именем уже существует' });
    }
    res.status(500).json({ error: 'Ошибка создания базы данных' });
  }
};
/**
 * Admin: PUT /database-management/:id — partial update of a database
 * config. Fields omitted from the body keep their stored value via
 * COALESCE; the password is only overwritten when explicitly provided
 * (two query variants with different positional parameter layouts).
 * Reloads the connection pool on success; 23505 maps to a 400.
 */
export const updateDatabase = async (req: AuthRequest, res: Response) => {
  try {
    const { id } = req.params;
    const { name, type, host, port, database_name, username, password, ssl, is_active } = req.body;

    // If no password was supplied, keep the stored one (separate query
    // so the parameter positions stay consistent with the bindings below)
    let query;
    let params;

    if (password) {
      query = `
        UPDATE databases
        SET name = COALESCE($1, name),
            type = COALESCE($2, type),
            host = COALESCE($3, host),
            port = COALESCE($4, port),
            database_name = COALESCE($5, database_name),
            username = COALESCE($6, username),
            password = $7,
            ssl = COALESCE($8, ssl),
            is_active = COALESCE($9, is_active),
            updated_at = CURRENT_TIMESTAMP
        WHERE id = $10
        RETURNING id, name, type, host, port, database_name, username, ssl, is_active, created_at, updated_at
      `;
      params = [name, type, host, port, database_name, username, password, ssl, is_active, id];
    } else {
      query = `
        UPDATE databases
        SET name = COALESCE($1, name),
            type = COALESCE($2, type),
            host = COALESCE($3, host),
            port = COALESCE($4, port),
            database_name = COALESCE($5, database_name),
            username = COALESCE($6, username),
            ssl = COALESCE($7, ssl),
            is_active = COALESCE($8, is_active),
            updated_at = CURRENT_TIMESTAMP
        WHERE id = $9
        RETURNING id, name, type, host, port, database_name, username, ssl, is_active, created_at, updated_at
      `;
      params = [name, type, host, port, database_name, username, ssl, is_active, id];
    }

    const result = await mainPool.query(query, params);

    if (result.rows.length === 0) {
      return res.status(404).json({ error: 'База данных не найдена' });
    }

    // Reload the connection pool so new settings take effect immediately
    await databasePoolManager.reloadPool(id);

    res.json(result.rows[0]);
  } catch (error: any) {
    console.error('Update database error:', error);
    if (error.code === '23505') {
      return res.status(400).json({ error: 'База данных с таким именем уже существует' });
    }
    res.status(500).json({ error: 'Ошибка обновления базы данных' });
  }
};
/**
 * Admin: DELETE /database-management/:id — removes a database config,
 * refusing (400) while any endpoint still references it, then tears
 * down its connection pool.
 */
export const deleteDatabase = async (req: AuthRequest, res: Response) => {
  const { id } = req.params;
  try {
    // Refuse deletion while endpoints still point at this database
    const usage = await mainPool.query(
      'SELECT COUNT(*) FROM endpoints WHERE database_id = $1',
      [id]
    );
    if (parseInt(usage.rows[0].count) > 0) {
      return res.status(400).json({
        error: 'Невозможно удалить базу данных, используемую в эндпоинтах'
      });
    }

    const deleted = await mainPool.query(
      'DELETE FROM databases WHERE id = $1 RETURNING id',
      [id]
    );
    if (deleted.rows.length === 0) {
      return res.status(404).json({ error: 'База данных не найдена' });
    }

    // Drop the associated connection pool
    databasePoolManager.removePool(id);

    res.json({ message: 'База данных удалена успешно' });
  } catch (error) {
    console.error('Delete database error:', error);
    res.status(500).json({ error: 'Ошибка удаления базы данных' });
  }
};
/**
 * Admin: POST /database-management/:id/test — probes connectivity of a
 * registered database and reports the outcome.
 */
export const testDatabaseConnection = async (req: AuthRequest, res: Response) => {
  const { id } = req.params;
  try {
    const ok = await databasePoolManager.testConnection(id);
    res.json({
      success: ok,
      message: ok ? 'Подключение успешно' : 'Ошибка подключения',
    });
  } catch (error) {
    console.error('Test connection error:', error);
    res.status(500).json({ error: 'Ошибка тестирования подключения' });
  }
};

View File

@@ -0,0 +1,303 @@
import { Response } from 'express';
import { ApiKeyRequest } from '../middleware/apiKey';
import { mainPool } from '../config/database';
import { sqlExecutor } from '../services/SqlExecutor';
import { scriptExecutor } from '../services/ScriptExecutor';
import { EndpointParameter, ScriptQuery } from '../types';
/**
 * Catch-all handler for user-created ("dynamic") endpoints mounted under
 * /api/v1. Resolves the endpoint row by (path, method), enforces
 * public/API-key/folder permissions, validates and type-converts the
 * declared parameters, executes either the stored SQL query or the
 * stored script, and optionally writes a request log row (logging is on
 * when enabled on the endpoint OR on the API key).
 */
export const executeDynamicEndpoint = async (req: ApiKeyRequest, res: Response) => {
  const startTime = Date.now();
  let shouldLog = false;
  let endpointId: string | null = null;

  try {
    // Extract the path from the request (remove /api/v1 prefix)
    const requestPath = req.path; // This already has the path without /api/v1
    const requestMethod = req.method.toUpperCase();

    // Fetch endpoint configuration by path and method
    const endpointResult = await mainPool.query(
      'SELECT * FROM endpoints WHERE path = $1 AND method = $2',
      [requestPath, requestMethod]
    );

    if (endpointResult.rows.length === 0) {
      return res.status(404).json({
        error: 'Endpoint not found',
        path: requestPath,
        method: requestMethod
      });
    }

    const endpoint = endpointResult.rows[0];
    endpointId = endpoint.id;

    // Check if logging is enabled (on endpoint OR on API key, but log only once)
    const endpointLogging = endpoint.enable_logging || false;
    const apiKeyLogging = req.apiKey?.enable_logging || false;
    shouldLog = endpointLogging || apiKeyLogging;

    // Check if endpoint is public or if API key has permission
    if (!endpoint.is_public) {
      if (!req.apiKey) {
        return res.status(401).json({ error: 'API key required for this endpoint' });
      }

      // Direct grant: the endpoint's own id, or the wildcard '*'
      let hasPermission = req.apiKey.permissions.includes(endpointId!) ||
                          req.apiKey.permissions.includes('*');

      // If no direct permission, check folder permissions
      if (!hasPermission && endpoint.folder_id) {
        // Walk up the folder tree: a "folder:<id>" grant on any ancestor
        // folder covers this endpoint (one query per level)
        let currentFolderId: string | null = endpoint.folder_id;

        while (currentFolderId && !hasPermission) {
          if (req.apiKey.permissions.includes(`folder:${currentFolderId}`)) {
            hasPermission = true;
            break;
          }

          // Get parent folder
          const folderResult = await mainPool.query(
            'SELECT parent_id FROM folders WHERE id = $1',
            [currentFolderId]
          );

          currentFolderId = folderResult.rows.length > 0 ? folderResult.rows[0].parent_id : null;
        }
      }

      if (!hasPermission) {
        return res.status(403).json({ error: 'Access denied to this endpoint' });
      }
    }

    // Parse parameters — PostgreSQL may return either a JSON string or
    // an already-parsed value depending on the column type
    let parameters: EndpointParameter[] = [];
    if (endpoint.parameters) {
      if (typeof endpoint.parameters === 'string') {
        try {
          parameters = JSON.parse(endpoint.parameters);
        } catch (e) {
          parameters = []; // malformed JSON — treat as "no parameters"
        }
      } else if (Array.isArray(endpoint.parameters)) {
        parameters = endpoint.parameters;
      }
    }

    // Build request parameters object
    const requestParams: Record<string, any> = {};

    // Extract and validate parameters from request
    for (const param of parameters) {
      let value;

      // Read the value from the location the parameter declares
      if (param.in === 'query') {
        value = req.query[param.name];
      } else if (param.in === 'body') {
        value = req.body[param.name];
      } else if (param.in === 'path') {
        value = req.params[param.name];
      }

      // Use default value if not provided
      if (value === undefined || value === null) {
        if (param.required) {
          return res.status(400).json({
            error: `Missing required parameter: ${param.name}`,
          });
        }
        value = param.default_value;
      }

      // Type conversion (declared types: number, boolean, date; anything
      // else passes through unchanged)
      if (value !== undefined && value !== null) {
        switch (param.type) {
          case 'number':
            value = Number(value);
            if (isNaN(value)) {
              return res.status(400).json({
                error: `Parameter ${param.name} must be a number`,
              });
            }
            break;
          case 'boolean':
            // Only the string 'true' or boolean true count as true
            value = value === 'true' || value === true;
            break;
          case 'date':
            value = new Date(value);
            if (isNaN(value.getTime())) {
              return res.status(400).json({
                error: `Parameter ${param.name} must be a valid date`,
              });
            }
            break;
        }
      }

      requestParams[param.name] = value;
    }

    let result;
    const executionType = endpoint.execution_type || 'sql';

    if (executionType === 'script') {
      // Execute script
      const scriptLanguage = endpoint.script_language;
      const scriptCode = endpoint.script_code;

      // script_queries may also arrive as a JSON string or parsed array
      let scriptQueries: ScriptQuery[] = [];
      if (endpoint.script_queries) {
        if (typeof endpoint.script_queries === 'string') {
          try {
            scriptQueries = JSON.parse(endpoint.script_queries);
          } catch (e) {
            scriptQueries = [];
          }
        } else if (Array.isArray(endpoint.script_queries)) {
          scriptQueries = endpoint.script_queries;
        }
      }

      if (!scriptLanguage || !scriptCode) {
        return res.status(500).json({ error: 'Script configuration is incomplete' });
      }

      const scriptResult = await scriptExecutor.execute(scriptLanguage, scriptCode, {
        databaseId: endpoint.database_id,
        scriptQueries,
        requestParams,
        endpointParameters: parameters,
      });

      // Normalize the script result to the SQL-result shape
      result = {
        rows: scriptResult.data || scriptResult,
        rowCount: scriptResult.rowCount || (Array.isArray(scriptResult.data) ? scriptResult.data.length : 0),
        executionTime: scriptResult.executionTime || 0,
      };
    } else {
      // Execute SQL query
      const queryParams: any[] = [];
      parameters.forEach((param) => {
        queryParams.push(requestParams[param.name]);
      });

      // Convert named parameters ($paramName) into positional ones
      // ($1, $2, $3...) in declaration order.
      // NOTE(review): assumes parameter names are not purely numeric —
      // a name like "1" could collide with an already-substituted
      // placeholder. Confirm names are validated at endpoint creation.
      let processedQuery = endpoint.sql_query;
      parameters.forEach((param, index) => {
        const paramName = param.name;
        const position = index + 1;
        // Replace every occurrence of $paramName with $position
        const regex = new RegExp(`\\$${paramName}\\b`, 'g');
        processedQuery = processedQuery.replace(regex, `$${position}`);
      });

      result = await sqlExecutor.executeQuery(
        endpoint.database_id,
        processedQuery,
        queryParams
      );
    }

    const responseData = {
      success: true,
      data: result.rows,
      rowCount: result.rowCount,
      executionTime: result.executionTime,
    };

    // Log if needed
    if (shouldLog && endpointId) {
      const executionTime = Date.now() - startTime;
      await logRequest({
        endpoint_id: endpointId,
        api_key_id: req.apiKey?.id || null,
        method: req.method,
        path: req.path,
        request_params: req.query || {},
        request_body: req.body || {},
        response_status: 200,
        response_data: responseData,
        execution_time: executionTime,
        error_message: null,
        ip_address: req.ip || req.socket.remoteAddress || 'unknown',
        user_agent: req.headers['user-agent'] || 'unknown',
      });
    }

    res.json(responseData);
  } catch (error: any) {
    console.error('Dynamic API execution error:', error);

    const errorResponse = {
      success: false,
      error: error.message,
    };

    // Log error if needed
    if (shouldLog && endpointId) {
      const executionTime = Date.now() - startTime;
      await logRequest({
        endpoint_id: endpointId,
        api_key_id: req.apiKey?.id || null,
        method: req.method,
        path: req.path,
        request_params: req.query || {},
        request_body: req.body || {},
        response_status: 500,
        response_data: errorResponse,
        execution_time: executionTime,
        error_message: error.message,
        ip_address: req.ip || req.socket.remoteAddress || 'unknown',
        user_agent: req.headers['user-agent'] || 'unknown',
      });
    }

    res.status(500).json(errorResponse);
  }
};
/**
 * Persists one request/response record into `request_logs`.
 * Failures are logged and swallowed so logging can never break the
 * actual API response.
 */
async function logRequest(data: {
  endpoint_id: string;
  api_key_id: string | null;
  method: string;
  path: string;
  request_params: any;
  request_body: any;
  response_status: number;
  response_data: any;
  execution_time: number;
  error_message: string | null;
  ip_address: string;
  user_agent: string;
}) {
  const insertSql = `INSERT INTO request_logs (
      endpoint_id, api_key_id, method, path,
      request_params, request_body, response_status,
      response_data, execution_time, error_message,
      ip_address, user_agent
    ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)`;
  // JSON-typed columns are serialized explicitly before binding
  const bindings = [
    data.endpoint_id,
    data.api_key_id,
    data.method,
    data.path,
    JSON.stringify(data.request_params),
    JSON.stringify(data.request_body),
    data.response_status,
    JSON.stringify(data.response_data),
    data.execution_time,
    data.error_message,
    data.ip_address,
    data.user_agent,
  ];
  try {
    await mainPool.query(insertSql, bindings);
  } catch (error) {
    console.error('Failed to log request:', error);
  }
}

View File

@@ -0,0 +1,317 @@
import { Response } from 'express';
import { AuthRequest } from '../middleware/auth';
import { mainPool } from '../config/database';
import { v4 as uuidv4 } from 'uuid';
/**
 * GET /endpoints — lists endpoints (with folder name), newest first.
 * Optional filters: ?folder_id=<uuid> and ?search=<text>, the latter a
 * case-insensitive match over name, description, SQL query and path.
 */
export const getEndpoints = async (req: AuthRequest, res: Response) => {
  try {
    const { search, folder_id } = req.query;

    const bindings: any[] = [];
    let sql = `
      SELECT e.*, f.name as folder_name
      FROM endpoints e
      LEFT JOIN folders f ON e.folder_id = f.id
      WHERE 1=1
    `;

    if (folder_id) {
      bindings.push(folder_id);
      sql += ` AND e.folder_id = $${bindings.length}`;
    }

    if (search) {
      bindings.push(`%${search}%`);
      const idx = bindings.length;
      sql += ` AND (
        e.name ILIKE $${idx} OR
        e.description ILIKE $${idx} OR
        e.sql_query ILIKE $${idx} OR
        e.path ILIKE $${idx}
      )`;
    }

    sql += ` ORDER BY e.created_at DESC`;

    const { rows } = await mainPool.query(sql, bindings);
    res.json(rows);
  } catch (error) {
    console.error('Get endpoints error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};
/** Fetches a single endpoint (with folder name) by id; 404 when absent. */
export const getEndpoint = async (req: AuthRequest, res: Response) => {
  try {
    const detailSql = `SELECT e.*, f.name as folder_name
      FROM endpoints e
      LEFT JOIN folders f ON e.folder_id = f.id
      WHERE e.id = $1`;
    const { rows } = await mainPool.query(detailSql, [req.params.id]);
    if (!rows.length) {
      return res.status(404).json({ error: 'Endpoint not found' });
    }
    res.json(rows[0]);
  } catch (error) {
    console.error('Get endpoint error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};
/**
 * Creates an endpoint definition.
 * Always required: name, method, path. For execution_type 'sql' (the
 * default) a database_id and sql_query are also required; for 'script'
 * a script_language, script_code and script_queries are required.
 * Responds 201 with the inserted row, 400 on validation/duplicate path.
 */
export const createEndpoint = async (req: AuthRequest, res: Response) => {
  try {
    const {
      name, description, method, path, database_id, sql_query, parameters,
      folder_id, is_public, enable_logging,
      execution_type, script_language, script_code, script_queries,
    } = req.body;

    if (!name || !method || !path) {
      return res.status(400).json({ error: 'Missing required fields' });
    }

    const execType = execution_type || 'sql';

    // Validation for the SQL execution type.
    if (execType === 'sql' && (!database_id || !sql_query)) {
      return res.status(400).json({ error: 'Database ID and SQL query are required for SQL execution type' });
    }
    // Validation for the script execution type.
    if (execType === 'script' && (!script_language || !script_code || !script_queries)) {
      return res.status(400).json({ error: 'Script language, code, and queries are required for script execution type' });
    }

    const values = [
      name,
      description || '',
      method,
      path,
      database_id || null,
      sql_query || '',
      JSON.stringify(parameters || []),
      folder_id || null,
      req.user!.id,
      is_public || false,
      enable_logging || false,
      execType,
      script_language || null,
      script_code || null,
      JSON.stringify(script_queries || []),
    ];
    const placeholders = values.map((_, i) => `$${i + 1}`).join(', ');
    const result = await mainPool.query(
      `INSERT INTO endpoints (
        name, description, method, path, database_id, sql_query, parameters,
        folder_id, user_id, is_public, enable_logging,
        execution_type, script_language, script_code, script_queries
      )
      VALUES (${placeholders})
      RETURNING *`,
      values
    );
    res.status(201).json(result.rows[0]);
  } catch (error: any) {
    console.error('Create endpoint error:', error);
    // 23505 = Postgres unique_violation (duplicate path).
    if (error.code === '23505') {
      return res.status(400).json({ error: 'Endpoint path already exists' });
    }
    res.status(500).json({ error: 'Internal server error' });
  }
};
/**
 * Full (PUT-style) update of an endpoint definition. Responds with the
 * updated row, 404 when the id does not exist, 400 on duplicate path.
 *
 * Fixes:
 * - execution_type now defaults to 'sql' when omitted. Previously an
 *   omitted value was written as NULL (the CHECK constraint permits NULL),
 *   leaving the endpoint with no execution type at all.
 * - parameters / script_queries default to [] instead of NULL, matching
 *   the columns' '[]'::jsonb defaults and what createEndpoint writes.
 */
export const updateEndpoint = async (req: AuthRequest, res: Response) => {
  try {
    const { id } = req.params;
    const {
      name, description, method, path, database_id, sql_query, parameters,
      folder_id, is_public, enable_logging,
      execution_type, script_language, script_code, script_queries,
    } = req.body;
    const execType = execution_type || 'sql';
    const result = await mainPool.query(
      `UPDATE endpoints
       SET name = $1,
           description = $2,
           method = $3,
           path = $4,
           database_id = $5,
           sql_query = $6,
           parameters = $7,
           folder_id = $8,
           is_public = $9,
           enable_logging = $10,
           execution_type = $11,
           script_language = $12,
           script_code = $13,
           script_queries = $14,
           updated_at = CURRENT_TIMESTAMP
       WHERE id = $15
       RETURNING *`,
      [
        name,
        description,
        method,
        path,
        database_id || null,
        sql_query,
        JSON.stringify(parameters || []),
        folder_id || null,
        is_public,
        enable_logging,
        execType,
        script_language || null,
        script_code || null,
        JSON.stringify(script_queries || []),
        id,
      ]
    );
    if (result.rows.length === 0) {
      return res.status(404).json({ error: 'Endpoint not found' });
    }
    res.json(result.rows[0]);
  } catch (error: any) {
    console.error('Update endpoint error:', error);
    // 23505 = Postgres unique_violation (duplicate path).
    if (error.code === '23505') {
      return res.status(400).json({ error: 'Endpoint path already exists' });
    }
    res.status(500).json({ error: 'Internal server error' });
  }
};
/** Deletes an endpoint by id; 404 when no row matched. */
export const deleteEndpoint = async (req: AuthRequest, res: Response) => {
  try {
    const { rows } = await mainPool.query(
      'DELETE FROM endpoints WHERE id = $1 RETURNING id',
      [req.params.id]
    );
    if (!rows.length) {
      return res.status(404).json({ error: 'Endpoint not found' });
    }
    res.json({ message: 'Endpoint deleted successfully' });
  } catch (error) {
    console.error('Delete endpoint error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};
/**
 * Ad-hoc test runner for an endpoint definition without saving it.
 * 'sql' type: rewrites named parameters ($paramName) to positional ($1…)
 * and executes the query once. 'script' type: runs the script with the
 * supplied test parameter values. All failures are reported as 400 with
 * the error message (deliberate for a test console).
 *
 * Fix: parameter names are now regex-escaped before being interpolated
 * into the substitution RegExp, so a name containing metacharacters
 * (e.g. "a.b") can no longer corrupt the $name -> $N rewrite.
 */
export const testEndpoint = async (req: AuthRequest, res: Response) => {
  // Escape regex metacharacters in a parameter name.
  const escapeRegExp = (s: string) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  try {
    const {
      database_id,
      sql_query,
      parameters,
      endpoint_parameters,
      execution_type,
      script_language,
      script_code,
      script_queries
    } = req.body;
    const execType = execution_type || 'sql';
    if (execType === 'sql') {
      if (!database_id) {
        return res.status(400).json({ error: 'Missing database_id for SQL execution' });
      }
      if (!sql_query) {
        return res.status(400).json({ error: 'Missing sql_query' });
      }
      // Convert named parameters ($paramName) to positional ones ($1, $2, ...).
      let processedQuery = sql_query;
      if (endpoint_parameters && Array.isArray(endpoint_parameters)) {
        endpoint_parameters.forEach((param: any, index: number) => {
          const position = index + 1;
          // Replace every occurrence of $<name> with $<position>.
          const regex = new RegExp(`\\$${escapeRegExp(param.name)}\\b`, 'g');
          processedQuery = processedQuery.replace(regex, `$${position}`);
        });
      }
      const { sqlExecutor } = require('../services/SqlExecutor');
      const result = await sqlExecutor.executeQuery(database_id, processedQuery, parameters || []);
      res.json({
        success: true,
        data: result.rows,
        rowCount: result.rowCount,
        executionTime: result.executionTime,
      });
    } else if (execType === 'script') {
      if (!script_language || !script_code) {
        return res.status(400).json({ error: 'Missing script_language or script_code' });
      }
      // Zip declared parameter names with the supplied test values.
      const requestParams: Record<string, any> = {};
      if (endpoint_parameters && Array.isArray(endpoint_parameters) && parameters && Array.isArray(parameters)) {
        endpoint_parameters.forEach((param: any, index: number) => {
          requestParams[param.name] = parameters[index];
        });
      }
      const { scriptExecutor } = require('../services/ScriptExecutor');
      const scriptResult = await scriptExecutor.execute(script_language, script_code, {
        databaseId: database_id,
        scriptQueries: script_queries || [],
        requestParams,
        endpointParameters: endpoint_parameters || [],
      });
      res.json({
        success: true,
        data: scriptResult.data || scriptResult,
        rowCount: scriptResult.rowCount || (Array.isArray(scriptResult.data) ? scriptResult.data.length : 0),
        executionTime: scriptResult.executionTime || 0,
      });
    } else {
      return res.status(400).json({ error: 'Invalid execution_type' });
    }
  } catch (error: any) {
    res.status(400).json({
      success: false,
      error: error.message,
    });
  }
};

View File

@@ -0,0 +1,108 @@
import { Response } from 'express';
import { AuthRequest } from '../middleware/auth';
import { mainPool } from '../config/database';
/** Lists all folders with their endpoint and subfolder counts, by name. */
export const getFolders = async (req: AuthRequest, res: Response) => {
  try {
    const listSql = `SELECT f.*,
        (SELECT COUNT(*) FROM endpoints WHERE folder_id = f.id) as endpoint_count,
        (SELECT COUNT(*) FROM folders WHERE parent_id = f.id) as subfolder_count
      FROM folders f
      ORDER BY f.name`;
    const { rows } = await mainPool.query(listSql);
    res.json(rows);
  } catch (error) {
    console.error('Get folders error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};
/** Fetches one folder by id; 404 when absent. */
export const getFolder = async (req: AuthRequest, res: Response) => {
  try {
    const { rows } = await mainPool.query(
      'SELECT * FROM folders WHERE id = $1',
      [req.params.id]
    );
    if (!rows.length) {
      return res.status(404).json({ error: 'Folder not found' });
    }
    res.json(rows[0]);
  } catch (error) {
    console.error('Get folder error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};
/** Creates a folder (optionally nested under parent_id) owned by the caller. */
export const createFolder = async (req: AuthRequest, res: Response) => {
  try {
    const { name, parent_id } = req.body;
    if (!name) {
      return res.status(400).json({ error: 'Folder name is required' });
    }
    const insertSql = `INSERT INTO folders (name, parent_id, user_id)
      VALUES ($1, $2, $3)
      RETURNING *`;
    const { rows } = await mainPool.query(insertSql, [
      name,
      parent_id || null,
      req.user!.id,
    ]);
    res.status(201).json(rows[0]);
  } catch (error) {
    console.error('Create folder error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};
/**
 * Partially updates a folder: name and parent_id keep their current value
 * when not supplied (COALESCE). 404 when the id does not exist.
 */
export const updateFolder = async (req: AuthRequest, res: Response) => {
  try {
    const { id } = req.params;
    const { name, parent_id } = req.body;
    const updateSql = `UPDATE folders
      SET name = COALESCE($1, name),
          parent_id = COALESCE($2, parent_id),
          updated_at = CURRENT_TIMESTAMP
      WHERE id = $3
      RETURNING *`;
    const { rows } = await mainPool.query(updateSql, [name, parent_id, id]);
    if (!rows.length) {
      return res.status(404).json({ error: 'Folder not found' });
    }
    res.json(rows[0]);
  } catch (error) {
    console.error('Update folder error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};
/** Deletes a folder by id; 404 when no row matched. */
export const deleteFolder = async (req: AuthRequest, res: Response) => {
  try {
    const { rows } = await mainPool.query(
      'DELETE FROM folders WHERE id = $1 RETURNING id',
      [req.params.id]
    );
    if (!rows.length) {
      return res.status(404).json({ error: 'Folder not found' });
    }
    res.json({ message: 'Folder deleted successfully' });
  } catch (error) {
    console.error('Delete folder error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
};

View File

@@ -0,0 +1,122 @@
import { Response } from 'express';
import { AuthRequest } from '../middleware/auth';
import { mainPool } from '../config/database';
/**
 * Paginated listing of request logs with endpoint and API-key names,
 * optionally filtered by endpoint_id and/or api_key_id. limit/offset come
 * from the query string (defaults 100 / 0) and are passed as bind values.
 */
export const getLogs = async (req: AuthRequest, res: Response) => {
  try {
    const { endpoint_id, api_key_id, limit = 100, offset = 0 } = req.query;
    const filters: string[] = [];
    const params: any[] = [];
    if (endpoint_id) {
      params.push(endpoint_id);
      filters.push(` AND rl.endpoint_id = $${params.length}`);
    }
    if (api_key_id) {
      params.push(api_key_id);
      filters.push(` AND rl.api_key_id = $${params.length}`);
    }
    params.push(limit, offset);
    const sql = `
      SELECT
        rl.*,
        e.name as endpoint_name,
        e.path as endpoint_path,
        ak.name as api_key_name
      FROM request_logs rl
      LEFT JOIN endpoints e ON rl.endpoint_id = e.id
      LEFT JOIN api_keys ak ON rl.api_key_id = ak.id
      WHERE 1=1${filters.join('')}
      ORDER BY rl.created_at DESC LIMIT $${params.length - 1} OFFSET $${params.length}`;
    const result = await mainPool.query(sql, params);
    res.json(result.rows);
  } catch (error: any) {
    console.error('Get logs error:', error);
    res.status(500).json({ error: error.message });
  }
};
/** Fetches one request log (with endpoint / key names) by id; 404 when absent. */
export const getLogById = async (req: AuthRequest, res: Response) => {
  const detailSql = `SELECT
      rl.*,
      e.name as endpoint_name,
      e.path as endpoint_path,
      ak.name as api_key_name
    FROM request_logs rl
    LEFT JOIN endpoints e ON rl.endpoint_id = e.id
    LEFT JOIN api_keys ak ON rl.api_key_id = ak.id
    WHERE rl.id = $1`;
  try {
    const { rows } = await mainPool.query(detailSql, [req.params.id]);
    if (!rows.length) {
      return res.status(404).json({ error: 'Log not found' });
    }
    res.json(rows[0]);
  } catch (error: any) {
    console.error('Get log by id error:', error);
    res.status(500).json({ error: error.message });
  }
};
/**
 * Deletes a single request log by id.
 *
 * Fix: now returns 404 when the id does not exist, matching every other
 * delete handler in this codebase (deleteEndpoint, deleteFolder,
 * deleteUser). Previously it reported success unconditionally.
 */
export const deleteLog = async (req: AuthRequest, res: Response) => {
  try {
    const { id } = req.params;
    const result = await mainPool.query(
      'DELETE FROM request_logs WHERE id = $1 RETURNING id',
      [id]
    );
    if (result.rows.length === 0) {
      return res.status(404).json({ error: 'Log not found' });
    }
    res.json({ success: true });
  } catch (error: any) {
    console.error('Delete log error:', error);
    res.status(500).json({ error: error.message });
  }
};
/**
 * Bulk-deletes request logs matching the optional body filters:
 * endpoint_id, api_key_id, before_date (rows created strictly earlier).
 * Responds with { success, deleted: <row count> }.
 *
 * WARNING: with an empty body this deletes EVERY row in request_logs.
 * That appears intentional for a "clear all" action — confirm the UI
 * always confirms before calling this without filters.
 */
export const clearLogs = async (req: AuthRequest, res: Response) => {
  try {
    const { endpoint_id, api_key_id, before_date } = req.body;
    // WHERE 1=1 lets each filter append uniformly with " AND ...".
    let query = 'DELETE FROM request_logs WHERE 1=1';
    const params: any[] = [];
    let paramCount = 0;
    if (endpoint_id) {
      paramCount++;
      query += ` AND endpoint_id = $${paramCount}`;
      params.push(endpoint_id);
    }
    if (api_key_id) {
      paramCount++;
      query += ` AND api_key_id = $${paramCount}`;
      params.push(api_key_id);
    }
    if (before_date) {
      paramCount++;
      query += ` AND created_at < $${paramCount}`;
      params.push(before_date);
    }
    const result = await mainPool.query(query, params);
    res.json({ success: true, deleted: result.rowCount });
  } catch (error: any) {
    console.error('Clear logs error:', error);
    res.status(500).json({ error: error.message });
  }
};

View File

@@ -0,0 +1,119 @@
import { Response } from 'express';
import { AuthRequest } from '../middleware/auth';
import { mainPool } from '../config/database';
import bcrypt from 'bcrypt';
/** Returns all users (password hashes excluded), newest first. */
export const getUsers = async (req: AuthRequest, res: Response) => {
  const listSql =
    'SELECT id, username, role, is_superadmin, created_at, updated_at FROM users ORDER BY created_at DESC';
  try {
    const { rows } = await mainPool.query(listSql);
    res.json(rows);
  } catch (error) {
    console.error('Get users error:', error);
    res.status(500).json({ error: 'Ошибка получения списка пользователей' });
  }
};
/**
 * Creates a user. Requires username and password; role defaults to
 * 'admin', is_superadmin to false. Passwords are stored as bcrypt
 * hashes (cost factor 10). 400 on duplicate username.
 */
export const createUser = async (req: AuthRequest, res: Response) => {
  try {
    const { username, password, role, is_superadmin } = req.body;
    if (!username || !password) {
      return res.status(400).json({ error: 'Не заполнены обязательные поля' });
    }
    const hash = await bcrypt.hash(password, 10);
    const insertSql = `INSERT INTO users (username, password_hash, role, is_superadmin)
      VALUES ($1, $2, $3, $4)
      RETURNING id, username, role, is_superadmin, created_at`;
    const { rows } = await mainPool.query(insertSql, [
      username,
      hash,
      role || 'admin',
      is_superadmin || false,
    ]);
    res.status(201).json(rows[0]);
  } catch (error: any) {
    console.error('Create user error:', error);
    // 23505 = unique_violation on username.
    if (error.code === '23505') {
      return res.status(400).json({ error: 'Пользователь уже существует' });
    }
    res.status(500).json({ error: 'Ошибка создания пользователя' });
  }
};
/**
 * Partially updates a user: username/role/is_superadmin keep their current
 * value when not supplied (COALESCE); password_hash is only touched when a
 * new password is provided. 404 for unknown id, 400 on duplicate username.
 */
export const updateUser = async (req: AuthRequest, res: Response) => {
  try {
    const { id } = req.params;
    const { username, password, role, is_superadmin } = req.body;

    // Build the SET clause incrementally so the password assignment is
    // only present when a new password was supplied.
    const assignments = ['username = COALESCE($1, username)'];
    const values: any[] = [username];
    if (password) {
      values.push(await bcrypt.hash(password, 10));
      assignments.push(`password_hash = $${values.length}`);
    }
    values.push(role);
    assignments.push(`role = COALESCE($${values.length}, role)`);
    values.push(is_superadmin);
    assignments.push(`is_superadmin = COALESCE($${values.length}, is_superadmin)`);
    assignments.push('updated_at = CURRENT_TIMESTAMP');
    values.push(id);

    const result = await mainPool.query(
      `UPDATE users
       SET ${assignments.join(', ')}
       WHERE id = $${values.length}
       RETURNING id, username, role, is_superadmin, created_at, updated_at`,
      values
    );
    if (result.rows.length === 0) {
      return res.status(404).json({ error: 'Пользователь не найден' });
    }
    res.json(result.rows[0]);
  } catch (error: any) {
    console.error('Update user error:', error);
    // 23505 = unique_violation on username.
    if (error.code === '23505') {
      return res.status(400).json({ error: 'Пользователь с таким именем уже существует' });
    }
    res.status(500).json({ error: 'Ошибка обновления пользователя' });
  }
};
/** Deletes a user by id; self-deletion is rejected with 400. */
export const deleteUser = async (req: AuthRequest, res: Response) => {
  try {
    const { id } = req.params;
    // A user may not delete their own account.
    if (id === req.user!.id) {
      return res.status(400).json({ error: 'Нельзя удалить самого себя' });
    }
    const { rows } = await mainPool.query(
      'DELETE FROM users WHERE id = $1 RETURNING id',
      [id]
    );
    if (!rows.length) {
      return res.status(404).json({ error: 'Пользователь не найден' });
    }
    res.json({ message: 'Пользователь удален успешно' });
  } catch (error) {
    console.error('Delete user error:', error);
    res.status(500).json({ error: 'Ошибка удаления пользователя' });
  }
};

View File

@@ -0,0 +1,83 @@
import { Request, Response, NextFunction } from 'express';
import { mainPool } from '../config/database';
// Request augmented with the validated API key. Set by apiKeyMiddleware
// when a valid X-API-Key header is present; absent otherwise.
export interface ApiKeyRequest extends Request {
  apiKey?: {
    id: string;
    name: string;
    permissions: string[]; // endpoint ids the key may call; '*' grants all
    user_id: string;
    enable_logging: boolean; // per-key request-logging opt-in
  };
}
/**
 * Optional API-key middleware: validates the X-API-Key header when one is
 * supplied. A missing header is NOT an error here — the dynamic endpoint
 * controller decides whether a key is required. Invalid, inactive or
 * expired keys are rejected with 401; on success req.apiKey is populated.
 */
export const apiKeyMiddleware = async (
  req: ApiKeyRequest,
  res: Response,
  next: NextFunction
) => {
  try {
    const providedKey = req.headers['x-api-key'] as string;
    if (!providedKey) {
      // No key supplied — defer the decision to the endpoint controller.
      return next();
    }
    const lookup = await mainPool.query(
      `SELECT id, name, user_id, permissions, is_active, expires_at, enable_logging
       FROM api_keys
       WHERE key = $1`,
      [providedKey]
    );
    const record = lookup.rows[0];
    if (!record) {
      return res.status(401).json({ error: 'Invalid API key' });
    }
    if (!record.is_active) {
      return res.status(401).json({ error: 'API key is inactive' });
    }
    const expired = record.expires_at && new Date(record.expires_at) < new Date();
    if (expired) {
      return res.status(401).json({ error: 'API key has expired' });
    }
    req.apiKey = {
      id: record.id,
      name: record.name,
      permissions: record.permissions || [],
      user_id: record.user_id,
      enable_logging: record.enable_logging || false,
    };
    next();
  } catch (error) {
    console.error('API key middleware error:', error);
    return res.status(500).json({ error: 'Internal server error' });
  }
};
/**
 * Factory for a guard that checks whether the validated API key may call
 * the given endpoint. A '*' entry in permissions grants access to all
 * endpoints. 401 without a key, 403 without the permission.
 */
export const checkEndpointPermission = (endpointId: string) => {
  return (req: ApiKeyRequest, res: Response, next: NextFunction) => {
    const key = req.apiKey;
    if (!key) {
      return res.status(401).json({ error: 'API key required' });
    }
    const allowed = [endpointId, '*'].some((p) => key.permissions.includes(p));
    if (!allowed) {
      return res.status(403).json({ error: 'Access denied to this endpoint' });
    }
    next();
  };
};

View File

@@ -0,0 +1,65 @@
import { Request, Response, NextFunction } from 'express';
import jwt from 'jsonwebtoken';
import { config } from '../config/environment';
import { mainPool } from '../config/database';
// Request augmented with the authenticated user. Set by authMiddleware
// from a fresh users-table lookup after JWT verification.
export interface AuthRequest extends Request {
  user?: {
    id: string;
    username: string;
    role: string; // 'admin' | 'user' per the users table CHECK constraint
    is_superadmin: boolean;
  };
}
/**
 * JWT auth middleware. Expects "Authorization: Bearer <token>", verifies
 * the token against the configured secret, then reloads the user row so
 * downstream handlers always see current role / superadmin flags.
 * 401 for missing/invalid token or unknown user.
 */
export const authMiddleware = async (
  req: AuthRequest,
  res: Response,
  next: NextFunction
) => {
  try {
    const header = req.headers.authorization;
    if (!header?.startsWith('Bearer ')) {
      return res.status(401).json({ error: 'No token provided' });
    }
    const token = header.slice('Bearer '.length);
    try {
      const payload = jwt.verify(token, config.jwt.secret) as any;
      // Re-fetch the user so stale tokens cannot grant removed privileges.
      const lookup = await mainPool.query(
        'SELECT id, username, role, is_superadmin FROM users WHERE id = $1',
        [payload.userId]
      );
      if (!lookup.rows.length) {
        return res.status(401).json({ error: 'User not found' });
      }
      req.user = lookup.rows[0];
      next();
    } catch {
      // Covers bad signatures, malformed tokens and expired tokens alike.
      return res.status(401).json({ error: 'Invalid token' });
    }
  } catch (error) {
    console.error('Auth middleware error:', error);
    return res.status(500).json({ error: 'Internal server error' });
  }
};
/** Route guard: allows only users whose role is 'admin'; otherwise 403. */
export const adminOnly = (req: AuthRequest, res: Response, next: NextFunction) => {
  if (req.user?.role === 'admin') {
    next();
    return;
  }
  res.status(403).json({ error: 'Admin access required' });
};
/** Route guard: allows only users flagged is_superadmin; otherwise 403. */
export const superAdminOnly = (req: AuthRequest, res: Response, next: NextFunction) => {
  if (req.user?.is_superadmin) {
    next();
    return;
  }
  res.status(403).json({ error: 'Superadmin access required' });
};

View File

@@ -0,0 +1,107 @@
import { Response, NextFunction } from 'express';
import { ApiKeyRequest } from './apiKey';
import { mainPool } from '../config/database';
// One row destined for the request_logs table.
interface LogData {
  endpoint_id: string | null;
  api_key_id: string | null;
  method: string;
  path: string;
  request_params: any; // snapshot of req.query
  request_body: any; // snapshot of req.body
  response_status: number;
  response_data: any;
  execution_time: number; // milliseconds (Date.now() delta)
  error_message: string | null;
  ip_address: string;
  user_agent: string;
}
/**
 * Builds a per-endpoint logging middleware that records each
 * request/response pair into request_logs. When shouldLog is false the
 * middleware is a pass-through no-op.
 *
 * Implementation: res.json and res.send are wrapped so the payload and
 * status can be captured at the moment the handler responds; a 'finish'
 * listener is the fallback for responses that bypass both wrappers. The
 * isLogged flag guarantees at most one row per request even when both
 * wrappers fire for the same response.
 */
export const createLoggingMiddleware = (endpointId: string, shouldLog: boolean) => {
  return async (req: ApiKeyRequest, res: Response, next: NextFunction) => {
    // Logging disabled for this endpoint: do nothing.
    if (!shouldLog) {
      return next();
    }
    const startTime = Date.now();
    // Capture original methods
    const originalJson = res.json.bind(res);
    const originalSend = res.send.bind(res);
    let responseData: any = null;
    let isLogged = false;
    // Writes one request_logs row; failures are logged, never thrown.
    const logRequest = async (data: any, status: number, errorMsg: string | null = null) => {
      if (isLogged) return; // Prevent duplicate logging
      isLogged = true;
      const executionTime = Date.now() - startTime;
      const logData: LogData = {
        endpoint_id: endpointId,
        api_key_id: req.apiKey?.id || null,
        method: req.method,
        path: req.path,
        request_params: req.query || {},
        request_body: req.body || {},
        response_status: status,
        response_data: data,
        execution_time: executionTime,
        error_message: errorMsg,
        ip_address: req.ip || req.socket.remoteAddress || 'unknown',
        user_agent: req.headers['user-agent'] || 'unknown',
      };
      try {
        await mainPool.query(
          `INSERT INTO request_logs (
            endpoint_id, api_key_id, method, path,
            request_params, request_body, response_status,
            response_data, execution_time, error_message,
            ip_address, user_agent
          ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)`,
          [
            logData.endpoint_id,
            logData.api_key_id,
            logData.method,
            logData.path,
            JSON.stringify(logData.request_params),
            JSON.stringify(logData.request_body),
            logData.response_status,
            JSON.stringify(logData.response_data),
            logData.execution_time,
            logData.error_message,
            logData.ip_address,
            logData.user_agent,
          ]
        );
      } catch (error) {
        console.error('Failed to log request:', error);
      }
    };
    // Override res.json
    // NOTE: logRequest is intentionally not awaited — the insert runs in
    // the background so the response is never delayed by logging.
    res.json = function (data: any) {
      responseData = data;
      logRequest(data, res.statusCode);
      return originalJson(data);
    };
    // Override res.send
    res.send = function (data: any) {
      responseData = data;
      logRequest(data, res.statusCode);
      return originalSend(data);
    };
    // Handle errors
    // Fallback: log responses that never went through json()/send().
    res.on('finish', () => {
      if (!isLogged && responseData === null) {
        logRequest(null, res.statusCode);
      }
    });
    next();
  };
};

View File

@@ -0,0 +1,81 @@
-- Users table
CREATE TABLE IF NOT EXISTS users (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  username VARCHAR(255) UNIQUE NOT NULL,
  email VARCHAR(255) UNIQUE NOT NULL,
  password_hash VARCHAR(255) NOT NULL,
  role VARCHAR(50) NOT NULL DEFAULT 'user' CHECK (role IN ('admin', 'user')),
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- Folders table (for organizing endpoints)
CREATE TABLE IF NOT EXISTS folders (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  name VARCHAR(255) NOT NULL,
  parent_id UUID REFERENCES folders(id) ON DELETE CASCADE,
  user_id UUID REFERENCES users(id) ON DELETE CASCADE,
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- Databases table (target databases for API endpoints)
CREATE TABLE IF NOT EXISTS databases (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  name VARCHAR(255) NOT NULL UNIQUE,
  type VARCHAR(50) NOT NULL DEFAULT 'postgresql' CHECK (type IN ('postgresql', 'mysql', 'mssql')),
  host VARCHAR(255) NOT NULL,
  port INTEGER NOT NULL,
  database_name VARCHAR(255) NOT NULL,
  username VARCHAR(255) NOT NULL,
  password VARCHAR(255) NOT NULL,
  ssl BOOLEAN DEFAULT false,
  is_active BOOLEAN DEFAULT true,
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- Endpoints table
CREATE TABLE IF NOT EXISTS endpoints (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  name VARCHAR(255) NOT NULL,
  description TEXT,
  method VARCHAR(10) NOT NULL CHECK (method IN ('GET', 'POST', 'PUT', 'DELETE', 'PATCH')),
  path VARCHAR(500) NOT NULL UNIQUE,
  database_id UUID REFERENCES databases(id) ON DELETE RESTRICT,
  sql_query TEXT NOT NULL,
  parameters JSONB DEFAULT '[]'::jsonb,
  folder_id UUID REFERENCES folders(id) ON DELETE SET NULL,
  user_id UUID REFERENCES users(id) ON DELETE CASCADE,
  is_public BOOLEAN DEFAULT false,
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- API Keys table
CREATE TABLE IF NOT EXISTS api_keys (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  name VARCHAR(255) NOT NULL,
  key VARCHAR(500) UNIQUE NOT NULL,
  user_id UUID REFERENCES users(id) ON DELETE CASCADE,
  permissions JSONB DEFAULT '[]'::jsonb,
  is_active BOOLEAN DEFAULT true,
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
  expires_at TIMESTAMP
);

-- Indexes for better performance
CREATE INDEX IF NOT EXISTS idx_folders_parent_id ON folders(parent_id);
CREATE INDEX IF NOT EXISTS idx_folders_user_id ON folders(user_id);
CREATE INDEX IF NOT EXISTS idx_endpoints_folder_id ON endpoints(folder_id);
CREATE INDEX IF NOT EXISTS idx_endpoints_user_id ON endpoints(user_id);
CREATE INDEX IF NOT EXISTS idx_endpoints_database_id ON endpoints(database_id);
CREATE INDEX IF NOT EXISTS idx_endpoints_path ON endpoints(path);
CREATE INDEX IF NOT EXISTS idx_api_keys_key ON api_keys(key);
CREATE INDEX IF NOT EXISTS idx_api_keys_user_id ON api_keys(user_id);
CREATE INDEX IF NOT EXISTS idx_databases_name ON databases(name);

-- Full text search index for endpoints.
-- Fix: description is nullable; without COALESCE a NULL description makes
-- the entire concatenation NULL, silently excluding the row from the index.
CREATE INDEX IF NOT EXISTS idx_endpoints_search ON endpoints USING gin(
  to_tsvector('english', name || ' ' || coalesce(description, '') || ' ' || sql_query)
);

View File

@@ -0,0 +1,28 @@
-- Migration 002: request logging support.

-- Per-endpoint opt-in flag for request logging.
ALTER TABLE endpoints ADD COLUMN IF NOT EXISTS enable_logging BOOLEAN DEFAULT false;
-- Per-API-key opt-in flag for request logging.
ALTER TABLE api_keys ADD COLUMN IF NOT EXISTS enable_logging BOOLEAN DEFAULT false;
-- Create request_logs table: one row per logged request/response pair.
CREATE TABLE IF NOT EXISTS request_logs (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  endpoint_id UUID REFERENCES endpoints(id) ON DELETE CASCADE,
  api_key_id UUID REFERENCES api_keys(id) ON DELETE SET NULL,
  method VARCHAR(10) NOT NULL,
  path VARCHAR(500) NOT NULL,
  request_params JSONB,
  request_body JSONB,
  response_status INTEGER,
  response_data JSONB,
  execution_time INTEGER, -- milliseconds, as written by the logging middleware
  error_message TEXT,
  ip_address VARCHAR(45), -- 45 chars accommodates full IPv6 text form
  user_agent TEXT,
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
-- Create index for faster queries
CREATE INDEX IF NOT EXISTS idx_request_logs_endpoint_id ON request_logs(endpoint_id);
CREATE INDEX IF NOT EXISTS idx_request_logs_api_key_id ON request_logs(api_key_id);
CREATE INDEX IF NOT EXISTS idx_request_logs_created_at ON request_logs(created_at DESC);

View File

@@ -0,0 +1,14 @@
-- Add scripting support for endpoints.
-- Fix: IF NOT EXISTS added to each column so the migration is idempotent,
-- matching migrations 002 and 004 (re-running previously failed with
-- "column already exists").
ALTER TABLE endpoints
ADD COLUMN IF NOT EXISTS execution_type VARCHAR(20) DEFAULT 'sql' CHECK (execution_type IN ('sql', 'script')),
ADD COLUMN IF NOT EXISTS script_language VARCHAR(20) CHECK (script_language IN ('javascript', 'python')),
ADD COLUMN IF NOT EXISTS script_code TEXT,
ADD COLUMN IF NOT EXISTS script_queries JSONB DEFAULT '[]'::jsonb;
-- Catalog comments for documentation.
COMMENT ON COLUMN endpoints.execution_type IS 'Тип выполнения: sql - простой SQL запрос, script - скрипт с несколькими запросами';
COMMENT ON COLUMN endpoints.script_language IS 'Язык скрипта: javascript или python';
COMMENT ON COLUMN endpoints.script_code IS 'Код скрипта (JS или Python)';
COMMENT ON COLUMN endpoints.script_queries IS 'Массив именованных SQL запросов для использования в скрипте: [{"name": "queryName", "sql": "SELECT ..."}]';

View File

@@ -0,0 +1,8 @@
-- Add is_superadmin field to users table
ALTER TABLE users
ADD COLUMN IF NOT EXISTS is_superadmin BOOLEAN DEFAULT FALSE;
COMMENT ON COLUMN users.is_superadmin IS 'Является ли пользователь супер-администратором с полным доступом';
-- Drop the email column: accounts are username-only from this point on
-- (001 created users.email UNIQUE NOT NULL; the seed and user controllers
-- no longer reference it).
ALTER TABLE users
DROP COLUMN IF EXISTS email;

View File

@@ -0,0 +1,24 @@
import { mainPool } from '../config/database';
import * as fs from 'fs';
import * as path from 'path';
/**
 * Applies every .sql migration in this directory, in lexicographic
 * (numeric-prefix) order.
 *
 * Fix: previously only 001_initial_schema.sql was executed, so migrations
 * 002-004 were silently skipped.
 *
 * NOTE(review): there is no schema_migrations bookkeeping table, so every
 * run re-executes all files — confirm each migration file is idempotent
 * (IF NOT EXISTS / IF EXISTS) before adding new ones.
 */
async function runMigrations() {
  console.log('Running migrations...');
  try {
    const migrationFiles = fs
      .readdirSync(__dirname)
      .filter((file) => file.endsWith('.sql'))
      .sort();
    for (const file of migrationFiles) {
      console.log(`Applying ${file}...`);
      const sql = fs.readFileSync(path.join(__dirname, file), 'utf-8');
      await mainPool.query(sql);
    }
    console.log('✅ Migrations completed successfully');
    process.exit(0);
  } catch (error) {
    console.error('❌ Migration failed:', error);
    process.exit(1);
  }
}
runMigrations();

View File

@@ -0,0 +1,38 @@
import { mainPool } from '../config/database';
import bcrypt from 'bcrypt';
/**
 * One-shot seed script: creates the default superadmin account
 * (admin / admin) unless any user already exists.
 */
async function seed() {
  console.log('🌱 Starting seed...');
  try {
    // Abort if the users table is already populated.
    const countResult = await mainPool.query('SELECT COUNT(*) FROM users');
    if (parseInt(countResult.rows[0].count) > 0) {
      console.log('⚠️ Users already exist. Skipping seed.');
      process.exit(0);
    }
    // Create the default superadmin with a bcrypt-hashed password.
    const passwordHash = await bcrypt.hash('admin', 10);
    await mainPool.query(
      `INSERT INTO users (username, password_hash, role, is_superadmin)
       VALUES ($1, $2, $3, $4)`,
      ['admin', passwordHash, 'admin', true]
    );
    console.log('✅ Default superadmin created:');
    console.log(' Username: admin');
    console.log(' Password: admin');
    console.log(' ⚠️ ВАЖНО: Смените пароль после первого входа!');
    process.exit(0);
  } catch (error) {
    console.error('❌ Seed failed:', error);
    process.exit(1);
  }
}
seed();

View File

@@ -0,0 +1,19 @@
// Routes: API-key management (CRUD). Every route requires a valid JWT.
import express from 'express';
import { authMiddleware } from '../middleware/auth';
import {
  getApiKeys,
  createApiKey,
  updateApiKey,
  deleteApiKey,
} from '../controllers/apiKeyController';
const router = express.Router();
// All API-key routes require an authenticated user.
router.use(authMiddleware);
router.get('/', getApiKeys); // list keys
router.post('/', createApiKey); // create key
router.put('/:id', updateApiKey); // update key
router.delete('/:id', deleteApiKey); // delete key
export default router;

View File

@@ -0,0 +1,10 @@
// Routes: authentication — public login plus a JWT-protected "who am I".
import express from 'express';
import { login, getMe } from '../controllers/authController';
import { authMiddleware } from '../middleware/auth';
const router = express.Router();
router.post('/login', login); // public: exchange credentials for a JWT
router.get('/me', authMiddleware, getMe); // current user info (JWT required)
export default router;

View File

@@ -0,0 +1,24 @@
// Routes: target-database connection management. JWT + superadmin only.
import express from 'express';
import { authMiddleware, superAdminOnly } from '../middleware/auth';
import {
  getDatabases,
  getDatabase,
  createDatabase,
  updateDatabase,
  deleteDatabase,
  testDatabaseConnection,
} from '../controllers/databaseManagementController';
const router = express.Router();
// Both guards apply to every route below.
router.use(authMiddleware);
router.use(superAdminOnly);
router.get('/', getDatabases);
router.get('/:id', getDatabase);
router.post('/', createDatabase);
router.put('/:id', updateDatabase);
router.delete('/:id', deleteDatabase);
router.get('/:id/test', testDatabaseConnection); // connectivity check
export default router;

View File

@@ -0,0 +1,19 @@
// Routes: read-only database introspection (list, connectivity test,
// tables, table schema). Requires a valid JWT.
import express from 'express';
import { authMiddleware } from '../middleware/auth';
import {
  getDatabases,
  testDatabaseConnection,
  getDatabaseTables,
  getTableSchema,
} from '../controllers/databaseController';
const router = express.Router();
router.use(authMiddleware);
router.get('/', getDatabases);
router.get('/:databaseId/test', testDatabaseConnection);
router.get('/:databaseId/tables', getDatabaseTables);
router.get('/:databaseId/tables/:tableName/schema', getTableSchema);
export default router;

View File

@@ -0,0 +1,11 @@
// Routes: dynamic API execution — every request under this router is
// matched against user-defined endpoints stored in the database.
import express from 'express';
import { apiKeyMiddleware } from '../middleware/apiKey';
import { executeDynamicEndpoint } from '../controllers/dynamicApiController';
const router = express.Router();
// This route handles all dynamically created API endpoints
// Catch all routes after /api/v1/
// apiKeyMiddleware only validates a key when one is supplied; the
// controller enforces whether a key is required for the matched endpoint.
router.all('*', apiKeyMiddleware, executeDynamicEndpoint);
export default router;

View File

@@ -0,0 +1,133 @@
// Routes: endpoint definition CRUD plus an ad-hoc test runner. All routes
// require a valid JWT. The @swagger blocks below are parsed by
// swagger-jsdoc and must stay in sync with the handlers.
import express from 'express';
import { authMiddleware } from '../middleware/auth';
import {
  getEndpoints,
  getEndpoint,
  createEndpoint,
  updateEndpoint,
  deleteEndpoint,
  testEndpoint,
} from '../controllers/endpointController';
const router = express.Router();
router.use(authMiddleware);
/**
 * @swagger
 * /api/endpoints:
 *   get:
 *     tags: [Endpoints]
 *     summary: Get all endpoints
 *     security:
 *       - bearerAuth: []
 *     parameters:
 *       - in: query
 *         name: search
 *         schema:
 *           type: string
 *       - in: query
 *         name: folder_id
 *         schema:
 *           type: string
 *     responses:
 *       200:
 *         description: List of endpoints
 */
router.get('/', getEndpoints);
/**
 * @swagger
 * /api/endpoints/{id}:
 *   get:
 *     tags: [Endpoints]
 *     summary: Get endpoint by ID
 *     security:
 *       - bearerAuth: []
 *     parameters:
 *       - in: path
 *         name: id
 *         required: true
 *         schema:
 *           type: string
 *     responses:
 *       200:
 *         description: Endpoint details
 */
router.get('/:id', getEndpoint);
/**
 * @swagger
 * /api/endpoints:
 *   post:
 *     tags: [Endpoints]
 *     summary: Create new endpoint
 *     security:
 *       - bearerAuth: []
 *     requestBody:
 *       required: true
 *       content:
 *         application/json:
 *           schema:
 *             type: object
 *     responses:
 *       201:
 *         description: Endpoint created
 */
router.post('/', createEndpoint);
/**
 * @swagger
 * /api/endpoints/{id}:
 *   put:
 *     tags: [Endpoints]
 *     summary: Update endpoint
 *     security:
 *       - bearerAuth: []
 *     parameters:
 *       - in: path
 *         name: id
 *         required: true
 *         schema:
 *           type: string
 *     responses:
 *       200:
 *         description: Endpoint updated
 */
router.put('/:id', updateEndpoint);
/**
 * @swagger
 * /api/endpoints/{id}:
 *   delete:
 *     tags: [Endpoints]
 *     summary: Delete endpoint
 *     security:
 *       - bearerAuth: []
 *     parameters:
 *       - in: path
 *         name: id
 *         required: true
 *         schema:
 *           type: string
 *     responses:
 *       200:
 *         description: Endpoint deleted
 */
router.delete('/:id', deleteEndpoint);
/**
 * @swagger
 * /api/endpoints/test:
 *   post:
 *     tags: [Endpoints]
 *     summary: Test SQL query
 *     security:
 *       - bearerAuth: []
 *     responses:
 *       200:
 *         description: Query test result
 */
// POST /test does not clash with the parameterized routes above: the only
// other POST is the exact path '/'.
router.post('/test', testEndpoint);
export default router;

View File

@@ -0,0 +1,21 @@
import express from 'express';
import { authMiddleware } from '../middleware/auth';
import {
  getFolders,
  getFolder,
  createFolder,
  updateFolder,
  deleteFolder,
} from '../controllers/folderController';

// CRUD routes for endpoint folders; every route requires authentication.
const router = express.Router();
router.use(authMiddleware);

router.route('/').get(getFolders).post(createFolder);
router.route('/:id').get(getFolder).put(updateFolder).delete(deleteFolder);

export default router;

View File

@@ -0,0 +1,14 @@
import express from 'express';
import { authMiddleware } from '../middleware/auth';
import { getLogs, getLogById, deleteLog, clearLogs } from '../controllers/logsController';

// Request-log inspection routes; all require authentication.
const router = express.Router();
router.use(authMiddleware);

router.get('/', getLogs);
// '/clear' is POST-only, so it does not collide with the ':id' GET/DELETE routes.
router.post('/clear', clearLogs);
router.route('/:id').get(getLogById).delete(deleteLog);

export default router;

View File

@@ -0,0 +1,20 @@
import express from 'express';
import { authMiddleware, superAdminOnly } from '../middleware/auth';
import {
  getUsers,
  createUser,
  updateUser,
  deleteUser,
} from '../controllers/userController';

// User administration routes: require a valid JWT AND super-admin rights.
const router = express.Router();
router.use(authMiddleware, superAdminOnly);

router.route('/').get(getUsers).post(createUser);
router.route('/:id').put(updateUser).delete(deleteUser);

export default router;

179
backend/src/server.ts Normal file
View File

@@ -0,0 +1,179 @@
import express, { Express, Request, Response } from 'express';
import cors from 'cors';
import helmet from 'helmet';
import rateLimit from 'express-rate-limit';
import swaggerUi from 'swagger-ui-express';
import path from 'path';
import { createProxyMiddleware } from 'http-proxy-middleware';
import { config } from './config/environment';
import { initializeDatabase } from './config/database';
import { generateDynamicSwagger } from './config/dynamicSwagger';
import { databasePoolManager } from './services/DatabasePoolManager';
// Routes
import authRoutes from './routes/auth';
import endpointRoutes from './routes/endpoints';
import folderRoutes from './routes/folders';
import apiKeyRoutes from './routes/apiKeys';
import databaseRoutes from './routes/databases';
import databaseManagementRoutes from './routes/databaseManagement';
import userRoutes from './routes/users';
import logsRoutes from './routes/logs';
import dynamicRoutes from './routes/dynamic';
const app: Express = express();
// Middleware
app.use(helmet({
contentSecurityPolicy: config.nodeEnv === 'production' ? {
directives: {
defaultSrc: ["'self'"],
scriptSrc: ["'self'", "'unsafe-inline'", "blob:"],
workerSrc: ["'self'", "blob:"],
styleSrc: ["'self'", "'unsafe-inline'"],
imgSrc: ["'self'", "data:", "blob:"],
fontSrc: ["'self'", "data:"],
connectSrc: ["'self'"],
},
} : false,
}));
app.use(cors());
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
// Rate limiting
const limiter = rateLimit({
windowMs: config.rateLimit.windowMs,
max: config.rateLimit.maxRequests,
message: 'Too many requests from this IP, please try again later.',
});
app.use('/api/', limiter);
// Dynamic Swagger documentation
app.get('/api-docs/swagger.json', async (_req, res) => {
try {
const spec = await generateDynamicSwagger();
res.json(spec);
} catch (error) {
console.error('Error generating swagger spec:', error);
res.status(500).json({ error: 'Failed to generate API documentation' });
}
});
app.use('/api-docs', swaggerUi.serve);
app.get('/api-docs', async (_req, res, next) => {
try {
const spec = await generateDynamicSwagger();
const html = swaggerUi.generateHTML(spec, {
customCss: '.swagger-ui .topbar { display: none }',
customSiteTitle: 'KIS API Builder - Документация',
});
res.send(html);
} catch (error) {
console.error('Error generating swagger UI:', error);
next(error);
}
});
// Health check
app.get('/health', (_req: Request, res: Response) => {
res.json({ status: 'OK', timestamp: new Date().toISOString() });
});
// API Routes
app.use('/api/auth', authRoutes);
app.use('/api/endpoints', endpointRoutes);
app.use('/api/folders', folderRoutes);
app.use('/api/keys', apiKeyRoutes);
app.use('/api/databases', databaseRoutes);
app.use('/api/db-management', databaseManagementRoutes);
app.use('/api/users', userRoutes);
app.use('/api/logs', logsRoutes);
// Dynamic API routes (user-created endpoints)
app.use('/api/v1', dynamicRoutes);
// Serve frontend static files in production
if (config.nodeEnv === 'production') {
const frontendPath = path.join(__dirname, '../../frontend/dist');
app.use(express.static(frontendPath));
// SPA fallback - all non-API routes serve index.html
app.get('*', (req: Request, res: Response) => {
if (!req.path.startsWith('/api/') && !req.path.startsWith('/api-docs') && req.path !== '/health') {
res.sendFile(path.join(frontendPath, 'index.html'));
} else {
res.status(404).json({ error: 'API route not found' });
}
});
} else {
// Development mode - proxy to Vite dev server for non-API routes
const viteProxy = createProxyMiddleware({
target: 'http://localhost:5173',
changeOrigin: true,
ws: true, // Enable WebSocket proxying for HMR
});
app.use((req: Request, res: Response, next: any) => {
// If it's an API route or swagger, handle it normally
if (req.path.startsWith('/api/') || req.path.startsWith('/api-docs') || req.path === '/health') {
return next();
}
// Otherwise, proxy to Vite dev server
return viteProxy(req, res, next);
});
// 404 handler for API routes only
app.use((req: Request, res: Response) => {
if (req.path.startsWith('/api/') || req.path.startsWith('/api-docs') || req.path === '/health') {
res.status(404).json({ error: 'API route not found' });
}
});
}
// Error handler
app.use((err: any, _req: Request, res: Response, _next: any) => {
console.error('Error:', err);
res.status(err.status || 500).json({
error: err.message || 'Internal server error',
});
});
// Start server
const startServer = async () => {
try {
await initializeDatabase();
await databasePoolManager.initialize();
app.listen(config.port, () => {
console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
console.log(`🚀 KIS API Builder Server running on port ${config.port}`);
console.log(`🌐 Application: http://localhost:${config.port}`);
console.log(`📚 Swagger docs: http://localhost:${config.port}/api-docs`);
console.log(`🏥 Health check: http://localhost:${config.port}/health`);
console.log(`🌍 Environment: ${config.nodeEnv}`);
if (config.nodeEnv === 'development') {
console.log('');
console.log('⚠️ Dev Mode: Backend proxies to Vite dev server (port 5173)');
console.log(' Make sure Vite dev server is running: npm run dev (in frontend/)');
}
console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
});
} catch (error) {
console.error('Failed to start server:', error);
process.exit(1);
}
};
// Graceful shutdown
process.on('SIGTERM', () => {
console.log('SIGTERM signal received: closing HTTP server');
process.exit(0);
});
process.on('SIGINT', () => {
console.log('SIGINT signal received: closing HTTP server');
process.exit(0);
});
startServer();

View File

@@ -0,0 +1,146 @@
import { Pool } from 'pg';
import { DatabaseConfig } from '../types';
import { mainPool } from '../config/database';
/**
 * Maintains one pg connection pool per configured external database.
 * Pool configurations live in the `databases` table of the main database.
 */
class DatabasePoolManager {
  private pools: Map<string, Pool> = new Map();

  /** Creates pools for every active database registered in the main DB. */
  async initialize(): Promise<void> {
    // Load databases from DB instead of env
    await this.loadDatabasesFromDB();
  }

  /** Fetches all active database rows and opens a pool for each. */
  private async loadDatabasesFromDB(): Promise<void> {
    try {
      const result = await mainPool.query(
        'SELECT * FROM databases WHERE is_active = true'
      );
      for (const row of result.rows) {
        this.addPool(this.rowToConfig(row));
      }
      console.log(`✅ Loaded ${result.rows.length} database connection(s) from DB`);
    } catch (error) {
      console.error('❌ Failed to load databases from DB:', error);
    }
  }

  /** Maps a `databases` table row to a DatabaseConfig. */
  private rowToConfig(row: any): DatabaseConfig {
    return {
      id: row.id,
      name: row.name,
      type: row.type,
      host: row.host,
      port: row.port,
      database_name: row.database_name,
      username: row.username,
      password: row.password,
      ssl: row.ssl,
      is_active: row.is_active,
    };
  }

  /** Opens a pool for the given config; no-op if one already exists. */
  addPool(dbConfig: DatabaseConfig): void {
    if (this.pools.has(dbConfig.id)) {
      console.warn(`Pool with id ${dbConfig.id} already exists. Skipping.`);
      return;
    }
    const pool = new Pool({
      host: dbConfig.host,
      port: dbConfig.port,
      database: dbConfig.database_name,
      user: dbConfig.username,
      password: dbConfig.password,
      // NOTE(review): rejectUnauthorized: false disables certificate
      // verification — confirm that is intended for ssl-enabled targets.
      ssl: dbConfig.ssl ? { rejectUnauthorized: false } : false,
      max: 10,
      idleTimeoutMillis: 30000,
      connectionTimeoutMillis: 2000,
    });
    pool.on('error', (err) => {
      console.error(`Database pool error for ${dbConfig.id}:`, err);
    });
    this.pools.set(dbConfig.id, pool);
    console.log(`✅ Pool created for database: ${dbConfig.name} (${dbConfig.id})`);
  }

  /** Removes the pool for a database and closes it in the background. */
  removePool(databaseId: string): void {
    const pool = this.pools.get(databaseId);
    if (pool) {
      this.pools.delete(databaseId);
      // pool.end() is async; catch so a failed teardown does not become
      // an unhandled promise rejection.
      pool.end().catch((err) => {
        console.error(`Error closing pool for ${databaseId}:`, err);
      });
      console.log(`Pool removed for database: ${databaseId}`);
    }
  }

  /** Returns the pool for a database id, if one is registered. */
  getPool(databaseId: string): Pool | undefined {
    return this.pools.get(databaseId);
  }

  /**
   * Lists all active databases (credentials deliberately omitted).
   * NOTE(review): the SELECT omits username/password/ssl, so the rows do
   * not fully satisfy DatabaseConfig — consider an Omit<...> return type.
   */
  async getAllDatabaseConfigs(): Promise<DatabaseConfig[]> {
    try {
      const result = await mainPool.query(
        'SELECT id, name, type, host, port, database_name, is_active FROM databases WHERE is_active = true'
      );
      return result.rows;
    } catch (error) {
      console.error('Error fetching databases:', error);
      return [];
    }
  }

  /** True if a connection can be checked out of the database's pool. */
  async testConnection(databaseId: string): Promise<boolean> {
    const pool = this.getPool(databaseId);
    if (!pool) {
      return false;
    }
    try {
      const client = await pool.connect();
      client.release();
      return true;
    } catch (error) {
      console.error(`Connection test failed for ${databaseId}:`, error);
      return false;
    }
  }

  /** Drops the existing pool and recreates it from the current DB row. */
  async reloadPool(databaseId: string): Promise<void> {
    this.removePool(databaseId);
    const result = await mainPool.query(
      'SELECT * FROM databases WHERE id = $1 AND is_active = true',
      [databaseId]
    );
    if (result.rows.length > 0) {
      this.addPool(this.rowToConfig(result.rows[0]));
    }
  }

  /** Closes every pool; used during graceful shutdown. */
  async closeAll(): Promise<void> {
    const promises = Array.from(this.pools.values()).map((pool) => pool.end());
    await Promise.all(promises);
    this.pools.clear();
    console.log('All database pools closed');
  }
}
export const databasePoolManager = new DatabasePoolManager();

View File

@@ -0,0 +1,218 @@
import { spawn } from 'child_process';
import { sqlExecutor } from './SqlExecutor';
import { ScriptQuery, EndpointParameter } from '../types';
/** Everything a user script needs to execute: target DB, named queries, and request input. */
interface ScriptContext {
  databaseId: string; // default database for queries that carry no database_id of their own
  scriptQueries: ScriptQuery[]; // named SQL snippets the script can run via execQuery(name)
  requestParams: Record<string, any>; // merged request parameters handed to the script as `params`
  endpointParameters: EndpointParameter[]; // declared parameter schema of the owning endpoint
}
/**
 * Runs user-supplied endpoint scripts (JavaScript in-process, Python in a
 * child process) and brokers their SQL access through sqlExecutor.
 */
export class ScriptExecutor {
  /**
   * Replaces named `$param` placeholders in `sql` with positional `$1..$n`
   * and collects the matching values (missing params become null).
   */
  private bindNamedParams(
    sql: string,
    params: Record<string, any>
  ): { text: string; values: any[] } {
    const matches = sql.match(/\$\w+/g) || [];
    const names = [...new Set(matches.map((m) => m.substring(1)))];
    let text = sql;
    const values: any[] = [];
    names.forEach((name, index) => {
      // \b keeps $id from also rewriting the prefix of $ids.
      text = text.replace(new RegExp(`\\$${name}\\b`, 'g'), `$${index + 1}`);
      const value = params[name];
      values.push(value !== undefined ? value : null);
    });
    return { text, values };
  }

  /**
   * Resolves a named script query, binds parameters and executes it.
   * Shared by the JavaScript and Python execution paths.
   */
  private async runNamedQuery(
    context: ScriptContext,
    queryName: string,
    additionalParams: Record<string, any> = {}
  ): Promise<{ data: any[]; rowCount: number; executionTime: number }> {
    const query = context.scriptQueries.find((q) => q.name === queryName);
    if (!query) {
      throw new Error(`Query '${queryName}' not found`);
    }
    const allParams = { ...context.requestParams, ...additionalParams };
    const { text, values } = this.bindNamedParams(query.sql, allParams);
    // Per-query database override, falling back to the endpoint's database.
    const dbId = (query as any).database_id || context.databaseId;
    const result = await sqlExecutor.executeQuery(dbId, text, values);
    return {
      data: result.rows,
      rowCount: result.rowCount,
      executionTime: result.executionTime,
    };
  }

  /**
   * Runs user JavaScript in-process as an async function with `params` and
   * `execQuery` in scope. Enforces a 30s timeout.
   * NOTE(review): AsyncFunction provides no sandboxing — the script runs with
   * full process privileges; confirm scripts are restricted to trusted users.
   */
  async executeJavaScript(code: string, context: ScriptContext): Promise<any> {
    try {
      const execQuery = (queryName: string, additionalParams: Record<string, any> = {}) =>
        this.runNamedQuery(context, queryName, additionalParams);
      const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor;
      const userFunction = new AsyncFunction('params', 'execQuery', code);
      let timer: ReturnType<typeof setTimeout> | undefined;
      const timeoutPromise = new Promise((_, reject) => {
        timer = setTimeout(() => reject(new Error('Script execution timeout (30s)')), 30000);
      });
      try {
        return await Promise.race([
          userFunction(context.requestParams, execQuery),
          timeoutPromise,
        ]);
      } finally {
        // Always disarm the timer so it cannot keep the event loop alive
        // or fire a spurious rejection after the script has finished.
        if (timer) clearTimeout(timer);
      }
    } catch (error: any) {
      throw new Error(`JavaScript execution error: ${error.message}`);
    }
  }

  /**
   * Runs user Python in a child process. SQL access uses a simple IPC
   * protocol: the wrapper prints __QUERY_REQUEST__ frames on stderr and
   * reads JSON responses from stdin; the final stdout line is the result.
   */
  async executePython(code: string, context: ScriptContext): Promise<any> {
    return new Promise((resolve, reject) => {
      const wrapperCode = `
import json
import sys
from datetime import datetime

# Request parameters
params = ${JSON.stringify(context.requestParams)}

# Ask the parent process to execute a named SQL query
def exec_query(query_name, additional_params=None):
    if additional_params is None:
        additional_params = {}
    request = {
        'type': 'exec_query',
        'query_name': query_name,
        'additional_params': additional_params
    }
    print('__QUERY_REQUEST__' + json.dumps(request) + '__END_REQUEST__', file=sys.stderr, flush=True)
    response_line = input()
    response = json.loads(response_line)
    if 'error' in response:
        raise Exception(response['error'])
    return response

# Wrapper around the user code
def __user_script():
${code.split('\n').map((line) => (line.trim() === '' ? '' : '    ' + line)).join('\n')}

result = __user_script()
print(json.dumps(result))
`;
      // NOTE(review): assumes a `python` executable on PATH; some systems
      // only ship `python3` — confirm deployment environment.
      const python = spawn('python', ['-c', wrapperCode]);
      let output = '';
      let errorOutput = '';

      python.stdout.on('data', (data) => {
        output += data.toString();
      });

      python.stderr.on('data', async (data) => {
        const text = data.toString();
        errorOutput += text;
        // Each __QUERY_REQUEST__...__END_REQUEST__ frame is a DB call.
        const requestMatches = text.matchAll(/__QUERY_REQUEST__(.*?)__END_REQUEST__/g);
        for (const match of requestMatches) {
          try {
            const request = JSON.parse(match[1]);
            const result = await this.runNamedQuery(
              context,
              request.query_name,
              request.additional_params || {}
            );
            python.stdin.write(JSON.stringify(result) + '\n');
          } catch (error: any) {
            python.stdin.write(JSON.stringify({ error: error.message }) + '\n');
          }
        }
      });

      // Kill runaway scripts after 30 seconds.
      const timeout = setTimeout(() => {
        python.kill();
        reject(new Error('Python script execution timeout (30s)'));
      }, 30000);

      python.on('close', (exitCode) => {
        // Disarm the watchdog so it cannot fire after completion.
        clearTimeout(timeout);
        if (exitCode !== 0) {
          reject(new Error(`Python execution error: ${errorOutput}`));
        } else {
          try {
            // The last stdout line is the JSON-encoded script result.
            const lines = output.trim().split('\n');
            const result = JSON.parse(lines[lines.length - 1]);
            resolve(result);
          } catch {
            reject(new Error(`Failed to parse Python output: ${output}`));
          }
        }
      });
    });
  }

  /**
   * Dispatches execution to the requested language runtime.
   * @throws on unsupported languages.
   */
  async execute(
    language: 'javascript' | 'python',
    code: string,
    context: ScriptContext
  ): Promise<any> {
    if (language === 'javascript') {
      return this.executeJavaScript(code, context);
    } else if (language === 'python') {
      return this.executePython(code, context);
    } else {
      throw new Error(`Unsupported script language: ${language}`);
    }
  }
}
export const scriptExecutor = new ScriptExecutor();

View File

@@ -0,0 +1,96 @@
import { QueryResult } from '../types';
import { databasePoolManager } from './DatabasePoolManager';
/**
 * Executes SQL against one of the dynamically managed database pools.
 */
export class SqlExecutor {
  /**
   * Runs a parameterised query on the pool registered under `databaseId`.
   * @throws when the database is unknown, validation fails, or the query errors.
   */
  async executeQuery(
    databaseId: string,
    sqlQuery: string,
    params: any[] = []
  ): Promise<QueryResult> {
    const pool = databasePoolManager.getPool(databaseId);
    if (!pool) {
      throw new Error(`Database with id ${databaseId} not found or not configured`);
    }
    const startTime = Date.now();
    try {
      // Security: Prevent multiple statements and dangerous commands
      this.validateQuery(sqlQuery);
      const result = await pool.query(sqlQuery, params);
      return {
        rows: result.rows,
        rowCount: result.rowCount || 0,
        executionTime: Date.now() - startTime,
      };
    } catch (error: any) {
      console.error('SQL Execution Error:', error);
      throw new Error(`SQL Error: ${error.message}`);
    }
  }

  /**
   * Best-effort lexical sanity check. Rejects obvious multi-statement input
   * and warns on destructive verbs. NOTE(review): purely lexical — a ';'
   * inside a string literal is a false positive, and this is no substitute
   * for database-level permissions.
   */
  private validateQuery(sqlQuery: string): void {
    const normalized = sqlQuery.trim().toLowerCase();
    // Allow at most one ';' and only as the final character.
    const withoutTrailing = normalized.endsWith(';')
      ? normalized.slice(0, -1)
      : normalized;
    if (withoutTrailing.includes(';')) {
      throw new Error('Multiple SQL statements are not allowed');
    }
    // Warn (not block) on destructive commands; tighten to a hard error
    // once a whitelist/permission system for write operations exists.
    const dangerousCommands = ['drop', 'truncate', 'delete from', 'alter', 'create', 'grant', 'revoke'];
    if (dangerousCommands.some((cmd) => normalized.startsWith(cmd))) {
      console.warn('Potentially dangerous query detected:', sqlQuery);
    }
  }

  /** Runs the query and reports success/failure instead of throwing. */
  async testQuery(databaseId: string, sqlQuery: string): Promise<{ success: boolean; error?: string }> {
    try {
      await this.executeQuery(databaseId, sqlQuery);
      return { success: true };
    } catch (error: any) {
      return { success: false, error: error.message };
    }
  }

  /** Returns column metadata for a table via information_schema. */
  async getTableSchema(databaseId: string, tableName: string): Promise<any[]> {
    const query = `
      SELECT
        column_name,
        data_type,
        is_nullable,
        column_default
      FROM
        information_schema.columns
      WHERE
        table_name = $1
      ORDER BY
        ordinal_position;
    `;
    const result = await this.executeQuery(databaseId, query, [tableName]);
    return result.rows;
  }

  /** Lists table names in the public schema, sorted alphabetically. */
  async getAllTables(databaseId: string): Promise<string[]> {
    const query = `
      SELECT table_name
      FROM information_schema.tables
      WHERE table_schema = 'public'
      ORDER BY table_name;
    `;
    const result = await this.executeQuery(databaseId, query);
    return result.rows.map(row => row.table_name);
  }
}
export const sqlExecutor = new SqlExecutor();

View File

@@ -0,0 +1,96 @@
/** Connection settings for an external database registered in the app. */
export interface DatabaseConfig {
  id: string;
  name: string;
  type: 'postgresql' | 'mysql' | 'mssql';
  host: string;
  port: number;
  database_name: string;
  username: string;
  password: string; // NOTE(review): appears to be stored as-is in the DB — confirm encryption at rest
  ssl: boolean; // when true, pools connect with rejectUnauthorized: false
  is_active?: boolean; // inactive databases get no connection pool
  created_at?: Date;
  updated_at?: Date;
}
/** Application account row. */
export interface User {
  id: string;
  username: string;
  password_hash: string; // hashed credential; never the raw password
  role: 'admin' | 'user';
  is_superadmin: boolean; // grants access to the /api/users admin routes
  created_at: Date;
  updated_at: Date;
}
/** API key granting access to a subset of user-created endpoints. */
export interface ApiKey {
  id: string;
  name: string;
  key: string;
  user_id: string; // owner of the key
  permissions: string[]; // Array of endpoint IDs
  is_active: boolean;
  enable_logging: boolean; // when true, requests made with this key are logged
  created_at: Date;
  expires_at: Date | null; // null = never expires
}
/** Hierarchical folder for organizing endpoints. */
export interface Folder {
  id: string;
  name: string;
  parent_id: string | null; // null for top-level folders
  user_id: string; // owner
  created_at: Date;
  updated_at: Date;
}
/**
 * A user-defined API endpoint served under /api/v1. Executes either a
 * single SQL query or a script (see execution_type).
 */
export interface Endpoint {
  id: string;
  name: string;
  description: string;
  method: 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH';
  path: string; // path under the dynamic /api/v1 prefix
  database_id: string; // default database the query/script runs against
  sql_query: string; // used when execution_type === 'sql'
  parameters: EndpointParameter[];
  folder_id: string | null;
  user_id: string; // owner
  is_public: boolean;
  enable_logging: boolean;
  execution_type: 'sql' | 'script';
  // The script_* fields apply only when execution_type === 'script'.
  script_language?: 'javascript' | 'python';
  script_code?: string;
  script_queries?: ScriptQuery[]; // named queries callable from the script
  created_at: Date;
  updated_at: Date;
}
/** Declared input parameter of a dynamic endpoint. */
export interface EndpointParameter {
  name: string;
  type: 'string' | 'number' | 'boolean' | 'date';
  required: boolean;
  default_value?: any; // used when the caller omits an optional parameter
  description?: string;
  in: 'query' | 'body' | 'path'; // where the parameter is read from
}
/** Named SQL snippet callable from endpoint scripts via execQuery(name). */
export interface ScriptQuery {
  name: string;
  sql: string; // may contain $param placeholders bound from request params
  database_id?: string; // overrides the endpoint's default database
}
/** Result of a SQL execution, as returned by SqlExecutor. */
export interface QueryResult {
  rows: any[];
  rowCount: number;
  executionTime: number; // milliseconds
}
/** OpenAPI operation object fragment used when generating dynamic docs. */
export interface SwaggerEndpoint {
  tags: string[];
  summary: string;
  description: string;
  parameters: any[];
  responses: any;
  security?: any[]; // e.g. bearerAuth / apiKey requirements
}