// backend/src/controllers/schemaController.ts
import { Request, Response } from 'express';
import { mainPool } from '../config/database';
import { databasePoolManager } from '../services/DatabasePoolManager';

interface ColumnInfo {
|
|
name: string;
|
|
type: string;
|
|
nullable: boolean;
|
|
default_value: string | null;
|
|
is_primary: boolean;
|
|
comment: string | null;
|
|
}
interface ForeignKey {
|
|
column: string;
|
|
references_table: string;
|
|
references_column: string;
|
|
constraint_name: string;
|
|
}
interface TableInfo {
|
|
name: string;
|
|
schema: string;
|
|
comment: string | null;
|
|
columns: ColumnInfo[];
|
|
foreign_keys: ForeignKey[];
|
|
}
interface SchemaData {
|
|
tables: TableInfo[];
|
|
updated_at: string;
|
|
}
// Parse PostgreSQL schema
|
|
async function parsePostgresSchema(databaseId: string): Promise<SchemaData> {
|
|
const startTime = Date.now();
|
|
console.log(`[Schema] Starting schema parse for database ${databaseId}`);
|
|
|
|
const pool = databasePoolManager.getPool(databaseId);
|
|
if (!pool) {
|
|
throw new Error('Database not found or not active');
|
|
}
|
|
|
|
console.log(`[Schema] Fetching tables list...`);
|
|
const tablesStartTime = Date.now();
|
|
|
|
// Get all tables with comments via pg_catalog
|
|
const tablesResult = await pool.query(`
|
|
SELECT
|
|
t.table_schema,
|
|
t.table_name,
|
|
pg_catalog.obj_description(c.oid, 'pg_class') as table_comment
|
|
FROM information_schema.tables t
|
|
LEFT JOIN pg_catalog.pg_class c ON c.relname = t.table_name
|
|
LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace AND n.nspname = t.table_schema
|
|
WHERE t.table_schema NOT IN ('pg_catalog', 'information_schema')
|
|
AND t.table_type = 'BASE TABLE'
|
|
ORDER BY t.table_schema, t.table_name
|
|
`);
|
|
|
|
console.log(`[Schema] Found ${tablesResult.rows.length} tables in ${Date.now() - tablesStartTime}ms`);
|
|
|
|
const tables: TableInfo[] = [];
|
|
let processedCount = 0;
|
|
|
|
for (const table of tablesResult.rows) {
|
|
processedCount++;
|
|
if (processedCount % 50 === 0 || processedCount === tablesResult.rows.length) {
|
|
console.log(`[Schema] Processing table ${processedCount}/${tablesResult.rows.length}: ${table.table_schema}.${table.table_name}`);
|
|
}
|
|
|
|
// Get columns for each table
|
|
const columnsResult = await pool.query(`
|
|
SELECT
|
|
c.column_name,
|
|
c.data_type,
|
|
c.is_nullable,
|
|
c.column_default,
|
|
CASE WHEN pk.column_name IS NOT NULL THEN true ELSE false END as is_primary,
|
|
pg_catalog.col_description(pc.oid, c.ordinal_position) as column_comment
|
|
FROM information_schema.columns c
|
|
LEFT JOIN pg_catalog.pg_class pc ON pc.relname = c.table_name
|
|
LEFT JOIN pg_catalog.pg_namespace pn ON pn.oid = pc.relnamespace AND pn.nspname = c.table_schema
|
|
LEFT JOIN (
|
|
SELECT ku.column_name, ku.table_schema, ku.table_name
|
|
FROM information_schema.table_constraints tc
|
|
JOIN information_schema.key_column_usage ku
|
|
ON tc.constraint_name = ku.constraint_name
|
|
AND tc.table_schema = ku.table_schema
|
|
WHERE tc.constraint_type = 'PRIMARY KEY'
|
|
) pk ON c.column_name = pk.column_name
|
|
AND c.table_schema = pk.table_schema
|
|
AND c.table_name = pk.table_name
|
|
WHERE c.table_schema = $1 AND c.table_name = $2
|
|
ORDER BY c.ordinal_position
|
|
`, [table.table_schema, table.table_name]);
|
|
|
|
// Get foreign keys
|
|
const fkResult = await pool.query(`
|
|
SELECT
|
|
kcu.column_name,
|
|
ccu.table_name AS references_table,
|
|
ccu.column_name AS references_column,
|
|
tc.constraint_name
|
|
FROM information_schema.table_constraints tc
|
|
JOIN information_schema.key_column_usage kcu
|
|
ON tc.constraint_name = kcu.constraint_name
|
|
AND tc.table_schema = kcu.table_schema
|
|
JOIN information_schema.constraint_column_usage ccu
|
|
ON ccu.constraint_name = tc.constraint_name
|
|
AND ccu.table_schema = tc.table_schema
|
|
WHERE tc.constraint_type = 'FOREIGN KEY'
|
|
AND tc.table_schema = $1
|
|
AND tc.table_name = $2
|
|
`, [table.table_schema, table.table_name]);
|
|
|
|
tables.push({
|
|
name: table.table_name,
|
|
schema: table.table_schema,
|
|
comment: table.table_comment,
|
|
columns: columnsResult.rows.map(col => ({
|
|
name: col.column_name,
|
|
type: col.data_type,
|
|
nullable: col.is_nullable === 'YES',
|
|
default_value: col.column_default,
|
|
is_primary: col.is_primary,
|
|
comment: col.column_comment,
|
|
})),
|
|
foreign_keys: fkResult.rows.map(fk => ({
|
|
column: fk.column_name,
|
|
references_table: fk.references_table,
|
|
references_column: fk.references_column,
|
|
constraint_name: fk.constraint_name,
|
|
})),
|
|
});
|
|
}
|
|
|
|
const totalTime = Date.now() - startTime;
|
|
console.log(`[Schema] Completed! ${tables.length} tables, ${tables.reduce((acc, t) => acc + t.columns.length, 0)} columns, ${tables.reduce((acc, t) => acc + t.foreign_keys.length, 0)} FKs in ${totalTime}ms`);
|
|
|
|
return {
|
|
tables,
|
|
updated_at: new Date().toISOString(),
|
|
};
|
|
}
// Get cached schema or return null
|
|
async function getCachedSchema(databaseId: string): Promise<SchemaData | null> {
|
|
const result = await mainPool.query(
|
|
'SELECT schema_data FROM database_schemas WHERE database_id = $1',
|
|
[databaseId]
|
|
);
|
|
|
|
if (result.rows.length > 0) {
|
|
return result.rows[0].schema_data as SchemaData;
|
|
}
|
|
return null;
|
|
}
// Save schema to cache
|
|
async function saveSchemaToCache(databaseId: string, schemaData: SchemaData): Promise<void> {
|
|
await mainPool.query(`
|
|
INSERT INTO database_schemas (database_id, schema_data, updated_at)
|
|
VALUES ($1, $2, NOW())
|
|
ON CONFLICT (database_id)
|
|
DO UPDATE SET schema_data = $2, updated_at = NOW()
|
|
`, [databaseId, JSON.stringify(schemaData)]);
|
|
}
// GET /api/workbench/schema/:databaseId
|
|
export const getSchema = async (req: Request, res: Response) => {
|
|
try {
|
|
const { databaseId } = req.params;
|
|
|
|
// Try to get from cache first
|
|
let schema = await getCachedSchema(databaseId);
|
|
|
|
if (!schema) {
|
|
// Parse and cache if not exists
|
|
schema = await parsePostgresSchema(databaseId);
|
|
await saveSchemaToCache(databaseId, schema);
|
|
}
|
|
|
|
res.json({ success: true, data: schema });
|
|
} catch (error: any) {
|
|
console.error('Error getting schema:', error);
|
|
res.status(500).json({ success: false, error: error.message });
|
|
}
|
|
};
// POST /api/workbench/schema/:databaseId/refresh
|
|
export const refreshSchema = async (req: Request, res: Response) => {
|
|
try {
|
|
const { databaseId } = req.params;
|
|
|
|
// Parse fresh schema
|
|
const schema = await parsePostgresSchema(databaseId);
|
|
|
|
// Save to cache
|
|
await saveSchemaToCache(databaseId, schema);
|
|
|
|
res.json({ success: true, data: schema });
|
|
} catch (error: any) {
|
|
console.error('Error refreshing schema:', error);
|
|
res.status(500).json({ success: false, error: error.message });
|
|
}
|
|
};
// Generate schema for a database (called from other controllers)
|
|
export const generateSchemaForDatabase = async (databaseId: string): Promise<void> => {
|
|
try {
|
|
const schema = await parsePostgresSchema(databaseId);
|
|
await saveSchemaToCache(databaseId, schema);
|
|
console.log(`Schema generated for database ${databaseId}`);
|
|
} catch (error: any) {
|
|
console.error(`Error generating schema for database ${databaseId}:`, error.message);
|
|
// Don't throw - schema generation is not critical
|
|
}
|
|
};