new file: backend/src/controllers/schemaController.ts
new file: backend/src/migrations/008_add_database_schemas.sql modified: backend/src/routes/sqlInterface.ts modified: frontend/package.json modified: frontend/src/App.tsx modified: frontend/src/components/Sidebar.tsx new file: frontend/src/pages/DatabaseSchema.tsx modified: frontend/src/services/api.ts
This commit is contained in:
186
backend/src/controllers/schemaController.ts
Normal file
186
backend/src/controllers/schemaController.ts
Normal file
@@ -0,0 +1,186 @@
|
||||
import { Request, Response } from 'express';
|
||||
import { mainPool } from '../config/database';
|
||||
import { databasePoolManager } from '../services/DatabasePoolManager';
|
||||
|
||||
/** Metadata for one column of a table, as reported by information_schema. */
interface ColumnInfo {
  name: string;
  // data_type from information_schema, e.g. 'integer', 'character varying'
  type: string;
  nullable: boolean;
  // column_default expression as text, or null when no default is set
  default_value: string | null;
  is_primary: boolean;
  // column comment (pg_description), or null when none exists
  comment: string | null;
}

/** One foreign-key reference from a column to another table's column. */
interface ForeignKey {
  column: string;
  references_table: string;
  references_column: string;
  constraint_name: string;
}

/** Full description of one table: identity, comment, columns and FKs. */
interface TableInfo {
  name: string;
  schema: string;
  comment: string | null;
  columns: ColumnInfo[];
  foreign_keys: ForeignKey[];
}

/** The schema snapshot cached in database_schemas.schema_data (JSONB). */
interface SchemaData {
  tables: TableInfo[];
  // ISO-8601 timestamp of when the schema was parsed
  updated_at: string;
}
|
||||
|
||||
// Parse PostgreSQL schema
|
||||
async function parsePostgresSchema(databaseId: string): Promise<SchemaData> {
|
||||
const pool = databasePoolManager.getPool(databaseId);
|
||||
if (!pool) {
|
||||
throw new Error('Database not found or not active');
|
||||
}
|
||||
|
||||
// Get all tables
|
||||
const tablesResult = await pool.query(`
|
||||
SELECT
|
||||
t.table_schema,
|
||||
t.table_name,
|
||||
obj_description((t.table_schema || '.' || t.table_name)::regclass, 'pg_class') as table_comment
|
||||
FROM information_schema.tables t
|
||||
WHERE t.table_schema NOT IN ('pg_catalog', 'information_schema')
|
||||
AND t.table_type = 'BASE TABLE'
|
||||
ORDER BY t.table_schema, t.table_name
|
||||
`);
|
||||
|
||||
const tables: TableInfo[] = [];
|
||||
|
||||
for (const table of tablesResult.rows) {
|
||||
// Get columns for each table
|
||||
const columnsResult = await pool.query(`
|
||||
SELECT
|
||||
c.column_name,
|
||||
c.data_type,
|
||||
c.is_nullable,
|
||||
c.column_default,
|
||||
CASE WHEN pk.column_name IS NOT NULL THEN true ELSE false END as is_primary,
|
||||
col_description((c.table_schema || '.' || c.table_name)::regclass, c.ordinal_position) as column_comment
|
||||
FROM information_schema.columns c
|
||||
LEFT JOIN (
|
||||
SELECT ku.column_name, ku.table_schema, ku.table_name
|
||||
FROM information_schema.table_constraints tc
|
||||
JOIN information_schema.key_column_usage ku
|
||||
ON tc.constraint_name = ku.constraint_name
|
||||
AND tc.table_schema = ku.table_schema
|
||||
WHERE tc.constraint_type = 'PRIMARY KEY'
|
||||
) pk ON c.column_name = pk.column_name
|
||||
AND c.table_schema = pk.table_schema
|
||||
AND c.table_name = pk.table_name
|
||||
WHERE c.table_schema = $1 AND c.table_name = $2
|
||||
ORDER BY c.ordinal_position
|
||||
`, [table.table_schema, table.table_name]);
|
||||
|
||||
// Get foreign keys
|
||||
const fkResult = await pool.query(`
|
||||
SELECT
|
||||
kcu.column_name,
|
||||
ccu.table_name AS references_table,
|
||||
ccu.column_name AS references_column,
|
||||
tc.constraint_name
|
||||
FROM information_schema.table_constraints tc
|
||||
JOIN information_schema.key_column_usage kcu
|
||||
ON tc.constraint_name = kcu.constraint_name
|
||||
AND tc.table_schema = kcu.table_schema
|
||||
JOIN information_schema.constraint_column_usage ccu
|
||||
ON ccu.constraint_name = tc.constraint_name
|
||||
AND ccu.table_schema = tc.table_schema
|
||||
WHERE tc.constraint_type = 'FOREIGN KEY'
|
||||
AND tc.table_schema = $1
|
||||
AND tc.table_name = $2
|
||||
`, [table.table_schema, table.table_name]);
|
||||
|
||||
tables.push({
|
||||
name: table.table_name,
|
||||
schema: table.table_schema,
|
||||
comment: table.table_comment,
|
||||
columns: columnsResult.rows.map(col => ({
|
||||
name: col.column_name,
|
||||
type: col.data_type,
|
||||
nullable: col.is_nullable === 'YES',
|
||||
default_value: col.column_default,
|
||||
is_primary: col.is_primary,
|
||||
comment: col.column_comment,
|
||||
})),
|
||||
foreign_keys: fkResult.rows.map(fk => ({
|
||||
column: fk.column_name,
|
||||
references_table: fk.references_table,
|
||||
references_column: fk.references_column,
|
||||
constraint_name: fk.constraint_name,
|
||||
})),
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
tables,
|
||||
updated_at: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
// Get cached schema or return null
|
||||
async function getCachedSchema(databaseId: string): Promise<SchemaData | null> {
|
||||
const result = await mainPool.query(
|
||||
'SELECT schema_data FROM database_schemas WHERE database_id = $1',
|
||||
[databaseId]
|
||||
);
|
||||
|
||||
if (result.rows.length > 0) {
|
||||
return result.rows[0].schema_data as SchemaData;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// Save schema to cache
|
||||
async function saveSchemaToCache(databaseId: string, schemaData: SchemaData): Promise<void> {
|
||||
await mainPool.query(`
|
||||
INSERT INTO database_schemas (database_id, schema_data, updated_at)
|
||||
VALUES ($1, $2, NOW())
|
||||
ON CONFLICT (database_id)
|
||||
DO UPDATE SET schema_data = $2, updated_at = NOW()
|
||||
`, [databaseId, JSON.stringify(schemaData)]);
|
||||
}
|
||||
|
||||
// GET /api/workbench/schema/:databaseId
|
||||
export const getSchema = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { databaseId } = req.params;
|
||||
|
||||
// Try to get from cache first
|
||||
let schema = await getCachedSchema(databaseId);
|
||||
|
||||
if (!schema) {
|
||||
// Parse and cache if not exists
|
||||
schema = await parsePostgresSchema(databaseId);
|
||||
await saveSchemaToCache(databaseId, schema);
|
||||
}
|
||||
|
||||
res.json({ success: true, data: schema });
|
||||
} catch (error: any) {
|
||||
console.error('Error getting schema:', error);
|
||||
res.status(500).json({ success: false, error: error.message });
|
||||
}
|
||||
};
|
||||
|
||||
// POST /api/workbench/schema/:databaseId/refresh
|
||||
export const refreshSchema = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { databaseId } = req.params;
|
||||
|
||||
// Parse fresh schema
|
||||
const schema = await parsePostgresSchema(databaseId);
|
||||
|
||||
// Save to cache
|
||||
await saveSchemaToCache(databaseId, schema);
|
||||
|
||||
res.json({ success: true, data: schema });
|
||||
} catch (error: any) {
|
||||
console.error('Error refreshing schema:', error);
|
||||
res.status(500).json({ success: false, error: error.message });
|
||||
}
|
||||
};
|
||||
12
backend/src/migrations/008_add_database_schemas.sql
Normal file
12
backend/src/migrations/008_add_database_schemas.sql
Normal file
@@ -0,0 +1,12 @@
|
||||
-- Cache of parsed database schemas: one JSONB snapshot per database.
CREATE TABLE IF NOT EXISTS database_schemas (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    -- Cascade so cached snapshots disappear with their database record.
    database_id UUID NOT NULL REFERENCES databases(id) ON DELETE CASCADE,
    schema_data JSONB NOT NULL,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    -- One cached schema per database; this constraint is the target of the
    -- writer's ON CONFLICT (database_id) upsert.
    UNIQUE(database_id)
);

-- No separate index on database_id: the UNIQUE constraint above already
-- creates a unique btree index on that column, so an extra
-- CREATE INDEX would only duplicate it and slow writes.
|
||||
@@ -1,6 +1,7 @@
|
||||
import express from 'express';
|
||||
import { authMiddleware } from '../middleware/auth';
|
||||
import { executeQuery } from '../controllers/sqlInterfaceController';
|
||||
import { getSchema, refreshSchema } from '../controllers/schemaController';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -8,4 +9,8 @@ router.use(authMiddleware);
|
||||
|
||||
// NOTE(review): authMiddleware is applied via router.use() just above this
// hunk (per diff context), so all routes below require authentication —
// confirm against the full file.

// Execute an ad-hoc SQL query against a managed database.
router.post('/execute', executeQuery);

// Schema routes: GET serves the (possibly cached) parsed schema,
// POST /refresh forces a fresh parse and cache overwrite.
router.get('/schema/:databaseId', getSchema);
router.post('/schema/:databaseId/refresh', refreshSchema);

export default router;
|
||||
|
||||
Reference in New Issue
Block a user