Initial commit: kisync CLI client for KIS API Builder

CLI tool for syncing local folders with KIS API Builder server.
Commands: init, pull, push, status with conflict detection.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-14 16:19:48 +03:00
commit 4c54bfff26
13 changed files with 1882 additions and 0 deletions

57
src/api.ts Normal file
View File

@@ -0,0 +1,57 @@
import fetch from 'node-fetch';
import { Config } from './config';
/**
 * Thin HTTP client for the KIS API Builder sync endpoints.
 * All requests are JSON and carry the bearer token from the project config.
 */
export class ApiClient {
  private baseUrl: string;
  private token: string;

  /** @param config server host (trailing slash stripped) and bearer token */
  constructor(config: Config) {
    this.baseUrl = config.host.replace(/\/$/, '');
    this.token = config.token;
  }

  /**
   * Perform an authenticated JSON request.
   *
   * A 409 (sync conflict) response is NOT thrown — it is returned as
   * `{ _conflict: true, ...serverPayload }` so callers can handle it.
   *
   * @throws Error on 401, on other non-OK statuses, or when the server
   *         returns a body that is not valid JSON.
   */
  private async request(method: string, path: string, body?: any): Promise<any> {
    const url = `${this.baseUrl}${path}`;
    const res = await fetch(url, {
      method,
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${this.token}`,
      },
      body: body ? JSON.stringify(body) : undefined,
    });
    if (res.status === 401) {
      throw new Error('Authentication failed. Check your token (kisync init to reconfigure).');
    }
    // Parse defensively: proxies or server crashes can yield empty or
    // non-JSON bodies, and res.json() would fail with an opaque parse error.
    const text = await res.text();
    let data: any;
    try {
      data = text ? JSON.parse(text) : {};
    } catch {
      if (!res.ok) {
        throw new Error(`HTTP ${res.status}: ${res.statusText}`);
      }
      throw new Error(`Server returned invalid JSON (HTTP ${res.status})`);
    }
    if (!res.ok) {
      if (res.status === 409) {
        // Conflict response — return data with a flag instead of throwing
        return { _conflict: true, ...data };
      }
      throw new Error(data.error || `HTTP ${res.status}: ${res.statusText}`);
    }
    return data;
  }

  /** Fetch the full set of endpoints and folders from the server. */
  async pull() {
    return this.request('GET', '/api/sync/pull');
  }

  /** Ask the server to diff its state against the client's known state. */
  async status(clientState: { endpoints: any[]; folders: any[] }) {
    return this.request('POST', '/api/sync/status', clientState);
  }

  /** Upload changed endpoints; `force` overwrites server-side edits. */
  async push(endpoints: any[], force = false) {
    return this.request('POST', '/api/sync/push', { endpoints, force });
  }

  /** Exchange credentials for a bearer token. */
  async login(username: string, password: string): Promise<string> {
    const data = await this.request('POST', '/api/auth/login', { username, password });
    return data.token;
  }
}

101
src/commands/init.ts Normal file
View File

@@ -0,0 +1,101 @@
import * as readline from 'readline';
import chalk from 'chalk';
import { writeConfig, findProjectRoot } from '../config';
import { ApiClient } from '../api';
/**
 * Prompt the user for a single line of input on stdin.
 *
 * When `hidden` is true the terminal is switched to raw mode and each typed
 * character is echoed as '*' (password entry); Enter, Ctrl+C and backspace
 * are handled manually because raw mode disables line editing.
 *
 * @param question prompt text written to stdout
 * @param hidden   mask the input with '*' instead of echoing it
 * @returns the entered line (trimmed in normal mode; NOT trimmed in hidden mode)
 */
function ask(question: string, hidden = false): Promise<string> {
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });
  return new Promise((resolve) => {
    if (hidden) {
      // For password input - don't echo characters
      process.stdout.write(question);
      let input = '';
      const stdin = process.stdin;
      // Remember the previous raw-mode state so it can be restored on Enter.
      const wasRaw = stdin.isRaw;
      if (stdin.setRawMode) stdin.setRawMode(true);
      stdin.resume();
      stdin.setEncoding('utf-8');
      // NOTE(review): raw-mode 'data' events may deliver multi-character
      // chunks (e.g. on paste); this handler assumes one char per event —
      // confirm that is acceptable for password entry.
      const onData = (char: string) => {
        const c = char.toString();
        if (c === '\n' || c === '\r') {
          // Enter: detach the listener, restore terminal state, resolve.
          stdin.removeListener('data', onData);
          if (stdin.setRawMode) stdin.setRawMode(wasRaw || false);
          process.stdout.write('\n');
          rl.close();
          resolve(input);
        } else if (c === '\u0003') {
          // Ctrl+C: raw mode suppresses the default SIGINT, so exit manually.
          process.exit();
        } else if (c === '\u007f' || c === '\b') {
          // Backspace/DEL: drop the last char and erase one '*' on screen.
          if (input.length > 0) {
            input = input.slice(0, -1);
            process.stdout.write('\b \b');
          }
        } else {
          input += c;
          process.stdout.write('*');
        }
      };
      stdin.on('data', onData);
    } else {
      rl.question(question, (answer) => {
        rl.close();
        resolve(answer.trim());
      });
    }
  });
}
/**
 * Interactive project setup: prompts for the server URL and credentials,
 * verifies them by logging in, then writes .kisync.json into the current
 * working directory.
 *
 * @throws Error when required input is missing or authentication fails
 */
export async function initCommand(): Promise<void> {
  const cwd = process.cwd();
  const existing = findProjectRoot(cwd);
  if (existing) {
    // A config already exists at or above cwd — confirm before clobbering.
    console.log(chalk.yellow(`Project already initialized at: ${existing}`));
    const overwrite = await ask('Reinitialize? (y/N): ');
    if (overwrite.toLowerCase() !== 'y') {
      console.log('Aborted.');
      return;
    }
  }

  console.log(chalk.bold('\nKIS API Builder Sync — Setup\n'));
  const host = await ask('Server URL (e.g. http://localhost:3000): ');
  if (!host) throw new Error('Server URL is required');

  console.log(chalk.gray('\nLogin to get authentication token:'));
  const username = await ask('Username: ');
  const password = await ask('Password: ', true);
  if (!username || !password) {
    throw new Error('Username and password are required');
  }

  // Verify the credentials against the server before persisting anything.
  console.log(chalk.gray('\nConnecting...'));
  const tempClient = new ApiClient({ host, token: '' });
  let token: string;
  try {
    token = await tempClient.login(username, password);
  } catch (err: any) {
    throw new Error(`Authentication failed: ${err.message}`);
  }

  writeConfig({ host, token }, cwd);
  console.log(chalk.green('\nProject initialized successfully!'));
  console.log(chalk.gray(`Config saved to: ${cwd}/.kisync.json`));
  console.log('');
  console.log('Next steps:');
  console.log(`  ${chalk.cyan('kisync pull')} — download endpoints from server`);
  console.log(`  ${chalk.cyan('kisync status')} — check what changed`);
  console.log(`  ${chalk.cyan('kisync push')} — upload your changes`);
}

302
src/commands/pull.ts Normal file
View File

@@ -0,0 +1,302 @@
import * as fs from 'fs';
import * as path from 'path';
import * as readline from 'readline';
import chalk from 'chalk';
import { readConfig, readState, writeState, getProjectRoot, SyncState } from '../config';
import { ApiClient } from '../api';
import {
sanitizeName,
buildFolderPath,
writeEndpointToDisk,
findEndpointDirs,
readEndpointFromDisk,
} from '../files';
import { computeEndpointHash } from '../hash';
/** Prompt on stdout and resolve with the trimmed line read from stdin. */
function ask(question: string): Promise<string> {
  return new Promise((resolve) => {
    const rl = readline.createInterface({
      input: process.stdin,
      output: process.stdout,
    });
    rl.question(question, (answer: string) => {
      rl.close();
      resolve(answer.trim());
    });
  });
}
// Preview of what a pull would change, computed before anything is written.
interface PullDiff {
  newEndpoints: { name: string; folder: string }[]; // on server, not tracked locally
  updatedEndpoints: { name: string; folder: string; serverDate: string }[]; // server newer, local untouched
  deletedEndpoints: { name: string; folder: string }[]; // tracked locally, gone from server
  unchangedCount: number; // endpoints whose server timestamp is not newer
  localConflicts: { name: string; folder: string }[]; // locally modified AND server modified
}
/**
 * Pull endpoints/folders from the server, show a diff preview, and (after
 * confirmation) mirror the server state onto the local disk.
 *
 * Conflict rule: an endpoint is conflicted when its local content hash
 * differs from the last-sync hash AND the server copy is newer than the
 * last-synced timestamp. Conflicted endpoints are skipped unless `force`
 * is set, in which case the server version wins.
 *
 * @param force apply without prompting and overwrite local modifications
 */
export async function pullCommand(force = false): Promise<void> {
  const root = getProjectRoot();
  const config = readConfig(root);
  const state = readState(root);
  const api = new ApiClient(config);
  console.log(chalk.gray('Pulling from server...'));
  const data = await api.pull();
  const { endpoints, folders } = data;
  console.log(
    chalk.gray(
      `Server: ${endpoints.length} endpoints, ${folders.length} folders`
    )
  );
  // Build folders map (id -> folder) for resolving parent chains.
  const foldersMap = new Map<string, any>();
  for (const f of folders) {
    foldersMap.set(f.id, f);
  }
  // Detect local modifications: on-disk content hash vs. hash at last sync.
  const localModifiedIds = new Set<string>();
  if (state.last_sync) {
    const endpointDirs = findEndpointDirs(root);
    for (const dir of endpointDirs) {
      const ep = readEndpointFromDisk(dir);
      if (!ep || !ep.id) continue;
      const stateEntry = state.endpoints[ep.id];
      if (!stateEntry || !stateEntry.hash) continue;
      const currentHash = computeEndpointHash(ep);
      if (currentHash !== stateEntry.hash) {
        localModifiedIds.add(ep.id);
      }
    }
  }
  // Build diff preview. Conflicts are tracked by endpoint id: names are not
  // unique across folders, so the previous name-based matching could skip
  // the wrong endpoint and miscount safe local edits.
  const conflictIds = new Set<string>();
  const diff: PullDiff = {
    newEndpoints: [],
    updatedEndpoints: [],
    deletedEndpoints: [],
    unchangedCount: 0,
    localConflicts: [],
  };
  for (const ep of endpoints) {
    const folderName = ep.folder_name || '_no_folder';
    if (!state.endpoints[ep.id]) {
      diff.newEndpoints.push({ name: ep.name, folder: folderName });
    } else {
      const serverTime = new Date(ep.updated_at).getTime();
      const stateTime = new Date(state.endpoints[ep.id].updated_at).getTime();
      if (serverTime > stateTime) {
        // Server has a newer version
        if (localModifiedIds.has(ep.id)) {
          // CONFLICT: both local and server changed
          conflictIds.add(ep.id);
          diff.localConflicts.push({ name: ep.name, folder: folderName });
        } else {
          diff.updatedEndpoints.push({
            name: ep.name,
            folder: folderName,
            serverDate: ep.updated_at,
          });
        }
      } else {
        diff.unchangedCount++;
      }
    }
  }
  // Detect server deletions (tracked locally, no longer on the server).
  const serverIds = new Set(endpoints.map((e: any) => e.id));
  for (const [id, info] of Object.entries(state.endpoints)) {
    if (!serverIds.has(id)) {
      const dirName = path.basename(info.folder_path);
      const parentName = path.basename(path.dirname(info.folder_path));
      diff.deletedEndpoints.push({ name: dirName, folder: parentName });
    }
  }
  // Show preview
  const totalChanges =
    diff.newEndpoints.length +
    diff.updatedEndpoints.length +
    diff.deletedEndpoints.length +
    diff.localConflicts.length;
  if (totalChanges === 0) {
    console.log(chalk.green('\nEverything is up to date.'));
    return;
  }
  console.log(chalk.bold('\nIncoming changes from server:\n'));
  for (const item of diff.newEndpoints) {
    console.log(chalk.green(`  + new: ${item.folder}/${item.name}`));
  }
  for (const item of diff.updatedEndpoints) {
    console.log(chalk.blue(`  ~ updated: ${item.folder}/${item.name}`));
    console.log(chalk.gray(`    server updated: ${new Date(item.serverDate).toLocaleString()}`));
  }
  for (const item of diff.deletedEndpoints) {
    console.log(chalk.red(`  - deleted: ${item.folder}/${item.name}`));
  }
  for (const item of diff.localConflicts) {
    console.log(chalk.redBright(`  ! CONFLICT: ${item.folder}/${item.name}`));
    console.log(chalk.redBright(`    changed locally AND on server`));
  }
  if (diff.unchangedCount > 0) {
    console.log(chalk.gray(`\n  ${diff.unchangedCount} unchanged`));
  }
  // Conflicts block the whole pull unless --force is given.
  if (diff.localConflicts.length > 0 && !force) {
    console.log(
      chalk.yellow(
        `\n${diff.localConflicts.length} conflict(s): you edited these locally, but they were also changed on the server.`
      )
    );
    console.log(chalk.yellow('Options:'));
    console.log(chalk.yellow('  1) "kisync pull --force" — overwrite your local changes with server version'));
    console.log(chalk.yellow('  2) "kisync push" — push your changes first (server version will be overwritten)'));
    console.log(chalk.yellow('  3) "kisync push --force" — force push if server also changed'));
    return;
  }
  // Locally modified endpoints the server did NOT also change are safe — a
  // plain pull leaves them alone — but --force overwrites them, so warn.
  // (Fixed: filter by id instead of matching on endpoint names.)
  const safeLocalModified = [...localModifiedIds].filter(
    (id) => !conflictIds.has(id)
  );
  if (force && safeLocalModified.length > 0) {
    console.log(
      chalk.yellow(`\n--force: ${safeLocalModified.length} locally modified endpoint(s) will be overwritten.`)
    );
  }
  // Confirm
  if (!force) {
    const confirm = await ask('\nApply these changes? (y/N): ');
    if (confirm.toLowerCase() !== 'y') {
      console.log('Aborted.');
      return;
    }
  }
  // Apply changes — the sync state is rebuilt from scratch.
  const newState: SyncState = { endpoints: {}, folders: {}, last_sync: '' };
  // Create folder structure
  for (const folder of folders) {
    const folderPath = buildFolderPath(folder.id, foldersMap, root);
    fs.mkdirSync(folderPath, { recursive: true });
    const folderMeta = {
      id: folder.id,
      name: folder.name,
      parent_id: folder.parent_id,
    };
    fs.writeFileSync(
      path.join(folderPath, '_folder.json'),
      JSON.stringify(folderMeta, null, 2),
      'utf-8'
    );
    newState.folders[folder.id] = {
      updated_at: folder.updated_at,
      path: path.relative(root, folderPath),
    };
  }
  // Write endpoints
  let applied = 0;
  let skipped = 0;
  for (const ep of endpoints) {
    let endpointDir: string;
    if (ep.folder_id && foldersMap.has(ep.folder_id)) {
      const folderPath = buildFolderPath(ep.folder_id, foldersMap, root);
      endpointDir = path.join(folderPath, sanitizeName(ep.name));
    } else {
      endpointDir = path.join(root, '_no_folder', sanitizeName(ep.name));
    }
    // Skip conflicts unless force (id-based check, see conflictIds above).
    const isConflict = conflictIds.has(ep.id);
    if (isConflict && !force) {
      // Keep local version in state but mark with server's updated_at
      if (state.endpoints[ep.id]) {
        newState.endpoints[ep.id] = state.endpoints[ep.id];
      }
      skipped++;
      continue;
    }
    // If endpoint moved to different folder, clean up old location
    if (state.endpoints[ep.id]) {
      const oldRelPath = state.endpoints[ep.id].folder_path;
      const oldAbsPath = path.join(root, oldRelPath);
      const newRelPath = path.relative(root, endpointDir);
      if (oldRelPath !== newRelPath && fs.existsSync(oldAbsPath)) {
        fs.rmSync(oldAbsPath, { recursive: true, force: true });
      }
    }
    writeEndpointToDisk(ep, endpointDir);
    const hash = computeEndpointHash(ep);
    newState.endpoints[ep.id] = {
      updated_at: ep.updated_at,
      folder_path: path.relative(root, endpointDir),
      hash,
    };
    applied++;
  }
  // Clean up endpoints deleted on server
  let deleted = 0;
  for (const [id, info] of Object.entries(state.endpoints)) {
    if (!serverIds.has(id)) {
      const oldPath = path.join(root, info.folder_path);
      if (fs.existsSync(oldPath)) {
        fs.rmSync(oldPath, { recursive: true, force: true });
        deleted++;
      }
    }
  }
  cleanEmptyDirs(root);
  newState.last_sync = new Date().toISOString();
  writeState(newState, root);
  // Summary
  console.log('');
  console.log(chalk.green(`Pull complete: ${applied} applied, ${deleted} deleted, ${skipped} skipped.`));
}
/**
 * Recursively remove empty directories under `dir`.
 *
 * Fix: the original guarded against deleting `process.cwd()`, which is the
 * wrong anchor when the command runs from a subdirectory of the project
 * root — the sweep root itself must never be removed. The recursion now
 * threads the root through an optional parameter (defaults to the initial
 * dir, so existing call sites are unchanged).
 *
 * @param dir  directory to sweep
 * @param root top of the sweep; never deleted
 */
function cleanEmptyDirs(dir: string, root: string = dir): void {
  if (!fs.existsSync(dir)) return;
  for (const entry of fs.readdirSync(dir)) {
    const fullPath = path.join(dir, entry);
    if (fs.statSync(fullPath).isDirectory()) {
      cleanEmptyDirs(fullPath, root);
    }
  }
  // Re-read after recursion: children may have been deleted just above.
  if (dir !== root && fs.readdirSync(dir).length === 0) {
    fs.rmdirSync(dir);
  }
}

117
src/commands/push.ts Normal file
View File

@@ -0,0 +1,117 @@
import * as path from 'path';
import * as readline from 'readline';
import chalk from 'chalk';
import { readConfig, readState, writeState, getProjectRoot } from '../config';
import { ApiClient } from '../api';
import { findEndpointDirs, readEndpointFromDisk } from '../files';
import { computeEndpointHash } from '../hash';
/** Ask one question on stdin/stdout; resolves with the trimmed answer. */
function ask(question: string): Promise<string> {
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });
  return new Promise<string>((resolve) =>
    rl.question(question, (answer) => {
      rl.close();
      resolve(answer.trim());
    })
  );
}
/**
 * Push locally modified endpoints to the server.
 *
 * Only endpoints already tracked in the sync state are considered; an
 * endpoint counts as modified when its on-disk content hash differs from
 * the hash recorded at the last sync. Each pushed endpoint carries
 * `_base_updated_at` so the server can detect concurrent edits.
 *
 * @param force skip the confirmation prompt and overwrite server changes
 */
export async function pushCommand(force = false): Promise<void> {
  const root = getProjectRoot();
  const config = readConfig(root);
  const state = readState(root);
  const api = new ApiClient(config);
  if (!state.last_sync) {
    console.log(chalk.yellow('No sync history. Run "kisync pull" first.'));
    return;
  }
  // Find locally modified endpoints
  const endpointDirs = findEndpointDirs(root);
  const modified: { ep: any; dir: string }[] = [];
  for (const dir of endpointDirs) {
    const ep = readEndpointFromDisk(dir);
    // Endpoints without an id (never synced) and untracked ones are skipped.
    if (!ep || !ep.id) continue;
    const stateEntry = state.endpoints[ep.id];
    if (!stateEntry) continue;
    const currentHash = computeEndpointHash(ep);
    if (stateEntry.hash && currentHash !== stateEntry.hash) {
      modified.push({ ep, dir });
    }
  }
  if (modified.length === 0) {
    console.log(chalk.green('Nothing to push. All endpoints match server state.'));
    return;
  }
  // Show what will be pushed
  console.log(chalk.bold(`\n${modified.length} endpoint(s) to push:\n`));
  for (const { ep, dir } of modified) {
    console.log(chalk.yellow(`  ~ ${ep.name}`));
    console.log(chalk.gray(`    ${path.relative(root, dir)}`));
  }
  console.log('');
  if (!force) {
    const confirm = await ask('Push these changes? (y/N): ');
    if (confirm.toLowerCase() !== 'y') {
      console.log('Aborted.');
      return;
    }
  }
  // Prepare push payload — include _base_updated_at for conflict detection
  const pushEndpoints = modified.map(({ ep }) => {
    const stateEntry = state.endpoints[ep.id];
    return {
      ...ep,
      _base_updated_at: stateEntry ? stateEntry.updated_at : undefined,
    };
  });
  console.log(chalk.gray('\nPushing to server...'));
  const result = await api.push(pushEndpoints, force);
  // Handle conflicts (ApiClient flags HTTP 409 responses with _conflict).
  if (result._conflict) {
    console.log(chalk.red('\nConflicts detected! These endpoints were modified on the server:'));
    console.log(chalk.gray('(Someone else changed them since your last sync)\n'));
    for (const c of result.conflicts || []) {
      console.log(chalk.red(`  ${c.name}`));
      console.log(chalk.gray(`    server updated: ${c.server_updated_at}`));
      console.log(chalk.gray(`    your base: ${c.client_base_updated_at}`));
    }
    if (result.applied && result.applied.length > 0) {
      // NOTE(review): local state is not updated for these applied
      // endpoints, so the next push will re-send them — confirm intended.
      console.log(chalk.green(`\n${result.applied.length} non-conflicting endpoint(s) were pushed.`));
    }
    console.log(chalk.yellow('\nUse "kisync push --force" to overwrite server changes.'));
    console.log(chalk.yellow('Or run "kisync pull --force" to get the latest version first.'));
    return;
  }
  // Update state with new updated_at from server
  // (assumes result.results is an array of { endpoint } — server contract)
  for (const r of result.results) {
    const serverEp = r.endpoint;
    if (serverEp && state.endpoints[serverEp.id]) {
      const localEp = modified.find((m) => m.ep.id === serverEp.id);
      state.endpoints[serverEp.id].updated_at = serverEp.updated_at;
      // Re-hash the local content so future runs see this endpoint as clean.
      state.endpoints[serverEp.id].hash = localEp
        ? computeEndpointHash(localEp.ep)
        : state.endpoints[serverEp.id].hash;
    }
  }
  state.last_sync = new Date().toISOString();
  writeState(state, root);
  console.log(chalk.green(`\nPush complete. ${result.results.length} endpoint(s) updated.`));
}

119
src/commands/status.ts Normal file
View File

@@ -0,0 +1,119 @@
import * as path from 'path';
import chalk from 'chalk';
import { readConfig, readState, getProjectRoot } from '../config';
import { ApiClient } from '../api';
import { findEndpointDirs, readEndpointFromDisk } from '../files';
import { computeEndpointHash } from '../hash';
export async function statusCommand(): Promise<void> {
const root = getProjectRoot();
const config = readConfig(root);
const state = readState(root);
const api = new ApiClient(config);
if (!state.last_sync) {
console.log(chalk.yellow('No sync history. Run "kisync pull" first.'));
return;
}
console.log(chalk.gray(`Last sync: ${state.last_sync}\n`));
// 1) Detect local changes
const localModified: string[] = [];
const localNew: string[] = [];
const endpointDirs = findEndpointDirs(root);
const knownIds = new Set(Object.keys(state.endpoints));
const foundIds = new Set<string>();
for (const dir of endpointDirs) {
const ep = readEndpointFromDisk(dir);
if (!ep) continue;
if (ep.id && knownIds.has(ep.id)) {
foundIds.add(ep.id);
const stateEntry = state.endpoints[ep.id];
if (stateEntry && stateEntry.hash) {
const currentHash = computeEndpointHash(ep);
if (currentHash !== stateEntry.hash) {
localModified.push(`${path.relative(root, dir)} (${ep.name})`);
}
}
} else if (!ep.id) {
// New local endpoint (no id assigned yet)
localNew.push(`${path.relative(root, dir)} (${ep.name || 'unnamed'})`);
}
}
// Locally deleted (existed in state but no longer on disk)
const localDeleted: string[] = [];
for (const [id, info] of Object.entries(state.endpoints)) {
if (!foundIds.has(id)) {
localDeleted.push(`${info.folder_path}`);
}
}
// 2) Check server changes
const clientEndpoints = Object.entries(state.endpoints).map(([id, info]) => ({
id,
updated_at: info.updated_at,
}));
const clientFolders = Object.entries(state.folders).map(([id, info]) => ({
id,
updated_at: info.updated_at,
}));
console.log(chalk.gray('Checking server...'));
const serverStatus = await api.status({ endpoints: clientEndpoints, folders: clientFolders });
// 3) Display results
const hasLocalChanges = localModified.length > 0 || localNew.length > 0 || localDeleted.length > 0;
const hasServerChanges =
serverStatus.endpoints.changed.length > 0 ||
serverStatus.endpoints.new.length > 0 ||
serverStatus.endpoints.deleted.length > 0;
if (!hasLocalChanges && !hasServerChanges) {
console.log(chalk.green('\nEverything is in sync.'));
return;
}
// Local changes
if (hasLocalChanges) {
console.log(chalk.bold('\nLocal changes (not pushed):'));
for (const item of localModified) {
console.log(chalk.yellow(` modified: ${item}`));
}
for (const item of localNew) {
console.log(chalk.green(` new: ${item}`));
}
for (const item of localDeleted) {
console.log(chalk.red(` deleted: ${item}`));
}
}
// Server changes
if (hasServerChanges) {
console.log(chalk.bold('\nServer changes (not pulled):'));
for (const item of serverStatus.endpoints.changed) {
console.log(chalk.blue(` modified: ${item.name}`));
}
for (const item of serverStatus.endpoints.new) {
console.log(chalk.green(` new: ${item.name}`));
}
for (const item of serverStatus.endpoints.deleted) {
console.log(chalk.red(` deleted: id=${item.id}`));
}
}
// Conflicts warning
if (hasLocalChanges && hasServerChanges) {
console.log(chalk.yellow('\nBoth local and server have changes!'));
console.log(chalk.gray(' Push first to send your changes, then pull to get server updates.'));
console.log(chalk.gray(' Or use --force on either to overwrite.'));
} else if (hasLocalChanges) {
console.log(chalk.gray('\nRun "kisync push" to upload your changes.'));
} else {
console.log(chalk.gray('\nRun "kisync pull" to download server changes.'));
}
}

66
src/config.ts Normal file
View File

@@ -0,0 +1,66 @@
import * as fs from 'fs';
import * as path from 'path';
// Connection settings persisted in .kisync.json at the project root.
export interface Config {
  host: string;  // server base URL; a trailing slash is stripped by ApiClient
  token: string; // bearer token obtained via login
}
// Snapshot of the last successful sync, persisted in .kisync-state.json.
export interface SyncState {
  // endpoint id -> server timestamp, relative on-disk path, content hash
  endpoints: Record<string, { updated_at: string; folder_path: string; hash: string }>;
  // folder id -> server timestamp and relative on-disk path
  folders: Record<string, { updated_at: string; path: string }>;
  // ISO timestamp of the last pull/push; '' means never synced
  last_sync: string;
}
// File names resolved against the project root.
const CONFIG_FILE = '.kisync.json';
const STATE_FILE = '.kisync-state.json';
/**
 * Walk up from `startDir` looking for a directory containing .kisync.json.
 * @returns the containing directory, or null when the filesystem root is
 *          reached without finding one.
 */
export function findProjectRoot(startDir: string = process.cwd()): string | null {
  for (let dir = startDir; ; ) {
    if (fs.existsSync(path.join(dir, CONFIG_FILE))) return dir;
    const parent = path.dirname(dir);
    if (parent === dir) return null; // hit the filesystem root
    dir = parent;
  }
}
/**
 * Resolve the project root or fail loudly.
 * @throws Error when no .kisync.json is found up the directory tree
 */
export function getProjectRoot(): string {
  const root = findProjectRoot();
  if (root) return root;
  throw new Error(
    'Not a kisync project. Run "kisync init" first to initialize.'
  );
}
/** Load .kisync.json from `projectRoot` (auto-detected when omitted). */
export function readConfig(projectRoot?: string): Config {
  const configPath = path.join(projectRoot || getProjectRoot(), CONFIG_FILE);
  return JSON.parse(fs.readFileSync(configPath, 'utf-8'));
}
/** Persist the config as pretty-printed JSON in `projectRoot`. */
export function writeConfig(config: Config, projectRoot: string): void {
  const serialized = JSON.stringify(config, null, 2);
  fs.writeFileSync(path.join(projectRoot, CONFIG_FILE), serialized, 'utf-8');
}
/**
 * Load the sync state; a missing state file yields an empty, never-synced
 * state instead of an error.
 */
export function readState(projectRoot?: string): SyncState {
  const statePath = path.join(projectRoot || getProjectRoot(), STATE_FILE);
  if (!fs.existsSync(statePath)) {
    return { endpoints: {}, folders: {}, last_sync: '' };
  }
  return JSON.parse(fs.readFileSync(statePath, 'utf-8'));
}
/** Persist the sync state as pretty-printed JSON at the project root. */
export function writeState(state: SyncState, projectRoot?: string): void {
  const statePath = path.join(projectRoot || getProjectRoot(), STATE_FILE);
  fs.writeFileSync(statePath, JSON.stringify(state, null, 2), 'utf-8');
}

328
src/files.ts Normal file
View File

@@ -0,0 +1,328 @@
import * as fs from 'fs';
import * as path from 'path';
/**
 * Make a server-side name safe to use as a file/directory name:
 * replaces characters invalid on common filesystems with '_', collapses
 * whitespace runs to single spaces, and trims the ends.
 */
export function sanitizeName(name: string): string {
  const withoutInvalid = name.replace(/[<>:"/\\|?*]/g, '_');
  return withoutInvalid.replace(/\s+/g, ' ').trim();
}
/**
 * Build the absolute directory path for a folder by walking its parent
 * chain up to the root and joining the sanitized names.
 *
 * Fix: guards against a corrupt `parent_id` cycle (a -> b -> a), which
 * previously looped forever; traversal now stops at the first repeated id.
 */
export function buildFolderPath(
  folderId: string,
  foldersMap: Map<string, any>,
  projectRoot: string
): string {
  const parts: string[] = [];
  const visited = new Set<string>();
  let currentId: string | null = folderId;
  while (currentId && !visited.has(currentId)) {
    visited.add(currentId);
    const folder = foldersMap.get(currentId);
    if (!folder) break;
    parts.unshift(sanitizeName(folder.name));
    currentId = folder.parent_id;
  }
  return path.join(projectRoot, ...parts);
}
/**
 * Write an endpoint to disk as a set of files
 *
 * Layout inside `endpointDir`:
 *   endpoint.json            — metadata (no code/query bodies)
 *   query.sql                — SQL endpoints (execution_type 'sql')
 *   main.js / main.py        — script endpoints (ext from script_language)
 *   queries/*, _index.json   — script sub-queries with their db mappings
 *   request.http             — AQL endpoints, rendered via buildHttpFile
 *
 * NOTE(review): nothing removes files left over from a previous
 * execution_type (e.g. a stale query.sql after switching to 'script') —
 * confirm callers always write into a clean or freshly-managed directory.
 */
export function writeEndpointToDisk(
  ep: any,
  endpointDir: string
): void {
  fs.mkdirSync(endpointDir, { recursive: true });
  // endpoint.json — metadata (everything except code/queries content)
  const meta: any = {
    id: ep.id,
    name: ep.name,
    description: ep.description || '',
    method: ep.method,
    path: ep.path,
    execution_type: ep.execution_type || 'sql',
    database_name: ep.database_name || null,
    database_type: ep.database_type || null,
    database_id: ep.database_id || null,
    parameters: ep.parameters || [],
    is_public: ep.is_public || false,
    enable_logging: ep.enable_logging || false,
    detailed_response: ep.detailed_response || false,
    response_schema: ep.response_schema || null,
    folder_id: ep.folder_id || null,
    folder_name: ep.folder_name || null,
    updated_at: ep.updated_at,
    created_at: ep.created_at,
  };
  fs.writeFileSync(
    path.join(endpointDir, 'endpoint.json'),
    JSON.stringify(meta, null, 2),
    'utf-8'
  );
  // SQL query file
  if (ep.execution_type === 'sql' && ep.sql_query) {
    fs.writeFileSync(path.join(endpointDir, 'query.sql'), ep.sql_query, 'utf-8');
  }
  // Script file
  if (ep.execution_type === 'script' && ep.script_code) {
    const ext = ep.script_language === 'python' ? 'py' : 'js';
    fs.writeFileSync(path.join(endpointDir, `main.${ext}`), ep.script_code, 'utf-8');
  }
  // Script queries
  const scriptQueries = ep.script_queries || [];
  if (ep.execution_type === 'script' && scriptQueries.length > 0) {
    const queriesDir = path.join(endpointDir, 'queries');
    fs.mkdirSync(queriesDir, { recursive: true });
    // Write an index file with database mappings for each query
    const queryIndex: any[] = [];
    for (const sq of scriptQueries) {
      const sqMeta: any = {
        name: sq.name,
        database_name: sq.database_name || null,
        database_type: sq.database_type || null,
        database_id: sq.database_id || null,
      };
      // Plain SQL sub-query -> <name>.sql
      if (sq.sql) {
        const fileName = `${sanitizeName(sq.name)}.sql`;
        fs.writeFileSync(path.join(queriesDir, fileName), sq.sql, 'utf-8');
        sqMeta.file = fileName;
      }
      // AQL-type script query
      if (sq.aql_method) {
        const fileName = `${sanitizeName(sq.name)}.http`;
        const httpContent = buildHttpFile(
          sq.aql_method,
          sq.aql_endpoint || '',
          sq.aql_body || '',
          sq.aql_query_params || {}
        );
        fs.writeFileSync(path.join(queriesDir, fileName), httpContent, 'utf-8');
        // If both sql and aql_method are set, the .http file wins in the index.
        sqMeta.file = fileName;
        sqMeta.type = 'aql';
      }
      queryIndex.push(sqMeta);
    }
    fs.writeFileSync(
      path.join(queriesDir, '_index.json'),
      JSON.stringify(queryIndex, null, 2),
      'utf-8'
    );
  }
  // AQL endpoint as .http file
  if (ep.execution_type === 'aql') {
    const httpContent = buildHttpFile(
      ep.aql_method || 'GET',
      ep.aql_endpoint || '',
      ep.aql_body || '',
      ep.aql_query_params || {}
    );
    fs.writeFileSync(path.join(endpointDir, 'request.http'), httpContent, 'utf-8');
  }
}
/**
 * Render AQL request parameters as a .http file: request line,
 * Content-Type header, and an optional body after a blank line.
 */
function buildHttpFile(
  method: string,
  endpoint: string,
  body: string,
  queryParams: Record<string, string>
): string {
  const pairs = Object.entries(queryParams || {});
  let url = endpoint;
  if (pairs.length > 0) {
    // Append query params, respecting any '?' already present in the URL.
    const query = pairs.map(([key, value]) => `${key}=${value}`).join('&');
    url += (url.includes('?') ? '&' : '?') + query;
  }
  const chunks = [`${method} ${url}\n`, 'Content-Type: application/json\n'];
  if (body) {
    chunks.push(`\n${body}\n`);
  }
  return chunks.join('');
}
/**
 * Parse a .http file (as produced by buildHttpFile) back into AQL request
 * parameters. Header lines are recognized but discarded; everything after
 * the first blank line following the request line becomes the body.
 */
export function parseHttpFile(content: string): {
  method: string;
  endpoint: string;
  body: string;
  queryParams: Record<string, string>;
} {
  const queryParams: Record<string, string> = {};
  let method = 'GET';
  let endpoint = '';
  const rawLines = content.split('\n');
  const requestLine = /^(GET|POST|PUT|DELETE|PATCH)\s+(.+)$/i;
  let i = 0;
  // Locate the "METHOD URL" request line; anything before it is ignored.
  while (i < rawLines.length) {
    const candidate = rawLines[i].trim();
    i++;
    if (candidate === '') continue;
    const match = candidate.match(requestLine);
    if (!match) continue;
    method = match[1].toUpperCase();
    let url = match[2].trim();
    const qIdx = url.indexOf('?');
    if (qIdx !== -1) {
      // Split the query string off the URL into queryParams.
      for (const pair of url.substring(qIdx + 1).split('&')) {
        const eqIdx = pair.indexOf('=');
        if (eqIdx !== -1) {
          queryParams[pair.substring(0, eqIdx)] = pair.substring(eqIdx + 1);
        }
      }
      url = url.substring(0, qIdx);
    }
    endpoint = url;
    break;
  }
  // Skip header lines until the blank separator line.
  while (i < rawLines.length) {
    const line = rawLines[i].trim();
    i++;
    if (line === '') break;
  }
  // The remainder (if any) is the body.
  const body = rawLines.slice(i).join('\n').trim();
  return { method, endpoint, body, queryParams };
}
/**
 * Reconstruct an endpoint object (API shape) from its on-disk files.
 * @returns the endpoint, or null when the directory has no endpoint.json
 */
export function readEndpointFromDisk(endpointDir: string): any | null {
  const metaPath = path.join(endpointDir, 'endpoint.json');
  if (!fs.existsSync(metaPath)) return null;
  const ep: any = { ...JSON.parse(fs.readFileSync(metaPath, 'utf-8')) };

  // SQL endpoints keep their query in query.sql.
  const sqlPath = path.join(endpointDir, 'query.sql');
  if (fs.existsSync(sqlPath)) {
    ep.sql_query = fs.readFileSync(sqlPath, 'utf-8');
  }

  // Script endpoints: main.js wins over main.py when both exist.
  for (const [ext, language] of [['js', 'javascript'], ['py', 'python']] as const) {
    const scriptPath = path.join(endpointDir, `main.${ext}`);
    if (fs.existsSync(scriptPath)) {
      ep.script_code = fs.readFileSync(scriptPath, 'utf-8');
      ep.script_language = language;
      break;
    }
  }

  // Script sub-queries, described by queries/_index.json.
  const indexPath = path.join(endpointDir, 'queries', '_index.json');
  if (fs.existsSync(indexPath)) {
    const entries = JSON.parse(fs.readFileSync(indexPath, 'utf-8'));
    ep.script_queries = entries.map((sq: any) => {
      const query: any = {
        name: sq.name,
        database_id: sq.database_id || undefined,
      };
      if (!sq.file) return query;
      const filePath = path.join(endpointDir, 'queries', sq.file);
      if (!fs.existsSync(filePath)) return query;
      const raw = fs.readFileSync(filePath, 'utf-8');
      if (sq.type === 'aql' || sq.file.endsWith('.http')) {
        const parsed = parseHttpFile(raw);
        query.aql_method = parsed.method;
        query.aql_endpoint = parsed.endpoint;
        query.aql_body = parsed.body;
        query.aql_query_params = parsed.queryParams;
      } else {
        query.sql = raw;
      }
      return query;
    });
  }

  // AQL endpoints keep their request in request.http.
  const httpPath = path.join(endpointDir, 'request.http');
  if (fs.existsSync(httpPath)) {
    const parsed = parseHttpFile(fs.readFileSync(httpPath, 'utf-8'));
    ep.aql_method = parsed.method;
    ep.aql_endpoint = parsed.endpoint;
    ep.aql_body = parsed.body;
    ep.aql_query_params = parsed.queryParams;
  }
  return ep;
}
/**
 * Find all endpoint directories (those containing endpoint.json) under
 * `dir`, recursing into non-endpoint subdirectories. Dot-directories are
 * skipped entirely.
 */
export function findEndpointDirs(dir: string): string[] {
  if (!fs.existsSync(dir)) return [];
  const found: string[] = [];
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (!entry.isDirectory() || entry.name.startsWith('.')) continue;
    const fullPath = path.join(dir, entry.name);
    if (fs.existsSync(path.join(fullPath, 'endpoint.json'))) {
      found.push(fullPath);
    } else {
      // Not an endpoint — treat it as a folder and recurse.
      for (const nested of findEndpointDirs(fullPath)) {
        found.push(nested);
      }
    }
  }
  return found;
}

42
src/hash.ts Normal file
View File

@@ -0,0 +1,42 @@
import * as crypto from 'crypto';
/**
 * Compute a stable 16-hex-char fingerprint of an endpoint's meaningful
 * content for change detection. Timestamps and other bookkeeping fields
 * are deliberately excluded so the hash only moves when code, queries, or
 * configuration change.
 */
export function computeEndpointHash(ep: any): string {
  // Normalize script sub-queries to a fixed field set before serializing.
  const queries = (ep.script_queries || []).map((sq: any) => ({
    name: sq.name,
    sql: sq.sql || '',
    database_id: sq.database_id || '',
    aql_method: sq.aql_method || '',
    aql_endpoint: sq.aql_endpoint || '',
    aql_body: sq.aql_body || '',
    aql_query_params: JSON.stringify(sq.aql_query_params || {}),
  }));
  // Key order is fixed by this literal; changing it would change hashes.
  const fingerprint = {
    name: ep.name || '',
    description: ep.description || '',
    method: ep.method || '',
    path: ep.path || '',
    execution_type: ep.execution_type || 'sql',
    database_id: ep.database_id || '',
    sql_query: ep.sql_query || '',
    parameters: JSON.stringify(ep.parameters || []),
    script_language: ep.script_language || '',
    script_code: ep.script_code || '',
    script_queries: JSON.stringify(queries),
    aql_method: ep.aql_method || '',
    aql_endpoint: ep.aql_endpoint || '',
    aql_body: ep.aql_body || '',
    aql_query_params: JSON.stringify(ep.aql_query_params || {}),
    is_public: String(ep.is_public || false),
    enable_logging: String(ep.enable_logging || false),
    detailed_response: String(ep.detailed_response || false),
    response_schema: JSON.stringify(ep.response_schema || null),
  };
  return crypto
    .createHash('sha256')
    .update(JSON.stringify(fingerprint))
    .digest('hex')
    .slice(0, 16);
}

67
src/index.ts Normal file
View File

@@ -0,0 +1,67 @@
#!/usr/bin/env node
import { Command } from 'commander';
import chalk from 'chalk';
import { initCommand } from './commands/init';
import { pullCommand } from './commands/pull';
import { pushCommand } from './commands/push';
import { statusCommand } from './commands/status';
const program = new Command();

/** Wrap a command handler: print the error in red and exit(1) on failure. */
const runAction =
  (handler: (...args: any[]) => Promise<void>) =>
  async (...args: any[]) => {
    try {
      await handler(...args);
    } catch (err: any) {
      console.error(chalk.red(`Error: ${err.message}`));
      process.exit(1);
    }
  };

program
  .name('kisync')
  .description('CLI tool for syncing local folders with KIS API Builder')
  .version('1.0.0');

program
  .command('init')
  .description('Initialize a new kisync project in the current directory')
  .action(runAction(() => initCommand()));

program
  .command('pull')
  .description('Download all endpoints from server to local files')
  .option('--force', 'Overwrite local changes without prompting')
  .action(runAction((opts) => pullCommand(opts.force)));

program
  .command('push')
  .description('Upload local changes to server')
  .option('--force', 'Force push, overwriting server changes')
  .action(runAction((opts) => pushCommand(opts.force)));

program
  .command('status')
  .description('Show sync status — what changed locally and on server')
  .action(runAction(() => statusCommand()));

program.parse();