Add full REST API for all deployment operations (projects, servers, docker)
Port all IPC handlers to HTTP endpoints so the UI and LLM use the same API. Adds routes/projects.js (scan, compare, init), routes/servers.js (CRUD, containers, logs), routes/docker.js (build, deploy, pull, vscode-diff). Enhanced ssh.js with full SSHService class (SFTP upload/download). Updated renderer api.js to use fetch instead of window.api IPC. Added concurrently for npm run dev (API + Vite + Electron). OpenAPI spec now covers all 24 endpoints. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
231
api/routes/docker.js
Normal file
231
api/routes/docker.js
Normal file
@@ -0,0 +1,231 @@
|
||||
import { Router } from 'express';
|
||||
import { existsSync, readFileSync, readdirSync, mkdirSync } from 'fs';
|
||||
import { join, basename, dirname } from 'path';
|
||||
import { tmpdir } from 'os';
|
||||
import { exec } from 'child_process';
|
||||
import { loadDeployConfig, getServerSshConfig } from '../lib/config.js';
|
||||
import { SSHService } from '../lib/ssh.js';
|
||||
|
||||
const router = Router();
|
||||
|
||||
/**
 * Wrap an async route handler so its resolved value is sent as JSON and any
 * thrown error becomes a JSON error response (err.status or 500).
 */
function wrap(fn) {
  return async (req, res) => {
    let result;
    try {
      result = await fn(req, res);
    } catch (err) {
      res.status(err.status || 500).json({ error: err.message });
      return;
    }
    // Handlers may have streamed/answered already; only reply if they did not.
    if (!res.headersSent) res.json(result);
  };
}
|
||||
|
||||
/**
 * Read <projectPath>/docker-deployment.json.
 * Returns the parsed object, or null when the file is absent or unparsable.
 */
function loadProjectDeployConfig(projectPath) {
  const file = join(projectPath, 'docker-deployment.json');
  if (!existsSync(file)) return null;
  try {
    const raw = readFileSync(file, 'utf-8');
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
|
||||
|
||||
/**
 * Build the list of files to upload for a deployment: the image tar,
 * docker-compose.yml and .env by default, plus any extras declared under
 * deployment.uploadFiles in the project config. String entries ending in '/'
 * are treated as directories; object entries are taken as-is.
 */
function getUploadFiles(projectPath, projectConfig) {
  const projectName = basename(projectPath);
  const files = [
    { local: `${projectName}.tar`, remote: `${projectName}.tar`, type: 'file' },
    { local: 'docker-compose.yml', remote: 'docker-compose.yml', type: 'file' },
    { local: '.env', remote: '.env', type: 'file' },
  ];

  for (const entry of projectConfig?.deployment?.uploadFiles || []) {
    if (typeof entry === 'string') {
      const clean = entry.replace(/\/$/, '');
      files.push({ local: clean, remote: clean, type: entry.endsWith('/') ? 'directory' : 'file' });
    } else {
      files.push(entry);
    }
  }

  return files;
}
|
||||
|
||||
// POST /api/docker/build — run the project's build-image-tar.ps1 to produce an image tar
router.post('/build', wrap(async (req) => {
  const { projectPath } = req.body;
  if (!projectPath) throw Object.assign(new Error('projectPath is required'), { status: 400 });

  const scriptPath = join(projectPath, 'build-image-tar.ps1');
  if (!existsSync(scriptPath)) {
    throw Object.assign(new Error('No build-image-tar.ps1 found in project'), { status: 400 });
  }

  // Adapt callback-style exec into a promise. Build failures are reported in the
  // payload (not as HTTP errors) so the UI can display stderr to the user.
  return new Promise((resolve) => {
    exec(`powershell -ExecutionPolicy Bypass -File "${scriptPath}"`, { cwd: projectPath }, (error, stdout, stderr) => {
      resolve(error ? { error: error.message, stderr } : { success: true, output: stdout });
    });
  });
}));
|
||||
|
||||
// POST /api/docker/deploy — upload a project's tar + config files over SSH,
// load the image and (re)start the compose stack, then poll a health check.
// Body: { projectPath, serverId, remotePath }.
router.post('/deploy', wrap(async (req) => {
  const { projectPath, serverId, remotePath } = req.body;
  // Validate early: the original crashed with an opaque TypeError on missing params.
  if (!projectPath || !serverId || !remotePath) {
    throw Object.assign(new Error('projectPath, serverId and remotePath are required'), { status: 400 });
  }
  const config = loadDeployConfig();
  const server = config.servers.find(s => s.id === serverId);
  if (!server) throw Object.assign(new Error('Server not found'), { status: 404 });

  const sshConfig = getServerSshConfig(server);
  const ssh = new SSHService(sshConfig);
  const projectName = basename(projectPath);
  const password = sshConfig.password;
  // NOTE(review): the password is interpolated into a remote shell command; a
  // password containing a single quote would break/expose the command — confirm
  // upstream validation or switch to an askpass/NOPASSWD mechanism.
  const sudoPrefix = server.useSudo ? `echo '${password}' | sudo -S ` : '';

  const projectConfig = loadProjectDeployConfig(projectPath);
  const uploadFiles = getUploadFiles(projectPath, projectConfig);
  const uploadedFiles = [];

  try {
    await ssh.connect();

    // Ensure remote directory exists
    await ssh.exec(`mkdir -p ${remotePath}`);

    // Delete old tar file with sudo if needed
    if (server.useSudo) {
      await ssh.exec(`echo '${password}' | sudo -S rm -f ${remotePath}/${projectName}.tar 2>/dev/null || true`);
    }

    // Upload all configured files; entries missing locally are silently skipped.
    for (const fileSpec of uploadFiles) {
      const localPath = join(projectPath, fileSpec.local);
      if (!existsSync(localPath)) continue;

      const remoteDest = `${remotePath}/${fileSpec.remote}`;
      if (fileSpec.type === 'directory') {
        await ssh.uploadDirectory(localPath, remoteDest);
        uploadedFiles.push(`${fileSpec.local}/ (directory)`);
      } else {
        await ssh.uploadFile(localPath, remoteDest);
        uploadedFiles.push(fileSpec.local);
      }
    }

    // Load image, stop existing container, start new
    await ssh.exec(`cd ${remotePath} && ${sudoPrefix}docker load -i ${projectName}.tar && ${sudoPrefix}docker compose down 2>/dev/null; ${sudoPrefix}docker compose up -d`);

    // Health check — poll container status (10 tries, 2s apart, ~20s max)
    let healthy = false;
    let status = '';
    for (let i = 0; i < 10; i++) {
      await new Promise(r => setTimeout(r, 2000));
      try {
        status = await ssh.exec(`cd ${remotePath} && ${sudoPrefix}docker compose ps --format "{{.Name}}|{{.Status}}" 2>/dev/null || ${sudoPrefix}docker compose ps`);
        if (status.includes('Up') || status.includes('healthy')) {
          healthy = true;
          break;
        }
      } catch { /* transient failures during health check are expected */ }
    }

    return {
      success: true,
      healthy,
      status,
      uploadedFiles,
      message: healthy ? 'Container started successfully' : 'Container started but health check pending',
    };
  } catch (err) {
    throw new Error(`Deploy failed: ${err.message}`);
  } finally {
    // BUGFIX: always close the SSH session — the original only disconnected on
    // the success path and leaked the connection whenever a step threw.
    try { ssh.disconnect(); } catch { /* already closed */ }
  }
}));
|
||||
|
||||
// POST /api/docker/pull — pull files/directories from a remote server to disk.
// Body: { serverId, files: [{ name, type, remotePath, localPath }] }.
// Per-file failures are collected into `errors` so one bad file does not abort
// the whole batch.
router.post('/pull', wrap(async (req) => {
  const { serverId, files } = req.body;
  const config = loadDeployConfig();
  const server = config.servers.find(s => s.id === serverId);
  if (!server) throw Object.assign(new Error('Server not found'), { status: 404 });

  const sshConfig = getServerSshConfig(server);
  const ssh = new SSHService(sshConfig);
  const pulled = [];
  const errors = [];

  try {
    await ssh.connect();

    for (const file of files) {
      try {
        if (file.type === 'directory') {
          // Recursively mirror a remote directory by parsing `ls -la` output.
          // NOTE(review): parsing ls is fragile for exotic names (newlines,
          // leading spaces) — confirm SSHService offers no native readdir.
          const pullDir = async (remoteDir, localDir) => {
            if (!existsSync(localDir)) mkdirSync(localDir, { recursive: true });
            const result = await ssh.exec(`ls -la ${remoteDir} 2>/dev/null | tail -n +4 || echo ""`);
            const lines = result.split('\n').filter(Boolean);

            for (const line of lines) {
              const parts = line.split(/\s+/);
              if (parts.length < 9) continue; // not a long-listing row
              const isDir = line.startsWith('d');
              const fileName = parts.slice(8).join(' '); // name may contain spaces
              if (fileName === '.' || fileName === '..') continue;

              if (isDir) {
                await pullDir(`${remoteDir}/${fileName}`, join(localDir, fileName));
              } else {
                await ssh.downloadFile(`${remoteDir}/${fileName}`, join(localDir, fileName));
              }
            }
          };
          await pullDir(file.remotePath, file.localPath);
          pulled.push(file.name);
        } else {
          const parentDir = dirname(file.localPath);
          if (!existsSync(parentDir)) mkdirSync(parentDir, { recursive: true });
          await ssh.downloadFile(file.remotePath, file.localPath);
          pulled.push(file.name);
        }
      } catch (err) {
        errors.push({ name: file.name, error: err.message });
      }
    }

    return { success: true, pulled, errors };
  } catch (err) {
    throw new Error(`Pull failed: ${err.message}`);
  } finally {
    // BUGFIX: always close the SSH session — the original leaked the connection
    // when connect() or any non-per-file step threw.
    try { ssh.disconnect(); } catch { /* already closed */ }
  }
}));
|
||||
|
||||
// POST /api/docker/vscode-diff — download a remote file to a temp dir and open
// a VS Code diff against the local copy. Body: { serverId, localPath, remoteFilePath }.
router.post('/vscode-diff', wrap(async (req) => {
  const { serverId, localPath, remoteFilePath } = req.body;
  const config = loadDeployConfig();
  const server = config.servers.find(s => s.id === serverId);
  if (!server) throw Object.assign(new Error('Server not found'), { status: 404 });

  const sshConfig = getServerSshConfig(server);
  const ssh = new SSHService(sshConfig);

  try {
    const tempDir = join(tmpdir(), 'docker-deploy-diff');
    if (!existsSync(tempDir)) mkdirSync(tempDir, { recursive: true });
    const tempFile = join(tempDir, `remote-${basename(localPath)}`);

    try {
      await ssh.connect();
      await ssh.downloadFile(remoteFilePath, tempFile);
    } finally {
      // BUGFIX: the original never disconnected when downloadFile threw,
      // leaking the SSH session.
      try { ssh.disconnect(); } catch { /* already closed */ }
    }

    // `code` failures (CLI not on PATH etc.) are reported in the payload so the
    // UI can show a hint rather than a 500.
    return new Promise((resolve) => {
      exec(`code --diff "${tempFile}" "${localPath}"`, (error) => {
        resolve(error ? { error: error.message } : { success: true });
      });
    });
  } catch (err) {
    throw new Error(`VS Code diff failed: ${err.message}`);
  }
}));
|
||||
|
||||
export default router;
|
||||
258
api/routes/projects.js
Normal file
258
api/routes/projects.js
Normal file
@@ -0,0 +1,258 @@
|
||||
import { Router } from 'express';
|
||||
import { readdirSync, existsSync, readFileSync } from 'fs';
|
||||
import { join, basename, relative } from 'path';
|
||||
import { exec } from 'child_process';
|
||||
import { loadDeployConfig, getProjectsRoot, getServerSshConfig } from '../lib/config.js';
|
||||
import { SSHService } from '../lib/ssh.js';
|
||||
|
||||
const router = Router();
|
||||
|
||||
/**
 * Convert an async handler into an Express middleware: the resolved value is
 * serialized as JSON, thrown errors become { error } with err.status (or 500).
 */
function wrap(fn) {
  return async (req, res) => {
    let result;
    try {
      result = await fn(req, res);
    } catch (err) {
      res.status(err.status || 500).json({ error: err.message });
      return;
    }
    // Skip the default reply when the handler already responded itself.
    if (!res.headersSent) res.json(result);
  };
}
|
||||
|
||||
// ─── Project scanning (ported from app/main/project-scanner.js) ─────
|
||||
|
||||
/**
 * Inspect a project directory for Docker deployment artifacts and return a
 * summary record: which marker files exist, the first *.tar found, parsed
 * docker-deployment.json / coolify.json (null when absent or unparsable),
 * a coarse dockerStatus, and serverId/remotePath derived from the config.
 */
function analyzeProject(projectPath, name) {
  const has = (file) => existsSync(join(projectPath, file));
  const readJson = (file) => {
    try {
      return JSON.parse(readFileSync(join(projectPath, file), 'utf-8'));
    } catch {
      return null; // missing or malformed — callers treat both the same
    }
  };

  const hasDockerfile = has('Dockerfile');
  const hasDockerCompose = has('docker-compose.yml');
  const hasBuildScript = has('build-image-tar.ps1');
  const hasDeployScript = has('deploy-docker-auto.ps1');
  const hasDeploymentConfig = has('docker-deployment.json');
  const hasDockerIgnore = has('.dockerignore');
  const hasCoolifyJson = has('coolify.json');

  let tarFile = null;
  try {
    tarFile = readdirSync(projectPath).find(f => f.endsWith('.tar')) || null;
  } catch { /* unreadable directory */ }

  const deploymentConfig = hasDeploymentConfig ? readJson('docker-deployment.json') : null;
  const coolifyConfig = hasCoolifyJson ? readJson('coolify.json') : null;

  let dockerStatus = 'none';
  if (hasDockerfile && hasDockerCompose) dockerStatus = 'configured';
  else if (hasDockerfile || hasDockerCompose) dockerStatus = 'partial';

  return {
    name,
    path: projectPath,
    hasDockerfile,
    hasDockerCompose,
    hasBuildScript,
    hasDeployScript,
    hasDeploymentConfig,
    hasDockerIgnore,
    hasCoolifyJson,
    tarFile,
    deploymentConfig,
    coolifyConfig,
    dockerStatus,
    serverId: deploymentConfig?.deployment?.serverId || null,
    remotePath: deploymentConfig?.deployment?.targetPath || `~/containers/${name}`,
  };
}
|
||||
|
||||
/**
 * Recursively scan for projects under rootPath, up to maxDepth levels.
 * At depth 0 every top-level directory is analyzed; deeper directories are
 * only included when they contain a Dockerfile. Results accumulate into
 * `projects` (mutated in place). Permission errors are ignored.
 */
function scanDeep(rootPath, currentPath, projects, depth, maxDepth) {
  if (depth > maxDepth) return;

  try {
    const entries = readdirSync(currentPath, { withFileTypes: true });

    if (depth === 0) {
      // At root: analyze each top-level directory
      for (const entry of entries) {
        if (!entry.isDirectory()) continue;
        const projectPath = join(currentPath, entry.name);
        const info = analyzeProject(projectPath, entry.name);
        if (info) projects.push(info);
      }
    } else {
      // Deeper: only include if has Dockerfile
      const hasDockerfile = entries.some(e => e.name === 'Dockerfile');
      if (hasDockerfile) {
        const name = relative(rootPath, currentPath).replace(/\\/g, '/');
        const info = analyzeProject(currentPath, name);
        // BUGFIX: top-level projects with a Dockerfile were added twice —
        // once in the depth-0 branch and again here when the recursion
        // reached them at depth 1. Dedupe by path.
        if (info && !projects.some(p => p.path === info.path)) projects.push(info);
      }
    }

    // Recurse into subdirectories, skipping hidden and build-artifact dirs
    if (depth < maxDepth) {
      for (const entry of entries) {
        if (!entry.isDirectory()) continue;
        if (entry.name.startsWith('.') || ['node_modules', 'dist', 'build'].includes(entry.name)) continue;
        scanDeep(rootPath, join(currentPath, entry.name), projects, depth + 1, maxDepth);
      }
    }
  } catch { /* ignore permission errors */ }
}
|
||||
|
||||
// ─── Routes ─────────────────────────────────────────────────────────
|
||||
|
||||
// GET /api/projects — scan the local projects root, up to 2 levels deep
router.get('/', wrap(async () => {
  const root = getProjectsRoot();
  const projects = [];
  scanDeep(root, root, projects, 0, 2);
  return projects;
}));
|
||||
|
||||
// POST /api/projects/compare — compare local project files against the copies
// deployed on a server. Body: { projectPath, serverId, remotePath }.
// Returns { success, diff: { files: [{ name, type, status, ... }] } } where
// status is one of: match, different, local-missing, remote-missing,
// both-exist (dirs), neither, unknown.
router.post('/compare', wrap(async (req) => {
  const { projectPath, serverId, remotePath } = req.body;
  const config = loadDeployConfig();
  const server = config.servers.find(s => s.id === serverId);
  if (!server) throw Object.assign(new Error('Server not found'), { status: 404 });

  const sshConfig = getServerSshConfig(server);
  const ssh = new SSHService(sshConfig);

  // Load project config for additional files to compare
  const deployConfigPath = join(projectPath, 'docker-deployment.json');
  let additionalFiles = [];
  if (existsSync(deployConfigPath)) {
    try {
      const pc = JSON.parse(readFileSync(deployConfigPath, 'utf-8'));
      additionalFiles = pc?.deployment?.uploadFiles || [];
    } catch { /* malformed config — treat as no extras */ }
  }

  const diff = { files: [] };

  try {
    await ssh.connect();

    // Compare docker-compose.yml
    const localComposePath = join(projectPath, 'docker-compose.yml');
    const composeFile = { name: 'docker-compose.yml', type: 'file', status: 'unknown', localPath: localComposePath, remotePath: `${remotePath}/docker-compose.yml` };

    if (existsSync(localComposePath)) {
      composeFile.localContent = readFileSync(localComposePath, 'utf-8');
      try {
        composeFile.remoteContent = await ssh.exec(`cat ${remotePath}/docker-compose.yml 2>/dev/null`);
        composeFile.status = composeFile.localContent.trim() === composeFile.remoteContent.trim() ? 'match' : 'different';
      } catch {
        composeFile.status = 'remote-missing';
      }
    } else {
      composeFile.status = 'local-missing';
    }
    diff.files.push(composeFile);

    // Compare .env (flagged sensitive so the UI can mask its contents)
    const localEnvPath = join(projectPath, '.env');
    const envFile = { name: '.env', type: 'file', status: 'unknown', localPath: localEnvPath, remotePath: `${remotePath}/.env`, sensitive: true };
    const hasLocalEnv = existsSync(localEnvPath);
    if (hasLocalEnv) envFile.localContent = readFileSync(localEnvPath, 'utf-8');

    let hasRemoteEnv = false;
    try {
      await ssh.exec(`test -f ${remotePath}/.env`);
      hasRemoteEnv = true;
      try { envFile.remoteContent = await ssh.exec(`cat ${remotePath}/.env 2>/dev/null`); } catch { /* unreadable */ }
    } catch { /* no remote .env */ }

    if (hasLocalEnv && hasRemoteEnv) {
      envFile.status = (envFile.localContent && envFile.remoteContent && envFile.localContent.trim() === envFile.remoteContent.trim()) ? 'match' : 'different';
    } else if (hasLocalEnv) {
      envFile.status = 'remote-missing';
    } else if (hasRemoteEnv) {
      envFile.status = 'local-missing';
    } else {
      envFile.status = 'neither';
    }
    diff.files.push(envFile);

    // Compare data directory (existence only — no content diff for dirs)
    const localDataPath = join(projectPath, 'data');
    const hasLocalData = existsSync(localDataPath);
    let hasRemoteData = false;
    try { await ssh.exec(`test -d ${remotePath}/data`); hasRemoteData = true; } catch { /* no */ }

    diff.files.push({
      name: 'data/',
      type: 'directory',
      status: hasLocalData && hasRemoteData ? 'both-exist' : hasLocalData ? 'remote-missing' : hasRemoteData ? 'local-missing' : 'neither',
      localPath: localDataPath,
      remotePath: `${remotePath}/data`,
    });

    // Additional files from project config (skip the ones already handled)
    for (const fileSpec of additionalFiles) {
      const fileName = typeof fileSpec === 'string' ? fileSpec : fileSpec.local;
      if (['docker-compose.yml', '.env', 'data', 'data/'].includes(fileName)) continue;

      const isDir = fileName.endsWith('/');
      const cleanName = fileName.replace(/\/$/, '');
      const localFilePath = join(projectPath, cleanName);
      const remoteFilePath = `${remotePath}/${cleanName}`;
      const fileInfo = { name: fileName, type: isDir ? 'directory' : 'file', status: 'unknown', localPath: localFilePath, remotePath: remoteFilePath };
      const hasLocal = existsSync(localFilePath);

      if (isDir) {
        let hasRemote = false;
        try { await ssh.exec(`test -d ${remoteFilePath}`); hasRemote = true; } catch { /* no */ }
        fileInfo.status = hasLocal && hasRemote ? 'both-exist' : hasLocal ? 'remote-missing' : hasRemote ? 'local-missing' : 'neither';
      } else {
        if (hasLocal) try { fileInfo.localContent = readFileSync(localFilePath, 'utf-8'); } catch { fileInfo.localContent = null; }
        try {
          fileInfo.remoteContent = await ssh.exec(`cat ${remoteFilePath} 2>/dev/null`);
          if (hasLocal && fileInfo.localContent != null) {
            fileInfo.status = fileInfo.localContent.trim() === fileInfo.remoteContent.trim() ? 'match' : 'different';
          } else {
            fileInfo.status = hasLocal ? 'different' : 'local-missing';
          }
        } catch {
          fileInfo.status = hasLocal ? 'remote-missing' : 'neither';
        }
      }
      diff.files.push(fileInfo);
    }
  } catch (err) {
    throw new Error(`Compare failed: ${err.message}`);
  } finally {
    // BUGFIX: always close the SSH session — the original only disconnected on
    // the success path and leaked the connection when any comparison step threw.
    try { ssh.disconnect(); } catch { /* already closed */ }
  }

  return { success: true, diff };
}));
|
||||
|
||||
// POST /api/projects/init — run the deployment CLI's `init` on a project.
// CLI failures are returned in the payload (not as HTTP errors) so the UI can
// surface stderr.
router.post('/init', wrap(async (req) => {
  const { projectPath } = req.body;
  if (!projectPath) throw Object.assign(new Error('projectPath is required'), { status: 400 });

  // The CLI lives in a sibling repo next to the projects root.
  const cliPath = join(getProjectsRoot(), '..', 'idea.llm.gitea.repo.docker.deployment');
  const cmd = `node cli/index.js init "${projectPath}" --no-interactive`;

  return new Promise((resolve) => {
    exec(cmd, { cwd: cliPath }, (error, stdout, stderr) => {
      resolve(error
        ? { error: error.message, stderr }
        : { success: true, output: stdout + (stderr || '') });
    });
  });
}));
|
||||
|
||||
export default router;
|
||||
153
api/routes/servers.js
Normal file
153
api/routes/servers.js
Normal file
@@ -0,0 +1,153 @@
|
||||
import { Router } from 'express';
|
||||
import { loadDeployConfig, saveDeployConfig, getServerSshConfig } from '../lib/config.js';
|
||||
import { SSHService } from '../lib/ssh.js';
|
||||
|
||||
const router = Router();
|
||||
|
||||
/**
 * Middleware adapter: runs an async handler, JSON-encodes its return value,
 * and maps thrown errors to { error } responses using err.status (default 500).
 */
function wrap(fn) {
  return async (req, res) => {
    let result;
    try {
      result = await fn(req, res);
    } catch (err) {
      res.status(err.status || 500).json({ error: err.message });
      return;
    }
    // Only send the default JSON reply if the handler hasn't responded itself.
    if (!res.headersSent) res.json(result);
  };
}
|
||||
|
||||
// GET /api/servers — list all configured deployment servers (empty list when
// the config has none yet)
router.get('/', wrap(async () => loadDeployConfig().servers || []));
|
||||
|
||||
// POST /api/servers — upsert a server: matched by id when it already exists,
// otherwise assigned a fresh timestamp id and appended.
router.post('/', wrap(async (req) => {
  const server = req.body;
  const config = loadDeployConfig();
  if (!config.servers) config.servers = [];

  const existing = config.servers.findIndex(s => s.id === server.id);
  if (existing === -1) {
    server.id = Date.now().toString();
    config.servers.push(server);
  } else {
    config.servers[existing] = server;
  }

  saveDeployConfig(config);
  return { success: true, server };
}));
|
||||
|
||||
// DELETE /api/servers/:id — remove a server from the config (no-op if absent)
router.delete('/:id', wrap(async (req) => {
  const config = loadDeployConfig();
  const remaining = (config.servers || []).filter(s => s.id !== req.params.id);
  config.servers = remaining;
  saveDeployConfig(config);
  return { success: true };
}));
|
||||
|
||||
// GET /api/servers/:id/scan — list project directories under ~/containers on
// the remote server and summarize the deployment artifacts found in each.
router.get('/:id/scan', wrap(async (req) => {
  const config = loadDeployConfig();
  const server = config.servers.find(s => s.id === req.params.id);
  if (!server) throw Object.assign(new Error('Server not found'), { status: 404 });

  const sshConfig = getServerSshConfig(server);
  const ssh = new SSHService(sshConfig);

  try {
    await ssh.connect();

    const result = await ssh.exec('ls -1 ~/containers 2>/dev/null || echo ""');
    const projectDirs = result.split('\n').filter(Boolean);
    const deployed = [];

    for (const dir of projectDirs) {
      const remotePath = `~/containers/${dir}`;
      try {
        const filesResult = await ssh.exec(`ls -la ${remotePath} 2>/dev/null || echo ""`);
        // Substring checks on the listing — cheap, but can false-positive if a
        // file name merely contains the marker (e.g. "my.env.bak" matches ".env").
        const hasDockerCompose = filesResult.includes('docker-compose.yml');
        const hasEnv = filesResult.includes('.env');
        const hasData = filesResult.includes('data');
        const tarMatch = filesResult.match(/(\S+\.tar)/);

        let dockerComposeContent = null;
        if (hasDockerCompose) {
          try { dockerComposeContent = await ssh.exec(`cat ${remotePath}/docker-compose.yml 2>/dev/null`); } catch { /* unreadable */ }
        }

        deployed.push({
          name: dir,
          remotePath,
          hasDockerCompose,
          hasEnv,
          hasData,
          tarFile: tarMatch ? tarMatch[1] : null,
          dockerComposeContent,
        });
      } catch { /* skip unreadable directory */ }
    }

    return { success: true, deployed };
  } catch (err) {
    throw new Error(`Scan failed: ${err.message}`);
  } finally {
    // BUGFIX: always close the SSH session — the original leaked the connection
    // when connect() or the initial listing threw.
    try { ssh.disconnect(); } catch { /* already closed */ }
  }
}));
|
||||
|
||||
// GET /api/servers/:id/containers — list running docker containers on the
// server as { name, status, ports } records.
router.get('/:id/containers', wrap(async (req) => {
  const config = loadDeployConfig();
  const server = config.servers.find(s => s.id === req.params.id);
  if (!server) throw Object.assign(new Error('Server not found'), { status: 404 });

  const sshConfig = getServerSshConfig(server);
  const ssh = new SSHService(sshConfig);

  try {
    await ssh.connect();
    const result = await ssh.exec('docker ps --format "{{.Names}}|{{.Status}}|{{.Ports}}"');

    const containers = result.split('\n').filter(Boolean).map(line => {
      const [name, status, ports] = line.split('|');
      return { name, status, ports };
    });

    return { success: true, containers };
  } catch (err) {
    throw new Error(`Failed to get containers: ${err.message}`);
  } finally {
    // BUGFIX: always close the SSH session — the original skipped disconnect()
    // whenever exec() threw.
    try { ssh.disconnect(); } catch { /* already closed */ }
  }
}));
|
||||
|
||||
// GET /api/servers/:id/logs — fetch container logs, either for a compose stack
// (?remotePath=...) or a single container (?containerName=...); ?lines caps the tail.
router.get('/:id/logs', wrap(async (req) => {
  const config = loadDeployConfig();
  const server = config.servers.find(s => s.id === req.params.id);
  if (!server) throw Object.assign(new Error('Server not found'), { status: 404 });

  const { containerName, remotePath, lines = 100 } = req.query;
  // BUGFIX: validate before connecting — the original threw this 400 inside the
  // try block, where it was re-wrapped into a generic 500 "Failed to get logs".
  if (!remotePath && !containerName) {
    throw Object.assign(new Error('containerName or remotePath required'), { status: 400 });
  }
  // `lines` arrives as a query string and is interpolated into a shell command;
  // coerce it to a positive integer to avoid injection/garbage.
  const parsed = Number.parseInt(lines, 10);
  const tail = Number.isInteger(parsed) && parsed > 0 ? parsed : 100;

  const sshConfig = getServerSshConfig(server);
  const ssh = new SSHService(sshConfig);
  const password = sshConfig.password;
  // NOTE(review): password interpolated into the remote command — must not
  // contain single quotes; confirm upstream validation.
  const sudoPrefix = server.useSudo ? `echo '${password}' | sudo -S ` : '';

  try {
    await ssh.connect();
    const logs = remotePath
      ? await ssh.exec(`cd ${remotePath} && ${sudoPrefix}docker compose logs --tail ${tail} 2>&1`)
      : await ssh.exec(`${sudoPrefix}docker logs ${containerName} --tail ${tail} 2>&1`);
    return { success: true, logs };
  } catch (err) {
    throw new Error(`Failed to get logs: ${err.message}`);
  } finally {
    // BUGFIX: always close the SSH session — the original leaked it on error.
    try { ssh.disconnect(); } catch { /* already closed */ }
  }
}));
|
||||
|
||||
export default router;
|
||||
Reference in New Issue
Block a user