Files
idea.llm.gitea.repo.docker.…/api/routes/docker.js
Clint Masden feec35ffce Add full REST API for all deployment operations (projects, servers, docker)
Port all IPC handlers to HTTP endpoints so the UI and LLM use the same
API. Adds routes/projects.js (scan, compare, init), routes/servers.js
(CRUD, containers, logs), routes/docker.js (build, deploy, pull, vscode-diff).
Enhanced ssh.js with full SSHService class (SFTP upload/download).
Updated renderer api.js to use fetch instead of window.api IPC.
Added concurrently for npm run dev (API + Vite + Electron).
OpenAPI spec now covers all 24 endpoints.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-27 11:17:40 -06:00

232 lines
8.0 KiB
JavaScript

import { exec, execFile } from 'child_process';
import { existsSync, readFileSync, readdirSync, mkdirSync } from 'fs';
import { tmpdir } from 'os';
import { join, basename, dirname } from 'path';

import { Router } from 'express';

import { loadDeployConfig, getServerSshConfig } from '../lib/config.js';
import { SSHService } from '../lib/ssh.js';
const router = Router();
/**
 * Wrap an async route handler so its resolved value is sent as JSON and any
 * thrown error becomes a JSON error payload.
 *
 * Errors use `err.status` when set (e.g. 400/404 thrown by handlers below),
 * otherwise 500. If the handler already sent a response, the resolved value
 * is discarded.
 *
 * @param {(req, res) => Promise<any>} fn - async handler returning the body
 * @returns {(req, res) => Promise<void>} Express-compatible handler
 */
function wrap(fn) {
  return async (req, res) => {
    try {
      const result = await fn(req, res);
      if (!res.headersSent) res.json(result);
    } catch (err) {
      // Fix: if headers were already sent when the error surfaced,
      // res.status().json() would itself throw — bail out instead.
      if (res.headersSent) return;
      res.status(err.status || 500).json({ error: err.message });
    }
  };
}
/**
 * Read and parse a project's `docker-deployment.json`.
 *
 * @param {string} projectPath - absolute path to the project directory
 * @returns {object|null} parsed config, or null when the file is absent
 *   or contains invalid JSON (best-effort by design)
 */
function loadProjectDeployConfig(projectPath) {
  const configFile = join(projectPath, 'docker-deployment.json');
  if (!existsSync(configFile)) {
    return null;
  }
  let parsed = null;
  try {
    parsed = JSON.parse(readFileSync(configFile, 'utf-8'));
  } catch {
    // Malformed JSON is treated the same as a missing config file.
  }
  return parsed;
}
/**
 * Build the list of artifacts to upload for a deployment.
 *
 * Always includes the image tar (named after the project directory),
 * docker-compose.yml and .env; then appends entries from
 * `deployment.uploadFiles` in the project config. A string entry with a
 * trailing slash denotes a directory; object entries are used verbatim.
 *
 * @param {string} projectPath - local project directory
 * @param {object|null} projectConfig - parsed docker-deployment.json, if any
 * @returns {Array<{local: string, remote: string, type: string}>}
 */
function getUploadFiles(projectPath, projectConfig) {
  const name = basename(projectPath);
  const specs = [
    { local: `${name}.tar`, remote: `${name}.tar`, type: 'file' },
    { local: 'docker-compose.yml', remote: 'docker-compose.yml', type: 'file' },
    { local: '.env', remote: '.env', type: 'file' },
  ];
  for (const entry of projectConfig?.deployment?.uploadFiles ?? []) {
    if (typeof entry === 'string') {
      const type = entry.endsWith('/') ? 'directory' : 'file';
      const stripped = entry.replace(/\/$/, '');
      specs.push({ local: stripped, remote: stripped, type });
    } else {
      specs.push(entry);
    }
  }
  return specs;
}
// POST /api/docker/build — build the image tar for a project by running its
// build-image-tar.ps1 script. Build failures are reported in the response
// body (HTTP 200) rather than as an HTTP error, matching the original
// IPC-style contract the UI expects.
router.post('/build', wrap(async (req) => {
  const { projectPath } = req.body;
  if (!projectPath) throw Object.assign(new Error('projectPath is required'), { status: 400 });
  const scriptPath = join(projectPath, 'build-image-tar.ps1');
  if (!existsSync(scriptPath)) {
    throw Object.assign(new Error('No build-image-tar.ps1 found in project'), { status: 400 });
  }
  return new Promise((resolve) => {
    // Fix: use execFile with an argv array instead of interpolating the
    // path into a shell string — quotes/metacharacters in projectPath can
    // no longer be interpreted as shell syntax.
    execFile(
      'powershell',
      ['-ExecutionPolicy', 'Bypass', '-File', scriptPath],
      { cwd: projectPath },
      (error, stdout, stderr) => {
        if (error) {
          resolve({ error: error.message, stderr });
        } else {
          resolve({ success: true, output: stdout });
        }
      },
    );
  });
}));
// POST /api/docker/deploy — upload build artifacts to a server via SSH/SFTP,
// load the image, restart the compose stack, and poll for health.
router.post('/deploy', wrap(async (req) => {
  const { projectPath, serverId, remotePath } = req.body;
  const config = loadDeployConfig();
  const server = config.servers.find(s => s.id === serverId);
  if (!server) throw Object.assign(new Error('Server not found'), { status: 404 });
  const sshConfig = getServerSshConfig(server);
  const ssh = new SSHService(sshConfig);
  const projectName = basename(projectPath);
  const password = sshConfig.password;
  // NOTE(review): piping the password into `sudo -S` exposes it in the remote
  // process list; a NOPASSWD sudoers rule for docker would avoid this.
  const sudoPrefix = server.useSudo ? `echo '${password}' | sudo -S ` : '';
  const projectConfig = loadProjectDeployConfig(projectPath);
  const uploadFiles = getUploadFiles(projectPath, projectConfig);
  const uploadedFiles = [];
  let connected = false;
  try {
    await ssh.connect();
    connected = true;
    // Ensure remote directory exists
    await ssh.exec(`mkdir -p ${remotePath}`);
    // Delete old tar file with sudo if needed
    if (server.useSudo) {
      await ssh.exec(`echo '${password}' | sudo -S rm -f ${remotePath}/${projectName}.tar 2>/dev/null || true`);
    }
    // Upload all configured files; entries missing locally are skipped.
    for (const fileSpec of uploadFiles) {
      const localPath = join(projectPath, fileSpec.local);
      if (!existsSync(localPath)) continue;
      const remoteDest = `${remotePath}/${fileSpec.remote}`;
      if (fileSpec.type === 'directory') {
        await ssh.uploadDirectory(localPath, remoteDest);
        uploadedFiles.push(`${fileSpec.local}/ (directory)`);
      } else {
        await ssh.uploadFile(localPath, remoteDest);
        uploadedFiles.push(fileSpec.local);
      }
    }
    // Load image, stop existing container, start new
    await ssh.exec(`cd ${remotePath} && ${sudoPrefix}docker load -i ${projectName}.tar && ${sudoPrefix}docker compose down 2>/dev/null; ${sudoPrefix}docker compose up -d`);
    // Health check — poll up to 10 times (~20 s total) for Up/healthy status.
    let healthy = false;
    let status = '';
    for (let i = 0; i < 10; i++) {
      await new Promise(r => setTimeout(r, 2000));
      try {
        status = await ssh.exec(`cd ${remotePath} && ${sudoPrefix}docker compose ps --format "{{.Name}}|{{.Status}}" 2>/dev/null || ${sudoPrefix}docker compose ps`);
        if (status.includes('Up') || status.includes('healthy')) {
          healthy = true;
          break;
        }
      } catch { /* transient `ps` failures are ignored while polling */ }
    }
    return {
      success: true,
      healthy,
      status,
      uploadedFiles,
      message: healthy ? 'Container started successfully' : 'Container started but health check pending',
    };
  } catch (err) {
    throw new Error(`Deploy failed: ${err.message}`, { cause: err });
  } finally {
    // Fix: the original only disconnected on the success path, leaking the
    // SSH connection whenever any step after connect() threw.
    if (connected) ssh.disconnect();
  }
}));
// POST /api/docker/pull — pull file(s)/directories from a remote server to
// local paths. Per-file failures are collected in `errors` instead of
// aborting the whole batch.
router.post('/pull', wrap(async (req) => {
  const { serverId, files } = req.body;
  const config = loadDeployConfig();
  const server = config.servers.find(s => s.id === serverId);
  if (!server) throw Object.assign(new Error('Server not found'), { status: 404 });
  const sshConfig = getServerSshConfig(server);
  const ssh = new SSHService(sshConfig);
  const pulled = [];
  const errors = [];
  let connected = false;
  try {
    await ssh.connect();
    connected = true;
    for (const file of files) {
      try {
        if (file.type === 'directory') {
          // Recursively mirror a remote directory by parsing `ls -la` output.
          // NOTE(review): parsing ls is fragile for names containing newlines;
          // an SFTP readdir would be more robust — confirm SSHService support.
          const pullDir = async (remoteDir, localDir) => {
            if (!existsSync(localDir)) mkdirSync(localDir, { recursive: true });
            const listing = await ssh.exec(`ls -la ${remoteDir} 2>/dev/null | tail -n +4 || echo ""`);
            for (const line of listing.split('\n').filter(Boolean)) {
              const parts = line.split(/\s+/);
              if (parts.length < 9) continue; // not a long-format entry
              const isDir = line.startsWith('d');
              const fileName = parts.slice(8).join(' '); // name may contain spaces
              if (fileName === '.' || fileName === '..') continue;
              if (isDir) {
                await pullDir(`${remoteDir}/${fileName}`, join(localDir, fileName));
              } else {
                await ssh.downloadFile(`${remoteDir}/${fileName}`, join(localDir, fileName));
              }
            }
          };
          await pullDir(file.remotePath, file.localPath);
          pulled.push(file.name);
        } else {
          const parentDir = dirname(file.localPath);
          if (!existsSync(parentDir)) mkdirSync(parentDir, { recursive: true });
          await ssh.downloadFile(file.remotePath, file.localPath);
          pulled.push(file.name);
        }
      } catch (err) {
        errors.push({ name: file.name, error: err.message });
      }
    }
    return { success: true, pulled, errors };
  } catch (err) {
    throw new Error(`Pull failed: ${err.message}`, { cause: err });
  } finally {
    // Fix: always release the SSH connection; the original leaked it when
    // connect() or the loop threw.
    if (connected) ssh.disconnect();
  }
}));
// POST /api/docker/vscode-diff — download a remote file to a temp directory
// and open a VS Code diff against the local copy. Failures to launch VS Code
// are reported in the response body (HTTP 200).
router.post('/vscode-diff', wrap(async (req) => {
  const { serverId, localPath, remoteFilePath } = req.body;
  const config = loadDeployConfig();
  const server = config.servers.find(s => s.id === serverId);
  if (!server) throw Object.assign(new Error('Server not found'), { status: 404 });
  const sshConfig = getServerSshConfig(server);
  const ssh = new SSHService(sshConfig);
  let connected = false;
  try {
    const tempDir = join(tmpdir(), 'docker-deploy-diff');
    if (!existsSync(tempDir)) mkdirSync(tempDir, { recursive: true });
    const tempFile = join(tempDir, `remote-${basename(localPath)}`);
    await ssh.connect();
    connected = true;
    await ssh.downloadFile(remoteFilePath, tempFile);
    return new Promise((resolve) => {
      // exec (not execFile) so `code` resolves through the shell/PATH,
      // including the Windows `code.cmd` shim.
      exec(`code --diff "${tempFile}" "${localPath}"`, (error) => {
        if (error) resolve({ error: error.message });
        else resolve({ success: true });
      });
    });
  } catch (err) {
    throw new Error(`VS Code diff failed: ${err.message}`, { cause: err });
  } finally {
    // Fix: the original skipped disconnect() when downloadFile() threw,
    // leaking the SSH connection.
    if (connected) ssh.disconnect();
  }
}));
export default router;