first pass

This commit is contained in:
2026-01-26 22:33:55 -06:00
commit fe66be4aad
37 changed files with 3127 additions and 0 deletions

36
.gitignore vendored Normal file
View File

@@ -0,0 +1,36 @@
# Dependencies
node_modules/
package-lock.json
# Environment files
.env
.env.local
.env.*.local
config.json
# Docker artifacts
*.tar
*.tar.gz
# Logs
logs/
*.log
npm-debug.log*
# OS files
.DS_Store
Thumbs.db
# IDE
.vscode/
.idea/
*.swp
*.swo
# Test outputs
coverage/
.nyc_output/
# Temporary files
tmp/
temp/

139
README.md Normal file
View File

@@ -0,0 +1,139 @@
# Docker Deployment Manager
Automated Docker deployment system for containerizing and deploying projects from Windows to Linux.
## Overview
**Phase 1 (Current):** CLI tool to generate Dockerfiles, docker-compose, and deployment scripts.
**Phase 2 (Planned):** Electron desktop app for central management and visibility across all projects.
## Quick Start
```powershell
# 1. Install dependencies
npm install
# 2. Detect a project type
npm run docker-deploy -- detect "C:\.bucket\Repos.Git\game.justone"
# 3. Initialize Docker config for a project
npm run docker-deploy -- init "C:\.bucket\Repos.Git\game.justone"
# 4. Build and deploy (from the target project directory)
cd C:\.bucket\Repos.Git\game.justone
.\build-image-tar.ps1 # Manual: creates tar, shows instructions
.\deploy-docker-auto.ps1 # Automated: builds, transfers, starts on server
```
## Commands
### detect
Identify project type without generating files.
```powershell
npm run docker-deploy -- detect [path]
npm run docker-deploy -- detect "C:\.bucket\Repos.Git\dotrepo.timer"
```
### init
Generate Docker configuration for a project.
```powershell
npm run docker-deploy -- init [path]
npm run docker-deploy -- init "C:\.bucket\Repos.Git\game.justone"
# Options:
# --no-interactive Skip prompts, use defaults
# --type <type> Force specific project type
# --port <port> Override default port
# --dry-run Show what would be generated
# --overwrite Replace existing files
```
### batch
Operations across multiple projects.
```powershell
# Scan all projects and show report
npm run docker-deploy -- batch detect --root "C:\.bucket\Repos.Git" --report
# Initialize all projects
npm run docker-deploy -- batch init --root "C:\.bucket\Repos.Git"
```
## Supported Project Types
| Type | Detection | Template |
|------|-----------|----------|
| Node.js Express | `package.json` with express | Multi-stage build |
| Node.js Vite+React | `package.json` with vite + react | Nginx static |
| Node.js Vite+React SSR | `package.json` with vite + react + express | Node runtime |
| Python Standard | `requirements.txt` | Python slim |
| Python ML/PyTorch | `requirements.txt` with torch | Python + system deps |
| .NET Blazor | `.csproj` with Blazor | SDK + ASP.NET runtime |
| Static Site | `index.html` | Nginx |
## Generated Files
When you run `init`, these files are created in the target project:
- `Dockerfile` - Container build instructions
- `docker-compose.yml` - Runtime configuration
- `.dockerignore` - Files excluded from image
- `docker-deployment.json` - Project configuration
- `.env.example` - Environment variables template
- `deploy-docker-auto.ps1` - Automated SSH deployment script
- `build-image-tar.ps1` - Manual build script
- `README.DOCKER.md` - Deployment documentation
## Deployment Workflow
### Option 1: Automated (SSH)
```powershell
.\deploy-docker-auto.ps1
# Builds image → creates tar → SCPs to server → loads → starts
```
### Option 2: Manual
```powershell
.\build-image-tar.ps1
# Then follow the printed instructions to SCP and deploy
```
### On the Linux Server
```bash
cd ~/containers/project-name/files
docker load -i project-name.tar
docker compose up -d
docker compose logs -f
```
## Configuration
### Per-Project: docker-deployment.json
```json
{
"project": { "name": "game-justone", "type": "nodejs-express" },
"runtime": { "port": 3000 },
"deployment": {
"sshHost": "192.168.8.178",
"sshUser": "deployer",
"targetPath": "~/containers/game-justone/files"
}
}
```
### Global: global-deployment-config.json
Default SSH settings used when initializing new projects.
## Phase 2 Roadmap
Electron desktop app with:
- Central view of all 35+ projects
- Docker status: configured / deployed / running
- One-click init, build, deploy
- Real-time logs and status updates
---
Built for managing Docker deployments across the Gitea repository collection.

278
cli/commands/batch.js Normal file
View File

@@ -0,0 +1,278 @@
import chalk from 'chalk';
import { resolve, join, basename } from 'path';
import { existsSync, readdirSync, statSync } from 'fs';
import { detectProject, isValidProject } from '../detectors/index.js';
import { initCommand } from './init.js';
/**
 * Batch command - run one action across every project directory under
 * options.root.
 *
 * @param {string} action  Batch action: 'detect' or 'init'.
 * @param {object} options CLI options (root, filter, exclude, report, force, parallel).
 * @throws {Error} When the root path is missing or the action is unknown.
 */
export async function batchCommand(action, options) {
  const root = resolve(options.root);
  if (!existsSync(root)) {
    throw new Error(`Root path does not exist: ${root}`);
  }

  console.log(chalk.blue('Batch operation:'), action);
  console.log(chalk.gray('Root directory:'), root);
  console.log();

  // Collect candidate projects once, then dispatch on the action.
  const targets = getProjectDirectories(root, options);
  console.log(chalk.gray(`Found ${targets.length} projects`));
  console.log();

  if (action === 'detect') {
    await batchDetect(targets, options);
  } else if (action === 'init') {
    await batchInit(targets, options);
  } else {
    throw new Error(`Unknown batch action: ${action}. Supported: detect, init`);
  }
}
/**
 * Collect candidate project directories directly under rootPath.
 *
 * Applies, in order: directory check, isValidProject gate, optional
 * substring filter (options.filter), and a comma-separated exclusion
 * list (options.exclude). Results are sorted by name.
 *
 * Fix: the exclusion list was previously split/trimmed on every loop
 * iteration; it is loop-invariant, so parse it once up front.
 *
 * @param {string} rootPath Absolute root directory to scan.
 * @param {object} options  May contain `filter` and `exclude` strings.
 * @returns {{name: string, path: string}[]} Sorted project entries.
 */
function getProjectDirectories(rootPath, options) {
  // Parse the exclusion list once; it does not change per entry.
  const excludeList = options.exclude
    ? options.exclude.split(',').map((e) => e.trim())
    : [];
  const projects = [];
  for (const entry of readdirSync(rootPath)) {
    const fullPath = join(rootPath, entry);
    // Skip anything that is not a directory (or cannot be stat'ed,
    // e.g. dangling symlinks).
    try {
      if (!statSync(fullPath).isDirectory()) continue;
    } catch {
      continue;
    }
    // Skip directories that do not look like projects.
    if (!isValidProject(fullPath)) continue;
    // Optional substring filter.
    if (options.filter && !entry.includes(options.filter)) continue;
    // Optional exclusions.
    if (excludeList.some((exc) => entry.includes(exc))) continue;
    projects.push({
      name: entry,
      path: fullPath
    });
  }
  return projects.sort((a, b) => a.name.localeCompare(b.name));
}
/**
 * Batch detect - probe each project's type and accumulate a report.
 *
 * @param {object[]} projects Entries from getProjectDirectories.
 * @param {object}   options  CLI options; `report` triggers printReport.
 * @returns {Promise<object>} { dockerizable, notDockerizable, byType }.
 */
async function batchDetect(projects, options) {
  const summary = {
    dockerizable: [],
    notDockerizable: [],
    byType: {}
  };

  console.log(chalk.blue('Scanning projects...'));
  console.log();

  for (const project of projects) {
    process.stdout.write(chalk.gray(` ${project.name}... `));
    try {
      const detection = await detectProject(project.path);
      const record = { ...project, ...detection };
      if (detection.dockerizable) {
        summary.dockerizable.push(record);
        // Group dockerizable project names by detected type.
        if (!summary.byType[detection.type]) {
          summary.byType[detection.type] = [];
        }
        summary.byType[detection.type].push(project.name);
        console.log(chalk.green(`${detection.type}`));
      } else {
        summary.notDockerizable.push(record);
        console.log(chalk.yellow(`${detection.type} - ${detection.reason}`));
      }
    } catch (error) {
      // Detection failures are reported, never fatal for the batch.
      summary.notDockerizable.push({
        ...project,
        type: 'error',
        reason: error.message
      });
      console.log(chalk.red(`✗ Error: ${error.message}`));
    }
  }

  if (options.report) {
    printReport(summary, projects.length);
  }
  return summary;
}
/**
 * Print the batch-detect report: summary counts, projects grouped by
 * type (known types in a fixed order, then any others), the
 * non-dockerizable list, and a suggested host-port mapping.
 * Output only; `results` is not modified.
 *
 * @param {object} results    { dockerizable, notDockerizable, byType }.
 * @param {number} totalCount Total number of scanned projects.
 */
function printReport(results, totalCount) {
  const heavyRule = chalk.blue('═'.repeat(60));
  const lightRule = chalk.gray('─'.repeat(40));

  console.log();
  console.log(heavyRule);
  console.log(chalk.blue.bold(' BATCH DETECTION REPORT'));
  console.log(heavyRule);
  console.log();

  // Summary counts
  console.log(chalk.white.bold('Summary'));
  console.log(lightRule);
  console.log(` Total projects: ${totalCount}`);
  console.log(` Dockerizable: ${chalk.green(results.dockerizable.length)}`);
  console.log(` Not dockerizable: ${chalk.yellow(results.notDockerizable.length)}`);
  console.log();

  // Projects grouped by type
  console.log(chalk.white.bold('Projects by Type'));
  console.log(lightRule);
  const typeOrder = [
    'nodejs-express',
    'nodejs-vite-react',
    'nodejs-vite-react-ssr',
    'nodejs-generic',
    'python-standard',
    'python-ml-pytorch',
    'dotnet-blazor',
    'dotnet-webapi',
    'static-nginx',
    'flutter-web'
  ];
  // Known types first, preserving the canonical order, then any types
  // not in the list, in insertion order.
  const knownTypes = typeOrder.filter(
    (t) => results.byType[t] && results.byType[t].length > 0
  );
  const otherTypes = Object.keys(results.byType).filter(
    (t) => !typeOrder.includes(t) && results.byType[t].length > 0
  );
  for (const type of [...knownTypes, ...otherTypes]) {
    const names = results.byType[type];
    console.log();
    console.log(chalk.cyan(` ${type} (${names.length}):`));
    for (const name of names) {
      console.log(chalk.gray(` - ${name}`));
    }
  }

  // Non-dockerizable projects with their reasons.
  if (results.notDockerizable.length > 0) {
    console.log();
    console.log(chalk.white.bold('Not Dockerizable'));
    console.log(lightRule);
    for (const p of results.notDockerizable) {
      console.log(chalk.yellow(` - ${p.name}`), chalk.gray(`(${p.reason || p.type})`));
    }
  }
  console.log();
  console.log(heavyRule);

  // Suggested, monotonically increasing host ports; a project's own
  // detected port wins when present.
  if (results.dockerizable.length > 0) {
    console.log();
    console.log(chalk.white.bold('Suggested Port Mapping'));
    console.log(lightRule);
    let nextPort = 3000;
    for (const p of results.dockerizable) {
      const suggestedPort = p.port || nextPort;
      console.log(chalk.gray(` ${p.name}:`), `${suggestedPort}`);
      nextPort = Math.max(nextPort, suggestedPort) + 1;
    }
  }
}
/**
 * Batch init - initialize Docker configuration for multiple projects,
 * processing up to `options.parallel` (default 4) projects concurrently.
 * Already-initialized and non-dockerizable projects are skipped unless
 * options.force is set. Prints per-project status and a final summary.
 *
 * Fix: CLI option values typically arrive as strings; `i += parallel`
 * with a string would concatenate ("0" + "8" → "08") and break the
 * batching loop, so coerce to a number first.
 *
 * @param {object[]} projects Entries from getProjectDirectories.
 * @param {object}   options  CLI options (parallel, force).
 */
async function batchInit(projects, options) {
  console.log(chalk.blue('Initializing Docker configuration...'));
  console.log();
  // Coerce so the slice arithmetic below is numeric, not string concat.
  const parallel = Number(options.parallel) || 4;
  let completed = 0;
  let failed = 0;
  let skipped = 0;
  // Process in batches of `parallel` concurrent inits.
  for (let i = 0; i < projects.length; i += parallel) {
    const batch = projects.slice(i, i + parallel);
    const promises = batch.map(async (project) => {
      try {
        // Skip projects that already carry Docker config, unless forced.
        const hasDockerfile = existsSync(join(project.path, 'Dockerfile'));
        const hasConfig = existsSync(join(project.path, 'docker-deployment.json'));
        if ((hasDockerfile || hasConfig) && !options.force) {
          console.log(chalk.yellow(` ${project.name}: Skipped (already initialized)`));
          skipped++;
          return;
        }
        // Detect first so non-dockerizable projects are skipped, not failed.
        const detection = await detectProject(project.path);
        if (!detection.dockerizable) {
          console.log(chalk.yellow(` ${project.name}: Skipped (${detection.reason})`));
          skipped++;
          return;
        }
        // Initialize with non-interactive mode; --force maps to overwrite.
        await initCommand(project.path, {
          interactive: false,
          overwrite: options.force
        });
        console.log(chalk.green(` ${project.name}: ✓ Initialized`));
        completed++;
      } catch (error) {
        console.log(chalk.red(` ${project.name}: ✗ Failed - ${error.message}`));
        failed++;
      }
    });
    await Promise.all(promises);
  }
  // Summary
  console.log();
  console.log(chalk.blue('Batch init complete:'));
  console.log(chalk.green(` Initialized: ${completed}`));
  console.log(chalk.yellow(` Skipped: ${skipped}`));
  console.log(chalk.red(` Failed: ${failed}`));
}

77
cli/commands/detect.js Normal file
View File

@@ -0,0 +1,77 @@
import chalk from 'chalk';
import { resolve } from 'path';
import { existsSync } from 'fs';
import { detectProject, getProjectInfo } from '../detectors/index.js';
/**
 * Detect command - identify a project's type and print deployment
 * recommendations. With options.json, emits the raw detection result as
 * JSON and prints nothing else.
 *
 * @param {string} path    Project directory to scan (relative or absolute).
 * @param {object} options CLI options ({ json }).
 * @returns {Promise<object>} The detection result.
 * @throws {Error} If the path does not exist.
 */
export async function detectCommand(path, options) {
  const projectPath = resolve(path);
  if (!existsSync(projectPath)) {
    throw new Error(`Path does not exist: ${projectPath}`);
  }

  console.log(chalk.blue('Scanning project...'), projectPath);
  console.log();

  const detection = await detectProject(projectPath);

  // Machine-readable output: dump and stop.
  if (options.json) {
    console.log(JSON.stringify(detection, null, 2));
    return detection;
  }

  // Not dockerizable: short report, then done.
  if (!detection.dockerizable) {
    console.log(chalk.red('✗ Not Dockerizable:'), chalk.bold(detection.type));
    console.log();
    console.log(chalk.gray(' Project:'), detection.projectName);
    console.log(chalk.gray(' Reason: '), detection.reason);
    console.log();
    console.log(chalk.gray('Dockerizable:'), chalk.red('No'));
    return detection;
  }

  // Dockerizable: full details plus optional extras and next step.
  console.log(chalk.green('✓ Detected:'), chalk.bold(detection.description || detection.type));
  console.log();
  console.log(chalk.gray(' Project: '), detection.projectName);
  console.log(chalk.gray(' Type: '), detection.type);
  console.log(chalk.gray(' Template: '), detection.template);
  console.log(chalk.gray(' Port: '), detection.port || 'N/A');
  console.log(chalk.gray(' Entry: '), detection.entryPoint || 'N/A');
  if (detection.buildCommand) {
    console.log(chalk.gray(' Build: '), detection.buildCommand);
  }
  if (detection.note) {
    console.log();
    console.log(chalk.yellow(' Note:'), detection.note);
  }
  console.log();
  console.log(chalk.gray('Dockerizable:'), chalk.green('Yes'));

  // Supplementary counts when the detector can provide them.
  const info = await getProjectInfo(projectPath, detection.type);
  if (info) {
    console.log();
    console.log(chalk.gray('Additional Info:'));
    if (info.dependencies) {
      console.log(chalk.gray(' Dependencies:'), info.dependencies.length);
    }
    if (info.scripts) {
      console.log(chalk.gray(' Scripts:'), info.scripts.join(', '));
    }
    if (info.packages) {
      console.log(chalk.gray(' Packages:'), info.packages.length);
    }
  }
  console.log();
  console.log(chalk.blue('Next step:'), `npm run docker-deploy -- init "${path}"`);
  return detection;
}

706
cli/commands/init.js Normal file
View File

@@ -0,0 +1,706 @@
import chalk from 'chalk';
import inquirer from 'inquirer';
import { resolve, join, basename } from 'path';
import { existsSync, writeFileSync, mkdirSync } from 'fs';
import { detectProject } from '../detectors/index.js';
import { createProjectConfig, saveProjectConfig, loadGlobalConfig } from '../utils/config-manager.js';
import { processTemplate, getTemplateFiles, buildTemplateContext, loadTemplate, renderTemplate, TEMPLATES_DIR } from '../utils/template-engine.js';
/**
 * Init command - generate Docker configuration files for a single project.
 *
 * Flow: detect (or force via --type) the project type, confirm overwrite
 * of any existing Docker files, gather configuration (interactively
 * unless options.interactive === false), then either show a dry-run plan
 * or write the generated files into the project directory.
 *
 * @param {string} path    Project directory (relative or absolute).
 * @param {object} options CLI options: type, port, name, overwrite,
 *                         interactive (false to skip prompts), dryRun.
 * @throws {Error} If the path does not exist.
 */
export async function initCommand(path, options) {
  const projectPath = resolve(path);
  if (!existsSync(projectPath)) {
    throw new Error(`Path does not exist: ${projectPath}`);
  }
  const projectName = basename(projectPath);
  console.log(chalk.blue('Initializing Docker configuration for:'), projectName);
  console.log();
  // Detect project type
  let detection;
  if (options.type) {
    // Force specific type: build a minimal synthetic detection result.
    // NOTE(review): replace('-', '/') only swaps the FIRST hyphen
    // (e.g. 'nodejs-vite-react' -> 'nodejs/vite-react') — presumably this
    // matches the template directory layout; confirm against templates/.
    detection = {
      type: options.type,
      dockerizable: true,
      template: options.type.replace('-', '/'),
      port: options.port || 3000,
      projectName,
      projectPath
    };
    console.log(chalk.yellow('Using forced type:'), options.type);
  } else {
    detection = await detectProject(projectPath);
  }
  if (!detection.dockerizable) {
    console.log(chalk.red('✗ Project is not dockerizable:'), detection.reason);
    return;
  }
  console.log(chalk.green('✓ Detected:'), detection.description || detection.type);
  console.log();
  // Check for existing files; require confirmation (or --overwrite)
  // before anything gets clobbered.
  const existingFiles = checkExistingFiles(projectPath);
  if (existingFiles.length > 0 && !options.overwrite) {
    console.log(chalk.yellow('Existing Docker files found:'));
    existingFiles.forEach(f => console.log(chalk.gray(` - ${f}`)));
    console.log();
    if (options.interactive !== false) {
      const { proceed } = await inquirer.prompt([{
        type: 'confirm',
        name: 'proceed',
        message: 'Overwrite existing files?',
        default: false
      }]);
      if (!proceed) {
        console.log(chalk.yellow('Cancelled.'));
        return;
      }
    } else {
      // Non-interactive runs never overwrite implicitly.
      console.log(chalk.yellow('Use --overwrite to replace existing files.'));
      return;
    }
  }
  // Build the project configuration: prompts by default, defaults +
  // CLI overrides when interactive is disabled.
  let config;
  if (options.interactive !== false) {
    config = await interactiveConfig(projectName, detection, options);
  } else {
    config = createProjectConfig(projectName, detection, {
      port: options.port,
      name: options.name
    });
  }
  // Dry run - just show what would be generated; nothing is written.
  if (options.dryRun) {
    console.log(chalk.blue('Dry run - would generate:'));
    console.log();
    const files = getFilesToGenerate(detection, config);
    files.forEach(f => console.log(chalk.gray(` - ${f.output}`)));
    console.log();
    console.log(chalk.gray('Configuration:'));
    console.log(JSON.stringify(config, null, 2));
    return;
  }
  // Generate files
  console.log();
  console.log(chalk.blue('Generating files...'));
  const generatedFiles = await generateFiles(projectPath, detection, config);
  console.log();
  generatedFiles.forEach(file => {
    console.log(chalk.green('✓'), file);
  });
  console.log();
  console.log(chalk.green('Docker configuration initialized successfully!'));
  console.log();
  console.log(chalk.blue('Next steps:'));
  console.log(chalk.gray(' 1. Review generated files'));
  console.log(chalk.gray(' 2. Create .env file from .env.example (if needed)'));
  console.log(chalk.gray(' 3. Build and test locally:'), 'docker compose up --build');
  console.log(chalk.gray(' 4. Deploy using:'), '.\\deploy-docker-auto.ps1', chalk.gray('or'), '.\\build-image-tar.ps1');
}
/**
 * Gather project configuration interactively via inquirer prompts.
 * Prompt order: container name, port, .env usage, SSH setup (then SSH
 * host/user from global defaults), and optional persistent volumes for
 * express/python projects.
 *
 * @param {string} projectName Directory-derived project name.
 * @param {object} detection   Detection result (type, port, ...).
 * @param {object} options     CLI options used as prompt defaults.
 * @returns {Promise<object>}  Config built by createProjectConfig.
 */
async function interactiveConfig(projectName, detection, options) {
  const globalConfig = loadGlobalConfig();
  // Container names: lowercase, restricted to [a-z0-9-].
  const sanitizedName = projectName.toLowerCase().replace(/[^a-z0-9-]/g, '-');

  const base = await inquirer.prompt([
    {
      type: 'input',
      name: 'containerName',
      message: 'Container name:',
      default: options.name || sanitizedName
    },
    {
      type: 'number',
      name: 'port',
      message: 'Application port:',
      default: options.port || detection.port || 3000
    },
    {
      type: 'confirm',
      name: 'useEnvFile',
      message: 'Use .env file for configuration?',
      default: true
    },
    {
      type: 'confirm',
      name: 'configureSSH',
      message: 'Configure SSH deployment?',
      default: true
    }
  ]);

  let sshConfig = {};
  if (base.configureSSH) {
    sshConfig = await inquirer.prompt([
      {
        type: 'input',
        name: 'sshHost',
        message: 'SSH host:',
        default: globalConfig.deployment?.sshHost || '192.168.8.178'
      },
      {
        type: 'input',
        name: 'sshUser',
        message: 'SSH user:',
        default: globalConfig.deployment?.sshUser || 'deployer'
      }
    ]);
  }

  // Offer persistent volumes only for types that commonly need them.
  let volumes = [];
  const mayNeedVolumes =
    detection.type.includes('express') || detection.type.includes('python');
  if (mayNeedVolumes) {
    const { needsVolumes } = await inquirer.prompt([{
      type: 'confirm',
      name: 'needsVolumes',
      message: 'Does this project need persistent data volumes?',
      default: false
    }]);
    if (needsVolumes) {
      const { volumePath } = await inquirer.prompt([{
        type: 'input',
        name: 'volumePath',
        message: 'Volume mount (local:container):',
        default: './data:/app/data'
      }]);
      volumes = [volumePath];
    }
  }

  return createProjectConfig(projectName, detection, {
    name: base.containerName,
    port: base.port,
    envFile: base.useEnvFile,
    sshHost: sshConfig.sshHost,
    sshUser: sshConfig.sshUser,
    volumes
  });
}
/**
 * Return the subset of standard Docker artifact filenames already
 * present in the project directory.
 *
 * @param {string} projectPath Absolute project directory.
 * @returns {string[]} Names of the Docker files that already exist.
 */
function checkExistingFiles(projectPath) {
  const candidates = [
    'Dockerfile',
    'docker-compose.yml',
    'docker-compose.yaml',
    '.dockerignore',
    'docker-deployment.json'
  ];
  const present = [];
  for (const name of candidates) {
    if (existsSync(join(projectPath, name))) {
      present.push(name);
    }
  }
  return present;
}
/**
 * List the output files that `init` will generate for this project.
 * Static sites (nginx-served) additionally get an nginx.conf.
 *
 * @param {object} detection Detection result (uses .type).
 * @param {object} config    Project config (unused; kept for signature parity).
 * @returns {{output: string}[]} File descriptors with output names.
 */
function getFilesToGenerate(detection, config) {
  const outputs = [
    'Dockerfile',
    'docker-compose.yml',
    '.dockerignore',
    'docker-deployment.json',
    '.env.example',
    'deploy-docker-auto.ps1',
    'build-image-tar.ps1',
    'README.DOCKER.md'
  ];
  // nginx.conf only applies to nginx-served site types.
  if (['static-nginx', 'flutter-web'].includes(detection.type)) {
    outputs.push('nginx.conf');
  }
  return outputs.map((output) => ({ output }));
}
/**
 * Generate all Docker files for the project directory.
 *
 * Each templated file (Dockerfile, docker-compose.yml, .dockerignore,
 * nginx.conf) is rendered from the type-specific template when it loads
 * and renders cleanly; if that throws, a built-in fallback is written
 * instead and the entry is tagged "(fallback)" in the returned list.
 * The remaining files (config JSON, .env.example, PowerShell scripts,
 * README.DOCKER.md) are always generated from code.
 *
 * @param {string} projectPath Absolute project directory to write into.
 * @param {object} detection   Detection result (template, type, ...).
 * @param {object} config      Project configuration to persist.
 * @returns {Promise<string[]>} Names of the files written, in order.
 */
async function generateFiles(projectPath, detection, config) {
  const context = buildTemplateContext(config, detection);
  const generatedFiles = [];
  // 1. Generate Dockerfile (the only fallback that logs a warning)
  try {
    const dockerfileTemplate = loadTemplate(`${detection.template}/Dockerfile.template`);
    const dockerfile = renderTemplate(dockerfileTemplate, context);
    writeFileSync(join(projectPath, 'Dockerfile'), dockerfile);
    generatedFiles.push('Dockerfile');
  } catch (error) {
    console.log(chalk.yellow(`Warning: Could not generate Dockerfile: ${error.message}`));
    // Use fallback template
    const fallbackDockerfile = generateFallbackDockerfile(detection, context);
    writeFileSync(join(projectPath, 'Dockerfile'), fallbackDockerfile);
    generatedFiles.push('Dockerfile (fallback)');
  }
  // 2. Generate docker-compose.yml (silent fallback)
  try {
    const composeTemplate = loadTemplate(`${detection.template}/docker-compose.yml.template`);
    const compose = renderTemplate(composeTemplate, context);
    writeFileSync(join(projectPath, 'docker-compose.yml'), compose);
    generatedFiles.push('docker-compose.yml');
  } catch (error) {
    const fallbackCompose = generateFallbackCompose(detection, context);
    writeFileSync(join(projectPath, 'docker-compose.yml'), fallbackCompose);
    generatedFiles.push('docker-compose.yml (fallback)');
  }
  // 3. Generate .dockerignore (silent fallback)
  try {
    const ignoreTemplate = loadTemplate(`${detection.template}/.dockerignore.template`);
    const dockerignore = renderTemplate(ignoreTemplate, context);
    writeFileSync(join(projectPath, '.dockerignore'), dockerignore);
    generatedFiles.push('.dockerignore');
  } catch (error) {
    const fallbackIgnore = generateFallbackDockerignore(detection);
    writeFileSync(join(projectPath, '.dockerignore'), fallbackIgnore);
    generatedFiles.push('.dockerignore (fallback)');
  }
  // 4. Generate nginx.conf for nginx-served site types only
  if (detection.type === 'static-nginx' || detection.type === 'flutter-web') {
    try {
      const nginxTemplate = loadTemplate(`${detection.template}/nginx.conf.template`);
      const nginx = renderTemplate(nginxTemplate, context);
      writeFileSync(join(projectPath, 'nginx.conf'), nginx);
      generatedFiles.push('nginx.conf');
    } catch (error) {
      const fallbackNginx = generateFallbackNginxConf();
      writeFileSync(join(projectPath, 'nginx.conf'), fallbackNginx);
      generatedFiles.push('nginx.conf (fallback)');
    }
  }
  // 5. Save project config (docker-deployment.json)
  saveProjectConfig(projectPath, config);
  generatedFiles.push('docker-deployment.json');
  // 6. Generate .env.example
  const envExample = generateEnvExample(detection, context);
  writeFileSync(join(projectPath, '.env.example'), envExample);
  generatedFiles.push('.env.example');
  // 7. Generate PowerShell scripts (automated SSH + manual build)
  const deployScript = generateDeployScript(context);
  writeFileSync(join(projectPath, 'deploy-docker-auto.ps1'), deployScript);
  generatedFiles.push('deploy-docker-auto.ps1');
  const buildScript = generateBuildScript(context);
  writeFileSync(join(projectPath, 'build-image-tar.ps1'), buildScript);
  generatedFiles.push('build-image-tar.ps1');
  // 8. Generate README.DOCKER.md
  const readme = generateDockerReadme(detection, context, config);
  writeFileSync(join(projectPath, 'README.DOCKER.md'), readme);
  generatedFiles.push('README.DOCKER.md');
  return generatedFiles;
}
/**
 * Generate a minimal fallback Dockerfile when no type-specific template
 * is available. Covers Node.js, Python, .NET, and (default) static
 * sites served by nginx.
 *
 * Fix: the Node.js variant previously ran
 * `COPY package.json package-lock.json ./`, which hard-fails the build
 * when package-lock.json is absent (it is commonly gitignored — this
 * repo's own .gitignore excludes it), and `npm ci` also requires a
 * lockfile. Use a glob COPY and fall back to `npm install` when no
 * lockfile was copied.
 *
 * @param {object} detection Detection result (uses .type).
 * @param {object} context   Template context (PORT, ENTRY_POINT,
 *                           NODE_VERSION / PYTHON_VERSION / DOTNET_VERSION).
 * @returns {string} Dockerfile contents.
 */
function generateFallbackDockerfile(detection, context) {
  if (detection.type.startsWith('nodejs')) {
    return `FROM node:${context.NODE_VERSION}-alpine
WORKDIR /app
COPY package*.json ./
RUN if [ -f package-lock.json ]; then npm ci --omit=dev; else npm install --omit=dev; fi
COPY . .
ENV NODE_ENV=production
ENV PORT=${context.PORT}
EXPOSE ${context.PORT}
CMD ["node", "${context.ENTRY_POINT}"]
`;
  }
  if (detection.type.startsWith('python')) {
    return `FROM python:${context.PYTHON_VERSION}-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
ENV PYTHONUNBUFFERED=1
EXPOSE ${context.PORT}
CMD ["python", "${context.ENTRY_POINT}"]
`;
  }
  if (detection.type.startsWith('dotnet')) {
    // Two-stage build: SDK image compiles, ASP.NET runtime image serves.
    return `FROM mcr.microsoft.com/dotnet/sdk:${context.DOTNET_VERSION} AS build
WORKDIR /src
COPY . .
RUN dotnet restore
RUN dotnet publish -c Release -o /app/publish
FROM mcr.microsoft.com/dotnet/aspnet:${context.DOTNET_VERSION}
WORKDIR /app
COPY --from=build /app/publish .
ENV ASPNETCORE_URLS=http://+:${context.PORT}
EXPOSE ${context.PORT}
ENTRYPOINT ["dotnet", "${context.ENTRY_POINT}"]
`;
  }
  // Static site fallback: serve the project root via nginx.
  return `FROM nginx:alpine
COPY . /usr/share/nginx/html
EXPOSE 80
CMD ["nginx", "-g", "daemon off;"]
`;
}
/**
 * Generate a fallback docker-compose.yml when no template is available.
 *
 * Emits a single service named after the project: build from the local
 * Dockerfile, restart unless-stopped, host port overridable via
 * HOST_PORT, optional env_file, optional volume mounts, and optional
 * extra_hosts for host-gateway access.
 *
 * NOTE(review): the YAML nesting indentation inside these template
 * literals appears to have been flattened in this view of the file —
 * verify the emitted compose file is valid YAML against the committed
 * original before changing anything here.
 *
 * @param {object} detection Detection result (currently unused here).
 * @param {object} context   Template context (PROJECT_NAME, PORT,
 *                           USE_ENV_FILE, HAS_VOLUMES, VOLUMES, EXTRA_HOSTS).
 * @returns {string} docker-compose.yml contents.
 */
function generateFallbackCompose(detection, context) {
  let compose = `services:
${context.PROJECT_NAME}:
build: .
container_name: ${context.PROJECT_NAME}
restart: unless-stopped
ports:
- "\${HOST_PORT:-${context.PORT}}:${context.PORT}"
`;
  // Optional pass-through of a local .env file.
  if (context.USE_ENV_FILE) {
    compose += ` env_file:
- .env
`;
  }
  // NOTE(review): NODE_ENV is set unconditionally, even for non-Node
  // projects — presumably harmless; confirm this is intended.
  compose += ` environment:
NODE_ENV: production
`;
  if (context.HAS_VOLUMES && context.VOLUMES.length > 0) {
    compose += ` volumes:\n`;
    context.VOLUMES.forEach(vol => {
      compose += ` - ${vol}\n`;
    });
  }
  if (context.EXTRA_HOSTS) {
    compose += ` extra_hosts:
- "host.docker.internal:host-gateway"
`;
  }
  return compose;
}
/**
 * Generate a generic .dockerignore used when no template exists.
 * Excludes dependency dirs, VCS metadata, env files, Docker artifacts,
 * docs, editor config, coverage output and logs from the build context.
 *
 * @param {object} detection Detection result (unused; kept for signature parity).
 * @returns {string} .dockerignore contents, one pattern per line.
 */
function generateFallbackDockerignore(detection) {
  const patterns = [
    'node_modules',
    'npm-debug.log',
    '.git',
    '.gitignore',
    '.env',
    '.env.local',
    '*.tar',
    '*.tar.gz',
    'Dockerfile',
    'docker-compose.yml',
    'docker-compose.yaml',
    '.dockerignore',
    'README.md',
    'README.DOCKER.md',
    '.vscode',
    '.idea',
    'coverage',
    '.nyc_output',
    '*.log'
  ];
  return patterns.join('\n') + '\n';
}
/**
 * Generate a fallback nginx.conf for static-site containers:
 * SPA-style try_files routing to index.html, one-year immutable caching
 * for static assets, and gzip for common text types.
 *
 * NOTE(review): the nginx block indentation inside this template literal
 * appears to have been flattened in this view of the file; nginx does
 * not require indentation, but verify against the committed original.
 *
 * @returns {string} nginx.conf contents.
 */
function generateFallbackNginxConf() {
  return `server {
listen 80;
server_name localhost;
root /usr/share/nginx/html;
index index.html;
location / {
try_files $uri $uri/ /index.html;
}
location ~* \\.(jpg|jpeg|png|gif|ico|css|js|svg|woff|woff2|ttf|eot)$ {
expires 1y;
add_header Cache-Control "public, immutable";
}
gzip on;
gzip_vary on;
gzip_types text/plain text/css application/json application/javascript text/xml application/xml;
}
`;
}
/**
 * Generate the .env.example contents: application port settings plus,
 * when SSH deployment is configured, the SSH host/user consumed by
 * deploy-docker-auto.ps1.
 *
 * @param {object} detection Detection result (unused; kept for signature parity).
 * @param {object} context   Template context (PORT, HAS_SSH, SSH_HOST, SSH_USER).
 * @returns {string} .env.example file contents.
 */
function generateEnvExample(detection, context) {
  const sections = [];
  sections.push(`# Application Configuration
PORT=${context.PORT}
NODE_ENV=production
# Docker Host Port (change to avoid conflicts)
HOST_PORT=${context.PORT}
`);
  if (context.HAS_SSH) {
    sections.push(`
# SSH Deployment (used by deploy-docker-auto.ps1)
SSH_HOST=${context.SSH_HOST}
SSH_USER=${context.SSH_USER}
`);
  }
  return sections.join('');
}
/**
 * Generate deploy-docker-auto.ps1: a PowerShell script that builds the
 * image via docker buildx (linux/amd64 by default), saves it to a tar,
 * SCPs tar + compose file (+ optional .env) to the SSH target, then
 * loads the image and restarts the container there with docker compose.
 *
 * SSH settings are resolved at deploy time from docker-deployment.json
 * when present, otherwise from the values baked in from this context.
 * `\${...}` sequences are escaped in the JS template literal so they
 * reach the generated PowerShell as literal `${...}` interpolations.
 *
 * NOTE(review): script-body indentation appears flattened in this view
 * of the file; PowerShell does not require it, but verify against the
 * committed original.
 *
 * @param {object} context Template context (PROJECT_NAME, SSH_HOST,
 *                         SSH_USER, TARGET_PATH).
 * @returns {string} PowerShell script contents.
 */
function generateDeployScript(context) {
  return `# Docker Deploy Script (SSH Automation)
# Generated by docker-deployment-manager
param(
[string]$Platform = "linux/amd64",
[string]$ImageTag = "${context.PROJECT_NAME}:latest",
[string]$TarFile = "${context.PROJECT_NAME}.tar",
[switch]$SkipBuild,
[switch]$SkipDeploy
)
$ErrorActionPreference = "Stop"
# Load config if exists
$config = $null
if (Test-Path "docker-deployment.json") {
$config = Get-Content "docker-deployment.json" | ConvertFrom-Json
}
if (-not $SkipBuild) {
Write-Host "Building Docker image..." -ForegroundColor Cyan
docker buildx build --platform $Platform -t $ImageTag --load .
if ($LASTEXITCODE -ne 0) { throw "Docker build failed" }
Write-Host "Saving image to tar..." -ForegroundColor Cyan
docker save -o $TarFile $ImageTag
if ($LASTEXITCODE -ne 0) { throw "Docker save failed" }
Write-Host "Created: $TarFile" -ForegroundColor Green
}
if (-not $SkipDeploy) {
$sshHost = if ($config) { $config.deployment.sshHost } else { "${context.SSH_HOST}" }
$sshUser = if ($config) { $config.deployment.sshUser } else { "${context.SSH_USER}" }
$targetPath = if ($config) { $config.deployment.targetPath } else { "${context.TARGET_PATH}" }
if (-not $sshHost -or -not $sshUser) {
Write-Host "SSH not configured. Use build-image-tar.ps1 for manual deployment." -ForegroundColor Yellow
exit 0
}
$sshTarget = "\${sshUser}@\${sshHost}"
Write-Host "Creating target directory on server..." -ForegroundColor Cyan
ssh $sshTarget "mkdir -p $targetPath"
Write-Host "Copying files to server..." -ForegroundColor Cyan
scp $TarFile "\${sshTarget}:\${targetPath}/"
scp docker-compose.yml "\${sshTarget}:\${targetPath}/"
if (Test-Path ".env") {
scp .env "\${sshTarget}:\${targetPath}/"
}
Write-Host "Loading and starting container on server..." -ForegroundColor Cyan
ssh $sshTarget "cd $targetPath && docker load -i $TarFile && docker compose down 2>/dev/null; docker compose up -d"
Write-Host ""
Write-Host "Deployment complete!" -ForegroundColor Green
Write-Host "View logs: ssh $sshTarget 'cd $targetPath && docker compose logs -f'" -ForegroundColor Gray
}
`;
}
/**
 * Generate build-image-tar.ps1: the manual-workflow PowerShell script.
 * Builds the image via docker buildx (linux/amd64 by default), saves it
 * to a tar, then prints step-by-step SCP/SSH deployment instructions
 * instead of performing the transfer itself.
 *
 * NOTE(review): script-body indentation appears flattened in this view
 * of the file; verify against the committed original.
 *
 * @param {object} context Template context (PROJECT_NAME).
 * @returns {string} PowerShell script contents.
 */
function generateBuildScript(context) {
  return `# Docker Build Script (Manual Workflow)
# Generated by docker-deployment-manager
param(
[string]$Platform = "linux/amd64",
[string]$ImageTag = "${context.PROJECT_NAME}:latest",
[string]$TarFile = "${context.PROJECT_NAME}.tar"
)
$ErrorActionPreference = "Stop"
Write-Host "Building Docker image..." -ForegroundColor Cyan
docker buildx build --platform $Platform -t $ImageTag --load .
if ($LASTEXITCODE -ne 0) { throw "Docker build failed" }
Write-Host "Saving image to tar..." -ForegroundColor Cyan
docker save -o $TarFile $ImageTag
if ($LASTEXITCODE -ne 0) { throw "Docker save failed" }
Write-Host ""
Write-Host "Build complete!" -ForegroundColor Green
Write-Host "Created: $TarFile" -ForegroundColor Green
Write-Host ""
Write-Host "Manual deployment instructions:" -ForegroundColor Yellow
Write-Host "1. scp $TarFile user@server:~/containers/${context.PROJECT_NAME}/files/" -ForegroundColor Gray
Write-Host "2. scp docker-compose.yml user@server:~/containers/${context.PROJECT_NAME}/files/" -ForegroundColor Gray
Write-Host "3. ssh user@server" -ForegroundColor Gray
Write-Host "4. cd ~/containers/${context.PROJECT_NAME}/files" -ForegroundColor Gray
Write-Host "5. docker load -i $TarFile" -ForegroundColor Gray
Write-Host "6. docker compose up -d" -ForegroundColor Gray
`;
}
/**
 * Generate README.DOCKER.md: per-project deployment documentation
 * covering local build/run, automated (SSH) and manual server
 * deployment, environment configuration, the generated file list, and
 * common docker compose commands. Backticks are escaped (\`) so markdown
 * code fences survive the JS template literal.
 *
 * @param {object} detection Detection result (description/type).
 * @param {object} context   Template context (PORT, PROJECT_NAME).
 * @param {object} config    Project config (currently unused here).
 * @returns {string} Markdown contents.
 */
function generateDockerReadme(detection, context, config) {
  return `# Docker Deployment
This project is configured for Docker deployment.
## Project Info
- **Type:** ${detection.description || detection.type}
- **Port:** ${context.PORT}
- **Image:** ${context.PROJECT_NAME}:latest
## Quick Start
### Build and Run Locally
\`\`\`bash
docker compose up --build
\`\`\`
Then visit: http://localhost:${context.PORT}
### Deploy to Server
**Option 1: Automated SSH Deployment**
\`\`\`powershell
.\\deploy-docker-auto.ps1
\`\`\`
**Option 2: Manual Deployment**
\`\`\`powershell
# Build
.\\build-image-tar.ps1
# Copy to server
scp ${context.PROJECT_NAME}.tar user@server:~/containers/${context.PROJECT_NAME}/files/
scp docker-compose.yml user@server:~/containers/${context.PROJECT_NAME}/files/
# On server
ssh user@server
cd ~/containers/${context.PROJECT_NAME}/files
docker load -i ${context.PROJECT_NAME}.tar
docker compose up -d
\`\`\`
## Configuration
### Environment Variables
Copy \`.env.example\` to \`.env\` and configure:
\`\`\`bash
cp .env.example .env
\`\`\`
Key variables:
- \`PORT\` - Application port (default: ${context.PORT})
- \`HOST_PORT\` - Docker host port mapping
### Deployment Settings
Edit \`docker-deployment.json\` to configure:
- SSH host and user
- Target deployment path
- Build settings
## Files
- \`Dockerfile\` - Container build instructions
- \`docker-compose.yml\` - Container runtime configuration
- \`.dockerignore\` - Files excluded from image
- \`docker-deployment.json\` - Deployment configuration
- \`deploy-docker-auto.ps1\` - Automated SSH deployment script
- \`build-image-tar.ps1\` - Manual build script
## Useful Commands
\`\`\`bash
# View logs
docker compose logs -f
# Stop container
docker compose down
# Rebuild and restart
docker compose up --build -d
# Shell into container
docker compose exec ${context.PROJECT_NAME} sh
\`\`\`
---
Generated by docker-deployment-manager
`;
}

147
cli/detectors/dotnet.js Normal file
View File

@@ -0,0 +1,147 @@
import { readFileSync, existsSync, readdirSync } from 'fs';
import { join, basename } from 'path';
import { glob } from 'glob';
/**
 * Detect .NET project type and configuration.
 *
 * Looks for a *.csproj at the project root, reads its TargetFramework and
 * AssemblyName, and classifies the project as Blazor, Web API, console, or
 * generic .NET. A root with only a .sln is reported as a non-dockerizable
 * solution; returns null when neither is present.
 */
export async function detectDotNet(projectPath) {
  const csprojMatches = await glob('*.csproj', { cwd: projectPath });

  if (csprojMatches.length === 0) {
    // A bare solution cannot be built directly; each project needs its own image.
    const solutionMatches = await glob('*.sln', { cwd: projectPath });
    if (solutionMatches.length > 0) {
      return {
        type: 'dotnet-solution',
        dockerizable: false,
        reason: 'Solution files require building individual projects',
        template: null
      };
    }
    return null;
  }

  // Only the first project file found is considered.
  const csprojFile = csprojMatches[0];
  const xml = readFileSync(join(projectPath, csprojFile), 'utf-8');

  // TargetFramework like "net9.0"; fall back to net8.0 / 8.0 when absent.
  const targetFramework =
    xml.match(/<TargetFramework>([^<]+)<\/TargetFramework>/)?.[1] ?? 'net8.0';
  const dotnetVersion = targetFramework.match(/net(\d+\.\d+)/)?.[1] ?? '8.0';

  // Classification heuristics based on SDK / package markers in the XML.
  const blazorProject =
    xml.includes('Microsoft.AspNetCore.Components') ||
    xml.includes('MudBlazor') ||
    xml.includes('Blazor');
  const webProject = xml.includes('Microsoft.NET.Sdk.Web') && !blazorProject;
  const consoleProject =
    xml.includes('Microsoft.NET.Sdk') && !xml.includes('Microsoft.NET.Sdk.Web');

  // Published DLL name: explicit <AssemblyName> wins over the file name.
  const assemblyName =
    xml.match(/<AssemblyName>([^<]+)<\/AssemblyName>/)?.[1] ??
    basename(csprojFile, '.csproj');
  const dllName = `${assemblyName}.dll`;

  if (blazorProject) {
    return {
      type: 'dotnet-blazor',
      dockerizable: true,
      template: 'dotnet/blazor',
      port: 8080,
      entryPoint: dllName,
      buildCommand: `dotnet publish -c Release`,
      description: 'Blazor web application',
      csprojFile,
      dllName,
      dotnetVersion,
      targetFramework
    };
  }

  if (webProject) {
    return {
      type: 'dotnet-webapi',
      dockerizable: true,
      template: 'dotnet/webapi',
      port: 8080,
      entryPoint: dllName,
      buildCommand: `dotnet publish -c Release`,
      description: '.NET Web API',
      csprojFile,
      dllName,
      dotnetVersion,
      targetFramework
    };
  }

  if (consoleProject) {
    return {
      type: 'dotnet-console',
      dockerizable: true,
      template: 'dotnet/console',
      port: null,
      entryPoint: dllName,
      buildCommand: `dotnet publish -c Release`,
      description: '.NET Console application',
      csprojFile,
      dllName,
      dotnetVersion,
      targetFramework
    };
  }

  // No recognizable SDK marker: assume a web-style deployment.
  return {
    type: 'dotnet-generic',
    dockerizable: true,
    template: 'dotnet/webapi',
    port: 8080,
    entryPoint: dllName,
    buildCommand: `dotnet publish -c Release`,
    description: '.NET application',
    csprojFile,
    dllName,
    dotnetVersion,
    targetFramework
  };
}
/**
 * Get additional info about a .NET project: its .csproj file name, the
 * NuGet packages it references, and a few well-known file/dir presences.
 * Returns null when no .csproj exists at the project root.
 */
export async function getDotNetInfo(projectPath) {
  const found = await glob('*.csproj', { cwd: projectPath });
  if (found.length === 0) {
    return null;
  }

  const csprojFile = found[0];
  const xml = readFileSync(join(projectPath, csprojFile), 'utf-8');

  // Every <PackageReference Include="..."> name in the project file.
  const packages = [...xml.matchAll(/<PackageReference\s+Include="([^"]+)"/g)]
    .map((m) => m[1]);

  return {
    csprojFile,
    packages,
    hasLaunchSettings: existsSync(join(projectPath, 'Properties', 'launchSettings.json')),
    hasAppSettings: existsSync(join(projectPath, 'appsettings.json')),
    hasWwwroot: existsSync(join(projectPath, 'wwwroot'))
  };
}

126
cli/detectors/index.js Normal file
View File

@@ -0,0 +1,126 @@
import { detectNodeJS, getNodeJSInfo } from './nodejs.js';
import { detectPython, getPythonInfo } from './python.js';
import { detectDotNet, getDotNetInfo } from './dotnet.js';
import { detectStatic, getStaticInfo } from './static.js';
import { basename } from 'path';
/**
 * Detect project type by running all detectors.
 * Detectors run in priority order — Node.js (most common), then Python,
 * .NET, and finally static — and the first non-null result wins. Each
 * result is tagged with the project name and path.
 */
export async function detectProject(projectPath) {
  const projectName = basename(projectPath);

  const detectors = [detectNodeJS, detectPython, detectDotNet, detectStatic];
  for (const detect of detectors) {
    // Some detectors are sync, some async; await handles both uniformly.
    const result = await detect(projectPath);
    if (result) {
      return {
        ...result,
        projectName,
        projectPath
      };
    }
  }

  // Nothing matched.
  return {
    type: 'unknown',
    dockerizable: false,
    reason: 'Could not determine project type. No package.json, requirements.txt, .csproj, or index.html found.',
    projectName,
    projectPath,
    template: null
  };
}
/**
 * Get detailed info about a project, dispatched on the detected type's
 * prefix. Returns null for unrecognized types.
 */
export async function getProjectInfo(projectPath, type) {
  if (type.startsWith('nodejs')) {
    return getNodeJSInfo(projectPath);
  }
  if (type.startsWith('python')) {
    return getPythonInfo(projectPath);
  }
  if (type.startsWith('dotnet')) {
    return getDotNetInfo(projectPath);
  }
  if (type.startsWith('static') || type.startsWith('flutter')) {
    return getStaticInfo(projectPath);
  }
  return null;
}
/**
 * Check if a path looks like a real project directory.
 * Rejects well-known tooling/output directories and any dot-directory.
 */
export function isValidProject(projectPath) {
  const name = basename(projectPath);
  if (name.startsWith('.')) {
    return false;
  }
  const excluded = new Set([
    'node_modules',
    '.git',
    '.vscode',
    '.idea',
    'dist',
    'build',
    'coverage',
    '__pycache__',
    'venv',
    '.venv'
  ]);
  return !excluded.has(name);
}
/**
 * Get all project types supported, each with a human description and the
 * template directory used to generate its Docker files.
 */
export function getSupportedTypes() {
  const rows = [
    ['nodejs-express', 'Express.js server', 'nodejs/express'],
    ['nodejs-vite-react', 'Vite + React SPA', 'nodejs/vite-react'],
    ['nodejs-vite-react-ssr', 'Vite + React with Express SSR', 'nodejs/vite-react-ssr'],
    ['nodejs-generic', 'Generic Node.js application', 'nodejs/express'],
    ['python-standard', 'Standard Python application', 'python/standard'],
    ['python-ml-pytorch', 'Python ML/AI with PyTorch', 'python/ml-pytorch'],
    ['dotnet-blazor', '.NET Blazor web application', 'dotnet/blazor'],
    ['dotnet-webapi', '.NET Web API', 'dotnet/webapi'],
    ['static-nginx', 'Static website with Nginx', 'static/nginx'],
    ['flutter-web', 'Flutter web application', 'static/nginx']
  ];
  return rows.map(([type, description, template]) => ({ type, description, template }));
}
export { detectNodeJS, detectPython, detectDotNet, detectStatic };

210
cli/detectors/nodejs.js Normal file
View File

@@ -0,0 +1,210 @@
import { readFileSync, existsSync } from 'fs';
import { join } from 'path';
/**
 * Detect Node.js project type and configuration.
 * Returns null when there is no (parseable) package.json; otherwise a
 * detection record describing how the project should be containerized.
 */
export function detectNodeJS(projectPath) {
  const manifestPath = join(projectPath, 'package.json');
  if (!existsSync(manifestPath)) {
    return null;
  }

  let manifest;
  try {
    manifest = JSON.parse(readFileSync(manifestPath, 'utf-8'));
  } catch {
    // Invalid JSON: treat as "not a Node.js project" rather than crash.
    return null;
  }

  const deps = manifest.dependencies || {};
  const devDeps = manifest.devDependencies || {};
  const scripts = manifest.scripts || {};

  // Electron targets the desktop, so there is nothing to containerize.
  if (deps.electron || devDeps.electron) {
    return {
      type: 'nodejs-electron',
      dockerizable: false,
      reason: 'Electron apps are desktop applications and cannot be containerized',
      template: null
    };
  }

  const usesVite = Boolean(devDeps.vite);
  const usesReact = Boolean(deps.react || devDeps.react);
  const usesExpress = Boolean(deps.express);
  const usesSocketIO = Boolean(deps['socket.io'] || deps['socket.io-client']);
  const runsConcurrently =
    Boolean(devDeps.concurrently) || Boolean(scripts.dev?.includes('concurrently'));

  // Vite + React + Express driven concurrently => SSR-style setup.
  if (usesVite && usesReact && usesExpress && runsConcurrently) {
    return {
      type: 'nodejs-vite-react-ssr',
      dockerizable: true,
      template: 'nodejs/vite-react-ssr',
      port: detectPort(manifest) || 3000,
      entryPoint: detectEntryPoint(manifest, projectPath, 'ssr'),
      buildCommand: 'npm run build',
      description: 'Vite + React with Express SSR'
    };
  }

  // Vite + React without a backend: build output is served statically.
  if (usesVite && usesReact && !usesExpress) {
    return {
      type: 'nodejs-vite-react',
      dockerizable: true,
      template: 'nodejs/vite-react',
      port: 80, // Nginx serves static files on port 80
      entryPoint: null,
      buildCommand: 'npm run build',
      description: 'Vite + React SPA (served by Nginx)'
    };
  }

  // Plain Express server (with or without Socket.io).
  if (usesExpress) {
    return {
      type: 'nodejs-express',
      dockerizable: true,
      template: 'nodejs/express',
      port: detectPort(manifest) || 3000,
      entryPoint: detectEntryPoint(manifest, projectPath, 'express'),
      buildCommand: detectBuildCommand(manifest),
      description: usesSocketIO ? 'Express + Socket.io server' : 'Express server'
    };
  }

  // Anything else that is at least runnable (main field or start script).
  if (manifest.main || scripts.start) {
    return {
      type: 'nodejs-generic',
      dockerizable: true,
      template: 'nodejs/express',
      port: detectPort(manifest) || 3000,
      entryPoint: detectEntryPoint(manifest, projectPath, 'generic'),
      buildCommand: detectBuildCommand(manifest),
      description: 'Generic Node.js application'
    };
  }

  // package.json exists but there is nothing to run.
  return {
    type: 'nodejs-unknown',
    dockerizable: false,
    reason: 'No start script or main entry point found',
    template: null
  };
}
/**
 * Detect the default port from package.json scripts and dependencies.
 * Returns null when nothing conclusive is found.
 */
function detectPort(pkg) {
  const scripts = pkg.scripts || {};
  const start = scripts.start || '';

  // An explicit port in the start script wins; patterns tried in order.
  const startPatterns = [/PORT[=\s]+(\d+)/i, /-p\s+(\d+)/, /--port\s+(\d+)/];
  for (const pattern of startPatterns) {
    const m = start.match(pattern);
    if (m) {
      return parseInt(m[1], 10);
    }
  }

  // Fall back to a ":NNNN" host:port reference in the dev script.
  const devMatch = (scripts.dev || '').match(/:(\d{4})/);
  if (devMatch) {
    return parseInt(devMatch[1], 10);
  }

  // Framework defaults.
  if (pkg.dependencies?.next) return 3000;
  if (pkg.devDependencies?.vite) return 5173;

  return null;
}
/**
 * Detect the entry point file for a Node.js project.
 *
 * Precedence: the package.json "main" field, then the first existing file
 * from a list of conventional entry names (server files first, so SSR and
 * Express projects resolve to their server entry), then "index.js".
 *
 * @param {object} pkg - parsed package.json
 * @param {string} projectPath - project root directory
 * @param {string} type - detection subtype ('ssr', 'express', 'generic');
 *   retained for interface compatibility. The previous SSR-only scan was
 *   unreachable — every file it checked already appeared earlier in the
 *   common list below — so that dead branch has been removed.
 * @returns {string} relative path of the entry file
 */
function detectEntryPoint(pkg, projectPath, type) {
  // An explicit "main" field always wins.
  if (pkg.main) {
    return pkg.main;
  }

  // Conventional entry files, in priority order.
  const commonEntries = [
    'server.js',
    'server.mjs',
    'src/server.js',
    'src/server.mjs',
    'src/index.js',
    'src/index.mjs',
    'index.js',
    'app.js',
    'src/app.js'
  ];
  for (const entry of commonEntries) {
    if (existsSync(join(projectPath, entry))) {
      return entry;
    }
  }

  // Nothing found: fall back to the Node convention.
  return 'index.js';
}
/**
 * Detect if the project has a build step.
 * Only projects that define a "build" script get one; otherwise null.
 */
function detectBuildCommand(pkg) {
  const hasBuildScript = Boolean((pkg.scripts || {}).build);
  return hasBuildScript ? 'npm run build' : null;
}
/**
 * Get additional info about the Node.js project: name/version, which
 * lockfiles exist, the declared Node engine, and the script/dependency
 * names from package.json. Returns null when no package.json exists.
 * Note: throws if package.json exists but contains invalid JSON.
 */
export function getNodeJSInfo(projectPath) {
  const manifestPath = join(projectPath, 'package.json');
  if (!existsSync(manifestPath)) {
    return null;
  }

  const pkg = JSON.parse(readFileSync(manifestPath, 'utf-8'));
  const present = (file) => existsSync(join(projectPath, file));

  return {
    name: pkg.name,
    version: pkg.version,
    hasLockFile: present('package-lock.json'),
    hasYarnLock: present('yarn.lock'),
    hasPnpmLock: present('pnpm-lock.yaml'),
    nodeVersion: pkg.engines?.node || null,
    scripts: Object.keys(pkg.scripts || {}),
    dependencies: Object.keys(pkg.dependencies || {}),
    devDependencies: Object.keys(pkg.devDependencies || {})
  };
}

186
cli/detectors/python.js Normal file
View File

@@ -0,0 +1,186 @@
import { readFileSync, existsSync } from 'fs';
import { join } from 'path';
/**
 * Detect Python project type and configuration.
 * Requires at least one of requirements.txt / pyproject.toml / setup.py.
 * ML/AI stacks get the dedicated ml-pytorch template (with extra system
 * packages); otherwise the standard template is used, with the port and
 * description tuned to the detected web framework.
 */
export function detectPython(projectPath) {
  const requirementsPath = join(projectPath, 'requirements.txt');
  const pyprojectPath = join(projectPath, 'pyproject.toml');

  const hasRequirements = existsSync(requirementsPath);
  const hasPyproject = existsSync(pyprojectPath);
  const hasSetup = existsSync(join(projectPath, 'setup.py'));
  if (!hasRequirements && !hasPyproject && !hasSetup) {
    return null;
  }

  // Lower-cased manifests for case-insensitive dependency sniffing.
  const requirements = hasRequirements
    ? readFileSync(requirementsPath, 'utf-8').toLowerCase()
    : '';
  const pyproject = hasPyproject
    ? readFileSync(pyprojectPath, 'utf-8').toLowerCase()
    : '';
  const mentions = (name) => requirements.includes(name) || pyproject.includes(name);

  // ML/AI indicators route to the ml-pytorch template.
  const mlIndicators = [
    'torch',
    'pytorch',
    'tensorflow',
    'keras',
    'transformers',
    'opencv',
    'faster-whisper',
    'whisper',
    'scikit-learn',
    'numpy'
  ];
  if (mlIndicators.some((name) => mentions(name))) {
    return {
      type: 'python-ml-pytorch',
      dockerizable: true,
      template: 'python/ml-pytorch',
      port: detectPythonPort(projectPath) || 8000,
      entryPoint: detectPythonEntryPoint(projectPath),
      buildCommand: null,
      description: 'Python ML/AI application',
      systemDeps: detectSystemDeps(requirements)
    };
  }

  // Web frameworks adjust the description and default port.
  const hasFlask = mentions('flask');
  const hasFastAPI = mentions('fastapi');
  const hasDjango = mentions('django');

  let description = 'Python application';
  let defaultPort = 8000;
  if (hasFlask) {
    description = 'Flask web application';
    defaultPort = 5000;
  } else if (hasFastAPI) {
    description = 'FastAPI web application';
  } else if (hasDjango) {
    description = 'Django web application';
  }

  return {
    type: 'python-standard',
    dockerizable: true,
    template: 'python/standard',
    port: detectPythonPort(projectPath) || defaultPort,
    entryPoint: detectPythonEntryPoint(projectPath),
    buildCommand: null,
    description,
    framework: hasFlask ? 'flask' : hasFastAPI ? 'fastapi' : hasDjango ? 'django' : null
  };
}
/**
 * Detect the Python entry point: the first conventional entry file that
 * exists, defaulting to "main.py".
 */
function detectPythonEntryPoint(projectPath) {
  const candidates = [
    'main.py',
    'app.py',
    'server.py',
    'run.py',
    'src/main.py',
    'src/app.py'
  ];
  const found = candidates.find((rel) => existsSync(join(projectPath, rel)));
  return found ?? 'main.py';
}
/**
 * Detect a hard-coded port from common Python entry files.
 *
 * Scans main.py/app.py/server.py/run.py (in that order) for a literal
 * 4-digit port assignment such as "port=8000" or "PORT: 5000" and returns
 * the first match, or null when none is found.
 *
 * The previous version tried a second pattern /PORT[=\s:]+(\d{4})/i after
 * the first — but the first already has the /i flag, making the second an
 * exact duplicate; the dead alternative has been removed.
 */
function detectPythonPort(projectPath) {
  const candidateFiles = ['main.py', 'app.py', 'server.py', 'run.py'];
  for (const file of candidateFiles) {
    const filePath = join(projectPath, file);
    if (!existsSync(filePath)) {
      continue;
    }
    const content = readFileSync(filePath, 'utf-8');
    // Case-insensitive, so one pattern covers "port" and "PORT" alike.
    const match = content.match(/port[=\s:]+(\d{4})/i);
    if (match) {
      return parseInt(match[1], 10);
    }
  }
  return null;
}
/**
 * Detect system packages that ML-related Python requirements need in the
 * image. `requirements` is the lower-cased requirements.txt content.
 */
function detectSystemDeps(requirements) {
  // Requirement substrings -> apt packages they imply.
  const rules = [
    { needles: ['opencv', 'cv2'], packages: ['libgl1-mesa-glx', 'libglib2.0-0'] },
    { needles: ['whisper', 'faster-whisper'], packages: ['ffmpeg'] },
    { needles: ['soundfile', 'librosa'], packages: ['libsndfile1'] },
    { needles: ['pillow', 'pil'], packages: ['libjpeg-dev', 'zlib1g-dev'] }
  ];

  const deps = [];
  for (const { needles, packages } of rules) {
    if (needles.some((needle) => requirements.includes(needle))) {
      deps.push(...packages);
    }
  }
  return deps;
}
/**
 * Get additional info about a Python project: which packaging files exist
 * and the bare dependency names from requirements.txt.
 *
 * Fixes over the previous version:
 * - the comment check ran before trimming, so indented comment lines
 *   ("  # note") slipped past startsWith('#') and leaked into the
 *   dependency list — lines are now trimmed first;
 * - version specifiers were only split on ==/>=/<=; now all comparison
 *   operators (~=, !=, >, <), extras ("pkg[extra]") and environment
 *   markers ("; python_version...") are stripped as well.
 */
export function getPythonInfo(projectPath) {
  const requirementsPath = join(projectPath, 'requirements.txt');
  const pyprojectPath = join(projectPath, 'pyproject.toml');

  const info = {
    hasRequirements: existsSync(requirementsPath),
    hasPyproject: existsSync(pyprojectPath),
    hasSetupPy: existsSync(join(projectPath, 'setup.py')),
    hasVenv: existsSync(join(projectPath, 'venv')) || existsSync(join(projectPath, '.venv')),
    dependencies: []
  };

  if (info.hasRequirements) {
    const content = readFileSync(requirementsPath, 'utf-8');
    info.dependencies = content
      .split('\n')
      .map((line) => line.trim())
      .filter((line) => line && !line.startsWith('#'))
      // Keep only the bare package name before any specifier/extra/marker.
      .map((line) => line.split(/[=<>!~\[;]/)[0].trim())
      .filter(Boolean);
  }

  return info;
}

98
cli/detectors/static.js Normal file
View File

@@ -0,0 +1,98 @@
import { existsSync, readdirSync } from 'fs';
import { join } from 'path';
import { glob } from 'glob';
/**
 * Detect static site or Flutter web project.
 * Flutter (pubspec.yaml) is recognized first; otherwise an index.html at
 * the root or in public/dist/build marks a static site, served via the
 * PHP template when top-level .php files exist and via Nginx otherwise.
 */
export async function detectStatic(projectPath) {
  // Flutter projects carry a pubspec.yaml and build to static web assets.
  if (existsSync(join(projectPath, 'pubspec.yaml'))) {
    return {
      type: 'flutter-web',
      dockerizable: true,
      template: 'static/nginx',
      port: 80,
      entryPoint: null,
      buildCommand: 'flutter build web',
      description: 'Flutter web application (builds to static files)',
      buildDir: 'build/web',
      note: 'Run "flutter build web" before Docker build'
    };
  }

  // An index.html at the root or in a conventional output directory.
  const rootIndex = existsSync(join(projectPath, 'index.html'));
  const publicIndex = existsSync(join(projectPath, 'public', 'index.html'));
  const distIndex = existsSync(join(projectPath, 'dist', 'index.html'));
  const buildIndex = existsSync(join(projectPath, 'build', 'index.html'));
  if (!rootIndex && !publicIndex && !distIndex && !buildIndex) {
    return null;
  }

  // First matching output dir wins; otherwise serve from the root.
  let sourceDir = '.';
  if (publicIndex) {
    sourceDir = 'public';
  } else if (distIndex) {
    sourceDir = 'dist';
  } else if (buildIndex) {
    sourceDir = 'build';
  }

  // Any top-level .php file switches to the PHP-FPM template.
  const phpMatches = await glob('*.php', { cwd: projectPath });
  if (phpMatches.length > 0) {
    return {
      type: 'static-php',
      dockerizable: true,
      template: 'static/php',
      port: 80,
      entryPoint: null,
      buildCommand: null,
      description: 'PHP static site',
      sourceDir,
      note: 'Uses PHP-FPM with Nginx'
    };
  }

  // Pure HTML/CSS/JS site.
  return {
    type: 'static-nginx',
    dockerizable: true,
    template: 'static/nginx',
    port: 80,
    entryPoint: null,
    buildCommand: null,
    description: 'Static website (served by Nginx)',
    sourceDir
  };
}
/**
 * Get additional info about a static site: presence of index.html and
 * package.json, per-extension file counts (node_modules excluded), and
 * which conventional asset directories exist.
 *
 * The four glob scans are independent, so they now run in parallel via
 * Promise.all instead of being awaited one after another.
 */
export async function getStaticInfo(projectPath) {
  // Count files matching a pattern, skipping node_modules.
  const countFiles = (pattern) =>
    glob(pattern, { cwd: projectPath, ignore: 'node_modules/**' }).then((m) => m.length);

  const [html, css, js, php] = await Promise.all([
    countFiles('**/*.html'),
    countFiles('**/*.css'),
    countFiles('**/*.js'),
    countFiles('**/*.php')
  ]);

  const info = {
    hasIndexHtml: existsSync(join(projectPath, 'index.html')),
    hasPackageJson: existsSync(join(projectPath, 'package.json')),
    files: { html, css, js, php },
    directories: []
  };

  // Record which conventional asset/output directories are present.
  const commonDirs = ['public', 'dist', 'build', 'assets', 'css', 'js', 'images'];
  for (const dir of commonDirs) {
    if (existsSync(join(projectPath, dir))) {
      info.directories.push(dir);
    }
  }

  return info;
}

114
cli/index.js Normal file
View File

@@ -0,0 +1,114 @@
#!/usr/bin/env node
import { Command } from 'commander';
import chalk from 'chalk';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';
import { readFileSync } from 'fs';
// Import commands
import { detectCommand } from './commands/detect.js';
import { initCommand } from './commands/init.js';
import { batchCommand } from './commands/batch.js';

// Commander invokes option parsers as fn(value, previous). Passing bare
// parseInt therefore receives the previous/default value as the RADIX —
// e.g. `--parallel` with default 4 would parse its argument in base 4
// ("10" -> 4, "5" -> NaN). Always coerce with an explicit base-10 wrapper.
const toInt = (value) => Number.parseInt(value, 10);

// Get package.json for version
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const packageJson = JSON.parse(readFileSync(join(__dirname, '..', 'package.json'), 'utf-8'));

const program = new Command();

program
  .name('docker-deploy')
  .description('Automated Docker deployment system for containerizing and deploying projects')
  .version(packageJson.version);

// Detect command - identify project type
program
  .command('detect [path]')
  .description('Detect project type and show deployment recommendations')
  .option('--json', 'Output as JSON')
  .action(async (path, options) => {
    try {
      await detectCommand(path || process.cwd(), options);
    } catch (error) {
      console.error(chalk.red('Error:'), error.message);
      process.exit(1);
    }
  });

// Init command - initialize Docker config for a project
program
  .command('init [path]')
  .description('Initialize Docker configuration for a project')
  .option('--no-interactive', 'Skip interactive prompts, use smart defaults')
  .option('--type <type>', 'Force specific project type')
  .option('--port <port>', 'Override default port', toInt)
  .option('--name <name>', 'Override container name')
  .option('--dry-run', 'Show what would be generated without writing files')
  .option('--overwrite', 'Overwrite existing files')
  .action(async (path, options) => {
    try {
      await initCommand(path || process.cwd(), options);
    } catch (error) {
      console.error(chalk.red('Error:'), error.message);
      process.exit(1);
    }
  });

// Batch command - operations across multiple projects
program
  .command('batch <action>')
  .description('Batch operations: detect, init')
  .option('--root <path>', 'Root directory containing projects', 'C:\\.bucket\\Repos.Git')
  .option('--filter <pattern>', 'Filter projects by pattern')
  .option('--exclude <patterns>', 'Exclude projects (comma-separated)')
  .option('--parallel <n>', 'Max parallel operations', toInt, 4)
  .option('--report', 'Generate summary report')
  .option('--force', 'Force operation even if files exist')
  .action(async (action, options) => {
    try {
      await batchCommand(action, options);
    } catch (error) {
      console.error(chalk.red('Error:'), error.message);
      process.exit(1);
    }
  });

// Build command placeholder
program
  .command('build')
  .description('Build Docker image for current project')
  .option('--platform <platform>', 'Target platform', 'linux/amd64')
  .option('--tag <tag>', 'Image tag')
  .option('--no-cache', 'Build without cache')
  .action(async (options) => {
    console.log(chalk.yellow('Build command - coming soon'));
    console.log('For now, use the generated deploy-docker-auto.ps1 or build-image-tar.ps1 scripts');
  });

// Package command placeholder
program
  .command('package')
  .description('Package Docker image as tar file')
  .option('--output <file>', 'Output tar file name')
  .option('--compress', 'Use gzip compression')
  .action(async (options) => {
    console.log(chalk.yellow('Package command - coming soon'));
    console.log('For now, use the generated build-image-tar.ps1 script');
  });

// Deploy command placeholder
program
  .command('deploy')
  .description('Deploy to Linux server via SSH')
  .option('--host <host>', 'SSH host')
  .option('--user <user>', 'SSH user')
  .option('--key <path>', 'SSH private key path')
  .option('--target <path>', 'Target directory on server')
  .action(async (options) => {
    console.log(chalk.yellow('Deploy command - coming soon'));
    console.log('For now, use the generated deploy-docker-auto.ps1 script');
  });

program.parse();

174
cli/utils/config-manager.js Normal file
View File

@@ -0,0 +1,174 @@
import { readFileSync, writeFileSync, existsSync } from 'fs';
import { join, dirname } from 'path';
import { fileURLToPath } from 'url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// Default configuration values
// Skeleton for a project's docker-deployment.json; empty strings are
// placeholders filled in from detection results and global defaults
// (see createProjectConfig).
const DEFAULT_CONFIG = {
  version: '1.0',
  project: {
    name: '',
    type: '',      // detection type id, e.g. 'nodejs-express'
    template: ''   // template directory, e.g. 'nodejs/express'
  },
  build: {
    platform: 'linux/amd64',
    nodeVersion: '20',
    pythonVersion: '3.11',
    dotnetVersion: '9.0',
    entryPoint: '',
    buildCommand: null   // null = no build step
  },
  runtime: {
    port: 3000,
    envFile: true,       // generated compose references a .env file
    volumes: [],
    extraHosts: false
  },
  deployment: {
    sshHost: '',
    sshUser: '',
    sshKeyPath: '',
    targetPath: '',
    autoLoad: true,
    autoStart: true
  }
};
// Global defaults (can be overridden per project)
// NOTE(review): loadGlobalConfig() spreads the on-disk file over this
// object SHALLOWLY, so a partial "deployment" or "build" section in the
// file replaces the matching section here wholesale — confirm intended.
const GLOBAL_DEFAULTS = {
  deployment: {
    sshHost: '192.168.8.178',
    sshUser: 'deployer',
    targetRoot: '~/containers'   // per-project path becomes <root>/<name>/files
  },
  build: {
    platform: 'linux/amd64',
    nodeVersion: '20',
    pythonVersion: '3.11',
    dotnetVersion: '9.0'
  }
};
/**
 * Load project configuration from docker-deployment.json.
 * Returns null when the file does not exist; throws with a descriptive
 * message when it exists but cannot be parsed.
 */
export function loadProjectConfig(projectPath) {
  const configPath = join(projectPath, 'docker-deployment.json');
  if (!existsSync(configPath)) {
    return null;
  }
  try {
    return JSON.parse(readFileSync(configPath, 'utf-8'));
  } catch (error) {
    throw new Error(`Failed to parse config at ${configPath}: ${error.message}`);
  }
}
/**
 * Save project configuration to docker-deployment.json (pretty-printed).
 * @returns {string} the path that was written.
 */
export function saveProjectConfig(projectPath, config) {
  const configPath = join(projectPath, 'docker-deployment.json');
  writeFileSync(configPath, JSON.stringify(config, null, 2), 'utf-8');
  return configPath;
}
/**
 * Load global configuration from the deployment tool directory.
 * Falls back to GLOBAL_DEFAULTS when the file is missing or unreadable.
 * The merge with the defaults is shallow: a section present in the file
 * replaces the corresponding defaults section wholesale.
 */
export function loadGlobalConfig() {
  const globalConfigPath = join(__dirname, '..', '..', 'global-deployment-config.json');
  if (!existsSync(globalConfigPath)) {
    return GLOBAL_DEFAULTS;
  }
  try {
    const parsed = JSON.parse(readFileSync(globalConfigPath, 'utf-8'));
    return { ...GLOBAL_DEFAULTS, ...parsed };
  } catch {
    // Corrupt global config: silently fall back to built-in defaults.
    return GLOBAL_DEFAULTS;
  }
}
/**
 * Save global configuration as pretty-printed JSON next to the tool.
 * @returns {string} the path that was written.
 */
export function saveGlobalConfig(config) {
  const globalConfigPath = join(__dirname, '..', '..', 'global-deployment-config.json');
  writeFileSync(globalConfigPath, JSON.stringify(config, null, 2), 'utf-8');
  return globalConfigPath;
}
/**
 * Create a new project configuration from the built-in defaults, the
 * global config, a detection result, and optional explicit overrides
 * (port, name, sshHost, sshUser, volumes — truthy values only).
 */
export function createProjectConfig(projectName, detection, overrides = {}) {
  const globals = loadGlobalConfig();
  const targetRoot = globals.deployment?.targetRoot || '~/containers';

  const config = {
    ...DEFAULT_CONFIG,
    project: {
      name: projectName,
      type: detection.type,
      template: detection.template
    },
    build: {
      ...DEFAULT_CONFIG.build,
      ...globals.build,
      entryPoint: detection.entryPoint || '',
      buildCommand: detection.buildCommand || null
    },
    runtime: {
      ...DEFAULT_CONFIG.runtime,
      port: detection.port || 3000
    },
    deployment: {
      ...DEFAULT_CONFIG.deployment,
      sshHost: globals.deployment?.sshHost || '',
      sshUser: globals.deployment?.sshUser || '',
      targetPath: `${targetRoot}/${projectName}/files`
    }
  };

  // Explicit overrides win over everything assembled above.
  if (overrides.port) config.runtime.port = overrides.port;
  if (overrides.name) config.project.name = overrides.name;
  if (overrides.sshHost) config.deployment.sshHost = overrides.sshHost;
  if (overrides.sshUser) config.deployment.sshUser = overrides.sshUser;
  if (overrides.volumes) config.runtime.volumes = overrides.volumes;

  return config;
}
/**
 * Get the target path for deployment: the configured targetPath, or the
 * conventional ~/containers/<name>/files location.
 */
export function getTargetPath(config) {
  const explicit = config.deployment?.targetPath;
  return explicit || `~/containers/${config.project.name}/files`;
}
/**
 * Merge configs with priority: overrides > project > global > defaults.
 * The merge is shallow: a top-level section present in a higher-priority
 * layer replaces the whole section from lower-priority layers.
 */
export function mergeConfigs(projectConfig, overrides = {}) {
  const layers = [DEFAULT_CONFIG, loadGlobalConfig(), projectConfig, overrides];
  return Object.assign({}, ...layers);
}
export { DEFAULT_CONFIG, GLOBAL_DEFAULTS };

View File

@@ -0,0 +1,158 @@
import Handlebars from 'handlebars';
import { readFileSync, existsSync } from 'fs';
import { join, dirname } from 'path';
import { fileURLToPath } from 'url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// Templates directory
const TEMPLATES_DIR = join(__dirname, '..', '..', 'templates');
// Register Handlebars helpers
// {{#if_eq a b}}...{{else}}...{{/if_eq}} — renders the block when a === b.
Handlebars.registerHelper('if_eq', function(a, b, options) {
  return a === b ? options.fn(this) : options.inverse(this);
});
// {{#if_includes arr v}}...{{/if_includes}} — renders when arr is an
// array containing v; otherwise the inverse block.
Handlebars.registerHelper('if_includes', function(arr, value, options) {
  if (Array.isArray(arr) && arr.includes(value)) {
    return options.fn(this);
  }
  return options.inverse(this);
});
// {{join arr sep}} — joins an array with sep; empty string for non-arrays.
Handlebars.registerHelper('join', function(arr, separator) {
  if (Array.isArray(arr)) {
    return arr.join(separator);
  }
  return '';
});
// {{lowercase str}} — lower-cases str; empty string for falsy input.
Handlebars.registerHelper('lowercase', function(str) {
  return str ? str.toLowerCase() : '';
});
// {{sanitize str}} — lower-cases and replaces any character outside
// [a-z0-9-] with '-' so the result is usable as a Docker name.
Handlebars.registerHelper('sanitize', function(str) {
  // Convert to lowercase and replace invalid chars for Docker
  return str ? str.toLowerCase().replace(/[^a-z0-9-]/g, '-') : '';
});
/**
 * Load a template file from the templates directory.
 * @throws when the template does not exist.
 */
export function loadTemplate(templatePath) {
  const fullPath = join(TEMPLATES_DIR, templatePath);
  if (existsSync(fullPath)) {
    return readFileSync(fullPath, 'utf-8');
  }
  throw new Error(`Template not found: ${templatePath}`);
}
/**
 * Render a template string with the given context.
 * noEscape is on because the output files are not HTML.
 */
export function renderTemplate(templateContent, context) {
  const compiled = Handlebars.compile(templateContent, { noEscape: true });
  return compiled(context);
}
/**
 * Load and render a template file in one step.
 */
export function processTemplate(templatePath, context) {
  return renderTemplate(loadTemplate(templatePath), context);
}
/**
 * Get the template directory for a project type; unknown types fall back
 * to the generic Node.js/Express template.
 */
export function getTemplateDir(projectType) {
  const mapping = new Map([
    ['nodejs-express', 'nodejs/express'],
    ['nodejs-vite-react', 'nodejs/vite-react'],
    ['nodejs-vite-react-ssr', 'nodejs/vite-react-ssr'],
    ['nodejs-generic', 'nodejs/express'],
    ['python-standard', 'python/standard'],
    ['python-ml-pytorch', 'python/ml-pytorch'],
    ['dotnet-blazor', 'dotnet/blazor'],
    ['dotnet-webapi', 'dotnet/webapi'],
    ['static-nginx', 'static/nginx'],
    ['flutter-web', 'static/nginx']
  ]);
  return mapping.get(projectType) ?? 'nodejs/express';
}
/**
 * Get all template files for a project type, each as a
 * { template, output } pair. Static-style projects additionally get an
 * nginx.conf template.
 */
export function getTemplateFiles(projectType) {
  const dir = getTemplateDir(projectType);
  const files = [
    { template: `${dir}/Dockerfile.template`, output: 'Dockerfile' },
    { template: `${dir}/docker-compose.yml.template`, output: 'docker-compose.yml' },
    { template: `${dir}/.dockerignore.template`, output: '.dockerignore' }
  ];
  const servedByNginx = projectType === 'static-nginx' || projectType === 'flutter-web';
  if (servedByNginx) {
    files.push({ template: `${dir}/nginx.conf.template`, output: 'nginx.conf' });
  }
  return files;
}
/**
 * Build the Handlebars context from a project config and an optional
 * detection result. All keys the templates reference are produced here.
 */
export function buildTemplateContext(config, detection) {
  const { project, build, runtime, deployment } = config;
  const rawName = project.name;
  // Docker-safe name: lowercase, everything outside [a-z0-9-] becomes '-'.
  const dockerName = rawName.toLowerCase().replace(/[^a-z0-9-]/g, '-');
  const volumes = runtime.volumes || [];
  const servedStatically = project.type === 'static-nginx' || project.type === 'flutter-web';

  return {
    // Project info
    PROJECT_NAME: dockerName,
    PROJECT_NAME_RAW: rawName,
    PROJECT_TYPE: project.type,
    // Build settings
    NODE_VERSION: build.nodeVersion || '20',
    PYTHON_VERSION: build.pythonVersion || '3.11',
    DOTNET_VERSION: build.dotnetVersion || '9.0',
    PLATFORM: build.platform || 'linux/amd64',
    ENTRY_POINT: build.entryPoint || detection?.entryPoint || 'index.js',
    BUILD_COMMAND: build.buildCommand,
    // Runtime settings
    PORT: runtime.port || 3000,
    USE_ENV_FILE: runtime.envFile !== false,
    VOLUMES: volumes,
    HAS_VOLUMES: volumes.length > 0,
    EXTRA_HOSTS: runtime.extraHosts || false,
    // Deployment settings
    SSH_HOST: deployment.sshHost || '',
    SSH_USER: deployment.sshUser || '',
    TARGET_PATH: deployment.targetPath || `~/containers/${dockerName}/files`,
    HAS_SSH: !!(deployment.sshHost && deployment.sshUser),
    // Detection info
    HAS_BUILD_COMMAND: !!detection?.buildCommand,
    IS_SSR: project.type === 'nodejs-vite-react-ssr',
    IS_STATIC: servedStatically,
    // .NET specific
    CSPROJ_FILE: detection?.csprojFile || '',
    DLL_NAME: detection?.dllName || '',
    // Data directory (for projects that need persistence)
    DATA_DIR: detection?.dataDir || 'data'
  };
}
export { TEMPLATES_DIR };

31
package.json Normal file
View File

@@ -0,0 +1,31 @@
{
"name": "docker-deployment-manager",
"version": "1.0.0",
"description": "Automated Docker deployment system for containerizing and deploying 35+ projects from Windows to Linux",
"main": "cli/index.js",
"type": "module",
"scripts": {
"docker-deploy": "node cli/index.js",
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [
"docker",
"deployment",
"automation",
"containerization",
"gitea"
],
"author": "",
"license": "MIT",
"dependencies": {
"commander": "^12.1.0",
"handlebars": "^4.7.8",
"chalk": "^5.3.0",
"inquirer": "^11.1.0",
"ssh2": "^1.16.0",
"glob": "^11.0.0"
},
"engines": {
"node": ">=18.0.0"
}
}

View File

@@ -0,0 +1,34 @@
bin
obj
.git
.gitignore
.env
.env.local
*.tar
*.tar.gz
# IDE
.vs
.vscode
.idea
*.user
*.suo
# Build
publish
# Documentation
README.md
README.DOCKER.md
docs
# Docker files
Dockerfile
docker-compose.yml
.dockerignore
docker-deployment.json
*.ps1
# Logs
*.log
logs

View File

@@ -0,0 +1,28 @@
# Multi-stage build: compile with the full .NET SDK, run on the slim ASP.NET runtime.

# Stage 1: Build
FROM mcr.microsoft.com/dotnet/sdk:{{DOTNET_VERSION}} AS build
WORKDIR /src
# Copy only the project file first so `dotnet restore` is cached
# independently of source-code changes.
# NOTE(review): COPY flattens the file into /src — if CSPROJ_FILE can contain a
# subdirectory path, the restore/publish paths below will not match; confirm the
# generator always emits a root-relative project file name.
COPY ["{{CSPROJ_FILE}}", "./"]
RUN dotnet restore "{{CSPROJ_FILE}}"
# Copy the remaining sources and publish. `dotnet publish` performs the build
# itself, so a separate `dotnet build` step would only add a wasted layer.
COPY . .
RUN dotnet publish "{{CSPROJ_FILE}}" -c Release -o /app/publish

# Stage 2: Runtime
FROM mcr.microsoft.com/dotnet/aspnet:{{DOTNET_VERSION}}
WORKDIR /app
COPY --from=build /app/publish .
# Bind Kestrel to the templated port on all interfaces.
ENV ASPNETCORE_URLS=http://+:{{PORT}}
ENV ASPNETCORE_ENVIRONMENT=Production
EXPOSE {{PORT}}
ENTRYPOINT ["dotnet", "{{DLL_NAME}}"]

View File

@@ -0,0 +1,20 @@
# docker-compose template for a .NET service ({{...}} placeholders are filled
# by the deployment generator before this file is written to the project).
services:
  {{PROJECT_NAME}}:
    build: .
    container_name: {{PROJECT_NAME}}
    # Come back up after crashes/reboots unless explicitly stopped.
    restart: unless-stopped
    ports:
      # HOST_PORT (from .env) can remap the host side; the container port is fixed.
      - "${HOST_PORT:-{{PORT}}}:{{PORT}}"
{{#if USE_ENV_FILE}}
    env_file:
      - .env
{{/if}}
    environment:
      ASPNETCORE_ENVIRONMENT: Production
      ASPNETCORE_URLS: http://+:{{PORT}}
{{#if HAS_VOLUMES}}
    volumes:
{{#each VOLUMES}}
      - {{this}}
{{/each}}
{{/if}}

View File

@@ -0,0 +1,47 @@
node_modules
npm-debug.log*
.git
.gitignore
.env
.env.local
.env.*.local
*.tar
*.tar.gz
# IDE
.vscode
.idea
*.swp
*.swo
# Test & Coverage
coverage
.nyc_output
*.test.js
*.spec.js
__tests__
# Build artifacts
dist
build
# Documentation
README.md
README.DOCKER.md
CHANGELOG.md
docs
# Docker files (don't include in image)
Dockerfile
docker-compose.yml
docker-compose.yaml
.dockerignore
docker-deployment.json
# Scripts
*.ps1
*.sh
# Logs
logs
*.log

View File

@@ -0,0 +1,27 @@
FROM node:{{NODE_VERSION}}-slim
WORKDIR /app
# Copy package manifests first so the dependency-install layer is cached
# independently of source changes.
COPY package.json package-lock.json* ./
{{#if BUILD_COMMAND}}
# A build step exists: install ALL dependencies (build tooling normally lives
# in devDependencies — installing with --omit=dev first would make the build
# fail), run the build, then prune dev packages out of the final image.
RUN npm ci
COPY . .
RUN {{BUILD_COMMAND}}
RUN npm prune --omit=dev
{{else}}
# No build step: production dependencies are sufficient.
RUN npm ci --omit=dev
COPY . .
{{/if}}
# Runtime environment (set after install/build so NODE_ENV=production does not
# cause npm to skip devDependencies during the build branch above).
ENV NODE_ENV=production
ENV PORT={{PORT}}
# Expose port
EXPOSE {{PORT}}
# Start application
CMD ["node", "{{ENTRY_POINT}}"]

View File

@@ -0,0 +1,24 @@
# docker-compose template for a Node.js service ({{...}} placeholders are
# filled by the deployment generator).
services:
  {{PROJECT_NAME}}:
    build: .
    container_name: {{PROJECT_NAME}}
    # Come back up after crashes/reboots unless explicitly stopped.
    restart: unless-stopped
    ports:
      # HOST_PORT (from .env) can remap the host side; the container port is fixed.
      - "${HOST_PORT:-{{PORT}}}:{{PORT}}"
{{#if USE_ENV_FILE}}
    env_file:
      - .env
{{/if}}
    environment:
      NODE_ENV: production
      PORT: {{PORT}}
{{#if HAS_VOLUMES}}
    volumes:
{{#each VOLUMES}}
      - {{this}}
{{/each}}
{{/if}}
{{#if EXTRA_HOSTS}}
    # Lets the container reach services bound on the Docker host machine.
    extra_hosts:
      - "host.docker.internal:host-gateway"
{{/if}}

View File

@@ -0,0 +1,32 @@
node_modules
npm-debug.log*
.git
.gitignore
.env
.env.local
.env.*.local
*.tar
*.tar.gz
# IDE
.vscode
.idea
# Test & Coverage
coverage
.nyc_output
# Build output (built during docker build)
dist
# Documentation
README.md
README.DOCKER.md
docs
# Docker files
Dockerfile
docker-compose.yml
.dockerignore
docker-deployment.json
*.ps1

View File

@@ -0,0 +1,34 @@
# Multi-stage build for a Vite/React SSR app served by server.mjs.
# Stage 1: Build
FROM node:{{NODE_VERSION}}-alpine AS builder
WORKDIR /app
COPY package.json package-lock.json* ./
# Full install here (dev deps included) — the Vite build needs them.
RUN npm ci
COPY . .
RUN npm run build
# Stage 2: Production
FROM node:{{NODE_VERSION}}-alpine
WORKDIR /app
# Copy package files and install production dependencies
COPY package.json package-lock.json* ./
RUN npm ci --omit=dev
# Copy built files from builder
COPY --from=builder /app/dist ./dist
# Copy server file
COPY server.mjs ./server.mjs
# Set environment
ENV NODE_ENV=production
ENV PORT={{PORT}}
# Presumably read by server.mjs to locate the built bundles — confirm.
ENV DIST_DIR=/app/dist
EXPOSE {{PORT}}
CMD ["node", "server.mjs"]

View File

@@ -0,0 +1,24 @@
# docker-compose template for the Vite/React SSR service ({{...}} placeholders
# are filled by the deployment generator).
services:
  {{PROJECT_NAME}}:
    build: .
    container_name: {{PROJECT_NAME}}
    # Come back up after crashes/reboots unless explicitly stopped.
    restart: unless-stopped
    ports:
      # HOST_PORT (from .env) can remap the host side; the container port is fixed.
      - "${HOST_PORT:-{{PORT}}}:{{PORT}}"
{{#if USE_ENV_FILE}}
    env_file:
      - .env
{{/if}}
    environment:
      NODE_ENV: production
      PORT: {{PORT}}
{{#if HAS_VOLUMES}}
    volumes:
{{#each VOLUMES}}
      - {{this}}
{{/each}}
{{/if}}
{{#if EXTRA_HOSTS}}
    # Lets the container reach services bound on the Docker host machine.
    extra_hosts:
      - "host.docker.internal:host-gateway"
{{/if}}

View File

@@ -0,0 +1,32 @@
node_modules
npm-debug.log*
.git
.gitignore
.env
.env.local
.env.*.local
*.tar
*.tar.gz
# IDE
.vscode
.idea
# Test & Coverage
coverage
.nyc_output
# Build output (built during docker build)
dist
# Documentation
README.md
README.DOCKER.md
docs
# Docker files
Dockerfile
docker-compose.yml
.dockerignore
docker-deployment.json
*.ps1

View File

@@ -0,0 +1,23 @@
# Multi-stage build: compile the Vite bundle with Node, serve it with Nginx.
# Stage 1: Build
FROM node:{{NODE_VERSION}}-alpine AS builder
WORKDIR /app
COPY package.json package-lock.json* ./
# Full install (dev deps included) — the Vite build needs them; nothing from
# node_modules reaches the final image.
RUN npm ci
COPY . .
RUN npm run build
# Stage 2: Production (Nginx)
FROM nginx:alpine
# Copy built files
COPY --from=builder /app/dist /usr/share/nginx/html
# Copy nginx configuration
COPY nginx.conf /etc/nginx/conf.d/default.conf
EXPOSE 80
CMD ["nginx", "-g", "daemon off;"]

View File

@@ -0,0 +1,13 @@
# docker-compose template for the Nginx-served Vite static site ({{...}}
# placeholders are filled by the deployment generator).
services:
  {{PROJECT_NAME}}:
    build: .
    container_name: {{PROJECT_NAME}}
    # Come back up after crashes/reboots unless explicitly stopped.
    restart: unless-stopped
    ports:
      # Container always listens on 80 (nginx); HOST_PORT remaps the host side.
      - "${HOST_PORT:-80}:80"
{{#if HAS_VOLUMES}}
    volumes:
{{#each VOLUMES}}
      - {{this}}
{{/each}}
{{/if}}

View File

@@ -0,0 +1,25 @@
# Nginx site config for the built Vite SPA.
server {
    listen 80;
    server_name localhost;
    root /usr/share/nginx/html;
    index index.html;

    # Security headers (consistent with the static-nginx template).
    # NOTE: nginx `add_header` is inherited by a location only if that location
    # declares NO add_header of its own, so these are repeated in the
    # asset-cache location below.
    add_header X-Frame-Options "SAMEORIGIN" always;
    add_header X-Content-Type-Options "nosniff" always;

    # Handle client-side routing
    location / {
        try_files $uri $uri/ /index.html;
    }

    # Cache static assets (Vite emits content-hashed filenames, so 1y is safe)
    location ~* \.(jpg|jpeg|png|gif|ico|css|js|svg|woff|woff2|ttf|eot)$ {
        expires 1y;
        add_header Cache-Control "public, immutable";
        # Re-declared: this location's add_header suppresses the server-level set.
        add_header X-Frame-Options "SAMEORIGIN" always;
        add_header X-Content-Type-Options "nosniff" always;
    }

    # Gzip compression
    gzip on;
    gzip_vary on;
    gzip_min_length 1024;
    gzip_proxied expired no-cache no-store private auth;
    gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;
}

View File

@@ -0,0 +1,49 @@
__pycache__
*.pyc
*.pyo
*.pyd
.Python
.git
.gitignore
.env
.env.local
*.tar
*.tar.gz
# Virtual environments
venv
.venv
env
# IDE
.vscode
.idea
# Test
.pytest_cache
.coverage
# ML artifacts (large files)
*.pt
*.pth
*.onnx
*.h5
*.pkl
models/
checkpoints/
weights/
# Data (mount as volume instead)
data/
datasets/
# Docker files
Dockerfile
docker-compose.yml
.dockerignore
docker-deployment.json
*.ps1
# Logs
*.log
logs

View File

@@ -0,0 +1,27 @@
FROM python:{{PYTHON_VERSION}}-slim
# System libraries commonly needed by ML/audio/vision Python packages:
#   libsndfile1              - soundfile/librosa audio I/O
#   ffmpeg, libavcodec-extra - audio/video decoding
#   libgl1, libglib2.0-0     - OpenCV runtime.
# NOTE: `libgl1-mesa-glx` was a transitional package removed in Debian 12
# (bookworm), the base of current python:*-slim images — installing it fails;
# `libgl1` is the correct package.
RUN apt-get update && apt-get install -y --no-install-recommends \
    libsndfile1 \
    ffmpeg \
    libavcodec-extra \
    libgl1 \
    libglib2.0-0 \
    && rm -rf /var/lib/apt/lists/*
WORKDIR /app
# Install Python dependencies first for layer caching
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy application
COPY . .
# PYTHONUNBUFFERED=1 makes logs appear immediately in `docker logs`.
ENV PYTHONUNBUFFERED=1
ENV PORT={{PORT}}
EXPOSE {{PORT}}
CMD ["python", "{{ENTRY_POINT}}"]

View File

@@ -0,0 +1,28 @@
# docker-compose template for a Python ML service ({{...}} placeholders are
# filled by the deployment generator).
services:
  {{PROJECT_NAME}}:
    build: .
    container_name: {{PROJECT_NAME}}
    # Come back up after crashes/reboots unless explicitly stopped.
    restart: unless-stopped
    ports:
      # HOST_PORT (from .env) can remap the host side; the container port is fixed.
      - "${HOST_PORT:-{{PORT}}}:{{PORT}}"
{{#if USE_ENV_FILE}}
    env_file:
      - .env
{{/if}}
    environment:
      PYTHONUNBUFFERED: 1
      PORT: {{PORT}}
{{#if HAS_VOLUMES}}
    volumes:
{{#each VOLUMES}}
      - {{this}}
{{/each}}
{{/if}}
    # Uncomment below for GPU support (requires the NVIDIA container toolkit
    # on the host).
    # deploy:
    #   resources:
    #     reservations:
    #       devices:
    #         - driver: nvidia
    #           count: 1
    #           capabilities: [gpu]

View File

@@ -0,0 +1,44 @@
__pycache__
*.pyc
*.pyo
*.pyd
.Python
.git
.gitignore
.env
.env.local
*.tar
*.tar.gz
# Virtual environments
venv
.venv
env
ENV
# IDE
.vscode
.idea
*.swp
# Test & Coverage
.pytest_cache
.coverage
htmlcov
.tox
# Documentation
README.md
README.DOCKER.md
docs
# Docker files
Dockerfile
docker-compose.yml
.dockerignore
docker-deployment.json
*.ps1
# Logs
*.log
logs

View File

@@ -0,0 +1,18 @@
# Single-stage image for a plain Python application.
FROM python:{{PYTHON_VERSION}}-slim
WORKDIR /app
# Install dependencies first so this layer is cached independently of source changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy application
COPY . .
# PYTHONUNBUFFERED=1 makes logs appear immediately in `docker logs`.
ENV PYTHONUNBUFFERED=1
ENV PORT={{PORT}}
EXPOSE {{PORT}}
CMD ["python", "{{ENTRY_POINT}}"]

View File

@@ -0,0 +1,20 @@
# docker-compose template for a Python service ({{...}} placeholders are
# filled by the deployment generator).
services:
  {{PROJECT_NAME}}:
    build: .
    container_name: {{PROJECT_NAME}}
    # Come back up after crashes/reboots unless explicitly stopped.
    restart: unless-stopped
    ports:
      # HOST_PORT (from .env) can remap the host side; the container port is fixed.
      - "${HOST_PORT:-{{PORT}}}:{{PORT}}"
{{#if USE_ENV_FILE}}
    env_file:
      - .env
{{/if}}
    environment:
      PYTHONUNBUFFERED: 1
      PORT: {{PORT}}
{{#if HAS_VOLUMES}}
    volumes:
{{#each VOLUMES}}
      - {{this}}
{{/each}}
{{/if}}

View File

@@ -0,0 +1,24 @@
.git
.gitignore
.env
.env.local
*.tar
*.tar.gz
# IDE
.vscode
.idea
# Documentation (keep README.md for the site if needed)
README.DOCKER.md
docs
# Docker files
Dockerfile
docker-compose.yml
.dockerignore
docker-deployment.json
*.ps1
# Logs
*.log

View File

@@ -0,0 +1,19 @@
# Serve a plain static site directly with Nginx (no build step).
FROM nginx:alpine
# Copy static files
COPY . /usr/share/nginx/html
# Copy nginx configuration
COPY nginx.conf /etc/nginx/conf.d/default.conf
# Remove Docker-related files from the served directory.
# Defense-in-depth: the generated .dockerignore should already keep these out
# of the build context; the `2>/dev/null || true` tolerates unmatched globs
# and missing files so the build never fails on this step.
RUN rm -f /usr/share/nginx/html/Dockerfile \
    /usr/share/nginx/html/docker-compose.yml \
    /usr/share/nginx/html/.dockerignore \
    /usr/share/nginx/html/docker-deployment.json \
    /usr/share/nginx/html/*.ps1 \
    /usr/share/nginx/html/README.DOCKER.md 2>/dev/null || true
EXPOSE 80
CMD ["nginx", "-g", "daemon off;"]

View File

@@ -0,0 +1,13 @@
# docker-compose template for the Nginx-served static site ({{...}}
# placeholders are filled by the deployment generator).
services:
  {{PROJECT_NAME}}:
    build: .
    container_name: {{PROJECT_NAME}}
    # Come back up after crashes/reboots unless explicitly stopped.
    restart: unless-stopped
    ports:
      # Container always listens on 80 (nginx); HOST_PORT remaps the host side.
      - "${HOST_PORT:-80}:80"
{{#if HAS_VOLUMES}}
    volumes:
{{#each VOLUMES}}
      - {{this}}
{{/each}}
{{/if}}

View File

@@ -0,0 +1,42 @@
# Nginx site config for a plain static site (with optional SPA-style routing).
server {
    listen 80;
    server_name localhost;
    root /usr/share/nginx/html;
    index index.html index.htm;

    # Security headers. NOTE: nginx `add_header` directives are inherited by a
    # location only if that location declares NO add_header of its own — the
    # asset-cache location below sets Cache-Control, which would silently drop
    # these, so they are repeated there.
    add_header X-Frame-Options "SAMEORIGIN" always;
    add_header X-Content-Type-Options "nosniff" always;
    add_header X-XSS-Protection "1; mode=block" always;

    # Handle SPA routing (fallback to index.html)
    location / {
        try_files $uri $uri/ /index.html;
    }

    # PHP handling (if PHP files exist)
    location ~ \.php$ {
        # Uncomment below if using PHP-FPM
        # fastcgi_pass php:9000;
        # fastcgi_index index.php;
        # fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
        # include fastcgi_params;
        # For static sites without PHP, return 404
        return 404;
    }

    # Cache static assets
    location ~* \.(jpg|jpeg|png|gif|ico|css|js|svg|woff|woff2|ttf|eot|pdf)$ {
        expires 1y;
        add_header Cache-Control "public, immutable";
        # Re-declared: this location's add_header suppresses the server-level set.
        add_header X-Frame-Options "SAMEORIGIN" always;
        add_header X-Content-Type-Options "nosniff" always;
        add_header X-XSS-Protection "1; mode=block" always;
    }

    # Gzip compression
    gzip on;
    gzip_vary on;
    gzip_min_length 1024;
    gzip_proxied expired no-cache no-store private auth;
    gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;
}