modelgrid/ts/cli/container-handler.ts
/**
* Container Handler
*
* CLI commands for container management.
*/
import { logger } from '../logger.ts';
import { theme } from '../colors.ts';
import { ContainerManager } from '../containers/container-manager.ts';
import { DockerManager } from '../docker/docker-manager.ts';
import type { IContainerConfig } from '../interfaces/container.ts';
import type { ITableColumn } from '../logger.ts';
import * as helpers from '../helpers/index.ts';
/**
* Handler for container-related CLI commands
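 *
 * @example
 * // Illustrative sketch only: assumes a ContainerManager instance
 * // (here `containerManager`) has already been constructed by the CLI layer
 * // and that subcommands are routed to this handler.
 * const handler = new ContainerHandler(containerManager);
 * await handler.list();
 * await handler.start('my-ollama');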
*/
export class ContainerHandler {
private containerManager: ContainerManager;
private dockerManager: DockerManager;
constructor(containerManager: ContainerManager) {
this.containerManager = containerManager;
this.dockerManager = new DockerManager();
}
/**
* List all configured containers
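   *
   * Prints a table with ID, name, type, status, health, port, loaded model
   * count, and GPU assignment, or a hint box when no containers are configured.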
*/
public async list(): Promise<void> {
logger.log('');
logger.info('Containers');
logger.log('');
const containers = this.containerManager.getAllContainers();
if (containers.length === 0) {
logger.logBox(
'No Containers',
[
'No containers are configured.',
'',
theme.dim('Add a container with:'),
` ${theme.command('modelgrid container add')}`,
],
60,
'warning',
);
return;
}
const rows = [];
for (const container of containers) {
const status = await container.getStatus();
const config = container.getConfig();
rows.push({
id: config.id,
name: config.name,
type: this.formatContainerType(container.type),
status: status.running
? theme.success('Running')
: theme.dim('Stopped'),
health: status.running
? this.formatHealth(status.health)
: theme.dim('N/A'),
port: config.externalPort || config.port,
models: status.loadedModels.length,
gpus: config.gpuIds.length > 0 ? config.gpuIds.join(',') : theme.dim('None'),
});
}
const columns: ITableColumn[] = [
{ header: 'ID', key: 'id', align: 'left' },
{ header: 'Name', key: 'name', align: 'left', color: theme.highlight },
{ header: 'Type', key: 'type', align: 'left' },
{ header: 'Status', key: 'status', align: 'left' },
{ header: 'Health', key: 'health', align: 'left' },
{ header: 'Port', key: 'port', align: 'right', color: theme.info },
{ header: 'Models', key: 'models', align: 'right' },
{ header: 'GPUs', key: 'gpus', align: 'left' },
];
logger.logTable(columns, rows);
logger.log('');
}
/**
* Add a new container interactively
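   *
   * Prompts for container type (Ollama, vLLM, or TGI), name, port, and GPU
   * assignment, then registers the resulting configuration with the manager.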
*/
public async add(): Promise<void> {
const { prompt, close, select } = await helpers.createPrompt();
try {
logger.log('');
logger.highlight('Add Container');
logger.dim('Configure a new AI model container');
logger.log('');
// Select container type
const typeIndex = await select('Select container type:', [
'Ollama - Easy to use, good for local models',
'vLLM - High performance, OpenAI compatible',
'TGI - HuggingFace Text Generation Inference',
]);
const types = ['ollama', 'vllm', 'tgi'] as const;
const containerType = types[typeIndex];
// Container name
const name = await prompt('Container name: ');
if (!name.trim()) {
logger.error('Container name is required');
return;
}
// Generate ID from name
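      // e.g. "My vLLM Node" -> "my-vllm-node"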
      const id = name.trim().toLowerCase().replace(/[^a-z0-9-]+/g, '-').replace(/^-+|-+$/g, '');
      // Port
      const defaultPorts = { ollama: 11434, vllm: 8000, tgi: 8080 };
      const portStr = await prompt(`Port [${defaultPorts[containerType]}]: `);
      const port = portStr.trim() ? parseInt(portStr, 10) : defaultPorts[containerType];
      if (Number.isNaN(port) || port < 1 || port > 65535) {
        logger.error('Port must be a number between 1 and 65535');
        return;
      }
// GPU assignment
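      // e.g. "0,1" -> ['0', '1']; "all" expands to every GPU id reported by GpuDetector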
const gpuStr = await prompt('GPU IDs (comma-separated, or "all", or empty for none): ');
let gpuIds: string[] = [];
if (gpuStr.trim().toLowerCase() === 'all') {
const { GpuDetector } = await import('../hardware/gpu-detector.ts');
const detector = new GpuDetector();
const gpus = await detector.detectGpus();
gpuIds = gpus.map((g) => g.id);
} else if (gpuStr.trim()) {
        gpuIds = gpuStr.split(',').map((s) => s.trim()).filter((s) => s.length > 0);
}
// Build config
const config: IContainerConfig = {
id,
type: containerType,
name,
image: this.getDefaultImage(containerType),
port,
gpuIds,
models: [],
};
// Add container
await this.containerManager.addContainer(config);
logger.log('');
logger.success(`Container "${name}" added successfully`);
logger.log('');
logger.dim('Start the container with:');
logger.log(` ${theme.command(`modelgrid container start ${id}`)}`);
logger.log('');
} finally {
close();
}
}
/**
* Remove a container
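   *
   * @param containerId - ID of the container to remove; a confirmation prompt is shown first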
*/
public async remove(containerId: string): Promise<void> {
if (!containerId) {
logger.error('Container ID is required');
return;
}
const { prompt, close } = await helpers.createPrompt();
try {
const confirm = await prompt(`Remove container "${containerId}"? (y/N): `);
if (confirm.toLowerCase() !== 'y') {
logger.log('Aborted');
return;
}
const success = await this.containerManager.removeContainer(containerId);
if (success) {
logger.success(`Container "${containerId}" removed`);
} else {
logger.error(`Failed to remove container "${containerId}"`);
}
} finally {
close();
}
}
/**
* Start a container
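   *
   * @param containerId - ID of the container to start; when omitted, all containers are started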
*/
public async start(containerId?: string): Promise<void> {
if (containerId) {
// Start specific container
const container = this.containerManager.getContainer(containerId);
if (!container) {
logger.error(`Container "${containerId}" not found`);
return;
}
logger.info(`Starting container "${containerId}"...`);
const success = await container.start();
if (success) {
logger.success(`Container "${containerId}" started`);
} else {
logger.error(`Failed to start container "${containerId}"`);
}
} else {
// Start all containers
logger.info('Starting all containers...');
await this.containerManager.startAll();
logger.success('All containers started');
}
}
/**
* Stop a container
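   *
   * @param containerId - ID of the container to stop; when omitted, all containers are stopped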
*/
public async stop(containerId?: string): Promise<void> {
if (containerId) {
// Stop specific container
const container = this.containerManager.getContainer(containerId);
if (!container) {
logger.error(`Container "${containerId}" not found`);
return;
}
logger.info(`Stopping container "${containerId}"...`);
const success = await container.stop();
if (success) {
logger.success(`Container "${containerId}" stopped`);
} else {
logger.error(`Failed to stop container "${containerId}"`);
}
} else {
// Stop all containers
logger.info('Stopping all containers...');
await this.containerManager.stopAll();
logger.success('All containers stopped');
}
}
/**
* Show container logs
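   *
   * @param containerId - ID of the container whose logs to print
   * @param lines - Number of trailing log lines to fetch (default 100)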
*/
public async logs(containerId: string, lines: number = 100): Promise<void> {
if (!containerId) {
logger.error('Container ID is required');
return;
}
const container = this.containerManager.getContainer(containerId);
if (!container) {
logger.error(`Container "${containerId}" not found`);
return;
}
const logs = await container.getLogs(lines);
console.log(logs);
}
/**
* Format container type for display
*/
private formatContainerType(type: string): string {
switch (type) {
case 'ollama':
return theme.containerOllama('Ollama');
case 'vllm':
return theme.containerVllm('vLLM');
case 'tgi':
return theme.containerTgi('TGI');
default:
return type;
}
}
/**
* Format health status
*/
private formatHealth(health: string): string {
switch (health) {
case 'healthy':
return theme.success('Healthy');
case 'unhealthy':
return theme.error('Unhealthy');
case 'starting':
return theme.warning('Starting');
default:
return theme.dim(health);
}
}
/**
* Get default image for container type
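   *
   * @param type - Container type ('ollama', 'vllm', or 'tgi')
   * @returns The default Docker image reference, or an empty string for unknown types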
*/
private getDefaultImage(type: string): string {
switch (type) {
case 'ollama':
return 'ollama/ollama:latest';
case 'vllm':
return 'vllm/vllm-openai:latest';
case 'tgi':
return 'ghcr.io/huggingface/text-generation-inference:latest';
default:
return '';
}
}
}