Scaffold dd0c/run: Rust agent (classifier, executor, audit) + TypeScript SaaS

- Rust agent: clap CLI, command classifier (read-only/modifying/destructive), executor with approval gates, audit log entries
- Classifier: pattern-based safety classification for shell, AWS, kubectl, terraform/tofu commands
- 6 Rust tests: read-only, destructive, modifying, empty, terraform apply, tofu destroy
- SaaS backend: Fastify server, runbook CRUD API, approval API, Slack interactive handler
- Slack integration: signature verification, block_actions for approve/reject buttons
- PostgreSQL schema with RLS: runbooks, executions, audit_entries (append-only), agents
- Dual Dockerfiles: Rust multi-stage (agent), Node multi-stage (SaaS)
- Gitea Actions CI: Rust test+clippy, Node typecheck+test
- Fly.io config for SaaS
This commit is contained in:
2026-03-01 03:03:29 +00:00
parent 6f692fc5ef
commit 57e7083986
18 changed files with 1046 additions and 0 deletions

View File

@@ -0,0 +1,43 @@
# CI: Rust agent (cargo test + clippy) and TypeScript SaaS (tsc + vitest).
name: CI

on:
  push:
    branches: [main]
  pull_request:

jobs:
  agent-test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Install Rust
        uses: dtolnay/rust-toolchain@stable
      - name: Test agent
        run: cargo test
        working-directory: products/06-runbook-automation/agent
      - name: Clippy
        run: cargo clippy -- -D warnings
        working-directory: products/06-runbook-automation/agent

  saas-test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: '22'
      - name: Install deps
        run: npm ci
        working-directory: products/06-runbook-automation/saas
      - name: Type check
        run: npx tsc --noEmit
        working-directory: products/06-runbook-automation/saas
      - name: Test
        run: npm test
        working-directory: products/06-runbook-automation/saas

View File

@@ -0,0 +1,28 @@
# Cargo manifest for the dd0c/run agent.
[package]
name = "dd0c-run-agent"
version = "0.1.0"
edition = "2021"
description = "dd0c/run agent — runbook automation with safety-first command execution"

# Installed binary name differs from the package name.
[[bin]]
name = "dd0c-run"
path = "src/main.rs"

[dependencies]
tokio = { version = "1", features = ["full"] }          # async runtime + tokio::process
serde = { version = "1", features = ["derive"] }
serde_json = "1"
reqwest = { version = "0.12", features = ["json", "rustls-tls"] }   # SaaS API client (rustls: no system OpenSSL)
clap = { version = "4", features = ["derive"] }         # CLI parsing
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }  # structured JSON logs
sha2 = "0.10"                                           # stdout/stderr hashing for audit entries
ed25519-dalek = { version = "2", features = ["rand_core"] }  # agent binary signature verification (Verify subcommand)
chrono = { version = "0.4", features = ["serde"] }      # audit timestamps
uuid = { version = "1", features = ["v4"] }             # audit entry ids
thiserror = "1"
anyhow = "1"

[dev-dependencies]
tokio-test = "0.4"
proptest = "1"

View File

@@ -0,0 +1,10 @@
# Multi-stage build: compile the agent with the Rust toolchain, then ship
# only the binary on a slim Debian base.
FROM rust:1.79-slim AS builder
WORKDIR /app
# `Cargo.lock*` glob tolerates a missing lockfile (builds unlocked then).
COPY Cargo.toml Cargo.lock* ./
COPY src/ src/
RUN cargo build --release

# Runtime stage: binary + CA certificates (needed for TLS to the SaaS API).
FROM debian:bookworm-slim
RUN apt-get update && apt-get install -y ca-certificates && rm -rf /var/lib/apt/lists/*
COPY --from=builder /app/target/release/dd0c-run /usr/local/bin/dd0c-run
ENTRYPOINT ["dd0c-run"]

View File

@@ -0,0 +1,58 @@
use serde::{Deserialize, Serialize};
use chrono::{DateTime, Utc};
use uuid::Uuid;
/// Immutable, append-only audit log entry.
/// Every command execution gets logged — no exceptions (BMad must-have).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuditEntry {
    /// UUID v4 string, generated in [`AuditEntry::new`].
    pub id: String,
    pub tenant_id: String,
    pub runbook_id: String,
    /// Position of the step within its runbook.
    pub step_index: usize,
    /// The shell command as submitted for execution.
    pub command: String,
    /// Classifier verdict as a string (see `classifier::SafetyLevel`).
    pub safety_level: String,
    /// Who approved the step, when approval was required and granted.
    pub approved_by: Option<String>,
    pub approval_method: Option<String>, // "slack_button", "api", "auto" (read-only only)
    /// Process exit code; `None` until the command finishes.
    pub exit_code: Option<i32>,
    pub stdout_hash: Option<String>, // SHA-256 of stdout (don't store raw output)
    pub stderr_hash: Option<String>,
    pub started_at: DateTime<Utc>,
    pub completed_at: Option<DateTime<Utc>>,
    pub duration_ms: Option<u64>,
    /// Current lifecycle state (see [`AuditStatus`]).
    pub status: AuditStatus,
}
/// Lifecycle of an audit entry, from creation through completion.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum AuditStatus {
    /// Created; not yet gated or executed.
    Pending,
    /// Blocked on a human approval decision.
    AwaitingApproval,
    /// Approval granted; not yet executing.
    Approved,
    /// Approval denied; the step will not run.
    Rejected,
    /// Command currently running.
    Executing,
    /// Command finished (any exit code; see `exit_code`).
    Completed,
    /// Command failed to spawn or was otherwise unsuccessful.
    Failed,
    /// Command exceeded its time budget.
    TimedOut,
}
impl AuditEntry {
pub fn new(tenant_id: &str, runbook_id: &str, step_index: usize, command: &str, safety_level: &str) -> Self {
Self {
id: Uuid::new_v4().to_string(),
tenant_id: tenant_id.to_string(),
runbook_id: runbook_id.to_string(),
step_index,
command: command.to_string(),
safety_level: safety_level.to_string(),
approved_by: None,
approval_method: None,
exit_code: None,
stdout_hash: None,
stderr_hash: None,
started_at: Utc::now(),
completed_at: None,
duration_ms: None,
status: AuditStatus::Pending,
}
}
}

View File

@@ -0,0 +1,254 @@
use serde::{Deserialize, Serialize};
/// Command safety classification.
/// No full-auto mode — destructive commands ALWAYS require human approval (BMad must-have).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum SafetyLevel {
    /// Read-only commands (ls, cat, kubectl get, aws describe-*)
    ReadOnly,
    /// Modifying but recoverable (restart service, scale replicas)
    Modifying,
    /// Destructive / irreversible (rm, drop, terminate, delete)
    Destructive,
}
/// Result of classifying a single command (see [`classify`]).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Classification {
    /// The trimmed command that was classified.
    pub command: String,
    pub safety: SafetyLevel,
    /// Whether a human must approve before execution.
    pub requires_approval: bool,
    /// Human-readable explanation of the verdict.
    pub reason: String,
    /// The pattern that triggered the verdict, if any matched.
    pub matched_pattern: Option<String>,
}
/// True for `NAME=value` shell environment assignments (e.g. `FOO=bar`).
fn is_env_assignment(token: &str) -> bool {
    match token.split_once('=') {
        Some((name, _)) => {
            !name.is_empty()
                && name.chars().all(|c| c.is_ascii_alphanumeric() || c == '_')
                && !name.chars().next().unwrap().is_ascii_digit()
        }
        None => false,
    }
}

/// Classify a shell command by safety level.
/// Uses pattern matching (not regex on raw strings — BMad review finding).
///
/// Leading `NAME=value` environment assignments and a leading `sudo` are
/// stripped before classification, so wrapped commands like `sudo rm -rf /`
/// are judged by the command that actually runs (previously they fell
/// through to ReadOnly because only the first token was inspected).
pub fn classify(command: &str) -> Classification {
    let trimmed = command.trim();
    let tokens: Vec<&str> = trimmed.split_whitespace().collect();

    // Strip wrapper prefixes: env assignments, then `sudo`, then env
    // assignments passed through sudo (`sudo FOO=bar cmd`).
    let mut idx = 0;
    while idx < tokens.len() && is_env_assignment(tokens[idx]) {
        idx += 1;
    }
    if idx < tokens.len() && tokens[idx] == "sudo" {
        idx += 1;
        while idx < tokens.len() && is_env_assignment(tokens[idx]) {
            idx += 1;
        }
        if idx < tokens.len() && tokens[idx].starts_with('-') {
            // sudo flags may consume the next token (`sudo -u admin rm …`),
            // so the real command cannot be determined statically.
            // Fail safe: require approval rather than guessing ReadOnly.
            return Classification {
                command: trimmed.to_string(),
                safety: SafetyLevel::Modifying,
                requires_approval: true,
                reason: "sudo with flags — target command not statically determined".into(),
                matched_pattern: Some("sudo".into()),
            };
        }
    }
    let rest = &tokens[idx..];

    if rest.is_empty() {
        return Classification {
            command: trimmed.to_string(),
            safety: SafetyLevel::ReadOnly,
            requires_approval: false,
            reason: "Empty command".into(),
            matched_pattern: None,
        };
    }
    let base_cmd = rest[0];

    // --- Destructive patterns (always require approval) ---
    let destructive_patterns: &[(&str, &str)] = &[
        ("rm", "file deletion"),
        ("rmdir", "directory deletion"),
        ("dd", "raw disk write"),
        ("mkfs", "filesystem format"),
        ("fdisk", "partition table modification"),
        ("shutdown", "system shutdown"),
        ("reboot", "system reboot"),
        ("kill", "process termination"),
        ("pkill", "process termination"),
        ("killall", "process termination"),
    ];
    for (pattern, reason) in destructive_patterns {
        if base_cmd == *pattern {
            return Classification {
                command: trimmed.to_string(),
                safety: SafetyLevel::Destructive,
                requires_approval: true,
                reason: reason.to_string(),
                matched_pattern: Some(pattern.to_string()),
            };
        }
    }

    // AWS destructive: any subcommand token naming a delete/terminate action.
    if base_cmd == "aws" {
        let has_destructive = rest.iter().any(|t| {
            matches!(*t, "terminate-instances" | "delete-stack" | "delete-bucket"
                | "delete-table" | "delete-function" | "delete-cluster"
                | "delete-service" | "deregister-task-definition"
                | "delete-db-instance" | "delete-db-cluster")
        });
        if has_destructive {
            return Classification {
                command: trimmed.to_string(),
                safety: SafetyLevel::Destructive,
                requires_approval: true,
                reason: "AWS resource deletion".into(),
                matched_pattern: Some("aws delete/terminate".into()),
            };
        }
    }

    // kubectl destructive (delete/drain/cordon/taint affect workloads/nodes).
    if base_cmd == "kubectl" {
        let has_destructive = rest.iter().any(|t| {
            matches!(*t, "delete" | "drain" | "cordon" | "taint")
        });
        if has_destructive {
            return Classification {
                command: trimmed.to_string(),
                safety: SafetyLevel::Destructive,
                requires_approval: true,
                reason: "Kubernetes destructive operation".into(),
                matched_pattern: Some("kubectl delete/drain".into()),
            };
        }
    }

    // terraform/tofu: both `destroy` and `apply` mutate real infrastructure.
    if base_cmd == "terraform" || base_cmd == "tofu" {
        let has_destructive = rest.iter().any(|t| {
            matches!(*t, "destroy" | "apply")
        });
        if has_destructive {
            return Classification {
                command: trimmed.to_string(),
                safety: SafetyLevel::Destructive,
                requires_approval: true,
                reason: "Infrastructure state change".into(),
                matched_pattern: Some("terraform destroy/apply".into()),
            };
        }
    }

    // --- Modifying patterns (approval recommended) ---
    let modifying_patterns: &[(&str, &str)] = &[
        ("systemctl", "service management"),
        ("service", "service management"),
        ("docker", "container management"),
        ("podman", "container management"),
        ("chmod", "permission change"),
        ("chown", "ownership change"),
        ("mv", "file move/rename"),
        ("cp", "file copy"),
        ("sed", "in-place file edit"),
        ("tee", "file write"),
    ];
    for (pattern, reason) in modifying_patterns {
        if base_cmd == *pattern {
            return Classification {
                command: trimmed.to_string(),
                safety: SafetyLevel::Modifying,
                requires_approval: true,
                reason: reason.to_string(),
                matched_pattern: Some(pattern.to_string()),
            };
        }
    }

    // AWS modifying (create/update/scale/start/stop — recoverable changes).
    if base_cmd == "aws" {
        let has_modifying = rest.iter().any(|t| {
            matches!(*t, "update-service" | "update-function-code" | "put-item"
                | "create-stack" | "update-stack" | "run-instances"
                | "stop-instances" | "start-instances" | "scale")
        });
        if has_modifying {
            return Classification {
                command: trimmed.to_string(),
                safety: SafetyLevel::Modifying,
                requires_approval: true,
                reason: "AWS resource modification".into(),
                matched_pattern: Some("aws modify".into()),
            };
        }
    }

    // kubectl modifying (mutates resources but is generally recoverable).
    if base_cmd == "kubectl" {
        let has_modifying = rest.iter().any(|t| {
            matches!(*t, "apply" | "patch" | "scale" | "rollout" | "edit" | "label" | "annotate")
        });
        if has_modifying {
            return Classification {
                command: trimmed.to_string(),
                safety: SafetyLevel::Modifying,
                requires_approval: true,
                reason: "Kubernetes resource modification".into(),
                matched_pattern: Some("kubectl modify".into()),
            };
        }
    }

    // --- Read-only (default) ---
    Classification {
        command: trimmed.to_string(),
        safety: SafetyLevel::ReadOnly,
        requires_approval: false,
        reason: "No destructive or modifying patterns detected".into(),
        matched_pattern: None,
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Plain read/list commands: no approval needed.
    #[test]
    fn test_read_only_commands() {
        let cases = vec!["ls -la", "cat /etc/hosts", "kubectl get pods", "aws s3 ls", "echo hello"];
        for cmd in cases {
            let result = classify(cmd);
            assert_eq!(result.safety, SafetyLevel::ReadOnly, "Expected ReadOnly for: {}", cmd);
            assert!(!result.requires_approval);
        }
    }

    // One case per destructive family: shell builtin, AWS, kubectl, terraform, signals, raw disk.
    #[test]
    fn test_destructive_commands() {
        let cases = vec![
            "rm -rf /tmp/data",
            "aws ec2 terminate-instances --instance-ids i-123",
            "kubectl delete pod nginx",
            "terraform destroy",
            "kill -9 1234",
            "dd if=/dev/zero of=/dev/sda",
        ];
        for cmd in cases {
            let result = classify(cmd);
            assert_eq!(result.safety, SafetyLevel::Destructive, "Expected Destructive for: {}", cmd);
            assert!(result.requires_approval);
        }
    }

    // Recoverable mutations: still gated behind approval.
    #[test]
    fn test_modifying_commands() {
        let cases = vec![
            "systemctl restart nginx",
            "docker restart my-container",
            "chmod 755 script.sh",
            "aws ecs update-service --cluster prod --service api",
            "kubectl apply -f deployment.yaml",
        ];
        for cmd in cases {
            let result = classify(cmd);
            assert_eq!(result.safety, SafetyLevel::Modifying, "Expected Modifying for: {}", cmd);
            assert!(result.requires_approval);
        }
    }

    // Empty input must not panic and defaults to the safest no-op verdict.
    #[test]
    fn test_empty_command() {
        let result = classify("");
        assert_eq!(result.safety, SafetyLevel::ReadOnly);
    }

    // `apply` mutates real infra even with -auto-approve — treated as destructive.
    #[test]
    fn test_terraform_apply_is_destructive() {
        let result = classify("terraform apply -auto-approve");
        assert_eq!(result.safety, SafetyLevel::Destructive);
        assert!(result.requires_approval);
    }

    // OpenTofu is classified identically to terraform.
    #[test]
    fn test_tofu_destroy_is_destructive() {
        let result = classify("tofu destroy");
        assert_eq!(result.safety, SafetyLevel::Destructive);
    }
}

View File

@@ -0,0 +1,94 @@
use serde::{Deserialize, Serialize};
use crate::classifier::{classify, SafetyLevel};
use crate::audit::{AuditEntry, AuditStatus};
/// Execution result for a single step.
/// Execution result for a single step.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StepResult {
    /// Position of the step in its runbook.
    /// NOTE(review): `execute_step` currently always fills this with 0.
    pub step_index: usize,
    pub command: String,
    /// Process exit code; -1 when the command never produced one
    /// (spawn failure, rejection).
    pub exit_code: i32,
    pub stdout: String,
    pub stderr: String,
    pub duration_ms: u64,
    pub status: StepStatus,
}
/// Outcome of a single step attempt.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum StepStatus {
    /// Process ran and exited successfully.
    Success,
    /// Process exited non-zero or failed to spawn.
    Failed,
    /// Not executed (e.g. dry run).
    Skipped,
    /// Waiting on a human approval decision.
    AwaitingApproval,
    /// Approval was denied; command never ran.
    Rejected,
    /// Exceeded its time budget.
    TimedOut,
}
/// Execute a command after classification and approval check.
/// Destructive commands are NEVER auto-approved (BMad must-have).
///
/// Gate order:
/// 1. `dry_run` — classify only, never execute and never prompt for
///    approval (nothing will run, so there is nothing to approve).
///    Previously the approval gate ran first, so a dry run of a
///    destructive step still blocked on `approval_callback`.
/// 2. Approval — `approval_callback` must return `true` for any command
///    whose classification requires it; otherwise the step is `Rejected`
///    without executing.
pub async fn execute_step(
    command: &str,
    dry_run: bool,
    approval_callback: &dyn Fn(&str, SafetyLevel) -> bool,
) -> StepResult {
    let classification = classify(command);

    // Dry run: report without executing or prompting.
    if dry_run {
        return StepResult {
            step_index: 0,
            command: command.to_string(),
            exit_code: 0,
            stdout: format!("[DRY RUN] Would execute: {}", command),
            stderr: String::new(),
            duration_ms: 0,
            status: StepStatus::Skipped,
        };
    }

    // Approval gate — modifying/destructive commands need an explicit yes.
    if classification.requires_approval {
        let approved = approval_callback(command, classification.safety);
        if !approved {
            return StepResult {
                step_index: 0,
                command: command.to_string(),
                exit_code: -1,
                stdout: String::new(),
                stderr: "Approval denied".into(),
                duration_ms: 0,
                status: StepStatus::Rejected,
            };
        }
    }

    // Execute via `sh -c` so runbook steps may use pipes/redirection.
    let start = std::time::Instant::now();
    let output = tokio::process::Command::new("sh")
        .arg("-c")
        .arg(command)
        .output()
        .await;
    let duration = start.elapsed().as_millis() as u64;

    match output {
        Ok(out) => StepResult {
            step_index: 0, // TODO: thread the real step index through
            command: command.to_string(),
            exit_code: out.status.code().unwrap_or(-1), // -1 when killed by signal
            stdout: String::from_utf8_lossy(&out.stdout).to_string(),
            stderr: String::from_utf8_lossy(&out.stderr).to_string(),
            duration_ms: duration,
            status: if out.status.success() { StepStatus::Success } else { StepStatus::Failed },
        },
        // Spawn failure (e.g. `sh` missing) — surface the OS error as stderr.
        Err(e) => StepResult {
            step_index: 0,
            command: command.to_string(),
            exit_code: -1,
            stdout: String::new(),
            stderr: e.to_string(),
            duration_ms: duration,
            status: StepStatus::Failed,
        },
    }
}

View File

@@ -0,0 +1,85 @@
use clap::{Parser, Subcommand};
use tracing::info;
mod parser;
mod classifier;
mod executor;
mod audit;
/// Top-level CLI: exactly one required subcommand (see [`Commands`]).
#[derive(Parser)]
#[command(name = "dd0c-run", version, about = "Runbook automation agent")]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}
/// Agent subcommands.
#[derive(Subcommand)]
enum Commands {
    /// Execute a runbook
    Run {
        /// Path to runbook file (YAML/Markdown)
        #[arg(short, long)]
        runbook: String,
        /// dd0c SaaS endpoint
        #[arg(long, default_value = "https://api.dd0c.dev")]
        endpoint: String,
        /// API key
        #[arg(long, env = "DD0C_API_KEY")]
        api_key: String,
        /// Dry run (classify only, don't execute)
        #[arg(long)]
        dry_run: bool,
    },
    /// Classify a single command
    Classify {
        /// Command to classify
        command: String,
    },
    /// Verify agent binary signature
    Verify {
        /// Path to signature file
        #[arg(short, long)]
        sig: String,
        /// Path to public key
        #[arg(short, long)]
        pubkey: String,
    },
    /// Print version
    Version,
}
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Structured JSON logs; level from RUST_LOG, defaulting to info for this crate.
    tracing_subscriber::fmt()
        .with_env_filter(
            tracing_subscriber::EnvFilter::try_from_default_env()
                .unwrap_or_else(|_| "dd0c_run=info".into()),
        )
        .json()
        .init();
    let cli = Cli::parse();
    match cli.command {
        // `_api_key` is intentionally unused until the SaaS client lands;
        // the underscore bindings keep CI's `cargo clippy -- -D warnings` green
        // (unused-variable warnings previously failed the build).
        Commands::Run { runbook, endpoint, api_key: _api_key, dry_run } => {
            info!(runbook = %runbook, endpoint = %endpoint, dry_run, "Starting runbook execution");
            // TODO: Parse runbook → classify steps → execute with approval gates
        }
        Commands::Classify { command } => {
            let result = classifier::classify(&command);
            println!("{}", serde_json::to_string_pretty(&result)?);
        }
        Commands::Verify { sig: _sig, pubkey: _pubkey } => {
            // TODO: Ed25519 signature verification
            println!("Signature verification not yet implemented");
        }
        Commands::Version => {
            println!("dd0c/run agent v{}", env!("CARGO_PKG_VERSION"));
        }
    }
    Ok(())
}

View File

@@ -0,0 +1,52 @@
use serde::{Deserialize, Serialize};
/// Parsed runbook step.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RunbookStep {
    /// Position within the runbook (zero-based).
    pub index: usize,
    /// Human-readable summary of what the step does.
    pub description: String,
    /// Shell command to classify and execute.
    pub command: String,
    pub timeout_seconds: u64,
    /// What to do when the step fails (see [`FailureAction`]).
    pub on_failure: FailureAction,
    pub condition: Option<String>, // Optional: only run if previous step output matches
}
/// Policy applied when a step fails.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum FailureAction {
    /// Stop the whole runbook.
    Abort,
    /// Record the failure and move on.
    Continue,
    /// Re-run the step up to `max_attempts` times.
    Retry { max_attempts: u32 },
}
/// Parsed runbook.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Runbook {
    pub name: String,
    pub description: String,
    /// Runbook version string (author-controlled, not semver-validated).
    pub version: String,
    /// Steps in execution order.
    pub steps: Vec<RunbookStep>,
}
/// Parse a YAML runbook into structured steps.
///
/// Placeholder: currently ignores its input and always succeeds with an
/// empty runbook. NOTE(review): real parsing needs `serde_yaml`, which is
/// not yet in Cargo.toml — add it when implementing.
///
/// The parameter is underscore-prefixed so the unused-variable warning
/// doesn't fail CI's `cargo clippy -- -D warnings`.
pub fn parse_yaml(_content: &str) -> anyhow::Result<Runbook> {
    Ok(Runbook {
        name: "placeholder".into(),
        description: "TODO: implement YAML parser".into(),
        version: "0.1.0".into(),
        steps: vec![],
    })
}
#[cfg(test)]
mod tests {
    use super::*;

    // Documents the placeholder contract: any input yields an empty runbook.
    #[test]
    fn test_parse_empty_returns_placeholder() {
        let result = parse_yaml("").unwrap();
        assert_eq!(result.name, "placeholder");
        assert!(result.steps.is_empty());
    }
}

View File

@@ -0,0 +1,27 @@
# Fly.io deployment config for the dd0c/run SaaS backend (not the Rust agent).
app = "dd0c-run"
primary_region = "iad"

[build]
# Path is relative to the directory Fly deploys from.
dockerfile = "saas/Dockerfile"

[env]
NODE_ENV = "production"
PORT = "3000"
LOG_LEVEL = "info"

[http_service]
internal_port = 3000
force_https = true
# Scale-to-zero: machines stop when idle and cold-start on the next request.
auto_stop_machines = true
auto_start_machines = true
min_machines_running = 0

[http_service.concurrency]
# Soft limit sheds load to other machines; hard limit rejects requests.
type = "requests"
hard_limit = 100
soft_limit = 80

[[vm]]
cpu_kind = "shared"
cpus = 1
memory_mb = 256

View File

@@ -0,0 +1,14 @@
# Multi-stage build: compile TypeScript in the builder, ship only runtime deps.
FROM node:22-slim AS builder
WORKDIR /app
# `package-lock.json*` glob tolerates a missing lockfile.
COPY package.json package-lock.json* ./
RUN npm ci
COPY . .
RUN npm run build
# Drop devDependencies (typescript, vitest, tsx, …) so the runtime image
# ships production deps only — previously the full dev node_modules was copied.
RUN npm prune --omit=dev

FROM node:22-slim
WORKDIR /app
COPY --from=builder /app/dist ./dist
COPY --from=builder /app/node_modules ./node_modules
COPY --from=builder /app/package.json ./
EXPOSE 3000
CMD ["node", "dist/index.js"]

View File

@@ -0,0 +1,94 @@
-- dd0c/run V1 schema
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

-- Tenants: billing/isolation root. All other tables hang off tenant_id.
CREATE TABLE tenants (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
name TEXT NOT NULL,
slug TEXT NOT NULL UNIQUE,
tier TEXT NOT NULL DEFAULT 'free' CHECK (tier IN ('free', 'pro')),
created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

-- Runbooks: stored as raw YAML; step_count is denormalized for listings.
CREATE TABLE runbooks (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
tenant_id UUID NOT NULL REFERENCES tenants(id) ON DELETE CASCADE,
name TEXT NOT NULL,
description TEXT,
yaml_content TEXT NOT NULL,
step_count INT NOT NULL DEFAULT 0,
status TEXT NOT NULL DEFAULT 'active' CHECK (status IN ('active', 'archived')),
created_by TEXT NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE INDEX idx_runbooks_tenant ON runbooks(tenant_id, status);

-- Executions: one row per runbook run (manual, Slack, scheduled, or webhook).
CREATE TABLE executions (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
tenant_id UUID NOT NULL REFERENCES tenants(id) ON DELETE CASCADE,
runbook_id UUID NOT NULL REFERENCES runbooks(id) ON DELETE CASCADE,
triggered_by TEXT NOT NULL,
trigger_source TEXT NOT NULL DEFAULT 'api' CHECK (trigger_source IN ('api', 'slack', 'schedule', 'webhook')),
dry_run BOOLEAN NOT NULL DEFAULT false,
variables JSONB NOT NULL DEFAULT '{}',
status TEXT NOT NULL DEFAULT 'pending' CHECK (status IN ('pending', 'running', 'awaiting_approval', 'completed', 'failed', 'aborted')),
started_at TIMESTAMPTZ,
completed_at TIMESTAMPTZ,
created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE INDEX idx_executions_tenant ON executions(tenant_id, created_at DESC);
CREATE INDEX idx_executions_runbook ON executions(runbook_id);

-- Audit entries (append-only, immutable — BMad must-have)
-- NOTE(review): "append-only" is NOT enforced at the DB level — nothing
-- blocks UPDATE/DELETE, and the ON DELETE CASCADE chains mean deleting a
-- tenant or execution erases the audit trail. Enforcing it with a trigger
-- would conflict with the status lifecycle (pending → awaiting_approval →
-- completed), which appears to rely on UPDATEs — resolve this deliberately.
CREATE TABLE audit_entries (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
tenant_id UUID NOT NULL REFERENCES tenants(id) ON DELETE CASCADE,
execution_id UUID NOT NULL REFERENCES executions(id) ON DELETE CASCADE,
step_index INT NOT NULL,
command TEXT NOT NULL,
safety_level TEXT NOT NULL CHECK (safety_level IN ('read_only', 'modifying', 'destructive')),
status TEXT NOT NULL DEFAULT 'pending' CHECK (status IN ('pending', 'awaiting_approval', 'approved', 'rejected', 'executing', 'completed', 'failed', 'timed_out')),
approved_by TEXT,
approval_method TEXT CHECK (approval_method IN ('slack_button', 'api', 'auto')),
exit_code INT,
-- SHA-256 digests; raw stdout/stderr are never stored.
stdout_hash TEXT,
stderr_hash TEXT,
started_at TIMESTAMPTZ NOT NULL DEFAULT now(),
completed_at TIMESTAMPTZ,
duration_ms INT
);
CREATE INDEX idx_audit_execution ON audit_entries(execution_id, step_index);
CREATE INDEX idx_audit_tenant ON audit_entries(tenant_id, started_at DESC);

-- Agent registrations: API key stored as hash + display prefix only.
CREATE TABLE agents (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
tenant_id UUID NOT NULL REFERENCES tenants(id) ON DELETE CASCADE,
name TEXT NOT NULL,
api_key_hash TEXT NOT NULL,
api_key_prefix TEXT NOT NULL,
last_heartbeat_at TIMESTAMPTZ,
version TEXT,
enabled BOOLEAN NOT NULL DEFAULT true,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
UNIQUE(tenant_id, name)
);

-- RLS: tenant isolation keyed on the app.tenant_id GUC set per connection.
-- The `true` (missing_ok) arg makes current_setting return NULL when unset,
-- so a connection without tenant context sees no rows.
-- NOTE(review): table owners/superusers bypass RLS unless
-- FORCE ROW LEVEL SECURITY is added — confirm which role the app connects as.
ALTER TABLE runbooks ENABLE ROW LEVEL SECURITY;
ALTER TABLE executions ENABLE ROW LEVEL SECURITY;
ALTER TABLE audit_entries ENABLE ROW LEVEL SECURITY;
ALTER TABLE agents ENABLE ROW LEVEL SECURITY;
CREATE POLICY tenant_iso_runbooks ON runbooks
USING (tenant_id::text = current_setting('app.tenant_id', true));
CREATE POLICY tenant_iso_executions ON executions
USING (tenant_id::text = current_setting('app.tenant_id', true));
CREATE POLICY tenant_iso_audit ON audit_entries
USING (tenant_id::text = current_setting('app.tenant_id', true));
CREATE POLICY tenant_iso_agents ON agents
USING (tenant_id::text = current_setting('app.tenant_id', true));

View File

@@ -0,0 +1,38 @@
{
"name": "dd0c-run",
"version": "0.1.0",
"private": true,
"type": "module",
"scripts": {
"dev": "tsx watch src/index.ts",
"build": "tsc",
"start": "node dist/index.js",
"test": "vitest run",
"lint": "eslint src/ tests/"
},
"dependencies": {
"fastify": "^4.28.0",
"@fastify/cors": "^9.0.0",
"@fastify/helmet": "^11.1.0",
"@fastify/websocket": "^10.0.0",
"pg": "^8.12.0",
"ioredis": "^5.4.0",
"zod": "^3.23.0",
"jsonwebtoken": "^9.0.2",
"pino": "^9.1.0",
"uuid": "^9.0.1",
"@slack/web-api": "^7.1.0",
"@slack/bolt": "^3.19.0"
},
"devDependencies": {
"typescript": "^5.5.0",
"tsx": "^4.15.0",
"vitest": "^1.6.0",
"@types/node": "^20.14.0",
"@types/pg": "^8.11.0",
"@types/jsonwebtoken": "^9.0.6",
"@types/uuid": "^9.0.8",
"@types/ws": "^8.5.10",
"eslint": "^9.5.0"
}
}

View File

@@ -0,0 +1,33 @@
import type { FastifyInstance } from 'fastify';
import { z } from 'zod';
import pino from 'pino';

const logger = pino({ name: 'api-approvals' });

/** Body for an approve/reject decision on a pending step. */
const approvalDecisionSchema = z.object({
  decision: z.enum(['approve', 'reject']),
  reason: z.string().max(500).optional(),
});

/**
 * Approval API: list pending approvals and record decisions.
 * Uses safeParse so malformed bodies return 400 instead of an unhandled
 * ZodError surfacing as a 500.
 */
export function registerApprovalRoutes(app: FastifyInstance) {
  // List pending approvals for tenant
  app.get('/api/v1/approvals', async (req, reply) => {
    // TODO: SELECT from audit_entries WHERE status = 'awaiting_approval'
    return { approvals: [] };
  });

  // Approve or reject a step
  app.post('/api/v1/approvals/:stepId', async (req, reply) => {
    const { stepId } = req.params as { stepId: string };
    const parsed = approvalDecisionSchema.safeParse(req.body);
    if (!parsed.success) {
      return reply.status(400).send({ error: 'Invalid body', issues: parsed.error.issues });
    }
    const body = parsed.data;
    // TODO: Update audit entry, notify agent via WebSocket/Redis pub-sub
    logger.info({ stepId, decision: body.decision }, 'Approval decision recorded');
    return {
      step_id: stepId,
      decision: body.decision,
      reason: body.reason,
    };
  });
}

View File

@@ -0,0 +1,73 @@
import type { FastifyInstance } from 'fastify';
import { z } from 'zod';
import pino from 'pino';

const logger = pino({ name: 'api-runbooks' });

/** Body for creating a runbook. */
const createRunbookSchema = z.object({
  name: z.string().min(1).max(200),
  description: z.string().max(2000).optional(),
  yaml_content: z.string().min(1),
});

/** Pagination/filter query for listing runbooks. */
const listQuerySchema = z.object({
  page: z.coerce.number().min(1).default(1),
  limit: z.coerce.number().min(1).max(100).default(20),
  status: z.enum(['active', 'archived']).optional(),
});

/** Body for triggering an execution. */
const executeSchema = z.object({
  dry_run: z.boolean().default(false),
  variables: z.record(z.string()).optional(),
});

/**
 * Runbook CRUD + execution API.
 * All validation uses safeParse so malformed input yields 400 instead of
 * an unhandled ZodError surfacing as a 500.
 */
export function registerRunbookRoutes(app: FastifyInstance) {
  // List runbooks
  app.get('/api/v1/runbooks', async (req, reply) => {
    const parsed = listQuerySchema.safeParse(req.query);
    if (!parsed.success) {
      return reply.status(400).send({ error: 'Invalid query', issues: parsed.error.issues });
    }
    const query = parsed.data;
    // TODO: SELECT from runbooks with RLS tenant context
    return { runbooks: [], page: query.page, limit: query.limit, total: 0 };
  });

  // Get single runbook
  app.get('/api/v1/runbooks/:id', async (req, reply) => {
    const { id } = req.params as { id: string };
    // TODO: SELECT by id
    return { runbook: null };
  });

  // Create runbook
  app.post('/api/v1/runbooks', async (req, reply) => {
    const parsed = createRunbookSchema.safeParse(req.body);
    if (!parsed.success) {
      return reply.status(400).send({ error: 'Invalid body', issues: parsed.error.issues });
    }
    const body = parsed.data;
    // TODO: INSERT into runbooks, parse YAML, validate steps
    logger.info({ name: body.name }, 'Runbook created');
    return reply.status(201).send({ id: 'placeholder', ...body });
  });

  // Trigger runbook execution
  app.post('/api/v1/runbooks/:id/execute', async (req, reply) => {
    const { id } = req.params as { id: string };
    const parsed = executeSchema.safeParse(req.body ?? {});
    if (!parsed.success) {
      return reply.status(400).send({ error: 'Invalid body', issues: parsed.error.issues });
    }
    const body = parsed.data;
    // TODO: Create execution record, dispatch to agent via WebSocket/queue
    logger.info({ runbookId: id, dryRun: body.dry_run }, 'Execution triggered');
    return reply.status(202).send({
      execution_id: 'placeholder',
      runbook_id: id,
      status: 'pending',
      dry_run: body.dry_run,
    });
  });

  // Get execution history
  app.get('/api/v1/runbooks/:id/executions', async (req, reply) => {
    const { id } = req.params as { id: string };
    // TODO: SELECT from executions
    return { executions: [] };
  });

  // Get execution detail (with step-by-step audit trail)
  app.get('/api/v1/executions/:executionId', async (req, reply) => {
    const { executionId } = req.params as { executionId: string };
    // TODO: SELECT execution + JOIN audit_entries
    return { execution: null, steps: [] };
  });
}

View File

@@ -0,0 +1,15 @@
import { z } from 'zod';

// Well-known fallback — acceptable for local dev only.
const DEV_JWT_SECRET = 'dev-secret-change-me-in-production!!';

/**
 * Environment configuration, validated at startup.
 * Unset optional Slack vars simply disable Slack features.
 */
const envSchema = z.object({
  PORT: z.coerce.number().default(3000),
  DATABASE_URL: z.string().default('postgresql://localhost:5432/dd0c_run'),
  REDIS_URL: z.string().default('redis://localhost:6379'),
  JWT_SECRET: z.string().min(32).default(DEV_JWT_SECRET),
  SLACK_BOT_TOKEN: z.string().optional(),
  SLACK_SIGNING_SECRET: z.string().optional(),
  CORS_ORIGIN: z.string().default('*'),
  LOG_LEVEL: z.string().default('info'),
});

export const config = envSchema.parse(process.env);

// Fail fast rather than sign production JWTs with the public dev secret.
if (process.env.NODE_ENV === 'production' && config.JWT_SECRET === DEV_JWT_SECRET) {
  throw new Error('JWT_SECRET must be set explicitly in production');
}

export type Config = z.infer<typeof envSchema>;

View File

@@ -0,0 +1,31 @@
import Fastify from 'fastify';
import cors from '@fastify/cors';
import helmet from '@fastify/helmet';
import pino from 'pino';
import { config } from './config/index.js';
import { registerRunbookRoutes } from './api/runbooks.js';
import { registerApprovalRoutes } from './api/approvals.js';
import { registerSlackRoutes } from './slackbot/handler.js';

// Startup logger; Fastify carries its own request logger (logger: true).
const logger = pino({ name: 'dd0c-run', level: config.LOG_LEVEL });

const app = Fastify({ logger: true });

// Security headers + CORS.
await app.register(cors, { origin: config.CORS_ORIGIN });
await app.register(helmet);

// Liveness probe.
app.get('/health', async () => ({ status: 'ok', service: 'dd0c-run' }));

// Mount all API route groups.
for (const register of [registerRunbookRoutes, registerApprovalRoutes, registerSlackRoutes]) {
  register(app);
}

try {
  await app.listen({ port: config.PORT, host: '0.0.0.0' });
  logger.info({ port: config.PORT }, 'dd0c/run SaaS started');
} catch (err) {
  logger.fatal(err, 'Failed to start');
  process.exit(1);
}

View File

@@ -0,0 +1,78 @@
import type { FastifyInstance } from 'fastify';
import pino from 'pino';
import crypto from 'node:crypto';
import { config } from '../config/index.js';

const logger = pino({ name: 'slackbot' });

/**
 * Slack interactive message handler.
 * Receives button clicks for approve/reject from Slack Block Kit messages.
 *
 * NOTE(review): Slack signs the RAW request body. Re-serializing the parsed
 * body with JSON.stringify is not guaranteed to be byte-identical to what
 * Slack sent, so verification can fail for legitimate requests — wire a
 * raw-body hook (e.g. fastify-raw-body) and verify against that. TODO confirm.
 */
export function registerSlackRoutes(app: FastifyInstance) {
  /** Verify Slack signature headers when a signing secret is configured. */
  function checkSignature(req: any): boolean {
    if (!config.SLACK_SIGNING_SECRET) return true;
    const timestamp = req.headers['x-slack-request-timestamp'] as string;
    const signature = req.headers['x-slack-signature'] as string;
    const body = JSON.stringify(req.body);
    return verifySlackSignature(body, timestamp, signature, config.SLACK_SIGNING_SECRET);
  }

  // Slack Events API verification + interactive payloads
  app.post('/slack/events', async (req, reply) => {
    if (!checkSignature(req)) {
      return reply.status(401).send({ error: 'Invalid signature' });
    }
    const payload = req.body as any;
    // URL verification challenge
    if (payload.type === 'url_verification') {
      return { challenge: payload.challenge };
    }
    return { ok: true };
  });

  // Slack interactive components (button clicks).
  // This endpoint now verifies the Slack signature too — previously it
  // skipped verification entirely, so anyone who knew the URL could forge
  // approve/reject clicks.
  app.post('/slack/interactions', async (req, reply) => {
    if (!checkSignature(req)) {
      return reply.status(401).send({ error: 'Invalid signature' });
    }
    // Slack sends form-encoded payload
    const rawPayload = (req.body as any)?.payload;
    if (!rawPayload) return reply.status(400).send({ error: 'Missing payload' });
    let payload: any;
    try {
      payload = JSON.parse(rawPayload);
    } catch {
      return reply.status(400).send({ error: 'Invalid payload' });
    }
    if (payload.type === 'block_actions') {
      for (const action of payload.actions ?? []) {
        // action_id format: "<actionType>:<stepId>"
        const [actionType, stepId] = (action.action_id ?? '').split(':');
        if (actionType === 'approve_step') {
          logger.info({ stepId, user: payload.user?.id }, 'Step approved via Slack');
          // TODO: Update audit entry, resume execution
        } else if (actionType === 'reject_step') {
          logger.info({ stepId, user: payload.user?.id }, 'Step rejected via Slack');
          // TODO: Update audit entry, abort execution
        }
      }
    }
    return { ok: true };
  });
}
/**
 * Verify a Slack request signature: HMAC-SHA256 over `v0:{timestamp}:{body}`
 * compared in constant time against the `x-slack-signature` header.
 *
 * Returns false (never throws) for missing or malformed headers, stale
 * timestamps (>5 min skew — replay protection), or mismatched signatures.
 */
function verifySlackSignature(body: string, timestamp: string, signature: string, secret: string): boolean {
  if (!timestamp || !signature) return false;
  // Reject stale requests (>5 min) — and non-numeric timestamps, which
  // previously slipped through because `Math.abs(NaN) > 300` is false.
  const ts = parseInt(timestamp, 10);
  if (!Number.isFinite(ts)) return false;
  const now = Math.floor(Date.now() / 1000);
  if (Math.abs(now - ts) > 300) return false;
  const sigBasestring = `v0:${timestamp}:${body}`;
  const expected = 'v0=' + crypto.createHmac('sha256', secret).update(sigBasestring).digest('hex');
  const sigBuf = Buffer.from(signature);
  const expBuf = Buffer.from(expected);
  // timingSafeEqual throws when lengths differ — treat that as a failed
  // match instead of letting it bubble up as an unhandled 500.
  if (sigBuf.length !== expBuf.length) return false;
  return crypto.timingSafeEqual(sigBuf, expBuf);
}

View File

@@ -0,0 +1,19 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"outDir": "dist",
"rootDir": "src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"declaration": true,
"declarationMap": true,
"sourceMap": true
},
"include": ["src"],
"exclude": ["node_modules", "dist", "tests"]
}