Sfoglia il codice sorgente

Merge refactor/boilerplates-v2 into main

xcad 6 mesi fa
parent
commit
82be6274b4
100 file modificati con 6376 aggiunte e 2764 eliminazioni
  1. 1 1
      .editorconfig
  2. 139 0
      .github/workflows/release.yaml
  3. 15 0
      .gitignore
  4. 79 0
      .renovate/CUSTOM_MANAGERS.md
  5. 94 0
      .renovate/README.md
  6. 42 0
      .renovate/sync-template-version.sh
  7. 350 0
      AGENTS.md
  8. 11 0
      MANIFEST.in
  9. 6 0
      TODO.md
  10. 1 0
      WARP.md
  11. 0 23
      actions/github/kubectl/kubernetes-deploy.yml
  12. 0 27
      actions/github/scp-action/copy-config-files.yml
  13. 0 32
      actions/github/ssh-action/restart-docker.yml
  14. 0 51
      actions/gitlab/ansible/run.yml
  15. 0 39
      actions/gitlab/ansible/test.yml
  16. 0 73
      actions/gitlab/docker/config.yml
  17. 0 80
      actions/gitlab/docker/deploy.yml
  18. 0 35
      actions/gitlab/docker/test.yml
  19. 0 53
      actions/gitlab/terraform/apply.yml
  20. 0 51
      actions/gitlab/terraform/validate.yml
  21. 0 19
      ansible/checkmk/activate-changes.yaml
  22. 0 17
      ansible/checkmk/install-agent.yaml
  23. 0 25
      ansible/checkmk/lookup-rule.yaml
  24. 0 22
      ansible/checkmk/manage-hosts.yaml
  25. 0 71
      ansible/checkmk/manage-rules.yaml
  26. 0 3
      ansible/checkmk/secrets.yaml
  27. 0 76
      ansible/discord/notify-discord.yaml
  28. 0 52
      ansible/docker/docker-certs-enable.yaml
  29. 0 158
      ansible/docker/docker-certs.yaml
  30. 0 35
      ansible/docker/inst-docker-ubuntu.yaml
  31. 0 14
      ansible/docker/maint-docker-clean.yaml
  32. 0 46
      ansible/kubernetes/README.md
  33. 0 2
      ansible/kubernetes/ansible.cfg
  34. 0 318
      ansible/kubernetes/inst-k8s.yaml
  35. 0 1
      ansible/kubernetes/k8s_worker_node_connection.j2
  36. 0 21
      ansible/portainer/deploy-portainer.yaml
  37. 0 18
      ansible/traefik/deploy-traefik.yaml
  38. 0 19
      ansible/ubuntu/config-add-sshkey.yaml
  39. 0 11
      ansible/ubuntu/inst-qemu-agent.yaml
  40. 0 19
      ansible/ubuntu/inst-vm-core.yaml
  41. 0 12
      ansible/ubuntu/inst-zsh.yaml
  42. 0 25
      ansible/ubuntu/maint-diskspace.yaml
  43. 0 16
      ansible/ubuntu/maint-reboot-required.yaml
  44. 0 9
      ansible/ubuntu/maint-reboot.yaml
  45. 0 14
      ansible/ubuntu/upd-apt.yaml
  46. 0 16
      ansible/wireguard/inst-wireguard.yaml
  47. 7 0
      cli/__init__.py
  48. 197 0
      cli/__main__.py
  49. 587 0
      cli/core/collection.py
  50. 772 0
      cli/core/config.py
  51. 519 0
      cli/core/display.py
  52. 133 0
      cli/core/exceptions.py
  53. 238 0
      cli/core/library.py
  54. 908 0
      cli/core/module.py
  55. 224 0
      cli/core/prompt.py
  56. 36 0
      cli/core/registry.py
  57. 376 0
      cli/core/repo.py
  58. 113 0
      cli/core/section.py
  59. 562 0
      cli/core/template.py
  60. 297 0
      cli/core/validators.py
  61. 377 0
      cli/core/variable.py
  62. 0 0
      cli/modules/__init__.py
  63. 292 0
      cli/modules/compose.py
  64. 0 42
      docker-compose/alloy/compose.yaml
  65. 0 43
      docker-compose/ansiblesemaphore/compose.yaml
  66. 0 130
      docker-compose/authentik/compose.yaml
  67. 0 12
      docker-compose/bind9/compose.yaml
  68. 0 1
      docker-compose/bind9/config/example.named.conf
  69. 0 17
      docker-compose/cadvisor/compose.yaml
  70. 0 16
      docker-compose/duplicati/compose.yaml
  71. 0 1
      docker-compose/factory/README.md
  72. 0 23
      docker-compose/factory/runner-pool/compose.yaml
  73. 0 8
      docker-compose/gitea/.env.example
  74. 0 90
      docker-compose/gitea/compose.yaml
  75. 0 52
      docker-compose/gitlab/compose.yaml
  76. 0 58
      docker-compose/gitlab/config/gitlab.rb
  77. 0 13
      docker-compose/grafana/compose.yaml
  78. 0 82
      docker-compose/homer/assets/example.config.yml
  79. 0 8
      docker-compose/homer/assets/example.custom.css
  80. 0 64
      docker-compose/homer/assets/example2.config.yml
  81. 0 10
      docker-compose/homer/compose.yaml
  82. 0 45
      docker-compose/influxdb/compose.yaml
  83. 0 32
      docker-compose/nextcloud/compose.yaml
  84. 0 26
      docker-compose/nginx/compose.yaml
  85. 0 14
      docker-compose/nvidiadgcm/compose.yaml
  86. 0 16
      docker-compose/nvidiasmi/compose.yaml
  87. 0 40
      docker-compose/pihole/compose.yaml
  88. 0 31
      docker-compose/portainer/compose.yaml
  89. 0 45
      docker-compose/postgres/compose.yaml
  90. 0 36
      docker-compose/swag/compose.yaml
  91. 0 24
      docker-compose/traefik/compose.yaml
  92. 0 21
      docker-compose/traefik/config/example.externalservice.yaml
  93. 0 20
      docker-compose/traefik/config/example.middleware-authentik.yaml
  94. 0 22
      docker-compose/traefik/config/example.middleware-passbolt.yaml
  95. 0 18
      docker-compose/traefik/config/example.tls.yaml
  96. 0 62
      docker-compose/traefik/config/traefik.yaml
  97. 0 36
      kestra/ansible/ansible-playbook-git.yaml
  98. 0 38
      kestra/ansible/ansible-playbook-inline.yaml
  99. 0 31
      kestra/docker/docker-build-git.yaml
  100. 0 33
      kestra/docker/docker-build-inline.yaml

+ 1 - 1
.editorconfig

@@ -43,7 +43,7 @@ trim_trailing_whitespace = false
 indent_size = 2
 
 [*.py]
-indent_size = 4
+indent_size = 2
 
 [*.tf]
 indent_size = unset

+ 139 - 0
.github/workflows/release.yaml

@@ -0,0 +1,139 @@
+---
+name: Release
+
+on:
+  push:
+    tags:
+      - 'v*.*.*'  # Trigger on version tags like v1.0.0, v2.1.3, etc.
+
+permissions:
+  contents: write
+
+jobs:
+  release:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Extract version from tag
+        id: version
+        run: |
+          # Remove 'v' prefix if present (e.g., v1.0.0 -> 1.0.0)
+          VERSION="${GITHUB_REF#refs/tags/}"
+          VERSION="${VERSION#v}"
+          echo "version=$VERSION" >> $GITHUB_OUTPUT
+          echo "tag=$GITHUB_REF_NAME" >> $GITHUB_OUTPUT
+          echo "Extracted version: $VERSION from tag $GITHUB_REF_NAME"
+
+      - name: Update version in pyproject.toml
+        run: |
+          VERSION="${{ steps.version.outputs.version }}"
+          sed -i "s/^version = .*/version = \"$VERSION\"/" pyproject.toml
+          echo "✓ Updated pyproject.toml with version $VERSION"
+
+      - name: Update version in cli/__main__.py
+        run: |
+          VERSION="${{ steps.version.outputs.version }}"
+          sed -i "s/^__version__ = .*/__version__ = \"$VERSION\"/" cli/__main__.py
+          echo "✓ Updated cli/__main__.py with version $VERSION"
+
+      - name: Verify changes
+        run: |
+          echo "=== pyproject.toml ==="
+          grep "^version" pyproject.toml
+          echo ""
+          echo "=== cli/__main__.py ==="
+          grep "^__version__" cli/__main__.py
+
+      - name: Commit and update tag
+        run: |
+          git config --local user.email "github-actions[bot]@users.noreply.github.com"
+          git config --local user.name "github-actions[bot]"
+
+          # Add changes
+          git add pyproject.toml cli/__main__.py
+
+          # Check if there are changes to commit
+          if git diff --staged --quiet; then
+            echo "No version changes needed"
+          else
+            # Commit the version updates
+            git commit -m "chore: bump version to ${{ steps.version.outputs.version }}"
+
+            # Delete the tag locally and remotely
+            git tag -d ${{ steps.version.outputs.tag }}
+            git push origin :refs/tags/${{ steps.version.outputs.tag }}
+
+            # Recreate the tag pointing to the new commit
+            git tag -a ${{ steps.version.outputs.tag }} -m "Release ${{ steps.version.outputs.tag }}"
+
+            # Push the new tag
+            git push origin ${{ steps.version.outputs.tag }}
+
+            echo "✓ Tag ${{ steps.version.outputs.tag }} updated to point to version bump commit"
+          fi
+
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.9'
+
+      - name: Install build dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install build twine
+
+      - name: Build package
+        run: python -m build
+
+      - name: Check distribution
+        run: |
+          echo "Built packages:"
+          ls -lh dist/
+          echo ""
+          echo "Checking package integrity:"
+          twine check dist/*
+
+      # PyPI publishing disabled for now - install via GitHub releases
+      # - name: Publish to PyPI
+      #   if: >
+      #     ${{ !contains(steps.version.outputs.version, 'alpha') &&
+      #     !contains(steps.version.outputs.version, 'beta') &&
+      #     !contains(steps.version.outputs.version, 'rc') }}
+      #   env:
+      #     TWINE_USERNAME: __token__
+      #     TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
+      #   run: |
+      #     echo "Publishing to PyPI..."
+      #     twine upload dist/*
+
+      - name: Create GitHub Release
+        uses: softprops/action-gh-release@v1
+        with:
+          tag_name: ${{ steps.version.outputs.tag }}
+          name: Release ${{ steps.version.outputs.tag }}
+          body: |
+            ## Boilerplates CLI ${{ steps.version.outputs.tag }}
+
+            Install using the installation script:
+            ```bash
+            curl -fsSL https://raw.githubusercontent.com/christianlempa/boilerplates/main/scripts/install.sh | bash
+            ```
+
+            Or install a specific version:
+            ```bash
+            curl -fsSL https://raw.githubusercontent.com/christianlempa/boilerplates/main/scripts/install.sh | bash -s -- --version ${{ steps.version.outputs.tag }}
+            ```
+          draft: false
+          prerelease: >
+            ${{ contains(steps.version.outputs.version, 'alpha') ||
+            contains(steps.version.outputs.version, 'beta') ||
+            contains(steps.version.outputs.version, 'rc') }}
+          files: |
+            dist/*.whl
+            dist/*.tar.gz
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

+ 15 - 0
.gitignore

@@ -5,6 +5,21 @@
 # Docker Secrets, Environment Files
 **/secret.*
 **/.env
+**/.envrc
 
 # Ignore Ansible
 **/.ansible
+
+# Python
+**/__pycache__/
+**/*.py[cod]
+**/*.pyo
+**/*.pyd
+**/.venv
+**/venv/
+
+# Packaging
+*.egg-info/
+
+# Installation tracking
+.installed-version

+ 79 - 0
.renovate/CUSTOM_MANAGERS.md

@@ -0,0 +1,79 @@
+# Renovate Custom Managers
+
+This document describes the custom regex managers configured for the boilerplates repository.
+
+## 1. Docker Compose Templates
+
+**File Pattern:** `library/compose/**/*.j2`
+
+**Detects:** Docker images in compose files
+
+**Example:**
+```yaml
+services:
+  app:
+    image: ghcr.io/goauthentik/server:2025.6.3
+    # Renovate will detect: depName=ghcr.io/goauthentik/server, currentValue=2025.6.3
+```
+
+## 2. Kubernetes Helm Values
+
+**File Patterns:** 
+- `library/kubernetes/**/helm/values.yaml`
+- `library/kubernetes/**/*.j2`
+
+**Detects:** Docker images using repository + tag pattern (common in Helm charts)
+
+**Example:**
+```yaml
+image:
+  repository: "longhornio/longhorn-engine"
+  tag: "v1.9.1"
+# Renovate will detect: depName=longhornio/longhorn-engine, currentValue=v1.9.1
+```
+
+## 3. Terraform Providers
+
+**File Patterns:**
+- `library/terraform/**/*.tf`
+- `library/terraform/**/*.j2`
+
+**Detects:** Terraform provider versions
+
+**Example:**
+```hcl
+terraform {
+  required_providers {
+    proxmox = {
+      source  = "telmate/proxmox"
+      version = "3.0.1-rc9"
+    }
+  }
+}
+# Renovate will detect: depName=telmate/proxmox, currentValue=3.0.1-rc9
+```
+
+## 4. Terraform Modules
+
+**File Patterns:**
+- `library/terraform/**/*.tf`
+- `library/terraform/**/*.j2`
+
+**Detects:** Terraform module versions from Git sources with `?ref=` parameter
+
+**Example:**
+```hcl
+module "vpc" {
+  source = "git::https://github.com/terraform-aws-modules/terraform-aws-vpc.git?ref=v5.1.2"
+}
+# Renovate will detect: depName=github.com/terraform-aws-modules/terraform-aws-vpc, currentValue=v5.1.2
+```
+
+## Post-Upgrade Tasks
+
+After any dependency update, Renovate runs `.renovate/sync-template-version.sh` which:
+1. Detects which `template.yaml` files were affected by the update
+2. Automatically bumps their patch version
+3. Includes the updated `template.yaml` files in the Renovate PR
+
+This ensures template metadata stays in sync with dependency updates across all modules (compose, kubernetes, terraform).

+ 94 - 0
.renovate/README.md

@@ -0,0 +1,94 @@
+# Renovate Configuration
+
+This directory contains helper scripts and configuration for Renovate bot automation.
+
+## Template Version Sync
+
+### Overview
+
+The `sync-template-version.sh` script automatically syncs Docker image versions from `compose.yaml.j2` files to their corresponding `template.yaml` metadata files.
+
+### How It Works
+
+1. **Renovate detects updates**: The custom regex manager in `renovate.json` detects Docker image versions in `.j2` template files
+2. **Updates are applied**: When Renovate creates a PR, it updates the Docker image version in `compose.yaml.j2`
+3. **Post-upgrade task runs**: After the update, the `sync-template-version.sh` script runs automatically
+4. **Metadata synced**: The script extracts the first Docker image version from each `compose.yaml.j2` and updates the `version` field in the corresponding `template.yaml`
+
+### Configuration
+
+In `renovate.json`, the following configuration enables this feature:
+
+```json
+{
+  "customManagers": [
+    {
+      "customType": "regex",
+      "description": "Update Docker images in Jinja2 compose templates",
+      "managerFilePatterns": [
+        "/^library/compose/.+/compose\\.ya?ml\\.j2$/"
+      ],
+      "matchStrings": [
+        "image:\\s*(?<depName>[^:\\s]+):(?<currentValue>[^\\s\\n{]+)"
+      ],
+      "datasourceTemplate": "docker"
+    }
+  ],
+  "postUpgradeTasks": {
+    "commands": [
+      ".renovate/sync-template-version.sh"
+    ],
+    "fileFilters": [
+      "library/compose/**/template.yaml"
+    ],
+    "executionMode": "update"
+  }
+}
+```
+
+### Manual Execution
+
+You can run the script manually at any time:
+
+```bash
+./.renovate/sync-template-version.sh
+```
+
+This will scan all compose templates and update their metadata versions to match the Docker image versions.
+
+### Limitations
+
+- Only updates templates that have a Docker image with a version tag (e.g., `image: name:1.2.3`)
+- Skips templates using Jinja2 variables for versions (e.g., `image: name:{{ version }}`)
+- Uses the **first** image found in the `compose.yaml.j2` file (typically the main application image)
+- Templates without `template.yaml` files are skipped
+
+### Template Structure
+
+Expected directory structure for each template:
+
+```
+library/compose/<template-name>/
+├── compose.yaml.j2     # Jinja2 template with Docker Compose config
+├── template.yaml       # Template metadata (includes version field)
+└── ... (other files)
+```
+
+The `template.yaml` should have a `version` field in the metadata section:
+
+```yaml
+---
+kind: compose
+metadata:
+  name: Application Name
+  description: Description
+  version: 0.1.0  # This will be auto-updated
+  author: Christian Lempa
+  date: '2025-10-02'
+```
+
+### Benefits
+
+- **Consistency**: Template versions automatically track Docker image versions
+- **Automation**: No manual version updates needed when Docker images are updated
+- **Traceability**: Easy to see which Docker image version a template was designed for

+ 42 - 0
.renovate/sync-template-version.sh

@@ -0,0 +1,42 @@
+#!/usr/bin/env bash
+# Sync the first Docker image version from compose.yaml.j2 to template.yaml
+# This script is called by Renovate as a post-upgrade task
+
+set -euo pipefail
+
+# Find all template directories
+find library/compose -type f -name "compose.yaml.j2" | while read -r compose_file; do
+    template_dir=$(dirname "$compose_file")
+    template_file="$template_dir/template.yaml"
+    
+    # Skip if template.yaml doesn't exist
+    [ ! -f "$template_file" ] && continue
+    
+    # Extract the first image version from compose.yaml.j2
+    # This matches: image: repo/name:version or image: name:version
+    # Ignores Jinja2 variables like {{ variable }}
+    version=$(grep -E '^\s*image:\s*[^{]*:[^{}\s]+' "$compose_file" | head -n1 | sed -E 's/.*:([^:]+)$/\1/' | tr -d ' ' || true)
+    
+    # Skip if no version found or if it's a Jinja2 variable
+    if [ -z "$version" ] || [[ "$version" =~ \{\{ ]]; then
+        continue
+    fi
+    
+    # Get current template version and trim whitespace
+    current_version=$(grep -E '^\s*version:\s*' "$template_file" | sed -E 's/.*version:\s*['\''"]?([^'\''"]+)['\''"]?/\1/' | tr -d ' ')
+    
+    # Only update if versions are different
+    if [ -n "$current_version" ] && [ "$version" != "$current_version" ]; then
+        echo "Updating $template_file: $current_version -> $version"
+        
+        # Use sed to update the version in template.yaml
+        # Works on both macOS and Linux
+        if [[ "$OSTYPE" == "darwin"* ]]; then
+            sed -i '' "s/version: .*/version: $version/" "$template_file"
+        else
+            sed -i "s/version: .*/version: $version/" "$template_file"
+        fi
+    fi
+done
+
+echo "Template version sync complete"

+ 350 - 0
AGENTS.md

@@ -0,0 +1,350 @@
+# AGENTS.md
+
+Guidance for AI Agents working with this repository.
+
+## Project Overview
+
+A sophisticated collection of infrastructure templates (boilerplates) with a Python CLI for management. Supports Terraform, Docker, Ansible, Kubernetes, etc. Built with Typer (CLI) and Jinja2 (templating).
+
+## Repository Structure
+
+- `cli/` - Python CLI application source code
+  - `cli/core/` - Core functionality (app, config, commands, logging)
+  - `cli/modules/` - Technology-specific modules (terraform, docker, compose, config, etc.)
+- `library/` - Template collections organized by module
+  - `library/ansible/` - Ansible playbooks and configurations
+  - `library/compose/` - Docker Compose configurations
+  - `library/docker/` - Docker templates
+  - `library/kubernetes/` - Kubernetes deployments
+  - `library/packer/` - Packer templates
+  - `library/terraform/` - OpenTofu/Terraform templates and examples
+
+## Development Setup
+
+### Running the CLI
+
+```bash
+# List available commands
+python3 -m cli
+
+# List templates for a module
+python3 -m cli compose list
+
+# Debugging commands
+python3 -m cli --log-level DEBUG compose list
+
+# Generate template to directory named after template (default)
+python3 -m cli compose generate nginx
+
+# Generate template to custom directory
+python3 -m cli compose generate nginx my-nginx-server
+
+# Generate template interactively (default - prompts for variables)
+python3 -m cli compose generate authentik
+
+# Generate template non-interactively (skips prompts, uses defaults and CLI variables)
+python3 -m cli compose generate authentik my-auth --no-interactive
+
+# Generate with variable overrides (non-interactive)
+python3 -m cli compose generate authentik my-auth \
+  --var service_name=auth \
+  --var ports_enabled=false \
+  --var database_type=postgres \
+  --no-interactive
+
+# Show template details
+python3 -m cli compose show authentik
+
+# Managing default values
+python3 -m cli compose defaults set service_name my-app
+python3 -m cli compose defaults get
+python3 -m cli compose defaults list
+
+# Managing library repositories
+python3 -m cli repo list
+python3 -m cli repo update
+python3 -m cli repo add my-lib https://github.com/user/templates --directory library --branch main
+python3 -m cli repo remove my-lib
+```
+
+## Common Development Tasks
+
+## Release Management
+
+**Process:** Tag-based workflow via `.github/workflows/release.yaml`. Push a semver tag (e.g., `v1.2.3`) to trigger.
+
+**Workflow Steps:**
+1. Extracts version from tag
+2. Auto-updates `pyproject.toml` and `cli/__main__.py` with version
+3. Recreates tag pointing to version bump commit
+4. Builds wheel/tarball
+5. Creates GitHub release (marks alpha/beta/rc as pre-release)
+
+**Important:** Never manually edit version numbers - they're placeholders (`0.0.0`) that get auto-updated.
+
+**User Installation:**
+```bash
+# Latest
+curl -fsSL https://raw.githubusercontent.com/christianlempa/boilerplates/main/scripts/install.sh | bash
+
+# Specific version
+curl -fsSL https://raw.githubusercontent.com/christianlempa/boilerplates/main/scripts/install.sh | bash -s -- --version v1.2.3
+```
+
+The `install.sh` script downloads the release tarball and installs via pipx. PyPI publishing is currently disabled.
+
+## Library System
+
+### Git-Based Libraries
+
+Templates are stored in git repositories and synced locally:
+
+- **Location**: `~/.config/boilerplates/libraries/{name}/`
+- **Config**: Stored in `~/.config/boilerplates/config.yaml`
+- **Sync**: Uses sparse-checkout to clone only template directories
+
+### Library Configuration
+
+Libraries are defined in the config file:
+
+```yaml
+libraries:
+  - name: default
+    url: https://github.com/christianlempa/boilerplates.git
+    branch: refactor/boilerplates-v2
+    directory: library  # Directory within repo containing templates
+    enabled: true
+```
+
+**Properties:**
+- `name`: Unique identifier for the library
+- `url`: Git repository URL
+- `branch`: Git branch to use (default: `main`)
+- `directory`: Path within repo where templates are located (use `.` for root)
+- `enabled`: Whether library is active
+
+### Sparse-Checkout
+
+The system uses git sparse-checkout (non-cone mode) to clone only the specified `directory`, avoiding unnecessary files:
+
+```bash
+# Only clones library/ directory, not root files
+git sparse-checkout init --no-cone
+git sparse-checkout set library/*
+```
+
+### Library Manager
+
+`LibraryManager` loads libraries from config and provides template discovery:
+
+- **Priority**: Libraries are searched in config order (first = highest priority)
+- **Deduplication**: Duplicate template IDs are resolved by priority
+- **Path Resolution**: Automatically handles `directory` config to locate templates
+
+### Config Manager
+
+`ConfigManager` handles all configuration:
+
+- **Location**: `~/.config/boilerplates/config.yaml`
+- **Atomic Writes**: Uses temp file + rename for safety
+- **Validation**: Comprehensive validation of all config fields
+- **Migration**: Auto-migrates old configs to add new sections
+
+**Main Sections:**
+- `defaults`: Per-module default variable values
+- `preferences`: User preferences (editor, output_dir, etc.)
+- `libraries`: Git repository configurations
+
+### Display Manager
+
+`DisplayManager` (`cli/core/display.py`) provides consistent output rendering:
+
+**Key Methods:**
+- `display_message(level, message, context)` - Unified message display
+- `display_success(message, context)` - Success messages
+- `display_error(message, context)` - Error messages  
+- `display_warning(message, context)` - Warning messages
+- `display_info(message, context)` - Info messages
+- `display_templates_table(templates, module, title)` - Template listings
+- `display_template_details(template, id)` - Detailed template view
+
+**Usage:**
+```python
+from cli.core.display import DisplayManager
+
+display = DisplayManager()
+display.display_success("Operation completed")
+display.display_error("Failed to process", context="module_name")
+```
+
+### Icon Manager
+
+`IconManager` provides **Nerd Font icons** for consistent CLI display:
+
+**Categories:**
+- **File Types**: `FILE_YAML`, `FILE_JSON`, `FILE_MARKDOWN`, `FILE_JINJA2`, `FILE_DOCKER`, etc.
+- **Status**: `STATUS_SUCCESS` (✓), `STATUS_ERROR` (✗), `STATUS_WARNING` (⚠), `STATUS_INFO` (ℹ)
+- **UI Elements**: `UI_CONFIG`, `UI_LOCK`, `UI_SETTINGS`, `UI_ARROW_RIGHT`
+
+**Important:** Icons use Nerd Font glyphs (Unicode characters). The terminal must have a Nerd Font installed.
+
+**Usage:**
+```python
+from cli.core.display import IconManager
+
+# Get status icon
+icon = IconManager.get_status_icon("success")  # Returns \uf00c (✓)
+
+# Get file icon
+icon = IconManager.get_file_icon("config.yaml")  # Returns \uf15c
+
+# Direct access
+folder = IconManager.folder()  # \uf07b
+lock = IconManager.lock()  # \uf084
+```
+
+**Best Practices:**
+- ❌ **Don't use emojis** (✓, ✗, ⚠) directly in output
+- ✅ **Do use IconManager** for all icons and symbols
+- ✅ **Do use DisplayManager** for consistent formatting
+- Example: `display.display_success(f"Added {name}")` not `console.print(f"✓ Added {name}")`
+
+## Architecture Notes
+
+### Key Components
+
+Modular architecture with dynamic module discovery:
+
+- **`cli/__main__.py`**: Entry point. Auto-discovers modules and registers commands.
+- **`cli/core/registry.py`**: Central module class store.
+- **`cli/core/module.py`**: Abstract `Module` base class for standardized commands (list, search, show, generate).
+- **`cli/core/library.py`**: `LibraryManager` finds templates from git-synced libraries with priority system.
+- **`cli/core/repo.py`**: Repository management for syncing git-based template libraries.
+- **`cli/core/config.py`**: `ConfigManager` handles configuration, defaults, and library definitions.
+- **`cli/core/template.py`**: Parses templates, merges YAML frontmatter with Jinja2 content.
+- **`cli/core/variable.py`**: Variable data structures (`Variable`, `VariableSection`, `VariableCollection`).
+- **`cli/core/prompt.py`**: Interactive CLI prompts via `rich` library.
+- **`cli/core/display.py`**: Consistent output rendering with `DisplayManager` and `IconManager`.
+
+### Template Format
+
+Templates are directory-based. Each template is a directory containing all the necessary files and subdirectories for the boilerplate.
+
+#### Main Template File
+
+Requires `template.yaml` or `template.yml` with metadata and variables in YAML frontmatter:
+
+```yaml
+---
+kind: compose
+metadata:
+  name: My Nginx Template
+  description: >
+    A template for a simple Nginx service.
+
+
+    Project: https://...
+
+    Source: https://
+
+    Documentation: https://
+  version: 0.1.0
+  author: Christian Lempa
+  date: '2024-10-01'
+spec:
+  general:
+    vars:
+      nginx_version:
+        type: string
+        description: The Nginx version to use.
+        default: latest
+```
+
+#### Template Files
+
+- **Jinja2 Templates (`.j2`)**: Rendered by Jinja2, `.j2` extension removed in output. Support `{% include %}` and `{% import %}`.
+- **Static Files**: Non-`.j2` files copied as-is.
+- **Sanitization**: Auto-sanitized (single blank lines, no leading blanks, trimmed whitespace, single trailing newline).
+
+#### Example Directory Structure
+
+```
+library/compose/my-nginx-template/
+├── template.yaml
+├── compose.yaml.j2
+├── config/
+│   └── nginx.conf.j2
+└── static/
+    └── README.md
+```
+
+#### Variables
+
+**Precedence** (lowest to highest):
+1. Module `spec` (defaults for all templates of that kind)
+2. Template `spec` (overrides module defaults)
+3. CLI `--var` (highest priority)
+
+**Key Features:**
+- **Required Sections**: Mark with `required: true` (general is implicit). Users must provide all values.
+- **Toggle Settings**: Conditional sections via `toggle: "bool_var_name"`. If false, section is skipped.
+- **Dependencies**: Use `needs: "section_name"` or `needs: ["sec1", "sec2"]`. Dependent sections only shown when dependencies are enabled. Auto-validated (detects circular/missing/self dependencies). Topologically sorted.
+
+**Example Section with Dependencies:**
+
+```yaml
+spec:
+  traefik:
+    title: "Traefik"
+    toggle: "traefik_enabled"
+    vars:
+      traefik_enabled:
+        type: "bool"
+        default: false
+      traefik_host:
+        type: "hostname"
+  
+  traefik_tls:
+    title: "Traefik TLS/SSL"
+    needs: "traefik"  # Only shown if traefik is enabled
+    toggle: "traefik_tls_enabled"
+    vars:
+      traefik_tls_enabled:
+        type: "bool"
+        default: true
+      traefik_tls_certresolver:
+        type: "str"
+```
+
+## Best Practices
+
+### Template Structure
+- Include `template.yaml`/`template.yml` with descriptive IDs (lowercase-with-hyphens)
+- Use subdirectories for Jinja2 templates (e.g., `config/`)
+- Prefer `config` module for app-specific configs vs complex directories
+
+### Variables
+- **Priority**: Prefer module spec → override when needed → add new only if unique
+- Use descriptive underscore names, always specify `type`
+- **Defaults**: Define sensible `default` values in `template.yaml` for all non-required variables (improves non-interactive generation)
+- **Credentials**: Mark with `sensitive: true` (hides input), `autogenerated: true` (auto-generates secure values when empty)
+
+### Jinja2
+- Keep logic simple, add descriptive comments
+
+### Docker Compose
+
+**Naming Conventions:**
+- Service: `service_name`, `container_name`, `container_timezone`, `restart_policy`
+- App: Prefix with app name (e.g., `authentik_secret_key`)
+- Database: `database_*` (type, enabled, external, host, port, name, user, password)
+- Network: `network_*` (enabled, name, external)
+- Traefik: `traefik_*` (enabled, host, tls_enabled, tls_entrypoint, tls_certresolver)
+- Ports: `ports_*` (enabled, http, https, ssh)
+- Email: `email_*` (enabled, host, port, username, password, from)
+
+**Patterns:**
+- Use scoped `.env.{service}.j2` files for better security/organization
+- Always: `depends_on`, named volumes, health checks (DB), `restart: {{ restart_policy | default('unless-stopped') }}`
+- Conditionals: `{% if not database_external %}` for service creation
+- Common toggles: `database_enabled`, `email_enabled`, `traefik_enabled`, `ports_enabled`, `network_enabled`

+ 11 - 0
MANIFEST.in

@@ -0,0 +1,11 @@
+# Include library directory with all templates
+recursive-include library *
+
+# Include documentation
+include README.md
+include LICENSE
+
+# Exclude unnecessary files
+global-exclude *.pyc
+global-exclude __pycache__
+global-exclude .DS_Store

+ 6 - 0
TODO.md

@@ -0,0 +1,6 @@
+* TODO Add compose deploy command to deploy a generated compose project to a local or remote docker environment
+* TODO Interactive Variable Prompt Improvements: The interactive prompt could be improved with better navigation, help text, and validation feedback.
+* TODO Better Error Recovery in Jinja2 Rendering: Improve error handling during Jinja2 template rendering with better context and suggestions.
+* FIXME Standardize DisplayManager and IconManager responsibilities and interactions (currently we mixed a bit where to create table structure, etc)
+* TODO Better Release Notes Management
+* TODO Better Documentation

+ 1 - 0
WARP.md

@@ -0,0 +1 @@
+AGENTS.md

+ 0 - 23
actions/github/kubectl/kubernetes-deploy.yml

@@ -1,23 +0,0 @@
----
-name: Kubernetes Deploy
-
-on:  # yamllint disable-line rule:truthy
-  push:
-    branches:
-      - main
-
-env:
-  KUBE_CONFIG: ${{ secrets.KUBE_CONFIG }}
-
-jobs:
-  deploy:
-    runs-on: your-runner
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-
-      - name: Deploy Manifest
-        uses: actions-hub/kubectl@master
-        with:
-          args: apply -f your-manifest.yml

+ 0 - 27
actions/github/scp-action/copy-config-files.yml

@@ -1,27 +0,0 @@
----
-name: copy config files to remote machine
-
-on:  # yamllint disable-line rule:truthy
-  push:
-    branches:
-      - main
-    paths:
-      - 'config/**'
-
-jobs:
-  deploy:
-    runs-on: your-runner
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-
-      - name: Upload new Config Files
-        uses: appleboy/scp-action@master
-        with:
-          username: your-username
-          host: your-host
-          key: ${{ secrets.your-private-ssh-key }}
-          source: './config/*'
-          target: '/target/path/'
-          strip_components: 1   # remove the top level directory

+ 0 - 32
actions/github/ssh-action/restart-docker.yml

@@ -1,32 +0,0 @@
----
-name: Update Docker Compose File
-
-on:  # yamllint disable-line rule:truthy
-  push:
-    branches:
-      - main
-    paths:
-      - 'docker-compose.yml'
-
-env:
-  YOUR-ENV-SECRET: ${{ secrets.YOUR-ENV-SECRET }}
-  YOUR-ENV-VAR: ${{ vars.YOUR-ENV-VAR }}
-jobs:
-  deploy:
-    runs-on: your-runner
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-
-      - name: Restart Docker Container
-        uses: fifsky/ssh-action@master
-        with:
-          user: your-user
-          host: your-host
-          key: ${{ secrets.your-private-ssh-key }}
-          command: |
-            cd your-compose-project-directory
-            export YOUR-ENV-SECRET=${{ secrets.YOUR-ENV-SECRET }}
-            export YOUR-ENV-VAR=${{ vars.YOUR-ENV-VAR }}
-            docker-compose up -d --force-recreate

+ 0 - 51
actions/gitlab/ansible/run.yml

@@ -1,51 +0,0 @@
----
-spec:
-  inputs:
-    as:
-      default: run-ansible
-    stage:
-      default: ansible
-
-    root_dir:
-      default: ${CI_PROJECT_DIR}/ansible
-      description: 'Root directory for the Ansible config and playbooks.'
-    project_file:
-      description: 'Ansible Playbook to run.'
-    inventory_file:
-      default: ${CI_PROJECT_DIR}/ansible/inventory
-      description: 'Ansible Inventory File'
-
-    remote_ssh:
-      description: 'Remote ssh'
-
----
-'$[[ inputs.as ]]':
-  stage: '$[[ inputs.stage ]]'
-  image:
-    name: alpine:latest
-    entrypoint: [""]
-  variables:
-    PROJECT_DIR: "$[[ inputs.root_dir ]]"
-    PROJECT_FILE: "$[[ inputs.project_file ]]"
-    INVENTORY_FILE: "$[[ inputs.inventory_file ]]"
-    SSH_KEY: "$[[ inputs.remote_ssh ]]"
-  before_script: |
-    echo "Before → Executing..."
-    echo "Before → Installing dependencies"
-    apk add --no-cache openssh-client ansible-core
-    echo "Before → Enter Ansible root directory"
-    cd ${PROJECT_DIR}
-    echo "Before → Adding ssh key"
-    echo "${SSH_KEY}" > id_rsa && chmod 600 id_rsa
-    eval $(ssh-agent -s)
-    ssh-add id_rsa
-    echo "Before → Setting additional environment variables"
-    export ANSIBLE_HOST_KEY_CHECKING=false
-  script: |
-    echo "Script → Executing..."
-    echo "Script → Run Ansible Playbooks"
-    ansible-playbook -i ${INVENTORY_FILE} ${PROJECT_FILE}
-  rules:
-    - if: '$CI_COMMIT_REF_NAME == "main"'
-      changes:
-        - '$[[ inputs.root_dir ]]/$[[ inputs.project_file ]]'

+ 0 - 39
actions/gitlab/ansible/test.yml

@@ -1,39 +0,0 @@
----
-spec:
-  inputs:
-    as:
-      default: test-ansible
-    stage:
-      default: test
-
-    root_dir:
-      default: ${CI_PROJECT_DIR}/ansible
-      description: 'Root directory for the Ansible config and playbooks.'
-    project_file:
-      description: 'Ansible Playbook to run.'
-
----
-'$[[ inputs.as ]]':
-  stage: '$[[ inputs.stage ]]'
-  image:
-    name: alpine:latest
-    entrypoint: [""]
-  variables:
-    ANSIBLE_DIR: "$[[ inputs.root_dir ]]"
-    PROJECT_FILE: "$[[ inputs.project_file ]]"
-  before_script: |
-    echo "Before → Executing..."
-    echo "Before → Enter Ansible root directory"
-    cd ${ANSIBLE_DIR}
-  script: |
-    echo "Script → Executing..."
-    echo "Before → Installing dependencies"
-    apk add --no-cache ansible-core
-    echo "Script → Test Ansible Playbooks"
-    ansible-lint ${PROJECT_FILE}
-  rules:
-    - if: |
-        $CI_PIPELINE_SOURCE == "push" ||
-        $CI_PIPELINE_SOURCE == "merge_request_event"
-      changes:
-        - '$[[ inputs.root_dir ]]/**'

+ 0 - 73
actions/gitlab/docker/config.yml

@@ -1,73 +0,0 @@
----
-spec:
-  inputs:
-    as:
-      default: config-docker
-    stage:
-      default: config
-
-    config_dir:
-      default: ${CI_PROJECT_DIR}
-      description: 'Config directory to copy.'
-    project_file:
-      default: 'compose.yaml'
-      description: 'Docker Compose file to use.'
-
-    remote_host:
-      description: 'Remote host'
-    remote_user:
-      description: 'Remote user'
-    remote_ssh:
-      description: 'Remote ssh'
-
-    remote_config:
-      default: ${CI_PROJECT_DIR}
-      description: 'Target directory on the remote server for the config.'
-    remote_dir:
-      default: ${CI_PROJECT_DIR}
-      description: 'Directory on the remote server for the Docker Compose project.'
-
-
-    restart:
-      default: 'false'
-      description: 'Restart the remote compose project after config update?'
-
----
-'$[[ inputs.as ]]':
-  stage: '$[[ inputs.stage ]]'
-  image: alpine:latest
-  variables:
-    CONFIG_DIR: "$[[ inputs.config_dir ]]"
-    PROJECT_FILE: "$[[ inputs.project_file ]]"
-    SSH_KEY: "$[[ inputs.remote_ssh ]]"
-    REMOTE_HOST: "$[[ inputs.remote_host ]]"
-    REMOTE_USER: "$[[ inputs.remote_user ]]"
-    REMOTE_CONFIG: "$[[ inputs.remote_config ]]"
-    REMOTE_PATH: "$[[ inputs.remote_dir ]]"
-    RESTART: "$[[ inputs.restart ]]"
-  before_script: |
-    echo "Before → Executing..."
-    echo "Before → Installing dependencies"
-    apk add --no-cache openssh-client
-    echo "Before → Adding ssh key"
-    echo "$SSH_KEY" > id_rsa && chmod 600 id_rsa
-    eval $(ssh-agent -s)
-    ssh-add id_rsa
-  script: |
-    echo "Script → Executing..."
-    echo "Script → Copying config file to remote host"
-    ssh -o StrictHostKeyChecking=no $REMOTE_USER@$REMOTE_HOST "mkdir -p $REMOTE_CONFIG"
-    scp -o StrictHostKeyChecking=no $CONFIG_DIR/* $REMOTE_USER@$REMOTE_HOST:$REMOTE_CONFIG
-    echo "Script → Executing remote commands"
-    ssh -o StrictHostKeyChecking=no $REMOTE_USER@$REMOTE_HOST<<EOF
-      if [ '$RESTART' = 'true' ]; then
-        echo "Script → Restarting services"
-        docker compose -f $REMOTE_PATH/$PROJECT_FILE down --remove-orphans
-        docker compose -f $REMOTE_PATH/$PROJECT_FILE up -d
-      fi
-    EOF
-    echo "Script ✓ Done"
-  rules:
-    - if: '$CI_COMMIT_REF_NAME == "main"'
-      changes:
-        - '$[[ inputs.config_dir ]]/**'

+ 0 - 80
actions/gitlab/docker/deploy.yml

@@ -1,80 +0,0 @@
----
-spec:
-  inputs:
-    as:
-      default: deploy-docker
-    stage:
-      default: deploy
-
-    root_dir:
-      default: ${CI_PROJECT_DIR}
-      description: 'Root directory for the Docker Compose project.'
-    project_file:
-      default: 'compose.yaml'
-      description: 'Docker Compose file to use.'
-
-    remote_host:
-      description: 'Remote host'
-    remote_user:
-      description: 'Remote user'
-    remote_ssh:
-      description: 'Remote ssh'
-
-    remote_dir:
-      default: ${CI_PROJECT_DIR}
-      description: 'Directory on the remote server for the Docker Compose project.'
-
-    docker_login:
-      default: 'true'
-      description: 'Login to Docker on the remote server?'
-    docker_user:
-      default: ${DOCKER_USER}
-      description: 'Docker user on the remote server'
-    docker_password:
-      default: ${DOCKER_PASSWORD}
-      description: 'Docker group on the remote server'
-
----
-'$[[ inputs.as ]]':
-  stage: '$[[ inputs.stage ]]'
-  image: docker:latest
-  variables:
-    PROJECT_DIR: "$[[ inputs.root_dir ]]"
-    PROJECT_FILE: "$[[ inputs.project_file ]]"
-    SSH_KEY: "$[[ inputs.remote_ssh ]]"
-    REMOTE_HOST: "$[[ inputs.remote_host ]]"
-    REMOTE_USER: "$[[ inputs.remote_user ]]"
-    REMOTE_PATH: "$[[ inputs.remote_dir ]]"
-    DOCKER_LOGIN: "$[[ inputs.docker_login ]]"
-    DOCKER_USER: "$[[ inputs.docker_user ]]"
-    DOCKER_PASSWORD: "$[[ inputs.docker_password ]]"
-  before_script: |
-    echo "Before → Executing..."
-    cd $PROJECT_DIR
-    echo "Before → Installing dependencies"
-    apk add --no-cache openssh-client
-    echo "Before → Adding ssh key"
-    echo "$SSH_KEY" > id_rsa && chmod 600 id_rsa
-    eval $(ssh-agent -s)
-    ssh-add id_rsa
-  script: |
-    echo "Script → Executing..."
-    echo "Script → Copying docker compose file to remote host"
-    ssh -o StrictHostKeyChecking=no $REMOTE_USER@$REMOTE_HOST "mkdir -p $REMOTE_PATH"
-    scp -o StrictHostKeyChecking=no $PROJECT_FILE $REMOTE_USER@$REMOTE_HOST:$REMOTE_PATH
-    echo "Script → Executing remote commands"
-    ssh -o StrictHostKeyChecking=no $REMOTE_USER@$REMOTE_HOST<<EOF
-      if [ '$DOCKER_LOGIN' = 'true' ]; then
-        echo "Script → Logging into docker hub"
-        docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
-      fi
-      echo "Script → Pulling and restarting services"
-      docker compose -f $REMOTE_PATH/$PROJECT_FILE pull -q
-      docker compose -f $REMOTE_PATH/$PROJECT_FILE down --remove-orphans
-      docker compose -f $REMOTE_PATH/$PROJECT_FILE up -d
-    EOF
-    echo "Script ✓ Done"
-  rules:
-    - if: '$CI_COMMIT_REF_NAME == "main"'
-      changes:
-        - '$[[ inputs.root_dir ]]/$[[ inputs.project_file ]]'

+ 0 - 35
actions/gitlab/docker/test.yml

@@ -1,35 +0,0 @@
----
-spec:
-  inputs:
-    as:
-      default: test-docker
-    stage:
-      default: test
-
-    root_dir:
-      default: ${CI_PROJECT_DIR}
-      description: 'Root directory for the Docker Compose project.'
-    project_file:
-      default: 'compose.yaml'
-      description: 'Docker Compose file to use.'
-
----
-'$[[ inputs.as ]]':
-  stage: '$[[ inputs.stage ]]'
-  image: docker:latest
-  variables:
-    PROJECT_DIR: "$[[ inputs.root_dir ]]"
-    PROJECT_FILE: "$[[ inputs.project_file ]]"
-  before_script:
-    - cd $PROJECT_DIR
-  script:
-    - docker compose -f $PROJECT_FILE config --quiet
-  rules:
-    - if: '$CI_COMMIT_REF_NAME == "main"'
-      changes:
-        - '$[[ inputs.root_dir ]]/$[[ inputs.project_file ]]'
-    - if: |
-        $CI_PIPELINE_SOURCE == "push" ||
-        $CI_PIPELINE_SOURCE == "merge_request_event"
-      changes:
-        - '$[[ inputs.root_dir ]]/$[[ inputs.project_file ]]'

+ 0 - 53
actions/gitlab/terraform/apply.yml

@@ -1,53 +0,0 @@
----
-spec:
-  inputs:
-    as:
-      default: apply-terraform
-    stage:
-      default: terraform
-
-    root_dir:
-      default: ${CI_PROJECT_DIR}/terraform
-      description: 'Root directory for the OpenTofu project.'
-    state_name:
-      default: default
-      description: 'Remote OpenTofu state name.'
-
----
-variables:
-  TF_ROOT: "$[[ inputs.root_dir ]]"
-  TF_ADDRESS: ${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/terraform/state/$[[ inputs.state_name ]]
-  TF_USERNAME: gitlab-ci-token
-  TF_PASSWORD: ${CI_JOB_TOKEN}
-
-'$[[ inputs.as ]]':
-  stage: '$[[ inputs.stage ]]'
-  image:
-    name: ghcr.io/opentofu/opentofu:latest
-    entrypoint: [""]
-  before_script: |
-    echo "Before → Executing..."
-    echo "Before → Enter TF root directory"
-    cd ${TF_ROOT}
-  script: |
-    echo "Script → Executing..."
-    echo "Script → Initialize Terraform backend"
-    tofu init \
-     -backend-config=address=${TF_ADDRESS} \
-     -backend-config=lock_address=${TF_ADDRESS}/lock \
-     -backend-config=unlock_address=${TF_ADDRESS}/lock \
-     -backend-config=username=${TF_USERNAME} \
-     -backend-config=password=${TF_PASSWORD} \
-     -backend-config=lock_method=POST \
-     -backend-config=unlock_method=DELETE \
-     -backend-config=retry_wait_min=5
-    echo "Script → Validate Terraform"
-    tofu validate
-    echo "Script → Plan Terraform"
-    tofu plan -lock=false -out=tfplan
-    echo "Script → Apply Terraform"
-    tofu apply -lock=false -auto-approve tfplan
-  rules:
-    - if: $CI_COMMIT_BRANCH == "main"
-      changes:
-        - '$[[ inputs.root_dir ]]/**'

+ 0 - 51
actions/gitlab/terraform/validate.yml

@@ -1,51 +0,0 @@
----
-spec:
-  inputs:
-    as:
-      default: validate-terraform
-    stage:
-      default: test
-
-    root_dir:
-      default: ${CI_PROJECT_DIR}/terraform
-      description: 'Root directory for the OpenTofu project.'
-    state_name:
-      default: default
-      description: 'Remote OpenTofu state name.'
-
----
-variables:
-  TF_ROOT: "$[[ inputs.root_dir ]]"
-  TF_ADDRESS: ${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/terraform/state/$[[ inputs.state_name ]]
-  TF_USERNAME: gitlab-ci-token
-  TF_PASSWORD: ${CI_JOB_TOKEN}
-
-'$[[ inputs.as ]]':
-  stage: '$[[ inputs.stage ]]'
-  image:
-    name: ghcr.io/opentofu/opentofu:latest
-    entrypoint: [""]
-  before_script: |
-    echo "Before → Executing..."
-    echo "Before → Enter TF root directory"
-    cd ${TF_ROOT}
-  script: |
-    echo "Script → Executing..."
-    echo "Script → Initialize Terraform backend"
-    tofu init \
-     -backend-config=address=${TF_ADDRESS} \
-     -backend-config=lock_address=${TF_ADDRESS}/lock \
-     -backend-config=unlock_address=${TF_ADDRESS}/lock \
-     -backend-config=username=${TF_USERNAME} \
-     -backend-config=password=${TF_PASSWORD} \
-     -backend-config=lock_method=POST \
-     -backend-config=unlock_method=DELETE \
-     -backend-config=retry_wait_min=5
-    echo "Script → Validate Terraform"
-    tofu validate
-  rules:
-    - if: |
-        $CI_PIPELINE_SOURCE == "push" ||
-        $CI_PIPELINE_SOURCE == "merge_request_event"
-      changes:
-        - '$[[ inputs.root_dir ]]/**'

+ 0 - 19
ansible/checkmk/activate-changes.yaml

@@ -1,19 +0,0 @@
----
-- name: "Activate Checkmk changes"
-  hosts: localhost
-  gather_facts: false
-  vars_files:
-    - secrets.yaml
-  vars:
-    server_url: "checkmk.home.arpa"
-    site: "cmk"
-
-  tasks:
-    - name: "Start activation on a specific site"
-      checkmk.general.activation:
-        server_url: "{{ server_url }}"
-        site: "{{ site }}"
-        automation_user: "{{ automation_user }}"
-        automation_secret: "{{ automation_secret }}"
-        sites:
-          - "{{ site }}"

+ 0 - 17
ansible/checkmk/install-agent.yaml

@@ -1,17 +0,0 @@
----
-- name: "Install Checkmk agent on all hosts"
-  hosts: "{{ my_hosts | d([]) }}"
-  roles:
-    - checkmk.general.agent
-  vars:
-    checkmk_agent_version: "2.4.0p4"
-    checkmk_agent_server: "checkmk.home.arpa"
-    checkmk_agent_server_protocol: https
-    checkmk_agent_site: "cmk"
-    checkmk_agent_auto_activate: true
-    checkmk_agent_tls: "true"  # NOTE: Register Agent to enable TLS
-    checkmk_agent_user: "{{ automation_user }}"
-    checkmk_agent_pass: "{{ automation_secret }}"
-    checkmk_agent_host_name: "{{ ansible_hostname }}"  # NOTE: Required to replace FQDN with hostname only
-  vars_files:
-    - secrets.yaml

+ 0 - 25
ansible/checkmk/lookup-rule.yaml

@@ -1,25 +0,0 @@
----
-- name: "Manage Checkmk rules"
-  hosts: localhost
-  gather_facts: false
-  vars_files:
-    - secrets.yaml
-  vars:
-    server_url: "checkmk.home.arpa"
-    site: "cmk"
-
-  tasks:
-    - name: Get a rule with a particular rule id
-      ansible.builtin.debug:
-        msg: "Rule: {{ extensions | to_nice_yaml }}"
-      vars:
-        extensions: "{{
-          lookup('checkmk.general.rule',
-            rule_id='checkmk-rule-id',
-            server_url=server_url,
-            site=site,
-            automation_user=automation_user,
-            automation_secret=automation_secret,
-            validate_certs=False
-            )
-          }}"

+ 0 - 22
ansible/checkmk/manage-hosts.yaml

@@ -1,22 +0,0 @@
----
-- name: "Manage Checkmk hosts"
-  hosts: localhost
-  gather_facts: false
-  vars_files:
-    - secrets.yaml
-  vars:
-    server_url: "checkmk.home.arpa"
-    site: "cmk"
-
-  tasks:
-    - name: "Create host"
-      checkmk.general.host:
-        server_url: "{{ server_url }}"
-        site: "{{ site }}"
-        automation_user: "{{ automation_user }}"
-        automation_secret: "{{ automation_secret }}"
-        name: "your-host-name"
-        attributes:
-          ipaddress: "host-ip-address"
-        folder: "/"
-        state: "present"

+ 0 - 71
ansible/checkmk/manage-rules.yaml

@@ -1,71 +0,0 @@
----
-- name: "Manage Checkmk rules"
-  hosts: localhost
-  gather_facts: false
-  vars_files:
-    - secrets.yaml
-  vars:
-    server_url: "checkmk.home.arpa"
-    site: "cmk"
-
-  tasks:
-    - name: Create DNS Check Rule
-      checkmk.general.rule:
-        server_url: "{{ server_url }}"
-        site: "{{ site }}"
-        automation_user: "{{ automation_user }}"
-        automation_secret: "{{ automation_secret }}"
-        ruleset: "active_checks:dns"
-        rule:
-          properties: {
-            "comment": "Ansible managed",
-            "description": "DNS DNS Monitoring",
-            "disabled": false,
-          }
-          conditions: {
-            "host_label_groups": [],
-            "host_name": {
-              "match_on": [
-                "your-dns-container-host"
-              ],
-              "operator": "one_of"
-            },
-            "host_tags": [],
-            "service_label_groups": []
-          }
-          "value_raw": {
-            "hostname": "hostname-to-query",
-            "server": "dns-server-ip",
-            "expected_addresses_list": [
-              "expected-ip-address"
-            ]
-          }
-          location:
-            folder: "/"
-            position: "top"
-        state: "present"
-
-    - name: Create NVME Temperature override rule
-      checkmk.general.rule:
-        server_url: "{{ server_url }}"
-        site: "{{ site }}"
-        automation_user: "{{ automation_user }}"
-        automation_secret: "{{ automation_secret }}"
-        ruleset: "checkgroup_parameters:temperature"
-        rule:
-          conditions:
-            host_label_groups: []
-            host_tags: []
-            service_description:
-              match_on:
-                - "DRIVE MODEL NAME*"
-              operator: "one_of"
-            service_label_groups: []
-          location:
-            folder: "/"
-            position: "top"
-          properties:
-            description: "NVME Temperature override"
-            disabled: false
-          value_raw: "{'levels': (60.0, 80.0)}"
-        state: present

+ 0 - 3
ansible/checkmk/secrets.yaml

@@ -1,3 +0,0 @@
----
-automation_user: "your-checkmk-user"
-automation_secret: "your-checkmk-password"

+ 0 - 76
ansible/discord/notify-discord.yaml

@@ -1,76 +0,0 @@
----
-# This Ansible playbook demonstrates how to send Discord notifications
-# using the `community.general.discord` module.
-# https://docs.ansible.com/ansible/latest/collections/community/general/discord_module.html
-#
-# If you need guidance on how to create your own Discord server, see
-# https://support.discord.com/hc/en-us/articles/204849977-How-do-I-create-a-server
-#
-# In order to generate a webhook, please see
-# https://support.discord.com/hc/en-us/articles/360045093012-Server-Integrations-Page
-
-- name: Notify discord
-
-  hosts: "{{ my_hosts | d([]) }}"
-
-  vars:
-    # The name that will be shown as sender of the notification. Note
-    # that some usernames are blocked by Discord, for example it must
-    # not contain the word `discord`.
-    notify_discord_username: Ansible
-
-    # Your Discord webhook URL should have following format. Please
-    # extract following segments of the URL path and set it as value of
-    # the following variables:
-    #
-    # https://discord.com/api/webhooks/nnnnnnnnnn/xxxxxxxxxxxxxxxxxxxxxxxxxxx
-    #                                  |        | |                         |
-    #   notify_discord_webhook_id <----'--------' |                         |
-    #                                             |                         |
-    #   notify_discord_webhook_token <------------'-------------------------'
-    #
-    # Security advice: if you commit this data to a repository it is
-    # strongly recommended to encrypt `notify_discord_webhook_token` using
-    # Ansible Vault.
-    notify_discord_webhook_id: ''
-    notify_discord_webhook_token: ''
-
-    # Do not modify following regular expressions unless you know what
-    # you're doing. Those are to ensure that whatever you've set as
-    # `notify_discord_webhook_id` and `notify_discord_webhook_token`
-    # complies with the Discord API Specification (as of 2024-02-25).
-    #
-    # https://github.com/discord/discord-api-spec/blob/fe9917381e47285b56d98cb72ae3cfe7db9ea19c/specs/openapi.json#L7524-L7531
-    # https://github.com/discord/discord-api-spec/blob/fe9917381e47285b56d98cb72ae3cfe7db9ea19c/specs/openapi.json#L24817-L24821
-    notify_discord_webhook_id_regex: '^0|[1-9][0-9]*$'
-    # https://github.com/discord/discord-api-spec/blob/fe9917381e47285b56d98cb72ae3cfe7db9ea19c/specs/openapi.json#L7532-L7541
-    notify_discord_webhook_token_regex: '^[a-zA-Z0-9_-]+$'
-
-    # The content of the notification
-    notify_discord_webhook_content: |-
-      **Message from `{{ inventory_hostname }}` by *Ansible* ** :tada:
-      Just a test, adjust it to your liking.
-
-      You can use any Markdown formatting here [supported by Discord](
-      https://support.discord.com/hc/en-us/articles/210298617-Markdown-Text-101-Chat-Formatting-Bold-Italic-Underline).
-
-    # Delegate the sending of the Discord notification to following host
-    # which must be able to access the public internet on destination
-    # port 443/tcp. When `localhost` is specified, this is sent from
-    # the Ansible Controller, but you can pick any host listed in the
-    # Ansible inventory.
-    notify_discord_send_from_host: localhost
-
-  tasks:
-    - name: Send Discord message
-      community.general.discord:
-        username: "{{ notify_discord_username }}"
-        webhook_id: "{{ notify_discord_webhook_id }}"
-        webhook_token: "{{ notify_discord_webhook_token }}"
-        content: "{{ notify_discord_webhook_content }}"
-      delegate_to: "{{ notify_discord_send_from_host }}"
-      when:
-        - notify_discord_webhook_id is match(notify_discord_webhook_id_regex)
-        - notify_discord_webhook_token is match(notify_discord_webhook_token_regex)
-        - notify_discord_webhook_content | length > 0
-        - notify_discord_send_from_host is in (['localhost'] + groups['all'])

+ 0 - 52
ansible/docker/docker-certs-enable.yaml

@@ -1,52 +0,0 @@
----
-- name: "Docker Certs enable"
-  hosts: "{{ my_hosts | d([]) }}"
-  become: true
-  vars:
-    certs_path: "/root/docker-certs"
-
-  tasks:
-    - name: Check if docker certs are existing
-      ansible.builtin.stat:
-        path: "{{ certs_path }}"
-      register: certs_dir
-
-    - name: Fail if docker certs are not existing
-      ansible.builtin.fail:
-        msg: "Docker certificates are not existing in /root/docker-certs."
-      when: not certs_dir.stat.exists
-
-    - name: Get machine's primary internal ip address from eth0 interface
-      ansible.builtin.setup:
-      register: ip_address
-
-    - name: Set machine's primary internal ip address
-      ansible.builtin.set_fact:
-        ip_address: "{{ ip_address.ansible_facts.ansible_default_ipv4.address }}"
-
-    - name: Check if ip_address is a valid ip address
-      ansible.builtin.assert:
-        that:
-          - ip_address is match("^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$")
-        fail_msg: "ip_address is not a valid ip address."
-        success_msg: "ip_address is a valid ip address."
-
-    - name: Change docker daemon to use certs
-      ansible.builtin.lineinfile:
-        path: /lib/systemd/system/docker.service
-        line: >
-          ExecStart=/usr/bin/dockerd -H fd:// --containerd=/run/containerd/containerd.sock
-          -H tcp://{{ ip_address }}:2376 --tlsverify --tlscacert={{ certs_path }}/ca.pem
-          --tlscert={{ certs_path }}/server-cert.pem --tlskey={{ certs_path }}/server-key.pem
-        regexp: '^ExecStart='
-        state: present
-
-    - name: Reload systemd daemon
-      ansible.builtin.systemd:
-        daemon_reload: true
-
-    - name: Restart docker daemon
-      ansible.builtin.systemd:
-        name: docker
-        state: restarted
-        enabled: true

+ 0 - 158
ansible/docker/docker-certs.yaml

@@ -1,158 +0,0 @@
----
-- name: "Docker Certs"
-  hosts: "{{ my_hosts | d([]) }}"
-  become: true
-  vars:
-    certs_path: "/root/docker-certs"
-    cert_validity_days: 3650
-    cn_domain: "your-domain.tld"
-
-  tasks:
-    - name: Check if docker certs are existing
-      ansible.builtin.stat:
-        path: "{{ certs_path }}"
-      register: certs_dir
-
-    - name: Create docker certs directory (if needed)
-      ansible.builtin.file:
-        path: "{{ certs_path }}"
-        state: directory
-        mode: '0700'
-      when: not certs_dir.stat.exists
-
-    - name: Check if docker certs directory is empty
-      ansible.builtin.command: ls -A "{{ certs_path }}"
-      register: certs_list
-      when: certs_dir.stat.exists
-      changed_when: false
-      ignore_errors: true
-
-    - name: Fail if docker certs already exist
-      ansible.builtin.fail:
-        msg: "Docker certificates already exist in /root/docker-certs."
-      when: certs_list.stdout | default('') != ''
-
-    - name: Get machine's primary internal ip address from eth0 interface
-      ansible.builtin.setup:
-      register: ip_address
-
-    - name: Set machine's primary internal ip address
-      ansible.builtin.set_fact:
-        ip_address: "{{ ip_address.ansible_facts.ansible_default_ipv4.address }}"
-
-    - name: Check if ip_address is a valid ip address
-      ansible.builtin.assert:
-        that:
-          - ip_address is match("^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$")
-        fail_msg: "ip_address is not a valid ip address."
-        success_msg: "ip_address is a valid ip address."
-
-    - name: Generate CA private key
-      ansible.builtin.command:
-        cmd: >
-          openssl genrsa -out "{{ certs_path }}/ca-key.pem" 4096
-      args:
-        creates: "{{ certs_path }}/ca-key.pem"
-
-    - name: Generate CA certificate
-      ansible.builtin.command:
-        cmd: >
-          openssl req -sha256 -new -x509
-            -subj "/CN={{ cn_domain }}"
-            -days "{{ cert_validity_days }}"
-            -key "{{ certs_path }}/ca-key.pem"
-            -out "{{ certs_path }}/ca.pem"
-      args:
-        creates: "{{ certs_path }}/ca.pem"
-
-    - name: Generate server private key
-      ansible.builtin.command:
-        cmd: >
-          openssl genrsa -out "{{ certs_path }}/server-key.pem" 4096
-        creates: "{{ certs_path }}/server-key.pem"
-
-    - name: Generate server certificate signing request
-      ansible.builtin.command:
-        cmd: >
-          openssl req -sha256 -new
-            -subj "/CN={{ inventory_hostname }}"
-            -key "{{ certs_path }}/server-key.pem"
-            -out "{{ certs_path }}/server.csr"
-        creates: "{{ certs_path }}/server.csr"
-
-    - name: Generate server certificate extension file
-      ansible.builtin.shell: |
-        echo "subjectAltName = DNS:{{ inventory_hostname }},IP:{{ ip_address }},IP:127.0.0.1" >> "{{ certs_path }}/extfile.cnf"
-        echo "extendedKeyUsage = serverAuth" >> "{{ certs_path }}/extfile.cnf"
-      args:
-        creates: "{{ certs_path }}/extfile.cnf"
-
-    - name: Generate server certificate
-      ansible.builtin.command:
-        cmd: >
-          openssl x509 -req -days "{{ cert_validity_days }}" -sha256
-            -in "{{ certs_path }}/server.csr"
-            -CA "{{ certs_path }}/ca.pem"
-            -CAkey "{{ certs_path }}/ca-key.pem"
-            -CAcreateserial -out "{{ certs_path }}/server-cert.pem"
-            -extfile "{{ certs_path }}/extfile.cnf"
-        creates: "{{ certs_path }}/server-cert.pem"
-
-    - name: Generate client private key
-      ansible.builtin.command:
-        cmd: >
-          openssl genrsa -out "{{ certs_path }}/key.pem" 4096
-        creates: "{{ certs_path }}/key.pem"
-
-    - name: Generate client certificate signing request
-      ansible.builtin.command:
-        cmd: >
-          openssl req -sha256 -new
-            -subj "/CN=client"
-            -key "{{ certs_path }}/key.pem"
-            -out "{{ certs_path }}/client.csr"
-        creates: "{{ certs_path }}/client.csr"
-
-    - name: Generate client certificate extension file
-      ansible.builtin.shell: |
-        echo "extendedKeyUsage = clientAuth" >> "{{ certs_path }}/client-extfile.cnf"
-      args:
-        creates: "{{ certs_path }}/client-extfile.cnf"
-
-    - name: Generate client certificate
-      ansible.builtin.command:
-        cmd: >
-          openssl x509 -req -days "{{ cert_validity_days }}"
-            -sha256 -in "{{ certs_path }}/client.csr"
-            -CA "{{ certs_path }}/ca.pem"
-            -CAkey "{{ certs_path }}/ca-key.pem"
-            -CAcreateserial -out "{{ certs_path }}/cert.pem"
-            -extfile "{{ certs_path }}/client-extfile.cnf"
-        creates: "{{ certs_path }}/cert.pem"
-
-    - name: Remove client certificate signing request
-      ansible.builtin.file:
-        path: "{{ certs_path }}/server.csr"
-        state: absent
-
-    - name: Remove client certificate signing request
-      ansible.builtin.file:
-        path: "{{ certs_path }}/client.csr"
-        state: absent
-
-    - name: Remove server certificate extension file
-      ansible.builtin.file:
-        path: "{{ certs_path }}/extfile.cnf"
-        state: absent
-
-    - name: Remove client certificate extension file
-      ansible.builtin.file:
-        path: "{{ certs_path }}/client-extfile.cnf"
-        state: absent
-
-    - name: Set permissions for docker certs
-      ansible.builtin.file:
-        path: "{{ certs_path }}"
-        mode: '0700'
-        recurse: true
-        follow: true

+ 0 - 35
ansible/docker/inst-docker-ubuntu.yaml

@@ -1,35 +0,0 @@
----
-- name: Install docker
-  hosts: "{{ my_hosts | d([]) }}"
-  become: true
-
-  tasks:
-    - name: Install docker dependencies
-      ansible.builtin.apt:
-        name:
-          - apt-transport-https
-          - ca-certificates
-          - curl
-          - gnupg-agent
-          - software-properties-common
-        update_cache: true
-
-    - name: Add docker gpg key
-      ansible.builtin.apt_key:
-        url: https://download.docker.com/linux/ubuntu/gpg
-        state: present
-        keyring: /etc/apt/keyrings/docker.gpg
-
-    - name: Add docker repository
-      ansible.builtin.apt_repository:
-        filename: docker
-        repo: deb [arch=amd64 signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu {{ ansible_lsb.codename | lower }} stable
-        state: present
-
-    - name: Install docker engine
-      ansible.builtin.apt:
-        name:
-          - docker-ce
-          - docker-buildx-plugin
-          - docker-compose-plugin
-        update_cache: true

+ 0 - 14
ansible/docker/maint-docker-clean.yaml

@@ -1,14 +0,0 @@
----
-- name: Clean docker
-  hosts: "{{ my_hosts | d([]) }}"
-
-  tasks:
-    - name: Prune non-dangling images
-      community.docker.docker_prune:
-        containers: false
-        images: true
-        images_filters:
-          dangling: false
-        networks: false
-        volumes: false
-        builder_cache: false

+ 0 - 46
ansible/kubernetes/README.md

@@ -1,46 +0,0 @@
-# Kubernetes (K8s) Installation Script
-
-- Introduction
-- Prerequisites
-- Execution Instructions
-
-## Introduction
-
-> The objective of this playbook is to automate the installation and setup of a Kubernetes instance. The playbook consists of 3 main plays: one for both controllers and nodes, one for controllers only, and one for nodes only. It will ask for user confirmation before moving on to each stage. By the end of the playbook, two files will be created: one on the controller node named **worker_conn_string** and one locally inside the playbook directory named **Remote_Files/worker_conn_string**. Both contain the **connection string**. (Note: if you want to join controllers or nodes manually later, use the **--control-plane** flag for controllers.)
-
-### References
-
-**Documentation** - [https://kubernetes.io/docs/setup/](https://kubernetes.io/docs/setup/)
-
-## Prerequisites
-
-- At least 2 VMs (1 for the control node and 1 for the worker node).
-- Static IPs should be set along with unique host names.
-- Inventory should be in this format
-
-```ini
-    [controllers]
-    host_name ansible_ssh_host=<IP> ansible_user='<USERNAME>' ansible_become_pass='<PASSWORD>'
-
-    [nodes]
-
-    [instance:children]
-    controllers
-    nodes
-```
-
-(If you want to change this, don't forget to change the `inst-k8s` as well)
-
-## Execution Instructions
-
-```bash
-ansible-playbook -i <INVENTORY> <PLAYBOOK>
-```
-
-### Optional Flags
-
-| Flag  | Use Case |
-|-------|-----------|
-| --ask-vault-pass | If the vault is encrypted |
-| --start-at-task | If you want to start from a specific task|
-| --tags | If you want to only run a specific group of tasks|

+ 0 - 2
ansible/kubernetes/ansible.cfg

@@ -1,2 +0,0 @@
-[defaults]
-timeout = 25

+ 0 - 318
ansible/kubernetes/inst-k8s.yaml

@@ -1,318 +0,0 @@
----
-- name: Setup Prerequisites To Install Kubernetes
-  hosts: instance
-  become: true
-  vars:
-    kube_prereq_packages: [curl, ca-certificates, apt-transport-https]
-    kube_packages: [kubeadm, kubectl, kubelet]
-
-  tasks:
-    - name: Test Reacheability
-      ansible.builtin.ping:
-
-    - name: Update Cache
-      ansible.builtin.apt:
-        update_cache: true
-        autoclean: true
-
-    - name: 1. Upgrade All the Packages to the latest
-      ansible.builtin.apt:
-        upgrade: "full"
-
-    - name: 2. Install Qemu-Guest-Agent
-      ansible.builtin.apt:
-        name:
-          - qemu-guest-agent
-        state: present
-
-    - name: 3. Setup a Container Runtime
-      ansible.builtin.apt:
-        name:
-          - containerd
-        state: present
-
-    - name: 4. Start Containerd If Stopped
-      ansible.builtin.service:
-        name: containerd
-        state: started
-
-    - name: 5. Create Containerd Directory
-      ansible.builtin.file:
-        path: /etc/containerd
-        state: directory
-        mode: '0755'
-
-    - name: 6. Check config.toml Exists
-      ansible.builtin.stat:
-        path: /etc/containerd/config.toml
-      register: pre_file_exist_result
-
-    - name: 6.1 Delete config.toml Exists
-      ansible.builtin.file:
-        path: /etc/containerd/config.toml
-        state: absent
-      when: pre_file_exist_result.stat.exists
-
-    - name: 7. Place Default Containerd Config Inside It
-      ansible.builtin.shell: |
-        set -o pipefail
-        containerd config default | sudo tee /etc/containerd/config.toml
-      register: output
-      changed_when: output.rc != 0
-      args:
-        executable: /bin/bash
-      tags:
-        - containerd_config
-
-    - name: 7.1 Check If New config.toml Exists Now
-      ansible.builtin.stat:
-        path: /etc/containerd/config.toml
-      register: post_file_exist_result
-      tags:
-        - containerd_config
-
-    - name: 7.2 Exit The Play If config.toml Does Not Exist
-      ansible.builtin.meta: end_play
-      when: not post_file_exist_result.stat.exists
-      tags:
-        - containerd_config
-
-    - name: 8.1 Disable Swap
-      ansible.builtin.command: sudo swapoff -a
-      register: output
-      changed_when: output.rc != 0
-      tags:
-        - disable_swap
-
-    - name: 8.2 Disable Swap permanently
-      ansible.builtin.replace:
-        path: /etc/fstab
-        regexp: '^([^#].*?\sswap\s+sw\s+.*)$'
-        replace: '# \1'
-      tags:
-        - disable_swap
-
-    - name: 9. Edit config.toml
-      ansible.builtin.replace:
-        path: /etc/containerd/config.toml
-        after: \[plugins\."io\.containerd\.grpc\.v1\.cri"\.containerd\.runtimes\.runc\.options\]
-        regexp: SystemdCgroup = false
-        replace: SystemdCgroup = true
-
-    - name: 10. Enable Ipv4 Bridging
-      ansible.builtin.replace:
-        path: /etc/sysctl.conf
-        regexp: ^#net\.ipv4\.ip_forward=1$
-        replace: net.ipv4.ip_forward=1
-
-    - name: 11.1 Delete k8s Config If Exists
-      ansible.builtin.file:
-        path: /etc/modules-load.d/k8s.conf
-        state: absent
-      tags:
-        - kube_config
-
-    - name: 11.2 Add k8s.config and Edit It
-      ansible.builtin.lineinfile:
-        path: /etc/modules-load.d/k8s.conf
-        line: br_netfilter
-        create: true
-        mode: '0755'
-      tags:
-        - kube_config
-
-    - name: 12.1 Reboot
-      ansible.builtin.reboot:
-      register: system_reboot
-
-    - name: 12.2 Verify Reboot Success
-      ansible.builtin.ping:
-      when: system_reboot.rebooted
-
-    - name: 13.1 Update Cache
-      ansible.builtin.apt:
-        update_cache: true
-        autoclean: true
-      tags:
-        - install_pre_kube_packages
-
-    - name: 13.2 Remove apt lock file
-      ansible.builtin.file:
-        state: absent
-        path: "/var/lib/dpkg/lock"
-      tags:
-        - install_pre_kube_packages
-
-    - name: 13.3 Install Prerequisite Packages
-      ansible.builtin.apt:
-        name: '{{ kube_prereq_packages }}'
-      tags:
-        - install_pre_kube_packages
-
-    - name: 13.4 Remove GPG Keys If They Exist
-      ansible.builtin.file:
-        path: "{{ item }}"
-        state: absent
-      with_items:
-        - /usr/share/keyrings/kubernetes-apt-keyring.gpg
-        - /usr/share/keyrings/kubernetes-apt-keyring.gpg_armored
-      tags:
-        - install_pre_kube_packages
-
-    - name: 13.5 Download Kubernetes APT Key
-      ansible.builtin.get_url:
-        url: https://pkgs.k8s.io/core:/stable:/v1.28/deb/Release.key
-        dest: /usr/share/keyrings/kubernetes-apt-keyring.gpg_armored
-        mode: '0755'
-      tags:
-        - install_pre_kube_packages
-
-    - name: 13.6 De-Armor Kubernetes APT Key
-      ansible.builtin.shell: gpg --dearmor < /usr/share/keyrings/kubernetes-apt-keyring.gpg_armored > /etc/apt/keyrings/kubernetes-apt-keyring.gpg
-      no_log: true
-      args:
-        creates: /etc/apt/keyrings/kubernetes-apt-keyring.gpg
-      tags:
-        - install_pre_kube_packages
-
-    - name: 13.7 Add Kubernetes APT Key
-      ansible.builtin.shell: |
-        set -o pipefail
-        echo 'deb [signed-by=/etc/apt/keyrings/kubernetes-apt-keyring.gpg] https://pkgs.k8s.io/core:/stable:/v1.28/deb/ /' \
-        | sudo tee /etc/apt/sources.list.d/kubernetes.list
-      register: apt_output
-      changed_when: apt_output.rc != 0
-      args:
-        executable: /bin/bash
-      tags:
-        - install_pre_kube_packages
-
-    - name: 14.1 Update Cache
-      ansible.builtin.apt:
-        update_cache: true
-        autoclean: true
-      tags:
-        - install_kube_packages
-
-    - name: 14.2 Remove apt lock file
-      ansible.builtin.file:
-        state: absent
-        path: "/var/lib/dpkg/lock"
-      tags:
-        - install_kube_packages
-
-    - name: 14.3 Install Required Packages
-      ansible.builtin.apt:
-        name: '{{ kube_packages }}'
-      tags:
-        - install_kube_packages
-
-    - name: 14.4 Hold Packages
-      ansible.builtin.dpkg_selections:
-        name: '{{ item }}'
-        selection: hold
-      with_items: '{{ kube_packages }}'
-      tags:
-        - install_kube_packages
-
-    - name: Prompt To Continue On To Configuring Control Nodes
-      ansible.builtin.pause:
-        prompt: Press RETURN when you want to continue configuring the Control nodes!
-
-- name: Setup Controller Nodes
-  gather_facts: true
-  hosts: controllers
-  become: true
-
-  tasks:
-    - name: 1. Initialize Cluster
-      ansible.builtin.shell: |
-        set -o pipefail
-        sudo kubeadm init --control-plane-endpoint={{ hostvars[inventory_hostname]['ansible_default_ipv4']['address'] }} --pod-network-cidr=10.244.0.0/16
-      register: init_cluster_output
-      changed_when: init_cluster_output.rc != 0
-      args:
-        executable: /bin/bash
-
-    - name: 2.1 Create .kube Directory
-      ansible.builtin.file:
-        path: .kube
-        state: directory
-        mode: '0755'
-      tags:
-        - kube_admin_config
-
-    - name: 2.2 Copy Kubernetes Admin Config
-      ansible.builtin.copy:
-        remote_src: true
-        src: /etc/kubernetes/admin.conf
-        dest: .kube/config
-        mode: '0755'
-      tags:
-        - kube_admin_config
-
-    - name: 2.3 Change Config File Permission
-      ansible.builtin.command: chown {{ ansible_env.USER }}:{{ ansible_env.USER }} ".kube/config"
-      changed_when: false
-      when: not ansible_env.HOME is undefined
-      tags:
-        - kube_admin_config
-
-    - name: 3. Install An Overlay Network
-      ansible.builtin.shell: |
-        set -o pipefail
-        kubectl apply -f https://raw.githubusercontent.com/flannel-io/flannel/master/Documentation/kube-flannel.yml
-      register: init_cluster_output
-      become: false
-      changed_when: init_cluster_output.rc != 0
-      args:
-        executable: /bin/bash
-
-    - name: 4.1 Execute Join String Generation Command
-      ansible.builtin.command: kubeadm token create --print-join-command
-      become: false
-      register: join_output
-      changed_when: false
-      tags:
-        - join_string
-
-    - name: 4.2 Display Join String
-      ansible.builtin.debug:
-        msg: 'Join Command : {{ join_output.stdout }}'
-      tags:
-        - join_string
-
-    - name: Copy Connection String To A Remote File
-      ansible.builtin.template:
-        src: k8s_worker_node_connection.j2
-        dest: worker_conn_string
-        mode: '0755'
-
-    - name: Check Connection String File Exists
-      ansible.builtin.stat:
-        path: worker_conn_string
-      register: conn_file_path_remote
-
-    - name: Fetch The Remote File
-      ansible.builtin.fetch:
-        src: worker_conn_string
-        dest: Remote_Files/worker_conn_string
-        flat: true
-      when: conn_file_path_remote.stat.exists
-
-    - name: Prompt To Continue On To Configuring Worker Nodes
-      ansible.builtin.pause:
-        prompt: Press RETURN when you want to continue configuring the Worker nodes!
-
-- name: Join Worker Nodes
-  gather_facts: true
-  hosts: nodes
-  become: true
-  vars:
-    node_conn_string: "{{ lookup('ansible.builtin.file', 'Remote_Files/worker_conn_string') }}"
-
-  tasks:
-    - name: 1. Add Worker Nodes To The Controller
-      ansible.builtin.command: '{{ node_conn_string }}'
-      changed_when: false
-      throttle: 1

+ 0 - 1
ansible/kubernetes/k8s_worker_node_connection.j2

@@ -1 +0,0 @@
-{{ join_output.stdout }}

+ 0 - 21
ansible/portainer/deploy-portainer.yaml

@@ -1,21 +0,0 @@
----
-- name: Deploy portainer-ce latest
-  hosts: "{{ my_hosts | d([]) }}"
-  become: true
-  become_user: "{{ lookup('env','USER') }}"
-
-  tasks:
-    - name: Create new volume
-      community.docker.docker_volume:
-        name: portainer-data
-
-    - name: Deploy portainer
-      community.docker.docker_container:
-        name: portainer
-        image: "docker.io/portainer/portainer-ce"
-        ports:
-          - "9443:9443"
-        volumes:
-          - /run/docker.sock:/var/run/docker.sock
-          - portainer-data:/data
-        restart_policy: unless-stopped

+ 0 - 18
ansible/traefik/deploy-traefik.yaml

@@ -1,18 +0,0 @@
----
-- name: Deploy traefik v2.5
-  hosts: "{{ my_hosts | d([]) }}"
-
-  tasks:
-    - name: Deploy traefik
-      community.docker.docker_container:
-        name: traefik
-        image: "traefik:v2.5"
-        ports:
-          - "80:80"
-          - "443:443"
-        volumes:
-          - /run/docker.sock:/run/docker.sock
-          - /etc/traefik:/etc/traefik
-        restart_policy: unless-stopped
-      become: true
-      become_user: "{{ lookup('env', 'USER') }}"

+ 0 - 19
ansible/ubuntu/config-add-sshkey.yaml

@@ -1,19 +0,0 @@
----
-- name: Add ssh key
-  hosts: "{{ my_hosts | d([]) }}"
-  become: true
-
-  tasks:
-    - name: Install public keys
-      ansible.posix.authorized_key:
-        user: "{{ lookup('env', 'USER') }}"
-        state: present
-        key: "{{ lookup('file', '~/.ssh/id_rsa.pub') }}"
-
-    - name: Change sudoers file
-      ansible.builtin.lineinfile:
-        path: /etc/sudoers
-        state: present
-        regexp: '^%sudo'
-        line: '%sudo ALL=(ALL) NOPASSWD: ALL'
-        validate: /usr/sbin/visudo -cf %s

+ 0 - 11
ansible/ubuntu/inst-qemu-agent.yaml

@@ -1,11 +0,0 @@
----
-- name: Install qemu-guest-agent package
-  hosts: all
-  become: true
-  become_method: ansible.builtin.sudo
-
-  tasks:
-    - name: Install qemu-guest-agent
-      ansible.builtin.apt:
-        name: qemu-guest-agent
-        state: present

+ 0 - 19
ansible/ubuntu/inst-vm-core.yaml

@@ -1,19 +0,0 @@
----
-- name: Install core packages for virtual machines
-  hosts: "{{ my_hosts | d([]) }}"
-  become: true
-
-  tasks:
-    - name: Install packages
-      ansible.builtin.apt:
-        name:
-          - prometheus-node-exporter
-          - nfs-common
-          - qemu-guest-agent
-        update_cache: true
-
-    - name: Start guest qemu-guest-agent
-      ansible.builtin.service:
-        name: qemu-guest-agent
-        state: started
-        enabled: true

+ 0 - 12
ansible/ubuntu/inst-zsh.yaml

@@ -1,12 +0,0 @@
----
-- name: Install zsh
-  hosts: "{{ my_hosts | d([]) }}"
-  become: true
-
-  tasks:
-    - name: Install zsh
-      ansible.builtin.apt:
-        name: zsh
-        state: present
-        update_cache: true
-      become: true

+ 0 - 25
ansible/ubuntu/maint-diskspace.yaml

@@ -1,25 +0,0 @@
----
-- name: Check disk space
-  hosts: "{{ my_hosts | d([]) }}"
-
-  tasks:
-    - name: Check disk space available
-      ansible.builtin.shell:
-        cmd: |
-          set -euo pipefail
-          df -Ph / | awk 'NR==2 {print $5}'
-        executable: /bin/bash
-      changed_when: false
-      check_mode: false
-      register: disk_usage
-
-#   - name: Send discord message when disk space is over 80%
-#     uri:
-#       url: "your-webhook"
-#       method: POST
-#       body_format: json
-#       body: '{"content": "Disk space on {{ inventory_hostname }} is above 80%!"}'
-#       headers:
-#         Content-Type: application/json
-#       status_code: 204
-#     when: disk_usage.stdout[:-1]|int > 80

+ 0 - 16
ansible/ubuntu/maint-reboot-required.yaml

@@ -1,16 +0,0 @@
----
-- name: Check if system reboot is required
-  hosts: "{{ my_hosts | d([]) }}"
-  become: true
-
-  tasks:
-    - name: Check if system reboot is required
-      become: true
-      ansible.builtin.stat:
-        path: /run/reboot-required
-      register: reboot_required
-
-    - name: Report if reboot is required
-      ansible.builtin.debug:
-        msg: "Reboot is required"
-      when: reboot_required.stat.exists

+ 0 - 9
ansible/ubuntu/maint-reboot.yaml

@@ -1,9 +0,0 @@
----
-- name: Reboot machine
-  hosts: "{{ my_hosts | d([]) }}"
-  become: true
-
-  tasks:
-    - name: Reboot machine
-      ansible.builtin.reboot:
-        reboot_timeout: 3600

+ 0 - 14
ansible/ubuntu/upd-apt.yaml

@@ -1,14 +0,0 @@
----
-- name: Update and upgrade apt packages
-  hosts: all
-
-  tasks:
-    - name: Update packages with apt
-      when: ansible_pkg_mgr == 'apt'
-      ansible.builtin.apt:
-        update_cache: true
-
-    - name: Upgrade packages with apt
-      when: ansible_pkg_mgr == 'apt'
-      ansible.builtin.apt:
-        upgrade: dist

+ 0 - 16
ansible/wireguard/inst-wireguard.yaml

@@ -1,16 +0,0 @@
----
-- name: Install wireguard
-  hosts: "{{ my_hosts | d([]) }}"
-  become: true
-
-  tasks:
-    - name: Install wireguard
-      ansible.builtin.apt:
-        name: wireguard
-        update_cache: true
-
-    - name: Generate private and public keypair
-      ansible.builtin.shell: |
-        wg genkey | tee privatekey | wg pubkey > publickey
-        chmod 0400 privatekey
-        chmod 0400 publickey

+ 7 - 0
cli/__init__.py

@@ -0,0 +1,7 @@
+"""
+Boilerplates CLI - A sophisticated command-line tool for managing infrastructure boilerplates.
+"""
+
+__version__ = "0.0.1"
+__author__ = "Christian Lempa"
+__description__ = "CLI tool for managing infrastructure boilerplates"

+ 197 - 0
cli/__main__.py

@@ -0,0 +1,197 @@
+#!/usr/bin/env python3
+"""
+Main entry point for the Boilerplates CLI application.
+This file serves as the primary executable when running the CLI.
+"""
+from __future__ import annotations
+
+import importlib
+import logging
+import pkgutil
+import sys
+from pathlib import Path
+from typing import Optional
+from typer import Typer, Context, Option
+from rich.console import Console
+import cli.modules
+from cli.core.registry import registry
+from cli.core import repo
+# Using standard Python exceptions instead of custom ones
+
+# NOTE: Placeholder version - will be overwritten by release script (.github/workflows/release.yaml)
+__version__ = "0.0.0"
+
+app = Typer(
+  help="CLI tool for managing infrastructure boilerplates.\n\n[dim]Easily generate, customize, and deploy templates for Docker Compose, Terraform, Kubernetes, and more.\n\n [white]Made with 💜 by [bold]Christian Lempa[/bold]",
+  add_completion=True,
+  rich_markup_mode="rich",
+)
+console = Console()
+
+def setup_logging(log_level: str = "WARNING") -> None:
+  """Configure the logging system with the specified log level.
+  
+  Args:
+      log_level: The logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
+  
+  Raises:
+      ValueError: If the log level is invalid
+      RuntimeError: If logging configuration fails
+  """
+  numeric_level = getattr(logging, log_level.upper(), None)
+  if not isinstance(numeric_level, int):
+    raise ValueError(
+      f"Invalid log level '{log_level}'. Valid levels: DEBUG, INFO, WARNING, ERROR, CRITICAL"
+    )
+  
+  try:
+    logging.basicConfig(
+      level=numeric_level,
+      format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+      datefmt='%Y-%m-%d %H:%M:%S'
+    )
+
+    logger = logging.getLogger(__name__)
+    logger.setLevel(numeric_level)
+  except Exception as e:
+    raise RuntimeError(f"Failed to configure logging: {e}")
+
+@app.callback(invoke_without_command=True)
+def main(
+  ctx: Context,
+  version: Optional[bool] = Option(
+    None,
+    "--version",
+    "-v",
+    help="Show the application version and exit.",
+    is_flag=True,
+    callback=lambda v: console.print(f"boilerplates version {__version__}") or sys.exit(0) if v else None,
+    is_eager=True,
+  ),
+  log_level: Optional[str] = Option(
+    None,
+    "--log-level",
+    help="Set the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL). If omitted, logging is disabled."
+  )
+) -> None:
+  """CLI tool for managing infrastructure boilerplates."""
+  # Disable logging by default; only enable when user provides --log-level
+  if log_level:
+    # Re-enable logging and configure
+    logging.disable(logging.NOTSET)
+    setup_logging(log_level)
+  else:
+    # Silence all logging (including third-party) unless user explicitly requests it
+    logging.disable(logging.CRITICAL)
+  
+  # Store log level in context for potential use by other commands
+  ctx.ensure_object(dict)
+  ctx.obj['log_level'] = log_level
+  
+  # If no subcommand is provided, show help and friendly intro
+  if ctx.invoked_subcommand is None:
+    console.print(ctx.get_help())
+    sys.exit(0)
+
+def init_app() -> None:
+  """Initialize the application by discovering and registering modules.
+  
+  Raises:
+      ImportError: If critical module import operations fail
+      RuntimeError: If application initialization fails
+  """
+  logger = logging.getLogger(__name__)
+  failed_imports = []
+  failed_registrations = []
+  
+  try:
+    # Auto-discover and import all modules
+    modules_path = Path(cli.modules.__file__).parent
+    logger.debug(f"Discovering modules in {modules_path}")
+    
+    for finder, name, ispkg in pkgutil.iter_modules([str(modules_path)]):
+      if not ispkg and not name.startswith('_') and name != 'base':
+        try:
+          logger.debug(f"Importing module: {name}")
+          importlib.import_module(f"cli.modules.{name}")
+        except ImportError as e:
+          error_info = f"Import failed for '{name}': {str(e)}"
+          failed_imports.append(error_info)
+          logger.warning(error_info)
+        except Exception as e:
+          error_info = f"Unexpected error importing '{name}': {str(e)}"
+          failed_imports.append(error_info)
+          logger.error(error_info)
+    
+    # Register core repo command
+    try:
+      logger.debug("Registering repo command")
+      repo.register_cli(app)
+    except Exception as e:
+      error_info = f"Repo command registration failed: {str(e)}"
+      failed_registrations.append(error_info)
+      logger.warning(error_info)
+    
+    # Register template-based modules with app
+    module_classes = list(registry.iter_module_classes())
+    logger.debug(f"Registering {len(module_classes)} template-based modules")
+    
+    for name, module_cls in module_classes:
+      try:
+        logger.debug(f"Registering module class: {module_cls.__name__}")
+        module_cls.register_cli(app)
+      except Exception as e:
+        error_info = f"Registration failed for '{module_cls.__name__}': {str(e)}"
+        failed_registrations.append(error_info)
+        # Log warning but don't raise exception for individual module failures
+        logger.warning(error_info)
+        console.print(f"[yellow]Warning:[/yellow] {error_info}")
+    
+    # If we have no modules registered at all, that's a critical error
+    if not module_classes and not failed_imports:
+      raise RuntimeError("No modules found to register")
+    
+    # Log summary
+    successful_modules = len(module_classes) - len(failed_registrations)
+    logger.info(f"Application initialized: {successful_modules} modules registered successfully")
+    
+    if failed_imports:
+      logger.info(f"Module import failures: {len(failed_imports)}")
+    if failed_registrations:
+      logger.info(f"Module registration failures: {len(failed_registrations)}")
+      
+  except Exception as e:
+    error_details = []
+    if failed_imports:
+      error_details.extend(["Import failures:"] + [f"  - {err}" for err in failed_imports])
+    if failed_registrations:
+      error_details.extend(["Registration failures:"] + [f"  - {err}" for err in failed_registrations])
+    
+    details = "\n".join(error_details) if error_details else str(e)
+    raise RuntimeError(f"Application initialization failed: {details}")
+
+def run() -> None:
+  """Run the CLI application."""
+  try:
+    init_app()
+    app()
+  except (ValueError, RuntimeError) as e:
+    # Handle configuration and initialization errors cleanly
+    console.print(f"[bold red]Error:[/bold red] {e}")
+    sys.exit(1)
+  except ImportError as e:
+    # Handle module import errors with detailed info
+    console.print(f"[bold red]Module Import Error:[/bold red] {e}")
+    sys.exit(1)
+  except KeyboardInterrupt:
+    # Handle Ctrl+C gracefully
+    console.print("\n[yellow]Operation cancelled by user[/yellow]")
+    sys.exit(130)
+  except Exception as e:
+    # Handle unexpected errors - show simplified message
+    console.print(f"[bold red]Unexpected error:[/bold red] {e}")
+    console.print("[dim]Use --log-level DEBUG for more details[/dim]")
+    sys.exit(1)
+
+if __name__ == "__main__":
+  run()

+ 587 - 0
cli/core/collection.py

@@ -0,0 +1,587 @@
+from __future__ import annotations
+
+from collections import defaultdict
+from typing import Any, Dict, List, Optional, Set, Union
+import logging
+
+from .variable import Variable
+from .section import VariableSection
+
+logger = logging.getLogger(__name__)
+
+
+class VariableCollection:
+  """Manages variables grouped by sections and builds Jinja context."""
+
+  def __init__(self, spec: dict[str, Any]) -> None:
+    """Initialize VariableCollection from a specification dictionary.
+    
+    Args:
+        spec: Dictionary containing the complete variable specification structure
+              Expected format (as used in compose.py):
+              {
+                "section_key": {
+                  "title": "Section Title",
+                  "prompt": "Optional prompt text",
+                  "toggle": "optional_toggle_var_name", 
+                  "description": "Optional description",
+                  "vars": {
+                    "var_name": {
+                      "description": "Variable description",
+                      "type": "str",
+                      "default": "default_value",
+                      ...
+                    }
+                  }
+                }
+              }
+    """
+    if not isinstance(spec, dict):
+      raise ValueError("Spec must be a dictionary")
+    
+    self._sections: Dict[str, VariableSection] = {}
+    # NOTE: The _variable_map provides a flat, O(1) lookup for any variable by its name,
+    # avoiding the need to iterate through sections. It stores references to the same
+    # Variable objects contained in the _set structure.
+    self._variable_map: Dict[str, Variable] = {}
+    self._initialize_sections(spec)
+    # Validate dependencies after all sections are loaded
+    self._validate_dependencies()
+
+  def _initialize_sections(self, spec: dict[str, Any]) -> None:
+    """Initialize sections from the spec."""
+    for section_key, section_data in spec.items():
+      if not isinstance(section_data, dict):
+        continue
+      
+      section = self._create_section(section_key, section_data)
+      # Guard against None from empty YAML sections (vars: with no content)
+      vars_data = section_data.get("vars") or {}
+      self._initialize_variables(section, vars_data)
+      self._sections[section_key] = section
+    
+    # Validate all variable names are unique across sections
+    self._validate_unique_variable_names()
+
+  def _create_section(self, key: str, data: dict[str, Any]) -> VariableSection:
+    """Create a VariableSection from data."""
+    section_init_data = {
+      "key": key,
+      "title": data.get("title", key.replace("_", " ").title()),
+      "description": data.get("description"),
+      "toggle": data.get("toggle"),
+      "required": data.get("required", key == "general"),
+      "needs": data.get("needs")
+    }
+    return VariableSection(section_init_data)
+
+  def _initialize_variables(self, section: VariableSection, vars_data: dict[str, Any]) -> None:
+    """Initialize variables for a section."""
+    # Guard against None from empty YAML sections
+    if vars_data is None:
+      vars_data = {}
+    
+    for var_name, var_data in vars_data.items():
+      var_init_data = {"name": var_name, **var_data}
+      variable = Variable(var_init_data)
+      section.variables[var_name] = variable
+      # NOTE: Populate the direct lookup map for efficient access.
+      self._variable_map[var_name] = variable
+    
+    # Validate toggle variable after all variables are added
+    self._validate_section_toggle(section)
+    # TODO: Add more section-level validation:
+    #   - Validate that required sections have at least one non-toggle variable
+    #   - Validate that enum variables have non-empty options lists
+    #   - Validate that variable names follow naming conventions (e.g., lowercase_with_underscores)
+    #   - Validate that default values are compatible with their type definitions
+
+  def _validate_unique_variable_names(self) -> None:
+    """Validate that all variable names are unique across all sections."""
+    var_to_sections: Dict[str, List[str]] = defaultdict(list)
+    
+    # Build mapping of variable names to sections
+    for section_key, section in self._sections.items():
+      for var_name in section.variables:
+        var_to_sections[var_name].append(section_key)
+    
+    # Find duplicates and format error
+    duplicates = {var: sections for var, sections in var_to_sections.items() if len(sections) > 1}
+    
+    if duplicates:
+      errors = ["Variable names must be unique across all sections, but found duplicates:"]
+      errors.extend(f"  - '{var}' appears in sections: {', '.join(secs)}" for var, secs in sorted(duplicates.items()))
+      errors.append("\nPlease rename variables to be unique or consolidate them into a single section.")
+      error_msg = "\n".join(errors)
+      logger.error(error_msg)
+      raise ValueError(error_msg)
+  
+  def _validate_section_toggle(self, section: VariableSection) -> None:
+    """Validate that toggle variable is of type bool if it exists.
+    
+    If the toggle variable doesn't exist (e.g., filtered out), removes the toggle.
+    
+    Args:
+        section: The section to validate
+        
+    Raises:
+        ValueError: If toggle variable exists but is not boolean type
+    """
+    if not section.toggle:
+      return
+    
+    toggle_var = section.variables.get(section.toggle)
+    if not toggle_var:
+      # Toggle variable doesn't exist (e.g., was filtered out) - remove toggle metadata
+      section.toggle = None
+      return
+    
+    if toggle_var.type != "bool":
+      raise ValueError(
+        f"Section '{section.key}' toggle variable '{section.toggle}' must be type 'bool', "
+        f"but is type '{toggle_var.type}'"
+      )
+  
+  def _validate_dependencies(self) -> None:
+    """Validate section dependencies for cycles and missing references.
+    
+    Raises:
+        ValueError: If circular dependencies or missing section references are found
+    """
+    # Check for missing dependencies
+    for section_key, section in self._sections.items():
+      for dep in section.needs:
+        if dep not in self._sections:
+          raise ValueError(
+            f"Section '{section_key}' depends on '{dep}', but '{dep}' does not exist"
+          )
+    
+    # Check for circular dependencies using depth-first search
+    visited = set()
+    rec_stack = set()
+    
+    def has_cycle(section_key: str) -> bool:
+      visited.add(section_key)
+      rec_stack.add(section_key)
+      
+      section = self._sections[section_key]
+      for dep in section.needs:
+        if dep not in visited:
+          if has_cycle(dep):
+            return True
+        elif dep in rec_stack:
+          raise ValueError(
+            f"Circular dependency detected: '{section_key}' depends on '{dep}', "
+            f"which creates a cycle"
+          )
+      
+      rec_stack.remove(section_key)
+      return False
+    
+    for section_key in self._sections:
+      if section_key not in visited:
+        has_cycle(section_key)
+  
+  def is_section_satisfied(self, section_key: str) -> bool:
+    """Check if all dependencies for a section are satisfied.
+    
+    A dependency is satisfied if:
+    1. The dependency section exists
+    2. The dependency section is enabled (if it has a toggle)
+    
+    Args:
+        section_key: The key of the section to check
+        
+    Returns:
+        True if all dependencies are satisfied, False otherwise
+    """
+    section = self._sections.get(section_key)
+    if not section:
+      return False
+    
+    # No dependencies = always satisfied
+    if not section.needs:
+      return True
+    
+    # Check each dependency
+    for dep_key in section.needs:
+      dep_section = self._sections.get(dep_key)
+      if not dep_section:
+        logger.warning(f"Section '{section_key}' depends on missing section '{dep_key}'")
+        return False
+      
+      # Check if dependency is enabled
+      if not dep_section.is_enabled():
+        logger.debug(f"Section '{section_key}' dependency '{dep_key}' is disabled")
+        return False
+    
+    return True
+
+  def sort_sections(self) -> None:
+    """Sort sections with the following priority:
+    
+    1. Dependencies come before dependents (topological sort)
+    2. Required sections first (in their original order)
+    3. Enabled sections with satisfied dependencies next (in their original order)
+    4. Disabled sections or sections with unsatisfied dependencies last (in their original order)
+    
+    This maintains the original ordering within each group while organizing
+    sections logically for display and user interaction, and ensures that
+    sections are prompted in the correct dependency order.
+    """
+    # First, perform topological sort to respect dependencies
+    sorted_keys = self._topological_sort()
+    
+    # Then apply priority sorting within dependency groups
+    section_items = [(key, self._sections[key]) for key in sorted_keys]
+    
+    # Define sort key: (priority, original_index)
+    # Priority: 0 = required, 1 = enabled with satisfied dependencies, 2 = disabled or unsatisfied dependencies
+    def get_sort_key(item_with_index):
+      index, (key, section) = item_with_index
+      if section.required:
+        priority = 0
+      elif section.is_enabled() and self.is_section_satisfied(key):
+        priority = 1
+      else:
+        priority = 2
+      return (priority, index)
+    
+    # Sort with original index to maintain order within each priority group
+    # Note: This preserves the topological order from earlier
+    # NOTE(review): only *within* each priority group. A non-required enabled
+    # dependency (priority 1) that topologically precedes a required dependent
+    # (priority 0) will be moved after it by this sort, contradicting rule 1
+    # in the docstring above — confirm whether that ordering is acceptable.
+    sorted_items = sorted(
+      enumerate(section_items),
+      key=get_sort_key
+    )
+    
+    # Rebuild _sections dict in new order
+    self._sections = {key: section for _, (key, section) in sorted_items}
+  
+  def _topological_sort(self) -> List[str]:
+    """Perform topological sort on sections based on dependencies using Kahn's algorithm."""
+    in_degree = {key: len(section.needs) for key, section in self._sections.items()}
+    queue = [key for key, degree in in_degree.items() if degree == 0]
+    queue.sort(key=lambda k: list(self._sections.keys()).index(k))  # Preserve original order
+    result = []
+    
+    while queue:
+      current = queue.pop(0)
+      result.append(current)
+      
+      # Update in-degree for dependent sections
+      for key, section in self._sections.items():
+        if current in section.needs:
+          in_degree[key] -= 1
+          if in_degree[key] == 0:
+            queue.append(key)
+    
+    # Fallback to original order if cycle detected
+    if len(result) != len(self._sections):
+      logger.warning("Topological sort incomplete - using original order")
+      return list(self._sections.keys())
+    
+    return result
+
+  def get_sections(self) -> Dict[str, VariableSection]:
+    """Get all sections in the collection."""
+    return self._sections.copy()
+  
+  def get_section(self, key: str) -> Optional[VariableSection]:
+    """Get a specific section by its key."""
+    return self._sections.get(key)
+  
+  def has_sections(self) -> bool:
+    """Check if the collection has any sections."""
+    return bool(self._sections)
+
+  def get_all_values(self) -> dict[str, Any]:
+    """Get all variable values as a dictionary."""
+    # NOTE: Uses _variable_map for O(1) access
+    return {name: var.convert(var.value) for name, var in self._variable_map.items()}
+  
+  def get_satisfied_values(self) -> dict[str, Any]:
+    """Get variable values only from sections with satisfied dependencies.
+    
+    This respects both toggle states and section dependencies, ensuring that:
+    - Variables from disabled sections (toggle=false) are excluded
+    - Variables from sections with unsatisfied dependencies are excluded
+    
+    Returns:
+        Dictionary of variable names to values for satisfied sections only
+    """
+    satisfied_values = {}
+    
+    for section_key, section in self._sections.items():
+      # Skip sections with unsatisfied dependencies
+      if not self.is_section_satisfied(section_key):
+        logger.debug(f"Excluding variables from section '{section_key}' - dependencies not satisfied")
+        continue
+      
+      # Skip disabled sections (toggle check)
+      if not section.is_enabled():
+        logger.debug(f"Excluding variables from section '{section_key}' - section is disabled")
+        continue
+      
+      # Include all variables from this satisfied section
+      for var_name, variable in section.variables.items():
+        satisfied_values[var_name] = variable.convert(variable.value)
+    
+    return satisfied_values
+
+  def get_sensitive_variables(self) -> Dict[str, Any]:
+    """Get only the sensitive variables with their values."""
+    return {name: var.value for name, var in self._variable_map.items() if var.sensitive and var.value}
+
+  def apply_defaults(self, defaults: dict[str, Any], origin: str = "cli") -> list[str]:
+    """Apply default values to variables, updating their origin.
+    
+    Unknown variable names are skipped with a warning and omitted from the
+    result. Values rejected by convert() (ValueError) are collected, logged,
+    and likewise omitted — the method never raises for bad values.
+    
+    Args:
+        defaults: Dictionary mapping variable names to their default values
+        origin: Source of these defaults (e.g., 'config', 'cli')
+        
+    Returns:
+        List of variable names that were successfully updated
+    """
+    # NOTE: This method uses the _variable_map for a significant performance gain,
+    # as it allows direct O(1) lookup of variables instead of iterating
+    # through all sections to find a match.
+    successful = []
+    errors = []
+    
+    for var_name, value in defaults.items():
+      try:
+        variable = self._variable_map.get(var_name)
+        if not variable:
+          logger.warning(f"Variable '{var_name}' not found in template")
+          continue
+        
+        # Store original value before overriding (for display purposes)
+        # Only store if this is the first time config is being applied
+        # (the _original_stored marker attribute guards against re-capture).
+        if origin == "config" and not hasattr(variable, '_original_stored'):
+          variable.original_value = variable.value
+          variable._original_stored = True
+        
+        # Convert and set the new value
+        converted_value = variable.convert(value)
+        variable.value = converted_value
+        
+        # Set origin to the current source (not a chain)
+        variable.origin = origin
+        
+        successful.append(var_name)
+          
+      # Only ValueError (raised by convert) is handled here; any other
+      # exception type propagates to the caller.
+      except ValueError as e:
+        error_msg = f"Invalid value for '{var_name}': {value} - {e}"
+        errors.append(error_msg)
+        logger.error(error_msg)
+    
+    if errors:
+      logger.warning(f"Some defaults failed to apply: {'; '.join(errors)}")
+    
+    return successful
+  
+  def validate_all(self) -> None:
+    """Validate all variables in the collection, skipping disabled and unsatisfied sections.
+
+    Raises:
+        ValueError: A single aggregated error listing every failing variable
+            as 'section.variable (reason)'.
+    """
+    errors: list[str] = []
+
+    for section_key, section in self._sections.items():
+      # Skip sections with unsatisfied dependencies or disabled via toggle
+      if not self.is_section_satisfied(section_key) or not section.is_enabled():
+        logger.debug(f"Skipping validation for section '{section_key}'")
+        continue
+
+      # Validate each variable in the section
+      for var_name, variable in section.variables.items():
+        try:
+          # Skip autogenerated variables when empty
+          if variable.autogenerated and not variable.value:
+            continue
+          
+          # Check required fields
+          if variable.value is None:
+            if variable.is_required():
+              errors.append(f"{section.key}.{var_name} (required - no default provided)")
+            continue
+
+          # Validate typed value
+          typed = variable.convert(variable.value)
+          # NOTE(review): any falsy converted value (except for type "bool")
+          # is flagged as empty — this also catches int 0 and empty lists;
+          # confirm that is intended for numeric types.
+          if variable.type not in ("bool",) and not typed:
+            msg = f"{section.key}.{var_name}"
+            errors.append(f"{msg} (required - cannot be empty)" if variable.is_required() else f"{msg} (empty)")
+
+        except ValueError as e:
+          errors.append(f"{section.key}.{var_name} (invalid format: {e})")
+
+    if errors:
+      error_msg = "Variable validation failed: " + ", ".join(errors)
+      logger.error(error_msg)
+      raise ValueError(error_msg)
+
+  def merge(self, other_spec: Union[Dict[str, Any], 'VariableCollection'], origin: str = "override") -> 'VariableCollection':
+    """Merge another spec or VariableCollection into this one with precedence tracking.
+    
+    OPTIMIZED: Works directly on objects without dict conversions for better performance.
+    
+    The other spec/collection has higher precedence and will override values in self.
+    Creates a new VariableCollection with merged data; neither input is mutated
+    (sections are cloned into the result).
+    
+    Args:
+        other_spec: Either a spec dictionary or another VariableCollection to merge
+        origin: Origin label for variables from other_spec (e.g., 'template', 'config')
+        
+    Returns:
+        New VariableCollection with merged data
+        
+    Example:
+        module_vars = VariableCollection(module_spec)
+        template_vars = module_vars.merge(template_spec, origin='template')
+        # Variables from template_spec override module_spec
+        # Origins tracked: 'module' or 'module -> template'
+    """
+    # Convert dict to VariableCollection if needed (only once)
+    if isinstance(other_spec, dict):
+      other = VariableCollection(other_spec)
+    else:
+      other = other_spec
+    
+    # Create new collection without calling __init__ (optimization)
+    merged = VariableCollection.__new__(VariableCollection)
+    merged._sections = {}
+    merged._variable_map = {}
+    
+    # First pass: clone sections from self
+    for section_key, self_section in self._sections.items():
+      if section_key in other._sections:
+        # Section exists in both - will merge
+        merged._sections[section_key] = self._merge_sections(
+          self_section, 
+          other._sections[section_key], 
+          origin
+        )
+      else:
+        # Section only in self - clone it
+        merged._sections[section_key] = self_section.clone()
+    
+    # Second pass: add sections that only exist in other
+    for section_key, other_section in other._sections.items():
+      if section_key not in merged._sections:
+        # New section from other - clone with origin update
+        merged._sections[section_key] = other_section.clone(origin_update=origin)
+    
+    # Rebuild variable map for O(1) lookups.
+    # If the same variable name appears in multiple sections, the variable
+    # from the later section silently wins in this map.
+    for section in merged._sections.values():
+      for var_name, variable in section.variables.items():
+        merged._variable_map[var_name] = variable
+    
+    return merged
+  
+  def _merge_sections(self, self_section: VariableSection, other_section: VariableSection, origin: str) -> VariableSection:
+    """Merge two sections, with other_section taking precedence.
+    
+    Returns a new section cloned from self_section; neither input is mutated.
+    Variables coming from other_section get their origin set to `origin`.
+    """
+    merged_section = self_section.clone()
+    
+    # Update section metadata from other (other takes precedence)
+    # NOTE(review): the truthiness check means falsy metadata from other
+    # (e.g. an empty description) never overrides — confirm intended.
+    for attr in ('title', 'description', 'toggle'):
+      if getattr(other_section, attr):
+        setattr(merged_section, attr, getattr(other_section, attr))
+    
+    merged_section.required = other_section.required
+    if other_section.needs:
+      merged_section.needs = other_section.needs.copy()
+    
+    # Merge variables
+    for var_name, other_var in other_section.variables.items():
+      if var_name in merged_section.variables:
+        # Variable exists in both - merge with other taking precedence
+        self_var = merged_section.variables[var_name]
+        
+        # Build update dict with ONLY explicitly provided fields from other
+        update = {'origin': origin}
+        field_map = {
+          'type': other_var.type,
+          'description': other_var.description,
+          'prompt': other_var.prompt,
+          'options': other_var.options,
+          'sensitive': other_var.sensitive,
+          'extra': other_var.extra
+        }
+        
+        # Add fields that were explicitly provided and have values
+        # NOTE(review): the `and value` clause drops explicitly provided
+        # falsy overrides (e.g. sensitive: False, empty options) — confirm
+        # this is the intended precedence behavior.
+        for field, value in field_map.items():
+          if field in other_var._explicit_fields and value:
+            update[field] = value
+        
+        # Special handling for value/default
+        if ('value' in other_var._explicit_fields or 'default' in other_var._explicit_fields) and other_var.value is not None:
+          update['value'] = other_var.value
+        
+        merged_section.variables[var_name] = self_var.clone(update=update)
+      else:
+        # New variable from other - clone with origin
+        merged_section.variables[var_name] = other_var.clone(update={'origin': origin})
+    
+    return merged_section
+  
+  def filter_to_used(self, used_variables: Set[str], keep_sensitive: bool = True) -> 'VariableCollection':
+    """Filter collection to only variables that are used (or sensitive).
+    
+    OPTIMIZED: Works directly on objects without dict conversions for better performance.
+    
+    Creates a new VariableCollection containing only the variables in used_variables.
+    Sections with no remaining variables are removed.
+    
+    Args:
+        used_variables: Set of variable names that are actually used
+        keep_sensitive: If True, also keep sensitive variables even if not in used set
+        
+    Returns:
+        New VariableCollection with filtered variables
+        
+    Example:
+        all_vars = VariableCollection(spec)
+        used_vars = all_vars.filter_to_used({'var1', 'var2', 'var3'})
+        # Only var1, var2, var3 (and any sensitive vars) remain
+    """
+    # Create new collection without calling __init__ (optimization)
+    filtered = VariableCollection.__new__(VariableCollection)
+    filtered._sections = {}
+    filtered._variable_map = {}
+    
+    # Filter each section
+    for section_key, section in self._sections.items():
+      # Create a new section with same metadata
+      filtered_section = VariableSection({
+        'key': section.key,
+        'title': section.title,
+        'description': section.description,
+        'toggle': section.toggle,
+        'required': section.required,
+        'needs': section.needs.copy() if section.needs else None,
+      })
+      
+      # Clone only the variables that should be included
+      for var_name, variable in section.variables.items():
+        # Include if used OR if sensitive (and keep_sensitive is True)
+        should_include = (
+          var_name in used_variables or 
+          (keep_sensitive and variable.sensitive)
+        )
+        
+        if should_include:
+          filtered_section.variables[var_name] = variable.clone()
+      
+      # Only add section if it has variables
+      if filtered_section.variables:
+        filtered._sections[section_key] = filtered_section
+        # Add variables to map
+        for var_name, variable in filtered_section.variables.items():
+          filtered._variable_map[var_name] = variable
+    
+    return filtered
+  
+  def get_all_variable_names(self) -> Set[str]:
+    """Get set of all variable names across all sections.
+    
+    Returns:
+        Set of all variable names
+    """
+    return set(self._variable_map.keys())

+ 772 - 0
cli/core/config.py

@@ -0,0 +1,772 @@
+from __future__ import annotations
+
+import logging
+import os
+import re
+import shutil
+import tempfile
+from pathlib import Path
+from typing import Any, Dict, Optional, Union
+
+import yaml
+from rich.console import Console
+
+from .variable import Variable
+from .section import VariableSection
+from .collection import VariableCollection
+from .exceptions import ConfigError, ConfigValidationError, YAMLParseError
+
+logger = logging.getLogger(__name__)
+console = Console()
+
+# Valid Python identifier pattern for variable names
+VALID_IDENTIFIER_PATTERN = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*$')
+
+# Valid path pattern - prevents path traversal attempts
+# NOTE(review): not referenced anywhere in this module's visible code —
+# _validate_path_string performs its own character checks; confirm whether
+# this constant is dead or used elsewhere.
+VALID_PATH_PATTERN = re.compile(r'^[^\x00-\x1f<>:"|?*]+$')
+
+# Maximum allowed string lengths to prevent DOS attacks
+MAX_STRING_LENGTH = 1000  # generic string values; names use tighter per-call caps
+MAX_PATH_LENGTH = 4096  # upper bound for path-like strings
+MAX_LIST_LENGTH = 100  # upper bound for list-valued settings
+
+class ConfigManager:
+    """Manages configuration for the CLI application."""
+    
+    def __init__(self, config_path: Optional[Union[str, Path]] = None) -> None:
+        """Initialize the configuration manager.
+        
+        Side effects: when config_path is None, creates the default
+        ~/.config/boilerplates directory. If the resolved config file does
+        not exist a default one is written; otherwise the existing file is
+        migrated in place if it is missing newer sections.
+        
+        Args:
+            config_path: Path to the configuration file. If None, uses default location.
+        """
+        if config_path is None:
+            # Default to ~/.config/boilerplates/config.yaml
+            config_dir = Path.home() / ".config" / "boilerplates"
+            config_dir.mkdir(parents=True, exist_ok=True)
+            self.config_path = config_dir / "config.yaml"
+        else:
+            # Custom path: parent directories are created later by
+            # _write_config if needed.
+            self.config_path = Path(config_path)
+        
+        # Create default config if it doesn't exist
+        if not self.config_path.exists():
+            self._create_default_config()
+        else:
+            # Migrate existing config if needed
+            self._migrate_config_if_needed()
+    
+    def _create_default_config(self) -> None:
+        """Create a default configuration file.
+        
+        Writes the seed schema (defaults / preferences / libraries) via the
+        atomic _write_config path; only called from __init__ when no config
+        file exists yet.
+        """
+        default_config = {
+            "defaults": {},
+            "preferences": {
+                "editor": "vim",
+                "output_dir": None,
+                "library_paths": []
+            },
+            # Single built-in library entry pointing at the upstream repo.
+            "libraries": [
+                {
+                    "name": "default",
+                    "url": "https://github.com/christianlempa/boilerplates.git",
+                    "branch": "main",
+                    "directory": "library",
+                    "enabled": True
+                }
+            ]
+        }
+        self._write_config(default_config)
+        logger.info(f"Created default configuration at {self.config_path}")
+    
+    def _migrate_config_if_needed(self) -> None:
+        """Migrate existing config to add missing sections like libraries."""
+        try:
+            config = self._read_config()
+            needs_migration = False
+            
+            # Add libraries section if missing
+            if "libraries" not in config:
+                logger.info("Migrating config: adding libraries section")
+                config["libraries"] = [
+                    {
+                        "name": "default",
+                        "url": "https://github.com/christianlempa/boilerplates.git",
+                        "branch": "refactor/boilerplates-v2",
+                        "directory": "library",
+                        "enabled": True
+                    }
+                ]
+                needs_migration = True
+            
+            # Write back if migration was needed
+            if needs_migration:
+                self._write_config(config)
+                logger.info("Config migration completed")
+        except Exception as e:
+            logger.warning(f"Config migration failed: {e}")
+    
+    @staticmethod
+    def _validate_string_length(value: str, field_name: str, max_length: int = MAX_STRING_LENGTH) -> None:
+        """Validate string length to prevent DOS attacks.
+        
+        Args:
+            value: String value to validate
+            field_name: Name of the field for error messages
+            max_length: Maximum allowed length
+            
+        Raises:
+            ConfigValidationError: If string exceeds maximum length
+        """
+        if len(value) > max_length:
+            raise ConfigValidationError(
+                f"{field_name} exceeds maximum length of {max_length} characters "
+                f"(got {len(value)} characters)"
+            )
+    
+    @staticmethod
+    def _validate_path_string(path: str, field_name: str) -> None:
+        """Validate path string for security concerns.
+        
+        Checks length and control characters (hard errors); path traversal
+        ('..') is only logged as a warning, not rejected.
+        
+        Args:
+            path: Path string to validate
+            field_name: Name of the field for error messages
+            
+        Raises:
+            ConfigValidationError: If path contains invalid characters or patterns
+        """
+        # Check length
+        if len(path) > MAX_PATH_LENGTH:
+            raise ConfigValidationError(
+                f"{field_name} exceeds maximum path length of {MAX_PATH_LENGTH} characters"
+            )
+        
+        # Check for null bytes and control characters
+        # (tab/newline/carriage-return are tolerated)
+        if '\x00' in path or any(ord(c) < 32 for c in path if c not in '\t\n\r'):
+            raise ConfigValidationError(
+                f"{field_name} contains invalid control characters"
+            )
+        
+        # Check for path traversal attempts
+        # NOTE(review): splitting on '/' only — backslash-separated segments
+        # on Windows (e.g. '..\\x') are not detected, and a detected '..'
+        # only warns rather than raising; confirm both are intended.
+        if '..' in path.split('/'):
+            logger.warning(f"Path '{path}' contains '..' - potential path traversal attempt")
+    
+    @staticmethod
+    def _validate_list_length(lst: list, field_name: str, max_length: int = MAX_LIST_LENGTH) -> None:
+        """Validate list length to prevent DOS attacks.
+        
+        Args:
+            lst: List to validate
+            field_name: Name of the field for error messages
+            max_length: Maximum allowed length
+            
+        Raises:
+            ConfigValidationError: If list exceeds maximum length
+        """
+        if len(lst) > max_length:
+            raise ConfigValidationError(
+                f"{field_name} exceeds maximum length of {max_length} items (got {len(lst)} items)"
+            )
+    
+    def _read_config(self) -> Dict[str, Any]:
+        """Read configuration from file.
+        
+        Returns:
+            Dictionary containing the configuration.
+            
+        Raises:
+            YAMLParseError: If YAML parsing fails.
+            ConfigValidationError: If configuration structure is invalid.
+            ConfigError: If reading fails for other reasons.
+        """
+        try:
+            with open(self.config_path, 'r') as f:
+                config = yaml.safe_load(f) or {}
+            
+            # Validate config structure
+            self._validate_config_structure(config)
+            
+            return config
+        except yaml.YAMLError as e:
+            logger.error(f"Failed to parse YAML configuration: {e}")
+            raise YAMLParseError(str(self.config_path), e)
+        except ConfigValidationError:
+            # Re-raise validation errors as-is
+            raise
+        except (IOError, OSError) as e:
+            logger.error(f"Failed to read configuration file: {e}")
+            raise ConfigError(f"Failed to read configuration file '{self.config_path}': {e}")
+    
+    def _write_config(self, config: Dict[str, Any]) -> None:
+        """Write configuration to file atomically using temp file + rename pattern.
+        
+        This prevents config file corruption if write operation fails partway through.
+        
+        Args:
+            config: Dictionary containing the configuration to write.
+            
+        Raises:
+            ConfigValidationError: If configuration structure is invalid.
+            ConfigError: If writing fails for any reason.
+        """
+        tmp_path = None
+        try:
+            # Validate config structure before writing
+            self._validate_config_structure(config)
+            
+            # Ensure parent directory exists
+            self.config_path.parent.mkdir(parents=True, exist_ok=True)
+            
+            # Write to temporary file in same directory for atomic rename
+            with tempfile.NamedTemporaryFile(
+                mode='w',
+                delete=False,
+                dir=self.config_path.parent,
+                prefix='.config_',
+                suffix='.tmp'
+            ) as tmp_file:
+                yaml.dump(config, tmp_file, default_flow_style=False)
+                tmp_path = tmp_file.name
+            
+            # Atomic rename (overwrites existing file on POSIX systems)
+            shutil.move(tmp_path, self.config_path)
+            logger.debug(f"Configuration written atomically to {self.config_path}")
+            
+        except ConfigValidationError:
+            # Re-raise validation errors as-is
+            if tmp_path:
+                Path(tmp_path).unlink(missing_ok=True)
+            raise
+        except (IOError, OSError, yaml.YAMLError) as e:
+            # Clean up temp file if it exists
+            if tmp_path:
+                try:
+                    Path(tmp_path).unlink(missing_ok=True)
+                except (IOError, OSError):
+                    logger.warning(f"Failed to clean up temporary file: {tmp_path}")
+            logger.error(f"Failed to write configuration file: {e}")
+            raise ConfigError(f"Failed to write configuration to '{self.config_path}': {e}")
+    
+    def _validate_config_structure(self, config: Dict[str, Any]) -> None:
+        """Validate the configuration structure with comprehensive checks.
+        
+        Enforces type checks plus size caps (string, path, and list length
+        limits) on the 'defaults', 'preferences', and 'libraries' sections.
+        Unknown top-level keys and unknown preference keys are ignored.
+        
+        Args:
+            config: Configuration dictionary to validate.
+            
+        Raises:
+            ConfigValidationError: If configuration structure is invalid.
+        """
+        if not isinstance(config, dict):
+            raise ConfigValidationError("Configuration must be a dictionary")
+        
+        # Check top-level structure
+        if "defaults" in config and not isinstance(config["defaults"], dict):
+            raise ConfigValidationError("'defaults' must be a dictionary")
+        
+        if "preferences" in config and not isinstance(config["preferences"], dict):
+            raise ConfigValidationError("'preferences' must be a dictionary")
+        
+        # Validate defaults structure: {module_name: {var_name: value}}
+        if "defaults" in config:
+            for module_name, module_defaults in config["defaults"].items():
+                if not isinstance(module_name, str):
+                    raise ConfigValidationError(f"Module name must be a string, got {type(module_name).__name__}")
+                
+                # Validate module name length
+                self._validate_string_length(module_name, "Module name", max_length=100)
+                
+                if not isinstance(module_defaults, dict):
+                    raise ConfigValidationError(f"Defaults for module '{module_name}' must be a dictionary")
+                
+                # Validate number of defaults per module
+                self._validate_list_length(
+                    list(module_defaults.keys()), 
+                    f"Defaults for module '{module_name}'"
+                )
+                
+                # Validate variable names are valid Python identifiers
+                for var_name, var_value in module_defaults.items():
+                    if not isinstance(var_name, str):
+                        raise ConfigValidationError(f"Variable name must be a string, got {type(var_name).__name__}")
+                    
+                    # Validate variable name length
+                    self._validate_string_length(var_name, "Variable name", max_length=100)
+                    
+                    if not VALID_IDENTIFIER_PATTERN.match(var_name):
+                        raise ConfigValidationError(
+                            f"Invalid variable name '{var_name}' in module '{module_name}'. "
+                            f"Variable names must be valid Python identifiers (letters, numbers, underscores, "
+                            f"cannot start with a number)"
+                        )
+                    
+                    # Validate variable value types and lengths
+                    # (allowed scalars: str, bool, int, float, list, None)
+                    if isinstance(var_value, str):
+                        self._validate_string_length(
+                            var_value, 
+                            f"Value for '{module_name}.{var_name}'"
+                        )
+                    elif isinstance(var_value, list):
+                        self._validate_list_length(
+                            var_value, 
+                            f"Value for '{module_name}.{var_name}'"
+                        )
+                    elif var_value is not None and not isinstance(var_value, (bool, int, float)):
+                        raise ConfigValidationError(
+                            f"Invalid value type for '{module_name}.{var_name}': "
+                            f"must be string, number, boolean, list, or null (got {type(var_value).__name__})"
+                        )
+        
+        # Validate preferences structure and types
+        # (only 'editor', 'output_dir', 'library_paths' are checked; other
+        # preference keys pass through unvalidated)
+        if "preferences" in config:
+            preferences = config["preferences"]
+            
+            # Validate known preference types
+            if "editor" in preferences:
+                if not isinstance(preferences["editor"], str):
+                    raise ConfigValidationError("Preference 'editor' must be a string")
+                self._validate_string_length(preferences["editor"], "Preference 'editor'", max_length=100)
+            
+            if "output_dir" in preferences:
+                output_dir = preferences["output_dir"]
+                if output_dir is not None:
+                    if not isinstance(output_dir, str):
+                        raise ConfigValidationError("Preference 'output_dir' must be a string or null")
+                    self._validate_path_string(output_dir, "Preference 'output_dir'")
+            
+            if "library_paths" in preferences:
+                if not isinstance(preferences["library_paths"], list):
+                    raise ConfigValidationError("Preference 'library_paths' must be a list")
+                
+                self._validate_list_length(preferences["library_paths"], "Preference 'library_paths'")
+                
+                for i, path in enumerate(preferences["library_paths"]):
+                    if not isinstance(path, str):
+                        raise ConfigValidationError(f"Library path must be a string, got {type(path).__name__}")
+                    self._validate_path_string(path, f"Library path at index {i}")
+        
+        # Validate libraries structure: list of dicts with required
+        # name/url/directory and optional branch/enabled fields
+        if "libraries" in config:
+            libraries = config["libraries"]
+            
+            if not isinstance(libraries, list):
+                raise ConfigValidationError("'libraries' must be a list")
+            
+            self._validate_list_length(libraries, "Libraries list")
+            
+            for i, library in enumerate(libraries):
+                if not isinstance(library, dict):
+                    raise ConfigValidationError(f"Library at index {i} must be a dictionary")
+                
+                # Validate required fields
+                required_fields = ["name", "url", "directory"]
+                for field in required_fields:
+                    if field not in library:
+                        raise ConfigValidationError(f"Library at index {i} missing required field '{field}'")
+                    
+                    if not isinstance(library[field], str):
+                        raise ConfigValidationError(f"Library '{field}' at index {i} must be a string")
+                    
+                    self._validate_string_length(library[field], f"Library '{field}' at index {i}", max_length=500)
+                
+                # Validate optional branch field
+                if "branch" in library:
+                    if not isinstance(library["branch"], str):
+                        raise ConfigValidationError(f"Library 'branch' at index {i} must be a string")
+                    self._validate_string_length(library["branch"], f"Library 'branch' at index {i}", max_length=200)
+                
+                # Validate optional enabled field
+                if "enabled" in library and not isinstance(library["enabled"], bool):
+                    raise ConfigValidationError(f"Library 'enabled' at index {i} must be a boolean")
+    
+    def get_config_path(self) -> Path:
+        """Get the path to the configuration file.
+        
+        A default file is created at this path by __init__ when none exists,
+        so the path normally refers to an existing file.
+        
+        Returns:
+            Path to the configuration file.
+        """
+        return self.config_path
+
+    def get_defaults(self, module_name: str) -> Dict[str, Any]:
+        """Get default variable values for a module.
+        
+        Returns defaults in a flat format:
+        {
+            "var_name": "value",
+            "var2_name": "value2"
+        }
+        
+        Args:
+            module_name: Name of the module
+            
+        Returns:
+            Dictionary of default values (flat key-value pairs)
+        """
+        config = self._read_config()
+        defaults = config.get("defaults", {})
+        return defaults.get(module_name, {})
+    
    def set_defaults(self, module_name: str, defaults: Dict[str, Any]) -> None:
        """Set default variable values for a module with comprehensive validation.

        Every variable name must be a valid Python identifier and every value
        must be a string, number, boolean, list, or null; nothing is written to
        disk unless the whole dictionary validates.

        Args:
            module_name: Name of the module
            defaults: Dictionary of defaults (flat key-value pairs):
                      {"var_name": "value", "var2_name": "value2"}

        Raises:
            ConfigValidationError: If module name or variable names are invalid.
        """
        # Validate module name
        if not isinstance(module_name, str) or not module_name:
            raise ConfigValidationError("Module name must be a non-empty string")

        self._validate_string_length(module_name, "Module name", max_length=100)

        # Validate defaults dictionary
        if not isinstance(defaults, dict):
            raise ConfigValidationError("Defaults must be a dictionary")

        # Validate number of defaults (guards against unbounded config growth)
        self._validate_list_length(list(defaults.keys()), "Defaults dictionary")

        # Validate variable names and values
        for var_name, var_value in defaults.items():
            if not isinstance(var_name, str):
                raise ConfigValidationError(f"Variable name must be a string, got {type(var_name).__name__}")

            self._validate_string_length(var_name, "Variable name", max_length=100)

            if not VALID_IDENTIFIER_PATTERN.match(var_name):
                raise ConfigValidationError(
                    f"Invalid variable name '{var_name}'. Variable names must be valid Python identifiers "
                    f"(letters, numbers, underscores, cannot start with a number)"
                )

            # Validate value types and lengths; bool is checked via isinstance
            # together with int/float, so booleans are accepted here.
            if isinstance(var_value, str):
                self._validate_string_length(var_value, f"Value for '{var_name}'")
            elif isinstance(var_value, list):
                self._validate_list_length(var_value, f"Value for '{var_name}'")
            elif var_value is not None and not isinstance(var_value, (bool, int, float)):
                raise ConfigValidationError(
                    f"Invalid value type for '{var_name}': "
                    f"must be string, number, boolean, list, or null (got {type(var_value).__name__})"
                )

        config = self._read_config()

        if "defaults" not in config:
            config["defaults"] = {}

        # Replaces the module's defaults wholesale (not merged with existing).
        config["defaults"][module_name] = defaults
        self._write_config(config)
        logger.info(f"Updated defaults for module '{module_name}'")
+    
+    def set_default_value(self, module_name: str, var_name: str, value: Any) -> None:
+        """Set a single default variable value with comprehensive validation.
+        
+        Args:
+            module_name: Name of the module
+            var_name: Name of the variable
+            value: Default value to set
+            
+        Raises:
+            ConfigValidationError: If module name or variable name is invalid.
+        """
+        # Validate inputs
+        if not isinstance(module_name, str) or not module_name:
+            raise ConfigValidationError("Module name must be a non-empty string")
+        
+        self._validate_string_length(module_name, "Module name", max_length=100)
+        
+        if not isinstance(var_name, str):
+            raise ConfigValidationError(f"Variable name must be a string, got {type(var_name).__name__}")
+        
+        self._validate_string_length(var_name, "Variable name", max_length=100)
+        
+        if not VALID_IDENTIFIER_PATTERN.match(var_name):
+            raise ConfigValidationError(
+                f"Invalid variable name '{var_name}'. Variable names must be valid Python identifiers "
+                f"(letters, numbers, underscores, cannot start with a number)"
+            )
+        
+        # Validate value type and length
+        if isinstance(value, str):
+            self._validate_string_length(value, f"Value for '{var_name}'")
+        elif isinstance(value, list):
+            self._validate_list_length(value, f"Value for '{var_name}'")
+        elif value is not None and not isinstance(value, (bool, int, float)):
+            raise ConfigValidationError(
+                f"Invalid value type for '{var_name}': "
+                f"must be string, number, boolean, list, or null (got {type(value).__name__})"
+            )
+        
+        defaults = self.get_defaults(module_name)
+        defaults[var_name] = value
+        self.set_defaults(module_name, defaults)
+        logger.info(f"Set default for '{module_name}.{var_name}' = '{value}'")
+    
+    def get_default_value(self, module_name: str, var_name: str) -> Optional[Any]:
+        """Get a single default variable value.
+        
+        Args:
+            module_name: Name of the module
+            var_name: Name of the variable
+            
+        Returns:
+            Default value or None if not set
+        """
+        defaults = self.get_defaults(module_name)
+        return defaults.get(var_name)
+    
+    def clear_defaults(self, module_name: str) -> None:
+        """Clear all defaults for a module.
+        
+        Args:
+            module_name: Name of the module
+        """
+        config = self._read_config()
+        
+        if "defaults" in config and module_name in config["defaults"]:
+            del config["defaults"][module_name]
+            self._write_config(config)
+            logger.info(f"Cleared defaults for module '{module_name}'")
+
+    def get_preference(self, key: str) -> Optional[Any]:
+        """Get a user preference value.
+        
+        Args:
+            key: Preference key (e.g., 'editor', 'output_dir', 'library_paths')
+            
+        Returns:
+            Preference value or None if not set
+        """
+        config = self._read_config()
+        preferences = config.get("preferences", {})
+        return preferences.get(key)
+    
    def set_preference(self, key: str, value: Any) -> None:
        """Set a user preference value with comprehensive validation.

        Known keys ('editor', 'output_dir', 'library_paths') get type- and
        shape-specific checks; unknown keys only get basic length validation.

        Args:
            key: Preference key
            value: Preference value

        Raises:
            ConfigValidationError: If key or value is invalid for known preference types.
        """
        # Validate key
        if not isinstance(key, str) or not key:
            raise ConfigValidationError("Preference key must be a non-empty string")

        self._validate_string_length(key, "Preference key", max_length=100)

        # Validate known preference types
        if key == "editor":
            if not isinstance(value, str):
                raise ConfigValidationError("Preference 'editor' must be a string")
            self._validate_string_length(value, "Preference 'editor'", max_length=100)

        elif key == "output_dir":
            # None is allowed: it means "no default output directory".
            if value is not None:
                if not isinstance(value, str):
                    raise ConfigValidationError("Preference 'output_dir' must be a string or null")
                self._validate_path_string(value, "Preference 'output_dir'")

        elif key == "library_paths":
            if not isinstance(value, list):
                raise ConfigValidationError("Preference 'library_paths' must be a list")

            self._validate_list_length(value, "Preference 'library_paths'")

            for i, path in enumerate(value):
                if not isinstance(path, str):
                    raise ConfigValidationError(f"Library path must be a string, got {type(path).__name__}")
                self._validate_path_string(path, f"Library path at index {i}")

        # For unknown preference keys, apply basic validation
        else:
            if isinstance(value, str):
                self._validate_string_length(value, f"Preference '{key}'")
            elif isinstance(value, list):
                self._validate_list_length(value, f"Preference '{key}'")

        config = self._read_config()

        if "preferences" not in config:
            config["preferences"] = {}

        config["preferences"][key] = value
        self._write_config(config)
        # NOTE(review): the raw value is written to the log here — confirm
        # preferences can never hold secrets before relying on these logs.
        logger.info(f"Set preference '{key}' = '{value}'")
+    
+    def get_all_preferences(self) -> Dict[str, Any]:
+        """Get all user preferences.
+        
+        Returns:
+            Dictionary of all preferences
+        """
+        config = self._read_config()
+        return config.get("preferences", {})
+    
+    def get_libraries(self) -> list[Dict[str, Any]]:
+        """Get all configured libraries.
+        
+        Returns:
+            List of library configurations
+        """
+        config = self._read_config()
+        return config.get("libraries", [])
+    
+    def get_library_by_name(self, name: str) -> Optional[Dict[str, Any]]:
+        """Get a specific library by name.
+        
+        Args:
+            name: Name of the library
+            
+        Returns:
+            Library configuration dictionary or None if not found
+        """
+        libraries = self.get_libraries()
+        for library in libraries:
+            if library.get("name") == name:
+                return library
+        return None
+    
    def add_library(self, name: str, url: str, directory: str = "library", branch: str = "main", enabled: bool = True) -> None:
        """Add a new library to the configuration.

        Args:
            name: Unique name for the library
            url: Git repository URL
            directory: Directory within the repo containing templates
            branch: Git branch to use
            enabled: Whether the library is enabled

        Raises:
            ConfigValidationError: If library with the same name already exists or validation fails
        """
        # Validate inputs — all four string fields must be non-empty and
        # within their length budgets before anything is written.
        if not isinstance(name, str) or not name:
            raise ConfigValidationError("Library name must be a non-empty string")

        self._validate_string_length(name, "Library name", max_length=100)

        if not isinstance(url, str) or not url:
            raise ConfigValidationError("Library URL must be a non-empty string")

        self._validate_string_length(url, "Library URL", max_length=500)

        if not isinstance(directory, str) or not directory:
            raise ConfigValidationError("Library directory must be a non-empty string")

        self._validate_string_length(directory, "Library directory", max_length=200)

        if not isinstance(branch, str) or not branch:
            raise ConfigValidationError("Library branch must be a non-empty string")

        self._validate_string_length(branch, "Library branch", max_length=200)

        # Check if library already exists (names are the unique key)
        if self.get_library_by_name(name):
            raise ConfigValidationError(f"Library '{name}' already exists")

        config = self._read_config()

        if "libraries" not in config:
            config["libraries"] = []

        config["libraries"].append({
            "name": name,
            "url": url,
            "branch": branch,
            "directory": directory,
            "enabled": enabled
        })

        self._write_config(config)
        logger.info(f"Added library '{name}'")
+    
+    def remove_library(self, name: str) -> None:
+        """Remove a library from the configuration.
+        
+        Args:
+            name: Name of the library to remove
+            
+        Raises:
+            ConfigError: If library is not found
+        """
+        config = self._read_config()
+        libraries = config.get("libraries", [])
+        
+        # Find and remove the library
+        new_libraries = [lib for lib in libraries if lib.get("name") != name]
+        
+        if len(new_libraries) == len(libraries):
+            raise ConfigError(f"Library '{name}' not found")
+        
+        config["libraries"] = new_libraries
+        self._write_config(config)
+        logger.info(f"Removed library '{name}'")
+    
    def update_library(self, name: str, **kwargs: Any) -> None:
        """Update a library's configuration.

        Only the fields 'url', 'branch', 'directory', and 'enabled' are
        recognized; any other keyword argument is silently ignored.

        Args:
            name: Name of the library to update
            **kwargs: Fields to update (url, branch, directory, enabled)

        Raises:
            ConfigError: If library is not found
            ConfigValidationError: If validation fails
        """
        config = self._read_config()
        libraries = config.get("libraries", [])

        # Find the library
        library_found = False
        for library in libraries:
            if library.get("name") == name:
                library_found = True

                # Update allowed fields. Note: the entry is mutated in place,
                # but nothing is persisted unless every provided field validates
                # (a validation error aborts before _write_config).
                if "url" in kwargs:
                    url = kwargs["url"]
                    if not isinstance(url, str) or not url:
                        raise ConfigValidationError("Library URL must be a non-empty string")
                    self._validate_string_length(url, "Library URL", max_length=500)
                    library["url"] = url

                if "branch" in kwargs:
                    branch = kwargs["branch"]
                    if not isinstance(branch, str) or not branch:
                        raise ConfigValidationError("Library branch must be a non-empty string")
                    self._validate_string_length(branch, "Library branch", max_length=200)
                    library["branch"] = branch

                if "directory" in kwargs:
                    directory = kwargs["directory"]
                    if not isinstance(directory, str) or not directory:
                        raise ConfigValidationError("Library directory must be a non-empty string")
                    self._validate_string_length(directory, "Library directory", max_length=200)
                    library["directory"] = directory

                if "enabled" in kwargs:
                    enabled = kwargs["enabled"]
                    if not isinstance(enabled, bool):
                        raise ConfigValidationError("Library enabled must be a boolean")
                    library["enabled"] = enabled

                break

        if not library_found:
            raise ConfigError(f"Library '{name}' not found")

        config["libraries"] = libraries
        self._write_config(config)
        logger.info(f"Updated library '{name}'")
+    
+    def get_libraries_path(self) -> Path:
+        """Get the path to the libraries directory.
+        
+        Returns:
+            Path to the libraries directory (same directory as config file)
+        """
+        return self.config_path.parent / "libraries"

+ 519 - 0
cli/core/display.py

@@ -0,0 +1,519 @@
+from __future__ import annotations
+
+import logging
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from rich.console import Console
+from rich.table import Table
+from rich.tree import Tree
+
+if TYPE_CHECKING:
+    from .template import Template
+
logger = logging.getLogger(__name__)
# Single shared rich console used by all display helpers in this module.
console = Console()
+
+
+class IconManager:
+    """Centralized icon management system for consistent CLI display.
+    
+    This class provides standardized icons for file types, status indicators,
+    and UI elements. Icons use Nerd Font glyphs for consistent display.
+    
+    Categories:
+        - File types: .yaml, .j2, .json, .md, etc.
+        - Status: success, warning, error, info, skipped
+        - UI elements: folders, config, locks, etc.
+    """
+
+    # File Type Icons
+    FILE_FOLDER = "\uf07b"          # 
+    FILE_DEFAULT = "\uf15b"         # 
+    FILE_YAML = "\uf15c"            # 
+    FILE_JSON = "\ue60b"            # 
+    FILE_MARKDOWN = "\uf48a"        # 
+    FILE_JINJA2 = "\ue235"          # 
+    FILE_DOCKER = "\uf308"          # 
+    FILE_COMPOSE = "\uf308"         # 
+    FILE_SHELL = "\uf489"           # 
+    FILE_PYTHON = "\ue73c"          # 
+    FILE_TEXT = "\uf15c"            # 
+    
+    # Status Indicators
+    STATUS_SUCCESS = "\uf00c"       #  (check)
+    STATUS_ERROR = "\uf00d"         #  (times/x)
+    STATUS_WARNING = "\uf071"       #  (exclamation-triangle)
+    STATUS_INFO = "\uf05a"          #  (info-circle)
+    STATUS_SKIPPED = "\uf05e"       #  (ban/circle-slash)
+    
+    # UI Elements
+    UI_CONFIG = "\ue5fc"            # 
+    UI_LOCK = "\uf084"              # 
+    UI_SETTINGS = "\uf013"          # 
+    UI_ARROW_RIGHT = "\uf061"       #  (arrow-right)
+    UI_BULLET = "\uf111"            #  (circle)
+    
+    @classmethod
+    def get_file_icon(cls, file_path: str | Path) -> str:
+        """Get the appropriate icon for a file based on its extension or name.
+        
+        Args:
+            file_path: Path to the file (can be string or Path object)
+            
+        Returns:
+            Unicode icon character for the file type
+            
+        Examples:
+            >>> IconManager.get_file_icon("config.yaml")
+            '\uf15c'
+            >>> IconManager.get_file_icon("template.j2")
+            '\ue235'
+        """
+        if isinstance(file_path, str):
+            file_path = Path(file_path)
+        
+        file_name = file_path.name.lower()
+        suffix = file_path.suffix.lower()
+        
+        # Check for Docker Compose files
+        compose_names = {
+            "docker-compose.yml", "docker-compose.yaml",
+            "compose.yml", "compose.yaml"
+        }
+        if file_name in compose_names or file_name.startswith("docker-compose"):
+            return cls.FILE_DOCKER
+        
+        # Check by extension
+        extension_map = {
+            ".yaml": cls.FILE_YAML,
+            ".yml": cls.FILE_YAML,
+            ".json": cls.FILE_JSON,
+            ".md": cls.FILE_MARKDOWN,
+            ".j2": cls.FILE_JINJA2,
+            ".sh": cls.FILE_SHELL,
+            ".py": cls.FILE_PYTHON,
+            ".txt": cls.FILE_TEXT,
+        }
+        
+        return extension_map.get(suffix, cls.FILE_DEFAULT)
+    
+    @classmethod
+    def get_status_icon(cls, status: str) -> str:
+        """Get the appropriate icon for a status indicator.
+        
+        Args:
+            status: Status type (success, error, warning, info, skipped)
+            
+        Returns:
+            Unicode icon character for the status
+            
+        Examples:
+            >>> IconManager.get_status_icon("success")
+            '✓'
+            >>> IconManager.get_status_icon("warning")
+            '⚠'
+        """
+        status_map = {
+            "success": cls.STATUS_SUCCESS,
+            "error": cls.STATUS_ERROR,
+            "warning": cls.STATUS_WARNING,
+            "info": cls.STATUS_INFO,
+            "skipped": cls.STATUS_SKIPPED,
+        }
+        return status_map.get(status.lower(), cls.STATUS_INFO)
+    
+    @classmethod
+    def folder(cls) -> str:
+        """Get the folder icon."""
+        return cls.FILE_FOLDER
+    
+    @classmethod
+    def config(cls) -> str:
+        """Get the config icon."""
+        return cls.UI_CONFIG
+    
+    @classmethod
+    def lock(cls) -> str:
+        """Get the lock icon (for sensitive variables)."""
+        return cls.UI_LOCK
+    
+    @classmethod
+    def arrow_right(cls) -> str:
+        """Get the right arrow icon (for showing transitions/changes)."""
+        return cls.UI_ARROW_RIGHT
+
+
+class DisplayManager:
+    """Handles all rich rendering for the CLI."""
+
+    def display_templates_table(
+        self, templates: list, module_name: str, title: str
+    ) -> None:
+        """Display a table of templates.
+        
+        Args:
+            templates: List of Template objects
+            module_name: Name of the module
+            title: Title for the table
+        """
+        if not templates:
+            logger.info(f"No templates found for module '{module_name}'")
+            return
+
+        logger.info(f"Listing {len(templates)} templates for module '{module_name}'")
+        table = Table(title=title)
+        table.add_column("ID", style="bold", no_wrap=True)
+        table.add_column("Name")
+        table.add_column("Tags")
+        table.add_column("Version", no_wrap=True)
+        table.add_column("Library", no_wrap=True)
+
+        for template in templates:
+            name = template.metadata.name or "Unnamed Template"
+            tags_list = template.metadata.tags or []
+            tags = ", ".join(tags_list) if tags_list else "-"
+            version = str(template.metadata.version) if template.metadata.version else ""
+            library = template.metadata.library or ""
+
+            table.add_row(template.id, name, tags, version, library)
+
+        console.print(table)
+
+    def display_template_details(self, template: Template, template_id: str) -> None:
+        """Display template information panel and variables table."""
+        self._display_template_header(template, template_id)
+        self._display_file_tree(template)
+        self._display_variables_table(template)
+
+    def display_section_header(self, title: str, description: str | None) -> None:
+        """Display a section header."""
+        if description:
+            console.print(f"\n[bold cyan]{title}[/bold cyan] [dim]- {description}[/dim]")
+        else:
+            console.print(f"\n[bold cyan]{title}[/bold cyan]")
+        console.print("─" * 40, style="dim")
+
+    def display_validation_error(self, message: str) -> None:
+        """Display a validation error message."""
+        self.display_message('error', message)
+    
+    def display_message(self, level: str, message: str, context: str | None = None) -> None:
+        """Display a message with consistent formatting.
+        
+        Args:
+            level: Message level (error, warning, success, info)
+            message: The message to display
+            context: Optional context information
+        """
+        icon = IconManager.get_status_icon(level)
+        colors = {'error': 'red', 'warning': 'yellow', 'success': 'green', 'info': 'blue'}
+        color = colors.get(level, 'white')
+        
+        # Format message based on context
+        if context:
+            text = f"{level.capitalize()} in {context}: {message}" if level == 'error' or level == 'warning' else f"{context}: {message}"
+        else:
+            text = f"{level.capitalize()}: {message}" if level == 'error' or level == 'warning' else message
+        
+        console.print(f"[{color}]{icon} {text}[/{color}]")
+        
+        # Log appropriately
+        log_message = f"{context}: {message}" if context else message
+        log_methods = {'error': logger.error, 'warning': logger.warning, 'success': logger.info, 'info': logger.info}
+        log_methods.get(level, logger.info)(log_message)
+    
+    def display_error(self, message: str, context: str | None = None) -> None:
+        """Display an error message."""
+        self.display_message('error', message, context)
+    
+    def display_warning(self, message: str, context: str | None = None) -> None:
+        """Display a warning message."""
+        self.display_message('warning', message, context)
+    
+    def display_success(self, message: str, context: str | None = None) -> None:
+        """Display a success message."""
+        self.display_message('success', message, context)
+    
+    def display_info(self, message: str, context: str | None = None) -> None:
+        """Display an informational message."""
+        self.display_message('info', message, context)
+
+    def _display_template_header(self, template: Template, template_id: str) -> None:
+        """Display the header for a template."""
+        template_name = template.metadata.name or "Unnamed Template"
+        version = str(template.metadata.version) if template.metadata.version else "Not specified"
+        description = template.metadata.description or "No description available"
+
+        console.print(
+            f"[bold blue]{template_name} ({template_id} - [cyan]{version}[/cyan])[/bold blue]"
+        )
+        console.print(description)
+
    def _build_file_tree(self, root_label: str, files: list, get_file_info: callable) -> Tree:
        """Build a file tree structure.
        
        Args:
            root_label: Label for root node
            files: List of files to display
            get_file_info: Function that takes a file and returns (path, display_name, color, extra_text)
        
        Returns:
            Tree object ready for display
        """
        file_tree = Tree(root_label)
        # Directory path -> tree node, so shared parent directories are
        # created exactly once and reused by later files.
        tree_nodes = {Path("."): file_tree}
        
        # Sort by relative path so files under the same directory group together.
        for file_item in sorted(files, key=lambda f: get_file_info(f)[0]):
            path, display_name, color, extra_text = get_file_info(file_item)
            parts = path.parts
            current_path = Path(".")
            current_node = file_tree
            
            # Build directory structure (all path components except the leaf)
            for part in parts[:-1]:
                current_path = current_path / part
                if current_path not in tree_nodes:
                    new_node = current_node.add(f"{IconManager.folder()} [white]{part}[/white]")
                    tree_nodes[current_path] = new_node
                current_node = tree_nodes[current_path]
            
            # Add file leaf; icon is chosen from the display name's extension
            icon = IconManager.get_file_icon(display_name)
            file_label = f"{icon} [{color}]{display_name}[/{color}]"
            if extra_text:
                file_label += f" {extra_text}"
            current_node.add(file_label)
        
        return file_tree
+    
+    def _display_file_tree(self, template: Template) -> None:
+        """Display the file structure of a template."""
+        console.print()
+        console.print("[bold blue]Template File Structure:[/bold blue]")
+        
+        def get_template_file_info(template_file):
+            display_name = template_file.output_path.name if hasattr(template_file, 'output_path') else template_file.relative_path.name
+            return (template_file.relative_path, display_name, 'white', None)
+        
+        file_tree = self._build_file_tree(
+            f"{IconManager.folder()} [white]{template.id}[/white]",
+            template.template_files,
+            get_template_file_info
+        )
+        
+        if file_tree.children:
+            console.print(file_tree)
+
    def _display_variables_table(self, template: Template) -> None:
        """Display a table of variables for a template.

        Sections are rendered in declaration order; disabled or unsatisfied
        sections are dimmed, sensitive values are masked, and config-overridden
        defaults are shown as "original -> override".
        """
        if not (template.variables and template.variables.has_sections()):
            return

        console.print()
        console.print("[bold blue]Template Variables:[/bold blue]")

        variables_table = Table(show_header=True, header_style="bold blue")
        variables_table.add_column("Variable", style="white", no_wrap=True)
        variables_table.add_column("Type", style="magenta")
        variables_table.add_column("Default", style="green")
        variables_table.add_column("Description", style="white")

        first_section = True
        for section in template.variables.get_sections().values():
            if not section.variables:
                continue

            # Blank separator row between sections (skipped before the first)
            if not first_section:
                variables_table.add_row("", "", "", "", style="bright_black")
            first_section = False

            # Check if section is enabled AND dependencies are satisfied
            is_enabled = section.is_enabled()
            dependencies_satisfied = template.variables.is_section_satisfied(section.key)
            is_dimmed = not (is_enabled and dependencies_satisfied)

            # Only show (disabled) if section has no dependencies (dependencies make it obvious)
            disabled_text = " (disabled)" if (is_dimmed and not section.needs) else ""
            
            # For disabled sections, make entire heading bold and dim (don't include colored markup inside)
            if is_dimmed:
                # Build text without internal markup, then wrap entire thing in bold bright_black (dimmed appearance)
                required_part = " (required)" if section.required else ""
                needs_part = ""
                if section.needs:
                    needs_list = ", ".join(section.needs)
                    needs_part = f" (needs: {needs_list})"
                header_text = f"[bold bright_black]{section.title}{required_part}{needs_part}{disabled_text}[/bold bright_black]"
            else:
                # For enabled sections, include the colored markup
                required_text = " [yellow](required)[/yellow]" if section.required else ""
                needs_text = ""
                if section.needs:
                    needs_list = ", ".join(section.needs)
                    needs_text = f" [dim](needs: {needs_list})[/dim]"
                header_text = f"[bold]{section.title}{required_text}{needs_text}{disabled_text}[/bold]"
            variables_table.add_row(header_text, "", "", "")

            for var_name, variable in section.variables.items():
                row_style = "bright_black" if is_dimmed else None
                
                # Build default value display
                # If origin is 'config' and original value differs from current, show: original → config_value
                if (variable.origin == "config" and 
                    hasattr(variable, '_original_stored') and
                    variable.original_value != variable.value):
                    # Format original value (use same display logic, but shorter)
                    if variable.sensitive:
                        orig_display = "********"
                    elif variable.original_value is None or variable.original_value == "":
                        orig_display = "[dim](none)[/dim]"
                    else:
                        orig_val_str = str(variable.original_value)
                        orig_display = orig_val_str[:15] + "..." if len(orig_val_str) > 15 else orig_val_str
                    
                    # Get current (config) value display (without showing "(none)" since we have the arrow)
                    config_display = variable.get_display_value(mask_sensitive=True, max_length=15, show_none=False)
                    if not config_display:  # If still empty after show_none=False, show actual value
                        config_display = str(variable.value) if variable.value else "(empty)"
                    
                    # Highlight the arrow and config value in bold yellow to show it's a custom override
                    default_val = f"{orig_display} [bold yellow]{IconManager.arrow_right()} {config_display}[/bold yellow]"
                else:
                    # Use variable's native get_display_value() method (shows "(none)" for empty)
                    default_val = variable.get_display_value(mask_sensitive=True, max_length=30, show_none=True)
                
                # Add lock icon for sensitive variables
                sensitive_icon = f" {IconManager.lock()}" if variable.sensitive else ""
                var_display = f"  {var_name}{sensitive_icon}"

                variables_table.add_row(
                    var_display,
                    variable.type or "str",
                    default_val,
                    variable.description or "",
                    style=row_style,
                )

        console.print(variables_table)
+
+    def display_file_generation_confirmation(
+        self, 
+        output_dir: Path, 
+        files: dict[str, str], 
+        existing_files: list[Path] | None = None
+    ) -> None:
+        """Display files to be generated with confirmation prompt."""
+        console.print()
+        console.print("[bold]Files to be generated:[/bold]")
+        
+        def get_file_generation_info(file_path_str):
+            file_path = Path(file_path_str)
+            file_name = file_path.parts[-1] if file_path.parts else file_path.name
+            full_path = output_dir / file_path
+            
+            if existing_files and full_path in existing_files:
+                return (file_path, file_name, 'yellow', '[red](will overwrite)[/red]')
+            else:
+                return (file_path, file_name, 'green', None)
+        
+        file_tree = self._build_file_tree(
+            f"{IconManager.folder()} [cyan]{output_dir.resolve()}[/cyan]",
+            files.keys(),
+            get_file_generation_info
+        )
+        
+        console.print(file_tree)
+        console.print()
+
+    def display_config_tree(self, spec: dict, module_name: str, show_all: bool = False) -> None:
+        """Display configuration spec as a tree view.
+        
+        Args:
+            spec: The configuration spec dictionary
+            module_name: Name of the module
+            show_all: If True, show all details including descriptions
+        """
+        if not spec:
+            console.print(f"[yellow]No configuration found for module '{module_name}'[/yellow]")
+            return
+
+        # Create root tree node
+        tree = Tree(f"[bold blue]{IconManager.config()} {str.capitalize(module_name)} Configuration[/bold blue]")
+
+        for section_name, section_data in spec.items():
+            if not isinstance(section_data, dict):
+                continue
+
+            # Determine if this is a section with variables
+            # Guard against None from empty YAML sections
+            section_vars = section_data.get("vars") or {}
+            section_desc = section_data.get("description", "")
+            section_required = section_data.get("required", False)
+            section_toggle = section_data.get("toggle", None)
+            section_needs = section_data.get("needs", None)
+
+            # Build section label
+            section_label = f"[cyan]{section_name}[/cyan]"
+            if section_required:
+                section_label += " [yellow](required)[/yellow]"
+            if section_toggle:
+                section_label += f" [dim](toggle: {section_toggle})[/dim]"
+            if section_needs:
+                needs_str = ", ".join(section_needs) if isinstance(section_needs, list) else section_needs
+                section_label += f" [dim](needs: {needs_str})[/dim]"
+            
+            if show_all and section_desc:
+                section_label += f"\n  [dim]{section_desc}[/dim]"
+
+            section_node = tree.add(section_label)
+
+            # Add variables
+            if section_vars:
+                for var_name, var_data in section_vars.items():
+                    if isinstance(var_data, dict):
+                        var_type = var_data.get("type", "string")
+                        var_default = var_data.get("default", "")
+                        var_desc = var_data.get("description", "")
+                        var_sensitive = var_data.get("sensitive", False)
+
+                        # Build variable label
+                        var_label = f"[green]{var_name}[/green] [dim]({var_type})[/dim]"
+                        
+                        if var_default is not None and var_default != "":
+                            display_val = "********" if var_sensitive else str(var_default)
+                            if not var_sensitive and len(display_val) > 30:
+                                display_val = display_val[:27] + "..."
+                            var_label += f" = [yellow]{display_val}[/yellow]"
+                        
+                        if show_all and var_desc:
+                            var_label += f"\n    [dim]{var_desc}[/dim]"
+                        
+                        section_node.add(var_label)
+                    else:
+                        # Simple key-value pair
+                        section_node.add(f"[green]{var_name}[/green] = [yellow]{var_data}[/yellow]")
+
+        console.print(tree)
+
+    def display_next_steps(self, next_steps: str, variable_values: dict) -> None:
+        """Display next steps after template generation, rendering them as a Jinja2 template.
+        
+        Args:
+            next_steps: The next_steps string from template metadata (may contain Jinja2 syntax)
+            variable_values: Dictionary of variable values to use for rendering
+        """
+        if not next_steps:
+            return
+        
+        console.print("\n[bold cyan]Next Steps:[/bold cyan]")
+        
+        try:
+            from jinja2 import Template as Jinja2Template
+            next_steps_template = Jinja2Template(next_steps)
+            rendered_next_steps = next_steps_template.render(variable_values)
+            console.print(rendered_next_steps)
+        except Exception as e:
+            logger.warning(f"Failed to render next_steps as template: {e}")
+            # Fallback to plain text if rendering fails
+            console.print(next_steps)

+ 133 - 0
cli/core/exceptions.py

@@ -0,0 +1,133 @@
+"""Custom exception classes for the boilerplates CLI.
+
+This module defines specific exception types for better error handling
+and diagnostics throughout the application.
+"""
+
+from typing import Optional, List
+
+
class BoilerplatesError(Exception):
    """Base exception for all boilerplates CLI errors."""


class ConfigError(BoilerplatesError):
    """Raised when configuration operations fail."""


class ConfigValidationError(ConfigError):
    """Raised when configuration validation fails."""


class TemplateError(BoilerplatesError):
    """Base exception for template-related errors."""


class TemplateNotFoundError(TemplateError):
    """Raised when a template cannot be found."""

    def __init__(self, template_id: str, module_name: Optional[str] = None):
        # Keep the lookup details on the instance so callers can inspect them.
        self.template_id = template_id
        self.module_name = module_name
        msg = f"Template '{template_id}' not found"
        if module_name:
            msg += f" in module '{module_name}'"
        super().__init__(msg)


class TemplateLoadError(TemplateError):
    """Raised when a template fails to load."""


class TemplateSyntaxError(TemplateError):
    """Raised when a Jinja2 template has syntax errors."""

    def __init__(self, template_id: str, errors: List[str]):
        self.template_id = template_id
        self.errors = errors
        msg = f"Jinja2 syntax errors in template '{template_id}':\n" + "\n".join(errors)
        super().__init__(msg)


class TemplateValidationError(TemplateError):
    """Raised when template validation fails."""


class TemplateRenderError(TemplateError):
    """Raised when template rendering fails."""


class VariableError(BoilerplatesError):
    """Base exception for variable-related errors."""


class VariableValidationError(VariableError):
    """Raised when variable validation fails."""

    def __init__(self, variable_name: str, message: str):
        self.variable_name = variable_name
        msg = f"Validation error for variable '{variable_name}': {message}"
        super().__init__(msg)


class VariableTypeError(VariableError):
    """Raised when a variable has an incorrect type."""

    def __init__(self, variable_name: str, expected_type: str, actual_type: str):
        self.variable_name = variable_name
        self.expected_type = expected_type
        self.actual_type = actual_type
        msg = f"Type error for variable '{variable_name}': expected {expected_type}, got {actual_type}"
        super().__init__(msg)


class LibraryError(BoilerplatesError):
    """Raised when library operations fail."""


class ModuleError(BoilerplatesError):
    """Raised when module operations fail."""


# NOTE: this class deliberately reuses the name of Python's built-in
# ModuleNotFoundError and shadows it in any module importing it unqualified.
# Callers that also need the built-in should alias one of the two.
class ModuleNotFoundError(ModuleError):
    """Raised when a module cannot be found."""

    def __init__(self, module_name: str):
        self.module_name = module_name
        msg = f"Module '{module_name}' not found"
        super().__init__(msg)


class ModuleLoadError(ModuleError):
    """Raised when a module fails to load."""


class FileOperationError(BoilerplatesError):
    """Raised when file operations fail."""


class RenderError(BoilerplatesError):
    """Raised when rendering operations fail."""


class YAMLParseError(BoilerplatesError):
    """Raised when YAML parsing fails."""

    def __init__(self, file_path: str, original_error: Exception):
        self.file_path = file_path
        # Preserve the underlying parser error for diagnostics.
        self.original_error = original_error
        msg = f"Failed to parse YAML file '{file_path}': {original_error}"
        super().__init__(msg)

+ 238 - 0
cli/core/library.py

@@ -0,0 +1,238 @@
+from __future__ import annotations
+
+from pathlib import Path
+import logging
+from typing import Optional
+import yaml
+
+from .exceptions import LibraryError, TemplateNotFoundError, YAMLParseError
+
+logger = logging.getLogger(__name__)
+
+
class Library:
  """Represents a single library with a specific path."""
  
  def __init__(self, name: str, path: Path, priority: int = 0) -> None:
    """Initialize a library instance.
    
    Args:
      name: Display name for the library
      path: Path to the library directory
      priority: Priority for library lookup (higher = checked first)
    """
    self.name = name
    self.path = path
    self.priority = priority  # Higher priority = checked first
  
  def _is_template_draft(self, template_path: Path) -> bool:
    """Check if a template is marked as draft.
    
    Reads the first YAML document of template.yaml/template.yml and returns
    its metadata.draft flag. Missing or unparsable files are treated as
    non-draft (False) so a broken file cannot hide a template silently.
    """
    # Find the template file (either extension is accepted)
    for filename in ("template.yaml", "template.yml"):
      template_file = template_path / filename
      if template_file.exists():
        break
    else:
      return False
    
    try:
      with open(template_file, "r", encoding="utf-8") as f:
        docs = [doc for doc in yaml.safe_load_all(f) if doc]
        return docs[0].get("metadata", {}).get("draft", False) if docs else False
    except (yaml.YAMLError, IOError, OSError) as e:
      logger.warning(f"Error checking draft status for {template_path}: {e}")
      return False

  def find_by_id(self, module_name: str, template_id: str) -> tuple[Path, str]:
    """Find a template by its ID in this library.
    
    Args:
        module_name: The module name (e.g., 'compose', 'terraform')
        template_id: The template ID to find
    
    Returns:
        Tuple of (template directory path, library name) if found
        
    Raises:
        TemplateNotFoundError: If the template ID is not found in this
            library or the template is marked as draft
    """
    logger.debug(f"Looking for template '{template_id}' in module '{module_name}' in library '{self.name}'")
    
    # Build the path to the specific template directory
    template_path = self.path / module_name / template_id
    
    # Check if template directory exists with a template file
    has_template = template_path.is_dir() and any(
      (template_path / f).exists() for f in ("template.yaml", "template.yml")
    )
    
    if not has_template or self._is_template_draft(template_path):
      raise TemplateNotFoundError(template_id, module_name)
    
    logger.debug(f"Found template '{template_id}' at: {template_path}")
    return template_path, self.name


  def find(self, module_name: str, sort_results: bool = False) -> list[tuple[Path, str]]:
    """Find templates in this library for a specific module.
    
    Excludes templates marked as draft.
    
    Args:
        module_name: The module name (e.g., 'compose', 'terraform')
        sort_results: Whether to return results sorted alphabetically
    
    Returns:
        List of (template directory path, library name) tuples,
        excluding drafts
        
    Raises:
        LibraryError: If the module directory is not found in this library,
            or it cannot be read due to missing permissions
    """
    logger.debug(f"Looking for templates in module '{module_name}' in library '{self.name}'")
    
    # Build the path to the module directory
    module_path = self.path / module_name
    
    # Check if the module directory exists
    if not module_path.is_dir():
      raise LibraryError(f"Module '{module_name}' not found in library '{self.name}'")
    
    # Get non-draft templates
    template_dirs = []
    try:
      for item in module_path.iterdir():
        has_template = item.is_dir() and any((item / f).exists() for f in ("template.yaml", "template.yml"))
        if has_template and not self._is_template_draft(item):
          template_dirs.append((item, self.name))
        elif has_template:
          logger.debug(f"Skipping draft template: {item.name}")
    except PermissionError as e:
      # Chain the OS-level cause so it stays visible in tracebacks.
      raise LibraryError(f"Permission denied accessing module '{module_name}' in library '{self.name}': {e}") from e
    
    # Sort if requested
    if sort_results:
      template_dirs.sort(key=lambda x: x[0].name.lower())
    
    logger.debug(f"Found {len(template_dirs)} templates in module '{module_name}'")
    return template_dirs
+
class LibraryManager:
  """Manages multiple libraries and provides methods to find templates."""
  
  def __init__(self) -> None:
    """Initialize LibraryManager with git-based libraries from config."""
    # Imported lazily to avoid a circular import between core modules.
    from .config import ConfigManager
    
    self.config = ConfigManager()
    self.libraries = self._load_libraries_from_config()
  
  def _load_libraries_from_config(self) -> list[Library]:
    """Load libraries from configuration.
    
    Disabled entries are skipped; entries whose directory is missing on disk
    are skipped with a warning. Earlier entries in the configured list get a
    higher priority number.
    
    Returns:
        List of Library instances
    """
    libraries = []
    libraries_path = self.config.get_libraries_path()
    
    # Get library configurations from config
    library_configs = self.config.get_libraries()
    
    for i, lib_config in enumerate(library_configs):
      # Skip disabled libraries
      if not lib_config.get("enabled", True):
        logger.debug(f"Skipping disabled library: {lib_config.get('name')}")
        continue
      
      name = lib_config.get("name")
      directory = lib_config.get("directory", ".")
      
      # Build path to library: ~/.config/boilerplates/libraries/{name}/{directory}/
      # For sparse-checkout, files remain in the specified directory
      library_base = libraries_path / name
      if directory and directory != ".":
        library_path = library_base / directory
      else:
        library_path = library_base
      
      # Check if library path exists
      if not library_path.exists():
        logger.warning(
          f"Library '{name}' not found at {library_path}. "
          f"Run 'repo update' to sync libraries."
        )
        continue
      
      # Create Library instance with priority based on order (first = highest priority)
      priority = len(library_configs) - i
      libraries.append(Library(name=name, path=library_path, priority=priority))
      logger.debug(f"Loaded library '{name}' from {library_path} with priority {priority}")
    
    if not libraries:
      logger.warning("No libraries loaded. Run 'repo update' to sync libraries.")
    
    return libraries

  def find_by_id(self, module_name: str, template_id: str) -> Optional[tuple[Path, str]]:
    """Find a template by its ID across all libraries.
    
    Libraries are consulted in descending priority order; the first match wins.
    
    Args:
        module_name: The module name (e.g., 'compose', 'terraform')
        template_id: The template ID to find
    
    Returns:
        Tuple of (template directory path, library name) if found,
        None otherwise
    """
    logger.debug(f"Searching for template '{template_id}' in module '{module_name}' across all libraries")
    
    for library in sorted(self.libraries, key=lambda x: x.priority, reverse=True):
      try:
        template_path, lib_name = library.find_by_id(module_name, template_id)
        logger.debug(f"Found template '{template_id}' in library '{library.name}'")
        return template_path, lib_name
      except TemplateNotFoundError:
        # Continue searching in next library
        continue
    
    logger.debug(f"Template '{template_id}' not found in any library")
    return None
  
  def find(self, module_name: str, sort_results: bool = False) -> list[tuple[Path, str]]:
    """Find templates across all libraries for a specific module.
    
    Duplicate template names are resolved in favor of the higher-priority
    library (the first one seen during the priority-ordered scan).
    
    Args:
        module_name: The module name (e.g., 'compose', 'terraform')
        sort_results: Whether to return results sorted alphabetically
    
    Returns:
        List of (template directory path, library name) tuples from all
        libraries
    """
    logger.debug(f"Searching for templates in module '{module_name}' across all libraries")
    
    all_templates = []
    
    for library in sorted(self.libraries, key=lambda x: x.priority, reverse=True):
      try:
        templates = library.find(module_name, sort_results=False)  # Sort at the end
        all_templates.extend(templates)
        logger.debug(f"Found {len(templates)} templates in library '{library.name}'")
      except LibraryError:
        # Module not found in this library, continue with next
        logger.debug(f"Module '{module_name}' not found in library '{library.name}'")
        continue
    
    # Remove duplicates based on template name (directory name)
    seen_names = set()
    unique_templates = []
    for template in all_templates:
      name, library_name = template
      if name.name not in seen_names:
        unique_templates.append((name, library_name))
        seen_names.add(name.name)
    
    # Sort if requested
    if sort_results:
      unique_templates.sort(key=lambda x: x[0].name.lower())
    
    logger.debug(f"Found {len(unique_templates)} unique templates total")
    return unique_templates

+ 908 - 0
cli/core/module.py

@@ -0,0 +1,908 @@
+from __future__ import annotations
+
+import logging
+import sys
+from abc import ABC
+from pathlib import Path
+from typing import Any, Optional, List, Dict, Tuple
+
+from rich.console import Console
+from rich.panel import Panel
+from rich.prompt import Confirm
+from typer import Argument, Context, Option, Typer, Exit
+
+from .display import DisplayManager, IconManager
+from .library import LibraryManager
+from .prompt import PromptHandler
+from .template import Template
+
+logger = logging.getLogger(__name__)
+console = Console()
+console_err = Console(stderr=True)
+
+
def parse_var_inputs(var_options: List[str], extra_args: List[str]) -> Dict[str, Any]:
  """Parse variable inputs from --var options and extra args.
  
  Supports formats:
    --var KEY=VALUE
    --var KEY VALUE
    
  Args:
    var_options: List of variable options from CLI
    extra_args: Additional arguments that may contain values; consumed
      (popped from the front) when a KEY VALUE pair is matched
    
  Returns:
    Dictionary of parsed variables
  """
  parsed: Dict[str, Any] = {}
  
  for option in var_options:
    key, sep, value = option.partition('=')
    if sep:
      # --var KEY=VALUE form: everything after the first '=' is the value.
      parsed[key] = value
    elif extra_args:
      # --var KEY VALUE form: take the next positional argument as the value.
      parsed[option] = extra_args.pop(0)
    else:
      logger.warning(f"No value provided for variable '{option}'")
  
  return parsed
+
+class Module(ABC):
+  """Streamlined base module that auto-detects variables from templates."""
+
+  def __init__(self) -> None:
+    if not all([self.name, self.description]):
+      raise ValueError(
+        f"Module {self.__class__.__name__} must define name and description"
+      )
+    
+    logger.info(f"Initializing module '{self.name}'")
+    logger.debug(f"Module '{self.name}' configuration: description='{self.description}'")
+    self.libraries = LibraryManager()
+    self.display = DisplayManager()
+
+  def list(
+    self,
+    raw: bool = Option(False, "--raw", help="Output raw list format instead of rich table")
+  ) -> list[Template]:
+    """List all templates."""
+    logger.debug(f"Listing templates for module '{self.name}'")
+    templates = []
+
+    entries = self.libraries.find(self.name, sort_results=True)
+    for template_dir, library_name in entries:
+      try:
+        template = Template(template_dir, library_name=library_name)
+        templates.append(template)
+      except Exception as exc:
+        logger.error(f"Failed to load template from {template_dir}: {exc}")
+        continue
+    
+    filtered_templates = templates
+    
+    if filtered_templates:
+      if raw:
+        # Output raw format (tab-separated values for easy filtering with awk/sed/cut)
+        # Format: ID\tNAME\tTAGS\tVERSION\tLIBRARY
+        for template in filtered_templates:
+          name = template.metadata.name or "Unnamed Template"
+          tags_list = template.metadata.tags or []
+          tags = ",".join(tags_list) if tags_list else "-"
+          version = str(template.metadata.version) if template.metadata.version else "-"
+          library = template.metadata.library or "-"
+          print(f"{template.id}\t{name}\t{tags}\t{version}\t{library}")
+      else:
+        # Output rich table format
+        self.display.display_templates_table(
+          filtered_templates,
+          self.name,
+          f"{self.name.capitalize()} templates"
+        )
+    else:
+      logger.info(f"No templates found for module '{self.name}'")
+
+    return filtered_templates
+
+  def search(
+    self,
+    query: str = Argument(..., help="Search string to filter templates by ID")
+  ) -> list[Template]:
+    """Search for templates by ID containing the search string."""
+    logger.debug(f"Searching templates for module '{self.name}' with query='{query}'")
+    templates = []
+
+    entries = self.libraries.find(self.name, sort_results=True)
+    for template_dir, library_name in entries:
+      try:
+        template = Template(template_dir, library_name=library_name)
+        templates.append(template)
+      except Exception as exc:
+        logger.error(f"Failed to load template from {template_dir}: {exc}")
+        continue
+    
+    # Apply search filtering
+    filtered_templates = [t for t in templates if query.lower() in t.id.lower()]
+    
+    if filtered_templates:
+      logger.info(f"Found {len(filtered_templates)} templates matching '{query}' for module '{self.name}'")
+      self.display.display_templates_table(
+        filtered_templates,
+        self.name,
+        f"{self.name.capitalize()} templates matching '{query}'"
+      )
+    else:
+      logger.info(f"No templates found matching '{query}' for module '{self.name}'")
+      console.print(f"[yellow]No templates found matching '{query}' for module '{self.name}'[/yellow]")
+
+    return filtered_templates
+
+
  def show(
    self,
    id: str,
  ) -> None:
    """Show template details.

    Loads the template, overlays the user's configured defaults so the
    display reflects the values generation would actually use, then renders
    the detail view.

    Args:
      id: Template ID to look up within this module.
    """
    logger.debug(f"Showing template '{id}' from module '{self.name}'")
    template = self._load_template_by_id(id)

    if not template:
      self.display.display_error(f"Template '{id}' not found", context=f"module '{self.name}'")
      return
    
    # Apply config defaults (same as in generate)
    # This ensures the display shows the actual defaults that will be used
    if template.variables:
      # Imported lazily; presumably avoids a circular import — TODO confirm.
      from .config import ConfigManager
      config = ConfigManager()
      config_defaults = config.get_defaults(self.name)
      
      if config_defaults:
        logger.debug(f"Loading config defaults for module '{self.name}'")
        # Apply config defaults (this respects the variable types and validation)
        successful = template.variables.apply_defaults(config_defaults, "config")
        if successful:
          logger.debug(f"Applied config defaults for: {', '.join(successful)}")
      
      # Re-sort sections after applying config (toggle values may have changed)
      template.variables.sort_sections()
    
    self._display_template_details(template, id)
+
+  def _apply_variable_defaults(self, template: Template) -> None:
+    """Apply config defaults and CLI overrides to template variables.
+    
+    Args:
+        template: Template instance with variables to configure
+    """
+    if not template.variables:
+      return
+    
+    from .config import ConfigManager
+    config = ConfigManager()
+    config_defaults = config.get_defaults(self.name)
+    
+    if config_defaults:
+      logger.info(f"Loading config defaults for module '{self.name}'")
+      successful = template.variables.apply_defaults(config_defaults, "config")
+      if successful:
+        logger.debug(f"Applied config defaults for: {', '.join(successful)}")
+
+  def _apply_cli_overrides(self, template: Template, var: Optional[List[str]], ctx: Context) -> None:
+    """Apply CLI variable overrides to template.
+    
+    Args:
+        template: Template instance to apply overrides to
+        var: List of variable override strings from --var flags
+        ctx: Typer context containing extra args
+    """
+    if not template.variables:
+      return
+    
+    extra_args = list(ctx.args) if ctx and hasattr(ctx, "args") else []
+    cli_overrides = parse_var_inputs(var or [], extra_args)
+    
+    if cli_overrides:
+      logger.info(f"Received {len(cli_overrides)} variable overrides from CLI")
+      successful_overrides = template.variables.apply_defaults(cli_overrides, "cli")
+      if successful_overrides:
+        logger.debug(f"Applied CLI overrides for: {', '.join(successful_overrides)}")
+
+  def _collect_variable_values(self, template: Template, interactive: bool) -> Dict[str, Any]:
+    """Collect variable values from user prompts and template defaults.
+    
+    Args:
+        template: Template instance with variables
+        interactive: Whether to prompt user for values interactively
+        
+    Returns:
+        Dictionary of variable names to values
+    """
+    variable_values = {}
+    
+    # Collect values interactively if enabled
+    if interactive and template.variables:
+      prompt_handler = PromptHandler()
+      collected_values = prompt_handler.collect_variables(template.variables)
+      if collected_values:
+        variable_values.update(collected_values)
+        logger.info(f"Collected {len(collected_values)} variable values from user input")
+    
+    # Add satisfied variable values (respects dependencies and toggles)
+    if template.variables:
+      variable_values.update(template.variables.get_satisfied_values())
+    
+    return variable_values
+  def _check_output_directory(self, output_dir: Path, rendered_files: Dict[str, str], 
+                              interactive: bool) -> Optional[List[Path]]:
+    """Check output directory for conflicts and get user confirmation if needed.
+    
+    Args:
+        output_dir: Directory where files will be written
+        rendered_files: Dictionary of file paths to rendered content
+        interactive: Whether to prompt user for confirmation
+        
+    Returns:
+        List of existing files that will be overwritten, or None to cancel
+    """
+    dir_exists = output_dir.exists()
+    dir_not_empty = dir_exists and any(output_dir.iterdir())
+    
+    # Check which files already exist
+    existing_files = []
+    if dir_exists:
+      for file_path in rendered_files.keys():
+        full_path = output_dir / file_path
+        if full_path.exists():
+          existing_files.append(full_path)
+    
+    # Warn if directory is not empty
+    if dir_not_empty:
+      if interactive:
+        console.print(f"\n[yellow]{IconManager.get_status_icon('warning')} Warning: Directory '{output_dir}' is not empty.[/yellow]")
+        if existing_files:
+          console.print(f"[yellow]  {len(existing_files)} file(s) will be overwritten.[/yellow]")
+        
+        if not Confirm.ask(f"Continue and potentially overwrite files in '{output_dir}'?", default=False):
+          console.print("[yellow]Generation cancelled.[/yellow]")
+          return None
+      else:
+        # Non-interactive mode: show warning but continue
+        logger.warning(f"Directory '{output_dir}' is not empty")
+        if existing_files:
+          logger.warning(f"{len(existing_files)} file(s) will be overwritten")
+    
+    return existing_files
+
+  def _get_generation_confirmation(self, output_dir: Path, rendered_files: Dict[str, str], 
+                                    existing_files: Optional[List[Path]], dir_not_empty: bool, 
+                                    dry_run: bool, interactive: bool) -> bool:
+    """Display file generation confirmation and get user approval.
+    
+    Args:
+        output_dir: Output directory path
+        rendered_files: Dictionary of file paths to content
+        existing_files: List of existing files that will be overwritten
+        dir_not_empty: Whether output directory already contains files
+        dry_run: Whether this is a dry run
+        interactive: Whether to prompt for confirmation
+        
+    Returns:
+        True if user confirms generation, False to cancel
+    """
+    if not interactive:
+      return True
+    
+    self.display.display_file_generation_confirmation(
+      output_dir, 
+      rendered_files, 
+      existing_files if existing_files else None
+    )
+    
+    # Final confirmation (only if we didn't already ask about overwriting)
+    if not dir_not_empty and not dry_run:
+      if not Confirm.ask("Generate these files?", default=True):
+        console.print("[yellow]Generation cancelled.[/yellow]")
+        return False
+    
+    return True
+
+  def _execute_dry_run(self, id: str, output_dir: Path, rendered_files: Dict[str, str], show_files: bool) -> None:
+    """Execute dry run mode with comprehensive simulation.
+    
+    Simulates all filesystem operations that would occur during actual generation,
+    including directory creation, file writing, and permission checks.
+    
+    Args:
+        id: Template ID
+        output_dir: Directory where files would be written
+        rendered_files: Dictionary of file paths to rendered content
+        show_files: Whether to display file contents
+    """
+    import os
+    from rich.table import Table
+    
+    console.print()
+    console.print("[bold cyan]Dry Run Mode - Simulating File Generation[/bold cyan]")
+    console.print()
+    
+    # Simulate directory creation
+    console.print(f"[bold]{IconManager.folder()} Directory Operations:[/bold]")
+    
+    # Check if output directory exists
+    if output_dir.exists():
+      console.print(f"  [green]{IconManager.get_status_icon('success')}[/green] Output directory exists: [cyan]{output_dir}[/cyan]")
+      # Check if we have write permissions
+      if os.access(output_dir, os.W_OK):
+        console.print(f"  [green]{IconManager.get_status_icon('success')}[/green] Write permission verified")
+      else:
+        console.print(f"  [yellow]{IconManager.get_status_icon('warning')}[/yellow] Write permission may be denied")
+    else:
+      console.print(f"  [dim]{IconManager.arrow_right()}[/dim] Would create output directory: [cyan]{output_dir}[/cyan]")
+      # Check if parent directory exists and is writable
+      parent = output_dir.parent
+      if parent.exists() and os.access(parent, os.W_OK):
+        console.print(f"  [green]{IconManager.get_status_icon('success')}[/green] Parent directory writable")
+      else:
+        console.print(f"  [yellow]{IconManager.get_status_icon('warning')}[/yellow] Parent directory may not be writable")
+    
+    # Collect unique subdirectories that would be created
+    subdirs = set()
+    for file_path in rendered_files.keys():
+      parts = Path(file_path).parts
+      for i in range(1, len(parts)):
+        subdirs.add(Path(*parts[:i]))
+    
+    if subdirs:
+      console.print(f"  [dim]{IconManager.arrow_right()}[/dim] Would create {len(subdirs)} subdirectory(ies)")
+      for subdir in sorted(subdirs):
+        console.print(f"    [dim]{IconManager.folder()}[/dim] {subdir}/")
+    
+    console.print()
+    
+    # Display file operations in a table
+    console.print(f"[bold]{IconManager.get_file_icon('file.txt')} File Operations:[/bold]")
+    
+    table = Table(show_header=True, header_style="bold cyan", box=None, padding=(0, 1))
+    table.add_column("File", style="white", no_wrap=False)
+    table.add_column("Size", justify="right", style="dim")
+    table.add_column("Status", style="yellow")
+    
+    total_size = 0
+    new_files = 0
+    overwrite_files = 0
+    
+    for file_path, content in sorted(rendered_files.items()):
+      full_path = output_dir / file_path
+      file_size = len(content.encode('utf-8'))
+      total_size += file_size
+      
+      # Determine status
+      if full_path.exists():
+        status = "Overwrite"
+        overwrite_files += 1
+      else:
+        status = "Create"
+        new_files += 1
+      
+      # Format size
+      if file_size < 1024:
+        size_str = f"{file_size}B"
+      elif file_size < 1024 * 1024:
+        size_str = f"{file_size / 1024:.1f}KB"
+      else:
+        size_str = f"{file_size / (1024 * 1024):.1f}MB"
+      
+      table.add_row(str(file_path), size_str, status)
+    
+    console.print(table)
+    console.print()
+    
+    # Summary statistics
+    console.print(f"[bold]{IconManager.get_status_icon('info')} Summary:[/bold]")
+    console.print(f"  Total files: {len(rendered_files)}")
+    console.print(f"  New files: {new_files}")
+    console.print(f"  Files to overwrite: {overwrite_files}")
+    
+    if total_size < 1024:
+      size_str = f"{total_size}B"
+    elif total_size < 1024 * 1024:
+      size_str = f"{total_size / 1024:.1f}KB"
+    else:
+      size_str = f"{total_size / (1024 * 1024):.1f}MB"
+    console.print(f"  Total size: {size_str}")
+    console.print()
+    
+    # Show file contents if requested
+    if show_files:
+      console.print("[bold cyan]Generated File Contents:[/bold cyan]")
+      console.print()
+      for file_path, content in sorted(rendered_files.items()):
+        console.print(f"[cyan]File:[/cyan] {file_path}")
+        print(f"{'─'*80}")
+        print(content)
+        print()  # Add blank line after content
+      console.print()
+    
+    console.print(f"[yellow]{IconManager.get_status_icon('success')} Dry run complete - no files were written[/yellow]")
+    console.print(f"[dim]Files would have been generated in '{output_dir}'[/dim]")
+    logger.info(f"Dry run completed for template '{id}' - {len(rendered_files)} files, {total_size} bytes")
+
+  def _write_generated_files(self, output_dir: Path, rendered_files: Dict[str, str]) -> None:
+    """Write rendered files to the output directory.
+    
+    Args:
+        output_dir: Directory to write files to
+        rendered_files: Dictionary of file paths to rendered content
+    """
+    output_dir.mkdir(parents=True, exist_ok=True)
+    
+    for file_path, content in rendered_files.items():
+      full_path = output_dir / file_path
+      # Create any intermediate subdirectories implied by the relative path.
+      full_path.parent.mkdir(parents=True, exist_ok=True)
+      # NOTE(review): existing files are overwritten silently here; the caller
+      # (generate) is expected to have obtained user confirmation beforehand.
+      with open(full_path, 'w', encoding='utf-8') as f:
+        f.write(content)
+      console.print(f"[green]Generated file: {file_path}[/green]")
+    
+    console.print(f"\n[green]{IconManager.get_status_icon('success')} Template generated successfully in '{output_dir}'[/green]")
+    logger.info(f"Template written to directory: {output_dir}")
+
+  def generate(
+    self,
+    id: str = Argument(..., help="Template ID"),
+    directory: Optional[str] = Argument(None, help="Output directory (defaults to template ID)"),
+    interactive: bool = Option(True, "--interactive/--no-interactive", "-i/-n", help="Enable interactive prompting for variables"),
+    var: Optional[list[str]] = Option(None, "--var", "-v", help="Variable override (repeatable). Supports: KEY=VALUE or KEY VALUE"),
+    dry_run: bool = Option(False, "--dry-run", help="Preview template generation without writing files"),
+    show_files: bool = Option(False, "--show-files", help="Display generated file contents in plain text (use with --dry-run)"),
+    ctx: Context = None,
+  ) -> None:
+    """Generate from template.
+    
+    Variable precedence chain (lowest to highest):
+    1. Module spec (defined in cli/modules/*.py)
+    2. Template spec (from template.yaml)
+    3. Config defaults (from ~/.config/boilerplates/config.yaml)
+    4. CLI overrides (--var flags)
+    
+    Examples:
+        # Generate to directory named after template
+        cli compose generate traefik
+        
+        # Generate to custom directory
+        cli compose generate traefik my-proxy
+        
+        # Generate with variables
+        cli compose generate traefik --var traefik_enabled=false
+        
+        # Preview without writing files (dry run)
+        cli compose generate traefik --dry-run
+        
+        # Preview and show generated file contents
+        cli compose generate traefik --dry-run --show-files
+    """
+    # NOTE(review): parameter name `id` shadows the builtin; kept because it is
+    # part of the CLI interface (positional argument name).
+    logger.info(f"Starting generation for template '{id}' from module '{self.name}'")
+    template = self._load_template_by_id(id)
+
+    # Apply defaults and overrides
+    self._apply_variable_defaults(template)
+    self._apply_cli_overrides(template, var, ctx)
+    
+    # Re-sort sections after all overrides (toggle values may have changed)
+    if template.variables:
+      template.variables.sort_sections()
+
+    self._display_template_details(template, id)
+    console.print()
+
+    # Collect variable values
+    variable_values = self._collect_variable_values(template, interactive)
+
+    try:
+      # Validate and render template
+      if template.variables:
+        template.variables.validate_all()
+      
+      # render() returns both the file map and the final resolved variable
+      # values; the latter replaces the interactively collected dict.
+      rendered_files, variable_values = template.render(template.variables)
+      
+      if not rendered_files:
+        self.display.display_error("Template rendering returned no files", context="template generation")
+        raise Exit(code=1)
+      
+      logger.info(f"Successfully rendered template '{id}'")
+      
+      # Determine output directory
+      output_dir = Path(directory) if directory else Path(id)
+      
+      # Check for conflicts and get confirmation
+      existing_files = self._check_output_directory(output_dir, rendered_files, interactive)
+      if existing_files is None:
+        return  # User cancelled
+      
+      # Get final confirmation for generation
+      dir_not_empty = output_dir.exists() and any(output_dir.iterdir())
+      if not self._get_generation_confirmation(output_dir, rendered_files, existing_files, 
+                                               dir_not_empty, dry_run, interactive):
+        return  # User cancelled
+      
+      # Execute generation (dry run or actual)
+      if dry_run:
+        self._execute_dry_run(id, output_dir, rendered_files, show_files)
+      else:
+        self._write_generated_files(output_dir, rendered_files)
+      
+      # Display next steps
+      if template.metadata.next_steps:
+        self.display.display_next_steps(template.metadata.next_steps, variable_values)
+
+    except Exception as e:
+      # NOTE(review): typer.Exit (raised above on empty render) is an Exception
+      # subclass, so it is re-caught here and reported with an empty message
+      # before being re-raised — confirm this double handling is intended.
+      self.display.display_error(str(e), context=f"generating template '{id}'")
+      raise Exit(code=1)
+
+  def config_get(
+    self,
+    var_name: Optional[str] = Argument(None, help="Variable name to get (omit to show all defaults)"),
+  ) -> None:
+    """Get default value(s) for this module.
+    
+    Examples:
+        # Get all defaults for module
+        cli compose defaults get
+        
+        # Get specific variable default
+        cli compose defaults get service_name
+    """
+    # Imported lazily — presumably to avoid an import cycle with .config; confirm.
+    from .config import ConfigManager
+    config = ConfigManager()
+    
+    if var_name:
+      # Get specific variable default
+      value = config.get_default_value(self.name, var_name)
+      if value is not None:
+        console.print(f"[green]{var_name}[/green] = [yellow]{value}[/yellow]")
+      else:
+        self.display.display_warning(f"No default set for variable '{var_name}'", context=f"module '{self.name}'")
+    else:
+      # Show all defaults (flat list)
+      defaults = config.get_defaults(self.name)
+      if defaults:
+        console.print(f"[bold]Config defaults for module '{self.name}':[/bold]\n")
+        for var_name, var_value in defaults.items():
+          console.print(f"  [green]{var_name}[/green] = [yellow]{var_value}[/yellow]")
+      else:
+        console.print(f"[yellow]No defaults configured for module '{self.name}'[/yellow]")
+
+  def config_set(
+    self,
+    var_name: str = Argument(..., help="Variable name or var=value format"),
+    value: Optional[str] = Argument(None, help="Default value (not needed if using var=value format)"),
+  ) -> None:
+    """Set a default value for a variable.
+    
+    This only sets the DEFAULT VALUE, not the variable spec.
+    The variable must be defined in the module or template spec.
+    
+    Supports both formats:
+      - var_name value
+      - var_name=value
+    
+    Examples:
+        # Set default value (format 1)
+        cli compose defaults set service_name my-awesome-app
+        
+        # Set default value (format 2)
+        cli compose defaults set service_name=my-awesome-app
+        
+        # Set author for all compose templates
+        cli compose defaults set author "Christian Lempa"
+    """
+    from .config import ConfigManager
+    config = ConfigManager()
+    
+    # Parse var_name and value - support both "var value" and "var=value" formats
+    if '=' in var_name and value is None:
+      # Format: var_name=value
+      # split('=', 1) keeps any '=' characters inside the value intact.
+      parts = var_name.split('=', 1)
+      actual_var_name = parts[0]
+      actual_value = parts[1]
+    elif value is not None:
+      # Format: var_name value
+      actual_var_name = var_name
+      actual_value = value
+    else:
+      # Neither format matched: name given with no value at all.
+      self.display.display_error(f"Missing value for variable '{var_name}'", context="config set")
+      console.print(f"[dim]Usage: defaults set VAR_NAME VALUE or defaults set VAR_NAME=VALUE[/dim]")
+      raise Exit(code=1)
+    
+    # Set the default value
+    config.set_default_value(self.name, actual_var_name, actual_value)
+    console.print(f"[green]{IconManager.get_status_icon('success')} Set default:[/green] [cyan]{actual_var_name}[/cyan] = [yellow]{actual_value}[/yellow]")
+    console.print(f"\n[dim]This will be used as the default value when generating templates with this module.[/dim]")
+
+  def config_remove(
+    self,
+    var_name: str = Argument(..., help="Variable name to remove"),
+  ) -> None:
+    """Remove a specific default variable value.
+    
+    Examples:
+        # Remove a default value
+        cli compose defaults rm service_name
+    """
+    from .config import ConfigManager
+    config = ConfigManager()
+    defaults = config.get_defaults(self.name)
+    
+    if not defaults:
+      console.print(f"[yellow]No defaults configured for module '{self.name}'[/yellow]")
+      return
+    
+    if var_name in defaults:
+      # Mutate the returned dict, then persist the whole mapping back.
+      del defaults[var_name]
+      config.set_defaults(self.name, defaults)
+      console.print(f"[green]{IconManager.get_status_icon('success')} Removed default for '{var_name}'[/green]")
+    else:
+      console.print(f"[red]No default found for variable '{var_name}'[/red]")
+
+  def config_clear(
+    self,
+    var_name: Optional[str] = Argument(None, help="Variable name to clear (omit to clear all defaults)"),
+    force: bool = Option(False, "--force", "-f", help="Skip confirmation prompt"),
+  ) -> None:
+    """Clear default value(s) for this module.
+    
+    Examples:
+        # Clear specific variable default
+        cli compose defaults clear service_name
+        
+        # Clear all defaults for module
+        cli compose defaults clear --force
+    """
+    from .config import ConfigManager
+    config = ConfigManager()
+    defaults = config.get_defaults(self.name)
+    
+    if not defaults:
+      console.print(f"[yellow]No defaults configured for module '{self.name}'[/yellow]")
+      return
+    
+    if var_name:
+      # Clear specific variable
+      if var_name in defaults:
+        del defaults[var_name]
+        config.set_defaults(self.name, defaults)
+        console.print(f"[green]{IconManager.get_status_icon('success')} Cleared default for '{var_name}'[/green]")
+      else:
+        console.print(f"[red]No default found for variable '{var_name}'[/red]")
+    else:
+      # Clear all defaults
+      if not force:
+        console.print(f"[bold yellow]{IconManager.get_status_icon('warning')} Warning:[/bold yellow] This will clear ALL defaults for module '[cyan]{self.name}[/cyan]'")
+        console.print()
+        # Show what will be cleared
+        # NOTE(review): the loop variable shadows the `var_name` parameter;
+        # harmless here (the parameter is None on this branch), but rename
+        # would be clearer.
+        for var_name, var_value in defaults.items():
+          console.print(f"  [green]{var_name}[/green] = [yellow]{var_value}[/yellow]")
+        console.print()
+        if not Confirm.ask(f"[bold red]Are you sure?[/bold red]", default=False):
+          console.print("[green]Operation cancelled.[/green]")
+          return
+      
+      config.clear_defaults(self.name)
+      console.print(f"[green]{IconManager.get_status_icon('success')} Cleared all defaults for module '{self.name}'[/green]")
+
+  def config_list(self) -> None:
+    """Display the defaults for this specific module in YAML format.
+    
+    Examples:
+        # Show the defaults for the current module
+        cli compose defaults list
+    """
+    from .config import ConfigManager
+    import yaml
+    
+    config = ConfigManager()
+    
+    # Get only the defaults for this module
+    defaults = config.get_defaults(self.name)
+    
+    if not defaults:
+      console.print(f"[yellow]No configuration found for module '{self.name}'[/yellow]")
+      console.print(f"\n[dim]Config file location: {config.get_config_path()}[/dim]")
+      return
+    
+    # Create a minimal config structure with only this module's defaults
+    # (mirrors the on-disk config layout: defaults -> module name -> values).
+    module_config = {
+      "defaults": {
+        self.name: defaults
+      }
+    }
+    
+    # Convert config to YAML string; sort_keys=False preserves insertion order.
+    yaml_output = yaml.dump(module_config, default_flow_style=False, sort_keys=False)
+    
+    console.print(f"[bold]Configuration for module:[/bold] [cyan]{self.name}[/cyan]")
+    console.print(f"[dim]Config file: {config.get_config_path()}[/dim]\n")
+    console.print(Panel(yaml_output, title=f"{self.name.capitalize()} Config", border_style="blue"))
+
+  def validate(
+    self,
+    template_id: str = Argument(None, help="Template ID to validate (if omitted, validates all templates)"),
+    verbose: bool = Option(False, "--verbose", "-v", help="Show detailed validation information"),
+    semantic: bool = Option(True, "--semantic/--no-semantic", help="Enable semantic validation (Docker Compose schema, etc.)")
+  ) -> None:
+    """Validate templates for Jinja2 syntax, undefined variables, and semantic correctness.
+    
+    Validation includes:
+    - Jinja2 syntax checking
+    - Variable definition checking
+    - Semantic validation (when --semantic is enabled):
+      - Docker Compose file structure
+      - YAML syntax
+      - Configuration best practices
+    
+    Examples:
+        # Validate all templates in this module
+        cli compose validate
+        
+        # Validate a specific template
+        cli compose validate gitlab
+        
+        # Validate with verbose output
+        cli compose validate --verbose
+        
+        # Skip semantic validation (only Jinja2)
+        cli compose validate --no-semantic
+    """
+    from rich.table import Table
+    from .validators import get_validator_registry
+    
+    if template_id:
+      # Validate a specific template
+      try:
+        template = self._load_template_by_id(template_id)
+        console.print(f"[bold]Validating template:[/bold] [cyan]{template_id}[/cyan]\n")
+        
+        try:
+          # Trigger validation by accessing used_variables
+          _ = template.used_variables
+          # Trigger variable definition validation by accessing variables
+          _ = template.variables
+          console.print(f"[green]{IconManager.get_status_icon('success')} Jinja2 validation passed[/green]")
+          
+          # Semantic validation
+          if semantic:
+            console.print(f"\n[bold cyan]Running semantic validation...[/bold cyan]")
+            registry = get_validator_registry()
+            has_semantic_errors = False
+            
+            # Render template with default values for validation
+            rendered_files, _ = template.render(template.variables)
+            
+            for file_path, content in rendered_files.items():
+              result = registry.validate_file(content, file_path)
+              
+              # Only print per-file output when there is something to report.
+              if result.errors or result.warnings or (verbose and result.info):
+                console.print(f"\n[cyan]File:[/cyan] {file_path}")
+                result.display(f"{file_path}")
+                
+                if result.errors:
+                  has_semantic_errors = True
+            
+            if not has_semantic_errors:
+              console.print(f"\n[green]{IconManager.get_status_icon('success')} Semantic validation passed[/green]")
+            else:
+              console.print(f"\n[red]{IconManager.get_status_icon('error')} Semantic validation found errors[/red]")
+              raise Exit(code=1)
+          
+          # NOTE(review): `rendered_files` is only assigned inside the
+          # `if semantic:` branch above; running with --no-semantic --verbose
+          # raises NameError on the last line of this block — confirm and fix.
+          if verbose:
+            console.print(f"\n[dim]Template path: {template.template_dir}[/dim]")
+            console.print(f"[dim]Found {len(template.used_variables)} variables[/dim]")
+            console.print(f"[dim]Generated {len(rendered_files)} files[/dim]")
+        except ValueError as e:
+          console.print(f"[red]{IconManager.get_status_icon('error')} Validation failed for '{template_id}':[/red]")
+          console.print(f"\n{e}")
+          raise Exit(code=1)
+          
+      except Exception as e:
+        console.print(f"[red]Error loading template '{template_id}': {e}[/red]")
+        raise Exit(code=1)
+    else:
+      # Validate all templates
+      console.print(f"[bold]Validating all {self.name} templates...[/bold]\n")
+      
+      entries = self.libraries.find(self.name, sort_results=True)
+      total = len(entries)
+      valid_count = 0
+      invalid_count = 0
+      errors = []
+      
+      # NOTE(review): the loop rebinds the `template_id` parameter; harmless on
+      # this branch (parameter is None here), but a distinct name would be clearer.
+      for template_dir, library_name in entries:
+        template_id = template_dir.name
+        try:
+          template = Template(template_dir, library_name=library_name)
+          # Trigger validation
+          _ = template.used_variables
+          _ = template.variables
+          valid_count += 1
+          if verbose:
+            console.print(f"[green]{IconManager.get_status_icon('success')}[/green] {template_id}")
+        except ValueError as e:
+          invalid_count += 1
+          errors.append((template_id, str(e)))
+          if verbose:
+            console.print(f"[red]{IconManager.get_status_icon('error')}[/red] {template_id}")
+        except Exception as e:
+          invalid_count += 1
+          errors.append((template_id, f"Load error: {e}"))
+          if verbose:
+            console.print(f"[yellow]{IconManager.get_status_icon('warning')}[/yellow] {template_id}")
+      
+      # Summary
+      console.print(f"\n[bold]Validation Summary:[/bold]")
+      summary_table = Table(show_header=False, box=None, padding=(0, 2))
+      summary_table.add_column(style="bold")
+      summary_table.add_column()
+      summary_table.add_row("Total templates:", str(total))
+      summary_table.add_row("[green]Valid:[/green]", str(valid_count))
+      summary_table.add_row("[red]Invalid:[/red]", str(invalid_count))
+      console.print(summary_table)
+      
+      # Show errors if any
+      if errors:
+        console.print(f"\n[bold red]Validation Errors:[/bold red]")
+        for template_id, error_msg in errors:
+          console.print(f"\n[yellow]Template:[/yellow] [cyan]{template_id}[/cyan]")
+          console.print(f"[dim]{error_msg}[/dim]")
+        raise Exit(code=1)
+      else:
+        console.print(f"\n[green]{IconManager.get_status_icon('success')} All templates are valid![/green]")
+
+  @classmethod
+  def register_cli(cls, app: Typer) -> None:
+    """Register module commands with the main app.
+    
+    Args:
+        app: Root Typer application the module's sub-app is attached to.
+    """
+    logger.debug(f"Registering CLI commands for module '{cls.name}'")
+    
+    module_instance = cls()
+    
+    module_app = Typer(help=cls.description)
+    
+    module_app.command("list")(module_instance.list)
+    module_app.command("search")(module_instance.search)
+    module_app.command("show")(module_instance.show)
+    module_app.command("validate")(module_instance.validate)
+    
+    # Extra/unknown options are allowed so additional --var forms can reach
+    # generate() through its `ctx` parameter.
+    module_app.command(
+      "generate", 
+      context_settings={"allow_extra_args": True, "ignore_unknown_options": True}
+    )(module_instance.generate)
+    
+    # Add defaults commands (simplified - only manage default values)
+    defaults_app = Typer(help="Manage default values for template variables")
+    defaults_app.command("get", help="Get default value(s)")(module_instance.config_get)
+    defaults_app.command("set", help="Set a default value")(module_instance.config_set)
+    defaults_app.command("rm", help="Remove a specific default value")(module_instance.config_remove)
+    defaults_app.command("clear", help="Clear default value(s)")(module_instance.config_clear)
+    defaults_app.command("list", help="Display the config for this module in YAML format")(module_instance.config_list)
+    module_app.add_typer(defaults_app, name="defaults")
+    
+    app.add_typer(module_app, name=cls.name, help=cls.description)
+    logger.info(f"Module '{cls.name}' CLI commands registered")
+
+  def _load_template_by_id(self, id: str) -> Template:
+    """Locate template `id` in this module's libraries and load it.
+    
+    Raises:
+        FileNotFoundError: If the template is not found, or fails to load
+            (load errors are wrapped so callers handle a single type).
+    """
+    result = self.libraries.find_by_id(self.name, id)
+    if not result:
+      raise FileNotFoundError(f"Template '{id}' not found in module '{self.name}'")
+    
+    template_dir, library_name = result
+    try:
+      return Template(template_dir, library_name=library_name)
+    except Exception as exc:
+      logger.error(f"Failed to load template '{id}': {exc}")
+      raise FileNotFoundError(f"Template '{id}' could not be loaded: {exc}") from exc
+
+  def _display_template_details(self, template: Template, id: str) -> None:
+    """Display template information panel and variables table."""
+    # Thin delegation to the shared DisplayManager instance.
+    self.display.display_template_details(template, id)

+ 224 - 0
cli/core/prompt.py

@@ -0,0 +1,224 @@
+from __future__ import annotations
+
+from typing import Dict, Any, List, Callable
+import logging
+from rich.console import Console
+from rich.prompt import Prompt, Confirm, IntPrompt
+from rich.table import Table
+
+from .display import DisplayManager, IconManager
+from .variable import Variable
+from .collection import VariableCollection
+
+logger = logging.getLogger(__name__)
+
+
+class PromptHandler:
+  """Simple interactive prompt handler for collecting template variables."""
+
+  def __init__(self) -> None:
+    # Own Console/DisplayManager instances; no state shared with callers.
+    self.console = Console()
+    self.display = DisplayManager()
+
+  def collect_variables(self, variables: VariableCollection) -> dict[str, Any]:
+    """Collect values for variables by iterating through sections.
+    
+    Args:
+        variables: VariableCollection with organized sections and variables
+        
+    Returns:
+        Dict of variable names to collected values
+    """
+    # One up-front opt-out: keep all defaults without per-variable prompts.
+    if not Confirm.ask("Customize any settings?", default=False):
+      logger.info("User opted to keep all default values")
+      return {}
+
+    collected: Dict[str, Any] = {}
+
+    # Process each section
+    for section_key, section in variables.get_sections().items():
+      if not section.variables:
+        continue
+
+      # Check if dependencies are satisfied
+      if not variables.is_section_satisfied(section_key):
+        # Get list of unsatisfied dependencies for better user feedback
+        unsatisfied_keys = [dep for dep in section.needs if not variables.is_section_satisfied(dep)]
+        # Convert section keys to titles for user-friendly display
+        unsatisfied_titles = []
+        for dep_key in unsatisfied_keys:
+          dep_section = variables.get_section(dep_key)
+          if dep_section:
+            unsatisfied_titles.append(dep_section.title)
+          else:
+            # Fall back to the raw key if the section is unknown.
+            unsatisfied_titles.append(dep_key)
+        dep_names = ", ".join(unsatisfied_titles) if unsatisfied_titles else "unknown"
+        self.console.print(
+          f"\n[dim]{IconManager.get_status_icon('skipped')} {section.title} (skipped - requires {dep_names} to be enabled)[/dim]"
+        )
+        logger.debug(f"Skipping section '{section_key}' - dependencies not satisfied: {dep_names}")
+        continue
+
+      # Always show section header first
+      self.display.display_section_header(section.title, section.description)
+
+      # Handle section toggle - skip for required sections
+      if section.required:
+        # Required sections are always processed, no toggle prompt needed
+        logger.debug(f"Processing required section '{section.key}' without toggle prompt")
+      elif section.toggle:
+        toggle_var = section.variables.get(section.toggle)
+        if toggle_var:
+          # Use description for prompt if available, otherwise use title
+          prompt_text = section.description if section.description else f"Enable {section.title}?"
+          current_value = toggle_var.convert(toggle_var.value)
+          new_value = self._prompt_bool(prompt_text, current_value)
+          
+          # Only record a change if the user actually flipped the toggle.
+          if new_value != current_value:
+            collected[toggle_var.name] = new_value
+            toggle_var.value = new_value
+          
+          # Use section's native is_enabled() method
+          if not section.is_enabled():
+            continue
+
+      # Collect variables in this section
+      for var_name, variable in section.variables.items():
+        # Skip toggle variable (already handled)
+        if section.toggle and var_name == section.toggle:
+          continue
+          
+        current_value = variable.convert(variable.value)
+        # Pass section.required so _prompt_variable can enforce required inputs
+        new_value = self._prompt_variable(variable, required=section.required)
+        
+        # For autogenerated variables, always update even if None (signals autogeneration)
+        if variable.autogenerated and new_value is None:
+          collected[var_name] = None
+          variable.value = None
+        elif new_value != current_value:
+          collected[var_name] = new_value
+          variable.value = new_value
+
+    logger.info(f"Variable collection completed. Collected {len(collected)} values")
+    return collected
+
+  def _prompt_variable(self, variable: Variable, required: bool = False) -> Any:
+    """Prompt for a single variable value based on its type.
+    
+    Args:
+        variable: The variable to prompt for
+        required: Whether the containing section is required (for context/display)
+        
+    Returns:
+        The validated value entered by the user
+    """
+    logger.debug(f"Prompting for variable '{variable.name}' (type: {variable.type})")
+    
+    # Use variable's native methods for prompt text and default value
+    prompt_text = variable.get_prompt_text()
+    default_value = variable.get_normalized_default()
+
+    # Add lock icon before default value for sensitive or autogenerated variables
+    if variable.sensitive or variable.autogenerated:
+      # Format: "Prompt text 🔒 (default)"
+      # The lock icon goes between the text and the default value in parentheses
+      prompt_text = f"{prompt_text} {IconManager.lock()}"
+
+    # Check if this specific variable is required (has no default and not autogenerated)
+    var_is_required = variable.is_required()
+    
+    # If variable is required, mark it in the prompt
+    if var_is_required:
+      prompt_text = f"{prompt_text} [bold red]*required[/bold red]"
+
+    handler = self._get_prompt_handler(variable)
+
+    # Add validation hint (includes both extra text and enum options)
+    hint = variable.get_validation_hint()
+    if hint:
+      # Show options/extra inline inside parentheses, before the default
+      prompt_text = f"{prompt_text} [dim]({hint})[/dim]"
+
+    while True:
+      try:
+        raw = handler(prompt_text, default_value)
+        # Use Variable's centralized validation method that handles:
+        # - Type conversion
+        # - Autogenerated variable detection
+        # - Required field validation
+        converted = variable.validate_and_convert(raw, check_required=True)
+        
+        # Return the converted value (caller will update variable.value)
+        return converted
+      except ValueError as exc:
+        # Conversion/validation failed — show a consistent error message and retry
+        self._show_validation_error(str(exc))
+      except Exception as e:
+        # Unexpected error — log and retry using the stored (unconverted) value
+        # NOTE(review): a persistently failing prompt handler would loop here
+        # forever; consider a retry cap.
+        logger.error(f"Error prompting for variable '{variable.name}': {str(e)}")
+        default_value = variable.value
+        handler = self._get_prompt_handler(variable)
+
+  def _get_prompt_handler(self, variable: Variable) -> Callable:
+    """Return the prompt function for a variable type."""
+    handlers = {
+      "bool": self._prompt_bool,
+      "int": self._prompt_int,
+      # For enum prompts we pass the variable.extra through so options and extra
+      # can be combined into a single inline hint.
+      "enum": lambda text, default: self._prompt_enum(text, variable.options or [], default, extra=getattr(variable, 'extra', None)),
+    }
+    # Any unknown type falls back to a plain string prompt (masked if sensitive).
+    return handlers.get(variable.type, lambda text, default: self._prompt_string(text, default, is_sensitive=variable.sensitive))
+
+  def _show_validation_error(self, message: str) -> None:
+    """Display validation feedback consistently."""
+    self.display.display_validation_error(message)
+
+  def _prompt_string(self, prompt_text: str, default: Any = None, is_sensitive: bool = False) -> str | None:
+    # Empty or whitespace-only input is normalized to None.
+    value = Prompt.ask(
+      prompt_text,
+      default=str(default) if default is not None else "",
+      show_default=True,
+      password=is_sensitive
+    )
+    stripped = value.strip() if value else None
+    return stripped if stripped else None
+
+  def _prompt_bool(self, prompt_text: str, default: Any = None) -> bool | None:
+    if default is None:
+      return Confirm.ask(prompt_text, default=None)
+    # Accept common truthy string spellings for non-bool defaults.
+    converted = default if isinstance(default, bool) else str(default).lower() in ("true", "1", "yes", "on")
+    return Confirm.ask(prompt_text, default=converted)
+
+  def _prompt_int(self, prompt_text: str, default: Any = None) -> int | None:
+    converted = None
+    if default is not None:
+      try:
+        converted = int(default)
+      except (ValueError, TypeError):
+        # Un-parseable default is dropped (prompt shows no default).
+        logger.warning(f"Invalid default integer value: {default}")
+    return IntPrompt.ask(prompt_text, default=converted)
+
+  def _prompt_enum(self, prompt_text: str, options: list[str], default: Any = None, extra: str | None = None) -> str:
+    """Prompt for enum selection with validation.
+    
+    Note: prompt_text should already include hint from variable.get_validation_hint()
+    but we keep this for backward compatibility and fallback.
+    """
+    if not options:
+      return self._prompt_string(prompt_text, default)
+
+    # Validate default is in options
+    # NOTE(review): an out-of-range default is silently replaced by the first
+    # option rather than reported — confirm this is intended.
+    if default and str(default) not in options:
+      default = options[0]
+
+    while True:
+      value = Prompt.ask(
+        prompt_text,
+        default=str(default) if default else options[0],
+        show_default=True,
+      )
+      if value in options:
+        return value
+      self.console.print(f"[red]Invalid choice. Select from: {', '.join(options)}[/red]")

+ 36 - 0
cli/core/registry.py

@@ -0,0 +1,36 @@
+"""Module registry system."""
+from __future__ import annotations
+
+import logging
+from typing import Iterator, Type
+
+logger = logging.getLogger(__name__)
+
+
+class ModuleRegistry:
+  """Simple module registry without magic."""
+  
+  def __init__(self) -> None:
+    self._modules = {}
+    logger.debug("Initializing module registry")
+  
+  def register(self, module_class: Type) -> None:
+    """Register a module class."""
+    # Module class defines its own name attribute
+    logger.debug(f"Attempting to register module class '{module_class.name}'")
+    
+    if module_class.name in self._modules:
+      logger.warning(f"Module '{module_class.name}' already registered, replacing with new implementation")
+    
+    self._modules[module_class.name] = module_class
+    logger.info(f"Registered module '{module_class.name}' (total modules: {len(self._modules)})")
+    logger.debug(f"Module '{module_class.name}' details: description='{module_class.description}'")
+  
+  def iter_module_classes(self) -> Iterator[tuple[str, Type]]:
+    """Yield registered module classes without instantiating them."""
+    logger.debug(f"Iterating over {len(self._modules)} registered module classes")
+    for name in sorted(self._modules.keys()):
+      yield name, self._modules[name]
+
+# Global registry
+registry = ModuleRegistry()

+ 376 - 0
cli/core/repo.py

@@ -0,0 +1,376 @@
+"""Repository management module for syncing library repositories."""
+from __future__ import annotations
+
+import logging
+import subprocess
+from pathlib import Path
+from typing import Optional
+
+from rich.console import Console
+from rich.panel import Panel
+from rich.progress import Progress, SpinnerColumn, TextColumn
+from rich.table import Table
+from typer import Argument, Option, Typer
+
+from ..core.config import ConfigManager
+from ..core.display import DisplayManager, IconManager
+from ..core.exceptions import ConfigError
+
+logger = logging.getLogger(__name__)
+# Separate consoles so errors go to stderr while normal output stays on stdout.
+console = Console()
+console_err = Console(stderr=True)
+display = DisplayManager()
+
+# Sub-application mounted by register_cli() under the `repo` command.
+app = Typer(help="Manage library repositories")
+
+
+def _run_git_command(args: list[str], cwd: Optional[Path] = None) -> tuple[bool, str, str]:
+    """Run a git command and return the result.
+    
+    Args:
+        args: Git command arguments (without 'git' prefix)
+        cwd: Working directory for the command
+        
+    Returns:
+        Tuple of (success, stdout, stderr)
+    """
+    try:
+        result = subprocess.run(
+            ["git"] + args,
+            cwd=cwd,
+            capture_output=True,
+            text=True,
+            timeout=300  # 5 minute timeout
+        )
+        return result.returncode == 0, result.stdout, result.stderr
+    except subprocess.TimeoutExpired:
+        return False, "", "Command timed out after 5 minutes"
+    except FileNotFoundError:
+        return False, "", "Git command not found. Please install git."
+    except Exception as e:
+        return False, "", str(e)
+
+
+def _clone_or_pull_repo(name: str, url: str, target_path: Path, branch: Optional[str] = None, sparse_dir: Optional[str] = None) -> tuple[bool, str]:
+    """Clone or pull a git repository with optional sparse-checkout.
+    
+    Args:
+        name: Library name
+        url: Git repository URL
+        target_path: Target directory for the repository
+        branch: Git branch to clone/pull (optional)
+        sparse_dir: Directory to sparse-checkout (optional, use None or "." for full clone)
+        
+    Returns:
+        Tuple of (success, message)
+    """
+    if target_path.exists() and (target_path / ".git").exists():
+        # Repository exists, pull updates
+        logger.debug(f"Pulling updates for library '{name}' at {target_path}")
+        
+        # Determine which branch to pull
+        pull_branch = branch if branch else "main"
+        
+        # Pull updates from specific branch
+        success, stdout, stderr = _run_git_command(
+            ["pull", "--ff-only", "origin", pull_branch],
+            cwd=target_path
+        )
+        
+        if success:
+            # Check if anything was updated
+            if "Already up to date" in stdout or "Already up-to-date" in stdout:
+                return True, "Already up to date"
+            else:
+                return True, "Updated successfully"
+        else:
+            error_msg = stderr or stdout
+            logger.error(f"Failed to pull library '{name}': {error_msg}")
+            return False, f"Pull failed: {error_msg}"
+    else:
+        # Repository doesn't exist, clone it
+        logger.debug(f"Cloning library '{name}' from {url} to {target_path}")
+        
+        # Ensure parent directory exists
+        target_path.parent.mkdir(parents=True, exist_ok=True)
+        
+        # Determine if we should use sparse-checkout
+        use_sparse = sparse_dir and sparse_dir != "."
+        
+        if use_sparse:
+            # Use sparse-checkout to clone only specific directory
+            logger.debug(f"Using sparse-checkout for directory: {sparse_dir}")
+            
+            # Initialize empty repo
+            success, stdout, stderr = _run_git_command(["init"], cwd=None)
+            if success:
+                # Create target directory
+                target_path.mkdir(parents=True, exist_ok=True)
+                
+                # Initialize git repo
+                success, stdout, stderr = _run_git_command(["init"], cwd=target_path)
+                if not success:
+                    return False, f"Failed to initialize repo: {stderr or stdout}"
+                
+                # Add remote
+                success, stdout, stderr = _run_git_command(["remote", "add", "origin", url], cwd=target_path)
+                if not success:
+                    return False, f"Failed to add remote: {stderr or stdout}"
+                
+                # Enable sparse-checkout (non-cone mode to exclude root files)
+                success, stdout, stderr = _run_git_command(
+                    ["sparse-checkout", "init", "--no-cone"], 
+                    cwd=target_path
+                )
+                if not success:
+                    return False, f"Failed to enable sparse-checkout: {stderr or stdout}"
+                
+                # Set sparse-checkout to specific directory (non-cone uses patterns)
+                success, stdout, stderr = _run_git_command(
+                    ["sparse-checkout", "set", f"{sparse_dir}/*"],
+                    cwd=target_path
+                )
+                if not success:
+                    return False, f"Failed to set sparse-checkout directory: {stderr or stdout}"
+                
+                # Fetch specific branch
+                fetch_args = ["fetch", "--depth", "1", "origin"]
+                if branch:
+                    fetch_args.append(f"{branch}:{branch}")
+                else:
+                    fetch_args.append("main:main")
+                
+                success, stdout, stderr = _run_git_command(fetch_args, cwd=target_path)
+                if not success:
+                    return False, f"Fetch failed: {stderr or stdout}"
+                
+                # Checkout the branch
+                checkout_branch = branch if branch else "main"
+                success, stdout, stderr = _run_git_command(
+                    ["checkout", checkout_branch],
+                    cwd=target_path
+                )
+                if not success:
+                    return False, f"Checkout failed: {stderr or stdout}"
+                
+                # Done! Files are in target_path/sparse_dir/
+                return True, "Cloned successfully (sparse)"
+            else:
+                return False, f"Failed to initialize: {stderr or stdout}"
+        else:
+            # Regular full clone
+            clone_args = ["clone", "--depth", "1"]
+            if branch:
+                clone_args.extend(["--branch", branch])
+            clone_args.extend([url, str(target_path)])
+            
+            success, stdout, stderr = _run_git_command(clone_args)
+            
+            if success:
+                return True, "Cloned successfully"
+            else:
+                error_msg = stderr or stdout
+                logger.error(f"Failed to clone library '{name}': {error_msg}")
+                return False, f"Clone failed: {error_msg}"
+
+
+@app.command()
+def update(
+    library_name: Optional[str] = Argument(
+        None,
+        help="Name of specific library to update (updates all if not specified)"
+    ),
+    verbose: bool = Option(False, "--verbose", "-v", help="Show detailed output")
+) -> None:
+    """Update library repositories by cloning or pulling from git.
+    
+    This command syncs all configured libraries from their git repositories.
+    If a library doesn't exist locally, it will be cloned. If it exists, it will be pulled.
+    """
+    config = ConfigManager()
+    libraries = config.get_libraries()
+    
+    if not libraries:
+        console.print("[yellow]No libraries configured.[/yellow]")
+        console.print("Libraries are auto-configured on first run with a default library.")
+        return
+    
+    # Filter to specific library if requested
+    if library_name:
+        libraries = [lib for lib in libraries if lib.get("name") == library_name]
+        if not libraries:
+            console_err.print(f"[red]Error:[/red] Library '{library_name}' not found in configuration")
+            return
+    
+    libraries_path = config.get_libraries_path()
+    
+    # Accumulate (name, message, success) tuples for the summary below.
+    results = []
+    
+    with Progress(
+        SpinnerColumn(),
+        TextColumn("[progress.description]{task.description}"),
+        console=console,
+    ) as progress:
+        for lib in libraries:
+            name = lib.get("name")
+            url = lib.get("url")
+            branch = lib.get("branch")
+            directory = lib.get("directory", "library")
+            enabled = lib.get("enabled", True)
+            
+            if not enabled:
+                if verbose:
+                    console.print(f"[dim]Skipping disabled library: {name}[/dim]")
+                # NOTE(review): disabled libraries are recorded with success=False,
+                # so they count against the success total reported below — confirm intended.
+                results.append((name, "Skipped (disabled)", False))
+                continue
+            
+            task = progress.add_task(f"Updating {name}...", total=None)
+            
+            # Target path: ~/.config/boilerplates/libraries/{name}/
+            target_path = libraries_path / name
+            
+            # Clone or pull the repository with sparse-checkout if directory is specified
+            success, message = _clone_or_pull_repo(name, url, target_path, branch, directory)
+            
+            results.append((name, message, success))
+            progress.remove_task(task)
+            
+            # In verbose mode results are streamed immediately instead of tabulated.
+            if verbose:
+                if success:
+                    display.display_success(f"{name}: {message}")
+                else:
+                    display.display_error(f"{name}: {message}")
+    
+    # Display summary table (only in non-verbose mode; verbose already printed per-library)
+    if not verbose:
+        table = Table(title="Library Update Summary", show_header=True)
+        table.add_column("Library", style="cyan", no_wrap=True)
+        table.add_column("Status")
+        
+        for name, message, success in results:
+            status_style = "green" if success else "red"
+            status_icon = IconManager.get_status_icon("success" if success else "error")
+            table.add_row(name, f"[{status_style}]{status_icon} {message}[/{status_style}]")
+        
+        console.print(table)
+    
+    # Summary
+    total = len(results)
+    successful = sum(1 for _, _, success in results if success)
+    
+    if successful == total:
+        console.print(f"\n[green]All libraries updated successfully ({successful}/{total})[/green]")
+    elif successful > 0:
+        console.print(f"\n[yellow]Partially successful: {successful}/{total} libraries updated[/yellow]")
+    else:
+        console.print(f"\n[red]Failed to update libraries[/red]")
+
+
+@app.command()
+def list() -> None:
+    """List all configured libraries with their local sync status.
+
+    NOTE(review): the function name shadows the builtin `list` for the rest of
+    this module. No later code in this file calls the builtin, and typer derives
+    the CLI command name from the function name, so renaming would change the
+    public command — confirm before touching.
+    """
+    config = ConfigManager()
+    libraries = config.get_libraries()
+    
+    if not libraries:
+        console.print("[yellow]No libraries configured.[/yellow]")
+        return
+    
+    table = Table(title="Configured Libraries", show_header=True)
+    table.add_column("Name", style="cyan", no_wrap=True)
+    table.add_column("URL", style="blue")
+    table.add_column("Branch", style="yellow")
+    table.add_column("Directory", style="magenta")
+    table.add_column("Status", style="green")
+    
+    libraries_path = config.get_libraries_path()
+    
+    for lib in libraries:
+        name = lib.get("name", "")
+        url = lib.get("url", "")
+        branch = lib.get("branch", "main")
+        directory = lib.get("directory", "library")
+        enabled = lib.get("enabled", True)
+        
+        # Check if library exists locally; the sparse-checkout layout nests the
+        # templates under {name}/{directory} unless directory is "." or empty.
+        library_base = libraries_path / name
+        if directory and directory != ".":
+            library_path = library_base / directory
+        else:
+            library_path = library_base
+        exists = library_path.exists()
+        
+        status_parts = []
+        if not enabled:
+            status_parts.append("[dim]disabled[/dim]")
+        elif exists:
+            status_parts.append("[green]synced[/green]")
+        else:
+            status_parts.append("[yellow]not synced[/yellow]")
+        
+        status = " ".join(status_parts)
+        
+        table.add_row(name, url, branch, directory, status)
+    
+    console.print(table)
+
+
+@app.command()
+def add(
+    name: str = Argument(..., help="Unique name for the library"),
+    url: str = Argument(..., help="Git repository URL"),
+    branch: str = Option("main", "--branch", "-b", help="Git branch to use"),
+    directory: str = Option("library", "--directory", "-d", help="Directory within repo containing templates (metadata only)"),
+    enabled: bool = Option(True, "--enabled/--disabled", help="Enable or disable the library"),
+    sync: bool = Option(True, "--sync/--no-sync", help="Sync the library after adding")
+) -> None:
+    """Add a new library to the configuration.
+
+    Raises nothing to the caller: ConfigError from the config layer is caught
+    and reported via the display manager.
+    """
+    config = ConfigManager()
+    
+    try:
+        config.add_library(name, url, directory, branch, enabled)
+        display.display_success(f"Added library '{name}'")
+        
+        if sync and enabled:
+            console.print(f"\nSyncing library '{name}'...")
+            # Call update for this specific library — typer commands are plain
+            # functions, so invoking update() directly here is fine.
+            update(library_name=name, verbose=True)
+    except ConfigError as e:
+        display.display_error(str(e))
+
+
+@app.command()
+def remove(
+    name: str = Argument(..., help="Name of the library to remove"),
+    keep_files: bool = Option(False, "--keep-files", help="Keep the local library files (don't delete)")
+) -> None:
+    """Remove a library from the configuration and delete its local files."""
+    config = ConfigManager()
+    
+    try:
+        # Remove from config
+        config.remove_library(name)
+        display.display_success(f"Removed library '{name}' from configuration")
+        
+        # Delete local files unless --keep-files is specified
+        if not keep_files:
+            libraries_path = config.get_libraries_path()
+            library_path = libraries_path / name
+            
+            if library_path.exists():
+                import shutil
+                shutil.rmtree(library_path)
+                display.display_success(f"Deleted local files at {library_path}")
+            else:
+                display.display_info(f"No local files found at {library_path}")
+    except ConfigError as e:
+        display.display_error(str(e))
+
+
+
+
+# Register the repo command with the CLI
+def register_cli(parent_app: Typer) -> None:
+    """Mount this module's Typer sub-app on `parent_app` under the name 'repo'."""
+    parent_app.add_typer(app, name="repo")

+ 113 - 0
cli/core/section.py

@@ -0,0 +1,113 @@
+from __future__ import annotations
+
+from collections import OrderedDict
+from typing import Any, Dict, List, Optional
+
+from .variable import Variable
+
+
+class VariableSection:
+  """Groups variables together with shared metadata for presentation."""
+
+  def __init__(self, data: dict[str, Any]) -> None:
+    """Initialize VariableSection from a dictionary.
+    
+    Args:
+        data: Dictionary containing section specification with required 'key' and 'title' keys
+
+    Raises:
+        ValueError: If data is not a dict, 'key'/'title' are missing, or
+            'needs' is neither a string nor a list.
+    """
+    if not isinstance(data, dict):
+      raise ValueError("VariableSection data must be a dictionary")
+    
+    if "key" not in data:
+      raise ValueError("VariableSection data must contain 'key'")
+    
+    if "title" not in data:
+      raise ValueError("VariableSection data must contain 'title'")
+    
+    self.key: str = data["key"]
+    self.title: str = data["title"]
+    # Insertion-ordered container; populated externally after construction.
+    self.variables: OrderedDict[str, Variable] = OrderedDict()
+    self.description: Optional[str] = data.get("description")
+    # Name of a variable inside this section that gates it (see is_enabled()).
+    self.toggle: Optional[str] = data.get("toggle")
+    # Default "general" section to required=True, all others to required=False
+    self.required: bool = data.get("required", data["key"] == "general")
+    # Section dependencies - can be string or list of strings
+    needs_value = data.get("needs")
+    if needs_value:
+      if isinstance(needs_value, str):
+        self.needs: List[str] = [needs_value]
+      elif isinstance(needs_value, list):
+        self.needs: List[str] = needs_value
+      else:
+        raise ValueError(f"Section '{self.key}' has invalid 'needs' value: must be string or list")
+    else:
+      self.needs: List[str] = []
+
+  def to_dict(self) -> Dict[str, Any]:
+    """Serialize VariableSection to a dictionary for storage.
+
+    NOTE(review): 'key' itself is not serialized — callers appear to store
+    sections in a mapping keyed by section key; confirm against consumers.
+    """
+    section_dict = {
+      'required': self.required,
+      'vars': {name: var.to_dict() for name, var in self.variables.items()}
+    }
+    
+    # Add optional fields if present (falsy values are intentionally omitted)
+    for field in ('title', 'description', 'toggle'):
+      if value := getattr(self, field):
+        section_dict[field] = value
+    
+    # Store dependencies (single value if only one, list otherwise)
+    if self.needs:
+      section_dict['needs'] = self.needs[0] if len(self.needs) == 1 else self.needs
+    
+    return section_dict
+  
+  def is_enabled(self) -> bool:
+    """Check if section is currently enabled based on toggle variable.
+    
+    Returns:
+        True if section is enabled (no toggle or toggle is True), False otherwise
+    """
+    if not self.toggle:
+      return True
+    
+    # A toggle naming a variable that doesn't exist (yet) leaves the section enabled.
+    toggle_var = self.variables.get(self.toggle)
+    if not toggle_var:
+      return True
+    
+    try:
+      return bool(toggle_var.convert(toggle_var.value))
+    except Exception:
+      # An unconvertible toggle value is treated as "disabled".
+      return False
+  
+  def clone(self, origin_update: Optional[str] = None) -> 'VariableSection':
+    """Create a deep copy of the section with all variables.
+    
+    This is more efficient than converting to dict and back when copying sections.
+    
+    Args:
+        origin_update: Optional origin string to apply to all cloned variables
+        
+    Returns:
+        New VariableSection instance with deep-copied variables
+        
+    Example:
+        section2 = section1.clone(origin_update='template')
+    """
+    # Create new section with same metadata.
+    # 'needs': None is fine — the constructor treats a falsy value as "no deps".
+    cloned = VariableSection({
+      'key': self.key,
+      'title': self.title,
+      'description': self.description,
+      'toggle': self.toggle,
+      'required': self.required,
+      'needs': self.needs.copy() if self.needs else None,
+    })
+    
+    # Deep copy all variables
+    for var_name, variable in self.variables.items():
+      if origin_update:
+        cloned.variables[var_name] = variable.clone(update={'origin': origin_update})
+      else:
+        cloned.variables[var_name] = variable.clone()
+    
+    return cloned

+ 562 - 0
cli/core/template.py

@@ -0,0 +1,562 @@
+from __future__ import annotations
+
+from .variable import Variable
+from .collection import VariableCollection
+from .exceptions import (
+    TemplateError,
+    TemplateLoadError,
+    TemplateSyntaxError,
+    TemplateValidationError,
+    TemplateRenderError,
+    YAMLParseError,
+    ModuleLoadError
+)
+from pathlib import Path
+from typing import Any, Dict, List, Set, Optional, Literal
+from dataclasses import dataclass, field
+from functools import lru_cache
+import logging
+import os
+import yaml
+from jinja2 import Environment, FileSystemLoader, meta
+from jinja2.sandbox import SandboxedEnvironment
+from jinja2 import nodes
+from jinja2.visitor import NodeVisitor
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class TemplateFile:
+    """Represents a single file within a template directory."""
+    # Path relative to the template root directory
+    relative_path: Path
+    # 'j2' files are Jinja2 templates (rendered, .j2 suffix stripped); 'static' files are copied as-is
+    file_type: Literal['j2', 'static']
+    output_path: Path # The path it will have in the output directory
+
+@dataclass
+class TemplateMetadata:
+  """Represents template metadata with proper typing."""
+  name: str
+  description: str
+  author: str
+  date: str
+  version: str
+  module: str = ""
+  tags: List[str] = field(default_factory=list)
+  library: str = "unknown"
+  next_steps: str = ""
+  draft: bool = False
+
+  # NOTE: @dataclass does not replace an __init__ defined in the class body,
+  # so this hand-written initializer is the one in effect; the field
+  # declarations above still drive the generated __repr__/__eq__.
+  def __init__(self, template_data: dict, library_name: str | None = None) -> None:
+    """Initialize TemplateMetadata from parsed YAML template data.
+    
+    Args:
+        template_data: Parsed YAML data from template.yaml
+        library_name: Name of the library this template belongs to
+
+    Raises:
+        ValueError: If the 'metadata' section is missing or incomplete.
+    """
+    # Validate metadata format first
+    self._validate_metadata(template_data)
+    
+    # Extract metadata section
+    metadata_section = template_data.get("metadata", {})
+    
+    self.name = metadata_section.get("name", "")
+    # YAML block scalar (|) preserves a trailing newline. Remove only trailing newlines
+    # while preserving internal newlines/formatting.
+    raw_description = metadata_section.get("description", "")
+    if isinstance(raw_description, str):
+      description = raw_description.rstrip("\n")
+    else:
+      description = str(raw_description)
+    self.description = description or "No description available"
+    self.author = metadata_section.get("author", "")
+    self.date = metadata_section.get("date", "")
+    self.version = metadata_section.get("version", "")
+    self.module = metadata_section.get("module", "")
+    # `or []` guards against an explicit `tags: null` in the YAML.
+    self.tags = metadata_section.get("tags", []) or []
+    self.library = library_name or "unknown"
+    self.draft = metadata_section.get("draft", False)
+    
+    # Extract next_steps (optional); same trailing-newline handling as description
+    raw_next_steps = metadata_section.get("next_steps", "")
+    if isinstance(raw_next_steps, str):
+      next_steps = raw_next_steps.rstrip("\n")
+    else:
+      next_steps = str(raw_next_steps) if raw_next_steps else ""
+    self.next_steps = next_steps
+
+  @staticmethod
+  def _validate_metadata(template_data: dict) -> None:
+    """Validate that template has required 'metadata' section with all required fields.
+    
+    Args:
+        template_data: Parsed YAML data from template.yaml
+        
+    Raises:
+        ValueError: If metadata section is missing or incomplete
+    """
+    metadata_section = template_data.get("metadata")
+    if metadata_section is None:
+      raise ValueError("Template format error: missing 'metadata' section")
+    
+    # Validate that metadata section has all required fields.
+    # Empty-string values count as missing (`not get(...)`).
+    required_fields = ["name", "author", "version", "date", "description"]
+    missing_fields = [field for field in required_fields if not metadata_section.get(field)]
+    
+    if missing_fields:
+      raise ValueError(f"Template format error: missing required metadata fields: {missing_fields}")
+
+@dataclass
+class Template:
+  """Represents a template directory."""
+
+  def __init__(self, template_dir: Path, library_name: str) -> None:
+    """Create a Template instance from a directory path.
+
+    Args:
+        template_dir: Directory containing template.yaml/template.yml and template files.
+        library_name: Name of the library this template belongs to.
+
+    Raises:
+        TemplateLoadError: On missing/invalid template file or I/O failure.
+        YAMLParseError: When the main template file is not valid YAML.
+    """
+    logger.debug(f"Loading template from directory: {template_dir}")
+    self.template_dir = template_dir
+    # Template id is the directory name itself.
+    self.id = template_dir.name
+    self.library_name = library_name
+
+    # Initialize caches for lazy loading (name-mangled; filled by properties elsewhere)
+    self.__module_specs: Optional[dict] = None
+    self.__merged_specs: Optional[dict] = None
+    self.__jinja_env: Optional[Environment] = None
+    self.__used_variables: Optional[Set[str]] = None
+    self.__variables: Optional[VariableCollection] = None
+    self.__template_files: Optional[List[TemplateFile]] = None # New attribute
+
+    try:
+      # Find and parse the main template file (template.yaml or template.yml)
+      main_template_path = self._find_main_template_file()
+      with open(main_template_path, "r", encoding="utf-8") as f:
+        # Load all YAML documents (handles templates with empty lines before ---)
+        documents = list(yaml.safe_load_all(f))
+        
+        # Filter out None/empty documents and get the first non-empty one
+        valid_docs = [doc for doc in documents if doc is not None]
+        
+        if not valid_docs:
+          raise ValueError("Template file contains no valid YAML data")
+        
+        if len(valid_docs) > 1:
+          logger.warning(f"Template file contains multiple YAML documents, using the first one")
+        
+        self._template_data = valid_docs[0]
+      
+      # Validate template data
+      if not isinstance(self._template_data, dict):
+        raise ValueError("Template file must contain a valid YAML dictionary")
+
+      # Load metadata (always needed)
+      self.metadata = TemplateMetadata(self._template_data, library_name)
+      logger.debug(f"Loaded metadata: {self.metadata}")
+
+      # Validate 'kind' field (always needed)
+      self._validate_kind(self._template_data)
+
+      # NOTE: File collection is now lazy-loaded via the template_files property
+      # This significantly improves performance when listing many templates
+
+      logger.info(f"Loaded template '{self.id}' (v{self.metadata.version})")
+
+    except (ValueError, FileNotFoundError) as e:
+      logger.error(f"Error loading template from {template_dir}: {e}")
+      raise TemplateLoadError(f"Error loading template from {template_dir}: {e}")
+    except yaml.YAMLError as e:
+      logger.error(f"YAML parsing error in template {template_dir}: {e}")
+      # "template.y*ml" is a deliberate wildcard label since either extension may exist
+      raise YAMLParseError(str(template_dir / "template.y*ml"), e)
+    except (IOError, OSError) as e:
+      logger.error(f"File I/O error loading template {template_dir}: {e}")
+      raise TemplateLoadError(f"File I/O error loading template from {template_dir}: {e}")
+
+  def _find_main_template_file(self) -> Path:
+    """Find the main template file (template.yaml or template.yml)."""
+    for filename in ["template.yaml", "template.yml"]:
+      path = self.template_dir / filename
+      if path.exists():
+        return path
+    raise FileNotFoundError(f"Main template file (template.yaml or template.yml) not found in {self.template_dir}")
+
+  @staticmethod
+  @lru_cache(maxsize=32)
+  def _load_module_specs(kind: str) -> dict:
+    """Load specifications from the corresponding module with caching.
+    
+    Uses LRU cache to avoid re-loading the same module spec multiple times.
+    This significantly improves performance when listing many templates of the same kind.
+    
+    Args:
+        kind: The module kind (e.g., 'compose', 'terraform')
+        
+    Returns:
+        Dictionary containing the module's spec, or empty dict if kind is empty
+        
+    Raises:
+        ValueError: If module cannot be loaded or spec is invalid
+    """
+    if not kind:
+      return {}
+    try:
+      import importlib
+      # Dynamically resolve cli.modules.<kind>; its module-level `spec` dict is the contract.
+      module = importlib.import_module(f"cli.modules.{kind}")
+      spec = getattr(module, 'spec', {})
+      logger.debug(f"Loaded and cached module spec for kind '{kind}'")
+      return spec
+    except Exception as e:
+      # Any import/attribute failure is normalized to ValueError for callers.
+      raise ValueError(f"Error loading module specifications for kind '{kind}': {e}")
+
+  def _merge_specs(self, module_specs: dict, template_specs: dict) -> dict:
+    """Deep merge template specs with module specs using VariableCollection.
+    
+    Uses VariableCollection's native merge() method for consistent merging logic.
+    Module specs are base, template specs override with origin tracking.
+
+    Args:
+        module_specs: Spec dict supplied by the module (base layer).
+        template_specs: Spec dict from template.yaml (override layer).
+
+    Returns:
+        Merged spec as a plain dict of section_key -> section dict.
+    """
+    # Create VariableCollection from module specs (base)
+    module_collection = VariableCollection(module_specs) if module_specs else VariableCollection({})
+    
+    # Set origin for module variables (only where not already tagged)
+    for section in module_collection.get_sections().values():
+      for variable in section.variables.values():
+        if not variable.origin:
+          variable.origin = "module"
+    
+    # Merge template specs into module specs (template overrides)
+    if template_specs:
+      merged_collection = module_collection.merge(template_specs, origin="template")
+    else:
+      merged_collection = module_collection
+    
+    # Convert back to dict format
+    merged_spec = {}
+    for section_key, section in merged_collection.get_sections().items():
+      merged_spec[section_key] = section.to_dict()
+    
+    return merged_spec
+
+  def _collect_template_files(self) -> None:
+    """Collects all TemplateFile objects in the template directory."""
+    template_files: List[TemplateFile] = []
+    
+    for root, _, files in os.walk(self.template_dir):
+      for filename in files:
+        file_path = Path(root) / filename
+        relative_path = file_path.relative_to(self.template_dir)
+        
+        # Skip the main template file
+        if filename in ["template.yaml", "template.yml"]:
+          continue
+        
+        if filename.endswith(".j2"):
+          file_type: Literal['j2', 'static'] = 'j2'
+          output_path = relative_path.with_suffix('') # Remove .j2 suffix
+        else:
+          file_type = 'static'
+          output_path = relative_path # Static files keep their name
+        
+        template_files.append(TemplateFile(relative_path=relative_path, file_type=file_type, output_path=output_path))
+          
+    self.__template_files = template_files
+
+  def _extract_all_used_variables(self) -> Set[str]:
+    """Extract all undeclared variables from all .j2 files in the template directory.
+    
+    Returns:
+        Set of variable names referenced but not defined inside the templates.
+
+    Raises:
+        TemplateSyntaxError: If any Jinja2 template has syntax or I/O errors
+            (all failing files are collected and reported together).
+    """
+    used_variables: Set[str] = set()
+    syntax_errors = []
+    
+    for template_file in self.template_files: # Iterate over TemplateFile objects
+      if template_file.file_type == 'j2':
+        file_path = self.template_dir / template_file.relative_path
+        try:
+          with open(file_path, "r", encoding="utf-8") as f:
+            content = f.read()
+            ast = self.jinja_env.parse(content) # Use lazy-loaded jinja_env
+            # meta.find_undeclared_variables walks the AST for free variables
+            used_variables.update(meta.find_undeclared_variables(ast))
+        except (IOError, OSError) as e:
+          relative_path = file_path.relative_to(self.template_dir)
+          syntax_errors.append(f"  - {relative_path}: File I/O error: {e}")
+        except Exception as e:
+          # Collect syntax errors for Jinja2 issues
+          relative_path = file_path.relative_to(self.template_dir)
+          syntax_errors.append(f"  - {relative_path}: {e}")
+    
+    # Raise error if any syntax errors were found
+    if syntax_errors:
+      logger.error(f"Jinja2 syntax errors found in template '{self.id}'")
+      raise TemplateSyntaxError(self.id, syntax_errors)
+    
+    return used_variables
+
+  def _extract_jinja_default_values(self) -> dict[str, object]:
+    """Scan all .j2 files and extract literal arguments to the `default` filter.
+
+    Returns a mapping var_name -> literal_value for simple cases like
+    {{ var | default("value") }} or {{ var | default(123) }}.
+    This does not attempt to evaluate complex expressions.
+    """
+    defaults: dict[str, object] = {}
+
+    class _DefaultVisitor(NodeVisitor):
+      def __init__(self):
+        self.found: dict[str, object] = {}
+
+      def visit_Filter(self, node: nodes.Filter) -> None:  # type: ignore[override]
+        try:
+          if getattr(node, 'name', None) == 'default' and node.args:
+            # target variable name when filter is applied directly to a Name
+            target = None
+            if isinstance(node.node, nodes.Name):
+              target = node.node.name
+
+            # first arg literal
+            first = node.args[0]
+            if isinstance(first, nodes.Const) and target:
+              self.found[target] = first.value
+        except Exception:
+          # Be resilient to unexpected node shapes
+          pass
+        # continue traversal
+        self.generic_visit(node)
+
+    visitor = _DefaultVisitor()
+
+    for template_file in self.template_files:
+      if template_file.file_type != 'j2':
+        continue
+      file_path = self.template_dir / template_file.relative_path
+      try:
+        with open(file_path, 'r', encoding='utf-8') as f:
+          content = f.read()
+        ast = self.jinja_env.parse(content)
+        visitor.visit(ast)
+      except (IOError, OSError, yaml.YAMLError):
+        # Skip failures - this extraction is best-effort only
+        continue
+
+    return visitor.found
+
  def _filter_specs_to_used(self, used_variables: set, merged_specs: dict, module_specs: dict, template_specs: dict) -> dict:
    """Filter specs to only include variables used in templates using VariableCollection.
    
    Uses VariableCollection's native filter_to_used() method.
    Keeps sensitive variables only if they're defined in the template spec or actually used.

    Args:
        used_variables: Names referenced by the template's .j2 files.
        merged_specs: Module specs merged with template specs.
        module_specs: Module-level specs (accepted but not read here).
        template_specs: Template-level specs; variables declared here are
            always kept even when not (yet) referenced.

    Returns:
        Spec dict keyed by section, containing only the kept variables.
    """
    # Build set of variables explicitly defined in template spec
    template_defined_vars = set()
    for section_data in (template_specs or {}).values():
      if isinstance(section_data, dict) and 'vars' in section_data:
        template_defined_vars.update(section_data['vars'].keys())
    
    # Create VariableCollection from merged specs
    merged_collection = VariableCollection(merged_specs)
    
    # Filter to only used variables (and sensitive ones that are template-defined)
    # We keep sensitive variables that are either:
    # 1. Actually used in template files, OR
    # 2. Explicitly defined in the template spec (even if not yet used)
    # NOTE(review): keep_sensitive=False is passed below, so unused sensitive
    # variables survive only via template_defined_vars — confirm this matches
    # the intent described above against VariableCollection.filter_to_used().
    variables_to_keep = used_variables | template_defined_vars
    filtered_collection = merged_collection.filter_to_used(variables_to_keep, keep_sensitive=False)
    
    # Convert back to dict format
    filtered_specs = {}
    for section_key, section in filtered_collection.get_sections().items():
      filtered_specs[section_key] = section.to_dict()
    
    return filtered_specs
+
+  @staticmethod
+  def _validate_kind(template_data: dict) -> None:
+    """Validate that template has required 'kind' field.
+    
+    Args:
+        template_data: Parsed YAML data from template.yaml
+        
+    Raises:
+        ValueError: If 'kind' field is missing
+    """
+    if not template_data.get("kind"):
+      raise TemplateValidationError("Template format error: missing 'kind' field")
+
+  def _validate_variable_definitions(self, used_variables: set[str], merged_specs: dict[str, Any]) -> None:
+    """Validate that all variables used in Jinja2 content are defined in the spec."""
+    defined_variables = set()
+    for section_data in merged_specs.values():
+      if "vars" in section_data and isinstance(section_data["vars"], dict):
+        defined_variables.update(section_data["vars"].keys())
+    
+    undefined_variables = used_variables - defined_variables
+    if undefined_variables:
+      undefined_list = sorted(undefined_variables)
+      error_msg = (
+          f"Template validation error in '{self.id}': "
+          f"Variables used in template content but not defined in spec: {undefined_list}\n\n"
+          f"Please add these variables to your template's template.yaml spec. "
+          f"Each variable must have a default value.\n\n"
+          f"Example:\n"
+          f"spec:\n"
+          f"  general:\n"
+          f"    vars:\n"
+      )
+      for var_name in undefined_list:
+          error_msg += (
+              f"      {var_name}:\n"
+              f"        type: str\n"
+              f"        description: Description for {var_name}\n"
+              f"        default: <your_default_value_here>\n"
+          )
+      logger.error(error_msg)
+      raise TemplateValidationError(error_msg)
+
+  @staticmethod
+  def _create_jinja_env(searchpath: Path) -> Environment:
+    """Create sandboxed Jinja2 environment for secure template processing.
+    
+    Uses SandboxedEnvironment to prevent code injection vulnerabilities
+    when processing untrusted templates. This restricts access to dangerous
+    operations while still allowing safe template rendering.
+    
+    Returns:
+        SandboxedEnvironment configured for template processing.
+    """
+    # NOTE Use SandboxedEnvironment for security - prevents arbitrary code execution
+    return SandboxedEnvironment(
+      loader=FileSystemLoader(searchpath),
+      trim_blocks=True,
+      lstrip_blocks=True,
+      keep_trailing_newline=False,
+    )
+
+  def render(self, variables: VariableCollection) -> tuple[Dict[str, str], Dict[str, Any]]:
+    """Render all .j2 files in the template directory.
+    
+    Returns:
+        Tuple of (rendered_files, variable_values) where variable_values includes autogenerated values
+    """
+    # Use get_satisfied_values() to exclude variables from sections with unsatisfied dependencies
+    variable_values = variables.get_satisfied_values()
+    
+    # Auto-generate values for autogenerated variables that are empty
+    import secrets
+    import string
+    for section in variables.get_sections().values():
+      for var_name, variable in section.variables.items():
+        if variable.autogenerated and (variable.value is None or variable.value == ""):
+          # Generate a secure random string (32 characters by default)
+          alphabet = string.ascii_letters + string.digits
+          generated_value = ''.join(secrets.choice(alphabet) for _ in range(32))
+          variable_values[var_name] = generated_value
+          logger.debug(f"Auto-generated value for variable '{var_name}'")
+    
+    logger.debug(f"Rendering template '{self.id}' with variables: {variable_values}")
+    rendered_files = {}
+    for template_file in self.template_files: # Iterate over TemplateFile objects
+      if template_file.file_type == 'j2':
+        try:
+          template = self.jinja_env.get_template(str(template_file.relative_path)) # Use lazy-loaded jinja_env
+          rendered_content = template.render(**variable_values)
+          # Sanitize the rendered content to remove excessive blank lines
+          rendered_content = self._sanitize_content(rendered_content, template_file.output_path)
+          rendered_files[str(template_file.output_path)] = rendered_content
+        except Exception as e:
+          logger.error(f"Error rendering template file {template_file.relative_path}: {e}")
+          raise TemplateRenderError(f"Error rendering {template_file.relative_path}: {e}")
+      elif template_file.file_type == 'static':
+          # For static files, just read their content and add to rendered_files
+          # This ensures static files are also part of the output dictionary
+          file_path = self.template_dir / template_file.relative_path
+          try:
+              with open(file_path, "r", encoding="utf-8") as f:
+                  content = f.read()
+                  rendered_files[str(template_file.output_path)] = content
+          except (IOError, OSError) as e:
+              logger.error(f"Error reading static file {file_path}: {e}")
+              raise TemplateRenderError(f"Error reading static file {file_path}: {e}")
+          
+    return rendered_files, variable_values
+  
+  def _sanitize_content(self, content: str, file_path: Path) -> str:
+    """Sanitize rendered content by removing excessive blank lines and trailing whitespace."""
+    if not content:
+      return content
+    
+    lines = [line.rstrip() for line in content.split('\n')]
+    sanitized = []
+    prev_blank = False
+    
+    for line in lines:
+      is_blank = not line
+      if is_blank and prev_blank:
+        continue  # Skip consecutive blank lines
+      sanitized.append(line)
+      prev_blank = is_blank
+    
+    # Remove leading blanks and ensure single trailing newline
+    return '\n'.join(sanitized).lstrip('\n').rstrip('\n') + '\n'
+
+  
  @property
  def template_files(self) -> List[TemplateFile]:
      """Files that make up the template, lazily collected on first access."""
      if self.__template_files is None:
          self._collect_template_files() # Populate self.__template_files
      return self.__template_files

  @property
  def template_specs(self) -> dict:
      """Get the spec section from template YAML data."""
      return self._template_data.get("spec", {})

  @property
  def module_specs(self) -> dict:
      """Get the spec from the module definition."""
      if self.__module_specs is None:
          kind = self._template_data.get("kind")
          self.__module_specs = self._load_module_specs(kind)
      return self.__module_specs

  @property
  def merged_specs(self) -> dict:
      """Module specs overlaid with template specs, merged lazily."""
      if self.__merged_specs is None:
          self.__merged_specs = self._merge_specs(self.module_specs, self.template_specs)
      return self.__merged_specs

  @property
  def jinja_env(self) -> Environment:
      """Sandboxed Jinja2 environment rooted at the template directory (lazy)."""
      if self.__jinja_env is None:
          self.__jinja_env = self._create_jinja_env(self.template_dir)
      return self.__jinja_env

  @property
  def used_variables(self) -> Set[str]:
      """Variable names referenced by the template's .j2 files (lazy)."""
      if self.__used_variables is None:
          self.__used_variables = self._extract_all_used_variables()
      return self.__used_variables
+
  @property
  def variables(self) -> VariableCollection:
      """Validated, filtered variable collection for this template (lazy).

      On first access: validates that every used variable is declared,
      filters the merged specs to used/template-defined variables,
      backfills defaults discovered via the Jinja `default()` filter,
      then sorts the sections for display.
      """
      if self.__variables is None:
          # Validate that all used variables are defined
          self._validate_variable_definitions(self.used_variables, self.merged_specs)
          # Filter specs to only used variables
          filtered_specs = self._filter_specs_to_used(self.used_variables, self.merged_specs, self.module_specs, self.template_specs)

          # Best-effort: extract literal defaults from Jinja `default()` filter and
          # merge them into the filtered_specs when no default exists there.
          try:
            jinja_defaults = self._extract_jinja_default_values()
            for section_key, section_data in filtered_specs.items():
              # Guard against None from empty YAML sections
              vars_dict = section_data.get('vars') or {}
              for var_name, var_data in vars_dict.items():
                if 'default' not in var_data or var_data.get('default') in (None, ''):
                  if var_name in jinja_defaults:
                    var_data['default'] = jinja_defaults[var_name]
          except (KeyError, TypeError, AttributeError):
            # Keep behavior stable on any extraction errors
            pass

          self.__variables = VariableCollection(filtered_specs)
          # Sort sections: required first, then enabled, then disabled
          self.__variables.sort_sections()
      return self.__variables

+ 297 - 0
cli/core/validators.py

@@ -0,0 +1,297 @@
+"""Semantic validators for template content.
+
+This module provides validators for specific file types and formats,
+enabling semantic validation beyond Jinja2 syntax checking.
+"""
+
+from __future__ import annotations
+
+import logging
+from abc import ABC, abstractmethod
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+import yaml
+from rich.console import Console
+
+logger = logging.getLogger(__name__)
+console = Console()
+
+
class ValidationResult:
    """Accumulates errors, warnings, and info messages from one validation run."""

    def __init__(self):
        # Messages grouped by severity, in insertion order.
        self.errors: List[str] = []
        self.warnings: List[str] = []
        self.info: List[str] = []

    def add_error(self, message: str) -> None:
        """Record an error and log it."""
        self.errors.append(message)
        logger.error(f"Validation error: {message}")

    def add_warning(self, message: str) -> None:
        """Record a warning and log it."""
        self.warnings.append(message)
        logger.warning(f"Validation warning: {message}")

    def add_info(self, message: str) -> None:
        """Record an informational message and log it."""
        self.info.append(message)
        logger.info(f"Validation info: {message}")

    @property
    def is_valid(self) -> bool:
        """True when no errors were recorded."""
        return not self.errors

    @property
    def has_warnings(self) -> bool:
        """True when at least one warning was recorded."""
        return bool(self.warnings)

    def display(self, context: str = "Validation") -> None:
        """Print the collected results to the console, grouped by severity."""
        if self.errors:
            console.print(f"\n[red]✗ {context} Failed:[/red]")
            for error in self.errors:
                console.print(f"  [red]• {error}[/red]")

        if self.warnings:
            console.print(f"\n[yellow]⚠ {context} Warnings:[/yellow]")
            for warning in self.warnings:
                console.print(f"  [yellow]• {warning}[/yellow]")

        if self.info:
            console.print(f"\n[blue]ℹ {context} Info:[/blue]")
            for info_msg in self.info:
                console.print(f"  [blue]• {info_msg}[/blue]")

        if self.is_valid and not self.has_warnings:
            console.print(f"\n[green]✓ {context} Passed[/green]")
+
+
class ContentValidator(ABC):
    """Abstract base class for content validators.

    Subclasses implement can_validate() to claim a file and validate() to
    inspect its content. Instances are meant to be registered with
    ValidatorRegistry, which asks each validator in registration order.
    """
    
    @abstractmethod
    def validate(self, content: str, file_path: str) -> ValidationResult:
        """Validate content and return results.
        
        Args:
            content: The file content to validate
            file_path: Path to the file (for error messages)
            
        Returns:
            ValidationResult with errors, warnings, and info
        """
        pass
    
    @abstractmethod
    def can_validate(self, file_path: str) -> bool:
        """Check if this validator can validate the given file.
        
        Args:
            file_path: Path to the file
            
        Returns:
            True if this validator can handle the file
        """
        pass
+
+
class DockerComposeValidator(ContentValidator):
    """Validator for Docker Compose files."""

    COMPOSE_FILENAMES = {
        "docker-compose.yml",
        "docker-compose.yaml",
        "compose.yml",
        "compose.yaml",
    }

    def can_validate(self, file_path: str) -> bool:
        """Return True for recognized Compose filenames (case-insensitive)."""
        return Path(file_path).name.lower() in self.COMPOSE_FILENAMES

    def validate(self, content: str, file_path: str) -> ValidationResult:
        """Validate Docker Compose file structure."""
        result = ValidationResult()

        try:
            data = yaml.safe_load(content)

            if not isinstance(data, dict):
                result.add_error("Docker Compose file must be a YAML dictionary")
                return result

            # 'version' is optional in Compose v2; just note its absence.
            if "version" not in data:
                result.add_info("No 'version' field specified (using Compose v2 format)")

            # 'services' is the one mandatory top-level section.
            if "services" not in data:
                result.add_error("Missing required 'services' section")
                return result

            services = data.get("services", {})
            if not isinstance(services, dict):
                result.add_error("'services' must be a dictionary")
                return result

            if not services:
                result.add_warning("No services defined")

            for service_name, service_config in services.items():
                self._validate_service(service_name, service_config, result)

            # Optional top-level sections: report their sizes when present.
            for section, label in (("networks", "network"), ("volumes", "volume")):
                if section in data:
                    entries = data.get(section, {})
                    if entries and isinstance(entries, dict):
                        result.add_info(f"Defines {len(entries)} {label}(s)")

        except yaml.YAMLError as e:
            result.add_error(f"YAML parsing error: {e}")
        except Exception as e:
            result.add_error(f"Unexpected validation error: {e}")

        return result

    def _validate_service(self, name: str, config: Any, result: ValidationResult) -> None:
        """Validate a single service configuration."""
        if not isinstance(config, dict):
            result.add_error(f"Service '{name}': configuration must be a dictionary")
            return

        # Every service needs a source: a prebuilt image or a build context.
        if "image" not in config and "build" not in config:
            result.add_error(f"Service '{name}': must specify 'image' or 'build'")

        if "restart" in config:
            restart_value = config["restart"]
            valid_restart_policies = ["no", "always", "on-failure", "unless-stopped"]
            if restart_value not in valid_restart_policies:
                result.add_warning(
                    f"Service '{name}': restart policy '{restart_value}' may be invalid. "
                    f"Valid values: {', '.join(valid_restart_policies)}"
                )

        env = config.get("environment")
        if isinstance(env, list):
            # List-form entries look like "KEY=value"; flag repeated keys.
            keys = [entry.split("=")[0] for entry in env if isinstance(entry, str) and "=" in entry]
            duplicates = {k for k in keys if keys.count(k) > 1}
            if duplicates:
                result.add_warning(
                    f"Service '{name}': duplicate environment variables: {', '.join(duplicates)}"
                )

        if "ports" in config and not isinstance(config["ports"], list):
            result.add_warning(f"Service '{name}': 'ports' should be a list")
+
+
class YAMLValidator(ContentValidator):
    """Basic YAML syntax validator."""

    def can_validate(self, file_path: str) -> bool:
        """Return True for files with a .yml or .yaml extension."""
        return Path(file_path).suffix.lower() in [".yml", ".yaml"]

    def validate(self, content: str, file_path: str) -> ValidationResult:
        """Check that the content parses as YAML."""
        result = ValidationResult()
        try:
            yaml.safe_load(content)
        except yaml.YAMLError as e:
            result.add_error(f"YAML parsing error: {e}")
        else:
            result.add_info("YAML syntax is valid")
        return result
+
+
class ValidatorRegistry:
    """Holds content validators and dispatches files to the right one."""

    def __init__(self):
        self.validators: List[ContentValidator] = []
        self._register_default_validators()

    def _register_default_validators(self) -> None:
        """Register built-in validators, most specific first."""
        for validator in (DockerComposeValidator(), YAMLValidator()):
            self.register(validator)

    def register(self, validator: ContentValidator) -> None:
        """Add a validator to the registry.

        Args:
            validator: The validator to register
        """
        self.validators.append(validator)
        logger.debug(f"Registered validator: {validator.__class__.__name__}")

    def get_validator(self, file_path: str) -> Optional[ContentValidator]:
        """Return the first registered validator that accepts the file.

        Registration order matters: specific validators (Docker Compose)
        are registered before generic ones (plain YAML).

        Args:
            file_path: Path to the file

        Returns:
            ContentValidator if found, None otherwise
        """
        return next(
            (v for v in self.validators if v.can_validate(file_path)),
            None,
        )

    def validate_file(self, content: str, file_path: str) -> ValidationResult:
        """Validate content with the best-matching validator.

        Falls back to an informational no-op result when no validator
        claims the file.

        Args:
            content: The file content
            file_path: Path to the file

        Returns:
            ValidationResult with validation results
        """
        validator = self.get_validator(file_path)
        if validator is not None:
            logger.debug(f"Validating {file_path} with {validator.__class__.__name__}")
            return validator.validate(content, file_path)

        result = ValidationResult()
        result.add_info(f"No semantic validator available for {Path(file_path).suffix} files")
        return result
+
+
# Global registry instance
# NOTE: created once at import time and shared by every caller.
_registry = ValidatorRegistry()


def get_validator_registry() -> ValidatorRegistry:
    """Get the global validator registry."""
    return _registry

+ 377 - 0
cli/core/variable.py

@@ -0,0 +1,377 @@
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional, Set
+from urllib.parse import urlparse
+import logging
+import re
+
+logger = logging.getLogger(__name__)
+
# String forms accepted as booleans by Variable._convert_bool (case-insensitive).
TRUE_VALUES = {"true", "1", "yes", "on"}
FALSE_VALUES = {"false", "0", "no", "off"}
# BUGFIX: the pattern previously doubled the backslashes inside a raw string
# (r"...\\s...\\....") so "\\s" matched a literal backslash plus the letter
# 's' and "\\." required a literal backslash — rejecting virtually every real
# address (e.g. "user@example.com"). Single backslashes restore the intended
# "no whitespace / no @" classes and the literal dot.
EMAIL_REGEX = re.compile(r"^[^@\s]+@[^@\s]+\.[^@\s]+$")
+
+
class Variable:
  """Represents a single templating variable with lightweight validation."""

  def __init__(self, data: dict[str, Any]) -> None:
    """Initialize Variable from a dictionary containing variable specification.
    
    Args:
        data: Dictionary containing variable specification with required 'name' key
              and optional keys: description, type, options, prompt, value, default, section, origin
              
    Raises:
        ValueError: If data is not a dict, missing 'name' key, or has invalid default value
    """
    # Validate input
    if not isinstance(data, dict):
      raise ValueError("Variable data must be a dictionary")
    
    if "name" not in data:
      raise ValueError("Variable data must contain 'name' key")
    
    # Track which fields were explicitly provided in source data
    self._explicit_fields: Set[str] = set(data.keys())
    
    # Initialize fields
    self.name: str = data["name"]
    # 'display' is accepted as a legacy alias for 'description'
    self.description: Optional[str] = data.get("description") or data.get("display", "")
    self.type: str = data.get("type", "str")
    self.options: Optional[List[Any]] = data.get("options", [])
    self.prompt: Optional[str] = data.get("prompt")
    # An explicit non-None 'value' wins; otherwise fall back to 'default'
    self.value: Any = data.get("value") if data.get("value") is not None else data.get("default")
    self.origin: Optional[str] = data.get("origin")
    self.sensitive: bool = data.get("sensitive", False)
    # Optional extra explanation used by interactive prompts
    self.extra: Optional[str] = data.get("extra")
    # Flag indicating this variable should be auto-generated when empty
    self.autogenerated: bool = data.get("autogenerated", False)
    # Original value before config override (used for display)
    self.original_value: Optional[Any] = data.get("original_value")

    # Validate and convert the default/initial value if present
    if self.value is not None:
      try:
        self.value = self.convert(self.value)
      except ValueError as exc:
        raise ValueError(f"Invalid default for variable '{self.name}': {exc}")


  def convert(self, value: Any) -> Any:
    """Validate and convert a raw value based on the variable type.
    
    This method performs type conversion but does NOT check if the value
    is required. Use validate_and_convert() for full validation including
    required field checks.

    Raises:
        ValueError: If the value cannot be converted to this variable's type.
    """
    if value is None:
      return None

    # Treat empty strings as None to avoid storing "" for missing values.
    if isinstance(value, str) and value.strip() == "":
      return None

    # Type conversion mapping for cleaner code
    converters = {
      "bool": self._convert_bool,
      "int": self._convert_int, 
      "float": self._convert_float,
      "enum": self._convert_enum,
      "url": self._convert_url,
      "email": self._convert_email,
    }
    
    converter = converters.get(self.type)
    if converter:
      return converter(value)
    
    # Default to string conversion
    return str(value)
  
  def validate_and_convert(self, value: Any, check_required: bool = True) -> Any:
    """Validate and convert a value with comprehensive checks.
    
    This method combines type conversion with validation logic including
    required field checks. It's the recommended method for user input validation.
    
    Args:
        value: The raw value to validate and convert
        check_required: If True, raises ValueError for required fields with empty values
        
    Returns:
        The converted and validated value
        
    Raises:
        ValueError: If validation fails (invalid format, required field empty, etc.)
        
    Examples:
        # Basic validation
        var.validate_and_convert("example@email.com")  # Returns validated email
        
        # Required field validation
        var.validate_and_convert("", check_required=True)  # Raises ValueError if required
        
        # Autogenerated variables - allow empty values
        var.validate_and_convert("", check_required=False)  # Returns None for autogeneration
    """
    # First, convert the value using standard type conversion
    converted = self.convert(value)
    
    # Special handling for autogenerated variables
    # Allow empty values as they will be auto-generated later
    # ("*auto" is the sentinel shown by get_normalized_default)
    if self.autogenerated and (converted is None or (isinstance(converted, str) and (converted == "" or converted == "*auto"))):
      return None  # Signal that auto-generation should happen
    
    # Check if this is a required field and the value is empty
    if check_required and self.is_required():
      if converted is None or (isinstance(converted, str) and converted == ""):
        raise ValueError("This field is required and cannot be empty")
    
    return converted

  def _convert_bool(self, value: Any) -> bool:
    """Convert value to boolean."""
    if isinstance(value, bool):
      return value
    if isinstance(value, str):
      lowered = value.strip().lower()
      if lowered in TRUE_VALUES:
        return True
      if lowered in FALSE_VALUES:
        return False
    raise ValueError("value must be a boolean (true/false)")

  def _convert_int(self, value: Any) -> Optional[int]:
    """Convert value to integer."""
    if isinstance(value, int):
      return value
    if isinstance(value, str) and value.strip() == "":
      return None
    try:
      return int(value)
    except (TypeError, ValueError) as exc:
      raise ValueError("value must be an integer") from exc

  def _convert_float(self, value: Any) -> Optional[float]:
    """Convert value to float."""
    if isinstance(value, float):
      return value
    if isinstance(value, str) and value.strip() == "":
      return None
    try:
      return float(value)
    except (TypeError, ValueError) as exc:
      raise ValueError("value must be a float") from exc

  def _convert_enum(self, value: Any) -> Optional[str]:
    """Convert value to a string and check it against the allowed options."""
    if value == "":
      return None
    val = str(value)
    if self.options and val not in self.options:
      raise ValueError(f"value must be one of: {', '.join(self.options)}")
    return val

  def _convert_url(self, value: Any) -> Optional[str]:
    """Validate value as a URL with both scheme and host; None when empty."""
    val = str(value).strip()
    if not val:
      return None
    parsed = urlparse(val)
    if not (parsed.scheme and parsed.netloc):
      raise ValueError("value must be a valid URL (include scheme and host)")
    return val

  def _convert_email(self, value: Any) -> Optional[str]:
    """Validate value against the module-level EMAIL_REGEX; None when empty."""
    val = str(value).strip()
    if not val:
      return None
    if not EMAIL_REGEX.fullmatch(val):
      raise ValueError("value must be a valid email address")
    return val

  def to_dict(self) -> Dict[str, Any]:
    """Serialize Variable to a dictionary for storage."""
    result = {}
    
    # Always include type
    if self.type:
      result['type'] = self.type
    
    # Include value/default if not None
    if self.value is not None:
      result['default'] = self.value
    
    # Include string fields if truthy
    for field in ('description', 'prompt', 'extra', 'origin'):
      if value := getattr(self, field):
        result[field] = value
    
    # Include boolean/list fields if truthy (but empty list is OK for options)
    if self.sensitive:
      result['sensitive'] = True
    if self.autogenerated:
      result['autogenerated'] = True
    if self.options is not None:  # Allow empty list
      result['options'] = self.options
    
    return result
  
  def get_display_value(self, mask_sensitive: bool = True, max_length: int = 30, show_none: bool = True) -> str:
    """Get formatted display value with optional masking and truncation.
    
    Args:
        mask_sensitive: If True, mask sensitive values with asterisks
        max_length: Maximum length before truncation (0 = no limit)
        show_none: If True, display "(none)" for None values instead of empty string
        
    Returns:
        Formatted string representation of the value (uses Rich markup)
    """
    if self.value is None or self.value == "":
      # Show (*auto) for autogenerated variables instead of (none)
      if self.autogenerated:
        return "[dim](*auto)[/dim]" if show_none else ""
      return "[dim](none)[/dim]" if show_none else ""
    
    # Mask sensitive values
    if self.sensitive and mask_sensitive:
      return "********"
    
    # Convert to string
    display = str(self.value)
    
    # Truncate if needed
    if max_length > 0 and len(display) > max_length:
      return display[:max_length - 3] + "..."
    
    return display
  
  def get_normalized_default(self) -> Any:
    """Get normalized default value suitable for prompts and display."""
    try:
      typed = self.convert(self.value)
    except Exception:
      # Fall back to the raw value when conversion fails
      typed = self.value
    
    # Autogenerated: return display hint
    if self.autogenerated and not typed:
      return "*auto"
    
    # Type-specific handlers
    if self.type == "enum":
      if not self.options:
        return typed
      # Fall back to the first option when the value is missing or invalid
      return self.options[0] if typed is None or str(typed) not in self.options else str(typed)
    
    if self.type == "bool":
      return typed if isinstance(typed, bool) else (None if typed is None else bool(typed))
    
    if self.type == "int":
      try:
        return int(typed) if typed not in (None, "") else None
      except Exception:
        return None
    
    # Default: return string or None
    return None if typed is None else str(typed)
  
  def get_prompt_text(self) -> str:
    """Get formatted prompt text for interactive input.
    
    Returns:
        Prompt text with optional type hints and descriptions
    """
    prompt_text = self.prompt or self.description or self.name
    
    # Add type hint for semantic types if there's a default
    if self.value is not None and self.type in ["email", "url"]:
      prompt_text += f" ({self.type})"
    
    return prompt_text
  
  def get_validation_hint(self) -> Optional[str]:
    """Get validation hint for prompts (e.g., enum options).
    
    Returns:
        Formatted hint string or None if no hint needed
    """
    hints = []
    
    # Add enum options
    if self.type == "enum" and self.options:
      hints.append(f"Options: {', '.join(self.options)}")
    
    # Add extra help text
    if self.extra:
      hints.append(self.extra)
    
    return " — ".join(hints) if hints else None
  
  def is_required(self) -> bool:
    """Check if this variable requires a value (cannot be empty/None).
    
    A variable is considered required if:
    - It doesn't have a default value (value is None)
    - It's not marked as autogenerated (which can be empty and generated later)
    - It's not a boolean type (booleans default to False if not set)
    
    Returns:
        True if the variable must have a non-empty value, False otherwise
    """
    # Autogenerated variables can be empty (will be generated later)
    if self.autogenerated:
      return False
    
    # Boolean variables always have a value (True or False)
    if self.type == "bool":
      return False
    
    # Variables with a default value are not required
    if self.value is not None:
      return False
    
    # No default value and not autogenerated = required
    return True
  
  def clone(self, update: Optional[Dict[str, Any]] = None) -> 'Variable':
    """Create a deep copy of the variable with optional field updates.
    
    This is more efficient than converting to dict and back when copying variables.
    
    Args:
        update: Optional dictionary of field updates to apply to the clone
        
    Returns:
        New Variable instance with copied data
        
    Example:
        var2 = var1.clone(update={'origin': 'template'})
    """
    # NOTE(review): an empty options list becomes None here, which __init__
    # then replaces with a fresh [] default — confirm this round-trip is
    # intentional.
    data = {
      'name': self.name,
      'type': self.type,
      'value': self.value,
      'description': self.description,
      'prompt': self.prompt,
      'options': self.options.copy() if self.options else None,
      'origin': self.origin,
      'sensitive': self.sensitive,
      'extra': self.extra,
      'autogenerated': self.autogenerated,
      'original_value': self.original_value,
    }
    
    # Apply updates if provided
    if update:
      data.update(update)
    
    # Create new variable
    cloned = Variable(data)
    
    # Preserve explicit fields from original, and add any update keys
    cloned._explicit_fields = self._explicit_fields.copy()
    if update:
      cloned._explicit_fields.update(update.keys())
    
    return cloned

+ 0 - 0
cli/modules/__init__.py


+ 292 - 0
cli/modules/compose.py

@@ -0,0 +1,292 @@
+from collections import OrderedDict
+
+from ..core.module import Module
+from ..core.registry import registry
+
+# Section/variable specification for the compose module.
+#
+# Top-level keys are section ids. Each section may carry:
+#   title       - human-readable heading
+#   toggle      - name of the bool var that switches the section on/off
+#   needs       - id of another section this one depends on (see traefik_tls)
+#   description - optional help text for the section
+#   vars        - mapping of variable name -> spec dict with keys:
+#                 description, type (str/int/bool/enum), default, options
+#                 (for enum), sensitive, autogenerated, extra.
+# NOTE(review): the consumers of this schema live in core.module/core.registry
+# (not visible here); field semantics above are inferred from this literal --
+# confirm against those modules.
+# OrderedDict keeps section order stable for prompting; plain dicts also
+# preserve insertion order on Python 3.7+, so the wrapper is belt-and-braces.
+spec = OrderedDict(
+    {
+      "general": {
+        "title": "General",
+        "vars": {
+          "service_name": {
+            "description": "Service name",
+            "type": "str",
+          },
+          "container_name": {
+            "description": "Container name",
+            "type": "str",
+          },
+          "container_hostname": {
+            "description": "Container internal hostname",
+            "type": "str",
+          },
+          "container_timezone": {
+            "description": "Container timezone (e.g., Europe/Berlin)",
+            "type": "str",
+            "default": "UTC",
+          },
+          "user_uid": {
+            "description": "User UID for container process",
+            "type": "int",
+            "default": 1000,
+          },
+          "user_gid": {
+            "description": "User GID for container process",
+            "type": "int",
+            "default": 1000,
+          },
+          "container_loglevel": {
+            "description": "Container log level",
+            "type": "enum",
+            "options": ["debug", "info", "warn", "error"],
+            "default": "info",
+          },
+          "restart_policy": {
+            "description": "Container restart policy",
+            "type": "enum",
+            "options": ["unless-stopped", "always", "on-failure", "no"],
+            "default": "unless-stopped",
+          },
+        },
+      },
+      "network": {
+        "title": "Network",
+        "toggle": "network_enabled",
+        "vars": {
+          "network_enabled": {
+            "description": "Enable custom network block",
+            "type": "bool",
+            "default": False,
+          },
+          "network_name": {
+            "description": "Docker network name",
+            "type": "str",
+            "default": "bridge",
+          },
+          "network_external": {
+            "description": "Use existing Docker network",
+            "type": "bool",
+            "default": True,
+          },
+        },
+      },
+      "ports": {
+        "title": "Ports",
+        "toggle": "ports_enabled",
+        "vars": {
+          "ports_enabled": {
+            "description": "Expose ports via 'ports' mapping",
+            "type": "bool",
+            "default": True,
+          }
+        },
+      },
+      "traefik": {
+        "title": "Traefik",
+        "toggle": "traefik_enabled",
+        "description": "Traefik routes external traffic to your service.",
+        "vars": {
+          "traefik_enabled": {
+            "description": "Enable Traefik reverse proxy integration",
+            "type": "bool",
+            "default": False,
+          },
+          "traefik_network": {
+            "description": "Traefik network name",
+            "type": "str",
+            "default": "traefik",
+          },
+          "traefik_host": {
+            "description": "Domain name for your service (e.g., app.example.com)",
+            "type": "str",
+          },
+          "traefik_entrypoint": {
+            "description": "HTTP entrypoint (non-TLS)",
+            "type": "str",
+            "default": "web",
+          },
+        },
+      },
+      # Depends on the "traefik" section via "needs".
+      "traefik_tls": {
+        "title": "Traefik TLS/SSL",
+        "toggle": "traefik_tls_enabled",
+        "needs": "traefik",
+        "description": "Enable HTTPS/TLS for Traefik with certificate management.",
+        "vars": {
+          "traefik_tls_enabled": {
+            "description": "Enable HTTPS/TLS",
+            "type": "bool",
+            "default": True,
+          },
+          "traefik_tls_entrypoint": {
+            "description": "TLS entrypoint",
+            "type": "str",
+            "default": "websecure",
+          },
+          "traefik_tls_certresolver": {
+            "description": "Traefik certificate resolver name",
+            "type": "str",
+            "default": "cloudflare",
+          },
+        },
+      },
+      "swarm": {
+        "title": "Docker Swarm",
+        "toggle": "swarm_enabled",
+        "description": "Deploy service in Docker Swarm mode with replicas.",
+        "vars": {
+          "swarm_enabled": {
+            "description": "Enable Docker Swarm mode",
+            "type": "bool",
+            "default": False,
+          },
+          "swarm_replicas": {
+            "description": "Number of replicas in Swarm",
+            "type": "int",
+            "default": 1,
+          },
+          "swarm_placement_mode": {
+            "description": "Swarm placement mode",
+            "type": "enum",
+            "options": ["global", "replicated"],
+            "default": "replicated"
+          },
+          "swarm_placement_host": {
+            "description": "Limit placement to specific node",
+            "type": "str",
+          }
+        },
+      },
+      "database": {
+        "title": "Database",
+        "toggle": "database_enabled",
+        "description": "Connect to external database (PostgreSQL or MySQL)",
+        "vars": {
+          "database_enabled": {
+            "description": "Enable external database integration",
+            "type": "bool",
+            "default": False,
+          },
+          "database_type": {
+            "description": "Database type",
+            "type": "enum",
+            "options": ["postgres", "mysql"],
+            "default": "postgres",
+          },
+          "database_external": {
+            "description": "Use an external database server?",
+            "extra": "If 'no', a database container will be created in the compose project.",
+            "type": "bool",
+            "default": False,
+          },
+          "database_host": {
+            "description": "Database host",
+            "type": "str",
+            "default": "database",
+          },
+          # NOTE(review): no default port here; presumably derived from
+          # database_type (5432 postgres / 3306 mysql) by the consumer -- confirm.
+          "database_port": {
+            "description": "Database port",
+            "type": "int"
+          },
+          "database_name": {
+            "description": "Database name",
+            "type": "str",
+          },
+          "database_user": {
+            "description": "Database user",
+            "type": "str",
+          },
+          # Sensitive and autogenerated: may stay empty and be generated later.
+          "database_password": {
+            "description": "Database password",
+            "type": "str",
+            "default": "",
+            "sensitive": True,
+            "autogenerated": True,
+          },
+        },
+      },
+      "email": {
+        "title": "Email Server",
+        "toggle": "email_enabled",
+        "description": "Configure email server for notifications and user management.",
+        "vars": {
+          "email_enabled": {
+            "description": "Enable email server configuration",
+            "type": "bool",
+            "default": False,
+          },
+          "email_host": {
+            "description": "SMTP server hostname",
+            "type": "str",
+          },
+          "email_port": {
+            "description": "SMTP server port",
+            "type": "int",
+            "default": 587,
+          },
+          "email_username": {
+            "description": "SMTP username",
+            "type": "str",
+          },
+          "email_password": {
+            "description": "SMTP password",
+            "type": "str",
+            "sensitive": True,
+          },
+          "email_from": {
+            "description": "From email address",
+            "type": "str",
+          },
+          "email_use_tls": {
+            "description": "Use TLS encryption",
+            "type": "bool",
+            "default": True,
+          },
+          "email_use_ssl": {
+            "description": "Use SSL encryption",
+            "type": "bool",
+            "default": False,
+          }
+        },
+      },
+      "authentik": {
+        "title": "Authentik SSO",
+        "toggle": "authentik_enabled",
+        "description": "Integrate with Authentik for Single Sign-On authentication.",
+        "vars": {
+          "authentik_enabled": {
+            "description": "Enable Authentik SSO integration",
+            "type": "bool",
+            "default": False,
+          },
+          "authentik_url": {
+            "description": "Authentik base URL (e.g., https://auth.example.com)",
+            "type": "str",
+          },
+          "authentik_slug": {
+            "description": "Authentik application slug",
+            "type": "str",
+          },
+          "authentik_client_id": {
+            "description": "OAuth client ID from Authentik provider",
+            "type": "str",
+          },
+          "authentik_client_secret": {
+            "description": "OAuth client secret from Authentik provider",
+            "type": "str",
+            "sensitive": True,
+          },
+        },
+      },
+    }
+  )
+
+
+class ComposeModule(Module):
+  """Docker Compose module.
+
+  Declares only the module's identity (``name``/``description``); all
+  behavior is inherited from the ``Module`` base class, which is defined
+  elsewhere (cli/core/module.py) and not visible here.
+  """
+
+  # Module identifier used for lookup/registration.
+  name = "compose"
+  # Short human-readable summary shown by the CLI.
+  description = "Manage Docker Compose configurations"
+
+
+# Register the module with the global registry so the CLI can discover it.
+registry.register(ComposeModule)

+ 0 - 42
docker-compose/alloy/compose.yaml

@@ -1,42 +0,0 @@
----
-services:
-  alloy:
-    image: grafana/alloy:v1.10.2
-    container_name: alloy
-    hostname: your-server-name
-    command:
-      - run
-      - --server.http.listen-addr=0.0.0.0:12345
-      - --storage.path=/var/lib/alloy/data
-      - /etc/alloy/config.alloy
-    ports:
-      - "12345:12345"
-    volumes:
-      - ./config.alloy:/etc/alloy/config.alloy
-      - alloy_data:/var/lib/alloy/data
-      - /:/rootfs:ro
-      - /run:/run:ro
-      - /var/log:/var/log:ro
-      - /sys:/sys:ro
-      - /var/lib/docker/:/var/lib/docker/:ro
-      - /run/udev/data:/run/udev/data:ro
-    networks:
-      - frontend
-    labels:
-      - traefik.enable=true
-      - traefik.http.services.alloy.loadbalancer.server.port=12345
-      - traefik.http.services.alloy.loadbalancer.server.scheme=http
-      - traefik.http.routers.alloy.service=alloy
-      - traefik.http.routers.alloy.rule=Host(`alloy.home.arpa`)
-      - traefik.http.routers.alloy.entrypoints=websecure
-      - traefik.http.routers.alloy.tls=true
-      - traefik.http.routers.alloy.tls.certresolver=cloudflare
-    restart: unless-stopped
-
-volumes:
-  alloy_data:
-    driver: local
-
-networks:
-  frontend:
-    external: true

+ 0 - 43
docker-compose/ansiblesemaphore/compose.yaml

@@ -1,43 +0,0 @@
----
-volumes:
-  semaphore-mysql:
-    driver: local
-services:
-  mysql:
-    image: docker.io/library/mysql:8.4
-    hostname: mysql
-    volumes:
-      - semaphore-mysql:/var/lib/mysql
-    environment:
-      - MYSQL_RANDOM_ROOT_PASSWORD=yes
-      - MYSQL_DATABASE=semaphore
-      - MYSQL_USER=semaphore
-      - MYSQL_PASSWORD=secret-password  # change!
-    restart: unless-stopped
-  semaphore:
-    container_name: ansiblesemaphore
-    image: docker.io/semaphoreui/semaphore:v2.16.29
-    user: "${UID}:${GID}"
-    ports:
-      - 3000:3000
-    environment:
-      - SEMAPHORE_DB_USER=semaphore
-      - SEMAPHORE_DB_PASS=secret-password  # change!
-      - SEMAPHORE_DB_HOST=mysql
-      - SEMAPHORE_DB_PORT=3306
-      - SEMAPHORE_DB_DIALECT=mysql
-      - SEMAPHORE_DB=semaphore
-      - SEMAPHORE_PLAYBOOK_PATH=/tmp/semaphore/
-      - SEMAPHORE_ADMIN_PASSWORD=secret-admin-password  # change!
-      - SEMAPHORE_ADMIN_NAME=admin
-      - SEMAPHORE_ADMIN_EMAIL=admin@localhost
-      - SEMAPHORE_ADMIN=admin
-      - SEMAPHORE_ACCESS_KEY_ENCRYPTION=  # add to your access key encryption !
-      - ANSIBLE_HOST_KEY_CHECKING=false  # (optional) change to true if you want to enable host key checking
-    volumes:
-      - ./inventory/:/inventory:ro
-      - ./authorized-keys/:/authorized-keys:ro
-      - ./config/:/etc/semaphore:rw
-    restart: unless-stopped
-    depends_on:
-      - mysql

+ 0 - 130
docker-compose/authentik/compose.yaml

@@ -1,130 +0,0 @@
----
-services:
-  server:
-    image: ghcr.io/goauthentik/server:2025.6.3
-    container_name: authentik-server
-    command: server
-    environment:
-      - AUTHENTIK_REDIS__HOST=authentik-redis
-      - AUTHENTIK_POSTGRESQL__HOST=authentik-db
-      - AUTHENTIK_POSTGRESQL__USER=${POSTGRES_USER:-authentik}
-      - AUTHENTIK_POSTGRESQL__NAME=${POSTGRES_DB:-authentik}
-      - AUTHENTIK_POSTGRESQL__PASSWORD=${POSTGRES_PASSWORD:?error}
-      # (Required)  To generate a secret key run the following command:
-      #             echo $(openssl rand -base64 32)
-      - AUTHENTIK_SECRET_KEY=${AUTHENTIK_SECRET_KEY:?error}
-      # (Optional)  Enable Error Reporting
-      - AUTHENTIK_ERROR_REPORTING__ENABLED=${AUTHENTIK_ERROR_REPORTING:-false}
-      # (Optional)  Enable Email Sending
-      #             Highly recommended to notify you about alerts and configuration issues.
-      - AUTHENTIK_EMAIL__HOST=${EMAIL_HOST:?error}
-      - AUTHENTIK_EMAIL__PORT=${EMAIL_PORT:-25}
-      - AUTHENTIK_EMAIL__USERNAME=${EMAIL_USERNAME:?error}
-      - AUTHENTIK_EMAIL__PASSWORD=${EMAIL_PASSWORD:?error}
-      - AUTHENTIK_EMAIL__USE_TLS=${EMAIL_USE_TLS:-false}
-      - AUTHENTIK_EMAIL__USE_SSL=${EMAIL_USE_SSL:-false}
-      - AUTHENTIK_EMAIL__TIMEOUT=${EMAIL_TIMEOUT:-10}
-      - AUTHENTIK_EMAIL__FROM=${EMAIL_FROM:?error}
-    ports:
-      # (Optional)  Remove these, if you're using a reverse proxy like Traefik.
-      - 9000:9000
-      - 9443:9443
-    labels:
-      # (Optional)  Enable Traefik integration for the Authentik Web UI. For more information
-      #             about integrating other services with Traefik and Authentik, see the
-      #             documentation at https://goauthentik.io/docs/outposts/integrations/traefik
-      #             and the middleware example files in `docker-compose/traefik/config`.
-      - traefik.enable=true
-      - traefik.http.services.authentik.loadbalancer.server.port=9000
-      - traefik.http.services.authentik.loadbalancer.server.scheme=http
-      - traefik.http.routers.authentik.entrypoints=websecure
-      - traefik.http.routers.authentik.rule=Host(`your-authentik-fqdn`)
-      - traefik.http.routers.authentik.tls=true
-      - traefik.http.routers.authentik.tls.certresolver=cloudflare
-    volumes:
-      - ./media:/media
-      - ./custom-templates:/templates
-    depends_on:
-      - postgres
-      - redis
-    restart: unless-stopped
-
-  worker:
-    image: ghcr.io/goauthentik/server:2025.6.3
-    container_name: authentik-worker
-    command: worker
-    environment:
-      - AUTHENTIK_REDIS__HOST=authentik-redis
-      - AUTHENTIK_POSTGRESQL__HOST=authentik-db
-      - AUTHENTIK_POSTGRESQL__USER=${POSTGRES_USER:-authentik}
-      - AUTHENTIK_POSTGRESQL__NAME=${POSTGRES_DB:-authentik}
-      - AUTHENTIK_POSTGRESQL__PASSWORD=${POSTGRES_PASSWORD:?error}
-      # (Required)  To generate a secret key run the following command:
-      #             echo $(openssl rand -base64 32)
-      - AUTHENTIK_SECRET_KEY=${AUTHENTIK_SECRET_KEY:?error}
-      # (Optional)  Enable Error Reporting
-      - AUTHENTIK_ERROR_REPORTING__ENABLED=${AUTHENTIK_ERROR_REPORTING:-false}
-      # (Optional)  Enable Email Sending
-      #             Highly recommended to notify you about alerts and configuration issues.
-      - AUTHENTIK_EMAIL__HOST=${EMAIL_HOST:?error}
-      - AUTHENTIK_EMAIL__PORT=${EMAIL_PORT:-25}
-      - AUTHENTIK_EMAIL__USERNAME=${EMAIL_USERNAME:?error}
-      - AUTHENTIK_EMAIL__PASSWORD=${EMAIL_PASSWORD:?error}
-      - AUTHENTIK_EMAIL__USE_TLS=${EMAIL_USE_TLS:-false}
-      - AUTHENTIK_EMAIL__USE_SSL=${EMAIL_USE_SSL:-false}
-      - AUTHENTIK_EMAIL__TIMEOUT=${EMAIL_TIMEOUT:-10}
-      - AUTHENTIK_EMAIL__FROM=${EMAIL_FROM:?error}
-    # (Optional)  See more for the docker socket integration here:
-    #             https://goauthentik.io/docs/outposts/integrations/docker
-    user: root
-    volumes:
-      - /run/docker.sock:/run/docker.sock
-      - ./media:/media
-      - ./certs:/certs
-      - ./custom-templates:/templates
-    depends_on:
-      - postgres
-      - redis
-    restart: unless-stopped
-
-  redis:
-    image: docker.io/library/redis:8.2.1
-    container_name: authentik-redis
-    command: --save 60 1 --loglevel warning
-    healthcheck:
-      test: ["CMD-SHELL", "redis-cli ping | grep PONG"]
-      start_period: 20s
-      interval: 30s
-      retries: 5
-      timeout: 3s
-    volumes:
-      - redis_data:/data
-    restart: unless-stopped
-
-  postgres:
-    # (Optional) Add a PostgreSQL Database for Authentik
-    #   Alternatively, you can host your PostgreSQL database externally, and
-    #   change the connection settings in the `authentik-server` and
-    #   `authentik-worker`.
-    image: docker.io/library/postgres:17.6
-    container_name: authentik-db
-    environment:
-      - POSTGRES_USER=${POSTGRES_USER:-authentik}
-      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:?error}
-      - POSTGRES_DB=${POSTGRES_DB:-authentik}
-      - TZ=${TZ:-UTC}
-    healthcheck:
-      test: ['CMD-SHELL', 'pg_isready -U "${POSTGRES_USER:-authentik}"']
-      start_period: 30s
-      interval: 10s
-      timeout: 10s
-      retries: 5
-    volumes:
-      - postgres_data:/var/lib/postgresql/data
-    restart: unless-stopped
-
-volumes:
-  postgres_data:
-    driver: local
-  redis_data:
-    driver: local

+ 0 - 12
docker-compose/bind9/compose.yaml

@@ -1,12 +0,0 @@
----
-services:
-  bind9:
-    image: docker.io/ubuntu/bind9:9.20-24.10_edge
-    container_name: bind9
-    ports:
-      - "53:53"
-    volumes:
-      - /etc/bind/:/etc/bind/
-      - /var/cache/bind:/var/cache/bind
-      - /var/lib/bind:/var/lib/bind
-    restart: unless-stopped

+ 0 - 1
docker-compose/bind9/config/example.named.conf

@@ -1 +0,0 @@
-

+ 0 - 17
docker-compose/cadvisor/compose.yaml

@@ -1,17 +0,0 @@
----
-services:
-  cadvisor:
-    image: gcr.io/cadvisor/cadvisor:v0.52.1
-    container_name: cadvisor
-    ports:
-      - 8080:8080
-    volumes:
-      - /:/rootfs:ro
-      - /run:/run:ro
-      - /sys:/sys:ro
-      - /var/lib/docker/:/var/lib/docker:ro
-      - /dev/disk/:/dev/disk:ro
-    devices:
-      - /dev/kmsg
-    privileged: true
-    restart: unless-stopped

+ 0 - 16
docker-compose/duplicati/compose.yaml

@@ -1,16 +0,0 @@
----
-services:
-  duplicati:
-    image: lscr.io/linuxserver/duplicati:2.1.0
-    container_name: duplicati
-    environment:
-      - PUID=1000
-      - PGID=1000
-      - TZ=Europe/Berlin
-    volumes:
-      - /AmberPRO/duplicati/config:/config
-      - /Backups:/backups
-      - /:/source
-    ports:
-      - 8200:8200
-    restart: unless-stopped

+ 0 - 1
docker-compose/factory/README.md

@@ -1 +0,0 @@
-

+ 0 - 23
docker-compose/factory/runner-pool/compose.yaml

@@ -1,23 +0,0 @@
----
-services:
-  refactr-runner:
-    container_name: factory-runnerpool-prod-1
-    image: docker.io/refactr/runner-pool:v0.153.4
-    user: root
-    volumes:
-      - /run/docker.sock:/run/docker.sock
-      - ./config.json:/etc/runner-agent.json
-    # stdin_open: true
-    # tty: true
-    environment:
-      - ENVIRONMENT=eval
-      - LOG_LEVEL=debug
-      - RUNNER_MANAGER_ID=${RUNNER_MANAGER_ID}
-      - RUNNER_MANAGER_KEY=${RUNNER_MANAGER_KEY}
-      - CONFIG_PATH=/etc/runner-agent.json
-      - NEW_RELIC_ENABLED=false
-      - NEW_RELIC_APP_NAME=factory-runnerpool-prod-1
-      - RUNNER_LOCAL_DOCKER_IMAGE_REGISTRY=docker.io
-      - RUNNER_LOCAL_DOCKER_IMAGE_REPOSITORY=refactr/runner
-      - RUNNER_LOCAL_DOCKER_IMAGE_TAG=latest
-    restart: unless-stopped

+ 0 - 8
docker-compose/gitea/.env.example

@@ -1,8 +0,0 @@
-# Environment Variable Example File
-# ---
-# Add internal database credentials here...
-# POSTGRES_HOST     = "your-database-host"
-# POSTGRES_PORT     = "your-database-port"
-POSTGRES_DB       = "your-database-name"
-POSTGRES_USER     = "your-database-user"
-POSTGRES_PASSWORD = "your-database-password"

+ 0 - 90
docker-compose/gitea/compose.yaml

@@ -1,90 +0,0 @@
----
-services:
-  server:
-    image: docker.io/gitea/gitea:1.24.6
-    container_name: gitea-server
-    environment:
-      - USER_UID=1000
-      - USER_GID=1000
-      # -- Change your database settings here...
-      # --> PostgreSQL
-      - GITEA__database__DB_TYPE=postgres
-      - GITEA__database__HOST=${POSTGRES_HOST:-db}:${POSTGRES_PORT:-5432}
-      - GITEA__database__NAME=${POSTGRES_DB:?POSTGRES_DB not set}
-      - GITEA__database__USER=${POSTGRES_USER:?POSTGRES_USER not set}
-      - GITEA__database__PASSWD=${POSTGRES_PASSWORD:?POSTGRES_PASSWORD not set}
-      # <--
-      # --> OR MySQL
-      # - GITEA__database__DB_TYPE=mysql
-      # - GITEA__database__HOST=db:3306
-      # - GITEA__database__NAME=${MYSQL_DATABASE:?MYSQL_DATABASE not set}
-      # - GITEA__database__USER=${MYSQL_USER:?MYSQL_USER not set}
-      # - GITEA__database__PASSWD=${MYSQL_PASSWORD:?MYSQL_PASSWORD not set}
-      # <--
-      # -- (Optional) Change your server settings here...
-      - GITEA__server__SSH_PORT=2221  # <-- (Optional) Replace with your desired SSH port
-      - GITEA__server__ROOT_URL=http://your-fqdn  # <-- Replace with your FQDN
-    # --> (Optional) When using traefik...
-    # networks:
-    #   - frontend
-    # <--
-    # --> (Optional) When using an internal database...
-    #   - backend
-    # <--
-    volumes:
-      - gitea-data:/data
-      - /etc/timezone:/etc/timezone:ro
-      - /etc/localtime:/etc/localtime:ro
-    ports:
-      # --> (Optional) Remove when using traefik...
-      - "3000:3000"
-      # <--
-      - "2221:22"  # <-- (Optional) Replace with your desired SSH port
-      # --> (Optional) When using internal database...
-    # depends_on:
-    #   - db
-    # <--
-    # --> (Optional) When using traefik...
-    # labels:
-    #   - traefik.enable=true
-    #   - traefik.http.services.gitea.loadbalancer.server.port=3000
-    #   - traefik.http.services.gitea.loadbalancer.server.scheme=http
-    #   - traefik.http.routers.gitea-https.entrypoints=websecure
-    #   - traefik.http.routers.gitea-https.rule=Host(`your-fqdn`)  # <-- Replace with your FQDN
-    #   - traefik.http.routers.gitea-https.tls=true
-    #   - traefik.http.routers.gitea-https.tls.certresolver=your-certresolver  # <-- Replace with your certresolver
-    # <--
-    restart: unless-stopped
-
-# --> When using internal database
-# db:
-#   image: docker.io/library/postgres:17.5
-#   container_name: gitea-db
-#   environment:
-#     - POSTGRES_USER=${POSTGRES_USER:?POSTGRES_USER not set}
-#     - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:?POSTGRES_PASSWORD not set}
-#     - POSTGRES_DB=${POSTGRES_DB:?POSTGRES_DB not set}
-#   networks:
-#     - backend
-#   volumes:
-#     - gitea-db:/var/lib/postgresql/data
-#   restart: unless-stopped
-# <--
-
-volumes:
-  gitea-data:
-    driver: local
-# --> When using internal database
-# gitea-db:
-#   driver: local
-# <--
-
-# --> (Optional) When using traefik...
-# networks:
-#   frontend:
-#     external: true
-# <--
-# --> (Optional) When using an internal database...
-#   backend:
-#     external: true
-# <--

+ 0 - 52
docker-compose/gitlab/compose.yaml

@@ -1,52 +0,0 @@
----
-services:
-  gitlab:
-    image: docker.io/gitlab/gitlab-ce:18.4.1-ce.0
-    container_name: gitlab
-    shm_size: '256m'
-    environment: {}
-    # --> (Optional) When using traefik...
-    # networks:
-    #   - frontend
-    # <--
-    volumes:
-      - ./config:/etc/gitlab
-      - ./logs:/var/log/gitlab
-      - gitlab-data:/var/opt/gitlab
-    ports:
-      # --> (Optional) Remove when using traefik...
-      - "80:80"
-      - "443:443"
-      # <--
-      - '2424:22'
-    # --> (Optional) When using traefik...
-    # labels:
-    #   - traefik.enable=true
-    #   - traefik.http.services.gitlab.loadbalancer.server.port=80
-    #   - traefik.http.services.gitlab.loadbalancer.server.scheme=http
-    #   - traefik.http.routers.gitlab.service=gitlab
-    #   - traefik.http.routers.gitlab.rule=Host(`your-gitlab-fqdn`)
-    #   - traefik.http.routers.gitlab.entrypoints=websecure
-    #   - traefik.http.routers.gitlab.tls=true
-    #   - traefik.http.routers.gitlab.tls.certresolver=cloudflare
-    # <--
-    # --> (Optional) Enable Container Registry settings here...
-    #   - traefik.http.services.registry.loadbalancer.server.port=5678
-    #   - traefik.http.services.registry.loadbalancer.server.scheme=http
-    #   - traefik.http.routers.registry.service=registry
-    #   - traefik.http.routers.registry.rule=Host(`your-registry-fqdn`)
-    #   - traefik.http.routers.registry.entrypoints=websecure
-    #   - traefik.http.routers.registry.tls=true
-    #   - traefik.http.routers.registry.tls.certresolver=cloudflare
-    # <--
-    restart: unless-stopped
-
-volumes:
-  gitlab-data:
-    driver: local
-
-# --> (Optional) When using traefik...
-# networks:
-#   frontend:
-#     external: true
-# <--

+ 0 - 58
docker-compose/gitlab/config/gitlab.rb

@@ -1,58 +0,0 @@
-# -- Change GitLab settings here...
-external_url 'https://your-gitlab-fqdn'  # <-- Replace with your GitLab FQDN
-
-# -- (Optional) Change GitLab Shell settings here...
-gitlab_rails['gitlab_shell_ssh_port'] = 2424
-
-# -- Change internal web service settings here...
-letsencrypt['enable'] = false
-nginx['listen_port']  = 80
-nginx['listen_https'] = false
-
-# --> (Optional) Enable Container Registry settings here...
-# registry_external_url 'https://your-registry-fqdn'  # <-- Replace with your registry FQDN
-# gitlab_rails['registry_enabled']  = true
-# registry_nginx['listen_https']    = false
-# registry_nginx['listen_port']     = 5678  # <-- Replace with your registry port
-# <--
-
-# --> (Optional) Add Authentik settings here...
-# gitlab_rails['omniauth_auto_link_user'] = ['openid_connect']
-# gitlab_rails['omniauth_providers'] = [
-#   {
-#     name: "openid_connect",  #  !-- Do not change this parameter
-#     label: "Authentik",  # <-- (Optional) Change name for login button, defaults to "Openid Connect"
-#     icon: "https://avatars.githubusercontent.com/u/82976448?s=200&v=4",
-#     args: {
-#       name: "openid_connect",
-#       scope: ["openid","profile","email"],
-#       response_type: "code",
-#       issuer: "https://your-authentik-fqdn/application/o/your-gitlab-slug/",  # <-- Replace with your Authentik FQDN and GitLab slug
-#       discovery: true,
-#       client_auth_method: "query",
-#       uid_field: "email",
-#       send_scope_to_token_endpoint: "false",
-#       pkce: true,
-#       client_options: {
-#         identifier: "your-authentik-provider-client-id",  # <-- Replace with your Authentik provider client ID
-#         secret: "your-authentik-provider-client-secret",  # <-- Replace with your Authentik provider client secret
-#         redirect_uri: "https://your-authentik-fqdn/users/auth/openid_connect/callback"  # <-- Replace with your Authentik FQDN
-#       }
-#     }
-#   }
-# ]
-# <--
-
-# --> (Optional) Change SMTP settings here...
-# gitlab_rails['smtp_enable']           = true
-# gitlab_rails['smtp_address']          = "your-smtp-server-addr"  # <-- Replace with your SMTP server address
-# gitlab_rails['smtp_port']             = 465
-# gitlab_rails['smtp_user_name']        = "your-smtp-username"  # <-- Replace with your SMTP username
-# gitlab_rails['smtp_password']         = "your-smtp-password"  # <-- Replace with your SMTP password
-# gitlab_rails['smtp_domain']           = "your-smtp-domain"  # <-- Replace with your SMTP domain
-# gitlab_rails['smtp_authentication']   = "login"
-# gitlab_rails['smtp_ssl']              = true
-# gitlab_rails['smtp_force_ssl']        = true
-# gitlab_rails['gitlab_email_from']     = 'your-email-from-addr'  # <-- Replace with your email from address
-# gitlab_rails['gitlab_email_reply_to'] = 'your-email-replyto-addr'  # <-- Replace with your email reply-to address
-# <--

+ 0 - 13
docker-compose/grafana/compose.yaml

@@ -1,13 +0,0 @@
----
-volumes:
-  grafana-data:
-    driver: local
-services:
-  grafana:
-    image: docker.io/grafana/grafana-oss:12.1.1
-    container_name: grafana
-    ports:
-      - "3000:3000"
-    volumes:
-      - grafana-data:/var/lib/grafana
-    restart: unless-stopped

+ 0 - 82
docker-compose/homer/assets/example.config.yml

@@ -1,82 +0,0 @@
----
-# Homepage configuration
-# See https://fontawesome.com/icons for icons options
-
-title: "Demo dashboard"
-subtitle: "Homer"
-logo: "logo.png"
-# icon: "fas fa-skull-crossbones" # Optional icon
-
-header: true
-footer: false
-
-# Optional theme customization
-theme: default
-colors:
-  light:
-    highlight-primary: "#3367d6"
-    highlight-secondary: "#4285f4"
-    highlight-hover: "#5a95f5"
-    background: "#f5f5f5"
-    card-background: "#ffffff"
-    text: "#363636"
-    text-header: "#ffffff"
-    text-title: "#303030"
-    text-subtitle: "#424242"
-    card-shadow: rgba(0, 0, 0, 0.1)
-    link-hover: "#363636"
-  dark:
-    highlight-primary: "#3367d6"
-    highlight-secondary: "#4285f4"
-    highlight-hover: "#5a95f5"
-    background: "#131313"
-    card-background: "#2b2b2b"
-    text: "#eaeaea"
-    text-header: "#ffffff"
-    text-title: "#fafafa"
-    text-subtitle: "#f5f5f5"
-    card-shadow: rgba(0, 0, 0, 0.4)
-    link-hover: "#ffdd57"
-
-# Optional message
-message:
-  # url: https://b4bz.io
-  style: "is-dark"  # See https://bulma.io/documentation/components/message/#colors for styling options.
-  title: "Demo !"
-  icon: "fa fa-grin"
-  content: "This is a dummy homepage demo. <br /> Find more information on <a href='https://github.com/bastienwirtz/homer'>github.com/bastienwirtz/homer</a>"
-
-# Optional navbar
-# links: [] # Allows for navbar (dark mode, layout, and search) without any links
-links:
-  - name: "Contribute"
-    icon: "fab fa-github"
-    url: "https://github.com/bastienwirtz/homer"
-    target: "_blank"  # optional html a tag target attribute
-  - name: "Wiki"
-    icon: "fas fa-book"
-    url: "https://www.wikipedia.org/"
-  # this will link to a second homer page that will load config from additionnal-page.yml and keep default config values as in config.yml file
-  # see url field and assets/additionnal-page.yml.dist used in this example:
-  - name: "another page!"
-    icon: "fas fa-file-alt"
-    url: "#additionnal-page"
-
-# Services
-# First level array represent a group.
-# Leave only a "items" key if not using group (group name, icon & tagstyle are optional, section separation will not be displayed).
-services:
-  - name: "Applications"
-    icon: "fas fa-cloud"
-    items:
-      - name: "Awesome app"
-        logo: "assets/tools/sample.png"
-        subtitle: "Bookmark example"
-        tag: "app"
-        url: "https://www.reddit.com/r/selfhosted/"
-        target: "_blank"  # optional html a tag target attribute
-      - name: "Another one"
-        logo: "assets/tools/sample2.png"
-        subtitle: "Another application"
-        tag: "app"
-        url: "#"

+ 0 - 8
docker-compose/homer/assets/example.custom.css

@@ -1,8 +0,0 @@
-@charset "UTF-8";
-
-/* Custom card colors */
-/* Use with `class:` property of services in config.yml */
-body #app .card.green {
-  background-color: #006600;
-  color: #00ff00;
-}

+ 0 - 64
docker-compose/homer/assets/example2.config.yml

@@ -1,64 +0,0 @@
----
-# Homepage configuration
-# See https://fontawesome.com/icons for icons options
-
-title: "Hello beautiful!"
-subtitle: "App dashboard"
-logo: false
-# icon: "fas fa-skull-crossbones" Optional icon
-
-header: true
-
-# Optional theme customization
-theme: sui
-colors:
-  light:
-    highlight-primary: transparent
-    highlight-secondary: transparent
-    highlight-hover: "#4a4a4a"
-    text-subtitle: "#424242"
-  dark:
-    background: "#2B2C56"
-    highlight-primary: transparent
-    highlight-secondary: transparent
-    highlight-hover: "#200b35"
-    text-subtitle: "#6375e8"
-
-# Optional navbar
-# links: [] # Allows for navbar (dark mode, layout, and search) without any links
-links: []
-
-# Services
-# First level array represent a group.
-# Leave only a "items" key if not using group (group name, icon & tagstyle are optional, section separation will not be displayed).
-services:
-  - name: "APPLICATIONS"
-    items:
-      - name: "Jenkins"
-        logo: "assets/tools/jenkins.png"
-        subtitle: "Continuous integration server"
-        url: "https://jenkins.io/"
-      - name: "RabbitMQ Management"
-        logo: "assets/tools/rabbitmq.png"
-        subtitle: "Manage & monitor RabbitMQ server"
-        # Optional tagstyle
-        # Same styling options as the optional message.
-        tagstyle: "is-success"
-        url: "https://www.rabbitmq.com/"
-      - name: "M/Monit"
-        logo: "assets/tools/monit.png"
-        subtitle: "Monitor & manage all monit enabled hosts"
-        url: "https://mmonit.com/monit/"
-      - name: "Grafana"
-        logo: "assets/tools/grafana.png"
-        subtitle: "Metric analytics & dashboards"
-        url: "https://grafana.com/"
-      - name: "Kibana"
-        logo: "assets/tools/elastic.png"
-        subtitle: "Explore & visualize logs"
-        url: "https://www.elastic.co/products/kibana"
-      - name: "Website monitoring"
-        logo: "assets/tools/pingdom.png"
-        subtitle: "Pingdom public reports overview"
-        tag: "CI"
-        url: "https://www.pingdom.com/"

+ 0 - 10
docker-compose/homer/compose.yaml

@@ -1,10 +0,0 @@
----
-services:
-  homer:
-    image: docker.io/b4bz/homer:v25.08.1
-    container_name: homer
-    ports:
-      - "8080:8080"
-    volumes:
-      - /etc/homer/assets/:/www/assets
-    restart: unless-stopped

+ 0 - 45
docker-compose/influxdb/compose.yaml

@@ -1,45 +0,0 @@
----
-# (Optional) when using custom network
-# networks:
-#   yournetwork:
-#     external: true
-volumes:
-  influxdb-data:
-services:
-  influxdb:
-    container_name: influxdb
-    image: docker.io/library/influxdb:2.7.12-alpine
-    # (Optional) remove this section when using traefik
-    ports:
-      - '8086:8086'
-    volumes:
-      - influxdb-data:/var/lib/influxdb2
-      - /etc/influxdb2:/etc/influxdb2
-      # (Optional) when using certificate
-      # - /etc/ssl/cert.pem/:/etc/ssl/cert.pem  # (optional) if you're using self-signed certs
-      # - /etc/ssl/cert-key.pem/:/etc/ssl/cert-key.pem  # (optional) if you're using self-signed certs
-    # (Optional) when using certificate
-    # command: influxd --tls-cert=/etc/ssl/cert.pem --tls-key=/etc/ssl/cert-key.pem  # (optional) if you're using self-signed certs
-    environment:
-      - DOCKER_INFLUXDB_INIT_MODE=setup
-      - DOCKER_INFLUXDB_INIT_USERNAME=my-user
-      - DOCKER_INFLUXDB_INIT_PASSWORD=my-password
-      - DOCKER_INFLUXDB_INIT_ORG=my-org
-      - DOCKER_INFLUXDB_INIT_BUCKET=my-bucket
-    # (Optional) change retention time
-    #   - DOCKER_INFLUXDB_INIT_RETENTION=1w  # (optional) configure data retention 1 week
-    # (Optional) add admin token
-    #   - DOCKER_INFLUXDB_INIT_ADMIN_TOKEN=my-super-secret-auth-token  # (optional) set admin token
-    # (Optional) when using traefik
-    # labels:
-    #   - traefik.enable=true
-    #   - traefik.http.services.influxdb.loadbalancer.server.port=8086
-    #   - traefik.http.services.influxdb.loadbalancer.server.scheme=https
-    #   - traefik.http.routers.influxdb-https.entrypoints=websecure
-    #   - traefik.http.routers.influxdb-https.rule=Host(`your-server-url`)
-    #   - traefik.http.routers.influxdb-https.tls=true
-    #   - traefik.http.routers.influxdb-https.tls.certresolver=your-certresolver
-    # (Optional) when using custom network
-    # networks:
-    #   - yournetwork
-    restart: unless-stopped

+ 0 - 32
docker-compose/nextcloud/compose.yaml

@@ -1,32 +0,0 @@
----
-volumes:
-  nextcloud-data:
-  nextcloud-db:
-services:
-  nextcloud-app:
-    image: docker.io/library/nextcloud:31.0.9-apache
-    container_name: nextcloud-app
-    ports:
-      - 80:80
-    volumes:
-      - nextcloud-data:/var/www/html
-    environment:
-      - MYSQL_PASSWORD=$MYSQL_PASSWORD
-      - MYSQL_DATABASE=$MYSQL_DATABASE
-      - MYSQL_USER=$MYSQL_USER
-      - MYSQL_HOST=nextcloud-db
-    restart: unless-stopped
-  nextcloud-db:
-    # See compatibility matrix for Nextcloud 31
-    # https://docs.nextcloud.com/server/31/admin_manual/installation/system_requirements.html
-    image: docker.io/library/mariadb:10.11.14
-    container_name: nextcloud-db
-    command: --transaction-isolation=READ-COMMITTED --binlog-format=ROW
-    volumes:
-      - nextcloud-db:/var/lib/mysql
-    environment:
-      - MYSQL_RANDOM_ROOT_PASSWORD=true
-      - MYSQL_PASSWORD=$MYSQL_PASSWORD
-      - MYSQL_DATABASE=$MYSQL_DATABASE
-      - MYSQL_USER=$MYSQL_USER
-    restart: unless-stopped

+ 0 - 26
docker-compose/nginx/compose.yaml

@@ -1,26 +0,0 @@
----
-services:
-  nginx:
-    image: docker.io/library/nginx:1.28.0-alpine
-    container_name: nginx
-    ports:
-      - 80:80
-      - 443:443
-    volumes:
-      - ./config/default.conf:/etc/nginx/conf.d/default.conf:ro
-      - ./data:/usr/share/nginx/html:ro
-    labels:
-      - traefik.enable=true
-      - traefik.http.services.nginx.loadbalancer.server.port=80
-      - traefik.http.routers.nginx.entrypoints=websecure
-      - traefik.http.routers.nginx.rule=Host(`example.com`)
-      - traefik.http.routers.nginx.tls=true
-      - traefik.http.routers.nginx.tls.certresolver=cloudflare
-      - traefik.http.routers.nginx.service=nginx
-    networks:
-      - frontend
-    restart: unless-stopped
-
-networks:
-  frontend:
-    external: true

+ 0 - 14
docker-compose/nvidiadgcm/compose.yaml

@@ -1,14 +0,0 @@
----
-services:
-  nvidia_exporter:
-    image: nvcr.io/nvidia/k8s/dcgm-exporter:2.3.2-2.6.2-ubuntu20.04
-    container_name: nvidia_exporter
-    runtime: nvidia
-    cap_add:
-      - SYS_ADMIN
-    environment:
-      - NVIDIA_VISIBLE_DEVICES=all
-      - NVIDIA_DRIVER_CAPABILITIES=all
-    ports:
-      - 9400:9400
-    restart: unless-stopped

+ 0 - 16
docker-compose/nvidiasmi/compose.yaml

@@ -1,16 +0,0 @@
----
-services:
-  nvidia_smi_exporter:
-    image: docker.io/utkuozdemir/nvidia_gpu_exporter:1.3.2
-    container_name: nvidia_smi_exporter
-    runtime: nvidia
-    environment:
-      - NVIDIA_VISIBLE_DEVICES=all
-      - NVIDIA_DRIVER_CAPABILITIES=all
-    ports:
-      - "9835:9835"
-    volumes:
-      - /usr/bin/nvidia-smi:/usr/bin/nvidia-smi
-      - /usr/lib/x86_64-linux-gnu/libnvidia-ml.so:/usr/lib/x86_64-linux-gnu/libnvidia-ml.so
-      - /usr/lib/x86_64-linux-gnu/libnvidia-ml.so.1:/usr/lib/x86_64-linux-gnu/libnvidia-ml.so.1
-    restart: unless-stopped

+ 0 - 40
docker-compose/pihole/compose.yaml

@@ -1,40 +0,0 @@
----
-services:
-  pihole:
-    container_name: pihole
-    image: docker.io/pihole/pihole:2025.08.0
-    ports:
-      - 53:53/tcp
-      - 53:53/udp
-      - 67:67/udp
-      - 8081:80/tcp
-      - 8443:443/tcp
-    environment:
-      - TZ=Europe/Berlin
-      - FTLCONF_webserver_api_password=${FTLCONF_webserver_api_password}
-      - FTLCONF_dns_upstreams=${FTLCONF_dns_upstreams:-8.8.8.8;8.8.4.4}
-    volumes:
-      - config_dnsmasq:/etc/dnsmasq.d
-      - config_pihole:/etc/pihole
-    networks:
-      - frontend
-    labels:
-      - traefik.enable=true
-      # Pihole Web Interface
-      - traefik.http.routers.pihole.rule=Host(`example.com`)
-      - traefik.http.routers.pihole.entrypoints=websecure
-      - traefik.http.routers.pihole.tls=true
-      - traefik.http.routers.pihole.tls.certresolver=cloudflare
-      - traefik.http.routers.pihole.service=pihole
-      - traefik.http.services.pihole.loadBalancer.server.port=80
-    restart: unless-stopped
-
-volumes:
-  config_dnsmasq:
-    driver: local
-  config_pihole:
-    driver: local
-
-networks:
-  frontend:
-    external: true

+ 0 - 31
docker-compose/portainer/compose.yaml

@@ -1,31 +0,0 @@
----
-services:
-  app:
-    container_name: portainer
-    image: docker.io/portainer/portainer-ce:2.34.0-alpine
-    ports:
-      # --> (Optional) Remove when using traefik...
-      - 9000:9000
-      - 9443:9443
-      # <--
-      - 8000:8000
-    volumes:
-      - /run/docker.sock:/var/run/docker.sock
-      - portainer-data:/data
-    # --> (Optional) When using traefik...
-    # labels:
-    #   - traefik.enable=true
-    #   - traefik.http.services.portainer.loadbalancer.server.port=9000
-    #   - traefik.http.routers.portainer.service=portainer
-    #   - traefik.http.routers.portainer.entrypoints=websecure
-    #   - traefik.http.routers.portainer.rule=Host(`your-portainer-fqdn`)
-    #   - traefik.http.routers.portainer.tls=true
-    #   - traefik.http.routers.portainer.tls.certresolver=cloudflare
-    # networks:
-    #   - frontend
-    # <--
-    restart: unless-stopped
-
-volumes:
-  portainer-data:
-    driver: local

+ 0 - 45
docker-compose/postgres/compose.yaml

@@ -1,45 +0,0 @@
----
-services:
-  postgres:
-    image: docker.io/library/postgres:17.6
-    container_name: postgres
-    environment:
-      - POSTGRES_INITDB_ARGS=${POSTGRES_INITDB_ARGS---data-checksums}
-      - POSTGRES_HOST_AUTH_METHOD=${POSTGRES_HOST_AUTH_METHOD-}
-      - POSTGRES_USER=${POSTGRES_USER:-postgres}
-      - POSTGRES_PASSWORD_FILE=/run/secrets/postgres_password
-      - POSTGRES_DB=${POSTGRES_DB:-$POSTGRES_USER}
-      - TZ=${TZ:-UTC}
-    ports:
-      - 5432:5432
-    healthcheck:
-      test: ['CMD-SHELL', 'pg_isready -U "${POSTGRES_USER:-postgres}"']
-      start_period: 30s
-      interval: 10s
-      timeout: 10s
-      retries: 5
-    # (Optional)  When using custom network, see also
-    #             https://docs.docker.com/compose/compose-file/compose-file-v3/#networks
-    #
-    # networks:
-    #   - yournetwork
-    secrets:
-      - postgres_password
-    volumes:
-      - postgres_data:/var/lib/postgresql/data
-    restart: unless-stopped
-
-# (Optional)  When using custom network, see also
-#             https://docs.docker.com/compose/compose-file/compose-file-v3/#network-configuration-reference
-#
-# networks:
-#   yournetwork:
-#     external: true
-
-secrets:
-  postgres_password:
-    file: secret.postgres_password.txt
-
-volumes:
-  postgres_data:
-    driver: local

+ 0 - 36
docker-compose/swag/compose.yaml

@@ -1,36 +0,0 @@
----
-services:
-  mariadb:
-    image: docker.io/linuxserver/mariadb:10.11.10
-    container_name: mariadb
-    environment:
-      - PUID=1001
-      - PGID=1001
-      - MYSQL_ROOT_PASSWORD=mariadbpassword
-      - TZ=Europe/Berlin
-      - MYSQL_DATABASE=WP_database
-      - MYSQL_USER=WP_dbuser
-      - MYSQL_PASSWORD=WP_dbpassword
-    volumes:
-      - /opt/webserver_swag/config/mariadb:/config
-    restart: unless-stopped
-  swag:
-    image: docker.io/linuxserver/swag:3.3.0
-    container_name: swag
-    cap_add:
-      - NET_ADMIN
-    environment:
-      - PUID=1001
-      - PGID=1001
-      - TZ=Europe/Berlin
-      - URL=do-test-1.the-digital-life.com
-      - SUBDOMAINS=
-      - VALIDATION=http
-    volumes:
-      - /opt/webserver_swag/config:/config
-    ports:
-      - 443:443
-      - 80:80  # optional
-    depends_on:
-      - mariadb
-    restart: unless-stopped

+ 0 - 24
docker-compose/traefik/compose.yaml

@@ -1,24 +0,0 @@
----
-services:
-  traefik:
-    image: docker.io/library/traefik:v3.5.3
-    container_name: traefik
-    ports:
-      - 80:80
-      - 443:443
-      # --> (Optional) Enable Dashboard, don't do in production
-      # - 8080:8080
-      # <--
-    volumes:
-      - /run/docker.sock:/run/docker.sock:ro
-      - ./config/:/etc/traefik/:ro
-      - ./certs/:/var/traefik/certs/:rw
-    environment:
-      - CF_DNS_API_TOKEN=your-cloudflare-api-token  # <-- Change this to your Cloudflare API Token
-    networks:
-      - frontend
-    restart: unless-stopped
-
-networks:
-  frontend:
-    external: true  # <-- (Optional) Change this to false if you want to create a new network

+ 0 - 21
docker-compose/traefik/config/example.externalservice.yaml

@@ -1,21 +0,0 @@
-# --> (Example) Expose an external service using Traefik...
-# http:
-#   # -- Change Router Configuration here...
-#   routers:
-#     your-local-router:
-#       rule: "Host(`your-local-service.your-domain.com`)"  # <-- Change Rules here...
-#       service: your-local-service  # <-- Change Service Name here...
-#       priority: 1000  # <-- (Optional) Change Routing Priority here...
-#       entryPoints:
-#         - web
-#         - websecure
-#       tls:
-#         certResolver: cloudflare
-#
-#   # -- Change Service Configuration here...
-#   services:
-#     your-local-service:  # <-- Change Service Name here...
-#       loadBalancer:
-#         servers:
-#           - url: "http://your-local-service:port"  # <-- Change Target Service URL here...
-# <--

+ 0 - 20
docker-compose/traefik/config/example.middleware-authentik.yaml

@@ -1,20 +0,0 @@
-# --> (Example) Securely expose apps using the Traefik proxy outpost...
-# http:
-#   middlewares:
-#     authentik-middleware:
-#       forwardAuth:
-#         address: http://your-authentik-outpost-fqdn:9000/outpost.goauthentik.io/auth/traefik
-#         trustForwardHeader: true
-#         authResponseHeaders:
-#           - X-authentik-username
-#           - X-authentik-groups
-#           - X-authentik-email
-#           - X-authentik-name
-#           - X-authentik-uid
-#           - X-authentik-jwt
-#           - X-authentik-meta-jwks
-#           - X-authentik-meta-outpost
-#           - X-authentik-meta-provider
-#           - X-authentik-meta-app
-#           - X-authentik-meta-version
-# <--

+ 0 - 22
docker-compose/traefik/config/example.middleware-passbolt.yaml

@@ -1,22 +0,0 @@
-# --> (Optional) When using Passbolt with Traefik...
-# http:
-#   middlewares:
-#     passbolt-middleware:
-#       headers:
-#         FrameDeny: true
-#         AccessControlAllowMethods: 'GET,OPTIONS,PUT'
-#         AccessControlAllowOriginList:
-#           - origin-list-or-null
-#         AccessControlMaxAge: 100
-#         AddVaryHeader: true
-#         BrowserXssFilter: true
-#         ContentTypeNosniff: true
-#         ForceSTSHeader: true
-#         STSIncludeSubdomains: true
-#         STSPreload: true
-#         ContentSecurityPolicy: default-src 'self' 'unsafe-inline'
-#         CustomFrameOptionsValue: SAMEORIGIN
-#         ReferrerPolicy: same-origin
-#         PermissionsPolicy: vibrate 'self'
-#         STSSeconds: 315360000
-# <--

+ 0 - 18
docker-compose/traefik/config/example.tls.yaml

@@ -1,18 +0,0 @@
-# --> (Example) Change TLS Configuration here...
-# tls:
-#   options:
-#     default:
-#       minVersion: VersionTLS12
-#       sniStrict: true
-#       curvePreferences:
-#         - CurveP256
-#         - CurveP384
-#         - CurveP521
-#       cipherSuites:
-#         - TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256
-#         - TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384
-#         - TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256
-#         - TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384
-#         - TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256
-#         - TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305
-# <--

+ 0 - 62
docker-compose/traefik/config/traefik.yaml

@@ -1,62 +0,0 @@
----
-global:
-  checkNewVersion: false
-  sendAnonymousUsage: false
-
-# --> (Optional) Change log level and format here ...
-#     - level: [TRACE, DEBUG, INFO, WARN, ERROR, FATAL]
-# log:
-#  level: ERROR
-# <--
-
-# --> (Optional) Enable accesslog here ...
-# accesslog: {}
-# <--
-
-# --> (Optional) Enable API and Dashboard here, don't do in production
-# api:
-#   dashboard: true
-#   insecure: true
-# <--
-
-# -- Change EntryPoints here...
-entryPoints:
-  web:
-    address: :80
-    # --> (Optional) Redirect all HTTP to HTTPS
-    # http:
-    #   redirections:
-    #     entryPoint:
-    #       to: websecure
-    #       scheme: https
-    # <--
-  websecure:
-    address: :443
-
-# -- Configure your CertificateResolver here...
-certificatesResolvers:
-  cloudflare:
-    acme:
-      email: your-email@example.com  # <-- Change this to your email
-      storage: /var/traefik/certs/cloudflare-acme.json
-      caServer: "https://acme-v02.api.letsencrypt.org/directory"
-      dnsChallenge:
-        provider: cloudflare  # <-- (Optional) Change this to your DNS provider
-        resolvers:
-          - "1.1.1.1:53"
-          - "8.8.8.8:53"
-
-# --> (Optional) Disable TLS Cert verification check
-# serversTransport:
-#   insecureSkipVerify: true
-# <--
-
-providers:
-  docker:
-    exposedByDefault: false  # <-- (Optional) Change this to true if you want to expose all services
-    # Specify discovery network - This ensures correct name resolving and possible issues with containers, that are in multiple networks.
-    # E.g. Database container in a separate network and a container in the frontend and database network.
-    network: frontend
-  file:
-    directory: /etc/traefik
-    watch: true

+ 0 - 36
kestra/ansible/ansible-playbook-git.yaml

@@ -1,36 +0,0 @@
----
-# Kestra ansible-playbook Template
-# ---
-#
-# Run an ansible playbook cloned from a Git Repository
-#
-id: ansible_playbook_git
-namespace: your_namespace  # <-- Replace with your namespace...
-tasks:
-  - id: ansible_job
-    type: io.kestra.plugin.core.flow.WorkingDirectory
-    inputFiles:
-      id_rsa: "{{ secret('RSA_SSH_KEY') }}"  # <-- (Required) Replace with your secret key...
-      # id_ed25519: "{{ secret('ED25519_SSH_KEY') }}"  # <-- (Optional) Replace with your secret key, when using ED25519...
-    tasks:
-      - id: git_clone
-        type: io.kestra.plugin.git.Clone
-        url: your-git-repository-url  # <-- Replace with your Git repository URL...
-        directory: ansible
-        branch: main  # <-- (Optional) Replace with your Git branch...
-        # --> (Optional) If Git repository is private, add your Git token...
-        # username: xcad
-        # password: "{{ secret('GITOKEN') }}"
-        # <--
-      - id: ansible_playbook
-        type: io.kestra.plugin.ansible.cli.AnsibleCLI
-        taskRunner:
-          type: io.kestra.plugin.scripts.runner.docker.Docker
-          image: docker.io/cytopia/ansible:latest-tools
-          user: "1000"  # <-- (Required) Replace with your user id...
-        env:
-          "ANSIBLE_HOST_KEY_CHECKING": "false"
-          "ANSIBLE_REMOTE_USER": "your-remote-user"  # <-- (Required) Replace with your remote user...
-        commands:
-          - ansible-playbook -i ansible/inventory --key-file id_rsa ansible/your-playbook.yaml
-          # - ansible-playbook -i ansible/inventory --key-file id_ed25519 ansible/your-playbook.yaml  # <-- (Optional) when using ED25519...

+ 0 - 38
kestra/ansible/ansible-playbook-inline.yaml

@@ -1,38 +0,0 @@
----
-# Kestra ansible-playbook Template
-# ---
-#
-# Run an ansible playbook defined inline the kestra flow.
-#
-id: ansible_playbook_inline
-namespace: your_namespace  # <-- Replace with your namespace...
-tasks:
-  - id: ansible_job
-    type: io.kestra.plugin.core.flow.WorkingDirectory
-    inputFiles:
-      inventory.ini: |  # <-- Replace with your inventory file content...
-        srv-demo-1.home.clcreative.de
-      myplaybook.yaml: |  # <-- Replace with your playbook file content...
-        ---
-        - hosts: srv-demo-1.home.clcreative.de
-          tasks:
-            - name: upgrade apt packages
-              become: true
-              ansible.builtin.apt:
-                upgrade: true
-                update_cache: true
-      id_rsa: "{{ secret('RSA_SSH_KEY') }}"  # <-- (Required) Replace with your secret key...
-      # id_ed25519: "{{ secret('ED25519_SSH_KEY') }}"  # <-- (Optional) Replace with your secret key, when using ED25519...
-    tasks:
-      - id: ansible_playbook
-        type: io.kestra.plugin.ansible.cli.AnsibleCLI
-        taskRunner:
-          type: io.kestra.plugin.scripts.runner.docker.Docker
-          image: docker.io/cytopia/ansible:latest-tools
-          user: "1000"  # <-- (Required) Replace with your user id...
-        env:
-          "ANSIBLE_HOST_KEY_CHECKING": "false"
-          "ANSIBLE_REMOTE_USER": "your-remote-user"  # <-- (Required) Replace with your remote user...
-        commands:
-          - ansible-playbook -i inventory.ini --key-file id_rsa myplaybook.yaml
-          # - ansible-playbook -i inventory.ini --key-file id_ed25519 myplaybook.yaml  # <-- (Optional) when using ED25519...

+ 0 - 31
kestra/docker/docker-build-git.yaml

@@ -1,31 +0,0 @@
----
-# Kestra Docker Git Build Template
-# ---
-#
-# Build a Docker image from a Git repository.
-#
-id: docker_build_git
-namespace: your_namespace  # <- Replace with your namespace...
-tasks:
-  - id: docker_job
-    type: io.kestra.plugin.core.flow.WorkingDirectory
-    tasks:
-      - id: git_clone
-        type: io.kestra.plugin.git.Clone
-        url: your-git-repository-url  # <-- Replace with your Git repository URL...
-        directory: docker
-        branch: main  # <-- (Optional) Replace with your Git branch...
-        # --> (Optional) If Git repository is private, add your Git token...
-        # username: xcad
-        # password: "{{ secret('GITOKEN') }}"
-        # <--
-      - id: docker_build
-        type: io.kestra.plugin.docker.Build
-        dockerfile: "docker/src/Dockerfile"  # <- Replace with your Dockerfile path...
-        tags:
-          - your-username/your-repository:your-tag  # <- Replace with your Docker image tag...
-        push: true
-        credentials:
-          registry: https://index.docker.io/v1/
-          username: "{{ secret('YOUR_USERNAME') }}"  # <- Replace with your Docker Hub username...
-          password: "{{ secret('YOUR_PASSWORD') }}"  # <- Replace with your Docker Hub password...

+ 0 - 33
kestra/docker/docker-build-inline.yaml

@@ -1,33 +0,0 @@
----
-# Kestra Docker File Build Template
-# ---
-#
-# Build a Docker image from a File.
-#
-id: docker_build_inline
-namespace: your_namespace  # <- Replace with your namespace...
-tasks:
-  - id: docker_job
-    type: io.kestra.plugin.core.flow.WorkingDirectory
-    inputFiles:
-      Dockerfile: |  # <- Replace with your Dockerfile content...
-        FROM alpine:latest
-        WORKDIR /app
-        COPY . /app
-        RUN apk add --update python3
-        CMD [ "python", "main.py"]
-      main.py: |  # <- Replace with your Python script content...
-        if __name__ == "__main__":
-          print("Hello from Docker!")
-          exit(0)
-    tasks:
-      - id: docker_build
-        type: io.kestra.plugin.docker.Build
-        dockerfile: "src/Dockerfile"  # <- Replace with your Dockerfile path...
-        tags:
-          - your-username/your-repository:your-tag  # <- Replace with your Docker image tag...
-        push: true
-        credentials:
-          registry: https://index.docker.io/v1/
-          username: "{{ secret('YOUR_USERNAME') }}"  # <- Replace with your Docker Hub username...
-          password: "{{ secret('YOUR_PASSWORD') }}"  # <- Replace with your Docker Hub password...

Some files were not shown because too many files changed in this diff