Răsfoiți Sursa

big updates ;)

xcad 4 luni în urmă
părinte
comite
c547c18546
57 a modificat fișierele cu 2251 adăugiri și 1747 ștergeri
  1. 3 0
      cli/TODO.md
  2. 8 6
      cli/__main__.py
  3. 0 71
      cli/core/args.py
  4. 28 7
      cli/core/library.py
  5. 248 186
      cli/core/module.py
  6. 77 143
      cli/core/prompt.py
  7. 18 3
      cli/core/registry.py
  8. 0 125
      cli/core/renderers.py
  9. 269 333
      cli/core/template.py
  10. 303 161
      cli/core/variables.py
  11. 6 4
      cli/modules/ansible.py
  12. 83 50
      cli/modules/compose.py
  13. 5 3
      cli/modules/docker.py
  14. 5 3
      cli/modules/github_actions.py
  15. 5 3
      cli/modules/gitlab_ci.py
  16. 5 3
      cli/modules/kestra.py
  17. 6 4
      cli/modules/kubernetes.py
  18. 5 3
      cli/modules/packer.py
  19. 5 3
      cli/modules/terraform.py
  20. 5 3
      cli/modules/vagrant.py
  21. 25 23
      library/compose/alloy/compose.yaml
  22. 11 9
      library/compose/ansiblesemaphore/compose.yaml
  23. 146 92
      library/compose/authentik/compose.yaml
  24. 11 9
      library/compose/bind9/compose.yaml
  25. 11 9
      library/compose/cadvisor/compose.yaml
  26. 11 9
      library/compose/checkmk/compose.yaml
  27. 11 9
      library/compose/clamav/compose.yaml
  28. 11 9
      library/compose/dockge/compose.yaml
  29. 12 10
      library/compose/gitea/compose.yaml
  30. 12 10
      library/compose/gitlab-runner/compose.yaml
  31. 12 10
      library/compose/gitlab/compose.yaml
  32. 58 16
      library/compose/grafana/compose.yaml
  33. 12 10
      library/compose/heimdall/compose.yaml
  34. 12 10
      library/compose/homeassistant/compose.yaml
  35. 11 9
      library/compose/homepage/compose.yaml
  36. 51 13
      library/compose/homer/compose.yaml
  37. 93 47
      library/compose/influxdb/compose.yaml
  38. 11 9
      library/compose/loki/compose.yaml
  39. 11 9
      library/compose/mariadb/compose.yaml
  40. 34 16
      library/compose/n8n/compose.yaml
  41. 120 28
      library/compose/nextcloud/compose.yaml
  42. 68 41
      library/compose/nginx/compose.yaml
  43. 11 9
      library/compose/nginxproxymanager/compose.yaml
  44. 11 9
      library/compose/nodeexporter/compose.yaml
  45. 11 9
      library/compose/openwebui/compose.yaml
  46. 11 9
      library/compose/passbolt/compose.yaml
  47. 79 29
      library/compose/pihole/compose.yaml
  48. 62 29
      library/compose/portainer/compose.yaml
  49. 60 31
      library/compose/postgres/compose.yaml
  50. 11 9
      library/compose/prometheus/compose.yaml
  51. 11 9
      library/compose/promtail/compose.yaml
  52. 11 9
      library/compose/teleport/compose.yaml
  53. 48 33
      library/compose/traefik/compose.yaml
  54. 11 9
      library/compose/twingate_connector/compose.yaml
  55. 11 9
      library/compose/uptimekuma/compose.yaml
  56. 11 9
      library/compose/wazuh/compose.yaml
  57. 54 14
      library/compose/whoami/compose.yaml

+ 3 - 0
cli/TODO.md

@@ -0,0 +1,3 @@
+# TODO ITEMS
+
+* Consider creating a "secret" variable type that automatically handles sensitive data and masks input during prompts, which also should be set via .env file and not directly in the compose files or other templates.

+ 8 - 6
cli/__main__.py

@@ -3,6 +3,8 @@
 Main entry point for the Boilerplates CLI application.
 This file serves as the primary executable when running the CLI.
 """
+from __future__ import annotations
+
 import importlib
 import logging
 import pkgutil
@@ -18,7 +20,7 @@ from cli.core.registry import registry
 app = Typer(no_args_is_help=True)
 console = Console()
 
-def setup_logging(log_level: str = "WARNING"):
+def setup_logging(log_level: str = "WARNING") -> None:
   """Configure the logging system with the specified log level.
   
   Args:
@@ -41,7 +43,7 @@ def setup_logging(log_level: str = "WARNING"):
       datefmt='%Y-%m-%d %H:%M:%S'
     )
 
-    logger = logging.getLogger('__name__')
+    logger = logging.getLogger(__name__)
     logger.setLevel(numeric_level)
   except Exception as e:
     raise RuntimeError(f"Failed to configure logging: {e}")
@@ -55,7 +57,7 @@ def main(
     "--log-level", 
     help="Set the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)"
   )
-):
+) -> None:
   """Main CLI application for managing boilerplates."""
   # Configure logging based on the provided log level
   setup_logging(log_level)
@@ -64,14 +66,14 @@ def main(
   ctx.ensure_object(dict)
   ctx.obj['log_level'] = log_level
 
-def init_app():
+def init_app() -> None:
   """Initialize the application by discovering and registering modules.
   
   Raises:
       ImportError: If critical module import operations fail
       RuntimeError: If application initialization fails
   """
-  logger = logging.getLogger('boilerplates')
+  logger = logging.getLogger(__name__)
   failed_imports = []
   failed_registrations = []
   
@@ -132,7 +134,7 @@ def init_app():
     details = "\n".join(error_details) if error_details else str(e)
     raise RuntimeError(f"Application initialization failed: {details}")
 
-def run():
+def run() -> None:
   """Run the CLI application."""
   try:
     init_app()

+ 0 - 71
cli/core/args.py

@@ -1,71 +0,0 @@
-from typing import Dict, List
-
-# NOTE: This helper supports both syntaxes:
-#   --var KEY=VALUE
-#   --var KEY VALUE
-# It also tolerates passing values via ctx.args when using allow_extra_args.
-
-def parse_var_inputs(var_items: List[str], extra_args: List[str]) -> Dict[str, str]:
-  overrides: Dict[str, str] = {}
-
-  # First, parse items collected by Typer's --var Option (usually KEY=VALUE forms)
-  for item in var_items:
-    if item is None:
-      continue
-    if "=" in item:
-      key, value = item.split("=", 1)
-      if key:
-        overrides[key] = value
-    else:
-      # If user provided just a key via --var KEY, try to find the next value in extra args
-      key = item
-      value = _pop_next_value(extra_args)
-      overrides[key] = value if value is not None else ""
-
-  # Next, scan extra_args for any leftover --var occurrences using space-separated form
-  i = 0
-  while i < len(extra_args):
-    tok = extra_args[i]
-    if tok in ("--var", "-v"):
-      name = None
-      value = None
-      # name may be next token; it can also be name=value
-      if i + 1 < len(extra_args):
-        nxt = extra_args[i + 1]
-        if "=" in nxt:
-          name, value = nxt.split("=", 1)
-          i += 1
-        else:
-          name = nxt
-          if i + 2 < len(extra_args):
-            valtok = extra_args[i + 2]
-            if not valtok.startswith("-"):
-              value = valtok
-              i += 2
-            else:
-              i += 1
-          else:
-            i += 1
-      if name:
-        overrides[name] = value if value is not None else ""
-    elif tok.startswith("--var=") or tok.startswith("-v="):
-      remainder = tok.split("=", 1)[1]
-      if "=" in remainder:
-        name, value = remainder.split("=", 1)
-      else:
-        name, value = remainder, _pop_next_value(extra_args[i + 1:])
-      if name:
-        overrides[name] = value if value is not None else ""
-    i += 1
-
-  return overrides
-
-
-def _pop_next_value(args: List[str]) -> str | None:
-  """Return the first non-flag token from args, if any, without modifying caller's list.
-  This is a best-effort for --var KEY VALUE when Typer didn't bind VALUE to --var.
-  """
-  for tok in args:
-    if not tok.startswith("-"):
-      return tok
-  return None

+ 28 - 7
cli/core/library.py

@@ -1,17 +1,32 @@
+from __future__ import annotations
+
 from pathlib import Path
 import logging
+from typing import Optional
+
 logger = logging.getLogger(__name__)
 
 
+# -----------------------
+# SECTION: Library Class
+# -----------------------
+
 class Library:
   """Represents a single library with a specific path."""
   
-  def __init__(self, name: str, path: Path, priority: int = 0):
+  def __init__(self, name: str, path: Path, priority: int = 0) -> None:
+    """Initialize a library instance.
+    
+    Args:
+      name: Display name for the library
+      path: Path to the library directory
+      priority: Priority for library lookup (higher = checked first)
+    """
     self.name = name
     self.path = path
     self.priority = priority  # Higher priority = checked first
 
-  def find_by_id(self, module_name, files, template_id):
+  def find_by_id(self, module_name: str, files: list[str], template_id: str) -> tuple[Path, str]:
     """Find a template by its ID in this library.
     
     Args:
@@ -53,7 +68,7 @@ class Library:
     return template_path, self.name
 
 
-  def find(self, module_name, files, sort_results=False):
+  def find(self, module_name: str, files: list[str], sort_results: bool = False) -> list[tuple[Path, str]]:
     """Find templates in this library for a specific module.
     
     Args:
@@ -108,12 +123,17 @@ class Library:
     logger.debug(f"Found {len(template_dirs)} templates in module '{module_name}'")
     return template_dirs
 
+# !SECTION
+
+# -----------------------------
+# SECTION: LibraryManager Class
+# -----------------------------
 
 class LibraryManager:
   """Manages multiple libraries and provides methods to find templates."""
   
   # FIXME: For now this is static and only has one library
-  def __init__(self):
+  def __init__(self) -> None:
 
     # get the root path of the repository
     repo_root = Path(__file__).parent.parent.parent.resolve()
@@ -122,7 +142,7 @@ class LibraryManager:
       Library(name="default", path=repo_root / "library", priority=0)
     ]
 
-  def find_by_id(self, module_name, files, template_id):
+  def find_by_id(self, module_name: str, files: list[str], template_id: str) -> Optional[tuple[Path, str]]:
     """Find a template by its ID across all libraries.
     
     Args:
@@ -147,7 +167,7 @@ class LibraryManager:
     logger.debug(f"Template '{template_id}' not found in any library")
     return None
   
-  def find(self, module_name, files, sort_results=False):
+  def find(self, module_name: str, files: list[str], sort_results: bool = False) -> list[tuple[Path, str]]:
     """Find templates across all libraries for a specific module.
     
     Args:
@@ -187,4 +207,5 @@ class LibraryManager:
     
     logger.debug(f"Found {len(unique_templates)} unique templates total")
     return unique_templates
-  
+
+# !SECTION

+ 248 - 186
cli/core/module.py

@@ -1,32 +1,70 @@
+from __future__ import annotations
+
 from abc import ABC
 from pathlib import Path
 from typing import Optional, Dict, Any, List
 import logging
 from typer import Typer, Option, Argument, Context
 from rich.console import Console
-from rich.table import Table
 from rich.panel import Panel
-from rich.rule import Rule
+from rich.table import Table
 
 from .library import LibraryManager
 from .template import Template
 from .prompt import PromptHandler
-from .args import parse_var_inputs
-from .renderers import render_variable_table, render_template_list_table
 
 logger = logging.getLogger(__name__)
 console = Console()
 
 
+# -------------------------------
+# SECTION: Helper Functions
+# -------------------------------
+
+def parse_var_inputs(var_options: list[str], extra_args: list[str]) -> dict[str, Any]:
+  """Parse variable inputs from --var options and extra args.
+  
+  Supports formats:
+    --var KEY=VALUE
+    --var KEY VALUE
+    
+  Args:
+    var_options: List of variable options from CLI
+    extra_args: Additional arguments that may contain values
+    
+  Returns:
+    Dictionary of parsed variables
+  """
+  variables = {}
+  
+  # Parse --var KEY=VALUE format
+  for var_option in var_options:
+    if '=' in var_option:
+      key, value = var_option.split('=', 1)
+      variables[key] = value
+    else:
+      # --var KEY VALUE format - value should be in extra_args
+      if extra_args:
+        variables[var_option] = extra_args.pop(0)
+      else:
+        logger.warning(f"No value provided for variable '{var_option}'")
+  
+  return variables
+
+# !SECTION
+
+# ---------------------
+# SECTION: Module Class
+# ---------------------
+
 class Module(ABC):
   """Streamlined base module that auto-detects variables from templates."""
   
-  # Required class attributes for subclasses
-  name = None
-  description = None  
-  files = None
-  
-  def __init__(self):
+  name: str | None = None
+  description: str | None = None  
+  files: list[str] | None = None
+
+  def __init__(self) -> None:
     if not all([self.name, self.description, self.files]):
       raise ValueError(
         f"Module {self.__class__.__name__} must define name, description, and files"
@@ -35,28 +73,53 @@ class Module(ABC):
     logger.info(f"Initializing module '{self.name}'")
     logger.debug(f"Module '{self.name}' configuration: files={self.files}, description='{self.description}'")
     self.libraries = LibraryManager()
-    
-    # Initialize variables if the subclass defines _init_variables method
-    if hasattr(self, '_init_variables'):
-      logger.debug(f"Module '{self.name}' has variable initialization method")
-      self._init_variables()
-    logger.info(f"Module '{self.name}' initialization completed successfully")
 
-  def list(self):
+  # --------------------------
+  # SECTION: Public Commands
+  # --------------------------
+
+  def list(self) -> list[Template]:
     """List all templates."""
     logger.debug(f"Listing templates for module '{self.name}'")
     templates = []
-    module_sections = getattr(self, 'variable_sections', {})
 
     entries = self.libraries.find(self.name, self.files, sort_results=True)
     for template_dir, library_name in entries:
-      template = self._load_template_from_dir(template_dir, library_name, module_sections)
-      if template:
-        templates.append(template)
+      # Find the first matching template file
+      template_file = None
+      for file_name in self.files:
+        candidate = template_dir / file_name
+        if candidate.exists():
+          template_file = candidate
+          break
+      
+      if template_file:
+        try:
+          template = Template(template_file, library_name=library_name)
+          templates.append(template)
+        except Exception as exc:
+          logger.error(f"Failed to load template from {template_file}: {exc}")
+          continue
     
     if templates:
       logger.info(f"Listing {len(templates)} templates for module '{self.name}'")
-      table = render_template_list_table(templates, self.name, include_library=False)
+      table = Table(title=f"{self.name.capitalize()} templates")
+      table.add_column("ID", style="bold", no_wrap=True)
+      table.add_column("Name")
+      table.add_column("Description")
+      table.add_column("Version", no_wrap=True)
+      table.add_column("Tags")
+      table.add_column("Library", no_wrap=True)
+
+      for template in templates:
+        name = template.metadata.name or 'Unnamed Template'
+        desc = template.metadata.description or 'No description available'
+        version = template.metadata.version or ''
+        tags_list = template.metadata.tags or []
+        tags = ", ".join(tags_list) if isinstance(tags_list, list) else str(tags_list)
+        library = template.metadata.library or ''
+        table.add_row(template.id, name, desc, version, tags, library)
+
       console.print(table)
     else:
       logger.info(f"No templates found for module '{self.name}'")
@@ -67,47 +130,26 @@ class Module(ABC):
     self,
     id: str,
     show_content: bool = False,
-  ):
+  ) -> None:
     """Show template details."""
     logger.debug(f"Showing template '{id}' from module '{self.name}'")
     template = self._load_template_by_id(id)
 
-    header_title = template.name or template.id
-    subtitle_parts = [template.id]
-    if template.version:
-      subtitle_parts.append(f"v{template.version}")
-    if template.library:
-      subtitle_parts.append(f"library: {template.library}")
-    subtitle = " • ".join(subtitle_parts)
-
-    description = template.description or "No description available"
-    console.print(Panel(description, title=header_title, subtitle=subtitle, border_style="magenta"))
-
-    metadata_table = Table.grid(padding=(0, 2))
-    metadata_table.add_column(style="dim", justify="right")
-    metadata_table.add_column(style="white")
-    metadata_table.add_row("Author", template.author or "-")
-    metadata_table.add_row("Date", template.date or "-")
-    metadata_table.add_row("Tags", ", ".join(template.tags) if template.tags else "-")
-    metadata_table.add_row("Files", ", ".join(template.files) if template.files else template.file_path.name)
-    console.print(Panel(metadata_table, title="Details", border_style="cyan", expand=False))
-
-    if template.variables:
-      console.print(render_variable_table(template.variables, sections=template.variable_sections))
-
-    if show_content and template.content:
-      console.print(Rule("Template Content"))
-      console.print(template.content)
-
+    if not template:
+      logger.warning(f"Template '{id}' not found in module '{self.name}'")
+      console.print(f"[red]Template '{id}' not found in module '{self.name}'[/red]")
+      return
+    
+    self._display_template_details(template, id)
 
   def generate(
     self,
     id: str = Argument(..., help="Template ID"),
     out: Optional[Path] = Option(None, "--out", "-o"),
     interactive: bool = Option(True, "--interactive/--no-interactive", "-i/-n", help="Enable interactive prompting for variables"),
-    var: Optional[List[str]] = Option(None, "--var", "-v", help="Variable override (repeatable). Use KEY=VALUE or --var KEY VALUE"),
+    var: Optional[list[str]] = Option(None, "--var", "-v", help="Variable override (repeatable). Use KEY=VALUE or --var KEY VALUE"),
     ctx: Context = None,
-  ):
+  ) -> None:
     """Generate from template.
 
     Supports variable overrides via:
@@ -118,7 +160,7 @@ class Module(ABC):
     logger.info(f"Starting generation for template '{id}' from module '{self.name}'")
     template = self._load_template_by_id(id)
 
-    # Build variable overrides from Typer-collected options and any extra args
+    # Build variable overrides from Typer-collected options and any extra args BEFORE displaying template
     extra_args = []
     try:
       if ctx is not None and hasattr(ctx, "args"):
@@ -129,36 +171,35 @@ class Module(ABC):
     cli_overrides = parse_var_inputs(var or [], extra_args)
     if cli_overrides:
       logger.info(f"Received {len(cli_overrides)} variable overrides from CLI")
+      # Apply CLI overrides to template variables before display
+      if template.variables:
+        successful_overrides = template.variables.apply_overrides(cli_overrides, " -> cli")
+        if successful_overrides:
+          logger.debug(f"Applied CLI overrides for: {', '.join(successful_overrides)}")
+
+    # Show template details with CLI overrides already applied
+    self._display_template_details(template, id)
+    console.print()  # Add spacing before variable collection
 
     # Collect variable values interactively if enabled
     variable_values = {}
     if interactive and template.variables:
       prompt_handler = PromptHandler()
       
-      # Collect values with sectioned flow
-      collected_values = prompt_handler.collect_variables(
-        variables=template.variables,
-        template_name=template.name,
-        module_name=self.name,
-        template_var_order=template.template_var_names,
-        module_var_order=template.module_var_names,
-        sections=template.variable_sections,
-      )
+      # Collect values with simplified sectioned flow
+      collected_values = prompt_handler.collect_variables(template.variables)
       
       if collected_values:
         variable_values.update(collected_values)
         logger.info(f"Collected {len(collected_values)} variable values from user input")
-        
-        # Display summary of collected values
-        prompt_handler.display_variable_summary(collected_values, template.name)
 
-    # Apply CLI overrides last to take highest precedence
-    if cli_overrides:
-      variable_values.update(cli_overrides)
+    # CLI overrides are already applied to the template variables, so collect all current values
+    # This includes defaults, interactive changes, and CLI overrides
+    if template.variables:
+      variable_values.update(template.variables.get_all_values())
 
     # Render template with collected values
     try:
-      variable_values = self._apply_common_defaults(template, variable_values)
       rendered_content = template.render(variable_values)
       logger.info(f"Successfully rendered template '{id}'")
       
@@ -172,7 +213,7 @@ class Module(ABC):
         logger.info(f"Template written to file: {out}")
       else:
         # Output to stdout
-        console.print("[bold blue]Generated Template:[/bold blue]")
+        console.print("\n\n[bold blue]Generated Template:[/bold blue]")
         console.print("─" * 50)
         console.print(rendered_content)
         logger.info("Template output to stdout")
@@ -182,94 +223,42 @@ class Module(ABC):
       console.print(f"[red]Error generating template: {str(e)}[/red]")
       raise
 
-  @classmethod
-  def register_cli(cls, app: Typer):
-    """Register module commands with the main app using lazy instantiation."""
-    logger.debug(f"Registering CLI commands for module '{cls.name}'")
+  # !SECTION
 
-    def _load_module() -> "Module":
-      logger.debug(f"Lazily instantiating module '{cls.name}'")
-      return cls()
-
-    def _invoke(method_name: str, *args, **kwargs):
-      module = _load_module()
-      method = getattr(module, method_name)
-      return method(*args, **kwargs)
-
-    module_app = Typer()
-
-    @module_app.command()
-    def list():
-      return _invoke("list")
-
-    @module_app.command()
-    def show(
-      id: str = Argument(..., help="Template ID"),
-      show_content: bool = Option(
-        False,
-        "--show-content/--hide-content",
-        "-c/-C",
-        help="Display full template content",
-      ),
-    ):
-      return _invoke("show", id, show_content)
-
-    # Allow extra args so we can parse --var overrides ourselves
-    @module_app.command(context_settings={"allow_extra_args": True, "ignore_unknown_options": True})
-    def generate(
-      id: str = Argument(..., help="Template ID"),
-      out: Optional[Path] = Option(None, "--out", "-o"),
-      interactive: bool = Option(
-        True,
-        "--interactive/--no-interactive",
-        "-i/-n",
-        help="Enable interactive prompting for variables",
-      ),
-      var: Optional[List[str]] = Option(
-        None,
-        "--var",
-        "-v",
-        help="Variable override (repeatable). Use KEY=VALUE or --var KEY VALUE",
-      ),
-      ctx: Context = None,
-    ):
-      return _invoke(
-        "generate",
-        id,
-        out,
-        interactive,
-        var,
-        ctx,
-      )
+  # ------------------------------
+  # SECTION: CLI Registration
+  # ------------------------------
 
+  @classmethod
+  def register_cli(cls, app: Typer) -> None:
+    """Register module commands with the main app."""
+    logger.debug(f"Registering CLI commands for module '{cls.name}'")
+    
+    # Create a module instance
+    module_instance = cls()
+    
+    # Create subapp for this module
+    module_app = Typer(help=cls.description)
+    
+    # Register commands directly on the instance
+    module_app.command("list")(module_instance.list)
+    module_app.command("show")(module_instance.show)
+    
+    # Generate command needs special handling for context
+    module_app.command(
+      "generate", 
+      context_settings={"allow_extra_args": True, "ignore_unknown_options": True}
+    )(module_instance.generate)
+    
+    # Add the module subapp to main app
     app.add_typer(module_app, name=cls.name, help=cls.description)
     logger.info(f"Module '{cls.name}' CLI commands registered")
 
-  def _apply_common_defaults(self, template: Template, values: Dict[str, Any]) -> Dict[str, Any]:
-    """Ensure core variables have sensible defaults for non-interactive runs."""
-    defaults = {}
-
-    def needs_value(key: str) -> bool:
-      if key not in values:
-        return True
-      current = values[key]
-      return current is None or (isinstance(current, str) and current.strip() == "")
-
-    if template.variables.get_variable("service_name") and needs_value("service_name"):
-      defaults["service_name"] = template.id
-
-    if template.variables.get_variable("container_name") and needs_value("container_name"):
-      defaults["container_name"] = template.id
+  # !SECTION
 
-    if template.variables.get_variable("container_timezone") and needs_value("container_timezone"):
-      defaults["container_timezone"] = "UTC"
-
-    if defaults:
-      logger.debug(f"Applying common defaults: {defaults}")
-      for key, value in defaults.items():
-        values[key] = value
-
-    return values
+  # --------------------------
+  # SECTION: Private Methods
+  # --------------------------
 
   def _load_template_by_id(self, template_id: str) -> Template:
     result = self.libraries.find_by_id(self.name, self.files, template_id)
@@ -278,42 +267,115 @@ class Module(ABC):
       raise FileNotFoundError(f"Template '{template_id}' not found in module '{self.name}'")
 
     template_dir, library_name = result
-    template = self._load_template_from_dir(
-      template_dir,
-      library_name,
-      getattr(self, 'variable_sections', {}),
-    )
-
-    if not template:
-      raise FileNotFoundError(f"Template file for '{template_id}' not found in module '{self.name}'")
-
-    return template
-
-  def _load_template_from_dir(
-    self,
-    template_dir: Path,
-    library_name: str,
-    module_sections: Dict[str, Any],
-  ) -> Optional[Template]:
-    template_file = self._resolve_template_file(template_dir)
+    
+    # Find the first matching template file
+    template_file = None
+    for file_name in self.files:
+      candidate = template_dir / file_name
+      if candidate.exists():
+        template_file = candidate
+        break
+    
     if not template_file:
-      logger.warning(f"Template directory '{template_dir}' missing expected files {self.files}")
-      return None
-
+      raise FileNotFoundError(f"Template directory '{template_dir}' missing expected files {self.files}")
+    
     try:
-      template = Template.from_file(
-        template_file,
-        module_sections=module_sections,
-        library_name=library_name,
-      )
-      return template
+      return Template(template_file, library_name=library_name)
+    except ValueError as exc:
+      # FIXME: Refactor error handling chain to avoid redundant exception wrapping
+      # ValueError (like validation errors) already logged - just re-raise with context
+      raise FileNotFoundError(f"Template '{template_id}' validation failed in module '{self.name}'") from exc
     except Exception as exc:
       logger.error(f"Failed to load template from {template_file}: {exc}")
-      return None
+      raise FileNotFoundError(f"Template file for '{template_id}' not found in module '{self.name}'") from exc
+
+  def _display_template_details(self, template: Template, template_id: str) -> None:
+    """Display template information panel and variables table.
+    
+    Args:
+      template: The Template object to display
+      template_id: The template ID for display purposes
+    """
+    # Show template info panel
+    console.print(Panel(
+      f"[bold]{template.metadata.name or 'Unnamed Template'}[/bold]\n\n{template.metadata.description or 'No description available'}", 
+      title=f"Template: {template_id}", 
+      subtitle=f"Module: {self.name}"
+    ))
+    
+    # Show variables table if any variables exist
+    if template.variables and template.variables._set:
+      console.print()  # Add spacing
+      
+      # Create variables table
+      variables_table = Table(title="Template Variables", show_header=True, header_style="bold blue")
+      variables_table.add_column("Variable", style="cyan", no_wrap=True)
+      variables_table.add_column("Type", style="magenta")
+      variables_table.add_column("Default", style="green")
+      variables_table.add_column("Description", style="white")
+      variables_table.add_column("Origin", style="yellow")
+      
+      # Add variables grouped by section
+      first_section = True
+      for section_key, section in template.variables._set.items():
+        if section.variables:
+          # Add spacing between sections (except before first section)
+          if not first_section:
+            variables_table.add_row("", "", "", "", "", style="dim")
+          first_section = False
+          
+          # Check if section should be dimmed (toggle is False)
+          is_dimmed = False
+          
+          if section.toggle:
+            toggle_var = section.variables.get(section.toggle)
+            if toggle_var:
+              # Get the actual typed value and check if it's falsy
+              try:
+                toggle_value = toggle_var.get_typed_value()
+                if not toggle_value:
+                  is_dimmed = True
+              except Exception as e:
+                # Fallback to raw value check
+                if not toggle_var.value:
+                  is_dimmed = True
+              
+          # Add section header row with proper styling
+          disabled_text = " (disabled)" if is_dimmed else ""
+          required_text = " [yellow](required)[/yellow]" if section.required else ""
+          
+          if is_dimmed:
+            # Use Rich markup for dimmed bold text
+            header_text = f"[bold dim]{section.title}{required_text}{disabled_text}[/bold dim]"
+          else:
+            # Use Rich markup for bold text
+            header_text = f"[bold]{section.title}{required_text}{disabled_text}[/bold]"
+          
+          variables_table.add_row(
+            header_text,
+            "", "", "", ""
+          )
+          
+          # Add variables in this section
+          for var_name, variable in section.variables.items():
+            # Apply dim style to ALL variables if section toggle is False
+            row_style = "dim" if is_dimmed else None
+            
+            # Format default value
+            default_val = str(variable.value) if variable.value is not None else ""
+            if len(default_val) > 30:
+              default_val = default_val[:27] + "..."
+            
+            variables_table.add_row(
+              f"  {var_name}",
+              variable.type or "str",
+              default_val,
+              variable.description or "",
+              variable.origin or "unknown",
+              style=row_style
+            )
+      
+      console.print(variables_table)
+
+# !SECTION
 
-  def _resolve_template_file(self, template_dir: Path) -> Optional[Path]:
-    for file_name in self.files:
-      candidate = template_dir / file_name
-      if candidate.exists():
-        return candidate
-    return None

+ 77 - 143
cli/core/prompt.py

@@ -1,129 +1,96 @@
-from typing import Dict, Any, List, Optional
-from collections import OrderedDict
+from __future__ import annotations
+
+from typing import Dict, Any, List, Callable
 import logging
 from rich.console import Console
 from rich.prompt import Prompt, Confirm, IntPrompt
 from rich.table import Table
 
 from .variables import Variable, VariableCollection
-from .renderers import render_variable_table
 
 logger = logging.getLogger(__name__)
 
 
-class PromptHandler:
-  """Interactive prompt handler for collecting template variables.
+# ---------------------------
+# SECTION: PromptHandler Class
+# ---------------------------
 
-  Simplified design:
-  - Single entrypoint: collect_variables(VariableCollection)
-  - Asks only for variables that don't have values
-  - Clear, compact output with a summary table
-  """
+class PromptHandler:
+  """Simple interactive prompt handler for collecting template variables."""
 
-  def __init__(self):
+  def __init__(self) -> None:
     self.console = Console()
 
-  def collect_variables(
-    self,
-    variables: VariableCollection,
-    template_name: str = "",
-    module_name: str = "",
-    template_var_order: List[str] = None,
-    module_var_order: List[str] = None,
-    sections: Optional[OrderedDict[str, Dict[str, Any]]] = None,
-  ) -> Dict[str, Any]:
-    """Collect values for variables that need input with an ordered, sectioned flow.
-
-    When sections metadata is provided, it defines the order, prompt text, and
-    toggle behavior for each section. Otherwise all variables are shown in a
-    single "General" group.
+  # --------------------------
+  # SECTION: Public Methods
+  # --------------------------
+
+  def collect_variables(self, variables: VariableCollection) -> dict[str, Any]:
+    """Collect values for variables by iterating through sections.
+    
+    Args:
+        variables: VariableCollection with organized sections and variables
+        
+    Returns:
+        Dict of variable names to collected values
     """
-    template_var_order = template_var_order or []
-    module_var_order = module_var_order or []
-
-    section_meta_list: List[Dict[str, Any]] = []
-    if sections:
-      section_meta_list = list(sections.values())
-    else:
-      section_meta_list = [
-        {
-          "title": "General",
-          "variables": variables.get_variable_names(),
-          "toggle": None,
-          "prompt": None,
-          "description": None,
-        }
-      ]
-
-    self._display_current_values(variables, sections)
-
     if not Confirm.ask("Customize any settings?", default=False):
       logger.info("User opted to keep all default values")
       return {}
 
     collected: Dict[str, Any] = {}
 
-    for section_meta in section_meta_list:
-      title = section_meta.get("title") or "General"
-      prompt_text = section_meta.get("prompt")
-      toggle_name = section_meta.get("toggle")
-      description_text = section_meta.get("description")
-      var_names = section_meta.get("variables", [])
-
-      # Filter to existing variables
-      variable_objects = [variables.get_variable(name) for name in var_names]
-      variable_objects = [var for var in variable_objects if var is not None]
-
-      if not variable_objects:
+    # Process each section
+    for section_key, section in variables._set.items():
+      if not section.variables:
         continue
 
-      toggle_var = None
-      if toggle_name:
-        toggle_var = variables.get_variable(toggle_name)
-        if toggle_var is None:
-          toggle_var = next((var for var in variable_objects if var.name == toggle_name), None)
-
-      if toggle_var:
-        enabled = self._prompt_bool(
-          prompt_text or f"Enable {title}?",
-          toggle_var.get_typed_value(),
-        )
-        if enabled != bool(toggle_var.get_typed_value()):
-          collected[toggle_var.name] = enabled
-          toggle_var.value = enabled
-        if not enabled:
+      # Always show section header first
+      self.console.print(f"\n[bold cyan]{section.title}[/bold cyan]")
+      if section.description:
+        self.console.print(f"[dim]{section.description}[/dim]")
+      self.console.print("─" * 40, style="dim")
+
+      # Handle section toggle - skip for required sections
+      if section.required:
+        # Required sections are always processed, no toggle prompt needed
+        logger.debug(f"Processing required section '{section.key}' without toggle prompt")
+      elif section.toggle:
+        toggle_var = section.variables.get(section.toggle)
+        if toggle_var:
+          prompt_text = section.prompt or f"Enable {section.title}?"
+          current_value = toggle_var.get_typed_value()
+          new_value = self._prompt_bool(prompt_text, current_value)
+          
+          if new_value != current_value:
+            collected[toggle_var.name] = new_value
+            toggle_var.value = new_value
+          
+          # Skip remaining variables in section if disabled
+          if not new_value:
+            continue
+
+      # Collect variables in this section
+      for var_name, variable in section.variables.items():
+        # Skip toggle variable (already handled)
+        if section.toggle and var_name == section.toggle:
           continue
-      elif prompt_text:
-        self.console.print(prompt_text, style="dim")
-
-      self.console.print(f"[bold magenta]{title}[/bold magenta]")
-      self.console.print("─" * 50, style="dim")
-      if description_text:
-        self.console.print(f"[dim]{description_text}[/dim]")
-
-      for var in variable_objects:
-        if toggle_var and var.name == toggle_var.name:
-          continue
-        current = var.get_typed_value()
-        new_value = self._prompt_variable(var)
-        if new_value != current:
-          collected[var.name] = new_value
-          var.value = new_value
-
-      self.console.print()
+          
+        current_value = variable.get_typed_value()
+        new_value = self._prompt_variable(variable)
+        
+        if new_value != current_value:
+          collected[var_name] = new_value
+          variable.value = new_value
 
     logger.info(f"Variable collection completed. Collected {len(collected)} values")
     return collected
 
-  def _display_current_values(
-    self,
-    variables: VariableCollection,
-    sections: Optional[OrderedDict[str, Dict[str, Any]]] = None,
-  ) -> None:
-    self.console.print(
-      render_variable_table(variables, title="Current Defaults", sections=sections)
-    )
+  # !SECTION
 
+  # ---------------------------
+  # SECTION: Private Methods
+  # ---------------------------
 
   def _prompt_variable(self, variable: Variable) -> Any:
     """Prompt for a single variable value based on its type."""
@@ -153,14 +120,14 @@ class PromptHandler:
         default_value = variable.value
         handler = self._get_prompt_handler(variable)
 
-  def _get_prompt_handler(self, variable: Variable):
+  def _get_prompt_handler(self, variable: Variable) -> Callable:
     """Return the prompt function for a variable type."""
-    if variable.type == "enum":
-      return lambda text, default: self._prompt_enum(text, variable.options or [], default)
-    return {
+    handlers = {
       "bool": self._prompt_bool,
       "int": self._prompt_int,
-    }.get(variable.type, self._prompt_string)
+      "enum": lambda text, default: self._prompt_enum(text, variable.options or [], default),
+    }
+    return handlers.get(variable.type, self._prompt_string)
 
   def _show_validation_error(self, message: str) -> None:
     """Display validation feedback consistently."""
@@ -189,16 +156,16 @@ class PromptHandler:
         logger.warning(f"Invalid default integer value: {default}")
     return IntPrompt.ask(prompt_text, default=default_int)
 
-  def _prompt_enum(self, prompt_text: str, options: List[str], default: Any = None) -> str:
+  def _prompt_enum(self, prompt_text: str, options: list[str], default: Any = None) -> str:
+    """Prompt for enum selection with validation."""
     if not options:
-      logger.warning("Enum variable has no options, falling back to string prompt")
       return self._prompt_string(prompt_text, default)
 
     self.console.print(f"  Options: {', '.join(options)}", style="dim")
 
-    if default and default not in options:
-      logger.warning(f"Default value '{default}' not in options {options}")
-      default = None
+    # Validate default is in options
+    if default and str(default) not in options:
+      default = options[0]
 
     while True:
       value = Prompt.ask(
@@ -208,39 +175,6 @@ class PromptHandler:
       )
       if value in options:
         return value
-      self.console.print(f"  [red]Invalid choice. Please select from: {', '.join(options)}[/red]")
-
-  def display_variable_summary(self, collected_values: Dict[str, Any], template_name: str = ""):
-    """Display a summary of collected variable values."""
-    if not collected_values:
-      return
-
-    title = "Variable Summary"
-    if template_name:
-      title += f" - {template_name}"
-
-    table = Table(title=title, show_header=True, header_style="bold blue")
-    table.add_column("Variable", style="cyan", min_width=20)
-    table.add_column("Value", style="green")
-    table.add_column("Type", style="dim", justify="center")
-
-    for var_name in sorted(collected_values.keys()):
-      value = collected_values[var_name]
-      if isinstance(value, bool):
-        display_value = "true" if value else "false"  # No emojis per logging rules
-        var_type = "bool"
-      elif isinstance(value, int):
-        display_value = str(value)
-        var_type = "int"
-      else:
-        display_value = str(value) if value else ""
-        var_type = "str"
-
-      if len(display_value) > 50:
-        display_value = display_value[:47] + "..."
-
-      table.add_row(var_name, display_value, var_type)
-
-    self.console.print()
-    self.console.print(table)
-    self.console.print()
+      self.console.print(f"[red]Invalid choice. Select from: {', '.join(options)}[/red]")
+
+# !SECTION

+ 18 - 3
cli/core/registry.py

@@ -1,17 +1,24 @@
 """Module registry system."""
+from __future__ import annotations
+
 import logging
+from typing import Iterator, Type
 
 logger = logging.getLogger(__name__)
 
 
+# ------------------------------
+# SECTION: ModuleRegistry Class
+# ------------------------------
+
 class ModuleRegistry:
   """Simple module registry without magic."""
   
-  def __init__(self):
+  def __init__(self) -> None:
     self._modules = {}
     logger.debug("Initializing module registry")
   
-  def register(self, module_class):
+  def register(self, module_class: Type) -> None:
     """Register a module class."""
     # Module class defines its own name attribute
     logger.debug(f"Attempting to register module class '{module_class.name}'")
@@ -23,11 +30,19 @@ class ModuleRegistry:
     logger.info(f"Registered module '{module_class.name}' (total modules: {len(self._modules)})")
     logger.debug(f"Module '{module_class.name}' details: description='{module_class.description}', files={module_class.files}")
   
-  def iter_module_classes(self):
+  def iter_module_classes(self) -> Iterator[tuple[str, Type]]:
     """Yield registered module classes without instantiating them."""
     logger.debug(f"Iterating over {len(self._modules)} registered module classes")
     for name in sorted(self._modules.keys()):
       yield name, self._modules[name]
 
+# !SECTION
+
+# -------------------------
+# SECTION: Global Instance
+# -------------------------
+
 # Global registry
 registry = ModuleRegistry()
+
+# !SECTION

+ 0 - 125
cli/core/renderers.py

@@ -1,125 +0,0 @@
-from collections import OrderedDict
-from typing import Dict, Optional, Any, List
-
-from rich.table import Table
-
-from .variables import VariableCollection
-
-
-def render_variable_table(
-  variables: VariableCollection,
-  title: str = "Variables",
-  show_options: bool = False,
-  sections: Optional[OrderedDict[str, Dict[str, Any]]] = None,
-) -> Table:
-  """Build a Rich table representing variable metadata."""
-
-  table = Table(title=title, header_style="bold cyan")
-  table.add_column("Name", style="cyan", no_wrap=True)
-  table.add_column("Type", style="yellow", no_wrap=True)
-  if show_options:
-    table.add_column("Options", style="magenta")
-  table.add_column("Default", style="green", no_wrap=True)
-  table.add_column("Description", style="white")
-
-  rows_by_name: Dict[str, Dict[str, str]] = {
-    row["name"]: row for row in variables.as_rows()
-  }
-
-  def _style_value(value: str, enabled: bool) -> str:
-    if enabled or not value:
-      return value
-    return f"[grey50]{value}[/grey50]"
-
-  def _add_variable_row(row: Dict[str, str], *, enabled: bool = True) -> None:
-    cells = [
-      _style_value(row["name"], enabled),
-      _style_value(row["type"], enabled),
-    ]
-    if show_options:
-      options = ", ".join(row["options"]) if row["options"] else ""
-      cells.append(_style_value(options, enabled))
-    cells.extend(
-      [
-        _style_value(row["default"], enabled),
-        _style_value(row["description"], enabled),
-      ]
-    )
-    style = None if enabled else "grey50"
-    table.add_row(*cells, style=style)
-
-  if sections:
-    column_count = 4 + (1 if show_options else 0)
-    for idx, meta in enumerate(sections.values()):
-      title = meta.get("title") or "Section"
-      names = meta.get("variables", [])
-      toggle_var = None
-      toggle_name = meta.get("toggle")
-      if toggle_name:
-        toggle_var = variables.get_variable(toggle_name)
-      enabled = True
-      if toggle_var is not None:
-        try:
-          enabled = bool(toggle_var.get_typed_value())
-        except ValueError:
-          enabled = True
-
-      header_style = "bold magenta" if enabled else "bold grey50"
-      header_title = title if enabled else f"{title} (disabled)"
-      header_cells = [
-        _style_value(header_title, enabled)
-      ] + ["" for _ in range(column_count - 1)]
-      table.add_row(*header_cells, style=header_style, end_section=False)
-      for name in names:
-        row = rows_by_name.get(name)
-        if not row:
-          continue
-        _add_variable_row(row, enabled=enabled)
-      if idx != len(sections) - 1:
-        table.add_section()
-  else:
-    for row in rows_by_name.values():
-      _add_variable_row(row)
-
-  return table
-
-
-def render_template_list_table(
-  templates: List[Any],
-  module_name: str,
-  *,
-  include_library: bool = False,
-) -> Table:
-  """Build a Rich table for template listings without extra info lines.
-  
-  Columns and formatting:
-    - ID (with dimmed (version) suffix if available)
-    - Name
-    - Description (takes remaining width, truncates with ellipsis)
-    - Author (last column)
-  """
-  table = Table(title=f"{module_name.title()} Templates", header_style="bold cyan", expand=True)
-
-  # Constrain non-description columns to preserve space
-  table.add_column("ID", style="cyan", no_wrap=True, max_width=28, overflow="ellipsis")
-  table.add_column("Name", style="white", no_wrap=True, max_width=28, overflow="ellipsis")
-  if include_library:
-    table.add_column("Library", style="magenta", no_wrap=True, max_width=16, overflow="ellipsis")
-  # Description gets most space via ratio and truncates with ellipsis
-  table.add_column("Description", style="white", no_wrap=True, overflow="ellipsis", ratio=1)
-  table.add_column("Author", style="yellow", no_wrap=True, max_width=24, overflow="ellipsis")
-
-  for tpl in templates:
-    _id = tpl.id or "-"
-    _ver = tpl.version or ""
-    id_with_ver = f"{_id} [dim]({_ver})[/dim]" if _ver else _id
-    name = tpl.name or _id
-    author = tpl.author or "-"
-    desc = tpl.description or "-"
-    if include_library:
-      library = tpl.library or "-"
-      table.add_row(id_with_ver, name, library, desc, author)
-    else:
-      table.add_row(id_with_ver, name, desc, author)
-
-  return table

+ 269 - 333
cli/core/template.py

@@ -1,140 +1,197 @@
+from __future__ import annotations
+
 from .variables import Variable, VariableCollection
 from pathlib import Path
-from typing import Any, Dict, List, Set, Tuple, Optional
+from typing import Any, Dict, List, Set
 from dataclasses import dataclass, field
-from collections import OrderedDict
 import logging
-import re
-from jinja2 import Environment, BaseLoader, meta, nodes, TemplateSyntaxError
+from jinja2 import Environment, BaseLoader, meta, nodes
 import frontmatter
 
 logger = logging.getLogger(__name__)
 
 
-def _log_variable_stage(stage: str, names) -> None:
-  """Helper to emit consistent debug output for variable lists."""
-  if not names:
-    return
-  if isinstance(names, (set, tuple)):
-    names = list(names)
-  logger.debug(f"{stage}: {names}")
-
+# -----------------------
+# SECTION: Metadata Class
+# -----------------------
 
 @dataclass
-class Template:
-  """Represents a template file with frontmatter and content."""
-
-  # Required fields
-  file_path: Path
-  content: str = ""
-
-  # Frontmatter metadata
-  id: str = ""
-  name: str = ""
-  description: str = "No description available"
-  author: str = ""
-  date: str = ""
-  version: str = ""
+class TemplateMetadata:
+  """Represents template metadata with proper typing."""
+  name: str
+  description: str
+  author: str
+  date: str
+  version: str
   module: str = ""
   tags: List[str] = field(default_factory=list)
   files: List[str] = field(default_factory=list)
-  library: str = ""
-  variable_sections: "OrderedDict[str, Dict[str, Any]]" = field(default_factory=OrderedDict, init=False)
+  library: str = "unknown"
+
+  def __init__(self, post: frontmatter.Post, library_name: str | None = None) -> None:
+    """Initialize TemplateMetadata from frontmatter post."""
+    # Validate metadata format first
+    self._validate_metadata(post)
+    
+    # Extract metadata section
+    metadata_section = post.metadata.get("metadata", {})
+    
+    self.name = metadata_section.get("name", "")
+    self.description = metadata_section.get("description", "No description available")
+    self.author = metadata_section.get("author", "")
+    self.date = metadata_section.get("date", "")
+    self.version = metadata_section.get("version", "")
+    self.module = metadata_section.get("module", "")
+    self.tags = metadata_section.get("tags", []) or []
+    self.files = metadata_section.get("files", []) or []
+    self.library = library_name or "unknown"
 
-  # Extracted/merged variables
-  variables: VariableCollection = field(default_factory=VariableCollection, init=False)
-  # Source tracking for prompting and ordering
-  template_var_names: List[str] = field(default_factory=list, init=False)
-  module_var_names: List[str] = field(default_factory=list, init=False)
-
-  def render(self, variable_values: Optional[Dict[str, Any]] = None) -> str:
-    """Render the template with given variable overrides."""
-    if variable_values:
-      for name, value in variable_values.items():
-        var = self.variables.get_variable(name)
-        if var:
-          try:
-            var.value = var.convert(value)
-          except ValueError as exc:
-            raise ValueError(f"Invalid value for variable '{name}': {exc}")
-
-    env = self._create_jinja_env()
-    context = self.variables.to_jinja_context()
-    template = env.from_string(self.content)
-    return template.render(context)
+  @staticmethod
+  def _validate_metadata(post: frontmatter.Post) -> None:
+    """Validate that template has required 'metadata' section with all required fields."""
+    metadata_section = post.metadata.get("metadata")
+    if metadata_section is None:
+      raise ValueError("Template format error: missing 'metadata' section")
+    
+    # Validate that metadata section has all required fields
+    required_fields = ["name", "author", "version", "date", "description"]
+    missing_fields = [field for field in required_fields if not metadata_section.get(field)]
+    
+    if missing_fields:
+      raise ValueError(f"Template format error: missing required metadata fields: {missing_fields}")
+
+# !SECTION
+
+# -----------------------
+# SECTION: Template Class
+# -----------------------
 
-  def get_variable_names(self) -> List[str]:
-    """List variable names in insertion order."""
-    return self.variables.get_variable_names()
+@dataclass
+class Template:
+  """Represents a template file with frontmatter and content."""
 
-  @classmethod
-  def from_file(
-    cls,
-    file_path: Path,
-    module_sections: Dict[str, Any] = None,
-    library_name: str = ""
-  ) -> "Template":
+  def __init__(self, file_path: Path, library_name: str) -> None:
     """Create a Template instance from a file path."""
     logger.debug(f"Loading template from file: {file_path}")
 
     try:
-      frontmatter_data, content = cls._parse_frontmatter(file_path)
-      template_id = file_path.parent.name
-
-      template = cls(
-        file_path=file_path,
-        content=content,
-        id=template_id,
-        name=frontmatter_data.get("name", ""),
-        description=frontmatter_data.get("description", "No description available"),
-        author=frontmatter_data.get("author", ""),
-        date=frontmatter_data.get("date", ""),
-        version=frontmatter_data.get("version", ""),
-        module=frontmatter_data.get("module", ""),
-        tags=frontmatter_data.get("tags", []),
-        files=frontmatter_data.get("files", []),
-        library=library_name,
-      )
-
-      logger.info(f"Loaded template '{template.id}' (v{template.version or 'unversioned'})")
-
-      module_section_defs = module_sections or {}
-      module_flat, module_section_meta = cls._flatten_sections(module_section_defs)
-
-      template_section_defs = frontmatter_data.get("variable_sections") or {}
-      legacy_frontmatter_vars = frontmatter_data.get("variables")
-      if legacy_frontmatter_vars:
-        template_section_defs = OrderedDict(template_section_defs)
-        template_section_defs["template_specific"] = {
-          "title": f"{template.name or template_id} Specific",
-          "prefix": "",
-          "vars": legacy_frontmatter_vars,
-        }
-
-      template_flat, template_section_meta = cls._flatten_sections(template_section_defs)
-
-      # Extract and merge variables (only those actually used)
-      variables, tpl_names, mod_names = cls._merge_variables(
-        content,
-        module_flat,
-        template_flat,
-        template_id,
-      )
-      template.variables = variables
-      template.template_var_names = tpl_names
-      template.module_var_names = mod_names
-      template.variable_sections = cls._combine_sections_meta(
-        module_section_meta,
-        template_section_meta,
-        template.variables,
-      )
-
-      logger.debug(
-        f"Final variables for template '{template.id}': {template.variables.get_variable_names()}"
-      )
-
-      return template
-
+      # Parse frontmatter and content from the file
+      logger.debug(f"Loading template from file: {file_path}")
+      with open(file_path, "r", encoding="utf-8") as f:
+        post = frontmatter.load(f)
+
+      # Load metadata using the TemplateMetadata constructor
+      self.metadata = TemplateMetadata(post, library_name)
+      logger.debug(f"Loaded metadata: {self.metadata}")
+
+      # Validate 'kind' field presence
+      self._validate_kind(post)
+
+      # Load module specifications
+      kind = post.metadata.get("kind", None)
+      module_specs = {}
+      if kind:
+        try:
+          import importlib
+          module = importlib.import_module(f"..modules.{kind}", package=__package__)
+          module_specs = getattr(module, 'spec', {})
+        except Exception as e:
+          raise ValueError(f"Error loading module specifications for kind '{kind}': {str(e)}")
+      
+      # Loading template variable specs - merge template specs with module specs
+      template_specs = post.metadata.get("spec", {})
+      
+      # Deep merge specs: merge vars within sections instead of replacing entire sections
+      # Preserve order: start with module spec order, then append template-only sections
+      merged_specs = {}
+      
+      # First, process all sections from module spec (preserves order)
+      for section_key in module_specs.keys():
+        module_section = module_specs.get(section_key, {})
+        template_section = template_specs.get(section_key, {})
+        
+        # Start with module section as base
+        merged_section = {**module_section}
+        
+        # Merge template section metadata (title, prompt, etc.)
+        for key in ['title', 'prompt', 'description', 'toggle', 'required']:
+          if key in template_section:
+            merged_section[key] = template_section[key]
+        
+        # Merge vars: template vars extend/override module vars
+        module_vars = module_section.get('vars', {})
+        template_vars = template_section.get('vars', {})
+        merged_section['vars'] = {**module_vars, **template_vars}
+        
+        merged_specs[section_key] = merged_section
+      
+      # Then, add any sections that exist only in template spec
+      for section_key in template_specs.keys():
+        if section_key not in module_specs:
+          template_section = template_specs[section_key]
+          merged_section = {**template_section}
+          merged_specs[section_key] = merged_section
+      
+      logger.debug(f"Loaded specs: {merged_specs}")
+
+      self.file_path = file_path
+      self.id = file_path.parent.name
+
+      self.content = post.content
+      logger.debug(f"Loaded content: {self.content}")
+
+      # Extract variables used in template and their defaults
+      self.jinja_env = self._create_jinja_env()
+      ast = self.jinja_env.parse(self.content)
+      used_variables: Set[str] = meta.find_undeclared_variables(ast)
+      default_values: Dict[str, str] = self._extract_jinja_defaults(ast)
+      logger.debug(f"Used variables: {used_variables}, defaults: {default_values}")
+
+      # Validate that all used variables are defined in specs
+      self._validate_variable_definitions(used_variables, merged_specs)
+
+      # Filter specs to only used variables and merge in Jinja defaults
+      filtered_specs = {}
+      for section_key, section_data in merged_specs.items():
+        if "vars" in section_data:
+          filtered_vars = {}
+          for var_name, var_data in section_data["vars"].items():
+            if var_name in used_variables:
+              # Determine origin: check where this variable comes from
+              module_has_var = (section_key in module_specs and 
+                               var_name in module_specs.get(section_key, {}).get("vars", {}))
+              template_has_var = (section_key in template_specs and 
+                                 var_name in template_specs.get(section_key, {}).get("vars", {}))
+              
+              if module_has_var and template_has_var:
+                origin = "module -> template"  # Template overrides module
+              elif template_has_var and not module_has_var:
+                origin = "template"  # Template-only variable
+              else:
+                origin = "module"  # Module-only variable
+              
+              # Merge in Jinja default and origin if present
+              var_data_with_origin = {**var_data, "origin": origin}
+              if var_name in default_values:
+                var_data_with_origin["default"] = default_values[var_name]
+              elif "default" not in var_data_with_origin:
+                var_data_with_origin["default"] = ""
+                logger.warning(f"No default specified for variable '{var_name}' in template '{self.id}'")
+              
+              filtered_vars[var_name] = var_data_with_origin
+          
+          if filtered_vars:  # Only include sections that have used variables
+            filtered_specs[section_key] = {**section_data, "vars": filtered_vars}
+
+      # Create VariableCollection from filtered specs
+      self.variables = VariableCollection(filtered_specs)
+
+      logger.info(f"Loaded template '{self.id}' (v{self.metadata.version})")
+
+    except ValueError as e:
+      # FIXME: Refactor error handling to avoid redundant catching and re-raising
+      # ValueError already logged in validation method - don't duplicate
+      raise
     except FileNotFoundError:
       logger.error(f"Template file not found: {file_path}")
       raise
@@ -142,6 +199,95 @@ class Template:
       logger.error(f"Error loading template from {file_path}: {str(e)}")
       raise
 
+  # ---------------------------
+  # SECTION: Validation Methods
+  # ---------------------------
+
+  @staticmethod
+  def _extract_jinja_defaults(ast: nodes.Node) -> dict[str, str]:
+    """Extract default values from Jinja2 template variables with default filters."""
+    defaults = {}
+    
+    def visit_node(node):
+      """Recursively visit AST nodes to find default filter usage."""
+      if isinstance(node, nodes.Filter):
+        # Check if this is a 'default' filter
+        if node.name == 'default' and len(node.args) > 0:
+          # Get the variable being filtered
+          if isinstance(node.node, nodes.Name):
+            var_name = node.node.name
+            # Get the default value (first argument to default filter)
+            default_arg = node.args[0]
+            if isinstance(default_arg, nodes.Const):
+              defaults[var_name] = str(default_arg.value)
+            elif isinstance(default_arg, nodes.Name):
+              defaults[var_name] = default_arg.name
+      
+      # Recursively visit child nodes
+      for child in node.iter_child_nodes():
+        visit_node(child)
+    
+    visit_node(ast)
+    return defaults
+
+  @staticmethod
+  def _validate_kind(post: frontmatter.Post) -> None:
+    """Validate that template has required 'kind' field."""
+    if not post.metadata.get("kind"):
+      raise ValueError("Template format error: missing 'kind' field")
+
+  def _validate_variable_definitions(self, used_variables: set[str], merged_specs: dict[str, Any]) -> None:
+    """Validate that all variables used in Jinja2 content are defined in the spec.
+    
+    Args:
+      used_variables: Set of variable names found in the Jinja2 template content
+      merged_specs: Combined module and template specifications
+      
+    Raises:
+      ValueError: If any used variables are not defined in the spec
+    """
+    # Collect all defined variables from all sections
+    defined_variables = set()
+    for section_data in merged_specs.values():
+      if "vars" in section_data and isinstance(section_data["vars"], dict):
+        defined_variables.update(section_data["vars"].keys())
+    
+    # Find variables used in template but not defined in spec
+    undefined_variables = used_variables - defined_variables
+    
+    if undefined_variables:
+      # Sort for consistent error messages
+      undefined_list = sorted(undefined_variables)
+      
+      # Create detailed error message
+      error_msg = (
+        f"Template validation error in '{self.id}': "
+        f"Variables used in template content but not defined in spec: {undefined_list}\n\n"
+        f"Please add these variables to your template spec or module spec. "
+        f"Example:\n"
+        f"spec:\n"
+        f"  general:\n"
+        f"    vars:\n"
+      )
+      
+      # Add example spec entries for each undefined variable
+      for var_name in undefined_list:
+        error_msg += (
+          f"      {var_name}:\n"
+          f"        type: str\n"
+          f"        description: Description for {var_name}\n"
+          f"        default: \"\"\n"
+        )
+      
+      logger.error(error_msg)
+      raise ValueError(error_msg)
+
+  # !SECTION
+
+  # ---------------------------------
+  # SECTION: Jinja2 Rendering Methods
+  # ---------------------------------
+
   @staticmethod
   def _create_jinja_env() -> Environment:
     """Create standardized Jinja2 environment for consistent template processing."""
@@ -152,220 +298,10 @@ class Template:
       keep_trailing_newline=False,
     )
 
-  @staticmethod
-  def _parse_frontmatter(file_path: Path) -> Tuple[Dict[str, Any], str]:
-    """Parse frontmatter and content from a file."""
-    with open(file_path, "r", encoding="utf-8") as f:
-      post = frontmatter.load(f)
-    return post.metadata, post.content
-
-  @staticmethod
-  def _extract_template_variables(content: str) -> Set[str]:
-    """Extract variable names used in Jinja2 template content (flat names only).
-
-    Strategy:
-    - Use Jinja2 AST to find undeclared variables
-    - Ignore dotted and bracket access (templates should use flat names only)
-    """
-    try:
-      env = Template._create_jinja_env()
-      ast = env.parse(content)
-      root_variables = meta.find_undeclared_variables(ast)
-      logger.debug(f"Found variables: {sorted(root_variables)}")
-      return set(root_variables)
-    except TemplateSyntaxError as e:
-      logger.warning(f"Template syntax error while analyzing variables: {e}")
-      return set()
-    except Exception as e:
-      logger.warning(f"Error analyzing template variables: {e}")
-      return set()
-
-  @staticmethod
-  def _extract_jinja_defaults(content: str) -> Dict[str, str]:
-    """Extract default values from Jinja2 | default() filters for flat names."""
-    defaults: Dict[str, str] = {}
-    try:
-      # Flat var names only (no dots). Single or double quotes supported
-      default_pattern = r"{{\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\|\s*default\(\s*['\"]([^'\"]*)['\"]\s*\)"
-      matches = re.findall(default_pattern, content)
-      for var_name, default_value in matches:
-        defaults[var_name.strip()] = default_value
-      logger.debug(f"Found Jinja2 defaults: {defaults}")
-      return defaults
-    except Exception as e:
-      logger.warning(f"Error extracting Jinja2 defaults: {e}")
-      return {}
-
-  @staticmethod
-  def _merge_variables(
-    content: str,
-    module_variables: Dict[str, Any],
-    template_variables: Dict[str, Any],
-    template_id: str,
-  ) -> Tuple[VariableCollection, List[str], List[str]]:
-    """Merge module + frontmatter vars, auto-create missing, and apply Jinja defaults.
-
-    Precedence (highest to lowest when a value exists):
-      1. Template frontmatter variables
-      2. Jinja | default() values (only if no value is set)
-      3. Module variables
-      4. Auto-created variables for what's used in content
-    """
-    used_variables = Template._extract_template_variables(content)
-    jinja_defaults = Template._extract_jinja_defaults(content)
-
-    declared_variables = set(module_variables.keys()) | set(template_variables.keys())
-    missing_declared = used_variables - declared_variables
-    if missing_declared:
-      raise ValueError(
-        "Unknown variables referenced in template: "
-        + ", ".join(sorted(missing_declared))
-      )
-
-    variables = VariableCollection()
-
-    # Keep only variables that are actually referenced in the template content,
-    # plus any explicitly defined in template frontmatter.
-    relevant_names = used_variables | set(template_variables.keys())
-
-    _log_variable_stage(
-      "Processing module variables",
-      list(module_variables.keys()) if module_variables else [],
-    )
-
-    variables.add_from_dict(module_variables, relevant_names, label="module")
-    variables.add_from_dict(template_variables, relevant_names, label="template")
-
-    template_var_names_ordered: List[str] = [n for n in template_variables.keys() if n in relevant_names]
-    module_var_names_ordered: List[str] = [n for n in module_variables.keys() if n in relevant_names]
-
-    variables.apply_jinja_defaults(jinja_defaults)
-
-    Template._ensure_defaults(variables, template_id)
-
-    logger.debug(
-      f"Smart merge: {len(relevant_names)} used, {len(variables)} defined = {len(variables)} final variables"
-    )
-    return variables, template_var_names_ordered, module_var_names_ordered
-
-  @staticmethod
-  def _ensure_defaults(variables: VariableCollection, template_id: str) -> None:
-    """Ensure every variable has a default value; raise if any are missing."""
-    missing: List[str] = []
-
-    for var_name in variables.get_variable_names():
-      variable = variables.get_variable(var_name)
-      if not variable:
-        continue
-      if variable.value not in (None, ""):
-        continue
-
-      missing.append(var_name)
-
-    if missing:
-      raise ValueError(
-        f"Missing default value(s) for variables {', '.join(missing)} in template '{template_id}'"
-      )
-
-  @staticmethod
-  def _flatten_sections(
-    section_defs: Dict[str, Any],
-  ) -> Tuple[Dict[str, Dict[str, Any]], "OrderedDict[str, Dict[str, Any]]"]:
-    flat: Dict[str, Dict[str, Any]] = {}
-    meta: "OrderedDict[str, Dict[str, Any]]" = OrderedDict()
-
-    if not section_defs:
-      return flat, meta
-
-    for key, data in section_defs.items():
-      if not isinstance(data, dict):
-        continue
-
-      title = data.get("title") or key.replace('_', ' ').title()
-      toggle_name = data.get("toggle")
-      vars_spec = data.get("vars") or {}
-
-      variables_list: List[str] = []
-      for var_name, spec in vars_spec.items():
-        spec = dict(spec)
-        spec.setdefault("section", title)
-        flat[var_name] = spec
-        variables_list.append(var_name)
-
-      if toggle_name:
-        if toggle_name not in flat:
-          flat[toggle_name] = {
-            "type": "bool",
-            "default": False,
-            "section": title,
-            "description": data.get("toggle_description", ""),
-          }
-        if toggle_name not in variables_list:
-          variables_list.insert(0, toggle_name)
-
-      meta[key] = {
-        "title": title,
-        "prompt": data.get("prompt"),
-        "description": data.get("description"),
-        "toggle": toggle_name,
-        "variables": variables_list,
-      }
-
-    return flat, meta
-
-  @staticmethod
-  def _combine_sections_meta(
-    module_meta: "OrderedDict[str, Dict[str, Any]]",
-    template_meta: "OrderedDict[str, Dict[str, Any]]",
-    variables: VariableCollection,
-  ) -> "OrderedDict[str, Dict[str, Any]]":
-    combined: "OrderedDict[str, Dict[str, Any]]" = OrderedDict()
-
-    def _add_meta(source: "OrderedDict[str, Dict[str, Any]]") -> None:
-      for key, meta in source.items():
-        existing = combined.get(key)
-        if existing:
-          existing["variables"].extend(v for v in meta["variables"] if v not in existing["variables"])
-          if meta.get("prompt"):
-            existing["prompt"] = meta["prompt"]
-          if meta.get("description"):
-            existing["description"] = meta["description"]
-          if meta.get("toggle"):
-            existing["toggle"] = meta["toggle"]
-          if meta.get("title"):
-            existing["title"] = meta["title"]
-        else:
-          combined[key] = {
-            "title": meta.get("title") or key.replace('_', ' ').title(),
-            "prompt": meta.get("prompt"),
-            "description": meta.get("description"),
-            "toggle": meta.get("toggle"),
-            "variables": list(meta.get("variables", [])),
-          }
-
-    _add_meta(module_meta)
-    _add_meta(template_meta)
-
-    # Filter out variables that are not present in the final collection
-    existing_names = set(variables.get_variable_names())
-    seen: Set[str] = set()
-    for key, meta in list(combined.items()):
-      filtered = [name for name in meta["variables"] if name in existing_names]
-      if not filtered:
-        del combined[key]
-        continue
-      meta["variables"] = filtered
-      seen.update(filtered)
-
-    # Add remaining variables that were not covered by sections
-    remaining = [name for name in existing_names if name not in seen]
-    if remaining:
-      combined["other"] = {
-        "title": "Other",
-        "prompt": None,
-        "description": None,
-        "toggle": None,
-        "variables": remaining,
-      }
-
-    return combined
def render(self, variables: dict[str, Any]) -> str:
  """Render this template's Jinja2 content using the supplied variable values."""
  logger.debug(f"Rendering template '{self.id}' with variables: {variables}")
  compiled = self.jinja_env.from_string(self.content)
  return compiled.render(**variables)
+  
+  # !SECTION

+ 303 - 161
cli/core/variables.py

@@ -1,3 +1,6 @@
+from __future__ import annotations
+
+from collections import OrderedDict
 from dataclasses import dataclass, field
 from typing import Any, Dict, List, Optional, Set
 from urllib.parse import urlparse
@@ -6,200 +9,339 @@ import re
 
 logger = logging.getLogger(__name__)
 
+# -----------------------
+# SECTION: Constants
+# -----------------------
+
# Accepted case-insensitive spellings for boolean variable values.
TRUE_VALUES = {"true", "1", "yes", "on"}
FALSE_VALUES = {"false", "0", "no", "off"}
# Hostname: dot-separated labels of 1-63 chars, no leading/trailing '-',
# 253 chars total at most (lookahead); underscores are tolerated.
HOSTNAME_REGEX = re.compile(r"^(?=.{1,253}$)(?!-)[A-Za-z0-9_-]{1,63}(?<!-)(\.(?!-)[A-Za-z0-9_-]{1,63}(?<!-))*$")
# Minimal email shape check: non-empty local part, one '@', dot in the domain.
EMAIL_REGEX = re.compile(r"^[^@\s]+@[^@\s]+\.[^@\s]+$")
 
+# !SECTION
+
+# ----------------------
+# SECTION: Variable Class
+# ----------------------
 
class Variable:
  """A single templating variable with lightweight validation.

  Instances are built from plain dict specs (module spec or template
  frontmatter) and normalize their initial value according to ``type``.
  """

  def __init__(self, data: dict[str, Any]) -> None:
    """Build a Variable from a spec dictionary.

    Args:
      data: Spec with a required 'name' key and optional keys:
        description, type, options, prompt, value, default, section, origin.

    Raises:
      ValueError: If ``data`` is not a dict, lacks 'name', or carries a
        default that fails type conversion.
    """
    if not isinstance(data, dict):
      raise ValueError("Variable data must be a dictionary")
    if "name" not in data:
      raise ValueError("Variable data must contain 'name' key")

    self.name: str = data["name"]
    self.description: Optional[str] = data.get("description") or data.get("display", "")
    self.type: str = data.get("type", "str")
    self.options: Optional[List[Any]] = data.get("options", [])
    self.prompt: Optional[str] = data.get("prompt")
    # An explicit "value" wins over "default"; both may be absent.
    self.value: Any = data.get("value") if data.get("value") is not None else data.get("default")
    self.section: Optional[str] = data.get("section")
    self.origin: Optional[str] = data.get("origin")

    # Normalize the initial value immediately so bad defaults fail fast.
    if self.value is not None:
      try:
        self.value = self.convert(self.value)
      except ValueError as exc:
        raise ValueError(f"Invalid default for variable '{self.name}': {exc}")

  # -------------------------
  # SECTION: Type Conversion
  # -------------------------

  def convert(self, value: Any) -> Any:
    """Validate and convert a raw value according to this variable's type."""
    if value is None:
      return None

    # Dispatch to a type-specific converter when one exists; any type
    # without a dedicated handler falls back to plain string conversion.
    handler = getattr(self, f"_convert_{self.type}", None)
    if handler is not None:
      return handler(value)
    return str(value)

  def _convert_bool(self, value: Any) -> bool:
    """Coerce common truthy/falsy spellings into a bool."""
    if isinstance(value, bool):
      return value
    if isinstance(value, str):
      text = value.strip().lower()
      if text in TRUE_VALUES:
        return True
      if text in FALSE_VALUES:
        return False
    raise ValueError("value must be a boolean (true/false)")

  def _convert_int(self, value: Any) -> Optional[int]:
    """Coerce a value into an int; blank strings become None."""
    if isinstance(value, int):
      return value
    if isinstance(value, str) and not value.strip():
      return None
    try:
      return int(value)
    except (TypeError, ValueError) as exc:
      raise ValueError("value must be an integer") from exc

  def _convert_float(self, value: Any) -> Optional[float]:
    """Coerce a value into a float; blank strings become None."""
    if isinstance(value, float):
      return value
    if isinstance(value, str) and not value.strip():
      return None
    try:
      return float(value)
    except (TypeError, ValueError) as exc:
      raise ValueError("value must be a float") from exc

  def _convert_enum(self, value: Any) -> Optional[str]:
    """Validate a value against the configured enum options."""
    if value == "":
      return None
    text = str(value)
    if self.options and text not in self.options:
      raise ValueError(f"value must be one of: {', '.join(self.options)}")
    return text

  def _convert_hostname(self, value: Any) -> str:
    """Validate a hostname; empty strings and 'localhost' pass through."""
    text = str(value).strip()
    if not text or text.lower() == "localhost":
      return text
    if not HOSTNAME_REGEX.fullmatch(text):
      raise ValueError("value must be a valid hostname")
    return text

  def _convert_url(self, value: Any) -> str:
    """Validate that a value looks like a URL with scheme and host."""
    text = str(value).strip()
    if not text:
      return ""
    parts = urlparse(text)
    if not (parts.scheme and parts.netloc):
      raise ValueError("value must be a valid URL (include scheme and host)")
    return text

  def _convert_email(self, value: Any) -> str:
    """Validate that a value looks like an email address."""
    text = str(value).strip()
    if not text:
      return ""
    if not EMAIL_REGEX.fullmatch(text):
      raise ValueError("value must be a valid email address")
    return text

  def get_typed_value(self) -> Any:
    """Return the stored value converted to its proper Python type."""
    return self.convert(self.value)

  # !SECTION
 
-@dataclass
-class VariableCollection:
-  """Manages variables with merge precedence and builds Jinja context."""
+# !SECTION
 
-  variables: Dict[str, Variable] = field(default_factory=dict)
+# ----------------------------
+# SECTION: VariableSection Class
+# ----------------------------
 
-  def add_from_dict(self, specs: Dict[str, Any], used_vars: Set[str], label: str = "spec") -> None:
-    used = set(used_vars)
-    for name in specs.keys():
-      if name not in used:
class VariableSection:
  """A named group of variables sharing presentation metadata."""

  def __init__(self, data: dict[str, Any]) -> None:
    """Build a section from a spec dictionary.

    Args:
      data: Spec with required 'key' and 'title' entries and optional
        'prompt', 'description', 'toggle' and 'required' entries.

    Raises:
      ValueError: If ``data`` is not a dict or lacks a required entry.
    """
    if not isinstance(data, dict):
      raise ValueError("VariableSection data must be a dictionary")
    for mandatory in ("key", "title"):
      if mandatory not in data:
        raise ValueError(f"VariableSection data must contain '{mandatory}'")

    self.key: str = data["key"]
    self.title: str = data["title"]
    self.variables: OrderedDict[str, Variable] = OrderedDict()
    self.prompt: Optional[str] = data.get("prompt")
    self.description: Optional[str] = data.get("description")
    self.toggle: Optional[str] = data.get("toggle")
    # Only the "general" section is required unless the spec says otherwise.
    self.required: bool = data.get("required", data["key"] == "general")

  def variable_names(self) -> list[str]:
    """Return the contained variable names in insertion order."""
    return list(self.variables)
+
+# !SECTION
+
+# --------------------------------
+# SECTION: VariableCollection Class
+# --------------------------------
+
class VariableCollection:
  """Manages variables grouped by sections and builds Jinja context.

  Built from a spec dictionary of the shape::

    {
      "section_key": {
        "title": "Section Title",
        "prompt": "Optional prompt text",
        "toggle": "optional_toggle_var_name",
        "description": "Optional description",
        "vars": {
          "var_name": {"description": "...", "type": "str", "default": "..."},
        },
      },
    }
  """

  def __init__(self, spec: dict[str, Any]) -> None:
    """Initialize the collection from a specification dictionary.

    Args:
      spec: Mapping of section keys to section definitions (see class docstring).

    Raises:
      ValueError: If spec is not a dictionary, or a nested Variable /
        VariableSection definition is invalid.
    """
    if not isinstance(spec, dict):
      raise ValueError("Spec must be a dictionary")

    self._set: Dict[str, VariableSection] = {}

    for section_key, section_data in spec.items():
      # Silently skip malformed (non-dict) section entries.
      if not isinstance(section_data, dict):
        continue

      section = VariableSection(
        {
          "key": section_key,
          "title": section_data.get("title", section_key.replace("_", " ").title()),
          "prompt": section_data.get("prompt"),
          "description": section_data.get("description"),
          "toggle": section_data.get("toggle"),
          # Only the "general" section defaults to required=True.
          "required": section_data.get("required", section_key == "general"),
        }
      )

      # Materialize each variable spec, injecting its name into the data.
      if "vars" in section_data:
        for var_name, var_data in section_data["vars"].items():
          section.variables[var_name] = Variable({"name": var_name, **var_data})

      self._set[section_key] = section

  # -------------------------
  # SECTION: Helper Methods
  # -------------------------

  def _lookup(self, var_name: str) -> Optional[Variable]:
    """Return the Variable registered under ``var_name``, or None if absent."""
    for section in self._set.values():
      if var_name in section.variables:
        return section.variables[var_name]
    return None

  def get_all_values(self) -> dict[str, Any]:
    """Get all variable values as a dictionary.

    Returns:
      Dictionary mapping variable names to their typed values.
    """
    return {
      name: variable.get_typed_value()
      for section in self._set.values()
      for name, variable in section.variables.items()
    }

  def apply_overrides(self, overrides: dict[str, Any], origin_suffix: str = " -> cli") -> list[str]:
    """Apply multiple variable overrides at once.

    Args:
      overrides: Mapping of variable names to raw replacement values.
      origin_suffix: Suffix appended to each overridden variable's origin.

    Returns:
      List of variable names that were successfully overridden.
    """
    successful_overrides: list[str] = []
    errors: list[str] = []

    for var_name, value in overrides.items():
      variable = self._lookup(var_name)
      if variable is None:
        logger.warning(f"Variable '{var_name}' not found in template")
        continue

      try:
        # Convert and set the new value; conversion enforces the var's type.
        variable.value = variable.convert(value)
      except ValueError as e:
        error_msg = f"Invalid override value for '{var_name}': {value} - {e}"
        errors.append(error_msg)
        logger.error(error_msg)
        continue

      # Track provenance, e.g. "module -> cli".
      if variable.origin:
        variable.origin = variable.origin + origin_suffix
      else:
        # BUG FIX: previously origin_suffix.lstrip(" -> ") was used, which
        # strips any leading run of the characters ' ', '-' and '>' rather
        # than the literal separator, corrupting unusual suffixes.
        # Remove the exact " -> " prefix instead.
        separator = " -> "
        if origin_suffix.startswith(separator):
          variable.origin = origin_suffix[len(separator):]
        else:
          variable.origin = origin_suffix

      successful_overrides.append(var_name)

    if errors:
      # Log failures but keep going; overrides are best-effort by design.
      logger.warning(f"Some CLI overrides failed: {'; '.join(errors)}")

    return successful_overrides

  # !SECTION
+
+  # !SECTION
 
-  def get_variable(self, name: str) -> Optional[Variable]:
-    return self.variables.get(name)
-
-  def as_rows(self) -> List[Dict[str, Any]]:
-    """Return variable metadata for presentation or export."""
-    rows: List[Dict[str, Any]] = []
-    for name in self.get_variable_names():
-      variable = self.variables[name]
-      default = variable.get_typed_value()
-      rows.append(
-        {
-          "name": name,
-          "type": variable.type,
-          "description": variable.description or "",
-          "default": "" if default in (None, "") else str(default),
-          "options": list(variable.options or []),
-          "section": variable.section,
-        }
-      )
-    return rows
-
-  def __len__(self) -> int:
-    return len(self.variables)
+# !SECTION

+ 6 - 4
cli/modules/ansible.py

@@ -1,12 +1,14 @@
+from __future__ import annotations
+
 from ..core.module import Module
 from ..core.registry import registry
 
class AnsibleModule(Module):
  """Module for managing Ansible playbooks and configurations."""

  # Unique module name used for registration.
  name: str = "ansible"
  # Short human-readable description of this module.
  description: str = "Manage Ansible playbooks and configurations"
  # Candidate filenames this module recognizes as Ansible entry points.
  files: list[str] = ["playbook.yml", "playbook.yaml", "main.yml", "main.yaml",
                      "site.yml", "site.yaml"]

registry.register(AnsibleModule)

+ 83 - 50
cli/modules/compose.py

@@ -3,15 +3,7 @@ from collections import OrderedDict
 from ..core.module import Module
 from ..core.registry import registry
 
-
-class ComposeModule(Module):
-  """Docker Compose module."""
-
-  name = "compose"
-  description = "Manage Docker Compose configurations"
-  files = ["compose.yaml", "compose.yml", "docker-compose.yaml", "docker-compose.yml"]
-
-  variable_sections = OrderedDict(
+spec = OrderedDict(
     {
       "general": {
         "title": "General",
@@ -43,6 +35,11 @@ class ComposeModule(Module):
             "options": ["unless-stopped", "always", "on-failure", "no"],
             "default": "unless-stopped",
           },
+          "container_hostname": {
+            "description": "Container internal hostname",
+            "type": "str",
+            "default": "",
+          },
         },
       },
       "network": {
@@ -76,22 +73,7 @@ class ComposeModule(Module):
             "description": "Expose ports via 'ports' mapping",
             "type": "bool",
             "default": False,
-          },
-          "service_port_http": {
-            "description": "HTTP service port (host)",
-            "type": "int",
-            "default": 8080,
-          },
-          "service_port_https": {
-            "description": "HTTPS service port (host)",
-            "type": "int",
-            "default": 8443,
-          },
-          "ports_http": {
-            "description": "Port for HTTP access to the service",
-            "type": "int",
-            "default": 5678,
-          },
+          }
         },
       },
       "traefik": {
@@ -134,6 +116,9 @@ class ComposeModule(Module):
       },
       "swarm": {
         "title": "Docker Swarm",
+        "prompt": "Enable Docker Swarm deployment?",
+        "toggle": "swarm_enabled",
+        "description": "Deploy service in Docker Swarm mode with replicas.",
         "vars": {
           "swarm_enabled": {
             "description": "Enable Docker Swarm mode",
@@ -147,60 +132,108 @@ class ComposeModule(Module):
           },
         },
       },
-      "nginx": {
-        "title": "Nginx Dashboard",
+      "database": {
+        "title": "Database",
+        "prompt": "Configure external database connection?",
+        "toggle": "database_enabled",
+        "description": "Connect to external database (PostgreSQL, MySQL, MariaDB, etc.)",
         "vars": {
-          "nginx_dashboard_enabled": {
-            "description": "Enable Nginx dashboard",
+          "database_enabled": {
+            "description": "Enable external database integration",
             "type": "bool",
             "default": False,
           },
-          "nginx_dashboard_port": {
-            "description": "Nginx dashboard port (host)",
+          "database_type": {
+            "description": "Database type",
+            "type": "enum",
+            "options": ["postgres", "mysql", "mariadb", "sqlite"],
+            "default": "postgres",
+          },
+          "database_host": {
+            "description": "Database host",
+            "type": "str",
+            "default": "database",
+          },
+          "database_port": {
+            "description": "Database port",
             "type": "int",
-            "default": 8081,
+            "default": 5432,
+          },
+          "database_name": {
+            "description": "Database name",
+            "type": "str",
+            "default": "",
+          },
+          "database_user": {
+            "description": "Database user",
+            "type": "str",
+            "default": "",
+          },
+          "database_password": {
+            "description": "Database password",
+            "type": "str",
+            "default": "",
           },
         },
       },
-      "postgres": {
-        "title": "PostgreSQL",
-        "prompt": "Configure external PostgreSQL database?",
-        "toggle": "postgres_enabled",
+      "email": {
+        "title": "Email Server",
+        "prompt": "Configure email server for notifications and user management?",
+        "toggle": "email_enabled",
+        "description": "Used for notifications, sign-ups, password resets, and alerts.",
         "vars": {
-          "postgres_enabled": {
-            "description": "Enable PostgreSQL integration",
+          "email_enabled": {
+            "description": "Enable email server configuration",
             "type": "bool",
             "default": False,
           },
-          "postgres_host": {
-            "description": "PostgreSQL host",
+          "email_host": {
+            "description": "SMTP server hostname",
             "type": "str",
-            "default": "postgres",
+            "default": "",
           },
-          "postgres_port": {
-            "description": "PostgreSQL port",
+          "email_port": {
+            "description": "SMTP server port",
             "type": "int",
-            "default": 5432,
+            "default": 587,
           },
-          "postgres_database": {
-            "description": "PostgreSQL database name",
+          "email_username": {
+            "description": "SMTP username",
             "type": "str",
             "default": "",
           },
-          "postgres_user": {
-            "description": "PostgreSQL user",
+          "email_password": {
+            "description": "SMTP password",
             "type": "str",
             "default": "",
           },
-          "postgres_password": {
-            "description": "PostgreSQL password",
+          "email_from": {
+            "description": "From email address",
             "type": "str",
             "default": "",
           },
+          "email_use_tls": {
+            "description": "Use TLS encryption",
+            "type": "bool",
+            "default": True,
+          },
+          "email_use_ssl": {
+            "description": "Use SSL encryption",
+            "type": "bool",
+            "default": False,
+          },
         },
       },
     }
   )
 
 
class ComposeModule(Module):
  """Docker Compose module."""

  # Unique module name used for registration.
  name = "compose"
  # Short human-readable description of this module.
  description = "Manage Docker Compose configurations"
  # Candidate filenames this module recognizes as Compose configurations.
  files = ["compose.yaml", "compose.yml", "docker-compose.yaml", "docker-compose.yml"]


registry.register(ComposeModule)

+ 5 - 3
cli/modules/docker.py

@@ -1,12 +1,14 @@
+from __future__ import annotations
+
 from ..core.module import Module
 from ..core.registry import registry
 
 class DockerModule(Module):
   """Module for managing Docker configurations and files."""
   
-  name = "docker"
-  description = "Manage Docker configurations and files"
-  files = ["Dockerfile", "dockerfile", ".dockerignore"]
+  name: str = "docker"
+  description: str = "Manage Docker configurations and files"
+  files: list[str] = ["Dockerfile", "dockerfile", ".dockerignore"]
 
 # Register the module
 registry.register(DockerModule)

+ 5 - 3
cli/modules/github_actions.py

@@ -1,12 +1,14 @@
+from __future__ import annotations
+
 from ..core.module import Module
 from ..core.registry import registry
 
 class GitHubActionsModule(Module):
   """Module for managing GitHub Actions workflows."""
   
-  name = "github-actions"
-  description = "Manage GitHub Actions workflows"
-  files = ["action.yml", "action.yaml", "workflow.yml", "workflow.yaml"]
+  name: str = "github-actions"
+  description: str = "Manage GitHub Actions workflows"
+  files: list[str] = ["action.yml", "action.yaml", "workflow.yml", "workflow.yaml"]
 
 # Register the module
 registry.register(GitHubActionsModule)

+ 5 - 3
cli/modules/gitlab_ci.py

@@ -1,12 +1,14 @@
+from __future__ import annotations
+
 from ..core.module import Module
 from ..core.registry import registry
 
 class GitLabCIModule(Module):
   """Module for managing GitLab CI/CD pipelines."""
   
-  name = "gitlab-ci"
-  description = "Manage GitLab CI/CD pipelines"
-  files = [".gitlab-ci.yml", ".gitlab-ci.yaml", "gitlab-ci.yml", "gitlab-ci.yaml"]
+  name: str = "gitlab-ci"
+  description: str = "Manage GitLab CI/CD pipelines"
+  files: list[str] = [".gitlab-ci.yml", ".gitlab-ci.yaml", "gitlab-ci.yml", "gitlab-ci.yaml"]
 
 # Register the module
 registry.register(GitLabCIModule)

+ 5 - 3
cli/modules/kestra.py

@@ -1,12 +1,14 @@
+from __future__ import annotations
+
 from ..core.module import Module
 from ..core.registry import registry
 
 class KestraModule(Module):
   """Module for managing Kestra workflows and configurations."""
   
-  name = "kestra"
-  description = "Manage Kestra workflows and configurations"
-  files = ["inputs.yaml", "variables.yaml", "webhook.yaml", "flow.yml", "flow.yaml"]
+  name: str = "kestra"
+  description: str = "Manage Kestra workflows and configurations"
+  files: list[str] = ["inputs.yaml", "variables.yaml", "webhook.yaml", "flow.yml", "flow.yaml"]
 
 # Register the module
 registry.register(KestraModule)

+ 6 - 4
cli/modules/kubernetes.py

@@ -1,13 +1,15 @@
+from __future__ import annotations
+
 from ..core.module import Module
 from ..core.registry import registry
 
 class KubernetesModule(Module):
   """Module for managing Kubernetes manifests and configurations."""
   
-  name = "kubernetes"
-  description = "Manage Kubernetes manifests and configurations"
-  files = ["deployment.yml", "deployment.yaml", "service.yml", "service.yaml", 
-           "manifest.yml", "manifest.yaml", "values.yml", "values.yaml"]
+  name: str = "kubernetes"
+  description: str = "Manage Kubernetes manifests and configurations"
+  files: list[str] = ["deployment.yml", "deployment.yaml", "service.yml", "service.yaml", 
+                      "manifest.yml", "manifest.yaml", "values.yml", "values.yaml"]
 
 # Register the module
 registry.register(KubernetesModule)

+ 5 - 3
cli/modules/packer.py

@@ -1,12 +1,14 @@
+from __future__ import annotations
+
 from ..core.module import Module
 from ..core.registry import registry
 
 class PackerModule(Module):
   """Module for managing Packer templates and configurations."""
   
-  name = "packer"
-  description = "Manage Packer templates and configurations"
-  files = ["template.pkr.hcl", "build.pkr.hcl", "variables.pkr.hcl", "sources.pkr.hcl"]
+  name: str = "packer"
+  description: str = "Manage Packer templates and configurations"
+  files: list[str] = ["template.pkr.hcl", "build.pkr.hcl", "variables.pkr.hcl", "sources.pkr.hcl"]
 
 # Register the module
 registry.register(PackerModule)

+ 5 - 3
cli/modules/terraform.py

@@ -1,12 +1,14 @@
+from __future__ import annotations
+
 from ..core.module import Module
 from ..core.registry import registry
 
 class TerraformModule(Module):
   """Terraform module."""
   
-  name = "terraform"
-  description = "Manage Terraform configurations"
-  files = ["main.tf", "variables.tf", "outputs.tf", "versions.tf"]
+  name: str = "terraform"
+  description: str = "Manage Terraform configurations"
+  files: list[str] = ["main.tf", "variables.tf", "outputs.tf", "versions.tf"]
 
 # Register the module
 registry.register(TerraformModule)

+ 5 - 3
cli/modules/vagrant.py

@@ -1,12 +1,14 @@
+from __future__ import annotations
+
 from ..core.module import Module
 from ..core.registry import registry
 
 class VagrantModule(Module):
   """Module for managing Vagrant configurations and files."""
   
-  name = "vagrant"
-  description = "Manage Vagrant configurations and files"
-  files = ["Vagrantfile", "vagrantfile"]
+  name: str = "vagrant"
+  description: str = "Manage Vagrant configurations and files"
+  files: list[str] = ["Vagrantfile", "vagrantfile"]
 
 # Register the module
 registry.register(VagrantModule)

+ 25 - 23
library/compose/alloy/compose.yaml

@@ -1,15 +1,17 @@
 ---
-name: "Grafana Alloy"
-description: "A lightweight and flexible service mesh"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - "grafana"
-  - "alloy"
-  - "monitoring"
-  - "http"
-  - "traefik"
+kind: "compose"
+metadata:
+  name: "Grafana Alloy"
+  description: "A flexible telemetry collector for metrics, logs, and traces"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - "grafana"
+    - "alloy"
+    - "monitoring"
+    - "http"
+    - "traefik"
 variables:
   container_hostname:
     description: "Sets the container's internal hostname (this will show up in the collected logs)"
@@ -26,7 +28,7 @@ services:
       - --server.http.listen-addr=0.0.0.0:12345
       - --storage.path=/var/lib/alloy/data
       - /etc/alloy/config.alloy
-    {% if ports %}
+    {% if ports_enabled %}
     ports:
       - "12345:12345"
     {% endif %}
@@ -39,23 +41,23 @@ services:
       - /sys:/sys:ro
       - /var/lib/docker/:/var/lib/docker/:ro
       - /run/udev/data:/run/udev/data:ro
-    {% if network %}
+    {% if network_enabled %}
     networks:
-      - {{ network.name | default("bridge") }}
+      - {{ network_name | default("bridge") }}
     {% endif %}
-    {% if traefik %}
+    {% if traefik_enabled %}
     labels:
       - traefik.enable=true
       - traefik.http.services.{{ service_name | default("alloy") }}.loadbalancer.server.port=12345
       - traefik.http.services.{{ service_name | default("alloy") }}.loadbalancer.server.scheme=http
       - traefik.http.routers.{{ service_name | default("alloy") }}.service={{ service_name | default("alloy") }}
-      - traefik.http.routers.{{ service_name | default("alloy") }}.rule=Host(`{{ traefik.host }}`)
-      {% if traefik.tls %}
+      - traefik.http.routers.{{ service_name | default("alloy") }}.rule=Host(`{{ traefik_host }}`)
+      {% if traefik_tls_enabled %}
       - traefik.http.routers.{{ service_name | default("alloy") }}.tls=true
-      - traefik.http.routers.{{ service_name | default("alloy") }}.entrypoints={{ traefik.tls.entrypoint | default("websecure") }}
-      - traefik.http.routers.{{ service_name | default("alloy") }}.tls.certresolver={{ traefik.tls.certresolver }}
+      - traefik.http.routers.{{ service_name | default("alloy") }}.entrypoints={{ traefik_tls_entrypoint | default("websecure") }}
+      - traefik.http.routers.{{ service_name | default("alloy") }}.tls.certresolver={{ traefik_tls_certresolver }}
       {% else %}
-      - traefik.http.routers.{{ service_name | default("alloy") }}.entrypoints={{ traefik.entrypoint | default("web") }}
+      - traefik.http.routers.{{ service_name | default("alloy") }}.entrypoints={{ traefik_entrypoint | default("web") }}
       {% endif %}
     {% endif %}
     restart: {{ restart_policy | default("unless-stopped") }}
@@ -64,10 +66,10 @@ volumes:
   alloy_data:
     driver: local
 
-{% if network %}
+{% if network_enabled %}
 networks:
-  {{ network.name | default("bridge") }}:
-    {% if network.external %}
+  {{ network_name | default("bridge") }}:
+    {% if network_external %}
     external: true
     {% endif %}
 {% endif %}

+ 11 - 9
library/compose/ansiblesemaphore/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "Ansible Semaphore"
-description: "A powerful and flexible automation tool"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - "ansible"
-  - "automation"
-  - "semaphore"
+kind: "compose"
+metadata:
+  name: "Ansible Semaphore"
+  description: "A powerful and flexible automation tool"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - "ansible"
+    - "automation"
+    - "semaphore"
 ---
 volumes:
   semaphore-mysql:

+ 146 - 92
library/compose/authentik/compose.yaml

@@ -1,107 +1,149 @@
 ---
-name: "Authentik"
-description: "An open-source identity and access management solution"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - "authentik"
-  - "identity"
-  - "access"
-  - "management"  
+kind: "compose"
+metadata:
+  name: "Authentik"
+  description: "An open-source identity and access management solution"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - "authentik"
+    - "identity"
+    - "access"
+    - "management"
+spec:
+  ports:
+    vars:
+      ports_http:
+        description: "HTTP port for Authentik web interface"
+        type: int
+        default: 9000
+      ports_https:
+        description: "HTTPS port for Authentik web interface"
+        type: int
+        default: 9443
+  authentik:
+    vars:
+      authentik_secret_key:
+        description: "Authentik secret key (generate with: openssl rand -base64 32)"
+        type: str
+        default: ""
+      authentik_error_reporting:
+        description: "Enable Authentik error reporting"
+        type: bool
+        default: false
+  database:
+    required: true
+  email:
+    vars:
+      email_timeout:
+        description: "Email timeout in seconds"
+        type: int
+        default: 10
 ---
 services:
-  server:
+  {{ service_name | default('authentik-server') }}:
     image: ghcr.io/goauthentik/server:2025.6.3
-    container_name: authentik-server
+    container_name: {{ container_name | default('authentik-server') }}
     command: server
     environment:
-      - AUTHENTIK_REDIS__HOST=authentik-redis
-      - AUTHENTIK_POSTGRESQL__HOST=authentik-db
-      - AUTHENTIK_POSTGRESQL__USER=${POSTGRES_USER:-authentik}
-      - AUTHENTIK_POSTGRESQL__NAME=${POSTGRES_DB:-authentik}
-      - AUTHENTIK_POSTGRESQL__PASSWORD=${POSTGRES_PASSWORD:?error}
-      # (Required)  To generate a secret key run the following command:
-      #             echo $(openssl rand -base64 32)
-      - AUTHENTIK_SECRET_KEY=${AUTHENTIK_SECRET_KEY:?error}
-      # (Optional)  Enable Error Reporting
-      - AUTHENTIK_ERROR_REPORTING__ENABLED=${AUTHENTIK_ERROR_REPORTING:-false}
-      # (Optional)  Enable Email Sending
-      #             Highly recommended to notify you about alerts and configuration issues.
-      - AUTHENTIK_EMAIL__HOST=${EMAIL_HOST:?error}
-      - AUTHENTIK_EMAIL__PORT=${EMAIL_PORT:-25}
-      - AUTHENTIK_EMAIL__USERNAME=${EMAIL_USERNAME:?error}
-      - AUTHENTIK_EMAIL__PASSWORD=${EMAIL_PASSWORD:?error}
-      - AUTHENTIK_EMAIL__USE_TLS=${EMAIL_USE_TLS:-false}
-      - AUTHENTIK_EMAIL__USE_SSL=${EMAIL_USE_SSL:-false}
-      - AUTHENTIK_EMAIL__TIMEOUT=${EMAIL_TIMEOUT:-10}
-      - AUTHENTIK_EMAIL__FROM=${EMAIL_FROM:?error}
+      - TZ={{ container_timezone | default('UTC') }}
+      - AUTHENTIK_REDIS__HOST={{ service_name | default('authentik') }}-redis
+      - AUTHENTIK_POSTGRESQL__HOST={{ service_name | default('authentik') }}-postgres
+      - AUTHENTIK_POSTGRESQL__USER={{ database_user | default('authentik') }}
+      - AUTHENTIK_POSTGRESQL__NAME={{ database_name | default('authentik') }}
+      - AUTHENTIK_POSTGRESQL__PASSWORD={{ database_password | default('authentik') }}
+      {% if authentik_secret_key -%}
+      - AUTHENTIK_SECRET_KEY={{ authentik_secret_key }}
+      {% endif %}
+      - AUTHENTIK_ERROR_REPORTING__ENABLED={{ authentik_error_reporting | default(false) }}
+      {% if email_enabled -%}
+      - AUTHENTIK_EMAIL__HOST={{ email_host }}
+      - AUTHENTIK_EMAIL__PORT={{ email_port | default(25) }}
+      - AUTHENTIK_EMAIL__USERNAME={{ email_username }}
+      - AUTHENTIK_EMAIL__PASSWORD={{ email_password }}
+      - AUTHENTIK_EMAIL__USE_TLS={{ email_use_tls | default(false) }}
+      - AUTHENTIK_EMAIL__USE_SSL={{ email_use_ssl | default(false) }}
+      - AUTHENTIK_EMAIL__TIMEOUT={{ email_timeout | default(10) }}
+      - AUTHENTIK_EMAIL__FROM={{ email_from }}
+      {% endif %}
+    {% if ports_enabled %}
     ports:
-      # (Optional)  Remove these, if you're using a reverse proxy like Traefik.
-      - 9000:9000
-      - 9443:9443
+      - "{{ ports_http | default(9000) }}:9000"
+      - "{{ ports_https | default(9443) }}:9443"
+    {% endif %}
+    {% if network_enabled %}
+    networks:
+      - {{ network_name | default('bridge') }}
+    {% endif %}
+    {% if traefik_enabled %}
     labels:
-      # (Optional)  Enable Traefik integration for the Authentik Web UI. For more information
-      #             about integrating other services with Traefik and Authentik, see the
-      #             documentation at https://goauthentik.io/docs/outposts/integrations/traefik
-      #             and the middleware example files in `docker-compose/traefik/config`.
       - traefik.enable=true
-      - traefik.http.services.authentik.loadbalancer.server.port=9000
-      - traefik.http.services.authentik.loadbalancer.server.scheme=http
-      - traefik.http.routers.authentik.entrypoints=websecure
-      - traefik.http.routers.authentik.rule=Host(`your-authentik-fqdn`)
-      - traefik.http.routers.authentik.tls=true
-      - traefik.http.routers.authentik.tls.certresolver=cloudflare
+      - traefik.http.services.{{ service_name | default('authentik') }}.loadbalancer.server.port=9000
+      - traefik.http.services.{{ service_name | default('authentik') }}.loadbalancer.server.scheme=http
+      - traefik.http.routers.{{ service_name | default('authentik') }}.rule=Host(`{{ traefik_host }}`)
+      {% if traefik_tls_enabled %}
+      - traefik.http.routers.{{ service_name | default('authentik') }}.entrypoints={{ traefik_tls_entrypoint | default('websecure') }}
+      - traefik.http.routers.{{ service_name | default('authentik') }}.tls=true
+      - traefik.http.routers.{{ service_name | default('authentik') }}.tls.certresolver={{ traefik_tls_certresolver }}
+      {% else %}
+      - traefik.http.routers.{{ service_name | default('authentik') }}.entrypoints={{ traefik_entrypoint | default('web') }}
+      {% endif %}
+    {% endif %}
     volumes:
       - ./media:/media
       - ./custom-templates:/templates
     depends_on:
-      - postgres
-      - redis
-    restart: unless-stopped
+      - {{ service_name | default('authentik') }}-postgres
+      - {{ service_name | default('authentik') }}-redis
+    restart: {{ restart_policy | default('unless-stopped') }}
 
-  worker:
+  {{ service_name | default('authentik') }}-worker:
     image: ghcr.io/goauthentik/server:2025.6.3
-    container_name: authentik-worker
+    container_name: {{ service_name | default('authentik') }}-worker
     command: worker
     environment:
-      - AUTHENTIK_REDIS__HOST=authentik-redis
-      - AUTHENTIK_POSTGRESQL__HOST=authentik-db
-      - AUTHENTIK_POSTGRESQL__USER=${POSTGRES_USER:-authentik}
-      - AUTHENTIK_POSTGRESQL__NAME=${POSTGRES_DB:-authentik}
-      - AUTHENTIK_POSTGRESQL__PASSWORD=${POSTGRES_PASSWORD:?error}
-      # (Required)  To generate a secret key run the following command:
-      #             echo $(openssl rand -base64 32)
-      - AUTHENTIK_SECRET_KEY=${AUTHENTIK_SECRET_KEY:?error}
-      # (Optional)  Enable Error Reporting
-      - AUTHENTIK_ERROR_REPORTING__ENABLED=${AUTHENTIK_ERROR_REPORTING:-false}
-      # (Optional)  Enable Email Sending
-      #             Highly recommended to notify you about alerts and configuration issues.
-      - AUTHENTIK_EMAIL__HOST=${EMAIL_HOST:?error}
-      - AUTHENTIK_EMAIL__PORT=${EMAIL_PORT:-25}
-      - AUTHENTIK_EMAIL__USERNAME=${EMAIL_USERNAME:?error}
-      - AUTHENTIK_EMAIL__PASSWORD=${EMAIL_PASSWORD:?error}
-      - AUTHENTIK_EMAIL__USE_TLS=${EMAIL_USE_TLS:-false}
-      - AUTHENTIK_EMAIL__USE_SSL=${EMAIL_USE_SSL:-false}
-      - AUTHENTIK_EMAIL__TIMEOUT=${EMAIL_TIMEOUT:-10}
-      - AUTHENTIK_EMAIL__FROM=${EMAIL_FROM:?error}
-    # (Optional)  See more for the docker socket integration here:
-    #             https://goauthentik.io/docs/outposts/integrations/docker
+      - TZ={{ container_timezone | default('UTC') }}
+      - AUTHENTIK_REDIS__HOST={{ service_name | default('authentik') }}-redis
+      - AUTHENTIK_POSTGRESQL__HOST={{ service_name | default('authentik') }}-postgres
+      - AUTHENTIK_POSTGRESQL__USER={{ database_user | default('authentik') }}
+      - AUTHENTIK_POSTGRESQL__NAME={{ database_name | default('authentik') }}
+      - AUTHENTIK_POSTGRESQL__PASSWORD={{ database_password | default('authentik') }}
+      {% if authentik_secret_key -%}
+      - AUTHENTIK_SECRET_KEY={{ authentik_secret_key }}
+      {% endif %}
+      - AUTHENTIK_ERROR_REPORTING__ENABLED={{ authentik_error_reporting | default(false) }}
+      {% if email_enabled -%}
+      - AUTHENTIK_EMAIL__HOST={{ email_host }}
+      - AUTHENTIK_EMAIL__PORT={{ email_port | default(25) }}
+      - AUTHENTIK_EMAIL__USERNAME={{ email_username }}
+      - AUTHENTIK_EMAIL__PASSWORD={{ email_password }}
+      - AUTHENTIK_EMAIL__USE_TLS={{ email_use_tls | default(false) }}
+      - AUTHENTIK_EMAIL__USE_SSL={{ email_use_ssl | default(false) }}
+      - AUTHENTIK_EMAIL__TIMEOUT={{ email_timeout | default(10) }}
+      - AUTHENTIK_EMAIL__FROM={{ email_from }}
+      {% endif %}
     user: root
     volumes:
       - /run/docker.sock:/run/docker.sock
       - ./media:/media
       - ./certs:/certs
       - ./custom-templates:/templates
+    {% if network_enabled %}
+    networks:
+      - {{ network_name | default('bridge') }}
+    {% endif %}
     depends_on:
-      - postgres
-      - redis
-    restart: unless-stopped
+      - {{ service_name | default('authentik') }}-postgres
+      - {{ service_name | default('authentik') }}-redis
+    restart: {{ restart_policy | default('unless-stopped') }}
 
-  redis:
+  {{ service_name | default('authentik') }}-redis:
     image: docker.io/library/redis:8.2.1
-    container_name: authentik-redis
+    container_name: {{ service_name | default('authentik') }}-redis
     command: --save 60 1 --loglevel warning
+    environment:
+      - TZ={{ container_timezone | default('UTC') }}
     healthcheck:
       test: ["CMD-SHELL", "redis-cli ping | grep PONG"]
       start_period: 20s
@@ -110,32 +152,44 @@ services:
       timeout: 3s
     volumes:
       - redis_data:/data
-    restart: unless-stopped
+    {% if network_enabled %}
+    networks:
+      - {{ network_name | default('bridge') }}
+    {% endif %}
+    restart: {{ restart_policy | default('unless-stopped') }}
 
-  postgres:
-    # (Optional) Add a PostgreSQL Database for Authentik
-    #   Alternatively, you can host your PostgreSQL database externally, and
-    #   change the connection settings in the `authentik-server` and
-    #   `authentik-worker`.
+  {{ service_name | default('authentik') }}-postgres:
     image: docker.io/library/postgres:17.6
-    container_name: authentik-db
+    container_name: {{ service_name | default('authentik') }}-db
     environment:
-      - POSTGRES_USER=${POSTGRES_USER:-authentik}
-      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:?error}
-      - POSTGRES_DB=${POSTGRES_DB:-authentik}
-      - TZ=${TZ:-UTC}
+      - POSTGRES_USER={{ database_user | default('authentik') }}
+      - POSTGRES_PASSWORD={{ database_password | default('authentik') }}
+      - POSTGRES_DB={{ database_name | default('authentik') }}
+      - TZ={{ container_timezone | default('UTC') }}
     healthcheck:
-      test: ['CMD-SHELL', 'pg_isready -U "${POSTGRES_USER:-authentik}"']
+      test: ['CMD-SHELL', 'pg_isready -U "{{ database_user | default('authentik') }}"']
       start_period: 30s
       interval: 10s
       timeout: 10s
       retries: 5
     volumes:
-      - postgres_data:/var/lib/postgresql/data
-    restart: unless-stopped
+      - database_data:/var/lib/postgresql/data
+    {% if network_enabled %}
+    networks:
+      - {{ network_name | default('bridge') }}
+    {% endif %}
+    restart: {{ restart_policy | default('unless-stopped') }}
 
 volumes:
-  postgres_data:
+  database_data:
     driver: local
   redis_data:
     driver: local
+
+{% if network_enabled %}
+networks:
+  {{ network_name | default('bridge') }}:
+    {% if network_external %}
+    external: true
+    {% endif %}
+{% endif %}

+ 11 - 9
library/compose/bind9/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "BIND9"
-description: "A powerful and flexible DNS server"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - "bind9"
-  - "dns"
-  - "server"
+kind: "compose"
+metadata:
+  name: "BIND9"
+  description: "A powerful and flexible DNS server"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - "bind9"
+    - "dns"
+    - "server"
 ---
 services:
   bind9:

+ 11 - 9
library/compose/cadvisor/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "cAdvisor"
-description: "A tool for monitoring container performance"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - "cadvisor"
-  - "monitoring"
-  - "containers"
+kind: "compose"
+metadata:
+  name: "cAdvisor"
+  description: "A tool for monitoring container performance"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - "cadvisor"
+    - "monitoring"
+    - "containers"
 ---
 services:
   cadvisor:

+ 11 - 9
library/compose/checkmk/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "Checkmk"
-description: "A powerful monitoring solution"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - "checkmk"
-  - "monitoring"
-  - "observability"
+kind: "compose"
+metadata:
+  name: "Checkmk"
+  description: "A powerful monitoring solution"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - "checkmk"
+    - "monitoring"
+    - "observability"
 ---
 services:
   monitoring:

+ 11 - 9
library/compose/clamav/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "ClamAV"
-description: "An open-source antivirus engine"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - "clamav"
-  - "antivirus"
-  - "security"
+kind: "compose"
+metadata:
+  name: "ClamAV"
+  description: "An open-source antivirus engine"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - "clamav"
+    - "antivirus"
+    - "security"
 ---
 services:
   clamav:

+ 11 - 9
library/compose/dockge/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "Dockge"
-description: "A Docker GUI for managing your containers"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - "dockge"
-  - "docker"
-  - "management"
+kind: "compose"
+metadata:
+  name: "Dockge"
+  description: "A Docker GUI for managing your containers"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - "dockge"
+    - "docker"
+    - "management"
 ---
 services:
   dockge:

+ 12 - 10
library/compose/gitea/compose.yaml

@@ -1,14 +1,16 @@
 ---
-name: "Gitea"
-description: "A self-hosted Git service"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
- - gitea
- - git
- - code
- - repository
+kind: "compose"
+metadata:
+  name: "Gitea"
+  description: "A self-hosted Git service"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - gitea
+    - git
+    - code
+    - repository
 ---
 services:
   server:

+ 12 - 10
library/compose/gitlab-runner/compose.yaml

@@ -1,14 +1,16 @@
 ---
-name: "GitLab Runner"
-description: "A self-hosted CI/CD automation tool"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - gitlab-runner
-  - ci
-  - cd
-  - automation
+kind: "compose"
+metadata:
+  name: "GitLab Runner"
+  description: "A self-hosted CI/CD automation tool"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - gitlab-runner
+    - ci
+    - cd
+    - automation
 ---
 services:
   gitlab-runner:

+ 12 - 10
library/compose/gitlab/compose.yaml

@@ -1,14 +1,16 @@
 ---
-name: "GitLab"
-description: "A self-hosted Git repository manager"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - gitlab
-  - git
-  - repository
-  - management
+kind: "compose"
+metadata:
+  name: "GitLab"
+  description: "A self-hosted Git repository manager"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - gitlab
+    - git
+    - repository
+    - management
 ---
 services:
   gitlab:

+ 58 - 16
library/compose/grafana/compose.yaml

@@ -1,23 +1,65 @@
 ---
-name: "Grafana"
-description: "An open-source platform for monitoring and observability"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - grafana
-  - monitoring
-  - observability
+kind: "compose"
+metadata:
+  name: "Grafana"
+  description: "An open-source platform for monitoring and observability"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - grafana
+    - monitoring
+    - observability
+spec:
+  ports:
+    vars:
+      ports_http:
+        description: "HTTP port for Grafana web interface"
+        type: int
+        default: 3000
 ---
-volumes:
-  grafana-data:
-    driver: local
 services:
-  grafana:
+  {{ service_name | default('grafana') }}:
     image: docker.io/grafana/grafana-oss:12.1.1
-    container_name: grafana
+    container_name: {{ container_name | default('grafana') }}
+    environment:
+      - TZ={{ container_timezone | default('UTC') }}
+      {% if container_hostname -%}
+      - GF_SERVER_DOMAIN={{ container_hostname }}
+      {% endif %}
+    {% if ports_enabled %}
     ports:
-      - "3000:3000"
+      - "{{ ports_http | default(3000) }}:3000"
+    {% endif %}
     volumes:
       - grafana-data:/var/lib/grafana
-    restart: unless-stopped
+    {% if network_enabled %}
+    networks:
+      - {{ network_name | default('bridge') }}
+    {% endif %}
+    {% if traefik_enabled %}
+    labels:
+      - traefik.enable=true
+      - traefik.http.services.{{ service_name | default('grafana') }}.loadbalancer.server.port=3000
+      - traefik.http.routers.{{ service_name | default('grafana') }}.rule=Host(`{{ traefik_host }}`)
+      {% if traefik_tls_enabled %}
+      - traefik.http.routers.{{ service_name | default('grafana') }}.entrypoints={{ traefik_tls_entrypoint | default('websecure') }}
+      - traefik.http.routers.{{ service_name | default('grafana') }}.tls=true
+      - traefik.http.routers.{{ service_name | default('grafana') }}.tls.certresolver={{ traefik_tls_certresolver }}
+      {% else %}
+      - traefik.http.routers.{{ service_name | default('grafana') }}.entrypoints={{ traefik_entrypoint | default('web') }}
+      {% endif %}
+    {% endif %}
+    restart: {{ restart_policy | default('unless-stopped') }}
+
+volumes:
+  grafana-data:
+    driver: local
+
+{% if network_enabled %}
+networks:
+  {{ network_name | default('bridge') }}:
+    {% if network_external %}
+    external: true
+    {% endif %}
+{% endif %}

+ 12 - 10
library/compose/heimdall/compose.yaml

@@ -1,14 +1,16 @@
 ---
-name: "Heimdall"
-description: "An open-source dashboard for your web applications"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - heimdall
-  - dashboard
-  - monitoring
-  - observability
+kind: "compose"
+metadata:
+  name: "Heimdall"
+  description: "An open-source dashboard for your web applications"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - heimdall
+    - dashboard
+    - monitoring
+    - observability
 ---
 services:
   heimdall:

+ 12 - 10
library/compose/homeassistant/compose.yaml

@@ -1,14 +1,16 @@
 ---
-name: "Home Assistant"
-description: "A self-hosted home automation platform"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - homeassistant
-  - automation
-  - monitoring
-  - observability
+kind: "compose"
+metadata:
+  name: "Home Assistant"
+  description: "A self-hosted home automation platform"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - homeassistant
+    - automation
+    - monitoring
+    - observability
 ---
 services:
   homeassistant:

+ 11 - 9
library/compose/homepage/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "Homepage"
-description: "A self-hosted homepage for your web applications"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - homepage
-  - web
-  - dashboard
+kind: "compose"
+metadata:
+  name: "Homepage"
+  description: "A self-hosted homepage for your web applications"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - homepage
+    - web
+    - dashboard
 ---
 services:
   homepage:

+ 51 - 13
library/compose/homer/compose.yaml

@@ -1,20 +1,58 @@
 ---
-name: "Homer"
-description: "A simple homepage for your services"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - "homer"
-  - "http"
-  - "testing"
+kind: "compose"
+metadata:
+  name: "Homer"
+  description: "A simple homepage for your services"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - "homer"
+    - "http"
+    - "testing"
+spec:
+  ports:
+    vars:
+      ports_http:
+        description: "HTTP port for Homer web interface"
+        type: int
+        default: 8080
 ---
 services:
-  homer:
+  {{ service_name | default('homer') }}:
     image: docker.io/b4bz/homer:v25.08.1
-    container_name: homer
+    container_name: {{ container_name | default('homer') }}
+    environment:
+      - TZ={{ container_timezone | default('UTC') }}
+    {% if ports_enabled %}
     ports:
-      - "8080:8080"
+      - "{{ ports_http | default(8080) }}:8080"
+    {% endif %}
     volumes:
       - /etc/homer/assets/:/www/assets
-    restart: unless-stopped
+    {% if network_enabled %}
+    networks:
+      - {{ network_name | default('bridge') }}
+    {% endif %}
+    {% if traefik_enabled %}
+    labels:
+      - traefik.enable=true
+      - traefik.http.services.{{ service_name | default('homer') }}.loadbalancer.server.port=8080
+      - traefik.http.routers.{{ service_name | default('homer') }}.rule=Host(`{{ traefik_host }}`)
+      {% if traefik_tls_enabled %}
+      - traefik.http.routers.{{ service_name | default('homer') }}.entrypoints={{ traefik_tls_entrypoint | default('websecure') }}
+      - traefik.http.routers.{{ service_name | default('homer') }}.tls=true
+      - traefik.http.routers.{{ service_name | default('homer') }}.tls.certresolver={{ traefik_tls_certresolver }}
+      {% else %}
+      - traefik.http.routers.{{ service_name | default('homer') }}.entrypoints={{ traefik_entrypoint | default('web') }}
+      {% endif %}
+    {% endif %}
+    restart: {{ restart_policy | default('unless-stopped') }}
+
+{% if network_enabled %}
+networks:
+  {{ network_name | default('bridge') }}:
+    {% if network_external %}
+    external: true
+    {% endif %}
+{% endif %}

+ 93 - 47
library/compose/influxdb/compose.yaml

@@ -1,55 +1,101 @@
 ---
-name: "InfluxDB"
-description: "An open-source time series database"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - influxdb
-  - monitoring
-  - database
+kind: "compose"
+metadata:
+  name: "InfluxDB"
+  description: "An open-source time series database"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - influxdb
+    - monitoring
+    - database
+spec:
+  ports:
+    vars:
+      ports_http:
+        description: "HTTP port for InfluxDB web interface and API"
+        type: int
+        default: 8086
+  influxdb:
+    vars:
+      influxdb_init_username:
+        description: "Initial InfluxDB admin username"
+        type: str
+        default: "admin"
+      influxdb_init_password:
+        description: "Initial InfluxDB admin password"
+        type: str
+        default: "password"
+      influxdb_init_org:
+        description: "Initial InfluxDB organization name"
+        type: str
+        default: "myorg"
+      influxdb_init_bucket:
+        description: "Initial InfluxDB bucket name"
+        type: str
+        default: "mybucket"
+      influxdb_init_retention:
+        description: "Data retention period (e.g., 1w, 30d, 1y; 0 = keep data forever)"
+        type: str
+        default: "0"
+      influxdb_init_token:
+        description: "Admin token for InfluxDB (leave empty for auto-generation)"
+        type: str
+        default: ""
 ---
-# (Optional) when using custom network
-# networks:
-#   yournetwork:
-#     external: true
-volumes:
-  influxdb-data:
 services:
-  influxdb:
-    container_name: influxdb
+  {{ service_name | default('influxdb') }}:
+    container_name: {{ container_name | default('influxdb') }}
     image: docker.io/library/influxdb:2.7.12-alpine
-    # (Optional) remove this section when using traefik
+    environment:
+      - TZ={{ container_timezone | default('UTC') }}
+      - DOCKER_INFLUXDB_INIT_MODE=setup
+      - DOCKER_INFLUXDB_INIT_USERNAME={{ influxdb_init_username | default('admin') }}
+      - DOCKER_INFLUXDB_INIT_PASSWORD={{ influxdb_init_password | default('password') }}
+      - DOCKER_INFLUXDB_INIT_ORG={{ influxdb_init_org | default('myorg') }}
+      - DOCKER_INFLUXDB_INIT_BUCKET={{ influxdb_init_bucket | default('mybucket') }}
+      {% if influxdb_init_retention -%}
+      - DOCKER_INFLUXDB_INIT_RETENTION={{ influxdb_init_retention }}
+      {% endif %}
+      {% if influxdb_init_token -%}
+      - DOCKER_INFLUXDB_INIT_ADMIN_TOKEN={{ influxdb_init_token }}
+      {% endif %}
+    {% if ports_enabled %}
     ports:
-      - '8086:8086'
+      - "{{ ports_http | default(8086) }}:8086"
+    {% endif %}
     volumes:
       - influxdb-data:/var/lib/influxdb2
       - /etc/influxdb2:/etc/influxdb2
-      # (Optional) when using certificate
-      # - /etc/ssl/cert.pem/:/etc/ssl/cert.pem  # (optional) if you're using self-signed certs
-      # - /etc/ssl/cert-key.pem/:/etc/ssl/cert-key.pem  # (optional) if you're using self-signed certs
-    # (Optional) when using certificate
-    # command: influxd --tls-cert=/etc/ssl/cert.pem --tls-key=/etc/ssl/cert-key.pem  # (optional) if you're using self-signed certs
-    environment:
-      - DOCKER_INFLUXDB_INIT_MODE=setup
-      - DOCKER_INFLUXDB_INIT_USERNAME=my-user
-      - DOCKER_INFLUXDB_INIT_PASSWORD=my-password
-      - DOCKER_INFLUXDB_INIT_ORG=my-org
-      - DOCKER_INFLUXDB_INIT_BUCKET=my-bucket
-    # (Optional) change retention time
-    #   - DOCKER_INFLUXDB_INIT_RETENTION=1w  # (optional) configure data retention 1 week
-    # (Optional) add admin token
-    #   - DOCKER_INFLUXDB_INIT_ADMIN_TOKEN=my-super-secret-auth-token  # (optional) set admin token
-    # (Optional) when using traefik
-    # labels:
-    #   - traefik.enable=true
-    #   - traefik.http.services.influxdb.loadbalancer.server.port=8086
-    #   - traefik.http.services.influxdb.loadbalancer.server.scheme=https
-    #   - traefik.http.routers.influxdb-https.entrypoints=websecure
-    #   - traefik.http.routers.influxdb-https.rule=Host(`your-server-url`)
-    #   - traefik.http.routers.influxdb-https.tls=true
-    #   - traefik.http.routers.influxdb-https.tls.certresolver=your-certresolver
-    # (Optional) when using custom network
-    # networks:
-    #   - yournetwork
-    restart: unless-stopped
+    {% if network_enabled %}
+    networks:
+      - {{ network_name | default('bridge') }}
+    {% endif %}
+    {% if traefik_enabled %}
+    labels:
+      - traefik.enable=true
+      - traefik.http.services.{{ service_name | default('influxdb') }}.loadbalancer.server.port=8086
+      - traefik.http.services.{{ service_name | default('influxdb') }}.loadbalancer.server.scheme=http
+      - traefik.http.routers.{{ service_name | default('influxdb') }}.rule=Host(`{{ traefik_host }}`)
+      {% if traefik_tls_enabled %}
+      - traefik.http.routers.{{ service_name | default('influxdb') }}.entrypoints={{ traefik_tls_entrypoint | default('websecure') }}
+      - traefik.http.routers.{{ service_name | default('influxdb') }}.tls=true
+      - traefik.http.routers.{{ service_name | default('influxdb') }}.tls.certresolver={{ traefik_tls_certresolver }}
+      {% else %}
+      - traefik.http.routers.{{ service_name | default('influxdb') }}.entrypoints={{ traefik_entrypoint | default('web') }}
+      {% endif %}
+    {% endif %}
+    restart: {{ restart_policy | default('unless-stopped') }}
+
+volumes:
+  influxdb-data:
+    driver: local
+
+{% if network_enabled %}
+networks:
+  {{ network_name | default('bridge') }}:
+    {% if network_external %}
+    external: true
+    {% endif %}
+{% endif %}

+ 11 - 9
library/compose/loki/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "Loki"
-description: "An open-source log aggregation system"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - loki
-  - monitoring
-  - logging
+kind: "compose"
+metadata:
+  name: "Loki"
+  description: "An open-source log aggregation system"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - loki
+    - monitoring
+    - logging
 ---
 services:
   loki:

+ 11 - 9
library/compose/mariadb/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "MariaDB"
-description: "An open-source relational database management system"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - mariadb
-  - database
-  - sql
+kind: "compose"
+metadata:
+  name: "MariaDB"
+  description: "An open-source relational database management system"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - mariadb
+    - database
+    - sql
 ---
 # (Optional) when using custom network
 # networks:

+ 34 - 16
library/compose/n8n/compose.yaml

@@ -1,14 +1,23 @@
 ---
-name: "n8n"
-description: "Workflow automation and integration tool"
-version: "0.0.1"
-date: "2025-09-03"
-author: "Christian Lempa"
-tags:
-  - n8n
-  - automation
-  - workflows
-  - compose
+kind: "compose"
+metadata:
+  name: "n8n"
+  description: "Workflow automation and integration tool"
+  version: "0.0.1"
+  date: "2025-09-03"
+  author: "Christian Lempa"
+  tags:
+    - n8n
+    - automation
+    - workflows
+    - compose
+spec:
+  ports:
+    vars:
+      ports_http:
+        description: "HTTP port for n8n web interface"
+        type: int
+        default: 5678
 ---
 services:
   {{ service_name | default('n8n') }}:
@@ -27,13 +36,22 @@ services:
       - N8N_EDITOR_BASE_URL=http://{{ traefik_host | default('n8n.home.arpa') }}
       {% endif %}
       {% endif %}
-      {% if postgres_enabled %}
+      {% if database_enabled %}
+      {% if database_type == 'postgres' -%}
       - DB_TYPE=postgresdb
-      - DB_POSTGRESDB_HOST={{ postgres_host | default('postgres') }}
-      - DB_POSTGRESDB_PORT={{ postgres_port | default(5432) }}
-      - DB_POSTGRESDB_DATABASE={{ postgres_database | default('n8n') }}
-      - DB_POSTGRESDB_USER={{ postgres_user | default('n8n') }}
-      - DB_POSTGRESDB_PASSWORD={{ postgres_password | default('n8n') }}
+      - DB_POSTGRESDB_HOST={{ database_host | default('database') }}
+      - DB_POSTGRESDB_PORT={{ database_port | default(5432) }}
+      - DB_POSTGRESDB_DATABASE={{ database_name | default('n8n') }}
+      - DB_POSTGRESDB_USER={{ database_user | default('n8n') }}
+      - DB_POSTGRESDB_PASSWORD={{ database_password | default('n8n') }}
+      {% elif database_type == 'mysql' -%}
+      - DB_TYPE=mysqldb
+      - DB_MYSQLDB_HOST={{ database_host | default('database') }}
+      - DB_MYSQLDB_PORT={{ database_port | default(3306) }}
+      - DB_MYSQLDB_DATABASE={{ database_name | default('n8n') }}
+      - DB_MYSQLDB_USER={{ database_user | default('n8n') }}
+      - DB_MYSQLDB_PASSWORD={{ database_password | default('n8n') }}
+      {% endif %}
       {% endif %}
     volumes:
       - /etc/localtime:/etc/localtime:ro

+ 120 - 28
library/compose/nextcloud/compose.yaml

@@ -1,42 +1,134 @@
 ---
-name: "Nextcloud"
-description: "A self-hosted file sync and share platform"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - nextcloud
-  - web
-  - file-storage
+kind: "compose"
+metadata:
+  name: "Nextcloud"
+  description: "A self-hosted file sync and share platform"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - nextcloud
+    - web
+    - file-storage
+spec:
+  ports:
+    vars:
+      ports_http:
+        description: "HTTP port for Nextcloud web interface"
+        type: int
+        default: 80
+  database:
+    vars:
+      database_type:
+        description: "Database type (mysql or postgres)"
+        type: enum
+        options: ["mysql", "postgres"]
+        default: "mysql"
+      mysql_user:
+        description: "MySQL username"
+        type: str
+        default: "nextcloud"
+      mysql_password:
+        description: "MySQL password"
+        type: str
+        default: "nextcloud"
+      mysql_database:
+        description: "MySQL database name"
+        type: str
+        default: "nextcloud"
+      mysql_root_password_random:
+        description: "Use random MySQL root password"
+        type: bool
+        default: true
+      database_user:
+        description: "PostgreSQL username (used when database_type is postgres)"
+        type: str
+        default: "nextcloud"
+      database_password:
+        description: "PostgreSQL password (used when database_type is postgres)"
+        type: str
+        default: "nextcloud"
+      database_name:
+        description: "PostgreSQL database name (used when database_type is postgres)"
+        type: str
+        default: "nextcloud"
 ---
-volumes:
-  nextcloud-data:
-  nextcloud-db:
 services:
-  nextcloud-app:
+  {{ service_name | default('nextcloud-app') }}:
     image: docker.io/library/nextcloud:31.0.8-apache
-    container_name: nextcloud-app
+    container_name: {{ container_name | default('nextcloud-app') }}
+    environment:
+      - TZ={{ container_timezone | default('UTC') }}
+      {% if database_type == 'mysql' -%}
+      - MYSQL_PASSWORD={{ mysql_password | default('nextcloud') }}
+      - MYSQL_DATABASE={{ mysql_database | default('nextcloud') }}
+      - MYSQL_USER={{ mysql_user | default('nextcloud') }}
+      - MYSQL_HOST={{ service_name | default('nextcloud') }}-db
+      {% elif database_type == 'postgres' -%}
+      - POSTGRES_PASSWORD={{ database_password | default('nextcloud') }}
+      - POSTGRES_DB={{ database_name | default('nextcloud') }}
+      - POSTGRES_USER={{ database_user | default('nextcloud') }}
+      - POSTGRES_HOST={{ service_name | default('nextcloud') }}-db
+      {% endif %}
+    {% if ports_enabled %}
     ports:
-      - 80:80
+      - "{{ ports_http | default(80) }}:80"
+    {% endif %}
     volumes:
       - nextcloud-data:/var/www/html
-    environment:
-      - MYSQL_PASSWORD=$MYSQL_PASSWORD
-      - MYSQL_DATABASE=$MYSQL_DATABASE
-      - MYSQL_USER=$MYSQL_USER
-      - MYSQL_HOST=nextcloud-db
-    restart: unless-stopped
-  nextcloud-db:
+    {% if network_enabled %}
+    networks:
+      - {{ network_name | default('bridge') }}
+    {% endif %}
+    {% if traefik_enabled %}
+    labels:
+      - traefik.enable=true
+      - traefik.http.services.{{ service_name | default('nextcloud') }}.loadbalancer.server.port=80
+      - traefik.http.routers.{{ service_name | default('nextcloud') }}.rule=Host(`{{ traefik_host }}`)
+      {% if traefik_tls_enabled %}
+      - traefik.http.routers.{{ service_name | default('nextcloud') }}.entrypoints={{ traefik_tls_entrypoint | default('websecure') }}
+      - traefik.http.routers.{{ service_name | default('nextcloud') }}.tls=true
+      - traefik.http.routers.{{ service_name | default('nextcloud') }}.tls.certresolver={{ traefik_tls_certresolver }}
+      {% else %}
+      - traefik.http.routers.{{ service_name | default('nextcloud') }}.entrypoints={{ traefik_entrypoint | default('web') }}
+      {% endif %}
+    {% endif %}
+    depends_on:
+      - {{ service_name | default('nextcloud') }}-db
+    restart: {{ restart_policy | default('unless-stopped') }}
+
+  {{ service_name | default('nextcloud') }}-db:
+    {% if database_type == 'mysql' -%}
     # See compatibility matrix for Nextcloud 31
     # https://docs.nextcloud.com/server/31/admin_manual/installation/system_requirements.html
     image: docker.io/library/mariadb:10.11.14
-    container_name: nextcloud-db
+    container_name: {{ service_name | default('nextcloud') }}-db
     command: --transaction-isolation=READ-COMMITTED --binlog-format=ROW
+    environment:
+      - TZ={{ container_timezone | default('UTC') }}
+      {% if mysql_root_password_random -%}
+      - MYSQL_RANDOM_ROOT_PASSWORD=true
+      {% endif %}
+      - MYSQL_PASSWORD={{ mysql_password | default('nextcloud') }}
+      - MYSQL_DATABASE={{ mysql_database | default('nextcloud') }}
+      - MYSQL_USER={{ mysql_user | default('nextcloud') }}
     volumes:
       - nextcloud-db:/var/lib/mysql
+    {% elif database_type == 'postgres' -%}
+    image: docker.io/library/postgres:17.6
+    container_name: {{ service_name | default('nextcloud') }}-db
     environment:
-      - MYSQL_RANDOM_ROOT_PASSWORD=true
-      - MYSQL_PASSWORD=$MYSQL_PASSWORD
-      - MYSQL_DATABASE=$MYSQL_DATABASE
-      - MYSQL_USER=$MYSQL_USER
-    restart: unless-stopped
+      - TZ={{ container_timezone | default('UTC') }}
+      - POSTGRES_USER={{ database_user | default('nextcloud') }}
+      - POSTGRES_PASSWORD={{ database_password | default('nextcloud') }}
+      - POSTGRES_DB={{ database_name | default('nextcloud') }}
+    volumes:
+      - nextcloud-db:/var/lib/postgresql/data
+    {% endif %}
+    {% if network_enabled %}
+    networks:
+      - {{ network_name | default('bridge') }}
+    {% endif %}
+    restart: {{ restart_policy | default('unless-stopped') }}
+
+volumes:
+  nextcloud-data:
+    driver: local
+  nextcloud-db:
+    driver: local
+
+{% if network_enabled %}
+networks:
+  {{ network_name | default('bridge') }}:
+    {% if network_external %}
+    external: true
+    {% endif %}
+{% endif %}

+ 68 - 41
library/compose/nginx/compose.yaml

@@ -1,67 +1,94 @@
 ---
-name: "Nginx"
-description: "An open-source web server"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - nginx
-  - web
-  - reverse-proxy
+kind: "compose"
+metadata:
+  name: "Nginx"
+  description: "An open-source web server"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - nginx
+    - web
+    - reverse-proxy
+spec:
+  ports:
+    vars:
+      ports_http:
+        description: "HTTP port for nginx service"
+        type: int
+        default: 8080
+      ports_https:
+        description: "HTTPS port for nginx service"
+        type: int
+        default: 8443
+  nginx:
+    vars:
+      nginx_dashboard_enabled:
+        description: "Enable nginx dashboard"
+        type: bool
+        default: false
+      nginx_dashboard_port:
+        description: "Nginx dashboard port"
+        type: int
+        default: 8081
 ---
 services:
-  {{ service_name }}:
+  {{ service_name | default('nginx') }}:
     image: docker.io/library/nginx:1.28.0-alpine
-    {% if not swarm %}
+    {% if not swarm_enabled %}
     container_name: {{ container_name | default('nginx') }}
     {% endif %}
-    {% if swarm %}
+    {% if swarm_enabled %}
     deploy:
-      replicas: {{ swarm.replicas | default(1) }}
-      {% if traefik %}
+      replicas: {{ swarm_replicas | default(1) }}
+      {% if traefik_enabled %}
       labels:
-        - traefik.enable={{ traefik }}
-        - traefik.http.services.{{ container_name }}.loadbalancer.server.port=80
-        - traefik.http.routers.{{ container_name }}.entrypoints=websecure
-        - traefik.http.routers.{{ container_name }}.rule=Host(`{{ traefik.host }}`)
-        - traefik.http.routers.{{ container_name }}.tls={{ traefik.tls | default(true) }}
-        - traefik.http.routers.{{ container_name }}.tls.certresolver={{ traefik.certresolver }}
-        - traefik.http.routers.{{ container_name }}.service={{ container_name }}
+        - traefik.enable=true
+        - traefik.http.services.{{ container_name | default('nginx') }}.loadbalancer.server.port=80
+        - traefik.http.routers.{{ container_name | default('nginx') }}.entrypoints={{ traefik_tls_entrypoint | default('websecure') }}
+        - traefik.http.routers.{{ container_name | default('nginx') }}.rule=Host(`{{ traefik_host }}`)
+        - traefik.http.routers.{{ container_name | default('nginx') }}.tls={{ traefik_tls_enabled | default(true) }}
+        - traefik.http.routers.{{ container_name | default('nginx') }}.tls.certresolver={{ traefik_tls_certresolver }}
+        - traefik.http.routers.{{ container_name | default('nginx') }}.service={{ container_name | default('nginx') }}
       {% endif %}
     {% endif %}
-    {% if not traefik %}
+    {% if ports_enabled %}
     ports:
-      - "{{ service_port_http | default(8080) }}:80"
-      - "{{ service_port_https | default(8443) }}:443"
-      {% if nginx_dashboard %}
-      - "{{ nginx_dashboard.port | default(8081) }}:8080"
+      - "{{ ports_http | default(8080) }}:80"
+      - "{{ ports_https | default(8443) }}:443"
+      {% if nginx_dashboard_enabled %}
+      - "{{ nginx_dashboard_port | default(8081) }}:8080"
       {% endif %}
     {% endif %}
     # volumes:
     #   - ./config/default.conf:/etc/nginx/conf.d/default.conf:ro
     #   - ./data:/usr/share/nginx/html:ro
-    {% if traefik and not swarm %}
+    {% if traefik_enabled and not swarm_enabled %}
     labels:
-      - traefik.enable={{ traefik  }}
-      - traefik.http.services.{{ container_name }}.loadbalancer.server.port=80
-      - traefik.http.routers.{{ container_name }}.entrypoints=websecure
-      - traefik.http.routers.{{ container_name }}.rule=Host(`{{ traefik.host }}`)
-      - traefik.http.routers.{{ container_name }}.tls={{ traefik.tls | default(true) }}
-      - traefik.http.routers.{{ container_name }}.tls.certresolver={{ traefik.certresolver }}
-      - traefik.http.routers.{{ container_name }}.service={{ container_name }}
+      - traefik.enable=true
+      - traefik.http.services.{{ container_name | default('nginx') }}.loadbalancer.server.port=80
+      - traefik.http.routers.{{ container_name | default('nginx') }}.rule=Host(`{{ traefik_host }}`)
+      {% if traefik_tls_enabled %}
+      - traefik.http.routers.{{ container_name | default('nginx') }}.entrypoints={{ traefik_tls_entrypoint | default('websecure') }}
+      - traefik.http.routers.{{ container_name | default('nginx') }}.tls=true
+      - traefik.http.routers.{{ container_name | default('nginx') }}.tls.certresolver={{ traefik_tls_certresolver }}
+      {% else %}
+      - traefik.http.routers.{{ container_name | default('nginx') }}.entrypoints={{ traefik_entrypoint | default('web') }}
+      {% endif %}
+      - traefik.http.routers.{{ container_name | default('nginx') }}.service={{ container_name | default('nginx') }}
     {% endif %}
-    {% if network %}
+    {% if network_enabled %}
     networks:
-      - {{ network.name | default('bridge') }}
+      - {{ network_name | default('bridge') }}
     {% endif %}
-    {% if not swarm %}
-    restart: unless-stopped
+    {% if not swarm_enabled %}
+    restart: {{ restart_policy | default('unless-stopped') }}
     {% endif %}
 
-{% if network %}
+{% if network_enabled %}
 networks:
-  {{ network.name | default('bridge') }}:
-    {% if network.external | default(true) %}
+  {{ network_name | default('bridge') }}:
+    {% if network_external %}
     external: true
     {% endif %}
 {% endif %}

+ 11 - 9
library/compose/nginxproxymanager/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "Nginx Proxy Manager"
-description: "An open-source reverse proxy manager"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - nginx
-  - reverse-proxy
-  - web
+kind: "compose"
+metadata:
+  name: "Nginx Proxy Manager"
+  description: "An open-source reverse proxy manager"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - nginx
+    - reverse-proxy
+    - web
 ---
 volumes:
   nginxproxymanager-data:

+ 11 - 9
library/compose/nodeexporter/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "Node Exporter"
-description: "A Prometheus exporter for hardware and OS metrics"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - prometheus
-  - monitoring
-  - metrics
+kind: "compose"
+metadata:
+  name: "Node Exporter"
+  description: "A Prometheus exporter for hardware and OS metrics"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - prometheus
+    - monitoring
+    - metrics
 ---
 services:
   node_exporter:

+ 11 - 9
library/compose/openwebui/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "Open Web UI"
-description: "A web-based user interface for managing various services"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - openwebui
-  - web
-  - user-interface
+kind: "compose"
+metadata:
+  name: "Open Web UI"
+  description: "A web-based user interface for managing various services"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - openwebui
+    - web
+    - user-interface
 ---
 services:
   openwebui:

+ 11 - 9
library/compose/passbolt/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "Passbolt"
-description: "An open-source password manager"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - passbolt
-  - password-manager
-  - web
+kind: "compose"
+metadata:
+  name: "Passbolt"
+  description: "An open-source password manager"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - passbolt
+    - password-manager
+    - web
 ---
 volumes:
   passbolt-db:

+ 79 - 29
library/compose/pihole/compose.yaml

@@ -1,43 +1,89 @@
 ---
-name: "Pi-hole"
-description: "An open-source DNS sinkhole"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - pihole
-  - dns
-  - ad-blocker
+kind: "compose"
+metadata:
+  name: "Pi-hole"
+  description: "An open-source DNS sinkhole"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - pihole
+    - dns
+    - ad-blocker
+spec:
+  ports:
+    vars:
+      ports_dns_tcp:
+        description: "DNS TCP port"
+        type: int
+        default: 53
+      ports_dns_udp:
+        description: "DNS UDP port"
+        type: int
+        default: 53
+      ports_dhcp:
+        description: "DHCP port"
+        type: int
+        default: 67
+      ports_http:
+        description: "HTTP port for Pi-hole web interface"
+        type: int
+        default: 8081
+      ports_https:
+        description: "HTTPS port for Pi-hole web interface"
+        type: int
+        default: 8443
+  pihole:
+    vars:
+      pihole_webpassword:
+        description: "Pi-hole web admin password"
+        type: str
+        default: ""
+      pihole_dns_upstreams:
+        description: "Pi-hole upstream DNS servers"
+        type: str
+        default: "8.8.8.8;8.8.4.4"
 ---
 services:
-  pihole:
-    container_name: pihole
+  {{ service_name | default('pihole') }}:
+    container_name: {{ container_name | default('pihole') }}
     image: docker.io/pihole/pihole:2025.08.0
+    {% if ports_enabled %}
     ports:
-      - 53:53/tcp
-      - 53:53/udp
-      - 67:67/udp
-      - 8081:80/tcp
-      - 8443:443/tcp
+      - "{{ ports_dns_tcp | default(53) }}:53/tcp"
+      - "{{ ports_dns_udp | default(53) }}:53/udp"
+      - "{{ ports_dhcp | default(67) }}:67/udp"
+      - "{{ ports_http | default(8081) }}:80/tcp"
+      - "{{ ports_https | default(8443) }}:443/tcp"
+    {% endif %}
     environment:
-      - TZ=Europe/Berlin
-      - FTLCONF_webserver_api_password=${FTLCONF_webserver_api_password}
-      - FTLCONF_dns_upstreams=${FTLCONF_dns_upstreams:-8.8.8.8;8.8.4.4}
+      - TZ={{ container_timezone | default('UTC') }}
+      {% if pihole_webpassword -%}
+      - FTLCONF_webserver_api_password={{ pihole_webpassword }}
+      {% endif %}
+      - FTLCONF_dns_upstreams={{ pihole_dns_upstreams | default('8.8.8.8;8.8.4.4') }}
     volumes:
       - config_dnsmasq:/etc/dnsmasq.d
       - config_pihole:/etc/pihole
+    {% if network_enabled %}
     networks:
-      - frontend
+      - {{ network_name | default('bridge') }}
+    {% endif %}
+    {% if traefik_enabled %}
     labels:
       - traefik.enable=true
-      # Pihole Web Interface
-      - traefik.http.routers.pihole.rule=Host(`example.com`)
-      - traefik.http.routers.pihole.entrypoints=websecure
-      - traefik.http.routers.pihole.tls=true
-      - traefik.http.routers.pihole.tls.certresolver=cloudflare
-      - traefik.http.routers.pihole.service=pihole
-      - traefik.http.services.pihole.loadBalancer.server.port=80
-    restart: unless-stopped
+      - traefik.http.routers.{{ service_name | default('pihole') }}.rule=Host(`{{ traefik_host }}`)
+      {% if traefik_tls_enabled %}
+      - traefik.http.routers.{{ service_name | default('pihole') }}.entrypoints={{ traefik_tls_entrypoint | default('websecure') }}
+      - traefik.http.routers.{{ service_name | default('pihole') }}.tls=true
+      - traefik.http.routers.{{ service_name | default('pihole') }}.tls.certresolver={{ traefik_tls_certresolver }}
+      {% else %}
+      - traefik.http.routers.{{ service_name | default('pihole') }}.entrypoints={{ traefik_entrypoint | default('web') }}
+      {% endif %}
+      - traefik.http.routers.{{ service_name | default('pihole') }}.service={{ service_name | default('pihole') }}
+      - traefik.http.services.{{ service_name | default('pihole') }}.loadbalancer.server.port=80
+    {% endif %}
+    restart: {{ restart_policy | default('unless-stopped') }}
 
 volumes:
   config_dnsmasq:
@@ -45,6 +91,10 @@ volumes:
   config_pihole:
     driver: local
 
+{% if network_enabled %}
 networks:
-  frontend:
+  {{ network_name | default('bridge') }}:
+    {% if network_external %}
     external: true
+    {% endif %}
+{% endif %}

+ 62 - 29
library/compose/portainer/compose.yaml

@@ -1,41 +1,74 @@
 ---
-name: "Portainer"
-description: "An open-source container management tool"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - portainer
-  - container-management
-  - web
+kind: "compose"
+metadata:
+  name: "Portainer"
+  description: "An open-source container management tool"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - portainer
+    - container-management
+    - web
+spec:
+  ports:
+    vars:
+      ports_http:
+        description: "HTTP port for Portainer web interface"
+        type: int
+        default: 9000
+      ports_https:
+        description: "HTTPS port for Portainer web interface"
+        type: int
+        default: 9443
+      ports_edge:
+        description: "Edge agent port for Portainer"
+        type: int
+        default: 8000
 ---
 services:
-  app:
-    container_name: portainer
+  {{ service_name | default('portainer') }}:
+    container_name: {{ container_name | default('portainer') }}
     image: docker.io/portainer/portainer-ce:2.33.1-alpine
+    environment:
+      - TZ={{ container_timezone | default('UTC') }}
+    {% if ports_enabled %}
     ports:
-      # --> (Optional) Remove when using traefik...
-      - 9000:9000
-      - 9443:9443
-      # <--
-      - 8000:8000
+      - "{{ ports_http | default(9000) }}:9000"
+      - "{{ ports_https | default(9443) }}:9443"
+      - "{{ ports_edge | default(8000) }}:8000"
+    {% endif %}
     volumes:
       - /run/docker.sock:/var/run/docker.sock
       - portainer-data:/data
-    # --> (Optional) When using traefik...
-    # labels:
-    #   - traefik.enable=true
-    #   - traefik.http.services.portainer.loadbalancer.server.port=9000
-    #   - traefik.http.routers.portainer.service=portainer
-    #   - traefik.http.routers.portainer.entrypoints=websecure
-    #   - traefik.http.routers.portainer.rule=Host(`your-portainer-fqdn`)
-    #   - traefik.http.routers.portainer.tls=true
-    #   - traefik.http.routers.portainer.tls.certresolver=cloudflare
-    # networks:
-    #   - frontend
-    # <--
-    restart: unless-stopped
+    {% if network_enabled %}
+    networks:
+      - {{ network_name | default('bridge') }}
+    {% endif %}
+    {% if traefik_enabled %}
+    labels:
+      - traefik.enable=true
+      - traefik.http.services.{{ service_name | default('portainer') }}.loadbalancer.server.port=9000
+      - traefik.http.routers.{{ service_name | default('portainer') }}.service={{ service_name | default('portainer') }}
+      - traefik.http.routers.{{ service_name | default('portainer') }}.rule=Host(`{{ traefik_host }}`)
+      {% if traefik_tls_enabled %}
+      - traefik.http.routers.{{ service_name | default('portainer') }}.entrypoints={{ traefik_tls_entrypoint | default('websecure') }}
+      - traefik.http.routers.{{ service_name | default('portainer') }}.tls=true
+      - traefik.http.routers.{{ service_name | default('portainer') }}.tls.certresolver={{ traefik_tls_certresolver }}
+      {% else %}
+      - traefik.http.routers.{{ service_name | default('portainer') }}.entrypoints={{ traefik_entrypoint | default('web') }}
+      {% endif %}
+    {% endif %}
+    restart: {{ restart_policy | default('unless-stopped') }}
 
 volumes:
   portainer-data:
     driver: local
+
+{% if network_enabled %}
+networks:
+  {{ network_name | default('bridge') }}:
+    {% if network_external %}
+    external: true
+    {% endif %}
+{% endif %}

+ 60 - 31
library/compose/postgres/compose.yaml

@@ -1,55 +1,84 @@
 ---
-name: "PostgreSQL"
-description: "An open-source relational database management system"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - postgres
-  - database
-  - sql
+kind: "compose"
+metadata:
+  name: "PostgreSQL"
+  description: "An open-source relational database management system"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - postgres
+    - database
+    - sql
+spec:
+  postgres:
+    vars:
+      postgres_initdb_args:
+        description: "PostgreSQL initdb arguments"
+        type: str
+        default: "--data-checksums"
+      postgres_host_auth_method:
+        description: "PostgreSQL host authentication method"
+        type: str
+        default: ""
+      postgres_secrets_enabled:
+        description: "Use PostgreSQL secrets file for password"
+        type: bool
+        default: true
 ---
 services:
-  postgres:
+  {{ service_name | default('postgres') }}:
     image: docker.io/library/postgres:17.6
-    container_name: postgres
+    container_name: {{ container_name | default('postgres') }}
     environment:
-      - POSTGRES_INITDB_ARGS=${POSTGRES_INITDB_ARGS---data-checksums}
-      - POSTGRES_HOST_AUTH_METHOD=${POSTGRES_HOST_AUTH_METHOD-}
-      - POSTGRES_USER=${POSTGRES_USER:-postgres}
+      - POSTGRES_INITDB_ARGS={{ postgres_initdb_args | default('--data-checksums') }}
+      {% if postgres_host_auth_method -%}
+      - POSTGRES_HOST_AUTH_METHOD={{ postgres_host_auth_method }}
+      {% endif %}
+      - POSTGRES_USER={{ database_user | default('postgres') }}
+      {% if postgres_secrets_enabled -%}
       - POSTGRES_PASSWORD_FILE=/run/secrets/postgres_password
-      - POSTGRES_DB=${POSTGRES_DB:-$POSTGRES_USER}
-      - TZ=${TZ:-UTC}
+      {% else -%}
+      - POSTGRES_PASSWORD={{ database_password | default('postgres') }}
+      {% endif %}
+      - POSTGRES_DB={{ database_name | default('postgres') }}
+      - TZ={{ container_timezone | default('UTC') }}
+    {% if ports_enabled %}
     ports:
-      - 5432:5432
+      - "{{ database_port | default(5432) }}:5432"
+    {% endif %}
     healthcheck:
-      test: ['CMD-SHELL', 'pg_isready -U "${POSTGRES_USER:-postgres}"']
+      test: ['CMD-SHELL', 'pg_isready -U "{{ database_user | default("postgres") }}"']
       start_period: 30s
       interval: 10s
       timeout: 10s
       retries: 5
-    # (Optional)  When using custom network, see also
-    #             https://docs.docker.com/compose/compose-file/compose-file-v3/#networks
-    #
-    # networks:
-    #   - yournetwork
+    {% if network_enabled %}
+    networks:
+      - {{ network_name | default('bridge') }}
+    {% endif %}
+    {% if postgres_secrets_enabled %}
     secrets:
       - postgres_password
+    {% endif %}
     volumes:
       - postgres_data:/var/lib/postgresql/data
-    restart: unless-stopped
-
-# (Optional)  When using custom network, see also
-#             https://docs.docker.com/compose/compose-file/compose-file-v3/#network-configuration-reference
-#
-# networks:
-#   yournetwork:
-#     external: true
+    restart: {{ restart_policy | default('unless-stopped') }}
 
+{% if postgres_secrets_enabled %}
 secrets:
   postgres_password:
     file: secret.postgres_password.txt
+{% endif %}
 
 volumes:
   postgres_data:
     driver: local
+
+{% if network_enabled %}
+networks:
+  {{ network_name | default('bridge') }}:
+    {% if network_external %}
+    external: true
+    {% endif %}
+{% endif %}

+ 11 - 9
library/compose/prometheus/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "Prometheus"
-description: "An open-source monitoring and alerting toolkit"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - prometheus
-  - monitoring
-  - alerting
+kind: "compose"
+metadata:
+  name: "Prometheus"
+  description: "An open-source monitoring and alerting toolkit"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - prometheus
+    - monitoring
+    - alerting
 ---
 volumes:
   prometheus-data:

+ 11 - 9
library/compose/promtail/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "Promtail"
-description: "An open-source log collection agent"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - promtail
-  - logging
-  - grafana
+kind: "compose"
+metadata:
+  name: "Promtail"
+  description: "An open-source log collection agent"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - promtail
+    - logging
+    - grafana
 ---
 services:
   promtail:

+ 11 - 9
library/compose/teleport/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "Teleport"
-description: "An open-source access plane for managing SSH access"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - teleport
-  - ssh
-  - access-management
+kind: "compose"
+metadata:
+  name: "Teleport"
+  description: "An open-source access plane for managing SSH access"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - teleport
+    - ssh
+    - access-management
 ---
 # -- (Optional) When using Traefik, use this section
 # networks:

+ 48 - 33
library/compose/traefik/compose.yaml

@@ -1,55 +1,70 @@
 ---
-name: "Traefik"
-description: "An open-source edge router for microservices"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - traefik
-  - reverse-proxy
-  - load-balancer
+kind: "compose"
+metadata:
+  name: "Traefik"
+  description: "An open-source edge router for microservices"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - traefik
+    - reverse-proxy
+    - load-balancer
 files:
   - config/traefik.yaml
-variables:
-  acme_email:
-    display: "ACME Email"
-    description: "Email address for ACME (Let's Encrypt) registration"
-    type: "str"
-  traefik.host:
-    display: "Traefik Host"
-    description: "Domain name for Traefik dashboard"
-    type: "str"
-  database.name:
-    display: "Database Name"
-    description: "Name of the database"
-    type: "str"
+spec:
+  traefik:
+    vars:
+      acme_email:
+        description: "Email address for ACME (Let's Encrypt) registration"
+        type: str
+        default: ""
+  database:
+    vars:
+      database_name:
+        description: "Name of the database"
+        type: str
+        default: ""
 ---
 services:
-  traefik:
+  {{ service_name | default('traefik') }}:
     image: docker.io/library/traefik:v3.5.1
-    container_name: traefik
+    container_name: {{ container_name | default('traefik') }}
+    {% if ports_enabled %}
     ports:
       - 80:80
       - 443:443
       # --> (Optional) Enable Dashboard, don't do in production
       # - 8080:8080
       # <--
+    {% endif %}
     volumes:
       - /run/docker.sock:/run/docker.sock:ro
       - ./config/:/etc/traefik/:ro
       - ./certs/:/var/traefik/certs/:rw
     environment:
-      - CF_DNS_API_TOKEN={{ acme_email }}  # Using template variable
-      {% if traefik.host -%}
-      - TRAEFIK_HOST={{ traefik.host }}
+      - TZ={{ container_timezone | default('UTC') }}
+      {% if acme_email -%}
+      - CF_DNS_API_TOKEN={{ acme_email }}  # FIXME(review): acme_email is an ACME registration email, not a Cloudflare DNS API token — declare a dedicated cf_dns_api_token variable in spec and use it here
+      {% endif %}
+      {% if traefik_host -%}
+      - TRAEFIK_HOST={{ traefik_host }}
       {% endif %}
-      {% if database.name -%}
-      - DB_NAME={{ database.name }}
+      {% if database_name -%}
+      - DB_NAME={{ database_name }}
       {% endif %}
+    {% if network_enabled %}
     networks:
-      - frontend
-    restart: unless-stopped
+      - {{ network_name | default('frontend') }}
+    {% endif %}
+    restart: {{ restart_policy | default('unless-stopped') }}
 
+{% if network_enabled %}
 networks:
-  frontend:
-    external: true  # <-- (Optional) Change this to false if you want to create a new network
+  {{ network_name | default('frontend') }}:
+    {% if network_external %}
+    external: true
+    {% else %}
+    driver: bridge
+    {% endif %}
+{% endif %}

+ 11 - 9
library/compose/twingate_connector/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "Twingate Connector"
-description: "A connector for Twingate"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - twingate
-  - connector
-  - networking
+kind: "compose"
+metadata:
+  name: "Twingate Connector"
+  description: "A connector for Twingate"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - twingate
+    - connector
+    - networking
 ---
 services:
   twingate_connector:

+ 11 - 9
library/compose/uptimekuma/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "Uptime Kuma"
-description: "A self-hosted status monitoring solution"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - uptime-kuma
-  - monitoring
-  - self-hosted
+kind: "compose"
+metadata:
+  name: "Uptime Kuma"
+  description: "A self-hosted status monitoring solution"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - uptime-kuma
+    - monitoring
+    - self-hosted
 ---
 volumes:
   uptimekuma-data:

+ 11 - 9
library/compose/wazuh/compose.yaml

@@ -1,13 +1,15 @@
 ---
-name: "Wazuh"
-description: "A security monitoring platform"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - wazuh
-  - security
-  - monitoring
+kind: "compose"
+metadata:
+  name: "Wazuh"
+  description: "A security monitoring platform"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - wazuh
+    - security
+    - monitoring
 ---
 services:
   wazuh.manager:

+ 54 - 14
library/compose/whoami/compose.yaml

@@ -1,26 +1,66 @@
 ---
-name: "Whoami"
-description: "Simple HTTP service that returns information about the request"
-version: "0.0.1"
-date: "2023-10-01"
-author: "Christian Lempa"
-tags:
-  - "traefik"
-  - "whoami"
-  - "http"
-  - "testing"
+kind: "compose"
+metadata:
+  name: "Whoami"
+  description: "Simple HTTP service that returns information about the request"
+  version: "0.0.1"
+  date: "2023-10-01"
+  author: "Christian Lempa"
+  tags:
+    - "traefik"
+    - "whoami"
+    - "http"
+    - "testing"
+spec:
+  ports:
+    vars:
+      ports_http:
+        description: "HTTP port for whoami service"
+        type: int
+        default: 8080
+      ports_https:
+        description: "HTTPS port for whoami service"
+        type: int
+        default: 8443
 ---
 services:
   {{ service_name | default('whoami') }}:
     image: traefik/whoami
     container_name: {{ container_name | default('whoami') }}
-    {% if swarm %}
+    {% if swarm_enabled %}
     deploy:
       replicas: {{ swarm_replicas | default(1) }}
       restart_policy:
         condition: on-failure
     {% endif %}
+    {% if ports_enabled %}
     ports:
-      - "{{ service_port['http'] | default(8080) }}:80"
-      - "{{ service_port['https'] | default(8443) }}:443"
-    restart: unless-stopped
+      - "{{ ports_http | default(8080) }}:80"
+      - "{{ ports_https | default(8443) }}:443"
+    {% endif %}
+    {% if network_enabled %}
+    networks:
+      - {{ network_name | default("bridge") }}
+    {% endif %}
+    {% if traefik_enabled %}
+    labels:
+      - traefik.enable=true
+      - traefik.http.services.{{ service_name | default("whoami") }}.loadbalancer.server.port=80
+      - traefik.http.routers.{{ service_name | default("whoami") }}.rule=Host(`{{ traefik_host }}`)
+      {% if traefik_tls_enabled %}
+      - traefik.http.routers.{{ service_name | default("whoami") }}.entrypoints={{ traefik_tls_entrypoint | default("websecure") }}
+      - traefik.http.routers.{{ service_name | default("whoami") }}.tls=true
+      - traefik.http.routers.{{ service_name | default("whoami") }}.tls.certresolver={{ traefik_tls_certresolver }}
+      {% else %}
+      - traefik.http.routers.{{ service_name | default("whoami") }}.entrypoints={{ traefik_entrypoint | default("web") }}
+      {% endif %}
+    {% endif %}
+    restart: {{ restart_policy | default("unless-stopped") }}
+
+{% if network_enabled %}
+networks:
+  {{ network_name | default("bridge") }}:
+    {% if network_external %}
+    external: true
+    {% endif %}
+{% endif %}