xcad 5 months ago
Parent
Commit
e080cffa08

+ 7 - 4
WARP.md

@@ -79,6 +79,8 @@ boilerplate --log-level DEBUG [command]
 - `BaseModule`: Abstract base class providing shared commands (config management)
 - Module Commands: Each module implements technology-specific operations
 - Template Library: Structured collection of boilerplates with metadata
+- `Template.vars_map`: Unified variable metadata and defaults (merged from the module's variables_spec and template frontmatter)
+- `PromptHandler`: Interactive prompting based on vars_map and template usage
 
 ### Template Format
 
@@ -109,10 +111,11 @@ tags:
 
 The codebase has been optimized following the ARCHITECTURE_OPTIMIZATION.md plan:
 
-### Simplified Variable System
-- Reduced from 3 classes (Variable, VariableGroup, VariableManager) to 2 classes (Variable, VariableRegistry)
-- Removed complex enable/disable logic
-- Streamlined data transformations
+### Simplified Variable System (2025-09)
+- Replaced custom registry with a unified variables map (vars_map) on Template
+- Module variables are defined as a simple dict (variables_spec) and merged with template frontmatter
+- Dotted names (e.g., traefik.tls.certresolver) imply hierarchy for prompting/sections
+- No separate Variable/Registry classes needed
 
 ### Streamlined Module System
 - Removed decorator pattern (@register_module)
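
The merge described in the "Simplified Variable System" bullets above is not spelled out in this diff; the following is a minimal sketch of one way module `variables_spec` entries and template frontmatter variables could be combined into a `vars_map`, with frontmatter taking precedence. The helper name `build_vars_map` and the exact precedence rule are assumptions for illustration, not taken from this commit.

```python
# Sketch only: merge module variables_spec with template frontmatter variables.
# Frontmatter is assumed to override/extend the module's definitions.
from typing import Any, Dict


def build_vars_map(variables_spec: Dict[str, Dict[str, Any]],
                   frontmatter_vars: Dict[str, Any]) -> Dict[str, Dict[str, Any]]:
    # Start from the module's definitions (copied so the spec is not mutated).
    vars_map: Dict[str, Dict[str, Any]] = {name: dict(meta) for name, meta in variables_spec.items()}
    for name, config in frontmatter_vars.items():
        entry = vars_map.setdefault(name, {})
        if isinstance(config, dict):
            entry.update(config)       # e.g. {"description": ..., "default": ...}
        else:
            entry["default"] = config  # a bare value acts as a default
    return vars_map


# Dotted names imply hierarchy: "traefik.tls.certresolver" sits under the
# "traefik" -> "tls" section when prompting.
spec = {
    "traefik": {"type": "bool", "display": "Enable Traefik"},
    "traefik.tls.certresolver": {"type": "str", "display": "Cert Resolver"},
}
frontmatter = {"ports.http": {"description": "Port for HTTP access", "default": "5678"}}
print(build_vars_map(spec, frontmatter))
```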

+ 0 - 213
cli/core/config.py

@@ -1,213 +0,0 @@
-"""Global configuration management for the boilerplate CLI."""
-
-from dataclasses import dataclass, field
-from pathlib import Path
-from typing import Any, Dict, List, Optional
-import logging
-import yaml
-
-# Using standard Python exceptions
-
-logger = logging.getLogger('boilerplates')
-
-
-@dataclass
-class LibraryConfig:
-  """Configuration for a single library."""
-  name: str
-  type: str  # 'local' or 'git'
-  path: Optional[str] = None  # For local libraries
-  repo: Optional[str] = None  # For git libraries
-  branch: str = "main"  # For git libraries
-  priority: int = 0  # Higher priority = checked first
-
-
-@dataclass
-class Config:
-  """Global configuration management."""
-  
-  # Paths
-  config_dir: Path = field(default_factory=lambda: Path.home() / ".boilerplates")
-  cache_dir: Path = field(default_factory=lambda: Path.home() / ".boilerplates" / "cache")
-  
-  # Libraries
-  libraries: List[LibraryConfig] = field(default_factory=list)
-  
-  # Application settings
-  log_level: str = "INFO"
-  default_editor: str = "vim"
-  auto_update_remotes: bool = False
-  template_validation: bool = True
-  
-  # UI settings
-  use_rich_output: bool = True
-  confirm_generation: bool = True
-  show_summary: bool = True
-  
-  def __post_init__(self):
-    """Ensure directories exist."""
-    self.config_dir.mkdir(parents=True, exist_ok=True)
-    self.cache_dir.mkdir(parents=True, exist_ok=True)
-  
-  @classmethod
-  def load(cls, config_path=None):
-    """Load configuration from file or use defaults.
-    
-    Args:
-        config_path: Optional path to config file. If not provided,
-                    uses ~/.boilerplates/config.yaml
-    
-    Returns:
-        Config instance with loaded or default values
-    """
-    if config_path is None:
-      config_path = Path.home() / ".boilerplates" / "config.yaml"
-    
-    if config_path.exists():
-      try:
-        with open(config_path, 'r') as f:
-          data = yaml.safe_load(f) or {}
-        
-        # Parse libraries if present
-        libraries = []
-        for lib_data in data.get('libraries', []):
-          try:
-            libraries.append(LibraryConfig(**lib_data))
-          except TypeError as e:
-            logger.warning(f"Invalid library configuration: {lib_data}, error: {e}")
-        
-        # Remove libraries from data to avoid duplicate in Config init
-        if 'libraries' in data:
-          del data['libraries']
-        
-        # Convert path strings to Path objects
-        if 'config_dir' in data:
-          data['config_dir'] = Path(data['config_dir'])
-        if 'cache_dir' in data:
-          data['cache_dir'] = Path(data['cache_dir'])
-        
-        config = cls(**data, libraries=libraries)
-        logger.debug(f"Loaded configuration from {config_path}")
-        return config
-        
-      except yaml.YAMLError as e:
-        raise ValueError(f"Invalid YAML format in config.yaml: {e}")
-      except Exception as e:
-        logger.warning(f"Failed to load config from {config_path}: {e}, using defaults")
-        return cls()
-    else:
-      logger.debug(f"No config file found at {config_path}, using defaults")
-      return cls()
-  
-  def save(self, config_path=None):
-    """Save configuration to file.
-    
-    Args:
-        config_path: Optional path to save config. If not provided,
-                    uses ~/.boilerplates/config.yaml
-    """
-    if config_path is None:
-      config_path = self.config_dir / "config.yaml"
-    
-    data = {
-      'config_dir': str(self.config_dir),
-      'cache_dir': str(self.cache_dir),
-      'log_level': self.log_level,
-      'default_editor': self.default_editor,
-      'auto_update_remotes': self.auto_update_remotes,
-      'template_validation': self.template_validation,
-      'use_rich_output': self.use_rich_output,
-      'confirm_generation': self.confirm_generation,
-      'show_summary': self.show_summary,
-      'libraries': [
-        {
-          'name': lib.name,
-          'type': lib.type,
-          'path': lib.path,
-          'repo': lib.repo,
-          'branch': lib.branch,
-          'priority': lib.priority
-        }
-        for lib in self.libraries
-      ]
-    }
-    
-    # Remove None values from library configs
-    for lib in data['libraries']:
-      lib = {k: v for k, v in lib.items() if v is not None}
-    
-    try:
-      config_path.parent.mkdir(parents=True, exist_ok=True)
-      with open(config_path, 'w') as f:
-        yaml.safe_dump(data, f, default_flow_style=False, sort_keys=False)
-      logger.debug(f"Saved configuration to {config_path}")
-    except Exception as e:
-      raise OSError(f"Failed to save config.yaml: {e}")
-  
-  def add_library(self, library):
-    """Add a library configuration.
-    
-    Args:
-        library: LibraryConfig instance to add
-    """
-    # Check for duplicate names
-    existing_names = {lib.name for lib in self.libraries}
-    if library.name in existing_names:
-      raise ValueError(f"Library with name '{library.name}' already exists")
-    
-    self.libraries.append(library)
-    # Sort by priority (highest first)
-    self.libraries.sort(key=lambda l: l.priority, reverse=True)
-  
-  def remove_library(self, name):
-    """Remove a library configuration by name.
-    
-    Args:
-        name: Name of the library to remove
-        
-    Returns:
-        True if library was removed, False if not found
-    """
-    original_count = len(self.libraries)
-    self.libraries = [lib for lib in self.libraries if lib.name != name]
-    return len(self.libraries) < original_count
-  
-  def get_library(self, name):
-    """Get a library configuration by name.
-    
-    Args:
-        name: Name of the library
-        
-    Returns:
-        LibraryConfig if found, None otherwise
-    """
-    for lib in self.libraries:
-      if lib.name == name:
-        return lib
-    return None
-
-
-# Global configuration instance
-_config = None
-
-
-def get_config():
-  """Get the global configuration instance.
-  
-  Returns:
-      The global Config instance, loading it if necessary
-  """
-  global _config
-  if _config is None:
-    _config = Config.load()
-  return _config
-
-
-def set_config(config):
-  """Set the global configuration instance.
-  
-  Args:
-      config: Config instance to use globally
-  """
-  global _config
-  _config = config

+ 17 - 364
cli/core/library.py

@@ -1,8 +1,6 @@
 from pathlib import Path
 import subprocess
 import logging
-from .config import get_config, LibraryConfig
-# Using standard Python exceptions
 
 logger = logging.getLogger(__name__)
 
@@ -16,378 +14,33 @@ class Library:
     self.priority = priority  # Higher priority = checked first
 
   def find_by_id(self, module_name, files, template_id):
-    """
-    Find a template by its ID in this library.
-    
-    Args:
-        module_name: The module name (e.g., 'terraform', 'compose') to search within.
-                    This narrows the search to the specific technology directory in the library.
-        files: List of file patterns to search for (e.g., ['*.tf', '*.yaml']).
-               This filters templates to only those with matching file extensions,
-               ensuring we only process relevant template files for the module.
-        template_id: The unique identifier of the template to find.
-                    This is typically derived from the template's directory name or filename.
-    
-    Returns:
-        Template object if found, None otherwise.
-    """
-    from .template import Template  # Import here to avoid circular import
-    
-    logger.debug(f"Searching for template '{template_id}' in library '{self.name}' at {self.path} (module: {module_name})")
-    
-    module_path = self.path / module_name
-    if not module_path.exists():
-      logger.debug(f"Module path '{module_path}' does not exist in library '{self.name}'")
-      return None
-    
-    # Try to find the template directory directly by ID
-    template_dir = module_path / template_id
-    if template_dir.exists() and template_dir.is_dir():
-      # Look for template files in this specific directory
-      for filename in files:
-        for file_path in template_dir.glob(filename):
-          if file_path.is_file():
-            template = Template.from_file(file_path)
-            # Set module context if not already specified in frontmatter
-            if not template.module:
-              template.module = module_name
-            # Verify this is actually the template we want
-            if template.id == template_id:
-              logger.info(f"Found template '{template_id}' in library '{self.name}' (direct lookup)")
-              return template
-    
-    # Fallback to the original method if direct lookup fails
-    # This handles cases where template ID doesn't match directory structure
-    logger.debug(f"Direct lookup failed for '{template_id}', falling back to full scan in library '{self.name}'")
-    for template in self.find(module_name, files, sorted=False):
-      if template.id == template_id:
-        logger.info(f"Found template '{template_id}' in library '{self.name}' (full scan)")
-        return template
-    
-    logger.debug(f"Template '{template_id}' not found in library '{self.name}'")
-    return None
+    """Find a template by its ID in this library."""
+    pass
 
   def find(self, module_name, files, sorted=False):
     """Find templates in this library for a specific module."""
-    from .template import Template  # Import here to avoid circular import
-    
-    logger.debug(f"Scanning for templates in library '{self.name}' (module: {module_name}, files: {files})")
-    
-    templates = []
-    module_path = self.path / module_name
-    
-    if not module_path.exists():
-      logger.debug(f"Module path '{module_path}' does not exist in library '{self.name}'")
-      return templates
-    
-    # Find all files matching the specified filenames
-    for filename in files:
-      for file_path in module_path.rglob(filename):
-        if file_path.is_file():
-          # Create Template object using the new class method
-          template = Template.from_file(file_path)
-          # Set module context if not already specified in frontmatter
-          if not template.module:
-            template.module = module_name
-          templates.append(template)
-
-    if sorted:
-      templates.sort(key=lambda t: t.id)
-
-    if templates:
-      logger.info(f"Found {len(templates)} templates in library '{self.name}' for module '{module_name}'")
-    else:
-      logger.debug(f"No templates found in library '{self.name}' for module '{module_name}'")
-    return templates
-
-
-class RemoteLibrary(Library):
-  """Support for Git-based remote template libraries."""
-  
-  def __init__(self, name: str, repo_url: str, branch: str = "main", priority: int = 0):
-    """Initialize a remote library.
-    
-    Args:
-        name: Name of the library
-        repo_url: Git repository URL
-        branch: Branch to use (default: main)
-        priority: Library priority (higher = checked first)
-    """
-    self.repo_url = repo_url
-    self.branch = branch
-    
-    # Set up local cache path
-    config = get_config()
-    local_cache = config.cache_dir / name
-    
-    # Initialize parent with cache path
-    super().__init__(name, local_cache, priority)
-    
-    # Update the cache on initialization if configured
-    if config.auto_update_remotes:
-      try:
-        self.update()
-      except Exception as e:
-        logger.warning(f"Failed to auto-update remote library '{name}': {e}")
-  
-  def update(self) -> bool:
-    """Pull latest changes from remote repository.
-    
-    Returns:
-        True if update was successful, False otherwise
-    """
-    try:
-      if not self.path.exists():
-        # Clone repository
-        logger.info(f"Cloning remote library '{self.name}' from {self.repo_url}")
-        self.path.parent.mkdir(parents=True, exist_ok=True)
-        
-        result = subprocess.run(
-          ["git", "clone", "-b", self.branch, self.repo_url, str(self.path)],
-          capture_output=True,
-          text=True,
-          check=True
-        )
-        
-        if result.returncode != 0:
-          raise ConnectionError(f"Git clone failed for '{self.name}': {result.stderr}")
-        
-        logger.info(f"Successfully cloned library '{self.name}'")
-        return True
-        
-      else:
-        # Pull updates
-        logger.info(f"Updating remote library '{self.name}'")
-        
-        # First, fetch to see if there are updates
-        result = subprocess.run(
-          ["git", "fetch", "origin", self.branch],
-          cwd=self.path,
-          capture_output=True,
-          text=True
-        )
-        
-        if result.returncode != 0:
-          logger.warning(f"Failed to fetch updates for '{self.name}': {result.stderr}")
-          return False
-        
-        # Check if we're behind
-        result = subprocess.run(
-          ["git", "rev-list", "--count", f"HEAD..origin/{self.branch}"],
-          cwd=self.path,
-          capture_output=True,
-          text=True
-        )
-        
-        behind_count = int(result.stdout.strip()) if result.stdout.strip().isdigit() else 0
-        
-        if behind_count > 0:
-          # Pull the updates
-          result = subprocess.run(
-            ["git", "pull", "origin", self.branch],
-            cwd=self.path,
-            capture_output=True,
-            text=True,
-            check=True
-          )
-          
-          if result.returncode != 0:
-            raise ConnectionError(f"Git pull failed for '{self.name}': {result.stderr}")
-          
-          logger.info(f"Successfully updated library '{self.name}' ({behind_count} new commits)")
-          return True
-        else:
-          logger.debug(f"Library '{self.name}' is already up to date")
-          return True
-          
-    except subprocess.CalledProcessError as e:
-      raise RuntimeError(
-        f"Git command failed for '{self.name}': {e.stderr if hasattr(e, 'stderr') else str(e)}"
-      )
-    except Exception as e:
-      raise RuntimeError(f"Library update failed for '{self.name}': {str(e)}")
-  
-  def get_info(self) -> dict:
-    """Get information about the remote library.
-    
-    Returns:
-        Dictionary with library information
-    """
-    info = {
-      'name': self.name,
-      'type': 'remote',
-      'repo': self.repo_url,
-      'branch': self.branch,
-      'priority': self.priority,
-      'cached': self.path.exists(),
-      'cache_path': str(self.path)
-    }
-    
-    if self.path.exists():
-      try:
-        # Get current commit hash
-        result = subprocess.run(
-          ["git", "rev-parse", "HEAD"],
-          cwd=self.path,
-          capture_output=True,
-          text=True
-        )
-        if result.returncode == 0:
-          info['current_commit'] = result.stdout.strip()[:8]
-        
-        # Get last update time
-        result = subprocess.run(
-          ["git", "log", "-1", "--format=%ci"],
-          cwd=self.path,
-          capture_output=True,
-          text=True
-        )
-        if result.returncode == 0:
-          info['last_updated'] = result.stdout.strip()
-          
-      except Exception as e:
-        logger.debug(f"Failed to get git info for '{self.name}': {e}")
-    
-    return info
-
+    pass
 
 class LibraryManager:
-  """Manager for multiple libraries with priority-based ordering."""
+  """Manages multiple libraries and provides methods to find templates."""
   
+  # FIXME: For now this is static and only has one library
   def __init__(self):
-    self.libraries = []
-    self._initialize_libraries()
-  
-  def _initialize_libraries(self):
-    """Initialize libraries from configuration."""
-    config = get_config()
-    logger.info(f"Initializing library manager with {len(config.libraries)} configured libraries")
-    
-    # First, add configured libraries
-    for lib_config in config.libraries:
-      try:
-        library = self._create_library_from_config(lib_config)
-        if library:
-          self.libraries.append(library)
-          logger.info(f"Loaded library '{lib_config.name}' (type: {lib_config.type}, priority: {lib_config.priority})")
-      except Exception as e:
-        logger.warning(f"Failed to load library '{lib_config.name}': {e}")
-    
-    # Then add the default built-in library if not already configured
-    if not any(lib.name == "default" for lib in self.libraries):
-      script_dir = Path(__file__).parent.parent.parent  # Go up from cli/core/ to project root
-      default_library = Library("default", script_dir / "library", priority=-1)  # Lower priority
-      self.libraries.append(default_library)
-      logger.info(f"Added default built-in library at '{script_dir / 'library'}' (priority: -1)")
-    
-    # Sort libraries by priority (highest first)
-    self._sort_by_priority()
-    logger.info(f"Successfully initialized {len(self.libraries)} libraries")
-    if self.libraries:
-      logger.debug(f"Libraries in priority order: {[(lib.name, lib.priority) for lib in self.libraries]}")
-  
-  def _create_library_from_config(self, lib_config):
-    """Create a Library instance from configuration.
-    
-    Args:
-        lib_config: LibraryConfig instance
-        
-    Returns:
-        Library instance or None if creation fails
-    """
-    if lib_config.type == "local":
-      if lib_config.path:
-        path = Path(lib_config.path).expanduser()
-        if path.exists():
-          return Library(lib_config.name, path, lib_config.priority)
-        else:
-          logger.warning(f"Local library path does not exist: {path}")
-          return None
-    elif lib_config.type == "git":
-      if lib_config.repo:
-        return RemoteLibrary(
-          lib_config.name,
-          lib_config.repo,
-          lib_config.branch,
-          lib_config.priority
-        )
-      else:
-        logger.warning(f"Git library '{lib_config.name}' missing repo URL")
-        return None
-    else:
-      logger.warning(f"Unknown library type: {lib_config.type}")
-      return None
-  
-  def _sort_by_priority(self):
-    """Sort libraries by priority (highest first)."""
-    self.libraries.sort(key=lambda lib: lib.priority, reverse=True)
-  
-  def add_library(self, library: Library):
-    """Add a library to the collection.
-    
-    Args:
-        library: Library instance to add
-    """
-    # Check for duplicate names
-    if any(lib.name == library.name for lib in self.libraries):
-      logger.warning(f"Library '{library.name}' already exists, replacing")
-      self.libraries = [lib for lib in self.libraries if lib.name != library.name]
-    
-    self.libraries.append(library)
-    self._sort_by_priority()
+    self.libraries = [
+      Library(name="default", path=Path(__file__).parent.parent / "libraries", priority=0)
+    ]
 
+  def find_by_id(self, module_name, files, template_id):
+    """Find a template by its ID across all libraries."""
+    for library in self.libraries:
+      template = library.find_by_id(module_name, files, template_id)
+      if template:
+        return template
+  
   def find(self, module_name, files, sorted=False):
     """Find templates across all libraries for a specific module."""
-    logger.debug(f"Searching across {len(self.libraries)} libraries for module '{module_name}'")
-    all_templates = []
-    library_counts = {}
-    
     for library in self.libraries:
       templates = library.find(module_name, files, sorted=sorted)
       if templates:
-        library_counts[library.name] = len(templates)
-      all_templates.extend(templates)
-
-    if sorted:
-      all_templates.sort(key=lambda t: t.id)
-
-    if all_templates:
-      logger.info(f"Found {len(all_templates)} total templates for module '{module_name}'")
-      if library_counts:
-        logger.debug(f"Template distribution: {library_counts}")
-    else:
-      logger.debug(f"No templates found for module '{module_name}' across any library")
-    return all_templates
-
-  def find_by_id(self, module_name, files, template_id):
-    """
-    Find a template by its ID across all libraries.
-    
-    Args:
-        module_name: The module name (e.g., 'terraform', 'compose') to search within.
-                    This narrows the search to the specific technology directory across all libraries,
-                    allowing for modular organization of templates by technology type.
-        files: List of file patterns to search for (e.g., ['*.tf', '*.yaml']).
-               This filters templates to only those with matching file extensions,
-               ensuring we only process relevant template files for the specific module type.
-        template_id: The unique identifier of the template to find.
-                    This is typically derived from the template's directory name or filename,
-                    providing a human-readable way to reference specific templates.
-    
-    Returns:
-        Template object if found across any library, None otherwise.
-        
-    Note:
-        This method searches through all registered libraries in priority order (highest first),
-        returning the first matching template found. This allows higher-priority libraries
-        to override templates from lower-priority ones.
-    """
-    logger.debug(f"Searching for template '{template_id}' across {len(self.libraries)} libraries (module: {module_name})")
-    for library in self.libraries:  # Already sorted by priority
-      template = library.find_by_id(module_name, files, template_id)
-      if template:
-        logger.info(f"Retrieved template '{template_id}' from library '{library.name}' (priority: {library.priority})")
-        return template
-    
-    logger.warning(f"Template '{template_id}' not found in any library")
-    return None
+        return templates
+    return []
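
Since `Library.find_by_id` and `Library.find` are now stubs, the simplified manager cannot return templates yet; the snippet below is only a hypothetical usage sketch of the new API once the stubs are filled in. The import path `cli.core.library` is assumed from the file layout, and the file patterns are taken from the compose module further down.

```python
from cli.core.library import LibraryManager  # assumed import path

manager = LibraryManager()  # currently holds only the static "default" library

# Search each library's compose directory, in order, for a template id.
template = manager.find_by_id(
    module_name="compose",
    files=["docker-compose.yml", "compose.yml", "compose.yaml"],
    template_id="traefik",
)
if template is None:
    print("Template 'traefik' not found (Library.find_by_id is still a stub)")
```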

+ 4 - 138
cli/core/module.py

@@ -2,10 +2,9 @@ from abc import ABC
 from pathlib import Path
 from typing import Optional, Dict, Any
 import logging
-import yaml
 from typer import Typer, Option, Argument
 from rich.console import Console
-# Using standard Python exceptions
+
 from .library import LibraryManager
 
 logger = logging.getLogger(__name__)
@@ -34,21 +33,6 @@ class Module(ABC):
     if hasattr(self, '_init_variables'):
       logger.debug(f"Module '{self.name}' has variable initialization method")
       self._init_variables()
-      
-      # Validate module variable registry consistency after initialization
-      # NOTE: This ensures the module's variable hierarchy is properly structured (e.g., traefik.host requires traefik to exist).
-      # The registry defines parent-child relationships where child variables like 'traefik.tls.certresolver' can only be used
-      # when their parents ('traefik' and 'traefik.tls') are enabled. This prevents invalid module configurations.
-      if hasattr(self, 'variables') and self.variables:
-        var_count = len(self.variables.get_all_variables())
-        logger.info(f"Module '{self.name}' registered {var_count} variables")
-        
-        registry_errors = self.variables.validate_parent_child_relationships()
-        if registry_errors:
-          error_msg = f"Module '{self.name}' has invalid variable registry:\n" + "\n".join(f"  - {e}" for e in registry_errors)
-          logger.error(error_msg)
-          raise ValueError(error_msg)
-        logger.debug(f"Module '{self.name}' variable registry validation completed successfully")
     
     self.metadata = self._build_metadata()
     logger.info(f"Module '{self.name}' initialization completed successfully")
@@ -118,117 +102,6 @@ class Module(ABC):
       print(f"\n{template.content}")
 
 
-  def _enrich_template_with_variables(self, template):
-    """Enrich template with module variable registry defaults (optimized).
-    
-    This method updates the template's vars with module defaults while preserving
-    template-specific variables and frontmatter definitions.
-    
-    Args:
-        template: Template instance to enrich
-    """
-    # Skip if already enriched or no variables
-    if template._is_enriched or not hasattr(self, 'variables') or not self.variables:
-      if template._is_enriched:
-        logger.debug(f"Template '{template.id}' already enriched, skipping")
-      else:
-        logger.debug(f"Module '{self.name}' has no variables, skipping enrichment for '{template.id}'")
-      return
-    
-    logger.debug(f"Enriching template '{template.id}' with {len(self.variables.get_all_variables())} module variables")
-    
-    # Get template variables first (this is cached)
-    template_vars = template._parse_template_variables(
-      template.content, 
-      getattr(template, 'frontmatter_variables', {})
-    )
-    
-    # Only get module variables that are actually used in the template
-    used_variables = template._get_used_variables()
-    module_vars = {}
-    module_defaults = {}
-    
-    for var_name in used_variables:
-      var_obj = self.variables.get_variable(var_name)
-      if var_obj:
-        module_vars[var_name] = var_obj.default if var_obj.default is not None else None
-        if var_obj.default is not None:
-          module_defaults[var_name] = var_obj.default
-    
-    if module_defaults:
-      logger.debug(f"Module provides {len(module_defaults)} defaults for used variables")
-      logger.debug(f"Module default values: {module_defaults}")
-    
-    # Merge with template taking precedence
-    final_vars = dict(module_vars)
-    overrides = {}
-    
-    for var_name, var_value in template_vars.items():
-      if var_name in final_vars and final_vars[var_name] != var_value and var_value is not None:
-        logger.warning(
-          f"Variable '{var_name}' defined in both module and template. Template takes precedence."
-        )
-        overrides[var_name] = var_value
-      final_vars[var_name] = var_value
-    
-    if overrides:
-      logger.debug(f"Template overrode {len(overrides)} module variables")
-    
-    # Set final variables and mark as enriched
-    template.vars = final_vars
-    template._is_enriched = True
-    
-    if final_vars:
-      logger.info(f"Template '{template.id}' enriched with {len(final_vars)} variables from module '{self.name}'")
-    else:
-      logger.debug(f"Template '{template.id}' has no variables after enrichment")
-
-  def _check_template_readiness(self, template):
-    """Check if template is ready for generation (replaces complex validation).
-    
-    Args:
-        template: Template instance to check
-    
-    Raises:
-        ValueError: If template has critical issues preventing generation
-    """
-    logger.debug(f"Checking template readiness for '{template.id}'")
-    errors = []
-    
-    # Check for basic template issues
-    if not template.content.strip():
-      errors.append("Template has no content")
-    
-    # Check for undefined variables (variables used but not available)
-    undefined_vars = []
-    for var_name, var_value in template.vars.items():
-      if var_value is None:
-        # Check if it's in module registry
-        if hasattr(self, 'variables') and self.variables:
-          var_obj = self.variables.get_variable(var_name)
-          if not var_obj:
-            # Not in module registry and no template default - problematic
-            undefined_vars.append(var_name)
-    
-    if undefined_vars:
-      errors.append(
-        f"Template uses undefined variables: {', '.join(undefined_vars)}. "
-        f"These variables are not registered in the module and have no template defaults."
-      )
-    
-    # Check for syntax errors by attempting to create AST
-    try:
-      template._get_ast()
-    except Exception as e:
-      errors.append(f"Template has Jinja2 syntax errors: {str(e)}")
-    
-    if errors:
-      logger.error(f"Template '{template.id}' failed readiness check with {len(errors)} errors")
-      error_msg = f"Template '{template.id}' is not ready for generation:\n" + "\n".join(f"  - {e}" for e in errors)
-      raise ValueError(error_msg)
-    
-    logger.debug(f"Template '{template.id}' passed readiness check")
-
   def generate(
     self,
     id: str = Argument(..., help="Template ID"),
@@ -243,16 +116,9 @@ class Module(ABC):
     if not template:
       logger.error(f"Template '{id}' not found for generation in module '{self.name}'")
       raise FileNotFoundError(f"Template '{id}' not found in module '{self.name}'")
-    
-    # Enrich template with module variables if available
-    self._enrich_template_with_variables(template)
-    
-    # Check for critical template issues after enrichment
-    self._check_template_readiness(template)
-    
-    logger.info(f"Template '{id}' generation completed successfully for module '{self.name}'")
-    print("TEST SUCCESSFUL")
-  
+
+    # PLACEHOLDER FOR TEMPLATE GENERATION LOGIC
+
   def register_cli(self, app: Typer):
     """Register module commands with the main app."""
     logger.debug(f"Registering CLI commands for module '{self.name}'")

+ 4 - 135
cli/core/template.py

@@ -17,8 +17,8 @@ class Template:
   file_path: Path
   content: str = ""
   
-  
   # Frontmatter fields with defaults
+  id: str = ""
   name: str = ""
   description: str = "No description available"
   author: str = ""
@@ -28,34 +28,10 @@ class Template:
   tags: List[str] = field(default_factory=list)
   files: List[str] = field(default_factory=list)
   
-  # Computed properties (will be set in __post_init__)
-  id: str = field(init=False)
-  relative_path: str = field(init=False)
-  size: int = field(init=False)
-  
   # Template variable analysis results
   vars: Dict[str, Any] = field(default_factory=dict, init=False)
-  frontmatter_variables: Dict[str, Any] = field(default_factory=dict, init=False)
-  
-  # Cache for performance optimization
-  _jinja_ast: Any = field(default=None, init=False, repr=False)
-  _parsed_vars: Dict[str, Any] = field(default=None, init=False, repr=False)
-  _is_enriched: bool = field(default=False, init=False, repr=False)
- 
-  def __post_init__(self):
-    """Initialize computed properties after dataclass initialization."""
-    # Set default name if not provided
-    if not self.name:
-      self.name = self.file_path.parent.name
-    
-    # Computed properties
-    self.id = self.file_path.parent.name
-    self.relative_path = self.file_path.name
-    self.size = self.file_path.stat().st_size if self.file_path.exists() else 0
-    
-    # Initialize with empty vars - modules will enrich with their variables
-    # Template parsing and variable enrichment is handled by the module
-    self.vars = {}
+
+
 
   @classmethod
   def from_file(cls, file_path: Path) -> "Template":
@@ -85,7 +61,7 @@ class Template:
       if template.frontmatter_variables:
         logger.debug(f"Template '{template.id}' has {len(template.frontmatter_variables)} frontmatter variables: {list(template.frontmatter_variables.keys())}")
       
-      logger.info(f"Loaded template '{template.id}' (v{template.version or 'unversioned'}, {template.size} bytes)")
+      logger.info(f"Loaded template '{template.id}' (v{template.version or 'unversioned'}")
       logger.debug(f"Template details: author='{template.author}', tags={template.tags}")
       return template
     except Exception as e:
@@ -152,110 +128,3 @@ class Template:
     with open(file_path, 'r', encoding='utf-8') as f:
       post = frontmatter.load(f)
     return post.metadata, post.content
-
-  def render(self, variable_values: Dict[str, Any]) -> str:
-    """Render the template with the provided variable values."""
-    logger = logging.getLogger('boilerplates')
-    
-    try:
-      logger.debug(f"Rendering template '{self.id}' with {len(variable_values)} provided variables")
-      env = self._create_jinja_env()
-      jinja_template = env.from_string(self.content)
-      # Merge template vars (with defaults) with provided values
-      # All variables should be defined at this point due to validation
-      merged_variable_values = {**self.vars, **variable_values}
-      logger.debug(f"Final render context has {len(merged_variable_values)} variables")
-      
-      rendered_content = jinja_template.render(**merged_variable_values)
-      initial_size = len(rendered_content)
-      
-      # Clean up excessive blank lines and whitespace
-      rendered_content = re.sub(r'\n\s*\n\s*\n+', '\n\n', rendered_content)
-      final_content = rendered_content.strip()
-      
-      logger.info(f"Successfully rendered template '{self.id}' ({initial_size} -> {len(final_content)} bytes)")
-      return final_content
-      
-    except Exception as e:
-      logger.error(f"Failed to render template '{self.id}': {e}")
-      raise ValueError(f"Failed to render template: {e}")
-
-  def _parse_template_variables(self, template_content: str, frontmatter_vars: Dict[str, Any] = None) -> Dict[str, Any]:
-    """Parse Jinja2 template to extract variables and their defaults (cached).
-    
-    Handles:
-    - Simple variables: service_name
-    - Dotted notation: traefik.host, service_port.http
-    - Frontmatter variable definitions
-    
-    Args:
-        template_content: The Jinja2 template content (ignored if cached)
-        frontmatter_vars: Variables defined in template frontmatter
-    
-    Returns:
-        Dict mapping variable names to their default values (None if no default)
-    """
-    # Use cache if available and no frontmatter changes
-    cache_key = f"{hash(frontmatter_vars.__str__() if frontmatter_vars else 'None')}"
-    if self._parsed_vars is not None and not frontmatter_vars:
-      return self._parsed_vars
-    
-    try:
-      ast = self._get_ast()  # Use cached AST
-      
-      # Get all variables used in template
-      all_variables = self._get_used_variables()
-      if all_variables:
-        logger.debug(f"Template uses {len(all_variables)} variables: {sorted(all_variables)}")
-      else:
-        logger.debug("Template does not use any variables")
-      
-      # Initialize vars dict with all variables (default to None)
-      vars_dict = {var_name: None for var_name in all_variables}
-      
-      # Extract default values from | default() filters
-      template_defaults = {}
-      for node in ast.find_all(nodes.Filter):
-        if node.name == 'default' and node.args and isinstance(node.args[0], nodes.Const):
-          # Handle simple variable defaults
-          if isinstance(node.node, nodes.Name):
-            template_defaults[node.node.name] = node.args[0].value
-            vars_dict[node.node.name] = node.args[0].value
-          # Handle dotted variable defaults
-          elif isinstance(node.node, nodes.Getattr):
-            dotted_name = Template._build_dotted_name(node.node)
-            if dotted_name:
-              template_defaults[dotted_name] = node.args[0].value
-              vars_dict[dotted_name] = node.args[0].value
-      
-      if template_defaults:
-        logger.info(f"Template defines {len(template_defaults)} variable defaults")
-        logger.debug(f"Template default values: {template_defaults}")
-      
-      # Process frontmatter variables (frontmatter takes precedence)
-      if frontmatter_vars:
-        frontmatter_overrides = {}
-        for var_name, var_config in frontmatter_vars.items():
-          if var_name in vars_dict and vars_dict[var_name] is not None:
-            logger.warning(f"Variable '{var_name}' defined in both template content and frontmatter. Frontmatter definition takes precedence.")
-          
-          # Handle both simple values and complex variable configurations
-          if isinstance(var_config, dict) and 'default' in var_config:
-            frontmatter_overrides[var_name] = var_config['default']
-            vars_dict[var_name] = var_config['default']
-          else:
-            frontmatter_overrides[var_name] = var_config
-            vars_dict[var_name] = var_config
-        
-        if frontmatter_overrides:
-          logger.info(f"Frontmatter defines/overrides {len(frontmatter_overrides)} variables")
-          logger.debug(f"Frontmatter variable values: {frontmatter_overrides}")
-      
-      # Cache result if no frontmatter (pure template parsing)
-      if not frontmatter_vars:
-        self._parsed_vars = vars_dict.copy()
-      
-      return vars_dict
-    except Exception as e:
-      logger.debug(f"Error parsing template variables: {e}")
-      return {}

+ 0 - 151
cli/core/variables.py

@@ -1,151 +0,0 @@
-from typing import Any, Dict, List, Optional
-from dataclasses import dataclass, field
-from enum import Enum
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-class VariableType(Enum):
-  """Supported variable types."""
-  STR = "str"
-  INT = "int" 
-  BOOL = "bool"
-  ENUM = "enum"
-  FLOAT = "float"
-
-
-@dataclass
-class Variable:
-  """Represents a single variable with metadata."""
-  
-  name: str
-  type: VariableType = VariableType.STR
-  description: str = ""
-  display: str = ""  # Display name for UI
-  default: Any = None
-  options: List[str] = field(default_factory=list)  # For enum types
-  parent: Optional[str] = None  # Parent variable name (for dotted notation)
-  
-  def has_parent(self) -> bool:
-    """Check if this variable has a parent."""
-    return self.parent is not None
-  
-  def get_full_name(self) -> str:
-    """Get the full dotted name."""
-    if self.parent:
-      return f"{self.parent}.{self.name}"
-    return self.name
-
-
-class VariableRegistry:
-  """Registry for managing module variables."""
-  
-  def __init__(self):
-    self._variables: Dict[str, Variable] = {}  # Full name -> Variable
-  
-  def register_variable(self, variable: Variable) -> Variable:
-    """Register a variable in the registry."""
-    full_name = variable.get_full_name()
-    logger.debug(f"Attempting to register variable '{full_name}' of type '{variable.type}'")
-    
-    # Convert string type to enum if needed
-    if isinstance(variable.type, str):
-      try:
-        original_type = variable.type
-        variable.type = VariableType(variable.type.lower())
-        logger.debug(f"Converted string type '{original_type}' to VariableType.{variable.type.name} for '{full_name}'")
-      except ValueError:
-        logger.warning(f"Unknown variable type '{variable.type}' for '{full_name}', defaulting to STR")
-        variable.type = VariableType.STR
-    
-    # Validate enum options
-    if variable.type == VariableType.ENUM and not variable.options:
-      logger.error(f"Variable '{full_name}' of type 'enum' must have options")
-      raise ValueError(f"Variable '{full_name}' of type 'enum' must have options")
-    
-    if variable.type == VariableType.ENUM:
-      logger.debug(f"Variable '{full_name}' has {len(variable.options)} enum options: {variable.options}")
-    
-    if variable.default is not None:
-      logger.debug(f"Variable '{full_name}' has default value: {variable.default}")
-    
-    # Check if already registered
-    if full_name in self._variables:
-      logger.debug(f"Variable '{full_name}' already registered, replacing")
-    
-    self._variables[full_name] = variable
-    logger.info(f"Registered variable '{full_name}' (type: {variable.type.name}, parent: {variable.parent or 'none'})")
-    return variable
-  
-  def get_variable(self, name: str) -> Optional[Variable]:
-    """Get variable by full name."""
-    variable = self._variables.get(name)
-    if variable:
-      logger.debug(f"Retrieved variable '{name}' from registry (type: {variable.type.name})")
-    else:
-      logger.debug(f"Variable '{name}' not found in registry (available: {list(self._variables.keys())})")
-    return variable
-  
-  def get_all_variables(self) -> Dict[str, Variable]:
-    """Get all registered variables."""
-    count = len(self._variables)
-    if count > 0:
-      logger.debug(f"Retrieved {count} variables from registry: {sorted(self._variables.keys())}")
-    else:
-      logger.debug("No variables registered in registry")
-    return self._variables.copy()
-  
-  def get_parent_variables(self) -> List[Variable]:
-    """Get all variables that have children (enabler variables)."""
-    parent_names = set()
-    for var in self._variables.values():
-      if var.parent:
-        parent_names.add(var.parent)
-    
-    parent_vars = [self._variables[name] for name in parent_names if name in self._variables]
-    logger.debug(f"Found {len(parent_vars)} parent variables: {sorted(parent_names)}")
-    return parent_vars
-  
-  def get_children_of(self, parent_name: str) -> List[Variable]:
-    """Get all child variables of a specific parent."""
-    children = [var for var in self._variables.values() if var.parent == parent_name]
-    logger.debug(f"Found {len(children)} children for parent '{parent_name}'")
-    return children
-  
-  def validate_parent_child_relationships(self) -> List[str]:
-    """Validate that all parent-child relationships are consistent."""
-    logger.debug(f"Starting validation of parent-child relationships for {len(self._variables)} variables")
-    errors = []
-    parent_count = 0
-    child_count = 0
-    
-    for var in self._variables.values():
-      if var.parent:
-        child_count += 1
-        # Check if parent exists
-        if var.parent not in self._variables:
-          error_msg = f"Variable '{var.get_full_name()}' references non-existent parent '{var.parent}'"
-          logger.warning(f"Validation error: {error_msg}")
-          errors.append(error_msg)
-        else:
-          parent_var = self._variables[var.parent]
-          # Parent should generally be boolean if it has children
-          if parent_var.type != VariableType.BOOL:
-            warning_msg = f"Parent variable '{var.parent}' is type '{parent_var.type.name}' but has children"
-            logger.warning(f"Validation warning: {warning_msg}")
-            errors.append(f"Parent variable '{var.parent}' should be boolean type (has children)")
-      else:
-        # Count root/parent variables
-        children = self.get_children_of(var.name)
-        if children:
-          parent_count += 1
-    
-    if errors:
-      logger.error(f"Variable registry validation failed with {len(errors)} errors")
-      for error in errors:
-        logger.debug(f"  - {error}")
-    else:
-      logger.info(f"Variable registry validation passed ({parent_count} parents, {child_count} children)")
-    
-    return errors

+ 0 - 37
cli/modules/__init__.py

@@ -1,37 +0,0 @@
-"""
-Modules package for the Boilerplates CLI.
-
-To add a new module:
-1. Create a new Python file: cli/modules/[module_name].py
-2. Create a class inheriting from Module with the import: from ..core.module import Module
-3. Ensure the class properly sets 'files' parameter and implements required methods
-4. Import and register the module in cli/__main__.py
-
-Available modules:
-- compose: Manage Docker Compose configurations and services
-- ansible: Manage Ansible playbooks and configurations
-- docker: Manage Docker configurations and files
-- github_actions: Manage GitHub Actions workflows
-- gitlab_ci: Manage GitLab CI/CD pipelines
-- kestra: Manage Kestra workflows and configurations
-- kubernetes: Manage Kubernetes manifests and configurations
-- packer: Manage Packer templates and configurations
-- terraform: Manage Terraform configurations and modules
-- vagrant: Manage Vagrant configurations and files
-
-Example:
-    # In cli/modules/mymodule.py
-    from ..core.module import Module
-    
-    class MyModule(Module):
-        def __init__(self):
-            super().__init__(
-                name="mymodule",
-                description="My module description",
-                files=["config.yml", "settings.json"],
-                vars={"key": "value"}  # optional
-            )
-        
-        def register(self, app):
-            return super().register(app)
-"""

+ 0 - 1
cli/modules/ansible.py

@@ -9,5 +9,4 @@ class AnsibleModule(Module):
   files = ["playbook.yml", "playbook.yaml", "main.yml", "main.yaml", 
            "site.yml", "site.yaml"]
 
-# Register the module
 registry.register(AnsibleModule)

+ 36 - 168
cli/modules/compose.py

@@ -1,179 +1,47 @@
 from ..core.module import Module
 from ..core.registry import registry
-from ..core.variables import Variable, VariableRegistry, VariableType
+
 
 class ComposeModule(Module):
   """Docker Compose module."""
   
   name = "compose"
   description = "Manage Docker Compose configurations"
-  files = ["docker-compose.yml", "compose.yml", "compose.yaml"]
-  
-  def _init_variables(self):
-    """Initialize module-specific variables."""
-    self.variables = VariableRegistry()
-    
-    # Register root variables
-    self.variables.register_variable(Variable(
-      name="service_name",
-      type=VariableType.STR,
-      description="Service name",
-      display="Service Name"
-    ))
-    
-    self.variables.register_variable(Variable(
-      name="container_name",
-      type=VariableType.STR,
-      description="Custom container name (leave empty to use service name)",
-      display="Container Name"
-    ))
-    
-    self.variables.register_variable(Variable(
-      name="container_timezone",
-      type=VariableType.STR,
-      description="Container timezone (e.g., Europe/Berlin, America/New_York)",
-      display="Container Timezone"
-    ))
-    
-    self.variables.register_variable(Variable(
-      name="container_loglevel",
-      type=VariableType.ENUM,
-      description="Container log level",
-      display="Log Level",
-      default="info",
-      options=["debug", "info", "warn", "error"]
-    ))
-    
-    self.variables.register_variable(Variable(
-      name="container_hostname",
-      type=VariableType.STR,
-      description="Container hostname (shows up in logs and networking)",
-      display="Container Hostname"
-    ))
-    
-    self.variables.register_variable(Variable(
-      name="restart_policy",
-      type=VariableType.ENUM,
-      description="Container restart policy",
-      display="Restart Policy",
-      default="unless-stopped",
-      options=["unless-stopped", "always", "on-failure", "no"]
-    ))
-    
-    self.variables.register_variable(Variable(
-      name="ports",
-      type=VariableType.BOOL,
-      description="Enable port mapping",
-      display="Enable Ports"
-    ))
-    
-    # Network variables
-    self.variables.register_variable(Variable(
-      name="network",
-      type=VariableType.BOOL,
-      description="Enable custom network configuration",
-      display="Enable Network"
-    ))
-    
-    self.variables.register_variable(Variable(
-      name="name",
-      type=VariableType.STR,
-      description="Docker network name (e.g., frontend, backend, bridge)",
-      display="Network Name",
-      default="bridge",
-      parent="network"
-    ))
-    
-    self.variables.register_variable(Variable(
-      name="external",
-      type=VariableType.BOOL,
-      description="Use existing network (must be created before running)",
-      display="External Network",
-      parent="network"
-    ))
-    
-    # Traefik variables
-    self.variables.register_variable(Variable(
-      name="traefik",
-      type=VariableType.BOOL,
-      description="Enable Traefik reverse proxy (requires Traefik to be running separately)",
-      display="Enable Traefik"
-    ))
-    
-    self.variables.register_variable(Variable(
-      name="host",
-      type=VariableType.STR,
-      description="Domain name for your service (e.g., app.example.com)",
-      display="Host Domain",
-      parent="traefik"
-    ))
-    
-    self.variables.register_variable(Variable(
-      name="entrypoint",
-      type=VariableType.STR,
-      description="HTTP entrypoint for non-TLS traffic (e.g., web, http)",
-      display="HTTP Entrypoint",
-      default="web",
-      parent="traefik"
-    ))
-    
-    self.variables.register_variable(Variable(
-      name="tls",
-      type=VariableType.BOOL,
-      description="Enable HTTPS/TLS (requires valid domain and DNS configuration)",
-      display="Enable TLS",
-      parent="traefik"
-    ))
-    
-    self.variables.register_variable(Variable(
-      name="entrypoint",
-      type=VariableType.STR,
-      description="TLS entrypoint for HTTPS traffic (e.g., websecure, https)",
-      display="TLS Entrypoint",
-      default="websecure",
-      parent="traefik.tls"
-    ))
-    
-    self.variables.register_variable(Variable(
-      name="certresolver",
-      type=VariableType.STR,
-      description="Certificate resolver name (e.g., letsencrypt, staging)",
-      display="Cert Resolver",
-      parent="traefik.tls"
-    ))
-    
-    # PostgreSQL variables
-    self.variables.register_variable(Variable(
-      name="postgres",
-      type=VariableType.BOOL,
-      description="Enable PostgreSQL database",
-      display="Enable PostgreSQL"
-    ))
-    
-    self.variables.register_variable(Variable(
-      name="host",
-      type=VariableType.STR,
-      description="PostgreSQL host (e.g., localhost, postgres, db.example.com)",
-      display="PostgreSQL Host",
-      parent="postgres"
-    ))
-    
-    # Docker Swarm variables
-    self.variables.register_variable(Variable(
-      name="swarm",
-      type=VariableType.BOOL,
-      description="Enable Docker Swarm mode (requires Docker Swarm to be initialized)",
-      display="Enable Swarm"
-    ))
-    
-    self.variables.register_variable(Variable(
-      name="replicas",
-      type=VariableType.INT,
-      description="Number of container instances",
-      display="Replicas",
-      default=1,
-      parent="swarm"
-    ))
+  files = ["docker-compose.yml", "docker-compose.yaml", "compose.yml", "compose.yaml"]
+
+  variables_spec = {
+    # Root
+    "service_name": {"type": "str", "display": "Service Name", "description": "Service name"},
+    "container_name": {"type": "str", "display": "Container Name", "description": "Custom container name (leave empty to use service name)"},
+    "container_timezone": {"type": "str", "display": "Container Timezone", "description": "Container timezone (e.g., Europe/Berlin, America/New_York)"},
+    "container_loglevel": {"type": "enum", "display": "Log Level", "description": "Container log level", "default": "info", "options": ["debug", "info", "warn", "error"]},
+    "container_hostname": {"type": "str", "display": "Container Hostname", "description": "Container hostname (shows up in logs and networking)"},
+    "restart_policy": {"type": "enum", "display": "Restart Policy", "description": "Container restart policy", "default": "unless-stopped", "options": ["unless-stopped", "always", "on-failure", "no"]},
+
+    # Ports
+    "ports": {"type": "bool", "display": "Enable Ports", "description": "Enable port mapping"},
+
+    # Network
+    "network": {"type": "bool", "display": "Enable Network", "description": "Enable custom network configuration"},
+    "network.name": {"type": "str", "display": "Network Name", "description": "Docker network name (e.g., frontend, backend, bridge)", "default": "bridge"},
+    "network.external": {"type": "bool", "display": "External Network", "description": "Use existing network (must be created before running)"},
+
+    # Traefik
+    "traefik": {"type": "bool", "display": "Enable Traefik", "description": "Enable Traefik reverse proxy (requires Traefik to be running separately)"},
+    "traefik.host": {"type": "hostname", "display": "Host Domain", "description": "Domain name for your service (e.g., app.example.com)"},
+    "traefik.entrypoint": {"type": "str", "display": "HTTP Entrypoint", "description": "HTTP entrypoint for non-TLS traffic (e.g., web, http)", "default": "web"},
+    "traefik.tls": {"type": "bool", "display": "Enable TLS", "description": "Enable HTTPS/TLS (requires valid domain and DNS configuration)"},
+    "traefik.tls.entrypoint": {"type": "str", "display": "TLS Entrypoint", "description": "TLS entrypoint for HTTPS traffic (e.g., websecure, https)", "default": "websecure"},
+    "traefik.tls.certresolver": {"type": "str", "display": "Cert Resolver", "description": "Certificate resolver name (e.g., letsencrypt, staging)"},
+
+    # PostgreSQL
+    "postgres": {"type": "bool", "display": "Enable PostgreSQL", "description": "Enable PostgreSQL database"},
+    "postgres.host": {"type": "str", "display": "PostgreSQL Host", "description": "PostgreSQL host (e.g., localhost, postgres, db.example.com)"},
+
+    # Swarm
+    "swarm": {"type": "bool", "display": "Enable Swarm", "description": "Enable Docker Swarm mode (requires Docker Swarm to be initialized)"},
+    "swarm.replicas": {"type": "int", "display": "Replicas", "description": "Number of container instances", "default": 1},
+  }
 
 # Register the module
 registry.register(ComposeModule)
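
The renderer is not part of this commit, so the following is only a sketch of how flat dotted keys like those in `variables_spec` above could be expanded into the nested context that Jinja2 expressions such as `{{ traefik.tls.certresolver }}` or `{{ ports.http }}` expect. The function name `nest_dotted` is hypothetical.

```python
# Sketch (not from this commit): expand flat dotted names into nested dicts
# so Jinja2 attribute access resolves against the render context.
from typing import Any, Dict


def nest_dotted(values: Dict[str, Any]) -> Dict[str, Any]:
    context: Dict[str, Any] = {}
    for dotted, value in values.items():
        parts = dotted.split(".")
        node = context
        for part in parts[:-1]:
            child = node.get(part)
            if not isinstance(child, dict):
                # A boolean enabler like "ports" is replaced by a dict of its
                # children; it stays truthy, so {% if ports %} still works.
                child = {}
                node[part] = child
            node = child
        node[parts[-1]] = value
    return context


print(nest_dotted({"ports": True, "ports.http": "5678", "traefik.tls.certresolver": "letsencrypt"}))
# {'ports': {'http': '5678'}, 'traefik': {'tls': {'certresolver': 'letsencrypt'}}}
```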

+ 4 - 6
library/compose/n8n/compose.yaml

@@ -10,11 +10,9 @@ tags:
   - workflows
   - compose
 variables:
-  template.custom_config:
-    description: "Custom configuration for n8n"
-    hint: "Additional environment variables or settings"
-    type: "string"
-    default: ""
+  ports.http:
+    description: "Port for HTTP access to n8n"
+    default: "5678"
 ---
 services:
   {{ service_name }}:
@@ -63,7 +61,7 @@ services:
     restart: {{ restart_policy | default('unless-stopped') }}
     {% if ports %}
     ports:
-      - "5678:5678"
+      - "{{ ports.http | default('5678') }}:5678"
     {% endif %}
 
 volumes:

+ 6 - 0
library/compose/traefik/compose.yaml

@@ -8,6 +8,12 @@ tags:
   - traefik
   - reverse-proxy
   - load-balancer
+files:
+  - config/traefik.yaml
+variables:
+  acme_email:
+    display: "ACME Email"
+    description: "Email address for ACME (Let's Encrypt) registration"
 ---
 services:
   traefik:

+ 1 - 1
library/compose/traefik/config/traefik.yaml

@@ -37,7 +37,7 @@ entryPoints:
 certificatesResolvers:
   cloudflare:
     acme:
-      email: your-email@example.com  # <-- Change this to your email
+      email: {{ acme_email }}
       storage: /var/traefik/certs/cloudflare-acme.json
       caServer: "https://acme-v02.api.letsencrypt.org/directory"
       dnsChallenge: