Browse Source

Merge branch 'main' into feature

Jeremy Stretch 10 hours ago
parent
commit
5086f27aba
100 changed files with 4465 additions and 3197 deletions
  1. 1 1
      .github/ISSUE_TEMPLATE/01-feature_request.yaml
  2. 1 1
      .github/ISSUE_TEMPLATE/02-bug_report.yaml
  3. 1 1
      .github/ISSUE_TEMPLATE/03-performance.yaml
  4. 1 2
      base_requirements.txt
  5. 131 43
      contrib/openapi.json
  6. 8 0
      docs/configuration/miscellaneous.md
  7. 39 9
      docs/configuration/system.md
  8. 12 0
      docs/customization/custom-scripts.md
  9. 26 0
      docs/release-notes/version-4.5.md
  10. 1 1
      netbox/circuits/migrations/0057_default_ordering_indexes.py
  11. 2 1
      netbox/circuits/tables/virtual_circuits.py
  12. 46 48
      netbox/circuits/tests/test_tables.py
  13. 14 0
      netbox/circuits/tests/test_views.py
  14. 2 2
      netbox/circuits/ui/panels.py
  15. 4 4
      netbox/core/api/views.py
  16. 1 0
      netbox/core/tables/config.py
  17. 26 0
      netbox/core/tests/test_tables.py
  18. 8 8
      netbox/core/tests/test_views.py
  19. 10 10
      netbox/core/utils.py
  20. 4 4
      netbox/core/views.py
  21. 9 1
      netbox/dcim/api/serializers_/base.py
  22. 15 0
      netbox/dcim/cable_profiles.py
  23. 2 0
      netbox/dcim/choices.py
  24. 2 1
      netbox/dcim/filtersets.py
  25. 8 2
      netbox/dcim/forms/bulk_edit.py
  26. 1 0
      netbox/dcim/forms/common.py
  27. 2 2
      netbox/dcim/forms/filtersets.py
  28. 8 2
      netbox/dcim/graphql/filters.py
  29. 1 0
      netbox/dcim/graphql/types.py
  30. 15 0
      netbox/dcim/migrations/0227_alter_interface_speed_bigint.py
  31. 1 1
      netbox/dcim/migrations/0228_rack_group.py
  32. 1 1
      netbox/dcim/migrations/0229_cable_bundle.py
  33. 1 1
      netbox/dcim/migrations/0230_devicebay_modulebay_enabled.py
  34. 1 1
      netbox/dcim/migrations/0231_interface_rf_channel_frequency_precision.py
  35. 1 1
      netbox/dcim/migrations/0232_default_ordering_indexes.py
  36. 1 0
      netbox/dcim/models/cables.py
  37. 8 0
      netbox/dcim/models/device_component_templates.py
  38. 43 11
      netbox/dcim/models/device_components.py
  39. 9 0
      netbox/dcim/models/devices.py
  40. 8 0
      netbox/dcim/models/modules.py
  41. 21 1
      netbox/dcim/tables/devices.py
  42. 3 1
      netbox/dcim/tables/modules.py
  43. 2 2
      netbox/dcim/tests/test_api.py
  44. 2 2
      netbox/dcim/tests/test_filtersets.py
  45. 133 1
      netbox/dcim/tests/test_models.py
  46. 204 0
      netbox/dcim/tests/test_tables.py
  47. 21 4
      netbox/dcim/tests/test_views.py
  48. 3 3
      netbox/dcim/ui/panels.py
  49. 28 28
      netbox/dcim/utils.py
  50. 11 1
      netbox/extras/api/customfields.py
  51. 53 2
      netbox/extras/api/serializers_/scripts.py
  52. 1 0
      netbox/extras/api/urls.py
  53. 7 1
      netbox/extras/api/views.py
  54. 62 14
      netbox/extras/events.py
  55. 1 1
      netbox/extras/migrations/0137_default_ordering_indexes.py
  56. 3 2
      netbox/extras/tables/tables.py
  57. 74 4
      netbox/extras/tests/test_api.py
  58. 77 1
      netbox/extras/tests/test_customfields.py
  59. 94 0
      netbox/extras/tests/test_event_rules.py
  60. 85 0
      netbox/extras/tests/test_models.py
  61. 93 24
      netbox/extras/tests/test_tables.py
  62. 82 17
      netbox/extras/tests/test_utils.py
  63. 16 2
      netbox/extras/tests/test_views.py
  64. 23 9
      netbox/extras/utils.py
  65. 1 1
      netbox/ipam/migrations/0089_default_ordering_indexes.py
  66. 1 1
      netbox/ipam/tables/vlans.py
  67. 85 2
      netbox/ipam/tests/test_tables.py
  68. 4 3
      netbox/netbox/api/serializers/base.py
  69. 4 0
      netbox/netbox/configuration_testing.py
  70. 1 1
      netbox/netbox/models/features.py
  71. 1 0
      netbox/netbox/settings.py
  72. 9 2
      netbox/netbox/ui/attrs.py
  73. 2 2
      netbox/release.yaml
  74. 2 2
      netbox/templates/extras/inc/script_list_content.html
  75. 9 1
      netbox/templates/ui/attrs/nested_object.html
  76. 26 2
      netbox/templates/ui/attrs/object.html
  77. 1 1
      netbox/templates/ui/attrs/object_list.html
  78. 2 2
      netbox/templates/virtualization/panels/cluster_resources.html
  79. 2 2
      netbox/templates/virtualization/panels/virtual_machine_resources.html
  80. 1 1
      netbox/templates/virtualization/virtualdisk/attrs/size.html
  81. 26 0
      netbox/tenancy/tests/test_tables.py
  82. BIN
      netbox/translations/cs/LC_MESSAGES/django.mo
  83. 275 295
      netbox/translations/cs/LC_MESSAGES/django.po
  84. BIN
      netbox/translations/da/LC_MESSAGES/django.mo
  85. 275 295
      netbox/translations/da/LC_MESSAGES/django.po
  86. BIN
      netbox/translations/de/LC_MESSAGES/django.mo
  87. 277 297
      netbox/translations/de/LC_MESSAGES/django.po
  88. 233 233
      netbox/translations/en/LC_MESSAGES/django.po
  89. BIN
      netbox/translations/es/LC_MESSAGES/django.mo
  90. 275 295
      netbox/translations/es/LC_MESSAGES/django.po
  91. BIN
      netbox/translations/fr/LC_MESSAGES/django.mo
  92. 281 300
      netbox/translations/fr/LC_MESSAGES/django.po
  93. BIN
      netbox/translations/it/LC_MESSAGES/django.mo
  94. 275 295
      netbox/translations/it/LC_MESSAGES/django.po
  95. BIN
      netbox/translations/ja/LC_MESSAGES/django.mo
  96. 273 295
      netbox/translations/ja/LC_MESSAGES/django.po
  97. BIN
      netbox/translations/lv/LC_MESSAGES/django.mo
  98. 275 295
      netbox/translations/lv/LC_MESSAGES/django.po
  99. BIN
      netbox/translations/nl/LC_MESSAGES/django.mo
  100. 275 295
      netbox/translations/nl/LC_MESSAGES/django.po

+ 1 - 1
.github/ISSUE_TEMPLATE/01-feature_request.yaml

@@ -15,7 +15,7 @@ body:
     attributes:
     attributes:
       label: NetBox version
       label: NetBox version
       description: What version of NetBox are you currently running?
       description: What version of NetBox are you currently running?
-      placeholder: v4.5.6
+      placeholder: v4.5.7
     validations:
     validations:
       required: true
       required: true
   - type: dropdown
   - type: dropdown

+ 1 - 1
.github/ISSUE_TEMPLATE/02-bug_report.yaml

@@ -27,7 +27,7 @@ body:
     attributes:
     attributes:
       label: NetBox Version
       label: NetBox Version
       description: What version of NetBox are you currently running?
       description: What version of NetBox are you currently running?
-      placeholder: v4.5.6
+      placeholder: v4.5.7
     validations:
     validations:
       required: true
       required: true
   - type: dropdown
   - type: dropdown

+ 1 - 1
.github/ISSUE_TEMPLATE/03-performance.yaml

@@ -8,7 +8,7 @@ body:
     attributes:
     attributes:
       label: NetBox Version
       label: NetBox Version
       description: What version of NetBox are you currently running?
       description: What version of NetBox are you currently running?
-      placeholder: v4.5.6
+      placeholder: v4.5.7
     validations:
     validations:
       required: true
       required: true
   - type: dropdown
   - type: dropdown

+ 1 - 2
base_requirements.txt

@@ -49,8 +49,7 @@ django-rich
 
 
 # Django integration for RQ (Reqis queuing)
 # Django integration for RQ (Reqis queuing)
 # https://github.com/rq/django-rq/blob/master/CHANGELOG.md
 # https://github.com/rq/django-rq/blob/master/CHANGELOG.md
-# See https://github.com/netbox-community/netbox/issues/21696
-django-rq<4.0
+django-rq
 
 
 # Provides a variety of storage backends
 # Provides a variety of storage backends
 # https://github.com/jschneier/django-storages/blob/master/CHANGELOG.rst
 # https://github.com/jschneier/django-storages/blob/master/CHANGELOG.rst

+ 131 - 43
contrib/openapi.json

@@ -2,7 +2,7 @@
     "openapi": "3.0.3",
     "openapi": "3.0.3",
     "info": {
     "info": {
         "title": "NetBox REST API",
         "title": "NetBox REST API",
-        "version": "4.5.6",
+        "version": "4.5.7",
         "license": {
         "license": {
             "name": "Apache v2 License"
             "name": "Apache v2 License"
         }
         }
@@ -25468,7 +25468,7 @@
                             "type": "array",
                             "type": "array",
                             "items": {
                             "items": {
                                 "type": "string",
                                 "type": "string",
-                                "x-spec-enum-id": "5e0f85310f0184ea"
+                                "x-spec-enum-id": "f566e6df6572f5d0"
                             }
                             }
                         },
                         },
                         "explode": true,
                         "explode": true,
@@ -25488,7 +25488,7 @@
                             "type": "array",
                             "type": "array",
                             "items": {
                             "items": {
                                 "type": "string",
                                 "type": "string",
-                                "x-spec-enum-id": "5e0f85310f0184ea"
+                                "x-spec-enum-id": "f566e6df6572f5d0"
                             }
                             }
                         },
                         },
                         "explode": true,
                         "explode": true,
@@ -25501,7 +25501,7 @@
                             "type": "array",
                             "type": "array",
                             "items": {
                             "items": {
                                 "type": "string",
                                 "type": "string",
-                                "x-spec-enum-id": "5e0f85310f0184ea"
+                                "x-spec-enum-id": "f566e6df6572f5d0"
                             }
                             }
                         },
                         },
                         "explode": true,
                         "explode": true,
@@ -25514,7 +25514,7 @@
                             "type": "array",
                             "type": "array",
                             "items": {
                             "items": {
                                 "type": "string",
                                 "type": "string",
-                                "x-spec-enum-id": "5e0f85310f0184ea"
+                                "x-spec-enum-id": "f566e6df6572f5d0"
                             }
                             }
                         },
                         },
                         "explode": true,
                         "explode": true,
@@ -25527,7 +25527,7 @@
                             "type": "array",
                             "type": "array",
                             "items": {
                             "items": {
                                 "type": "string",
                                 "type": "string",
-                                "x-spec-enum-id": "5e0f85310f0184ea"
+                                "x-spec-enum-id": "f566e6df6572f5d0"
                             }
                             }
                         },
                         },
                         "explode": true,
                         "explode": true,
@@ -25540,7 +25540,7 @@
                             "type": "array",
                             "type": "array",
                             "items": {
                             "items": {
                                 "type": "string",
                                 "type": "string",
-                                "x-spec-enum-id": "5e0f85310f0184ea"
+                                "x-spec-enum-id": "f566e6df6572f5d0"
                             }
                             }
                         },
                         },
                         "explode": true,
                         "explode": true,
@@ -25553,7 +25553,7 @@
                             "type": "array",
                             "type": "array",
                             "items": {
                             "items": {
                                 "type": "string",
                                 "type": "string",
-                                "x-spec-enum-id": "5e0f85310f0184ea"
+                                "x-spec-enum-id": "f566e6df6572f5d0"
                             }
                             }
                         },
                         },
                         "explode": true,
                         "explode": true,
@@ -25566,7 +25566,7 @@
                             "type": "array",
                             "type": "array",
                             "items": {
                             "items": {
                                 "type": "string",
                                 "type": "string",
-                                "x-spec-enum-id": "5e0f85310f0184ea"
+                                "x-spec-enum-id": "f566e6df6572f5d0"
                             }
                             }
                         },
                         },
                         "explode": true,
                         "explode": true,
@@ -25579,7 +25579,7 @@
                             "type": "array",
                             "type": "array",
                             "items": {
                             "items": {
                                 "type": "string",
                                 "type": "string",
-                                "x-spec-enum-id": "5e0f85310f0184ea"
+                                "x-spec-enum-id": "f566e6df6572f5d0"
                             }
                             }
                         },
                         },
                         "explode": true,
                         "explode": true,
@@ -25592,7 +25592,7 @@
                             "type": "array",
                             "type": "array",
                             "items": {
                             "items": {
                                 "type": "string",
                                 "type": "string",
-                                "x-spec-enum-id": "5e0f85310f0184ea"
+                                "x-spec-enum-id": "f566e6df6572f5d0"
                             }
                             }
                         },
                         },
                         "explode": true,
                         "explode": true,
@@ -25605,7 +25605,7 @@
                             "type": "array",
                             "type": "array",
                             "items": {
                             "items": {
                                 "type": "string",
                                 "type": "string",
-                                "x-spec-enum-id": "5e0f85310f0184ea"
+                                "x-spec-enum-id": "f566e6df6572f5d0"
                             }
                             }
                         },
                         },
                         "explode": true,
                         "explode": true,
@@ -25618,7 +25618,7 @@
                             "type": "array",
                             "type": "array",
                             "items": {
                             "items": {
                                 "type": "string",
                                 "type": "string",
-                                "x-spec-enum-id": "5e0f85310f0184ea"
+                                "x-spec-enum-id": "f566e6df6572f5d0"
                             }
                             }
                         },
                         },
                         "explode": true,
                         "explode": true,
@@ -138591,6 +138591,50 @@
                 }
                 }
             }
             }
         },
         },
+        "/api/extras/scripts/upload/": {
+            "post": {
+                "operationId": "extras_scripts_upload_create",
+                "description": "Post a list of script module objects.",
+                "tags": [
+                    "extras"
+                ],
+                "requestBody": {
+                    "content": {
+                        "application/json": {
+                            "schema": {
+                                "$ref": "#/components/schemas/ScriptModuleRequest"
+                            }
+                        },
+                        "multipart/form-data": {
+                            "schema": {
+                                "$ref": "#/components/schemas/ScriptModuleRequest"
+                            }
+                        }
+                    },
+                    "required": true
+                },
+                "security": [
+                    {
+                        "cookieAuth": []
+                    },
+                    {
+                        "tokenAuth": []
+                    }
+                ],
+                "responses": {
+                    "201": {
+                        "content": {
+                            "application/json": {
+                                "schema": {
+                                    "$ref": "#/components/schemas/ScriptModule"
+                                }
+                            }
+                        },
+                        "description": ""
+                    }
+                }
+            }
+        },
         "/api/extras/subscriptions/": {
         "/api/extras/subscriptions/": {
             "get": {
             "get": {
                 "operationId": "extras_subscriptions_list",
                 "operationId": "extras_subscriptions_list",
@@ -228046,13 +228090,14 @@
                                     "trunk-4c6p",
                                     "trunk-4c6p",
                                     "trunk-4c8p",
                                     "trunk-4c8p",
                                     "trunk-8c4p",
                                     "trunk-8c4p",
+                                    "breakout-1c2p-2c1p",
                                     "breakout-1c4p-4c1p",
                                     "breakout-1c4p-4c1p",
                                     "breakout-1c6p-6c1p",
                                     "breakout-1c6p-6c1p",
                                     "breakout-2c4p-8c1p-shuffle"
                                     "breakout-2c4p-8c1p-shuffle"
                                 ],
                                 ],
                                 "type": "string",
                                 "type": "string",
-                                "description": "* `single-1c1p` - 1C1P\n* `single-1c2p` - 1C2P\n* `single-1c4p` - 1C4P\n* `single-1c6p` - 1C6P\n* `single-1c8p` - 1C8P\n* `single-1c12p` - 1C12P\n* `single-1c16p` - 1C16P\n* `trunk-2c1p` - 2C1P trunk\n* `trunk-2c2p` - 2C2P trunk\n* `trunk-2c4p` - 2C4P trunk\n* `trunk-2c4p-shuffle` - 2C4P trunk (shuffle)\n* `trunk-2c6p` - 2C6P trunk\n* `trunk-2c8p` - 2C8P trunk\n* `trunk-2c12p` - 2C12P trunk\n* `trunk-4c1p` - 4C1P trunk\n* `trunk-4c2p` - 4C2P trunk\n* `trunk-4c4p` - 4C4P trunk\n* `trunk-4c4p-shuffle` - 4C4P trunk (shuffle)\n* `trunk-4c6p` - 4C6P trunk\n* `trunk-4c8p` - 4C8P trunk\n* `trunk-8c4p` - 8C4P trunk\n* `breakout-1c4p-4c1p` - 1C4P:4C1P breakout\n* `breakout-1c6p-6c1p` - 1C6P:6C1P breakout\n* `breakout-2c4p-8c1p-shuffle` - 2C4P:8C1P breakout (shuffle)",
-                                "x-spec-enum-id": "5e0f85310f0184ea"
+                                "description": "* `single-1c1p` - 1C1P\n* `single-1c2p` - 1C2P\n* `single-1c4p` - 1C4P\n* `single-1c6p` - 1C6P\n* `single-1c8p` - 1C8P\n* `single-1c12p` - 1C12P\n* `single-1c16p` - 1C16P\n* `trunk-2c1p` - 2C1P trunk\n* `trunk-2c2p` - 2C2P trunk\n* `trunk-2c4p` - 2C4P trunk\n* `trunk-2c4p-shuffle` - 2C4P trunk (shuffle)\n* `trunk-2c6p` - 2C6P trunk\n* `trunk-2c8p` - 2C8P trunk\n* `trunk-2c12p` - 2C12P trunk\n* `trunk-4c1p` - 4C1P trunk\n* `trunk-4c2p` - 4C2P trunk\n* `trunk-4c4p` - 4C4P trunk\n* `trunk-4c4p-shuffle` - 4C4P trunk (shuffle)\n* `trunk-4c6p` - 4C6P trunk\n* `trunk-4c8p` - 4C8P trunk\n* `trunk-8c4p` - 8C4P trunk\n* `breakout-1c2p-2c1p` - 1C2P:2C1P breakout\n* `breakout-1c4p-4c1p` - 1C4P:4C1P breakout\n* `breakout-1c6p-6c1p` - 1C6P:6C1P breakout\n* `breakout-2c4p-8c1p-shuffle` - 2C4P:8C1P breakout (shuffle)",
+                                "x-spec-enum-id": "f566e6df6572f5d0"
                             },
                             },
                             "label": {
                             "label": {
                                 "type": "string",
                                 "type": "string",
@@ -228078,6 +228123,7 @@
                                     "4C6P trunk",
                                     "4C6P trunk",
                                     "4C8P trunk",
                                     "4C8P trunk",
                                     "8C4P trunk",
                                     "8C4P trunk",
+                                    "1C2P:2C1P breakout",
                                     "1C4P:4C1P breakout",
                                     "1C4P:4C1P breakout",
                                     "1C6P:6C1P breakout",
                                     "1C6P:6C1P breakout",
                                     "2C4P:8C1P breakout (shuffle)"
                                     "2C4P:8C1P breakout (shuffle)"
@@ -228282,13 +228328,14 @@
                             "trunk-4c6p",
                             "trunk-4c6p",
                             "trunk-4c8p",
                             "trunk-4c8p",
                             "trunk-8c4p",
                             "trunk-8c4p",
+                            "breakout-1c2p-2c1p",
                             "breakout-1c4p-4c1p",
                             "breakout-1c4p-4c1p",
                             "breakout-1c6p-6c1p",
                             "breakout-1c6p-6c1p",
                             "breakout-2c4p-8c1p-shuffle"
                             "breakout-2c4p-8c1p-shuffle"
                         ],
                         ],
                         "type": "string",
                         "type": "string",
-                        "description": "* `single-1c1p` - 1C1P\n* `single-1c2p` - 1C2P\n* `single-1c4p` - 1C4P\n* `single-1c6p` - 1C6P\n* `single-1c8p` - 1C8P\n* `single-1c12p` - 1C12P\n* `single-1c16p` - 1C16P\n* `trunk-2c1p` - 2C1P trunk\n* `trunk-2c2p` - 2C2P trunk\n* `trunk-2c4p` - 2C4P trunk\n* `trunk-2c4p-shuffle` - 2C4P trunk (shuffle)\n* `trunk-2c6p` - 2C6P trunk\n* `trunk-2c8p` - 2C8P trunk\n* `trunk-2c12p` - 2C12P trunk\n* `trunk-4c1p` - 4C1P trunk\n* `trunk-4c2p` - 4C2P trunk\n* `trunk-4c4p` - 4C4P trunk\n* `trunk-4c4p-shuffle` - 4C4P trunk (shuffle)\n* `trunk-4c6p` - 4C6P trunk\n* `trunk-4c8p` - 4C8P trunk\n* `trunk-8c4p` - 8C4P trunk\n* `breakout-1c4p-4c1p` - 1C4P:4C1P breakout\n* `breakout-1c6p-6c1p` - 1C6P:6C1P breakout\n* `breakout-2c4p-8c1p-shuffle` - 2C4P:8C1P breakout (shuffle)",
-                        "x-spec-enum-id": "5e0f85310f0184ea"
+                        "description": "* `single-1c1p` - 1C1P\n* `single-1c2p` - 1C2P\n* `single-1c4p` - 1C4P\n* `single-1c6p` - 1C6P\n* `single-1c8p` - 1C8P\n* `single-1c12p` - 1C12P\n* `single-1c16p` - 1C16P\n* `trunk-2c1p` - 2C1P trunk\n* `trunk-2c2p` - 2C2P trunk\n* `trunk-2c4p` - 2C4P trunk\n* `trunk-2c4p-shuffle` - 2C4P trunk (shuffle)\n* `trunk-2c6p` - 2C6P trunk\n* `trunk-2c8p` - 2C8P trunk\n* `trunk-2c12p` - 2C12P trunk\n* `trunk-4c1p` - 4C1P trunk\n* `trunk-4c2p` - 4C2P trunk\n* `trunk-4c4p` - 4C4P trunk\n* `trunk-4c4p-shuffle` - 4C4P trunk (shuffle)\n* `trunk-4c6p` - 4C6P trunk\n* `trunk-4c8p` - 4C8P trunk\n* `trunk-8c4p` - 8C4P trunk\n* `breakout-1c2p-2c1p` - 1C2P:2C1P breakout\n* `breakout-1c4p-4c1p` - 1C4P:4C1P breakout\n* `breakout-1c6p-6c1p` - 1C6P:6C1P breakout\n* `breakout-2c4p-8c1p-shuffle` - 2C4P:8C1P breakout (shuffle)",
+                        "x-spec-enum-id": "f566e6df6572f5d0"
                     },
                     },
                     "tenant": {
                     "tenant": {
                         "oneOf": [
                         "oneOf": [
@@ -254488,8 +254535,7 @@
                     "size": {
                     "size": {
                         "type": "integer",
                         "type": "integer",
                         "maximum": 2147483647,
                         "maximum": 2147483647,
-                        "minimum": 0,
-                        "title": "Size (MB)"
+                        "minimum": 0
                     },
                     },
                     "owner": {
                     "owner": {
                         "oneOf": [
                         "oneOf": [
@@ -254774,14 +254820,15 @@
                             "trunk-4c6p",
                             "trunk-4c6p",
                             "trunk-4c8p",
                             "trunk-4c8p",
                             "trunk-8c4p",
                             "trunk-8c4p",
+                            "breakout-1c2p-2c1p",
                             "breakout-1c4p-4c1p",
                             "breakout-1c4p-4c1p",
                             "breakout-1c6p-6c1p",
                             "breakout-1c6p-6c1p",
                             "breakout-2c4p-8c1p-shuffle",
                             "breakout-2c4p-8c1p-shuffle",
                             ""
                             ""
                         ],
                         ],
                         "type": "string",
                         "type": "string",
-                        "description": "* `single-1c1p` - 1C1P\n* `single-1c2p` - 1C2P\n* `single-1c4p` - 1C4P\n* `single-1c6p` - 1C6P\n* `single-1c8p` - 1C8P\n* `single-1c12p` - 1C12P\n* `single-1c16p` - 1C16P\n* `trunk-2c1p` - 2C1P trunk\n* `trunk-2c2p` - 2C2P trunk\n* `trunk-2c4p` - 2C4P trunk\n* `trunk-2c4p-shuffle` - 2C4P trunk (shuffle)\n* `trunk-2c6p` - 2C6P trunk\n* `trunk-2c8p` - 2C8P trunk\n* `trunk-2c12p` - 2C12P trunk\n* `trunk-4c1p` - 4C1P trunk\n* `trunk-4c2p` - 4C2P trunk\n* `trunk-4c4p` - 4C4P trunk\n* `trunk-4c4p-shuffle` - 4C4P trunk (shuffle)\n* `trunk-4c6p` - 4C6P trunk\n* `trunk-4c8p` - 4C8P trunk\n* `trunk-8c4p` - 8C4P trunk\n* `breakout-1c4p-4c1p` - 1C4P:4C1P breakout\n* `breakout-1c6p-6c1p` - 1C6P:6C1P breakout\n* `breakout-2c4p-8c1p-shuffle` - 2C4P:8C1P breakout (shuffle)",
-                        "x-spec-enum-id": "5e0f85310f0184ea"
+                        "description": "* `single-1c1p` - 1C1P\n* `single-1c2p` - 1C2P\n* `single-1c4p` - 1C4P\n* `single-1c6p` - 1C6P\n* `single-1c8p` - 1C8P\n* `single-1c12p` - 1C12P\n* `single-1c16p` - 1C16P\n* `trunk-2c1p` - 2C1P trunk\n* `trunk-2c2p` - 2C2P trunk\n* `trunk-2c4p` - 2C4P trunk\n* `trunk-2c4p-shuffle` - 2C4P trunk (shuffle)\n* `trunk-2c6p` - 2C6P trunk\n* `trunk-2c8p` - 2C8P trunk\n* `trunk-2c12p` - 2C12P trunk\n* `trunk-4c1p` - 4C1P trunk\n* `trunk-4c2p` - 4C2P trunk\n* `trunk-4c4p` - 4C4P trunk\n* `trunk-4c4p-shuffle` - 4C4P trunk (shuffle)\n* `trunk-4c6p` - 4C6P trunk\n* `trunk-4c8p` - 4C8P trunk\n* `trunk-8c4p` - 8C4P trunk\n* `breakout-1c2p-2c1p` - 1C2P:2C1P breakout\n* `breakout-1c4p-4c1p` - 1C4P:4C1P breakout\n* `breakout-1c6p-6c1p` - 1C6P:6C1P breakout\n* `breakout-2c4p-8c1p-shuffle` - 2C4P:8C1P breakout (shuffle)",
+                        "x-spec-enum-id": "f566e6df6572f5d0"
                     },
                     },
                     "tenant": {
                     "tenant": {
                         "oneOf": [
                         "oneOf": [
@@ -262819,15 +262866,13 @@
                         "type": "integer",
                         "type": "integer",
                         "maximum": 2147483647,
                         "maximum": 2147483647,
                         "minimum": 0,
                         "minimum": 0,
-                        "nullable": true,
-                        "title": "Memory (MB)"
+                        "nullable": true
                     },
                     },
                     "disk": {
                     "disk": {
                         "type": "integer",
                         "type": "integer",
                         "maximum": 2147483647,
                         "maximum": 2147483647,
                         "minimum": 0,
                         "minimum": 0,
-                        "nullable": true,
-                        "title": "Disk (MB)"
+                        "nullable": true
                     },
                     },
                     "description": {
                     "description": {
                         "type": "string",
                         "type": "string",
@@ -270340,6 +270385,56 @@
                     "data"
                     "data"
                 ]
                 ]
             },
             },
+            "ScriptModule": {
+                "type": "object",
+                "description": "Extends the built-in ModelSerializer to enforce calling full_clean() on a copy of the associated instance during\nvalidation. (DRF does not do this by default; see https://github.com/encode/django-rest-framework/issues/3144)",
+                "properties": {
+                    "id": {
+                        "type": "integer",
+                        "readOnly": true
+                    },
+                    "display": {
+                        "type": "string",
+                        "readOnly": true
+                    },
+                    "file_path": {
+                        "type": "string",
+                        "readOnly": true
+                    },
+                    "created": {
+                        "type": "string",
+                        "format": "date-time",
+                        "readOnly": true
+                    },
+                    "last_updated": {
+                        "type": "string",
+                        "format": "date-time",
+                        "readOnly": true,
+                        "nullable": true
+                    }
+                },
+                "required": [
+                    "created",
+                    "display",
+                    "file_path",
+                    "id",
+                    "last_updated"
+                ]
+            },
+            "ScriptModuleRequest": {
+                "type": "object",
+                "description": "Extends the built-in ModelSerializer to enforce calling full_clean() on a copy of the associated instance during\nvalidation. (DRF does not do this by default; see https://github.com/encode/django-rest-framework/issues/3144)",
+                "properties": {
+                    "file": {
+                        "type": "string",
+                        "format": "binary",
+                        "writeOnly": true
+                    }
+                },
+                "required": [
+                    "file"
+                ]
+            },
             "Service": {
             "Service": {
                 "type": "object",
                 "type": "object",
                 "description": "Base serializer class for models inheriting from PrimaryModel.",
                 "description": "Base serializer class for models inheriting from PrimaryModel.",
@@ -275384,8 +275479,7 @@
                     "size": {
                     "size": {
                         "type": "integer",
                         "type": "integer",
                         "maximum": 2147483647,
                         "maximum": 2147483647,
-                        "minimum": 0,
-                        "title": "Size (MB)"
+                        "minimum": 0
                     },
                     },
                     "owner": {
                     "owner": {
                         "allOf": [
                         "allOf": [
@@ -275456,8 +275550,7 @@
                     "size": {
                     "size": {
                         "type": "integer",
                         "type": "integer",
                         "maximum": 2147483647,
                         "maximum": 2147483647,
-                        "minimum": 0,
-                        "title": "Size (MB)"
+                        "minimum": 0
                     },
                     },
                     "owner": {
                     "owner": {
                         "oneOf": [
                         "oneOf": [
@@ -275662,15 +275755,13 @@
                         "type": "integer",
                         "type": "integer",
                         "maximum": 2147483647,
                         "maximum": 2147483647,
                         "minimum": 0,
                         "minimum": 0,
-                        "nullable": true,
-                        "title": "Memory (MB)"
+                        "nullable": true
                     },
                     },
                     "disk": {
                     "disk": {
                         "type": "integer",
                         "type": "integer",
                         "maximum": 2147483647,
                         "maximum": 2147483647,
                         "minimum": 0,
                         "minimum": 0,
-                        "nullable": true,
-                        "title": "Disk (MB)"
+                        "nullable": true
                     },
                     },
                     "description": {
                     "description": {
                         "type": "string",
                         "type": "string",
@@ -275926,15 +276017,13 @@
                         "type": "integer",
                         "type": "integer",
                         "maximum": 2147483647,
                         "maximum": 2147483647,
                         "minimum": 0,
                         "minimum": 0,
-                        "nullable": true,
-                        "title": "Memory (MB)"
+                        "nullable": true
                     },
                     },
                     "disk": {
                     "disk": {
                         "type": "integer",
                         "type": "integer",
                         "maximum": 2147483647,
                         "maximum": 2147483647,
                         "minimum": 0,
                         "minimum": 0,
-                        "nullable": true,
-                        "title": "Disk (MB)"
+                        "nullable": true
                     },
                     },
                     "description": {
                     "description": {
                         "type": "string",
                         "type": "string",
@@ -277220,14 +277309,15 @@
                             "trunk-4c6p",
                             "trunk-4c6p",
                             "trunk-4c8p",
                             "trunk-4c8p",
                             "trunk-8c4p",
                             "trunk-8c4p",
+                            "breakout-1c2p-2c1p",
                             "breakout-1c4p-4c1p",
                             "breakout-1c4p-4c1p",
                             "breakout-1c6p-6c1p",
                             "breakout-1c6p-6c1p",
                             "breakout-2c4p-8c1p-shuffle",
                             "breakout-2c4p-8c1p-shuffle",
                             ""
                             ""
                         ],
                         ],
                         "type": "string",
                         "type": "string",
-                        "description": "* `single-1c1p` - 1C1P\n* `single-1c2p` - 1C2P\n* `single-1c4p` - 1C4P\n* `single-1c6p` - 1C6P\n* `single-1c8p` - 1C8P\n* `single-1c12p` - 1C12P\n* `single-1c16p` - 1C16P\n* `trunk-2c1p` - 2C1P trunk\n* `trunk-2c2p` - 2C2P trunk\n* `trunk-2c4p` - 2C4P trunk\n* `trunk-2c4p-shuffle` - 2C4P trunk (shuffle)\n* `trunk-2c6p` - 2C6P trunk\n* `trunk-2c8p` - 2C8P trunk\n* `trunk-2c12p` - 2C12P trunk\n* `trunk-4c1p` - 4C1P trunk\n* `trunk-4c2p` - 4C2P trunk\n* `trunk-4c4p` - 4C4P trunk\n* `trunk-4c4p-shuffle` - 4C4P trunk (shuffle)\n* `trunk-4c6p` - 4C6P trunk\n* `trunk-4c8p` - 4C8P trunk\n* `trunk-8c4p` - 8C4P trunk\n* `breakout-1c4p-4c1p` - 1C4P:4C1P breakout\n* `breakout-1c6p-6c1p` - 1C6P:6C1P breakout\n* `breakout-2c4p-8c1p-shuffle` - 2C4P:8C1P breakout (shuffle)",
-                        "x-spec-enum-id": "5e0f85310f0184ea"
+                        "description": "* `single-1c1p` - 1C1P\n* `single-1c2p` - 1C2P\n* `single-1c4p` - 1C4P\n* `single-1c6p` - 1C6P\n* `single-1c8p` - 1C8P\n* `single-1c12p` - 1C12P\n* `single-1c16p` - 1C16P\n* `trunk-2c1p` - 2C1P trunk\n* `trunk-2c2p` - 2C2P trunk\n* `trunk-2c4p` - 2C4P trunk\n* `trunk-2c4p-shuffle` - 2C4P trunk (shuffle)\n* `trunk-2c6p` - 2C6P trunk\n* `trunk-2c8p` - 2C8P trunk\n* `trunk-2c12p` - 2C12P trunk\n* `trunk-4c1p` - 4C1P trunk\n* `trunk-4c2p` - 4C2P trunk\n* `trunk-4c4p` - 4C4P trunk\n* `trunk-4c4p-shuffle` - 4C4P trunk (shuffle)\n* `trunk-4c6p` - 4C6P trunk\n* `trunk-4c8p` - 4C8P trunk\n* `trunk-8c4p` - 8C4P trunk\n* `breakout-1c2p-2c1p` - 1C2P:2C1P breakout\n* `breakout-1c4p-4c1p` - 1C4P:4C1P breakout\n* `breakout-1c6p-6c1p` - 1C6P:6C1P breakout\n* `breakout-2c4p-8c1p-shuffle` - 2C4P:8C1P breakout (shuffle)",
+                        "x-spec-enum-id": "f566e6df6572f5d0"
                     },
                     },
                     "tenant": {
                     "tenant": {
                         "oneOf": [
                         "oneOf": [
@@ -285520,15 +285610,13 @@
                         "type": "integer",
                         "type": "integer",
                         "maximum": 2147483647,
                         "maximum": 2147483647,
                         "minimum": 0,
                         "minimum": 0,
-                        "nullable": true,
-                        "title": "Memory (MB)"
+                        "nullable": true
                     },
                     },
                     "disk": {
                     "disk": {
                         "type": "integer",
                         "type": "integer",
                         "maximum": 2147483647,
                         "maximum": 2147483647,
                         "minimum": 0,
                         "minimum": 0,
-                        "nullable": true,
-                        "title": "Disk (MB)"
+                        "nullable": true
                     },
                     },
                     "description": {
                     "description": {
                         "type": "string",
                         "type": "string",

+ 8 - 0
docs/configuration/miscellaneous.md

@@ -237,6 +237,14 @@ This parameter defines the URL of the repository that will be checked for new Ne
 
 
 ---
 ---
 
 
+## RQ
+
+Default: `{}` (Empty)
+
+This is a wrapper for passing global configuration parameters to [Django RQ](https://github.com/rq/django-rq) to customize its behavior. It is employed within NetBox primarily to alter conditions during testing.
+
+---
+
 ## RQ_DEFAULT_TIMEOUT
 ## RQ_DEFAULT_TIMEOUT
 
 
 Default: `300`
 Default: `300`

+ 39 - 9
docs/configuration/system.md

@@ -248,21 +248,49 @@ STORAGES = {
 
 
 Within the `STORAGES` dictionary, `"default"` is used for image uploads, `"staticfiles"` is for static files and `"scripts"` is used for custom scripts.
 Within the `STORAGES` dictionary, `"default"` is used for image uploads, `"staticfiles"` is for static files and `"scripts"` is used for custom scripts.
 
 
-If using a remote storage like S3, define the config as `STORAGES[key]["OPTIONS"]` for each storage item as needed. For example:
+If using a remote storage such as S3 or an S3-compatible service, define the configuration as `STORAGES[key]["OPTIONS"]` for each storage item as needed. For example:
 
 
 ```python
 ```python
-STORAGES = { 
-    "scripts": { 
-        "BACKEND": "storages.backends.s3boto3.S3Boto3Storage", 
-        "OPTIONS": { 
-            'access_key': 'access key', 
+STORAGES = {
+    'default': {
+        'BACKEND': 'storages.backends.s3.S3Storage',
+        'OPTIONS': {
+            'bucket_name': 'netbox',
+            'access_key': 'access key',
             'secret_key': 'secret key',
             'secret_key': 'secret key',
-            "allow_overwrite": True,
-        }
-    }, 
+            'region_name': 'us-east-1',
+            'endpoint_url': 'https://s3.example.com',
+            'location': 'media/',
+        },
+    },
+    'staticfiles': {
+        'BACKEND': 'storages.backends.s3.S3Storage',
+        'OPTIONS': {
+            'bucket_name': 'netbox',
+            'access_key': 'access key',
+            'secret_key': 'secret key',
+            'region_name': 'us-east-1',
+            'endpoint_url': 'https://s3.example.com',
+            'location': 'static/',
+        },
+    },
+    'scripts': {
+        'BACKEND': 'storages.backends.s3.S3Storage',
+        'OPTIONS': {
+            'bucket_name': 'netbox',
+            'access_key': 'access key',
+            'secret_key': 'secret key',
+            'region_name': 'us-east-1',
+            'endpoint_url': 'https://s3.example.com',
+            'location': 'scripts/',
+            'file_overwrite': True,
+        },
+    },
 }
 }
 ```
 ```
 
 
+`bucket_name` is required for `S3Storage`. When using an S3-compatible service, set `region_name` and `endpoint_url` according to your provider.
+
 The specific configuration settings for each storage backend can be found in the [django-storages documentation](https://django-storages.readthedocs.io/en/latest/index.html).
 The specific configuration settings for each storage backend can be found in the [django-storages documentation](https://django-storages.readthedocs.io/en/latest/index.html).
 
 
 !!! note
 !!! note
@@ -286,6 +314,7 @@ STORAGES = {
             'bucket_name': os.environ.get('AWS_STORAGE_BUCKET_NAME'),
             'bucket_name': os.environ.get('AWS_STORAGE_BUCKET_NAME'),
             'access_key': os.environ.get('AWS_S3_ACCESS_KEY_ID'),
             'access_key': os.environ.get('AWS_S3_ACCESS_KEY_ID'),
             'secret_key': os.environ.get('AWS_S3_SECRET_ACCESS_KEY'),
             'secret_key': os.environ.get('AWS_S3_SECRET_ACCESS_KEY'),
+            'region_name': os.environ.get('AWS_S3_REGION_NAME'),
             'endpoint_url': os.environ.get('AWS_S3_ENDPOINT_URL'),
             'endpoint_url': os.environ.get('AWS_S3_ENDPOINT_URL'),
             'location': 'media/',
             'location': 'media/',
         }
         }
@@ -296,6 +325,7 @@ STORAGES = {
             'bucket_name': os.environ.get('AWS_STORAGE_BUCKET_NAME'),
             'bucket_name': os.environ.get('AWS_STORAGE_BUCKET_NAME'),
             'access_key': os.environ.get('AWS_S3_ACCESS_KEY_ID'),
             'access_key': os.environ.get('AWS_S3_ACCESS_KEY_ID'),
             'secret_key': os.environ.get('AWS_S3_SECRET_ACCESS_KEY'),
             'secret_key': os.environ.get('AWS_S3_SECRET_ACCESS_KEY'),
+            'region_name': os.environ.get('AWS_S3_REGION_NAME'),
             'endpoint_url': os.environ.get('AWS_S3_ENDPOINT_URL'),
             'endpoint_url': os.environ.get('AWS_S3_ENDPOINT_URL'),
             'location': 'static/',
             'location': 'static/',
         }
         }

+ 12 - 0
docs/customization/custom-scripts.md

@@ -384,6 +384,18 @@ A calendar date. Returns a `datetime.date` object.
 
 
 A complete date & time. Returns a `datetime.datetime` object.
 A complete date & time. Returns a `datetime.datetime` object.
 
 
+## Uploading Scripts via the API
+
+Script modules can be uploaded to NetBox via the REST API by sending a `multipart/form-data` POST request to `/api/extras/scripts/upload/`. The caller must have the `extras.add_scriptmodule` and `core.add_managedfile` permissions.
+
+```no-highlight
+curl -X POST \
+-H "Authorization: Token $TOKEN" \
+-H "Accept: application/json; indent=4" \
+-F "file=@/path/to/myscript.py" \
+http://netbox/api/extras/scripts/upload/
+```
+
 ## Running Custom Scripts
 ## Running Custom Scripts
 
 
 !!! note
 !!! note

+ 26 - 0
docs/release-notes/version-4.5.md

@@ -1,5 +1,31 @@
 # NetBox v4.5
 # NetBox v4.5
 
 
+## v4.5.7 (2026-04-03)
+
+### Enhancements
+
+* [#21095](https://github.com/netbox-community/netbox/issues/21095) - Adopt IEC unit labels (e.g. GiB) for virtual machine resources
+* [#21696](https://github.com/netbox-community/netbox/issues/21696) - Add support for django-rq 4.0 and introduce `RQ` configuration parameter
+* [#21701](https://github.com/netbox-community/netbox/issues/21701) - Support uploading custom scripts via the REST API (`/api/extras/scripts/upload/`)
+* [#21760](https://github.com/netbox-community/netbox/issues/21760) - Add a 1C2P:2C1P breakout cable profile
+
+### Performance Improvements
+
+* [#21655](https://github.com/netbox-community/netbox/issues/21655) - Optimize queries for object and multi-object type custom fields
+
+### Bug Fixes
+
+* [#20474](https://github.com/netbox-community/netbox/issues/20474) - Fix installation of modules with placeholder values in component names
+* [#21498](https://github.com/netbox-community/netbox/issues/21498) - Fix server error triggered by event rules referencing deleted objects
+* [#21533](https://github.com/netbox-community/netbox/issues/21533) - Ensure read-only fields are included in REST API responses upon object creation
+* [#21535](https://github.com/netbox-community/netbox/issues/21535) - Fix filtering of object-type custom fields when "is empty" is selected
+* [#21784](https://github.com/netbox-community/netbox/issues/21784) - Fix `AttributeError` exception when sorting a table as an anonymous user
+* [#21808](https://github.com/netbox-community/netbox/issues/21808) - Fix `RelatedObjectDoesNotExist` exception when viewing an interface with a virtual circuit termination
+* [#21810](https://github.com/netbox-community/netbox/issues/21810) - Fix `AttributeError` exception when viewing virtual chassis member
+* [#21825](https://github.com/netbox-community/netbox/issues/21825) - Fix sorting by broken columns in several object lists
+
+---
+
 ## v4.5.6 (2026-03-31)
 ## v4.5.6 (2026-03-31)
 
 
 ### Enhancements
 ### Enhancements

+ 1 - 1
netbox/circuits/migrations/0057_default_ordering_indexes.py

@@ -5,7 +5,7 @@ class Migration(migrations.Migration):
     dependencies = [
     dependencies = [
         ('circuits', '0056_gfk_indexes'),
         ('circuits', '0056_gfk_indexes'),
         ('contenttypes', '0002_remove_content_type_name'),
         ('contenttypes', '0002_remove_content_type_name'),
-        ('dcim', '0230_interface_rf_channel_frequency_precision'),
+        ('dcim', '0231_interface_rf_channel_frequency_precision'),
         ('extras', '0136_customfield_validation_schema'),
         ('extras', '0136_customfield_validation_schema'),
         ('tenancy', '0023_add_mptt_tree_indexes'),
         ('tenancy', '0023_add_mptt_tree_indexes'),
         ('users', '0015_owner'),
         ('users', '0015_owner'),

+ 2 - 1
netbox/circuits/tables/virtual_circuits.py

@@ -95,6 +95,7 @@ class VirtualCircuitTerminationTable(NetBoxTable):
         verbose_name=_('Provider network')
         verbose_name=_('Provider network')
     )
     )
     provider_account = tables.Column(
     provider_account = tables.Column(
+        accessor=tables.A('virtual_circuit__provider_account'),
         linkify=True,
         linkify=True,
         verbose_name=_('Account')
         verbose_name=_('Account')
     )
     )
@@ -112,7 +113,7 @@ class VirtualCircuitTerminationTable(NetBoxTable):
     class Meta(NetBoxTable.Meta):
     class Meta(NetBoxTable.Meta):
         model = VirtualCircuitTermination
         model = VirtualCircuitTermination
         fields = (
         fields = (
-            'pk', 'id', 'virtual_circuit', 'provider', 'provider_network', 'provider_account', 'role', 'interfaces',
+            'pk', 'id', 'virtual_circuit', 'provider', 'provider_network', 'provider_account', 'role', 'interface',
             'description', 'created', 'last_updated', 'actions',
             'description', 'created', 'last_updated', 'actions',
         )
         )
         default_columns = (
         default_columns = (

+ 46 - 48
netbox/circuits/tests/test_tables.py

@@ -1,48 +1,46 @@
-from django.test import RequestFactory, TestCase, tag
-
-from circuits.models import CircuitGroupAssignment, CircuitTermination
-from circuits.tables import CircuitGroupAssignmentTable, CircuitTerminationTable
-
-
-@tag('regression')
-class CircuitTerminationTableTest(TestCase):
-    def test_every_orderable_field_does_not_throw_exception(self):
-        terminations = CircuitTermination.objects.all()
-        disallowed = {
-            'actions',
-        }
-
-        orderable_columns = [
-            column.name
-            for column in CircuitTerminationTable(terminations).columns
-            if column.orderable and column.name not in disallowed
-        ]
-        fake_request = RequestFactory().get('/')
-
-        for col in orderable_columns:
-            for direction in ('-', ''):
-                table = CircuitTerminationTable(terminations)
-                table.order_by = f'{direction}{col}'
-                table.as_html(fake_request)
-
-
-@tag('regression')
-class CircuitGroupAssignmentTableTest(TestCase):
-    def test_every_orderable_field_does_not_throw_exception(self):
-        assignment = CircuitGroupAssignment.objects.all()
-        disallowed = {
-            'actions',
-        }
-
-        orderable_columns = [
-            column.name
-            for column in CircuitGroupAssignmentTable(assignment).columns
-            if column.orderable and column.name not in disallowed
-        ]
-        fake_request = RequestFactory().get('/')
-
-        for col in orderable_columns:
-            for direction in ('-', ''):
-                table = CircuitGroupAssignmentTable(assignment)
-                table.order_by = f'{direction}{col}'
-                table.as_html(fake_request)
+from circuits.tables import *
+from utilities.testing import TableTestCases
+
+
+class CircuitTypeTableTest(TableTestCases.StandardTableTestCase):
+    table = CircuitTypeTable
+
+
+class CircuitTableTest(TableTestCases.StandardTableTestCase):
+    table = CircuitTable
+
+
+class CircuitTerminationTableTest(TableTestCases.StandardTableTestCase):
+    table = CircuitTerminationTable
+
+
+class CircuitGroupTableTest(TableTestCases.StandardTableTestCase):
+    table = CircuitGroupTable
+
+
+class CircuitGroupAssignmentTableTest(TableTestCases.StandardTableTestCase):
+    table = CircuitGroupAssignmentTable
+
+
+class ProviderTableTest(TableTestCases.StandardTableTestCase):
+    table = ProviderTable
+
+
+class ProviderAccountTableTest(TableTestCases.StandardTableTestCase):
+    table = ProviderAccountTable
+
+
+class ProviderNetworkTableTest(TableTestCases.StandardTableTestCase):
+    table = ProviderNetworkTable
+
+
+class VirtualCircuitTypeTableTest(TableTestCases.StandardTableTestCase):
+    table = VirtualCircuitTypeTable
+
+
+class VirtualCircuitTableTest(TableTestCases.StandardTableTestCase):
+    table = VirtualCircuitTable
+
+
+class VirtualCircuitTerminationTableTest(TableTestCases.StandardTableTestCase):
+    table = VirtualCircuitTerminationTable

+ 14 - 0
netbox/circuits/tests/test_views.py

@@ -196,6 +196,20 @@ class CircuitTestCase(ViewTestCases.PrimaryObjectViewTestCase):
             'comments': 'New comments',
             'comments': 'New comments',
         }
         }
 
 
+    def test_circuit_type_display_colored(self):
+        circuit_type = CircuitType.objects.first()
+        circuit_type.color = '12ab34'
+        circuit_type.save()
+
+        circuit = Circuit.objects.first()
+
+        self.add_permissions('circuits.view_circuit')
+        response = self.client.get(circuit.get_absolute_url())
+
+        self.assertHttpStatus(response, 200)
+        self.assertContains(response, circuit_type.name)
+        self.assertContains(response, 'background-color: #12ab34')
+
     @override_settings(EXEMPT_VIEW_PERMISSIONS=['*'], EXEMPT_EXCLUDE_MODELS=[])
     @override_settings(EXEMPT_VIEW_PERMISSIONS=['*'], EXEMPT_EXCLUDE_MODELS=[])
     def test_bulk_import_objects_with_terminations(self):
     def test_bulk_import_objects_with_terminations(self):
         site = Site.objects.first()
         site = Site.objects.first()

+ 2 - 2
netbox/circuits/ui/panels.py

@@ -89,7 +89,7 @@ class CircuitPanel(panels.ObjectAttributesPanel):
     provider = attrs.RelatedObjectAttr('provider', linkify=True)
     provider = attrs.RelatedObjectAttr('provider', linkify=True)
     provider_account = attrs.RelatedObjectAttr('provider_account', linkify=True)
     provider_account = attrs.RelatedObjectAttr('provider_account', linkify=True)
     cid = attrs.TextAttr('cid', label=_('Circuit ID'), style='font-monospace', copy_button=True)
     cid = attrs.TextAttr('cid', label=_('Circuit ID'), style='font-monospace', copy_button=True)
-    type = attrs.RelatedObjectAttr('type', linkify=True)
+    type = attrs.RelatedObjectAttr('type', linkify=True, colored=True)
     status = attrs.ChoiceAttr('status')
     status = attrs.ChoiceAttr('status')
     distance = attrs.NumericAttr('distance', unit_accessor='get_distance_unit_display')
     distance = attrs.NumericAttr('distance', unit_accessor='get_distance_unit_display')
     tenant = attrs.RelatedObjectAttr('tenant', linkify=True, grouped_by='group')
     tenant = attrs.RelatedObjectAttr('tenant', linkify=True, grouped_by='group')
@@ -132,7 +132,7 @@ class VirtualCircuitPanel(panels.ObjectAttributesPanel):
     provider_network = attrs.RelatedObjectAttr('provider_network', linkify=True)
     provider_network = attrs.RelatedObjectAttr('provider_network', linkify=True)
     provider_account = attrs.RelatedObjectAttr('provider_account', linkify=True)
     provider_account = attrs.RelatedObjectAttr('provider_account', linkify=True)
     cid = attrs.TextAttr('cid', label=_('Circuit ID'), style='font-monospace', copy_button=True)
     cid = attrs.TextAttr('cid', label=_('Circuit ID'), style='font-monospace', copy_button=True)
-    type = attrs.RelatedObjectAttr('type', linkify=True)
+    type = attrs.RelatedObjectAttr('type', linkify=True, colored=True)
     status = attrs.ChoiceAttr('status')
     status = attrs.ChoiceAttr('status')
     tenant = attrs.RelatedObjectAttr('tenant', linkify=True, grouped_by='group')
     tenant = attrs.RelatedObjectAttr('tenant', linkify=True, grouped_by='group')
     description = attrs.TextAttr('description')
     description = attrs.TextAttr('description')

+ 4 - 4
netbox/core/api/views.py

@@ -2,7 +2,7 @@ from django.http import Http404, HttpResponse
 from django.shortcuts import get_object_or_404
 from django.shortcuts import get_object_or_404
 from django.utils.translation import gettext_lazy as _
 from django.utils.translation import gettext_lazy as _
 from django_rq.queues import get_redis_connection
 from django_rq.queues import get_redis_connection
-from django_rq.settings import QUEUES_LIST
+from django_rq.settings import get_queues_list
 from django_rq.utils import get_statistics
 from django_rq.utils import get_statistics
 from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import OpenApiParameter, extend_schema
 from drf_spectacular.utils import OpenApiParameter, extend_schema
@@ -195,7 +195,7 @@ class BackgroundWorkerViewSet(BaseRQViewSet):
         return 'Background Workers'
         return 'Background Workers'
 
 
     def get_data(self):
     def get_data(self):
-        config = QUEUES_LIST[0]
+        config = get_queues_list()[0]
         return Worker.all(get_redis_connection(config['connection_config']))
         return Worker.all(get_redis_connection(config['connection_config']))
 
 
     @extend_schema(
     @extend_schema(
@@ -205,7 +205,7 @@ class BackgroundWorkerViewSet(BaseRQViewSet):
     )
     )
     def retrieve(self, request, name):
     def retrieve(self, request, name):
         # all the RQ queues should use the same connection
         # all the RQ queues should use the same connection
-        config = QUEUES_LIST[0]
+        config = get_queues_list()[0]
         workers = Worker.all(get_redis_connection(config['connection_config']))
         workers = Worker.all(get_redis_connection(config['connection_config']))
         worker = next((item for item in workers if item.name == name), None)
         worker = next((item for item in workers if item.name == name), None)
         if not worker:
         if not worker:
@@ -229,7 +229,7 @@ class BackgroundTaskViewSet(BaseRQViewSet):
         return get_rq_jobs()
         return get_rq_jobs()
 
 
     def get_task_from_id(self, task_id):
     def get_task_from_id(self, task_id):
-        config = QUEUES_LIST[0]
+        config = get_queues_list()[0]
         task = RQ_Job.fetch(task_id, connection=get_redis_connection(config['connection_config']))
         task = RQ_Job.fetch(task_id, connection=get_redis_connection(config['connection_config']))
         if not task:
         if not task:
             raise Http404
             raise Http404

+ 1 - 0
netbox/core/tables/config.py

@@ -19,6 +19,7 @@ REVISION_BUTTONS = """
 class ConfigRevisionTable(NetBoxTable):
 class ConfigRevisionTable(NetBoxTable):
     is_active = columns.BooleanColumn(
     is_active = columns.BooleanColumn(
         verbose_name=_('Is Active'),
         verbose_name=_('Is Active'),
+        accessor='active',
         false_mark=None
         false_mark=None
     )
     )
     actions = columns.ActionsColumn(
     actions = columns.ActionsColumn(

+ 26 - 0
netbox/core/tests/test_tables.py

@@ -0,0 +1,26 @@
+from core.models import ObjectChange
+from core.tables import *
+from utilities.testing import TableTestCases
+
+
+class DataSourceTableTest(TableTestCases.StandardTableTestCase):
+    table = DataSourceTable
+
+
+class DataFileTableTest(TableTestCases.StandardTableTestCase):
+    table = DataFileTable
+
+
+class JobTableTest(TableTestCases.StandardTableTestCase):
+    table = JobTable
+
+
+class ObjectChangeTableTest(TableTestCases.StandardTableTestCase):
+    table = ObjectChangeTable
+    queryset_sources = [
+        ('ObjectChangeListView', ObjectChange.objects.valid_models()),
+    ]
+
+
+class ConfigRevisionTableTest(TableTestCases.StandardTableTestCase):
+    table = ConfigRevisionTable

+ 8 - 8
netbox/core/tests/test_views.py

@@ -6,7 +6,7 @@ from datetime import datetime
 from django.urls import reverse
 from django.urls import reverse
 from django.utils import timezone
 from django.utils import timezone
 from django_rq import get_queue
 from django_rq import get_queue
-from django_rq.settings import QUEUES_MAP
+from django_rq.settings import get_queues_map
 from django_rq.workers import get_worker
 from django_rq.workers import get_worker
 from rq.job import Job as RQ_Job
 from rq.job import Job as RQ_Job
 from rq.job import JobStatus
 from rq.job import JobStatus
@@ -189,7 +189,7 @@ class BackgroundTaskTestCase(TestCase):
     def test_background_tasks_list_default(self):
     def test_background_tasks_list_default(self):
         queue = get_queue('default')
         queue = get_queue('default')
         queue.enqueue(self.dummy_job_default)
         queue.enqueue(self.dummy_job_default)
-        queue_index = QUEUES_MAP['default']
+        queue_index = get_queues_map()['default']
 
 
         response = self.client.get(reverse('core:background_task_list', args=[queue_index, 'queued']))
         response = self.client.get(reverse('core:background_task_list', args=[queue_index, 'queued']))
         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.status_code, 200)
@@ -198,7 +198,7 @@ class BackgroundTaskTestCase(TestCase):
     def test_background_tasks_list_high(self):
     def test_background_tasks_list_high(self):
         queue = get_queue('high')
         queue = get_queue('high')
         queue.enqueue(self.dummy_job_high)
         queue.enqueue(self.dummy_job_high)
-        queue_index = QUEUES_MAP['high']
+        queue_index = get_queues_map()['high']
 
 
         response = self.client.get(reverse('core:background_task_list', args=[queue_index, 'queued']))
         response = self.client.get(reverse('core:background_task_list', args=[queue_index, 'queued']))
         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.status_code, 200)
@@ -207,7 +207,7 @@ class BackgroundTaskTestCase(TestCase):
     def test_background_tasks_list_finished(self):
     def test_background_tasks_list_finished(self):
         queue = get_queue('default')
         queue = get_queue('default')
         job = queue.enqueue(self.dummy_job_default)
         job = queue.enqueue(self.dummy_job_default)
-        queue_index = QUEUES_MAP['default']
+        queue_index = get_queues_map()['default']
 
 
         registry = FinishedJobRegistry(queue.name, queue.connection)
         registry = FinishedJobRegistry(queue.name, queue.connection)
         registry.add(job, 2)
         registry.add(job, 2)
@@ -218,7 +218,7 @@ class BackgroundTaskTestCase(TestCase):
     def test_background_tasks_list_failed(self):
     def test_background_tasks_list_failed(self):
         queue = get_queue('default')
         queue = get_queue('default')
         job = queue.enqueue(self.dummy_job_default)
         job = queue.enqueue(self.dummy_job_default)
-        queue_index = QUEUES_MAP['default']
+        queue_index = get_queues_map()['default']
 
 
         registry = FailedJobRegistry(queue.name, queue.connection)
         registry = FailedJobRegistry(queue.name, queue.connection)
         registry.add(job, 2)
         registry.add(job, 2)
@@ -229,7 +229,7 @@ class BackgroundTaskTestCase(TestCase):
     def test_background_tasks_scheduled(self):
     def test_background_tasks_scheduled(self):
         queue = get_queue('default')
         queue = get_queue('default')
         queue.enqueue_at(datetime.now(), self.dummy_job_default)
         queue.enqueue_at(datetime.now(), self.dummy_job_default)
-        queue_index = QUEUES_MAP['default']
+        queue_index = get_queues_map()['default']
 
 
         response = self.client.get(reverse('core:background_task_list', args=[queue_index, 'scheduled']))
         response = self.client.get(reverse('core:background_task_list', args=[queue_index, 'scheduled']))
         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.status_code, 200)
@@ -238,7 +238,7 @@ class BackgroundTaskTestCase(TestCase):
     def test_background_tasks_list_deferred(self):
     def test_background_tasks_list_deferred(self):
         queue = get_queue('default')
         queue = get_queue('default')
         job = queue.enqueue(self.dummy_job_default)
         job = queue.enqueue(self.dummy_job_default)
-        queue_index = QUEUES_MAP['default']
+        queue_index = get_queues_map()['default']
 
 
         registry = DeferredJobRegistry(queue.name, queue.connection)
         registry = DeferredJobRegistry(queue.name, queue.connection)
         registry.add(job, 2)
         registry.add(job, 2)
@@ -335,7 +335,7 @@ class BackgroundTaskTestCase(TestCase):
         worker2 = get_worker('high')
         worker2 = get_worker('high')
         worker2.register_birth()
         worker2.register_birth()
 
 
-        queue_index = QUEUES_MAP['default']
+        queue_index = get_queues_map()['default']
         response = self.client.get(reverse('core:worker_list', args=[queue_index]))
         response = self.client.get(reverse('core:worker_list', args=[queue_index]))
         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.status_code, 200)
         self.assertIn(str(worker1.name), str(response.content))
         self.assertIn(str(worker1.name), str(response.content))

+ 10 - 10
netbox/core/utils.py

@@ -1,7 +1,7 @@
 from django.http import Http404
 from django.http import Http404
 from django.utils.translation import gettext_lazy as _
 from django.utils.translation import gettext_lazy as _
 from django_rq.queues import get_queue, get_queue_by_index, get_redis_connection
 from django_rq.queues import get_queue, get_queue_by_index, get_redis_connection
-from django_rq.settings import QUEUES_LIST, QUEUES_MAP
+from django_rq.settings import get_queues_list, get_queues_map
 from django_rq.utils import get_jobs, stop_jobs
 from django_rq.utils import get_jobs, stop_jobs
 from rq import requeue_job
 from rq import requeue_job
 from rq.exceptions import NoSuchJobError
 from rq.exceptions import NoSuchJobError
@@ -31,7 +31,7 @@ def get_rq_jobs():
     """
     """
     jobs = set()
     jobs = set()
 
 
-    for queue in QUEUES_LIST:
+    for queue in get_queues_list():
         queue = get_queue(queue['name'])
         queue = get_queue(queue['name'])
         jobs.update(queue.get_jobs())
         jobs.update(queue.get_jobs())
 
 
@@ -78,13 +78,13 @@ def delete_rq_job(job_id):
     """
     """
     Delete the specified RQ job.
     Delete the specified RQ job.
     """
     """
-    config = QUEUES_LIST[0]
+    config = get_queues_list()[0]
     try:
     try:
         job = RQ_Job.fetch(job_id, connection=get_redis_connection(config['connection_config']),)
         job = RQ_Job.fetch(job_id, connection=get_redis_connection(config['connection_config']),)
     except NoSuchJobError:
     except NoSuchJobError:
         raise Http404(_("Job {job_id} not found").format(job_id=job_id))
         raise Http404(_("Job {job_id} not found").format(job_id=job_id))
 
 
-    queue_index = QUEUES_MAP[job.origin]
+    queue_index = get_queues_map()[job.origin]
     queue = get_queue_by_index(queue_index)
     queue = get_queue_by_index(queue_index)
 
 
     # Remove job id from queue and delete the actual job
     # Remove job id from queue and delete the actual job
@@ -96,13 +96,13 @@ def requeue_rq_job(job_id):
     """
     """
     Requeue the specified RQ job.
     Requeue the specified RQ job.
     """
     """
-    config = QUEUES_LIST[0]
+    config = get_queues_list()[0]
     try:
     try:
         job = RQ_Job.fetch(job_id, connection=get_redis_connection(config['connection_config']),)
         job = RQ_Job.fetch(job_id, connection=get_redis_connection(config['connection_config']),)
     except NoSuchJobError:
     except NoSuchJobError:
         raise Http404(_("Job {id} not found.").format(id=job_id))
         raise Http404(_("Job {id} not found.").format(id=job_id))
 
 
-    queue_index = QUEUES_MAP[job.origin]
+    queue_index = get_queues_map()[job.origin]
     queue = get_queue_by_index(queue_index)
     queue = get_queue_by_index(queue_index)
 
 
     requeue_job(job_id, connection=queue.connection, serializer=queue.serializer)
     requeue_job(job_id, connection=queue.connection, serializer=queue.serializer)
@@ -112,13 +112,13 @@ def enqueue_rq_job(job_id):
     """
     """
     Enqueue the specified RQ job.
     Enqueue the specified RQ job.
     """
     """
-    config = QUEUES_LIST[0]
+    config = get_queues_list()[0]
     try:
     try:
         job = RQ_Job.fetch(job_id, connection=get_redis_connection(config['connection_config']),)
         job = RQ_Job.fetch(job_id, connection=get_redis_connection(config['connection_config']),)
     except NoSuchJobError:
     except NoSuchJobError:
         raise Http404(_("Job {id} not found.").format(id=job_id))
         raise Http404(_("Job {id} not found.").format(id=job_id))
 
 
-    queue_index = QUEUES_MAP[job.origin]
+    queue_index = get_queues_map()[job.origin]
     queue = get_queue_by_index(queue_index)
     queue = get_queue_by_index(queue_index)
 
 
     try:
     try:
@@ -144,13 +144,13 @@ def stop_rq_job(job_id):
     """
     """
     Stop the specified RQ job.
     Stop the specified RQ job.
     """
     """
-    config = QUEUES_LIST[0]
+    config = get_queues_list()[0]
     try:
     try:
         job = RQ_Job.fetch(job_id, connection=get_redis_connection(config['connection_config']),)
         job = RQ_Job.fetch(job_id, connection=get_redis_connection(config['connection_config']),)
     except NoSuchJobError:
     except NoSuchJobError:
         raise Http404(_("Job {job_id} not found").format(job_id=job_id))
         raise Http404(_("Job {job_id} not found").format(job_id=job_id))
 
 
-    queue_index = QUEUES_MAP[job.origin]
+    queue_index = get_queues_map()[job.origin]
     queue = get_queue_by_index(queue_index)
     queue = get_queue_by_index(queue_index)
 
 
     return stop_jobs(queue, job_id)[0]
     return stop_jobs(queue, job_id)[0]

+ 4 - 4
netbox/core/views.py

@@ -14,7 +14,7 @@ from django.urls import reverse
 from django.utils.translation import gettext_lazy as _
 from django.utils.translation import gettext_lazy as _
 from django.views.generic import View
 from django.views.generic import View
 from django_rq.queues import get_connection, get_queue_by_index, get_redis_connection
 from django_rq.queues import get_connection, get_queue_by_index, get_redis_connection
-from django_rq.settings import QUEUES_LIST, QUEUES_MAP
+from django_rq.settings import get_queues_list, get_queues_map
 from django_rq.utils import get_statistics
 from django_rq.utils import get_statistics
 from rq.exceptions import NoSuchJobError
 from rq.exceptions import NoSuchJobError
 from rq.job import Job as RQ_Job
 from rq.job import Job as RQ_Job
@@ -528,13 +528,13 @@ class BackgroundTaskView(BaseRQView):
 
 
     def get(self, request, job_id):
     def get(self, request, job_id):
         # all the RQ queues should use the same connection
         # all the RQ queues should use the same connection
-        config = QUEUES_LIST[0]
+        config = get_queues_list()[0]
         try:
         try:
             job = RQ_Job.fetch(job_id, connection=get_redis_connection(config['connection_config']),)
             job = RQ_Job.fetch(job_id, connection=get_redis_connection(config['connection_config']),)
         except NoSuchJobError:
         except NoSuchJobError:
             raise Http404(_("Job {job_id} not found").format(job_id=job_id))
             raise Http404(_("Job {job_id} not found").format(job_id=job_id))
 
 
-        queue_index = QUEUES_MAP[job.origin]
+        queue_index = get_queues_map()[job.origin]
         queue = get_queue_by_index(queue_index)
         queue = get_queue_by_index(queue_index)
 
 
         try:
         try:
@@ -644,7 +644,7 @@ class WorkerView(BaseRQView):
 
 
     def get(self, request, key):
     def get(self, request, key):
         # all the RQ queues should use the same connection
         # all the RQ queues should use the same connection
-        config = QUEUES_LIST[0]
+        config = get_queues_list()[0]
         worker = Worker.find_by_key('rq:worker:' + key, connection=get_redis_connection(config['connection_config']))
         worker = Worker.find_by_key('rq:worker:' + key, connection=get_redis_connection(config['connection_config']))
         # Convert microseconds to milliseconds
         # Convert microseconds to milliseconds
         worker.total_working_time = worker.total_working_time / 1000
         worker.total_working_time = worker.total_working_time / 1000

+ 9 - 1
netbox/dcim/api/serializers_/base.py

@@ -38,7 +38,15 @@ class ConnectedEndpointsSerializer(serializers.ModelSerializer):
 
 
     @extend_schema_field(serializers.BooleanField)
     @extend_schema_field(serializers.BooleanField)
     def get_connected_endpoints_reachable(self, obj):
     def get_connected_endpoints_reachable(self, obj):
-        return obj._path and obj._path.is_complete and obj._path.is_active
+        """
+        Return whether the connected endpoints are reachable via a complete, active cable path.
+        """
+        # Use the public `path` accessor rather than dereferencing `_path`
+        # directly. `path` already handles the stale in-memory relation case
+        # that can occur while CablePath rows are rebuilt during cable edits.
+        if path := obj.path:
+            return path.is_complete and path.is_active
+        return False
 
 
 
 
 class PortSerializer(serializers.ModelSerializer):
 class PortSerializer(serializers.ModelSerializer):

+ 15 - 0
netbox/dcim/cable_profiles.py

@@ -254,6 +254,21 @@ class Trunk8C4PCableProfile(BaseCableProfile):
     b_connectors = a_connectors
     b_connectors = a_connectors
 
 
 
 
+class Breakout1C2Px2C1PCableProfile(BaseCableProfile):
+    a_connectors = {
+        1: 2,
+    }
+    b_connectors = {
+        1: 1,
+        2: 1,
+    }
+    _mapping = {
+        (1, 1): (1, 1),
+        (1, 2): (2, 1),
+        (2, 1): (1, 2),
+    }
+
+
 class Breakout1C4Px4C1PCableProfile(BaseCableProfile):
 class Breakout1C4Px4C1PCableProfile(BaseCableProfile):
     a_connectors = {
     a_connectors = {
         1: 4,
         1: 4,

+ 2 - 0
netbox/dcim/choices.py

@@ -1776,6 +1776,7 @@ class CableProfileChoices(ChoiceSet):
     TRUNK_4C8P = 'trunk-4c8p'
     TRUNK_4C8P = 'trunk-4c8p'
     TRUNK_8C4P = 'trunk-8c4p'
     TRUNK_8C4P = 'trunk-8c4p'
     # Breakouts
     # Breakouts
+    BREAKOUT_1C2P_2C1P = 'breakout-1c2p-2c1p'
     BREAKOUT_1C4P_4C1P = 'breakout-1c4p-4c1p'
     BREAKOUT_1C4P_4C1P = 'breakout-1c4p-4c1p'
     BREAKOUT_1C6P_6C1P = 'breakout-1c6p-6c1p'
     BREAKOUT_1C6P_6C1P = 'breakout-1c6p-6c1p'
     BREAKOUT_2C4P_8C1P_SHUFFLE = 'breakout-2c4p-8c1p-shuffle'
     BREAKOUT_2C4P_8C1P_SHUFFLE = 'breakout-2c4p-8c1p-shuffle'
@@ -1815,6 +1816,7 @@ class CableProfileChoices(ChoiceSet):
         (
         (
             _('Breakout'),
             _('Breakout'),
             (
             (
+                (BREAKOUT_1C2P_2C1P, _('1C2P:2C1P breakout')),
                 (BREAKOUT_1C4P_4C1P, _('1C4P:4C1P breakout')),
                 (BREAKOUT_1C4P_4C1P, _('1C4P:4C1P breakout')),
                 (BREAKOUT_1C6P_6C1P, _('1C6P:6C1P breakout')),
                 (BREAKOUT_1C6P_6C1P, _('1C6P:6C1P breakout')),
                 (BREAKOUT_2C4P_8C1P_SHUFFLE, _('2C4P:8C1P breakout (shuffle)')),
                 (BREAKOUT_2C4P_8C1P_SHUFFLE, _('2C4P:8C1P breakout (shuffle)')),

+ 2 - 1
netbox/dcim/filtersets.py

@@ -27,6 +27,7 @@ from tenancy.models import *
 from users.filterset_mixins import OwnerFilterMixin
 from users.filterset_mixins import OwnerFilterMixin
 from users.models import User
 from users.models import User
 from utilities.filters import (
 from utilities.filters import (
+    MultiValueBigNumberFilter,
     MultiValueCharFilter,
     MultiValueCharFilter,
     MultiValueContentTypeFilter,
     MultiValueContentTypeFilter,
     MultiValueMACAddressFilter,
     MultiValueMACAddressFilter,
@@ -2230,7 +2231,7 @@ class InterfaceFilterSet(
         distinct=False,
         distinct=False,
         label=_('LAG interface (ID)'),
         label=_('LAG interface (ID)'),
     )
     )
-    speed = MultiValueNumberFilter()
+    speed = MultiValueBigNumberFilter(min_value=0)
     duplex = django_filters.MultipleChoiceFilter(
     duplex = django_filters.MultipleChoiceFilter(
         choices=InterfaceDuplexChoices,
         choices=InterfaceDuplexChoices,
         distinct=False,
         distinct=False,

+ 8 - 2
netbox/dcim/forms/bulk_edit.py

@@ -20,7 +20,13 @@ from netbox.forms.mixins import ChangelogMessageMixin, OwnerMixin
 from tenancy.models import Tenant
 from tenancy.models import Tenant
 from users.models import User
 from users.models import User
 from utilities.forms import BulkEditForm, add_blank_choice, form_from_model
 from utilities.forms import BulkEditForm, add_blank_choice, form_from_model
-from utilities.forms.fields import ColorField, DynamicModelChoiceField, DynamicModelMultipleChoiceField, JSONField
+from utilities.forms.fields import (
+    ColorField,
+    DynamicModelChoiceField,
+    DynamicModelMultipleChoiceField,
+    JSONField,
+    PositiveBigIntegerField,
+)
 from utilities.forms.rendering import FieldSet, InlineFields, TabbedGroups
 from utilities.forms.rendering import FieldSet, InlineFields, TabbedGroups
 from utilities.forms.widgets import BulkEditNullBooleanSelect, NumberWithOptions
 from utilities.forms.widgets import BulkEditNullBooleanSelect, NumberWithOptions
 from virtualization.models import Cluster
 from virtualization.models import Cluster
@@ -1470,7 +1476,7 @@ class InterfaceBulkEditForm(
             'device_id': '$device',
             'device_id': '$device',
         }
         }
     )
     )
-    speed = forms.IntegerField(
+    speed = PositiveBigIntegerField(
         label=_('Speed'),
         label=_('Speed'),
         required=False,
         required=False,
         widget=NumberWithOptions(
         widget=NumberWithOptions(

+ 1 - 0
netbox/dcim/forms/common.py

@@ -113,6 +113,7 @@ class ModuleCommonForm(forms.Form):
                         raise forms.ValidationError(
                         raise forms.ValidationError(
                             _("Cannot install module with placeholder values in a module bay with no position defined.")
                             _("Cannot install module with placeholder values in a module bay with no position defined.")
                         )
                         )
+
                     try:
                     try:
                         resolved_name = resolve_module_placeholder(template.name, positions)
                         resolved_name = resolve_module_placeholder(template.name, positions)
                     except ValueError as e:
                     except ValueError as e:

+ 2 - 2
netbox/dcim/forms/filtersets.py

@@ -19,7 +19,7 @@ from tenancy.forms import ContactModelFilterForm, TenancyFilterForm
 from tenancy.models import Tenant
 from tenancy.models import Tenant
 from users.models import User
 from users.models import User
 from utilities.forms import BOOLEAN_WITH_BLANK_CHOICES, FilterForm, add_blank_choice
 from utilities.forms import BOOLEAN_WITH_BLANK_CHOICES, FilterForm, add_blank_choice
-from utilities.forms.fields import ColorField, DynamicModelMultipleChoiceField, TagFilterField
+from utilities.forms.fields import ColorField, DynamicModelMultipleChoiceField, PositiveBigIntegerField, TagFilterField
 from utilities.forms.rendering import FieldSet
 from utilities.forms.rendering import FieldSet
 from utilities.forms.widgets import NumberWithOptions
 from utilities.forms.widgets import NumberWithOptions
 from virtualization.models import Cluster, ClusterGroup, VirtualMachine
 from virtualization.models import Cluster, ClusterGroup, VirtualMachine
@@ -1652,7 +1652,7 @@ class InterfaceFilterForm(PathEndpointFilterForm, DeviceComponentFilterForm):
         choices=InterfaceTypeChoices,
         choices=InterfaceTypeChoices,
         required=False
         required=False
     )
     )
-    speed = forms.IntegerField(
+    speed = PositiveBigIntegerField(
         label=_('Speed'),
         label=_('Speed'),
         required=False,
         required=False,
         widget=NumberWithOptions(
         widget=NumberWithOptions(

+ 8 - 2
netbox/dcim/graphql/filters.py

@@ -47,7 +47,13 @@ if TYPE_CHECKING:
         VRFFilter,
         VRFFilter,
     )
     )
     from netbox.graphql.enums import ColorEnum
     from netbox.graphql.enums import ColorEnum
-    from netbox.graphql.filter_lookups import FloatLookup, IntegerArrayLookup, IntegerLookup, TreeNodeFilter
+    from netbox.graphql.filter_lookups import (
+        BigIntegerLookup,
+        FloatLookup,
+        IntegerArrayLookup,
+        IntegerLookup,
+        TreeNodeFilter,
+    )
     from users.graphql.filters import UserFilter
     from users.graphql.filters import UserFilter
     from virtualization.graphql.filters import ClusterFilter
     from virtualization.graphql.filters import ClusterFilter
     from vpn.graphql.filters import L2VPNFilter, TunnelTerminationFilter
     from vpn.graphql.filters import L2VPNFilter, TunnelTerminationFilter
@@ -527,7 +533,7 @@ class InterfaceFilter(
         strawberry_django.filter_field()
         strawberry_django.filter_field()
     )
     )
     mgmt_only: FilterLookup[bool] | None = strawberry_django.filter_field()
     mgmt_only: FilterLookup[bool] | None = strawberry_django.filter_field()
-    speed: Annotated['IntegerLookup', strawberry.lazy('netbox.graphql.filter_lookups')] | None = (
+    speed: Annotated['BigIntegerLookup', strawberry.lazy('netbox.graphql.filter_lookups')] | None = (
         strawberry_django.filter_field()
         strawberry_django.filter_field()
     )
     )
     duplex: BaseFilterLookup[Annotated['InterfaceDuplexEnum', strawberry.lazy('dcim.graphql.enums')]] | None = (
     duplex: BaseFilterLookup[Annotated['InterfaceDuplexEnum', strawberry.lazy('dcim.graphql.enums')]] | None = (

+ 1 - 0
netbox/dcim/graphql/types.py

@@ -447,6 +447,7 @@ class MACAddressType(PrimaryObjectType):
 )
 )
 class InterfaceType(IPAddressesMixin, ModularComponentType, CabledObjectMixin, PathEndpointMixin):
 class InterfaceType(IPAddressesMixin, ModularComponentType, CabledObjectMixin, PathEndpointMixin):
     _name: str
     _name: str
+    speed: BigInt | None
     wwn: str | None
     wwn: str | None
     parent: Annotated["InterfaceType", strawberry.lazy('dcim.graphql.types')] | None
     parent: Annotated["InterfaceType", strawberry.lazy('dcim.graphql.types')] | None
     bridge: Annotated["InterfaceType", strawberry.lazy('dcim.graphql.types')] | None
     bridge: Annotated["InterfaceType", strawberry.lazy('dcim.graphql.types')] | None

+ 15 - 0
netbox/dcim/migrations/0227_alter_interface_speed_bigint.py

@@ -0,0 +1,15 @@
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('dcim', '0226_modulebay_rebuild_tree'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='interface',
+            name='speed',
+            field=models.PositiveBigIntegerField(blank=True, null=True),
+        ),
+    ]

+ 1 - 1
netbox/dcim/migrations/0227_rack_group.py → netbox/dcim/migrations/0228_rack_group.py

@@ -8,7 +8,7 @@ import utilities.json
 
 
 class Migration(migrations.Migration):
 class Migration(migrations.Migration):
     dependencies = [
     dependencies = [
-        ('dcim', '0226_modulebay_rebuild_tree'),
+        ('dcim', '0227_alter_interface_speed_bigint'),
         ('extras', '0134_owner'),
         ('extras', '0134_owner'),
         ('users', '0015_owner'),
         ('users', '0015_owner'),
     ]
     ]

+ 1 - 1
netbox/dcim/migrations/0228_cable_bundle.py → netbox/dcim/migrations/0229_cable_bundle.py

@@ -9,7 +9,7 @@ import utilities.json
 class Migration(migrations.Migration):
 class Migration(migrations.Migration):
 
 
     dependencies = [
     dependencies = [
-        ('dcim', '0227_rack_group'),
+        ('dcim', '0228_rack_group'),
         ('extras', '0134_owner'),
         ('extras', '0134_owner'),
         ('users', '0015_owner'),
         ('users', '0015_owner'),
     ]
     ]

+ 1 - 1
netbox/dcim/migrations/0229_devicebay_modulebay_enabled.py → netbox/dcim/migrations/0230_devicebay_modulebay_enabled.py

@@ -3,7 +3,7 @@ from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
 class Migration(migrations.Migration):
     dependencies = [
     dependencies = [
-        ('dcim', '0228_cable_bundle'),
+        ('dcim', '0229_cable_bundle'),
     ]
     ]
 
 
     operations = [
     operations = [

+ 1 - 1
netbox/dcim/migrations/0230_interface_rf_channel_frequency_precision.py → netbox/dcim/migrations/0231_interface_rf_channel_frequency_precision.py

@@ -4,7 +4,7 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 class Migration(migrations.Migration):
 
 
     dependencies = [
     dependencies = [
-        ('dcim', '0229_devicebay_modulebay_enabled'),
+        ('dcim', '0230_devicebay_modulebay_enabled'),
     ]
     ]
 
 
     operations = [
     operations = [

+ 1 - 1
netbox/dcim/migrations/0231_default_ordering_indexes.py → netbox/dcim/migrations/0232_default_ordering_indexes.py

@@ -5,7 +5,7 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 class Migration(migrations.Migration):
     dependencies = [
     dependencies = [
         ('contenttypes', '0002_remove_content_type_name'),
         ('contenttypes', '0002_remove_content_type_name'),
-        ('dcim', '0230_interface_rf_channel_frequency_precision'),
+        ('dcim', '0231_interface_rf_channel_frequency_precision'),
         ('extras', '0136_customfield_validation_schema'),
         ('extras', '0136_customfield_validation_schema'),
         ('ipam', '0088_rename_vlangroup_total_vlan_ids'),
         ('ipam', '0088_rename_vlangroup_total_vlan_ids'),
         ('tenancy', '0023_add_mptt_tree_indexes'),
         ('tenancy', '0023_add_mptt_tree_indexes'),

+ 1 - 0
netbox/dcim/models/cables.py

@@ -196,6 +196,7 @@ class Cable(PrimaryModel):
             CableProfileChoices.TRUNK_4C6P: cable_profiles.Trunk4C6PCableProfile,
             CableProfileChoices.TRUNK_4C6P: cable_profiles.Trunk4C6PCableProfile,
             CableProfileChoices.TRUNK_4C8P: cable_profiles.Trunk4C8PCableProfile,
             CableProfileChoices.TRUNK_4C8P: cable_profiles.Trunk4C8PCableProfile,
             CableProfileChoices.TRUNK_8C4P: cable_profiles.Trunk8C4PCableProfile,
             CableProfileChoices.TRUNK_8C4P: cable_profiles.Trunk8C4PCableProfile,
+            CableProfileChoices.BREAKOUT_1C2P_2C1P: cable_profiles.Breakout1C2Px2C1PCableProfile,
             CableProfileChoices.BREAKOUT_1C4P_4C1P: cable_profiles.Breakout1C4Px4C1PCableProfile,
             CableProfileChoices.BREAKOUT_1C4P_4C1P: cable_profiles.Breakout1C4Px4C1PCableProfile,
             CableProfileChoices.BREAKOUT_1C6P_6C1P: cable_profiles.Breakout1C6Px6C1PCableProfile,
             CableProfileChoices.BREAKOUT_1C6P_6C1P: cable_profiles.Breakout1C6Px6C1PCableProfile,
             CableProfileChoices.BREAKOUT_2C4P_8C1P_SHUFFLE: cable_profiles.Breakout2C4Px8C1PShuffleCableProfile,
             CableProfileChoices.BREAKOUT_2C4P_8C1P_SHUFFLE: cable_profiles.Breakout2C4Px8C1PShuffleCableProfile,

+ 8 - 0
netbox/dcim/models/device_component_templates.py

@@ -584,6 +584,14 @@ class PortTemplateMapping(PortMappingBase):
         self.module_type = self.front_port.module_type
         self.module_type = self.front_port.module_type
         super().save(*args, **kwargs)
         super().save(*args, **kwargs)
 
 
+    def to_yaml(self):
+        return {
+            'front_port': self.front_port.name,
+            'front_port_position': self.front_port_position,
+            'rear_port': self.rear_port.name,
+            'rear_port_position': self.rear_port_position,
+        }
+
 
 
 class FrontPortTemplate(ModularComponentTemplateModel):
 class FrontPortTemplate(ModularComponentTemplateModel):
     """
     """

+ 43 - 11
netbox/dcim/models/device_components.py

@@ -2,7 +2,7 @@ from functools import cached_property
 
 
 from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
 from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
 from django.contrib.postgres.fields import ArrayField
 from django.contrib.postgres.fields import ArrayField
-from django.core.exceptions import ValidationError
+from django.core.exceptions import ObjectDoesNotExist, ValidationError
 from django.core.validators import MaxValueValidator, MinValueValidator
 from django.core.validators import MaxValueValidator, MinValueValidator
 from django.db import models
 from django.db import models
 from django.db.models import Sum
 from django.db.models import Sum
@@ -307,11 +307,12 @@ class PathEndpoint(models.Model):
 
 
     `connected_endpoints()` is a convenience method for returning the destination of the associated CablePath, if any.
     `connected_endpoints()` is a convenience method for returning the destination of the associated CablePath, if any.
     """
     """
+
     _path = models.ForeignKey(
     _path = models.ForeignKey(
         to='dcim.CablePath',
         to='dcim.CablePath',
         on_delete=models.SET_NULL,
         on_delete=models.SET_NULL,
         null=True,
         null=True,
-        blank=True
+        blank=True,
     )
     )
 
 
     class Meta:
     class Meta:
@@ -323,11 +324,14 @@ class PathEndpoint(models.Model):
 
 
         # Construct the complete path (including e.g. bridged interfaces)
         # Construct the complete path (including e.g. bridged interfaces)
         while origin is not None:
         while origin is not None:
-
-            if origin._path is None:
+            # Go through the public accessor rather than dereferencing `_path`
+            # directly. During cable edits, CablePath rows can be deleted and
+            # recreated while this endpoint instance is still in memory.
+            cable_path = origin.path
+            if cable_path is None:
                 break
                 break
 
 
-            path.extend(origin._path.path_objects)
+            path.extend(cable_path.path_objects)
 
 
             # If the path ends at a non-connected pass-through port, pad out the link and far-end terminations
             # If the path ends at a non-connected pass-through port, pad out the link and far-end terminations
             if len(path) % 3 == 1:
             if len(path) % 3 == 1:
@@ -336,8 +340,8 @@ class PathEndpoint(models.Model):
             elif len(path) % 3 == 2:
             elif len(path) % 3 == 2:
                 path.insert(-1, [])
                 path.insert(-1, [])
 
 
-            # Check for a bridged relationship to continue the trace
-            destinations = origin._path.destinations
+            # Check for a bridged relationship to continue the trace.
+            destinations = cable_path.destinations
             if len(destinations) == 1:
             if len(destinations) == 1:
                 origin = getattr(destinations[0], 'bridge', None)
                 origin = getattr(destinations[0], 'bridge', None)
             else:
             else:
@@ -348,14 +352,42 @@ class PathEndpoint(models.Model):
 
 
     @property
     @property
     def path(self):
     def path(self):
-        return self._path
+        """
+        Return this endpoint's current CablePath, if any.
+
+        `_path` is a denormalized reference that is updated from CablePath
+        save/delete handlers, including queryset.update() calls on origin
+        endpoints. That means an already-instantiated endpoint can briefly hold
+        a stale in-memory `_path` relation while the database already points to
+        a different CablePath (or to no path at all).
+
+        If the cached relation points to a CablePath that has just been
+        deleted, refresh only the `_path` field from the database and retry.
+        This keeps the fix cheap and narrowly scoped to the denormalized FK.
+        """
+        if self._path_id is None:
+            return None
+
+        try:
+            return self._path
+        except ObjectDoesNotExist:
+            # Refresh only the denormalized FK instead of the whole model.
+            # The expected problem here is in-memory staleness during path
+            # rebuilds, not persistent database corruption.
+            self.refresh_from_db(fields=['_path'])
+            return self._path if self._path_id else None
 
 
     @cached_property
     @cached_property
     def connected_endpoints(self):
     def connected_endpoints(self):
         """
         """
-        Caching accessor for the attached CablePath's destination (if any)
+        Caching accessor for the attached CablePath's destinations (if any).
+
+        Always route through `path` so stale in-memory `_path` references are
+        repaired before we cache the result for the lifetime of this instance.
         """
         """
-        return self._path.destinations if self._path else []
+        if cable_path := self.path:
+            return cable_path.destinations
+        return []
 
 
 
 
 #
 #
@@ -774,7 +806,7 @@ class Interface(
         verbose_name=_('management only'),
         verbose_name=_('management only'),
         help_text=_('This interface is used only for out-of-band management')
         help_text=_('This interface is used only for out-of-band management')
     )
     )
-    speed = models.PositiveIntegerField(
+    speed = models.PositiveBigIntegerField(
         blank=True,
         blank=True,
         null=True,
         null=True,
         verbose_name=_('speed (Kbps)')
         verbose_name=_('speed (Kbps)')

+ 9 - 0
netbox/dcim/models/devices.py

@@ -276,6 +276,15 @@ class DeviceType(ImageAttachmentsMixin, PrimaryModel, WeightMixin):
             data['rear-ports'] = [
             data['rear-ports'] = [
                 c.to_yaml() for c in self.rearporttemplates.all()
                 c.to_yaml() for c in self.rearporttemplates.all()
             ]
             ]
+
+        # Port mappings
+        port_mapping_data = [
+            c.to_yaml() for c in self.port_mappings.all()
+        ]
+
+        if port_mapping_data:
+            data['port-mappings'] = port_mapping_data
+
         if self.modulebaytemplates.exists():
         if self.modulebaytemplates.exists():
             data['module-bays'] = [
             data['module-bays'] = [
                 c.to_yaml() for c in self.modulebaytemplates.all()
                 c.to_yaml() for c in self.modulebaytemplates.all()

+ 8 - 0
netbox/dcim/models/modules.py

@@ -195,6 +195,14 @@ class ModuleType(ImageAttachmentsMixin, PrimaryModel, WeightMixin):
                 c.to_yaml() for c in self.rearporttemplates.all()
                 c.to_yaml() for c in self.rearporttemplates.all()
             ]
             ]
 
 
+        # Port mappings
+        port_mapping_data = [
+            c.to_yaml() for c in self.port_mappings.all()
+        ]
+
+        if port_mapping_data:
+            data['port-mappings'] = port_mapping_data
+
         return yaml.dump(dict(data), sort_keys=False)
         return yaml.dump(dict(data), sort_keys=False)
 
 
 
 

+ 21 - 1
netbox/dcim/tables/devices.py

@@ -382,6 +382,17 @@ class PathEndpointTable(CableTerminationTable):
         orderable=False
         orderable=False
     )
     )
 
 
+    def value_connection(self, value):
+        if value:
+            connections = []
+            for termination in value:
+                if hasattr(termination, 'parent_object'):
+                    connections.append(f'{termination.parent_object} > {termination}')
+                else:
+                    connections.append(str(termination))
+            return ', '.join(connections)
+        return None
+
 
 
 class ConsolePortTable(ModularDeviceComponentTable, PathEndpointTable):
 class ConsolePortTable(ModularDeviceComponentTable, PathEndpointTable):
     device = tables.Column(
     device = tables.Column(
@@ -683,6 +694,15 @@ class InterfaceTable(BaseInterfaceTable, ModularDeviceComponentTable, PathEndpoi
         orderable=False
         orderable=False
     )
     )
 
 
+    def value_connection(self, record, value):
+        if record.is_virtual and hasattr(record, 'virtual_circuit_termination') and record.virtual_circuit_termination:
+            connections = [
+                f"{t.interface.parent_object} > {t.interface} via {t.parent_object}"
+                for t in record.connected_endpoints
+            ]
+            return ', '.join(connections)
+        return super().value_connection(value)
+
     class Meta(DeviceComponentTable.Meta):
     class Meta(DeviceComponentTable.Meta):
         model = models.Interface
         model = models.Interface
         fields = (
         fields = (
@@ -1161,7 +1181,7 @@ class VirtualDeviceContextTable(TenancyColumnsMixin, PrimaryModelTable):
     )
     )
     device = tables.Column(
     device = tables.Column(
         verbose_name=_('Device'),
         verbose_name=_('Device'),
-        order_by=('device___name',),
+        order_by=('device__name',),
         linkify=True
         linkify=True
     )
     )
     status = columns.ChoiceFieldColumn(
     status = columns.ChoiceFieldColumn(

+ 3 - 1
netbox/dcim/tables/modules.py

@@ -56,7 +56,9 @@ class ModuleTypeTable(PrimaryModelTable):
         template_code=WEIGHT,
         template_code=WEIGHT,
         order_by=('_abs_weight', 'weight_unit')
         order_by=('_abs_weight', 'weight_unit')
     )
     )
-    attributes = columns.DictColumn()
+    attributes = columns.DictColumn(
+        orderable=False,
+    )
     module_count = columns.LinkedCountColumn(
     module_count = columns.LinkedCountColumn(
         viewname='dcim:module_list',
         viewname='dcim:module_list',
         url_params={'module_type_id': 'pk'},
         url_params={'module_type_id': 'pk'},

+ 2 - 2
netbox/dcim/tests/test_api.py

@@ -2431,9 +2431,9 @@ class InterfaceTest(Mixins.ComponentTraceMixin, APIViewTestCases.APIViewTestCase
             {
             {
                 'device': device.pk,
                 'device': device.pk,
                 'name': 'Interface 4',
                 'name': 'Interface 4',
-                'type': '1000base-t',
+                'type': 'other',
                 'mode': InterfaceModeChoices.MODE_TAGGED,
                 'mode': InterfaceModeChoices.MODE_TAGGED,
-                'speed': 1000000,
+                'speed': 16_000_000_000,
                 'duplex': 'full',
                 'duplex': 'full',
                 'vrf': vrfs[0].pk,
                 'vrf': vrfs[0].pk,
                 'poe_mode': InterfacePoEModeChoices.MODE_PD,
                 'poe_mode': InterfacePoEModeChoices.MODE_PD,

+ 2 - 2
netbox/dcim/tests/test_filtersets.py

@@ -4753,7 +4753,7 @@ class InterfaceTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
                 enabled=True,
                 enabled=True,
                 mgmt_only=True,
                 mgmt_only=True,
                 tx_power=40,
                 tx_power=40,
-                speed=100000,
+                speed=16_000_000_000,
                 duplex='full',
                 duplex='full',
                 poe_mode=InterfacePoEModeChoices.MODE_PD,
                 poe_mode=InterfacePoEModeChoices.MODE_PD,
                 poe_type=InterfacePoETypeChoices.TYPE_2_8023AT,
                 poe_type=InterfacePoETypeChoices.TYPE_2_8023AT,
@@ -4855,7 +4855,7 @@ class InterfaceTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
         self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
         self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
 
 
     def test_speed(self):
     def test_speed(self):
-        params = {'speed': [1000000, 100000]}
+        params = {'speed': [16_000_000_000, 1_000_000, 100_000]}
         self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4)
         self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4)
 
 
     def test_duplex(self):
     def test_duplex(self):

+ 133 - 1
netbox/dcim/tests/test_models.py

@@ -5,6 +5,7 @@ from circuits.models import *
 from core.models import ObjectType
 from core.models import ObjectType
 from dcim.choices import *
 from dcim.choices import *
 from dcim.models import *
 from dcim.models import *
+from extras.events import serialize_for_event
 from extras.models import CustomField
 from extras.models import CustomField
 from ipam.models import Prefix
 from ipam.models import Prefix
 from netbox.choices import WeightUnitChoices
 from netbox.choices import WeightUnitChoices
@@ -999,7 +1000,79 @@ class ModuleBayTestCase(TestCase):
         nested_bay = module.modulebays.get(name='Sub-bay 1-1')
         nested_bay = module.modulebays.get(name='Sub-bay 1-1')
         self.assertEqual(nested_bay.position, '1-1')
         self.assertEqual(nested_bay.position, '1-1')
 
 
-    #
+    @tag('regression')  # #20474
+    def test_single_module_token_at_nested_depth(self):
+        """
+        A module type with a single {module} token should install at depth > 1
+        without raising a token count mismatch error, resolving to the immediate
+        parent bay's position.
+        """
+        manufacturer = Manufacturer.objects.first()
+        site = Site.objects.first()
+        device_role = DeviceRole.objects.first()
+
+        device_type = DeviceType.objects.create(
+            manufacturer=manufacturer,
+            model='Chassis with Rear Card',
+            slug='chassis-with-rear-card'
+        )
+        ModuleBayTemplate.objects.create(
+            device_type=device_type,
+            name='Rear card slot',
+            position='1'
+        )
+
+        rear_card_type = ModuleType.objects.create(
+            manufacturer=manufacturer,
+            model='Rear Card'
+        )
+        ModuleBayTemplate.objects.create(
+            module_type=rear_card_type,
+            name='SFP slot 1',
+            position='1'
+        )
+        ModuleBayTemplate.objects.create(
+            module_type=rear_card_type,
+            name='SFP slot 2',
+            position='2'
+        )
+
+        sfp_type = ModuleType.objects.create(
+            manufacturer=manufacturer,
+            model='SFP Module'
+        )
+        InterfaceTemplate.objects.create(
+            module_type=sfp_type,
+            name='SFP {module}',
+            type=InterfaceTypeChoices.TYPE_10GE_SFP_PLUS
+        )
+
+        device = Device.objects.create(
+            name='Test Chassis',
+            device_type=device_type,
+            role=device_role,
+            site=site
+        )
+
+        rear_card_bay = device.modulebays.get(name='Rear card slot')
+        rear_card = Module.objects.create(
+            device=device,
+            module_bay=rear_card_bay,
+            module_type=rear_card_type
+        )
+
+        sfp_bay = rear_card.modulebays.get(name='SFP slot 2')
+        sfp_module = Module.objects.create(
+            device=device,
+            module_bay=sfp_bay,
+            module_type=sfp_type
+        )
+
+        interface = sfp_module.interfaces.first()
+        self.assertEqual(interface.name, 'SFP 2')
+
+
+    #
     # Position inheritance tests (#19796)
     # Position inheritance tests (#19796)
     #
     #
 
 
@@ -1666,6 +1739,65 @@ class CableTestCase(TestCase):
         self.assertEqual(a_terms, [interface1])
         self.assertEqual(a_terms, [interface1])
         self.assertEqual(b_terms, [interface2])
         self.assertEqual(b_terms, [interface2])
 
 
+    @tag('regression')  # #21498
+    def test_path_refreshes_replaced_cablepath_reference(self):
+        """
+        An already-instantiated interface should refresh its denormalized
+        `_path` foreign key when the referenced CablePath row has been
+        replaced in the database.
+        """
+        stale_interface = Interface.objects.get(device__name='TestDevice1', name='eth0')
+        old_path = CablePath.objects.get(pk=stale_interface._path_id)
+
+        new_path = CablePath(
+            path=old_path.path,
+            is_active=old_path.is_active,
+            is_complete=old_path.is_complete,
+            is_split=old_path.is_split,
+        )
+        old_path_id = old_path.pk
+        old_path.delete()
+        new_path.save()
+
+        # The old CablePath no longer exists
+        self.assertFalse(CablePath.objects.filter(pk=old_path_id).exists())
+
+        # The already-instantiated interface still points to the deleted path
+        # until the accessor refreshes `_path` from the database.
+        self.assertEqual(stale_interface._path_id, old_path_id)
+        self.assertEqual(stale_interface.path.pk, new_path.pk)
+
+    @tag('regression')  # #21498
+    def test_serialize_for_event_handles_stale_cablepath_reference_after_retermination(self):
+        """
+        Serializing an interface whose previously cached `_path` row has been
+        deleted during cable retermination must not raise.
+        """
+        stale_interface = Interface.objects.get(device__name='TestDevice2', name='eth0')
+        old_path_id = stale_interface._path_id
+        new_peer = Interface.objects.get(device__name='TestDevice2', name='eth1')
+        cable = stale_interface.cable
+
+        self.assertIsNotNone(cable)
+        self.assertIsNotNone(old_path_id)
+        self.assertEqual(stale_interface.cable_end, 'B')
+
+        cable.b_terminations = [new_peer]
+        cable.save()
+
+        # The old CablePath was deleted during retrace.
+        self.assertFalse(CablePath.objects.filter(pk=old_path_id).exists())
+
+        # The stale in-memory instance still holds the deleted FK value.
+        self.assertEqual(stale_interface._path_id, old_path_id)
+
+        # Serialization must not raise ObjectDoesNotExist. Because this interface
+        # was the former B-side termination, it is now disconnected.
+        data = serialize_for_event(stale_interface)
+        self.assertIsNone(data['connected_endpoints'])
+        self.assertIsNone(data['connected_endpoints_type'])
+        self.assertFalse(data['connected_endpoints_reachable'])
+
 
 
 class VirtualDeviceContextTestCase(TestCase):
 class VirtualDeviceContextTestCase(TestCase):
 
 

+ 204 - 0
netbox/dcim/tests/test_tables.py

@@ -0,0 +1,204 @@
+from dcim.models import ConsolePort, Interface, PowerPort
+from dcim.tables import *
+from utilities.testing import TableTestCases
+
+#
+# Sites
+#
+
+
+class RegionTableTest(TableTestCases.StandardTableTestCase):
+    table = RegionTable
+
+
+class SiteGroupTableTest(TableTestCases.StandardTableTestCase):
+    table = SiteGroupTable
+
+
+class SiteTableTest(TableTestCases.StandardTableTestCase):
+    table = SiteTable
+
+
+class LocationTableTest(TableTestCases.StandardTableTestCase):
+    table = LocationTable
+
+
+#
+# Racks
+#
+
+class RackRoleTableTest(TableTestCases.StandardTableTestCase):
+    table = RackRoleTable
+
+
+class RackTypeTableTest(TableTestCases.StandardTableTestCase):
+    table = RackTypeTable
+
+
+class RackTableTest(TableTestCases.StandardTableTestCase):
+    table = RackTable
+
+
+class RackReservationTableTest(TableTestCases.StandardTableTestCase):
+    table = RackReservationTable
+
+
+#
+# Device types
+#
+
+class ManufacturerTableTest(TableTestCases.StandardTableTestCase):
+    table = ManufacturerTable
+
+
+class DeviceTypeTableTest(TableTestCases.StandardTableTestCase):
+    table = DeviceTypeTable
+
+
+#
+# Module types
+#
+
+class ModuleTypeProfileTableTest(TableTestCases.StandardTableTestCase):
+    table = ModuleTypeProfileTable
+
+
+class ModuleTypeTableTest(TableTestCases.StandardTableTestCase):
+    table = ModuleTypeTable
+
+
+class ModuleTableTest(TableTestCases.StandardTableTestCase):
+    table = ModuleTable
+
+
+#
+# Devices
+#
+
+class DeviceRoleTableTest(TableTestCases.StandardTableTestCase):
+    table = DeviceRoleTable
+
+
+class PlatformTableTest(TableTestCases.StandardTableTestCase):
+    table = PlatformTable
+
+
+class DeviceTableTest(TableTestCases.StandardTableTestCase):
+    table = DeviceTable
+
+
+#
+# Device components
+#
+
+class ConsolePortTableTest(TableTestCases.StandardTableTestCase):
+    table = ConsolePortTable
+
+
+class ConsoleServerPortTableTest(TableTestCases.StandardTableTestCase):
+    table = ConsoleServerPortTable
+
+
+class PowerPortTableTest(TableTestCases.StandardTableTestCase):
+    table = PowerPortTable
+
+
+class PowerOutletTableTest(TableTestCases.StandardTableTestCase):
+    table = PowerOutletTable
+
+
+class InterfaceTableTest(TableTestCases.StandardTableTestCase):
+    table = InterfaceTable
+
+
+class FrontPortTableTest(TableTestCases.StandardTableTestCase):
+    table = FrontPortTable
+
+
+class RearPortTableTest(TableTestCases.StandardTableTestCase):
+    table = RearPortTable
+
+
+class ModuleBayTableTest(TableTestCases.StandardTableTestCase):
+    table = ModuleBayTable
+
+
+class DeviceBayTableTest(TableTestCases.StandardTableTestCase):
+    table = DeviceBayTable
+
+
+class InventoryItemTableTest(TableTestCases.StandardTableTestCase):
+    table = InventoryItemTable
+
+
+class InventoryItemRoleTableTest(TableTestCases.StandardTableTestCase):
+    table = InventoryItemRoleTable
+
+
+#
+# Connections
+#
+
+class ConsoleConnectionTableTest(TableTestCases.StandardTableTestCase):
+    table = ConsoleConnectionTable
+    queryset_sources = [
+        ('ConsoleConnectionsListView', ConsolePort.objects.filter(_path__is_complete=True)),
+    ]
+
+
+class PowerConnectionTableTest(TableTestCases.StandardTableTestCase):
+    table = PowerConnectionTable
+    queryset_sources = [
+        ('PowerConnectionsListView', PowerPort.objects.filter(_path__is_complete=True)),
+    ]
+
+
+class InterfaceConnectionTableTest(TableTestCases.StandardTableTestCase):
+    table = InterfaceConnectionTable
+    queryset_sources = [
+        ('InterfaceConnectionsListView', Interface.objects.filter(_path__is_complete=True)),
+    ]
+
+
+#
+# Cables
+#
+
+class CableTableTest(TableTestCases.StandardTableTestCase):
+    table = CableTable
+
+
+#
+# Power
+#
+
+class PowerPanelTableTest(TableTestCases.StandardTableTestCase):
+    table = PowerPanelTable
+
+
+class PowerFeedTableTest(TableTestCases.StandardTableTestCase):
+    table = PowerFeedTable
+
+
+#
+# Virtual chassis
+#
+
+class VirtualChassisTableTest(TableTestCases.StandardTableTestCase):
+    table = VirtualChassisTable
+
+
+#
+# Virtual device contexts
+#
+
+class VirtualDeviceContextTableTest(TableTestCases.StandardTableTestCase):
+    table = VirtualDeviceContextTable
+
+
+#
+# MAC addresses
+#
+
+class MACAddressTableTest(TableTestCases.StandardTableTestCase):
+    table = MACAddressTable

+ 21 - 4
netbox/dcim/tests/test_views.py

@@ -2413,6 +2413,23 @@ class DeviceTestCase(ViewTestCases.PrimaryObjectViewTestCase):
         self.remove_permissions('dcim.view_device')
         self.remove_permissions('dcim.view_device')
         self.assertHttpStatus(self.client.get(url), 403)
         self.assertHttpStatus(self.client.get(url), 403)
 
 
+    def test_device_role_display_colored(self):
+        parent_role = DeviceRole.objects.create(name='Parent Role', slug='parent-role', color='111111')
+        child_role = DeviceRole.objects.create(name='Child Role', slug='child-role', parent=parent_role, color='aa00bb')
+
+        device = Device.objects.first()
+        device.role = child_role
+        device.save()
+
+        self.add_permissions('dcim.view_device')
+        response = self.client.get(device.get_absolute_url())
+
+        self.assertHttpStatus(response, 200)
+        self.assertContains(response, 'Parent Role')
+        self.assertContains(response, 'Child Role')
+        self.assertContains(response, 'background-color: #aa00bb')
+        self.assertNotContains(response, 'background-color: #111111')
+
     @override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
     @override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
     def test_bulk_import_duplicate_ids_error_message(self):
     def test_bulk_import_duplicate_ids_error_message(self):
         device = Device.objects.first()
         device = Device.objects.first()
@@ -3056,13 +3073,13 @@ class InterfaceTestCase(ViewTestCases.DeviceComponentViewTestCase):
         cls.form_data = {
         cls.form_data = {
             'device': device.pk,
             'device': device.pk,
             'name': 'Interface X',
             'name': 'Interface X',
-            'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
+            'type': InterfaceTypeChoices.TYPE_OTHER,
             'enabled': False,
             'enabled': False,
             'bridge': interfaces[4].pk,
             'bridge': interfaces[4].pk,
             'lag': interfaces[3].pk,
             'lag': interfaces[3].pk,
             'wwn': EUI('01:02:03:04:05:06:07:08', version=64),
             'wwn': EUI('01:02:03:04:05:06:07:08', version=64),
             'mtu': 65000,
             'mtu': 65000,
-            'speed': 1000000,
+            'speed': 16_000_000_000,
             'duplex': 'full',
             'duplex': 'full',
             'mgmt_only': True,
             'mgmt_only': True,
             'description': 'A front port',
             'description': 'A front port',
@@ -3080,13 +3097,13 @@ class InterfaceTestCase(ViewTestCases.DeviceComponentViewTestCase):
         cls.bulk_create_data = {
         cls.bulk_create_data = {
             'device': device.pk,
             'device': device.pk,
             'name': 'Interface [4-6]',
             'name': 'Interface [4-6]',
-            'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
+            'type': InterfaceTypeChoices.TYPE_OTHER,
             'enabled': False,
             'enabled': False,
             'bridge': interfaces[4].pk,
             'bridge': interfaces[4].pk,
             'lag': interfaces[3].pk,
             'lag': interfaces[3].pk,
             'wwn': EUI('01:02:03:04:05:06:07:08', version=64),
             'wwn': EUI('01:02:03:04:05:06:07:08', version=64),
             'mtu': 2000,
             'mtu': 2000,
-            'speed': 100000,
+            'speed': 16_000_000_000,
             'duplex': 'half',
             'duplex': 'half',
             'mgmt_only': True,
             'mgmt_only': True,
             'description': 'A front port',
             'description': 'A front port',

+ 3 - 3
netbox/dcim/ui/panels.py

@@ -50,7 +50,7 @@ class RackPanel(panels.ObjectAttributesPanel):
     tenant = attrs.RelatedObjectAttr('tenant', linkify=True, grouped_by='group')
     tenant = attrs.RelatedObjectAttr('tenant', linkify=True, grouped_by='group')
     status = attrs.ChoiceAttr('status')
     status = attrs.ChoiceAttr('status')
     rack_type = attrs.RelatedObjectAttr('rack_type', linkify=True, grouped_by='manufacturer')
     rack_type = attrs.RelatedObjectAttr('rack_type', linkify=True, grouped_by='manufacturer')
-    role = attrs.RelatedObjectAttr('role', linkify=True)
+    role = attrs.RelatedObjectAttr('role', linkify=True, colored=True)
     description = attrs.TextAttr('description')
     description = attrs.TextAttr('description')
     serial = attrs.TextAttr('serial', label=_('Serial number'), style='font-monospace', copy_button=True)
     serial = attrs.TextAttr('serial', label=_('Serial number'), style='font-monospace', copy_button=True)
     asset_tag = attrs.TextAttr('asset_tag', style='font-monospace', copy_button=True)
     asset_tag = attrs.TextAttr('asset_tag', style='font-monospace', copy_button=True)
@@ -104,7 +104,7 @@ class DeviceManagementPanel(panels.ObjectAttributesPanel):
     title = _('Management')
     title = _('Management')
 
 
     status = attrs.ChoiceAttr('status')
     status = attrs.ChoiceAttr('status')
-    role = attrs.NestedObjectAttr('role', linkify=True, max_depth=3)
+    role = attrs.NestedObjectAttr('role', linkify=True, max_depth=3, colored=True)
     platform = attrs.NestedObjectAttr('platform', linkify=True, max_depth=3)
     platform = attrs.NestedObjectAttr('platform', linkify=True, max_depth=3)
     primary_ip4 = attrs.TemplatedAttr(
     primary_ip4 = attrs.TemplatedAttr(
         'primary_ip4',
         'primary_ip4',
@@ -295,7 +295,7 @@ class InventoryItemPanel(panels.ObjectAttributesPanel):
     name = attrs.TextAttr('name')
     name = attrs.TextAttr('name')
     label = attrs.TextAttr('label')
     label = attrs.TextAttr('label')
     status = attrs.ChoiceAttr('status')
     status = attrs.ChoiceAttr('status')
-    role = attrs.RelatedObjectAttr('role', linkify=True)
+    role = attrs.RelatedObjectAttr('role', linkify=True, colored=True)
     component = attrs.GenericForeignKeyAttr('component', linkify=True)
     component = attrs.GenericForeignKeyAttr('component', linkify=True)
     manufacturer = attrs.RelatedObjectAttr('manufacturer', linkify=True)
     manufacturer = attrs.RelatedObjectAttr('manufacturer', linkify=True)
     part_id = attrs.TextAttr('part_id', label=_('Part ID'))
     part_id = attrs.TextAttr('part_id', label=_('Part ID'))

+ 28 - 28
netbox/dcim/utils.py

@@ -8,6 +8,34 @@ from django.utils.translation import gettext as _
 from dcim.constants import MODULE_TOKEN
 from dcim.constants import MODULE_TOKEN
 
 
 
 
+def compile_path_node(ct_id, object_id):
+    return f'{ct_id}:{object_id}'
+
+
+def decompile_path_node(repr):
+    ct_id, object_id = repr.split(':')
+    return int(ct_id), int(object_id)
+
+
+def object_to_path_node(obj):
+    """
+    Return a representation of an object suitable for inclusion in a CablePath path. Node representation is in the
+    form <ContentType ID>:<Object ID>.
+    """
+    ct = ContentType.objects.get_for_model(obj)
+    return compile_path_node(ct.pk, obj.pk)
+
+
+def path_node_to_object(repr):
+    """
+    Given the string representation of a path node, return the corresponding instance. If the object no longer
+    exists, return None.
+    """
+    ct_id, object_id = decompile_path_node(repr)
+    ct = ContentType.objects.get_for_id(ct_id)
+    return ct.model_class().objects.filter(pk=object_id).first()
+
+
 def get_module_bay_positions(module_bay):
 def get_module_bay_positions(module_bay):
     """
     """
     Given a module bay, traverse up the module hierarchy and return
     Given a module bay, traverse up the module hierarchy and return
@@ -58,34 +86,6 @@ def resolve_module_placeholder(value, positions):
     )
     )
 
 
 
 
-def compile_path_node(ct_id, object_id):
-    return f'{ct_id}:{object_id}'
-
-
-def decompile_path_node(repr):
-    ct_id, object_id = repr.split(':')
-    return int(ct_id), int(object_id)
-
-
-def object_to_path_node(obj):
-    """
-    Return a representation of an object suitable for inclusion in a CablePath path. Node representation is in the
-    form <ContentType ID>:<Object ID>.
-    """
-    ct = ContentType.objects.get_for_model(obj)
-    return compile_path_node(ct.pk, obj.pk)
-
-
-def path_node_to_object(repr):
-    """
-    Given the string representation of a path node, return the corresponding instance. If the object no longer
-    exists, return None.
-    """
-    ct_id, object_id = decompile_path_node(repr)
-    ct = ContentType.objects.get_for_id(ct_id)
-    return ct.model_class().objects.filter(pk=object_id).first()
-
-
 def create_cablepaths(objects):
 def create_cablepaths(objects):
     """
     """
     Create CablePaths for all paths originating from the specified set of nodes.
     Create CablePaths for all paths originating from the specified set of nodes.

+ 11 - 1
netbox/extras/api/customfields.py

@@ -85,8 +85,18 @@ class CustomFieldsDataField(Field):
                 "values."
                 "values."
             )
             )
 
 
+        custom_fields = {cf.name: cf for cf in self._get_custom_fields()}
+
+        # Reject any unknown custom field names
+        invalid_fields = set(data) - set(custom_fields)
+        if invalid_fields:
+            raise ValidationError({
+                field: _("Custom field '{name}' does not exist for this object type.").format(name=field)
+                for field in sorted(invalid_fields)
+            })
+
         # Serialize object and multi-object values
         # Serialize object and multi-object values
-        for cf in self._get_custom_fields():
+        for cf in custom_fields.values():
             if cf.name in data and data[cf.name] not in CUSTOMFIELD_EMPTY_VALUES and cf.type in (
             if cf.name in data and data[cf.name] not in CUSTOMFIELD_EMPTY_VALUES and cf.type in (
                     CustomFieldTypeChoices.TYPE_OBJECT,
                     CustomFieldTypeChoices.TYPE_OBJECT,
                     CustomFieldTypeChoices.TYPE_MULTIOBJECT
                     CustomFieldTypeChoices.TYPE_MULTIOBJECT

+ 53 - 2
netbox/extras/api/serializers_/scripts.py

@@ -1,19 +1,70 @@
-from django.utils.translation import gettext as _
+import logging
+
+from django.core.files.storage import storages
+from django.db import IntegrityError
+from django.utils.translation import gettext_lazy as _
 from drf_spectacular.utils import extend_schema_field
 from drf_spectacular.utils import extend_schema_field
 from rest_framework import serializers
 from rest_framework import serializers
 
 
 from core.api.serializers_.jobs import JobSerializer
 from core.api.serializers_.jobs import JobSerializer
-from extras.models import Script
+from core.choices import ManagedFileRootPathChoices
+from extras.models import Script, ScriptModule
 from netbox.api.serializers import ValidatedModelSerializer
 from netbox.api.serializers import ValidatedModelSerializer
 from utilities.datetime import local_now
 from utilities.datetime import local_now
 
 
+logger = logging.getLogger(__name__)
+
 __all__ = (
 __all__ = (
     'ScriptDetailSerializer',
     'ScriptDetailSerializer',
     'ScriptInputSerializer',
     'ScriptInputSerializer',
+    'ScriptModuleSerializer',
     'ScriptSerializer',
     'ScriptSerializer',
 )
 )
 
 
 
 
+class ScriptModuleSerializer(ValidatedModelSerializer):
+    file = serializers.FileField(write_only=True)
+    file_path = serializers.CharField(read_only=True)
+
+    class Meta:
+        model = ScriptModule
+        fields = ['id', 'display', 'file_path', 'file', 'created', 'last_updated']
+        brief_fields = ('id', 'display')
+
+    def validate(self, data):
+        # ScriptModule.save() sets file_root; inject it here so full_clean() succeeds.
+        # Pop 'file' before model instantiation — ScriptModule has no such field.
+        file = data.pop('file', None)
+        data['file_root'] = ManagedFileRootPathChoices.SCRIPTS
+        data = super().validate(data)
+        data.pop('file_root', None)
+        if file is not None:
+            data['file'] = file
+        return data
+
+    def create(self, validated_data):
+        file = validated_data.pop('file')
+        storage = storages.create_storage(storages.backends["scripts"])
+        validated_data['file_path'] = storage.save(file.name, file)
+        created = False
+        try:
+            instance = super().create(validated_data)
+            created = True
+            return instance
+        except IntegrityError as e:
+            if 'file_path' in str(e):
+                raise serializers.ValidationError(
+                    _("A script module with this file name already exists.")
+                )
+            raise
+        finally:
+            if not created and (file_path := validated_data.get('file_path')):
+                try:
+                    storage.delete(file_path)
+                except Exception:
+                    logger.warning(f"Failed to delete orphaned script file '{file_path}' from storage.")
+
+
 class ScriptSerializer(ValidatedModelSerializer):
 class ScriptSerializer(ValidatedModelSerializer):
     description = serializers.SerializerMethodField(read_only=True)
     description = serializers.SerializerMethodField(read_only=True)
     vars = serializers.SerializerMethodField(read_only=True)
     vars = serializers.SerializerMethodField(read_only=True)

+ 1 - 0
netbox/extras/api/urls.py

@@ -26,6 +26,7 @@ router.register('journal-entries', views.JournalEntryViewSet)
 router.register('config-contexts', views.ConfigContextViewSet)
 router.register('config-contexts', views.ConfigContextViewSet)
 router.register('config-context-profiles', views.ConfigContextProfileViewSet)
 router.register('config-context-profiles', views.ConfigContextProfileViewSet)
 router.register('config-templates', views.ConfigTemplateViewSet)
 router.register('config-templates', views.ConfigTemplateViewSet)
+router.register('scripts/upload', views.ScriptModuleViewSet)
 router.register('scripts', views.ScriptViewSet, basename='script')
 router.register('scripts', views.ScriptViewSet, basename='script')
 
 
 app_name = 'extras-api'
 app_name = 'extras-api'

+ 7 - 1
netbox/extras/api/views.py

@@ -6,7 +6,7 @@ from rest_framework import status
 from rest_framework.decorators import action
 from rest_framework.decorators import action
 from rest_framework.exceptions import PermissionDenied
 from rest_framework.exceptions import PermissionDenied
 from rest_framework.generics import RetrieveUpdateDestroyAPIView
 from rest_framework.generics import RetrieveUpdateDestroyAPIView
-from rest_framework.mixins import ListModelMixin, RetrieveModelMixin
+from rest_framework.mixins import CreateModelMixin, ListModelMixin, RetrieveModelMixin
 from rest_framework.renderers import JSONRenderer
 from rest_framework.renderers import JSONRenderer
 from rest_framework.response import Response
 from rest_framework.response import Response
 from rest_framework.routers import APIRootView
 from rest_framework.routers import APIRootView
@@ -21,6 +21,7 @@ from netbox.api.features import SyncedDataMixin
 from netbox.api.metadata import ContentTypeMetadata
 from netbox.api.metadata import ContentTypeMetadata
 from netbox.api.renderers import TextRenderer
 from netbox.api.renderers import TextRenderer
 from netbox.api.viewsets import BaseViewSet, NetBoxModelViewSet
 from netbox.api.viewsets import BaseViewSet, NetBoxModelViewSet
+from netbox.api.viewsets.mixins import ObjectValidationMixin
 from utilities.exceptions import RQWorkerNotRunningException
 from utilities.exceptions import RQWorkerNotRunningException
 from utilities.request import copy_safe_request
 from utilities.request import copy_safe_request
 
 
@@ -264,6 +265,11 @@ class ConfigTemplateViewSet(SyncedDataMixin, ConfigTemplateRenderMixin, NetBoxMo
 # Scripts
 # Scripts
 #
 #
 
 
+class ScriptModuleViewSet(ObjectValidationMixin, CreateModelMixin, BaseViewSet):
+    queryset = ScriptModule.objects.all()
+    serializer_class = serializers.ScriptModuleSerializer
+
+
 @extend_schema_view(
 @extend_schema_view(
     update=extend_schema(request=serializers.ScriptInputSerializer),
     update=extend_schema(request=serializers.ScriptInputSerializer),
     partial_update=extend_schema(request=serializers.ScriptInputSerializer),
     partial_update=extend_schema(request=serializers.ScriptInputSerializer),

+ 62 - 14
netbox/extras/events.py

@@ -25,16 +25,54 @@ logger = logging.getLogger('netbox.events_processor')
 
 
 class EventContext(UserDict):
 class EventContext(UserDict):
     """
     """
-    A custom dictionary that automatically serializes its associated object on demand.
+    Dictionary-compatible wrapper for queued events that lazily serializes
+    ``event['data']`` on first access.
+
+    Backward-compatible with the plain-dict interface expected by existing
+    EVENTS_PIPELINE consumers. When the same object is enqueued more than once
+    in a single request, the serialization source is updated so consumers see
+    the latest state.
     """
     """
 
 
-    # We're emulating a dictionary here (rather than using a custom class) because prior to NetBox v4.5.2, events were
-    # queued as dictionaries for processing by handles in EVENTS_PIPELINE. We need to avoid introducing any breaking
-    # changes until a suitable minor release.
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+        # Track which model instance should be serialized if/when `data` is
+        # requested. This may be refreshed on duplicate enqueue, while leaving
+        # the public `object` entry untouched for compatibility.
+        self._serialization_source = None
+        if 'object' in self:
+            self._serialization_source = super().__getitem__('object')
+
+    def refresh_serialization_source(self, instance):
+        """
+        Point lazy serialization at a fresher instance, invalidating any
+        already-materialized ``data``.
+        """
+        self._serialization_source = instance
+        # UserDict.__contains__ checks the backing dict directly, so `in`
+        # does not trigger __getitem__'s lazy serialization.
+        if 'data' in self:
+            del self['data']
+
+    def freeze_data(self, instance):
+        """
+        Eagerly serialize and cache the payload for delete events, where the
+        object may become inaccessible after deletion.
+        """
+        super().__setitem__('data', serialize_for_event(instance))
+        self._serialization_source = None
+
     def __getitem__(self, item):
     def __getitem__(self, item):
         if item == 'data' and 'data' not in self:
         if item == 'data' and 'data' not in self:
-            data = serialize_for_event(self['object'])
-            self.__setitem__('data', data)
+            # Materialize the payload only when an event consumer asks for it.
+            #
+            # On coalesced events, use the latest explicitly queued instance so
+            # webhooks/scripts/notifications observe the final queued state for
+            # that object within the request.
+            source = self._serialization_source or super().__getitem__('object')
+            super().__setitem__('data', serialize_for_event(source))
+
         return super().__getitem__(item)
         return super().__getitem__(item)
 
 
 
 
@@ -76,8 +114,9 @@ def get_snapshots(instance, event_type):
 
 
 def enqueue_event(queue, instance, request, event_type):
 def enqueue_event(queue, instance, request, event_type):
     """
     """
-    Enqueue a serialized representation of a created/updated/deleted object for the processing of
-    events once the request has completed.
+    Enqueue (or coalesce) an event for a created/updated/deleted object.
+
+    Events are processed after the request completes.
     """
     """
     # Bail if this type of object does not support event rules
     # Bail if this type of object does not support event rules
     if not has_feature(instance, 'event_rules'):
     if not has_feature(instance, 'event_rules'):
@@ -88,11 +127,18 @@ def enqueue_event(queue, instance, request, event_type):
 
 
     assert instance.pk is not None
     assert instance.pk is not None
     key = f'{app_label}.{model_name}:{instance.pk}'
     key = f'{app_label}.{model_name}:{instance.pk}'
+
     if key in queue:
     if key in queue:
         queue[key]['snapshots']['postchange'] = get_snapshots(instance, event_type)['postchange']
         queue[key]['snapshots']['postchange'] = get_snapshots(instance, event_type)['postchange']
-        # If the object is being deleted, update any prior "update" event to "delete"
+
+        # If the object is being deleted, convert any prior update event into a
+        # delete event and freeze the payload before the object (or related
+        # rows) become inaccessible.
         if event_type == OBJECT_DELETED:
         if event_type == OBJECT_DELETED:
             queue[key]['event_type'] = event_type
             queue[key]['event_type'] = event_type
+        else:
+            # Keep the public `object` entry stable for compatibility.
+            queue[key].refresh_serialization_source(instance)
     else:
     else:
         queue[key] = EventContext(
         queue[key] = EventContext(
             object_type=ObjectType.objects.get_for_model(instance),
             object_type=ObjectType.objects.get_for_model(instance),
@@ -106,9 +152,11 @@ def enqueue_event(queue, instance, request, event_type):
             username=request.user.username,  # DEPRECATED, will be removed in NetBox v4.7.0
             username=request.user.username,  # DEPRECATED, will be removed in NetBox v4.7.0
             request_id=request.id,           # DEPRECATED, will be removed in NetBox v4.7.0
             request_id=request.id,           # DEPRECATED, will be removed in NetBox v4.7.0
         )
         )
-    # Force serialization of objects prior to them actually being deleted
+
+    # For delete events, eagerly serialize the payload before the row is gone.
+    # This covers both first-time enqueues and coalesced update→delete promotions.
     if event_type == OBJECT_DELETED:
     if event_type == OBJECT_DELETED:
-        queue[key]['data'] = serialize_for_event(instance)
+        queue[key].freeze_data(instance)
 
 
 
 
 def process_event_rules(event_rules, object_type, event):
 def process_event_rules(event_rules, object_type, event):
@@ -133,9 +181,9 @@ def process_event_rules(event_rules, object_type, event):
         if not event_rule.eval_conditions(event['data']):
         if not event_rule.eval_conditions(event['data']):
             continue
             continue
 
 
-        # Compile event data
-        event_data = event_rule.action_data or {}
-        event_data.update(event['data'])
+        # Merge rule-specific action_data with the event payload.
+        # Copy to avoid mutating the rule's stored action_data dict.
+        event_data = {**(event_rule.action_data or {}), **event['data']}
 
 
         # Webhooks
         # Webhooks
         if event_rule.action_type == EventRuleActionChoices.WEBHOOK:
         if event_rule.action_type == EventRuleActionChoices.WEBHOOK:

+ 1 - 1
netbox/extras/migrations/0137_default_ordering_indexes.py

@@ -6,7 +6,7 @@ class Migration(migrations.Migration):
     dependencies = [
     dependencies = [
         ('contenttypes', '0002_remove_content_type_name'),
         ('contenttypes', '0002_remove_content_type_name'),
         ('core', '0022_default_ordering_indexes'),
         ('core', '0022_default_ordering_indexes'),
-        ('dcim', '0231_default_ordering_indexes'),
+        ('dcim', '0232_default_ordering_indexes'),
         ('extras', '0136_customfield_validation_schema'),
         ('extras', '0136_customfield_validation_schema'),
         ('tenancy', '0023_add_mptt_tree_indexes'),
         ('tenancy', '0023_add_mptt_tree_indexes'),
         ('users', '0015_owner'),
         ('users', '0015_owner'),

+ 3 - 2
netbox/extras/tables/tables.py

@@ -421,6 +421,7 @@ class NotificationTable(NetBoxTable):
     icon = columns.TemplateColumn(
     icon = columns.TemplateColumn(
         template_code=NOTIFICATION_ICON,
         template_code=NOTIFICATION_ICON,
         accessor=tables.A('event'),
         accessor=tables.A('event'),
+        orderable=False,
         attrs={
         attrs={
             'td': {'class': 'w-1'},
             'td': {'class': 'w-1'},
             'th': {'class': 'w-1'},
             'th': {'class': 'w-1'},
@@ -483,8 +484,8 @@ class WebhookTable(NetBoxTable):
         verbose_name=_('Name'),
         verbose_name=_('Name'),
         linkify=True
         linkify=True
     )
     )
-    ssl_validation = columns.BooleanColumn(
-        verbose_name=_('SSL Validation')
+    ssl_verification = columns.BooleanColumn(
+        verbose_name=_('SSL Verification'),
     )
     )
     owner = tables.Column(
     owner = tables.Column(
         linkify=True,
         linkify=True,

+ 74 - 4
netbox/extras/tests/test_api.py

@@ -1,7 +1,10 @@
 import datetime
 import datetime
 import hashlib
 import hashlib
+import io
+from unittest.mock import MagicMock, patch
 
 
 from django.contrib.contenttypes.models import ContentType
 from django.contrib.contenttypes.models import ContentType
+from django.core.files.uploadedfile import SimpleUploadedFile
 from django.urls import reverse
 from django.urls import reverse
 from django.utils.timezone import make_aware, now
 from django.utils.timezone import make_aware, now
 from rest_framework import status
 from rest_framework import status
@@ -1011,10 +1014,14 @@ class ScriptTest(APITestCase):
 
 
     @classmethod
     @classmethod
     def setUpTestData(cls):
     def setUpTestData(cls):
-        module = ScriptModule.objects.create(
-            file_root=ManagedFileRootPathChoices.SCRIPTS,
-            file_path='script.py',
-        )
+        # Avoid trying to import a non-existent on-disk module during setup.
+        # This test creates the Script row explicitly and monkey-patches
+        # Script.python_class below.
+        with patch.object(ScriptModule, 'sync_classes'):
+            module = ScriptModule.objects.create(
+                file_root=ManagedFileRootPathChoices.SCRIPTS,
+                file_path='script.py',
+            )
         script = Script.objects.create(
         script = Script.objects.create(
             module=module,
             module=module,
             name='Test script',
             name='Test script',
@@ -1384,3 +1391,66 @@ class NotificationTest(APIViewTestCases.APIViewTestCase):
                 'event_type': OBJECT_DELETED,
                 'event_type': OBJECT_DELETED,
             },
             },
         ]
         ]
+
+
class ScriptModuleTest(APITestCase):
    """
    Exercise the POST /api/extras/scripts/upload/ endpoint.

    ScriptModule is a proxy of core.ManagedFile (a different app), so the
    standard APIViewTestCases mixins cannot be used directly. Each test grants
    explicit Django model-level permissions via add_permissions().
    """

    def setUp(self):
        super().setUp()
        self.url = reverse('extras-api:scriptmodule-list')  # /api/extras/scripts/upload/

    def test_upload_script_module_without_permission(self):
        # With no permissions granted, the upload must be rejected outright.
        payload = b"from extras.scripts import Script\nclass TestScript(Script):\n    pass\n"
        uploaded = SimpleUploadedFile('test_upload.py', payload, content_type='text/plain')
        response = self.client.post(
            self.url,
            {'file': uploaded},
            format='multipart',
            **self.header,
        )
        self.assertHttpStatus(response, status.HTTP_403_FORBIDDEN)

    def test_upload_script_module(self):
        # ScriptModule is a proxy of core.ManagedFile; both permissions required.
        self.add_permissions('extras.add_scriptmodule', 'core.add_managedfile')
        payload = b"from extras.scripts import Script\nclass TestScript(Script):\n    pass\n"
        uploaded = SimpleUploadedFile('test_upload.py', payload, content_type='text/plain')

        storage_mock = MagicMock()
        storage_mock.save.return_value = 'test_upload.py'
        # The upload serializer writes the file via storages.create_storage(...).save(),
        # but ScriptModule.sync_classes() later imports it via storages["scripts"].open().
        # Provide both behaviors so the uploaded module can actually be loaded during the test.
        storage_mock.open.side_effect = lambda *args, **kwargs: io.BytesIO(payload)

        with (
            patch('extras.api.serializers_.scripts.storages') as serializer_storages,
            patch('extras.models.mixins.storages') as module_storages,
        ):
            serializer_storages.create_storage.return_value = storage_mock
            serializer_storages.backends = {'scripts': {}}
            module_storages.__getitem__.return_value = storage_mock

            response = self.client.post(
                self.url,
                {'file': uploaded},
                format='multipart',
                **self.header,
            )

        self.assertHttpStatus(response, status.HTTP_201_CREATED)
        self.assertEqual(response.data['file_path'], 'test_upload.py')
        storage_mock.save.assert_called_once()
        self.assertTrue(ScriptModule.objects.filter(file_path='test_upload.py').exists())
        self.assertTrue(Script.objects.filter(module__file_path='test_upload.py', name='TestScript').exists())

    def test_upload_script_module_without_file_fails(self):
        # Even a fully-permissioned request must fail validation without a file.
        self.add_permissions('extras.add_scriptmodule', 'core.add_managedfile')
        response = self.client.post(self.url, {}, format='json', **self.header)
        self.assertHttpStatus(response, status.HTTP_400_BAD_REQUEST)

+ 77 - 1
netbox/extras/tests/test_customfields.py

@@ -7,7 +7,7 @@ from django.test import tag
 from django.urls import reverse
 from django.urls import reverse
 from rest_framework import status
 from rest_framework import status
 
 
-from core.models import ObjectType
+from core.models import ObjectChange, ObjectType
 from dcim.filtersets import SiteFilterSet
 from dcim.filtersets import SiteFilterSet
 from dcim.forms import SiteImportForm
 from dcim.forms import SiteImportForm
 from dcim.models import Manufacturer, Rack, Site
 from dcim.models import Manufacturer, Rack, Site
@@ -1233,6 +1233,82 @@ class CustomFieldAPITest(APITestCase):
             list(original_cfvs['multiobject_field'])
             list(original_cfvs['multiobject_field'])
         )
         )
 
 
+    @tag('regression')
+    def test_update_single_object_rejects_unknown_custom_fields(self):
+        site2 = Site.objects.get(name='Site 2')
+        original_cf_data = {**site2.custom_field_data}
+        url = reverse('dcim-api:site-detail', kwargs={'pk': site2.pk})
+        self.add_permissions('dcim.change_site')
+
+        data = {
+            'custom_fields': {
+                'text_field': 'valid',
+                'thisfieldshouldntexist': 'random text here',
+            },
+        }
+
+        response = self.client.patch(url, data, format='json', **self.header)
+        self.assertHttpStatus(response, status.HTTP_400_BAD_REQUEST)
+        self.assertIn('custom_fields', response.data)
+        self.assertIn('thisfieldshouldntexist', response.data['custom_fields'])
+
+        # Ensure the object was not modified
+        site2.refresh_from_db()
+        self.assertEqual(site2.custom_field_data, original_cf_data)
+
+    @tag('regression')
+    def test_update_single_object_prunes_stale_custom_field_data_from_database_and_postchange_data(self):
+        stale_key = 'thisfieldshouldntexist'
+        stale_value = 'random text here'
+        updated_text_value = 'ABCD'
+
+        site2 = Site.objects.get(name='Site 2')
+        original_text_value = site2.custom_field_data['text_field']
+        object_type = ObjectType.objects.get_for_model(Site)
+
+        # Seed stale custom field data directly in the database to mimic a polluted row.
+        Site.objects.filter(pk=site2.pk).update(
+            custom_field_data={
+                **site2.custom_field_data,
+                stale_key: stale_value,
+            }
+        )
+        site2.refresh_from_db()
+        self.assertIn(stale_key, site2.custom_field_data)
+
+        existing_change_ids = set(
+            ObjectChange.objects.filter(
+                changed_object_type=object_type,
+                changed_object_id=site2.pk,
+            ).values_list('pk', flat=True)
+        )
+
+        url = reverse('dcim-api:site-detail', kwargs={'pk': site2.pk})
+        self.add_permissions('dcim.change_site')
+        data = {
+            'custom_fields': {
+                'text_field': updated_text_value,
+            },
+        }
+
+        response = self.client.patch(url, data, format='json', **self.header)
+        self.assertHttpStatus(response, status.HTTP_200_OK)
+
+        site2.refresh_from_db()
+        self.assertEqual(site2.cf['text_field'], updated_text_value)
+        self.assertNotIn(stale_key, site2.custom_field_data)
+
+        object_changes = ObjectChange.objects.filter(
+            changed_object_type=object_type,
+            changed_object_id=site2.pk,
+        ).exclude(pk__in=existing_change_ids)
+        self.assertEqual(object_changes.count(), 1)
+
+        object_change = object_changes.get()
+        self.assertEqual(object_change.prechange_data['custom_fields']['text_field'], original_text_value)
+        self.assertEqual(object_change.postchange_data['custom_fields']['text_field'], updated_text_value)
+        self.assertNotIn(stale_key, object_change.postchange_data['custom_fields'])
+
     def test_specify_related_object_by_attr(self):
     def test_specify_related_object_by_attr(self):
         site1 = Site.objects.get(name='Site 1')
         site1 = Site.objects.get(name='Site 1')
         vlans = VLAN.objects.all()[:3]
         vlans = VLAN.objects.all()[:3]

+ 94 - 0
netbox/extras/tests/test_event_rules.py

@@ -1,8 +1,10 @@
 import json
 import json
 import uuid
 import uuid
+from unittest import skipIf
 from unittest.mock import Mock, patch
 from unittest.mock import Mock, patch
 
 
 import django_rq
 import django_rq
+from django.conf import settings
 from django.http import HttpResponse
 from django.http import HttpResponse
 from django.test import RequestFactory
 from django.test import RequestFactory
 from django.urls import reverse
 from django.urls import reverse
@@ -343,6 +345,7 @@ class EventRuleTest(APITestCase):
             self.assertEqual(job.kwargs['snapshots']['prechange']['name'], sites[i].name)
             self.assertEqual(job.kwargs['snapshots']['prechange']['name'], sites[i].name)
             self.assertEqual(job.kwargs['snapshots']['prechange']['tags'], ['Bar', 'Foo'])
             self.assertEqual(job.kwargs['snapshots']['prechange']['tags'], ['Bar', 'Foo'])
 
 
+    @skipIf('netbox.tests.dummy_plugin' not in settings.PLUGINS, 'dummy_plugin not in settings.PLUGINS')
     def test_send_webhook(self):
     def test_send_webhook(self):
         request_id = uuid.uuid4()
         request_id = uuid.uuid4()
         url_path = reverse('dcim:site_add')
         url_path = reverse('dcim:site_add')
@@ -431,6 +434,97 @@ class EventRuleTest(APITestCase):
         self.assertEqual(job.kwargs['object_type'], script_type)
         self.assertEqual(job.kwargs['object_type'], script_type)
         self.assertEqual(job.kwargs['username'], self.user.username)
         self.assertEqual(job.kwargs['username'], self.user.username)
 
 
+    def test_duplicate_enqueue_refreshes_lazy_payload(self):
+        """
+        When the same object is enqueued more than once in a single request,
+        lazy serialization should use the most recently enqueued instance while
+        preserving the original event['object'] reference.
+        """
+        request = RequestFactory().get(reverse('dcim:site_add'))
+        request.id = uuid.uuid4()
+        request.user = self.user
+
+        site = Site.objects.create(name='Site 1', slug='site-1')
+        stale_site = Site.objects.get(pk=site.pk)
+
+        queue = {}
+        enqueue_event(queue, stale_site, request, OBJECT_UPDATED)
+
+        event = queue[f'dcim.site:{site.pk}']
+
+        # Data should not be materialized yet (lazy serialization)
+        self.assertNotIn('data', event.data)
+
+        fresh_site = Site.objects.get(pk=site.pk)
+        fresh_site.description = 'foo'
+        fresh_site.save()
+
+        enqueue_event(queue, fresh_site, request, OBJECT_UPDATED)
+
+        # The original object reference should be preserved
+        self.assertIs(event['object'], stale_site)
+
+        # But serialized data should reflect the fresher instance
+        self.assertEqual(event['data']['description'], 'foo')
+        self.assertEqual(event['snapshots']['postchange']['description'], 'foo')
+
+    def test_duplicate_enqueue_invalidates_materialized_data(self):
+        """
+        If event['data'] has already been materialized before a second enqueue
+        for the same object, the stale payload should be discarded and rebuilt
+        from the fresher instance on next access.
+        """
+        request = RequestFactory().get(reverse('dcim:site_add'))
+        request.id = uuid.uuid4()
+        request.user = self.user
+
+        site = Site.objects.create(name='Site 1', slug='site-1')
+
+        queue = {}
+        enqueue_event(queue, site, request, OBJECT_UPDATED)
+
+        event = queue[f'dcim.site:{site.pk}']
+
+        # Force early materialization
+        self.assertEqual(event['data']['description'], '')
+
+        # Now update and re-enqueue
+        fresh_site = Site.objects.get(pk=site.pk)
+        fresh_site.description = 'updated'
+        fresh_site.save()
+
+        enqueue_event(queue, fresh_site, request, OBJECT_UPDATED)
+
+        # Stale data should have been invalidated; new access should reflect update
+        self.assertEqual(event['data']['description'], 'updated')
+
+    def test_update_then_delete_enqueue_freezes_payload(self):
+        """
+        When an update event is coalesced with a subsequent delete, the event
+        type should be promoted to OBJECT_DELETED and the payload should be
+        eagerly frozen (since the object will be inaccessible after deletion).
+        """
+        request = RequestFactory().get(reverse('dcim:site_add'))
+        request.id = uuid.uuid4()
+        request.user = self.user
+
+        site = Site.objects.create(name='Site 1', slug='site-1')
+
+        queue = {}
+        enqueue_event(queue, site, request, OBJECT_UPDATED)
+
+        event = queue[f'dcim.site:{site.pk}']
+
+        enqueue_event(queue, site, request, OBJECT_DELETED)
+
+        # Event type should have been promoted
+        self.assertEqual(event['event_type'], OBJECT_DELETED)
+
+        # Data should already be materialized (frozen), not lazy
+        self.assertIn('data', event.data)
+        self.assertEqual(event['data']['name'], 'Site 1')
+        self.assertIsNone(event['snapshots']['postchange'])
+
     def test_duplicate_triggers(self):
     def test_duplicate_triggers(self):
         """
         """
         Test for erroneous duplicate event triggers resulting from saving an object multiple times
         Test for erroneous duplicate event triggers resulting from saving an object multiple times

+ 85 - 0
netbox/extras/tests/test_models.py

@@ -1,10 +1,15 @@
+import io
 import tempfile
 import tempfile
 from pathlib import Path
 from pathlib import Path
+from unittest.mock import patch
 
 
 from django.contrib.contenttypes.models import ContentType
 from django.contrib.contenttypes.models import ContentType
+from django.core.files.base import ContentFile
+from django.core.files.storage import Storage
 from django.core.files.uploadedfile import SimpleUploadedFile
 from django.core.files.uploadedfile import SimpleUploadedFile
 from django.forms import ValidationError
 from django.forms import ValidationError
 from django.test import TestCase, tag
 from django.test import TestCase, tag
+from PIL import Image
 
 
 from core.models import AutoSyncRecord, DataSource, ObjectType
 from core.models import AutoSyncRecord, DataSource, ObjectType
 from dcim.models import Device, DeviceRole, DeviceType, Location, Manufacturer, Platform, Region, Site, SiteGroup
 from dcim.models import Device, DeviceRole, DeviceType, Location, Manufacturer, Platform, Region, Site, SiteGroup
@@ -22,10 +27,50 @@ from utilities.exceptions import AbortRequest
 from virtualization.models import Cluster, ClusterGroup, ClusterType, VirtualMachine
 from virtualization.models import Cluster, ClusterGroup, ClusterType, VirtualMachine
 
 
 
 
class OverwriteStyleMemoryStorage(Storage):
    """
    In-memory storage that mimics overwrite-style backends by returning the
    incoming name unchanged from get_available_name().
    """

    def __init__(self):
        # Maps stored name -> raw file bytes. Inspected directly by tests.
        self.files = {}

    def _open(self, name, mode='rb'):
        return ContentFile(self.files[name], name=name)

    def _save(self, name, content):
        self.files[name] = content.read()
        return name

    def delete(self, name):
        # Deleting a missing name is a no-op, matching real backends.
        self.files.pop(name, None)

    def exists(self, name):
        return name in self.files

    def get_available_name(self, name, max_length=None):
        # Overwrite-style backends never rename to avoid collisions.
        return name

    def get_alternative_name(self, file_root, file_ext):
        # Deterministic suffix so tests can assert exact resulting names.
        return f'{file_root}_sdmmer4{file_ext}'

    def listdir(self, path):
        # No directory hierarchy: everything lives in a single flat namespace.
        return [], list(self.files)

    def size(self, name):
        return len(self.files[name])

    def url(self, name):
        return f'https://example.invalid/{name}'
+
+
 class ImageAttachmentTests(TestCase):
 class ImageAttachmentTests(TestCase):
     @classmethod
     @classmethod
     def setUpTestData(cls):
     def setUpTestData(cls):
         cls.ct_rack = ContentType.objects.get_by_natural_key('dcim', 'rack')
         cls.ct_rack = ContentType.objects.get_by_natural_key('dcim', 'rack')
+        cls.ct_site = ContentType.objects.get_by_natural_key('dcim', 'site')
+        cls.site = Site.objects.create(name='Site 1')
         cls.image_content = b''
         cls.image_content = b''
 
 
     def _stub_image_attachment(self, object_id, image_filename, name=None):
     def _stub_image_attachment(self, object_id, image_filename, name=None):
@@ -49,6 +94,15 @@ class ImageAttachmentTests(TestCase):
         )
         )
         return ia
         return ia
 
 
+    def _uploaded_png(self, filename):
+        image = io.BytesIO()
+        Image.new('RGB', (1, 1)).save(image, format='PNG')
+        return SimpleUploadedFile(
+            name=filename,
+            content=image.getvalue(),
+            content_type='image/png',
+        )
+
     def test_filename_strips_expected_prefix(self):
     def test_filename_strips_expected_prefix(self):
         """
         """
         Tests that the filename of the image attachment is stripped of the expected
         Tests that the filename of the image attachment is stripped of the expected
@@ -97,6 +151,37 @@ class ImageAttachmentTests(TestCase):
         ia = self._stub_image_attachment(12, 'image-attachments/rack_12_file.png', name='')
         ia = self._stub_image_attachment(12, 'image-attachments/rack_12_file.png', name='')
         self.assertEqual('file.png', str(ia))
         self.assertEqual('file.png', str(ia))
 
 
+    def test_duplicate_uploaded_names_get_suffixed_with_overwrite_style_storage(self):
+        storage = OverwriteStyleMemoryStorage()
+        field = ImageAttachment._meta.get_field('image')
+
+        with patch.object(field, 'storage', storage):
+            first = ImageAttachment(
+                object_type=self.ct_site,
+                object_id=self.site.pk,
+                image=self._uploaded_png('action-buttons.png'),
+            )
+            first.save()
+
+            second = ImageAttachment(
+                object_type=self.ct_site,
+                object_id=self.site.pk,
+                image=self._uploaded_png('action-buttons.png'),
+            )
+            second.save()
+
+        base_name = f'image-attachments/site_{self.site.pk}_action-buttons.png'
+        suffixed_name = f'image-attachments/site_{self.site.pk}_action-buttons_sdmmer4.png'
+
+        self.assertEqual(first.image.name, base_name)
+        self.assertEqual(second.image.name, suffixed_name)
+        self.assertNotEqual(first.image.name, second.image.name)
+
+        self.assertEqual(first.filename, 'action-buttons.png')
+        self.assertEqual(second.filename, 'action-buttons_sdmmer4.png')
+
+        self.assertCountEqual(storage.files.keys(), {base_name, suffixed_name})
+
 
 
 class TagTest(TestCase):
 class TagTest(TestCase):
 
 

+ 93 - 24
netbox/extras/tests/test_tables.py

@@ -1,24 +1,93 @@
-from django.test import RequestFactory, TestCase, tag
-
-from extras.models import EventRule
-from extras.tables import EventRuleTable
-
-
-@tag('regression')
-class EventRuleTableTest(TestCase):
-    def test_every_orderable_field_does_not_throw_exception(self):
-        rule = EventRule.objects.all()
-        disallowed = {
-            'actions',
-        }
-
-        orderable_columns = [
-            column.name for column in EventRuleTable(rule).columns if column.orderable and column.name not in disallowed
-        ]
-        fake_request = RequestFactory().get('/')
-
-        for col in orderable_columns:
-            for direction in ('-', ''):
-                table = EventRuleTable(rule)
-                table.order_by = f'{direction}{col}'
-                table.as_html(fake_request)
+from extras.models import Bookmark, Notification, Subscription
+from extras.tables import *
+from utilities.testing import TableTestCases
+
+
class CustomFieldTableTest(TableTestCases.StandardTableTestCase):
    table = CustomFieldTable


class CustomFieldChoiceSetTableTest(TableTestCases.StandardTableTestCase):
    table = CustomFieldChoiceSetTable


class CustomLinkTableTest(TableTestCases.StandardTableTestCase):
    table = CustomLinkTable


class ExportTemplateTableTest(TableTestCases.StandardTableTestCase):
    table = ExportTemplateTable


class SavedFilterTableTest(TableTestCases.StandardTableTestCase):
    table = SavedFilterTable


class TableConfigTableTest(TableTestCases.StandardTableTestCase):
    table = TableConfigTable


class BookmarkTableTest(TableTestCases.StandardTableTestCase):
    table = BookmarkTable

    # This table's list view lives in account.views rather than extras.views,
    # so auto-discovery cannot locate it; supply the queryset explicitly.
    queryset_sources = [
        ('Bookmark.objects.all()', Bookmark.objects.all()),
    ]


class NotificationGroupTableTest(TableTestCases.StandardTableTestCase):
    table = NotificationGroupTable


class NotificationTableTest(TableTestCases.StandardTableTestCase):
    table = NotificationTable

    # This table's list view lives in account.views rather than extras.views,
    # so auto-discovery cannot locate it; supply the queryset explicitly.
    queryset_sources = [
        ('Notification.objects.all()', Notification.objects.all()),
    ]


class SubscriptionTableTest(TableTestCases.StandardTableTestCase):
    table = SubscriptionTable

    # This table's list view lives in account.views rather than extras.views,
    # so auto-discovery cannot locate it; supply the queryset explicitly.
    queryset_sources = [
        ('Subscription.objects.all()', Subscription.objects.all()),
    ]


class WebhookTableTest(TableTestCases.StandardTableTestCase):
    table = WebhookTable


class EventRuleTableTest(TableTestCases.StandardTableTestCase):
    table = EventRuleTable


class TagTableTest(TableTestCases.StandardTableTestCase):
    table = TagTable


class ConfigContextProfileTableTest(TableTestCases.StandardTableTestCase):
    table = ConfigContextProfileTable


class ConfigContextTableTest(TableTestCases.StandardTableTestCase):
    table = ConfigContextTable


class ConfigTemplateTableTest(TableTestCases.StandardTableTestCase):
    table = ConfigTemplateTable


class ImageAttachmentTableTest(TableTestCases.StandardTableTestCase):
    table = ImageAttachmentTable


class JournalEntryTableTest(TableTestCases.StandardTableTestCase):
    table = JournalEntryTable

+ 82 - 17
netbox/extras/tests/test_utils.py

@@ -1,10 +1,12 @@
 from types import SimpleNamespace
 from types import SimpleNamespace
+from unittest.mock import patch
 
 
 from django.contrib.contenttypes.models import ContentType
 from django.contrib.contenttypes.models import ContentType
+from django.core.files.storage import Storage
 from django.test import TestCase
 from django.test import TestCase
 
 
-from extras.models import ExportTemplate
-from extras.utils import filename_from_model, image_upload
+from extras.models import ExportTemplate, ImageAttachment
+from extras.utils import _build_image_attachment_path, filename_from_model, image_upload
 from tenancy.models import ContactGroup, TenantGroup
 from tenancy.models import ContactGroup, TenantGroup
 from wireless.models import WirelessLANGroup
 from wireless.models import WirelessLANGroup
 
 
@@ -22,6 +24,25 @@ class FilenameFromModelTests(TestCase):
             self.assertEqual(filename_from_model(model), expected)
             self.assertEqual(filename_from_model(model), expected)
 
 
 
 
class OverwriteStyleStorage(Storage):
    """
    Mimic an overwrite-style backend (for example, S3 with file_overwrite=True),
    where get_available_name() returns the incoming name unchanged.
    """

    def __init__(self, existing_names=None):
        # Names the fake backend should report as already present.
        self.existing_names = set(existing_names or [])

    def exists(self, name):
        return name in self.existing_names

    def get_available_name(self, name, max_length=None):
        # Overwrite-style: never rename to avoid collisions.
        return name

    def get_alternative_name(self, file_root, file_ext):
        # Deterministic suffix so tests can assert exact resulting names.
        return f'{file_root}_sdmmer4{file_ext}'
+
+
 class ImageUploadTests(TestCase):
 class ImageUploadTests(TestCase):
     @classmethod
     @classmethod
     def setUpTestData(cls):
     def setUpTestData(cls):
@@ -31,16 +52,18 @@ class ImageUploadTests(TestCase):
 
 
     def _stub_instance(self, object_id=12, name=None):
     def _stub_instance(self, object_id=12, name=None):
         """
         """
-        Creates a minimal stub for use with the `image_upload()` function.
-
-        This method generates an instance of `SimpleNamespace` containing a set
-        of attributes required to simulate the expected input for the
-        `image_upload()` method.
-        It is designed to simplify testing or processing by providing a
-        lightweight representation of an object.
+        Creates a minimal stub for use with image attachment path generation.
         """
         """
         return SimpleNamespace(object_type=self.ct_rack, object_id=object_id, name=name)
         return SimpleNamespace(object_type=self.ct_rack, object_id=object_id, name=name)
 
 
+    def _bound_instance(self, *, storage, object_id=12, name=None, max_length=100):
+        return SimpleNamespace(
+            object_type=self.ct_rack,
+            object_id=object_id,
+            name=name,
+            image=SimpleNamespace(field=SimpleNamespace(storage=storage, max_length=max_length)),
+        )
+
     def _second_segment(self, path: str):
     def _second_segment(self, path: str):
         """
         """
         Extracts and returns the portion of the input string after the
         Extracts and returns the portion of the input string after the
@@ -53,7 +76,7 @@ class ImageUploadTests(TestCase):
         Tests handling of a Windows file path with a fake directory and extension.
         Tests handling of a Windows file path with a fake directory and extension.
         """
         """
         inst = self._stub_instance(name=None)
         inst = self._stub_instance(name=None)
-        path = image_upload(inst, r'C:\fake_path\MyPhoto.JPG')
+        path = _build_image_attachment_path(inst, r'C:\fake_path\MyPhoto.JPG')
         # Base directory and single-level path
         # Base directory and single-level path
         seg2 = self._second_segment(path)
         seg2 = self._second_segment(path)
         self.assertTrue(path.startswith('image-attachments/rack_12_'))
         self.assertTrue(path.startswith('image-attachments/rack_12_'))
@@ -67,7 +90,7 @@ class ImageUploadTests(TestCase):
         create subdirectories.
         create subdirectories.
         """
         """
         inst = self._stub_instance(name='5/31/23')
         inst = self._stub_instance(name='5/31/23')
-        path = image_upload(inst, 'image.png')
+        path = _build_image_attachment_path(inst, 'image.png')
         seg2 = self._second_segment(path)
         seg2 = self._second_segment(path)
         self.assertTrue(seg2.startswith('rack_12_'))
         self.assertTrue(seg2.startswith('rack_12_'))
         self.assertNotIn('/', seg2)
         self.assertNotIn('/', seg2)
@@ -80,7 +103,7 @@ class ImageUploadTests(TestCase):
         into a single directory name without creating subdirectories.
         into a single directory name without creating subdirectories.
         """
         """
         inst = self._stub_instance(name=r'5\31\23')
         inst = self._stub_instance(name=r'5\31\23')
-        path = image_upload(inst, 'image_name.png')
+        path = _build_image_attachment_path(inst, 'image_name.png')
 
 
         seg2 = self._second_segment(path)
         seg2 = self._second_segment(path)
         self.assertTrue(seg2.startswith('rack_12_'))
         self.assertTrue(seg2.startswith('rack_12_'))
@@ -93,7 +116,7 @@ class ImageUploadTests(TestCase):
         Tests the output path format generated by the `image_upload` function.
         Tests the output path format generated by the `image_upload` function.
         """
         """
         inst = self._stub_instance(object_id=99, name='label')
         inst = self._stub_instance(object_id=99, name='label')
-        path = image_upload(inst, 'a.webp')
+        path = _build_image_attachment_path(inst, 'a.webp')
         # The second segment must begin with "rack_99_"
         # The second segment must begin with "rack_99_"
         seg2 = self._second_segment(path)
         seg2 = self._second_segment(path)
         self.assertTrue(seg2.startswith('rack_99_'))
         self.assertTrue(seg2.startswith('rack_99_'))
@@ -105,7 +128,7 @@ class ImageUploadTests(TestCase):
         is omitted.
         is omitted.
         """
         """
         inst = self._stub_instance(name='test')
         inst = self._stub_instance(name='test')
-        path = image_upload(inst, 'document.txt')
+        path = _build_image_attachment_path(inst, 'document.txt')
 
 
         seg2 = self._second_segment(path)
         seg2 = self._second_segment(path)
         self.assertTrue(seg2.startswith('rack_12_test'))
         self.assertTrue(seg2.startswith('rack_12_test'))
@@ -121,7 +144,7 @@ class ImageUploadTests(TestCase):
         # Suppose the instance name has surrounding whitespace and
         # Suppose the instance name has surrounding whitespace and
         # extra slashes.
         # extra slashes.
         inst = self._stub_instance(name='  my/complex\\name  ')
         inst = self._stub_instance(name='  my/complex\\name  ')
-        path = image_upload(inst, 'irrelevant.png')
+        path = _build_image_attachment_path(inst, 'irrelevant.png')
 
 
         # The output should be flattened and sanitized.
         # The output should be flattened and sanitized.
         # We expect the name to be transformed into a valid filename without
         # We expect the name to be transformed into a valid filename without
@@ -141,7 +164,7 @@ class ImageUploadTests(TestCase):
         for name in ['2025/09/12', r'2025\09\12']:
         for name in ['2025/09/12', r'2025\09\12']:
             with self.subTest(name=name):
             with self.subTest(name=name):
                 inst = self._stub_instance(name=name)
                 inst = self._stub_instance(name=name)
-                path = image_upload(inst, 'x.jpeg')
+                path = _build_image_attachment_path(inst, 'x.jpeg')
                 seg2 = self._second_segment(path)
                 seg2 = self._second_segment(path)
                 self.assertTrue(seg2.startswith('rack_12_'))
                 self.assertTrue(seg2.startswith('rack_12_'))
                 self.assertNotIn('/', seg2)
                 self.assertNotIn('/', seg2)
@@ -154,7 +177,49 @@ class ImageUploadTests(TestCase):
         SuspiciousFileOperation, the fallback default is used.
         SuspiciousFileOperation, the fallback default is used.
         """
         """
         inst = self._stub_instance(name=' ')
         inst = self._stub_instance(name=' ')
-        path = image_upload(inst, 'sample.png')
+        path = _build_image_attachment_path(inst, 'sample.png')
         # Expect the fallback name 'unnamed' to be used.
         # Expect the fallback name 'unnamed' to be used.
         self.assertIn('unnamed', path)
         self.assertIn('unnamed', path)
         self.assertTrue(path.startswith('image-attachments/rack_12_'))
         self.assertTrue(path.startswith('image-attachments/rack_12_'))
+
+    def test_image_upload_preserves_original_name_when_available(self):
+        inst = self._bound_instance(
+            storage=OverwriteStyleStorage(),
+            name='action-buttons',
+        )
+
+        path = image_upload(inst, 'action-buttons.png')
+
+        self.assertEqual(path, 'image-attachments/rack_12_action-buttons.png')
+
+    def test_image_upload_uses_base_collision_handling_with_overwrite_style_storage(self):
+        inst = self._bound_instance(
+            storage=OverwriteStyleStorage(existing_names={'image-attachments/rack_12_action-buttons.png'}),
+            name='action-buttons',
+        )
+
+        path = image_upload(inst, 'action-buttons.png')
+
+        self.assertEqual(
+            path,
+            'image-attachments/rack_12_action-buttons_sdmmer4.png',
+        )
+
+    def test_image_field_generate_filename_uses_image_upload_collision_handling(self):
+        field = ImageAttachment._meta.get_field('image')
+        instance = ImageAttachment(
+            object_type=self.ct_rack,
+            object_id=12,
+        )
+
+        with patch.object(
+            field,
+            'storage',
+            OverwriteStyleStorage(existing_names={'image-attachments/rack_12_action-buttons.png'}),
+        ):
+            path = field.generate_filename(instance, 'action-buttons.png')
+
+        self.assertEqual(
+            path,
+            'image-attachments/rack_12_action-buttons_sdmmer4.png',
+        )

+ 16 - 2
netbox/extras/tests/test_views.py

@@ -924,7 +924,14 @@ class ScriptValidationErrorTest(TestCase):
 
 
     @classmethod
     @classmethod
     def setUpTestData(cls):
     def setUpTestData(cls):
-        module = ScriptModule.objects.create(file_root=ManagedFileRootPathChoices.SCRIPTS, file_path='test_script.py')
+        # Avoid trying to import a non-existent on-disk module during setup.
+        # This test creates the Script row explicitly and monkey-patches
+        # Script.python_class below.
+        with patch.object(ScriptModule, 'sync_classes'):
+            module = ScriptModule.objects.create(
+                file_root=ManagedFileRootPathChoices.SCRIPTS,
+                file_path='test_script.py',
+            )
         cls.script = Script.objects.create(module=module, name='Test script', is_executable=True)
         cls.script = Script.objects.create(module=module, name='Test script', is_executable=True)
 
 
     def setUp(self):
     def setUp(self):
@@ -986,7 +993,14 @@ class ScriptDefaultValuesTest(TestCase):
 
 
     @classmethod
     @classmethod
     def setUpTestData(cls):
     def setUpTestData(cls):
-        module = ScriptModule.objects.create(file_root=ManagedFileRootPathChoices.SCRIPTS, file_path='test_script.py')
+        # Avoid trying to import a non-existent on-disk module during setup.
+        # This test creates the Script row explicitly and monkey-patches
+        # Script.python_class below.
+        with patch.object(ScriptModule, 'sync_classes'):
+            module = ScriptModule.objects.create(
+                file_root=ManagedFileRootPathChoices.SCRIPTS,
+                file_path='test_script.py',
+            )
         cls.script = Script.objects.create(module=module, name='Test script', is_executable=True)
         cls.script = Script.objects.create(module=module, name='Test script', is_executable=True)
 
 
     def setUp(self):
     def setUp(self):

+ 23 - 9
netbox/extras/utils.py

@@ -2,7 +2,7 @@ import importlib
 from pathlib import Path
 from pathlib import Path
 
 
 from django.core.exceptions import ImproperlyConfigured, SuspiciousFileOperation
 from django.core.exceptions import ImproperlyConfigured, SuspiciousFileOperation
-from django.core.files.storage import default_storage
+from django.core.files.storage import Storage, default_storage
 from django.core.files.utils import validate_file_name
 from django.core.files.utils import validate_file_name
 from django.db import models
 from django.db import models
 from django.db.models import Q
 from django.db.models import Q
@@ -67,15 +67,13 @@ def is_taggable(obj):
     return False
     return False
 
 
 
 
-def image_upload(instance, filename):
+def _build_image_attachment_path(instance, filename, *, storage=default_storage):
     """
     """
-    Return a path for uploading image attachments.
+    Build a deterministic relative path for an image attachment.
 
 
     - Normalizes browser paths (e.g., C:\\fake_path\\photo.jpg)
     - Normalizes browser paths (e.g., C:\\fake_path\\photo.jpg)
     - Uses the instance.name if provided (sanitized to a *basename*, no ext)
     - Uses the instance.name if provided (sanitized to a *basename*, no ext)
     - Prefixes with a machine-friendly identifier
     - Prefixes with a machine-friendly identifier
-
-    Note: Relies on Django's default_storage utility.
     """
     """
     upload_dir = 'image-attachments'
     upload_dir = 'image-attachments'
     default_filename = 'unnamed'
     default_filename = 'unnamed'
@@ -92,22 +90,38 @@ def image_upload(instance, filename):
     # Rely on Django's get_valid_filename to perform sanitization.
     # Rely on Django's get_valid_filename to perform sanitization.
     stem = (instance.name or file_path.stem).strip()
     stem = (instance.name or file_path.stem).strip()
     try:
     try:
-        safe_stem = default_storage.get_valid_name(stem)
+        safe_stem = storage.get_valid_name(stem)
     except SuspiciousFileOperation:
     except SuspiciousFileOperation:
         safe_stem = default_filename
         safe_stem = default_filename
 
 
     # Append the uploaded extension only if it's an allowed image type
     # Append the uploaded extension only if it's an allowed image type
-    final_name = f"{safe_stem}.{ext}" if ext in allowed_img_extensions else safe_stem
+    final_name = f'{safe_stem}.{ext}' if ext in allowed_img_extensions else safe_stem
 
 
     # Create a machine-friendly prefix from the instance
     # Create a machine-friendly prefix from the instance
-    prefix = f"{instance.object_type.model}_{instance.object_id}"
-    name_with_path = f"{upload_dir}/{prefix}_{final_name}"
+    prefix = f'{instance.object_type.model}_{instance.object_id}'
+    name_with_path = f'{upload_dir}/{prefix}_{final_name}'
 
 
     # Validate the generated relative path (blocks absolute/traversal)
     # Validate the generated relative path (blocks absolute/traversal)
     validate_file_name(name_with_path, allow_relative_path=True)
     validate_file_name(name_with_path, allow_relative_path=True)
     return name_with_path
     return name_with_path
 
 
 
 
+def image_upload(instance, filename):
+    """
+    Return a relative upload path for an image attachment, applying Django's
+    usual suffix-on-collision behavior regardless of storage backend.
+    """
+    field = instance.image.field
+    name_with_path = _build_image_attachment_path(instance, filename, storage=field.storage)
+
+    # Intentionally call Django's base Storage implementation here. Some
+    # backends override get_available_name() to reuse the incoming name
+    # unchanged, but we want Django's normal suffix-on-collision behavior
+    # while still dispatching exists() / get_alternative_name() to the
+    # configured storage instance.
+    return Storage.get_available_name(field.storage, name_with_path, max_length=field.max_length)
+
+
 def is_script(obj):
 def is_script(obj):
     """
     """
     Returns True if the object is a Script or Report.
     Returns True if the object is a Script or Report.

+ 1 - 1
netbox/ipam/migrations/0089_default_ordering_indexes.py

@@ -4,7 +4,7 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 class Migration(migrations.Migration):
     dependencies = [
     dependencies = [
         ('contenttypes', '0002_remove_content_type_name'),
         ('contenttypes', '0002_remove_content_type_name'),
-        ('dcim', '0231_default_ordering_indexes'),
+        ('dcim', '0232_default_ordering_indexes'),
         ('extras', '0137_default_ordering_indexes'),
         ('extras', '0137_default_ordering_indexes'),
         ('ipam', '0088_rename_vlangroup_total_vlan_ids'),
         ('ipam', '0088_rename_vlangroup_total_vlan_ids'),
         ('tenancy', '0023_add_mptt_tree_indexes'),
         ('tenancy', '0023_add_mptt_tree_indexes'),

+ 1 - 1
netbox/ipam/tables/vlans.py

@@ -251,6 +251,6 @@ class VLANTranslationRuleTable(NetBoxTable):
     class Meta(NetBoxTable.Meta):
     class Meta(NetBoxTable.Meta):
         model = VLANTranslationRule
         model = VLANTranslationRule
         fields = (
         fields = (
-            'pk', 'id', 'name', 'policy', 'local_vid', 'remote_vid', 'description', 'tags', 'created', 'last_updated',
+            'pk', 'id', 'policy', 'local_vid', 'remote_vid', 'description', 'tags', 'created', 'last_updated',
         )
         )
         default_columns = ('pk', 'policy', 'local_vid', 'remote_vid', 'description')
         default_columns = ('pk', 'policy', 'local_vid', 'remote_vid', 'description')

+ 85 - 2
netbox/ipam/tests/test_tables.py

@@ -1,9 +1,10 @@
 from django.test import RequestFactory, TestCase
 from django.test import RequestFactory, TestCase
 from netaddr import IPNetwork
 from netaddr import IPNetwork
 
 
-from ipam.models import IPAddress, IPRange, Prefix
-from ipam.tables import AnnotatedIPAddressTable
+from ipam.models import FHRPGroupAssignment, IPAddress, IPRange, Prefix
+from ipam.tables import *
 from ipam.utils import annotate_ip_space
 from ipam.utils import annotate_ip_space
+from utilities.testing import TableTestCases
 
 
 
 
 class AnnotatedIPAddressTableTest(TestCase):
 class AnnotatedIPAddressTableTest(TestCase):
@@ -168,3 +169,85 @@ class AnnotatedIPAddressTableTest(TestCase):
         # Pools are fully usable
         # Pools are fully usable
         self.assertEqual(available.first_ip, '2001:db8:1::/126')
         self.assertEqual(available.first_ip, '2001:db8:1::/126')
         self.assertEqual(available.size, 4)
         self.assertEqual(available.size, 4)
+
+
+#
+# Table ordering tests
+#
+
+class VRFTableTest(TableTestCases.StandardTableTestCase):
+    table = VRFTable
+
+
+class RouteTargetTableTest(TableTestCases.StandardTableTestCase):
+    table = RouteTargetTable
+
+
+class RIRTableTest(TableTestCases.StandardTableTestCase):
+    table = RIRTable
+
+
+class AggregateTableTest(TableTestCases.StandardTableTestCase):
+    table = AggregateTable
+
+
+class RoleTableTest(TableTestCases.StandardTableTestCase):
+    table = RoleTable
+
+
+class PrefixTableTest(TableTestCases.StandardTableTestCase):
+    table = PrefixTable
+
+
+class IPRangeTableTest(TableTestCases.StandardTableTestCase):
+    table = IPRangeTable
+
+
+class IPAddressTableTest(TableTestCases.StandardTableTestCase):
+    table = IPAddressTable
+
+
+class FHRPGroupTableTest(TableTestCases.StandardTableTestCase):
+    table = FHRPGroupTable
+
+
+class FHRPGroupAssignmentTableTest(TableTestCases.StandardTableTestCase):
+    table = FHRPGroupAssignmentTable
+
+    # No ObjectListView exists for this table; it is only rendered inline on
+    # the FHRPGroup detail view. Provide an explicit queryset source.
+    queryset_sources = [
+        ('FHRPGroupAssignment.objects.all()', FHRPGroupAssignment.objects.all()),
+    ]
+
+
+class VLANGroupTableTest(TableTestCases.StandardTableTestCase):
+    table = VLANGroupTable
+
+
+class VLANTableTest(TableTestCases.StandardTableTestCase):
+    table = VLANTable
+
+
+class VLANTranslationPolicyTableTest(TableTestCases.StandardTableTestCase):
+    table = VLANTranslationPolicyTable
+
+
+class VLANTranslationRuleTableTest(TableTestCases.StandardTableTestCase):
+    table = VLANTranslationRuleTable
+
+
+class ASNRangeTableTest(TableTestCases.StandardTableTestCase):
+    table = ASNRangeTable
+
+
+class ASNTableTest(TableTestCases.StandardTableTestCase):
+    table = ASNTable
+
+
+class ServiceTemplateTableTest(TableTestCases.StandardTableTestCase):
+    table = ServiceTemplateTable
+
+
+class ServiceTableTest(TableTestCases.StandardTableTestCase):
+    table = ServiceTable

+ 4 - 3
netbox/netbox/api/serializers/base.py

@@ -95,9 +95,6 @@ class ValidatedModelSerializer(BaseModelSerializer):
 
 
         attrs = data.copy()
         attrs = data.copy()
 
 
-        # Remove custom field data (if any) prior to model validation
-        attrs.pop('custom_fields', None)
-
         # Skip ManyToManyFields
         # Skip ManyToManyFields
         opts = self.Meta.model._meta
         opts = self.Meta.model._meta
         m2m_values = {}
         m2m_values = {}
@@ -116,4 +113,8 @@ class ValidatedModelSerializer(BaseModelSerializer):
         # Skip uniqueness validation of individual fields inside `full_clean()` (this is handled by the serializer)
         # Skip uniqueness validation of individual fields inside `full_clean()` (this is handled by the serializer)
         instance.full_clean(validate_unique=False)
         instance.full_clean(validate_unique=False)
 
 
+        # Preserve any normalization performed by model.clean() (e.g. stale custom field pruning)
+        if 'custom_field_data' in attrs:
+            data['custom_field_data'] = instance.custom_field_data
+
         return data
         return data

+ 4 - 0
netbox/netbox/configuration_testing.py

@@ -20,6 +20,10 @@ PLUGINS = [
     'netbox.tests.dummy_plugin',
     'netbox.tests.dummy_plugin',
 ]
 ]
 
 
+RQ = {
+    'COMMIT_MODE': 'auto',
+}
+
 REDIS = {
 REDIS = {
     'tasks': {
     'tasks': {
         'HOST': 'localhost',
         'HOST': 'localhost',

+ 1 - 1
netbox/netbox/models/features.py

@@ -467,7 +467,7 @@ class JobsMixin(models.Model):
         """
         """
         Return a list of the most recent jobs for this instance.
         Return a list of the most recent jobs for this instance.
         """
         """
-        return self.jobs.filter(status__in=JobStatusChoices.TERMINAL_STATE_CHOICES).order_by('-created').defer('data')
+        return self.jobs.filter(status__in=JobStatusChoices.TERMINAL_STATE_CHOICES).order_by('-started').defer('data')
 
 
 
 
 class JournalingMixin(models.Model):
 class JournalingMixin(models.Model):

+ 1 - 0
netbox/netbox/settings.py

@@ -168,6 +168,7 @@ REMOTE_AUTH_USER_FIRST_NAME = getattr(configuration, 'REMOTE_AUTH_USER_FIRST_NAM
 REMOTE_AUTH_USER_LAST_NAME = getattr(configuration, 'REMOTE_AUTH_USER_LAST_NAME', 'HTTP_REMOTE_USER_LAST_NAME')
 REMOTE_AUTH_USER_LAST_NAME = getattr(configuration, 'REMOTE_AUTH_USER_LAST_NAME', 'HTTP_REMOTE_USER_LAST_NAME')
 # Required by extras/migrations/0109_script_models.py
 # Required by extras/migrations/0109_script_models.py
 REPORTS_ROOT = getattr(configuration, 'REPORTS_ROOT', os.path.join(BASE_DIR, 'reports')).rstrip('/')
 REPORTS_ROOT = getattr(configuration, 'REPORTS_ROOT', os.path.join(BASE_DIR, 'reports')).rstrip('/')
+RQ = getattr(configuration, 'RQ', {})
 RQ_DEFAULT_TIMEOUT = getattr(configuration, 'RQ_DEFAULT_TIMEOUT', 300)
 RQ_DEFAULT_TIMEOUT = getattr(configuration, 'RQ_DEFAULT_TIMEOUT', 300)
 RQ_RETRY_INTERVAL = getattr(configuration, 'RQ_RETRY_INTERVAL', 60)
 RQ_RETRY_INTERVAL = getattr(configuration, 'RQ_RETRY_INTERVAL', 60)
 RQ_RETRY_MAX = getattr(configuration, 'RQ_RETRY_MAX', 0)
 RQ_RETRY_MAX = getattr(configuration, 'RQ_RETRY_MAX', 0)

+ 9 - 2
netbox/netbox/ui/attrs.py

@@ -275,19 +275,22 @@ class RelatedObjectAttr(ObjectAttribute):
          linkify (bool): If True, the rendered value will be hyperlinked to the related object's detail view
          linkify (bool): If True, the rendered value will be hyperlinked to the related object's detail view
          grouped_by (str): A second-order object to annotate alongside the related object; for example, an attribute
          grouped_by (str): A second-order object to annotate alongside the related object; for example, an attribute
             representing the dcim.Site model might specify grouped_by="region"
             representing the dcim.Site model might specify grouped_by="region"
+         colored (bool): If True, render the object as a colored badge when it exposes a `color` attribute
     """
     """
     template_name = 'ui/attrs/object.html'
     template_name = 'ui/attrs/object.html'
 
 
-    def __init__(self, *args, linkify=None, grouped_by=None, **kwargs):
+    def __init__(self, *args, linkify=None, grouped_by=None, colored=False, **kwargs):
         super().__init__(*args, **kwargs)
         super().__init__(*args, **kwargs)
         self.linkify = linkify
         self.linkify = linkify
         self.grouped_by = grouped_by
         self.grouped_by = grouped_by
+        self.colored = colored
 
 
     def get_context(self, obj, attr, value, context):
     def get_context(self, obj, attr, value, context):
         group = getattr(value, self.grouped_by, None) if self.grouped_by else None
         group = getattr(value, self.grouped_by, None) if self.grouped_by else None
         return {
         return {
             'linkify': self.linkify,
             'linkify': self.linkify,
             'group': group,
             'group': group,
+            'colored': self.colored,
         }
         }
 
 
 
 
@@ -344,6 +347,7 @@ class RelatedObjectListAttr(RelatedObjectAttr):
 
 
         return {
         return {
             'linkify': self.linkify,
             'linkify': self.linkify,
+            'colored': self.colored,
             'items': [
             'items': [
                 {
                 {
                     'value': item,
                     'value': item,
@@ -376,13 +380,15 @@ class NestedObjectAttr(ObjectAttribute):
     Parameters:
     Parameters:
          linkify (bool): If True, the rendered value will be hyperlinked to the related object's detail view
          linkify (bool): If True, the rendered value will be hyperlinked to the related object's detail view
          max_depth (int): Maximum number of ancestors to display (default: all)
          max_depth (int): Maximum number of ancestors to display (default: all)
+         colored (bool): If True, render the object as a colored badge when it exposes a `color` attribute
     """
     """
     template_name = 'ui/attrs/nested_object.html'
     template_name = 'ui/attrs/nested_object.html'
 
 
-    def __init__(self, *args, linkify=None, max_depth=None, **kwargs):
+    def __init__(self, *args, linkify=None, max_depth=None, colored=False, **kwargs):
         super().__init__(*args, **kwargs)
         super().__init__(*args, **kwargs)
         self.linkify = linkify
         self.linkify = linkify
         self.max_depth = max_depth
         self.max_depth = max_depth
+        self.colored = colored
 
 
     def get_context(self, obj, attr, value, context):
     def get_context(self, obj, attr, value, context):
         nodes = []
         nodes = []
@@ -393,6 +399,7 @@ class NestedObjectAttr(ObjectAttribute):
         return {
         return {
             'nodes': nodes,
             'nodes': nodes,
             'linkify': self.linkify,
             'linkify': self.linkify,
+            'colored': self.colored,
         }
         }
 
 
 
 

+ 2 - 2
netbox/release.yaml

@@ -1,3 +1,3 @@
-version: "4.5.6"
+version: "4.5.7"
 edition: "Community"
 edition: "Community"
-published: "2026-03-31"
+published: "2026-04-03"

+ 2 - 2
netbox/templates/extras/inc/script_list_content.html

@@ -11,7 +11,7 @@
       <h2 class="card-header" id="module{{ module.pk }}">
       <h2 class="card-header" id="module{{ module.pk }}">
         <i class="mdi mdi-file-document-outline"></i> {{ module }}
         <i class="mdi mdi-file-document-outline"></i> {{ module }}
         <div class="card-actions">
         <div class="card-actions">
-          {% if perms.extras.edit_scriptmodule %}
+          {% if perms.extras.change_scriptmodule %}
             <a href="{% url 'extras:scriptmodule_edit' pk=module.pk %}" class="btn btn-ghost-warning btn-sm">
             <a href="{% url 'extras:scriptmodule_edit' pk=module.pk %}" class="btn btn-ghost-warning btn-sm">
               <i class="mdi mdi-pencil" aria-hidden="true"></i> {% trans "Edit" %}
               <i class="mdi mdi-pencil" aria-hidden="true"></i> {% trans "Edit" %}
             </a>
             </a>
@@ -54,7 +54,7 @@
                     <td>{{ script.python_class.description|markdown|placeholder }}</td>
                     <td>{{ script.python_class.description|markdown|placeholder }}</td>
                     {% if last_job %}
                     {% if last_job %}
                       <td>
                       <td>
-                        <a href="{% url 'extras:script_result' job_pk=last_job.pk %}">{{ last_job.created|isodatetime }}</a>
+                        <a href="{% url 'extras:script_result' job_pk=last_job.pk %}">{{ last_job.started|isodatetime }}</a>
                       </td>
                       </td>
                       <td>
                       <td>
                         {% badge last_job.get_status_display last_job.get_status_color %}
                         {% badge last_job.get_status_display last_job.get_status_color %}

+ 9 - 1
netbox/templates/ui/attrs/nested_object.html

@@ -1,7 +1,15 @@
 <ol class="breadcrumb" aria-label="breadcrumbs">
 <ol class="breadcrumb" aria-label="breadcrumbs">
   {% for node in nodes %}
   {% for node in nodes %}
     <li class="breadcrumb-item">
     <li class="breadcrumb-item">
-      {% if linkify %}
+      {% if forloop.last and colored and node.color %}
+        {% if linkify %}
+          {% with badge_url=node.get_absolute_url %}
+            {% badge node hex_color=node.color url=badge_url %}
+          {% endwith %}
+        {% else %}
+          {% badge node hex_color=node.color %}
+        {% endif %}
+      {% elif linkify %}
         <a href="{{ node.get_absolute_url }}">{{ node }}</a>
         <a href="{{ node.get_absolute_url }}">{{ node }}</a>
       {% else %}
       {% else %}
         {{ node }}
         {{ node }}

+ 26 - 2
netbox/templates/ui/attrs/object.html

@@ -5,10 +5,34 @@
       {% if linkify %}{{ group|linkify }}{% else %}{{ group }}{% endif %}
       {% if linkify %}{{ group|linkify }}{% else %}{{ group }}{% endif %}
     </li>
     </li>
     <li class="breadcrumb-item">
     <li class="breadcrumb-item">
-      {% if linkify %}{{ value|linkify }}{% else %}{{ value }}{% endif %}
+      {% if colored and value.color %}
+        {% if linkify %}
+          {% with badge_url=value.get_absolute_url %}
+            {% badge value hex_color=value.color url=badge_url %}
+          {% endwith %}
+        {% else %}
+          {% badge value hex_color=value.color %}
+        {% endif %}
+      {% elif linkify %}
+        {{ value|linkify }}
+      {% else %}
+        {{ value }}
+      {% endif %}
     </li>
     </li>
   </ol>
   </ol>
 {% else %}
 {% else %}
   {# Display only the object #}
   {# Display only the object #}
-  {% if linkify %}{{ value|linkify }}{% else %}{{ value }}{% endif %}
+  {% if colored and value.color %}
+    {% if linkify %}
+      {% with badge_url=value.get_absolute_url %}
+        {% badge value hex_color=value.color url=badge_url %}
+      {% endwith %}
+    {% else %}
+      {% badge value hex_color=value.color %}
+    {% endif %}
+  {% elif linkify %}
+    {{ value|linkify }}
+  {% else %}
+    {{ value }}
+  {% endif %}
 {% endif %}
 {% endif %}

+ 1 - 1
netbox/templates/ui/attrs/object_list.html

@@ -1,7 +1,7 @@
 <ul class="list-unstyled mb-0">
 <ul class="list-unstyled mb-0">
   {% for item in items %}
   {% for item in items %}
     <li>
     <li>
-      {% include "ui/attrs/object.html" with value=item.value group=item.group linkify=linkify only %}
+      {% include "ui/attrs/object.html" with value=item.value group=item.group linkify=linkify colored=colored only %}
     </li>
     </li>
   {% endfor %}
   {% endfor %}
   {% if overflow_indicator %}
   {% if overflow_indicator %}

+ 2 - 2
netbox/templates/virtualization/panels/cluster_resources.html

@@ -12,7 +12,7 @@
       <th scope="row"><i class="mdi mdi-chip"></i> {% trans "Memory" %}</th>
       <th scope="row"><i class="mdi mdi-chip"></i> {% trans "Memory" %}</th>
       <td>
       <td>
         {% if memory_sum %}
         {% if memory_sum %}
-          <span title={{ memory_sum }}>{{ memory_sum|humanize_ram_megabytes }}</span>
+          <span title={{ memory_sum }}>{{ memory_sum|humanize_ram_capacity }}</span>
         {% else %}
         {% else %}
           {{ ''|placeholder }}
           {{ ''|placeholder }}
         {% endif %}
         {% endif %}
@@ -24,7 +24,7 @@
       </th>
       </th>
       <td>
       <td>
         {% if disk_sum %}
         {% if disk_sum %}
-          {{ disk_sum|humanize_disk_megabytes }}
+          {{ disk_sum|humanize_disk_capacity }}
         {% else %}
         {% else %}
           {{ ''|placeholder }}
           {{ ''|placeholder }}
         {% endif %}
         {% endif %}

+ 2 - 2
netbox/templates/virtualization/panels/virtual_machine_resources.html

@@ -12,7 +12,7 @@
       <th scope="row"><i class="mdi mdi-chip"></i> {% trans "Memory" %}</th>
       <th scope="row"><i class="mdi mdi-chip"></i> {% trans "Memory" %}</th>
       <td>
       <td>
         {% if object.memory %}
         {% if object.memory %}
-          <span title={{ object.memory }}>{{ object.memory|humanize_ram_megabytes }}</span>
+          <span title={{ object.memory }}>{{ object.memory|humanize_ram_capacity }}</span>
         {% else %}
         {% else %}
           {{ ''|placeholder }}
           {{ ''|placeholder }}
         {% endif %}
         {% endif %}
@@ -24,7 +24,7 @@
       </th>
       </th>
       <td>
       <td>
         {% if object.disk %}
         {% if object.disk %}
-          {{ object.disk|humanize_disk_megabytes }}
+          {{ object.disk|humanize_disk_capacity }}
         {% else %}
         {% else %}
           {{ ''|placeholder }}
           {{ ''|placeholder }}
         {% endif %}
         {% endif %}

+ 1 - 1
netbox/templates/virtualization/virtualdisk/attrs/size.html

@@ -1,2 +1,2 @@
 {% load helpers %}
 {% load helpers %}
-{{ value|humanize_disk_megabytes }}
+{{ value|humanize_disk_capacity }}

+ 26 - 0
netbox/tenancy/tests/test_tables.py

@@ -0,0 +1,26 @@
+from tenancy.tables import *
+from utilities.testing import TableTestCases
+
+
+class TenantGroupTableTest(TableTestCases.StandardTableTestCase):
+    table = TenantGroupTable
+
+
+class TenantTableTest(TableTestCases.StandardTableTestCase):
+    table = TenantTable
+
+
+class ContactGroupTableTest(TableTestCases.StandardTableTestCase):
+    table = ContactGroupTable
+
+
+class ContactRoleTableTest(TableTestCases.StandardTableTestCase):
+    table = ContactRoleTable
+
+
+class ContactTableTest(TableTestCases.StandardTableTestCase):
+    table = ContactTable
+
+
+class ContactAssignmentTableTest(TableTestCases.StandardTableTestCase):
+    table = ContactAssignmentTable

BIN
netbox/translations/cs/LC_MESSAGES/django.mo


File diff suppressed because it is too large
+ 275 - 295
netbox/translations/cs/LC_MESSAGES/django.po


BIN
netbox/translations/da/LC_MESSAGES/django.mo


File diff suppressed because it is too large
+ 275 - 295
netbox/translations/da/LC_MESSAGES/django.po


BIN
netbox/translations/de/LC_MESSAGES/django.mo


File diff suppressed because it is too large
+ 277 - 297
netbox/translations/de/LC_MESSAGES/django.po


File diff suppressed because it is too large
+ 233 - 233
netbox/translations/en/LC_MESSAGES/django.po


BIN
netbox/translations/es/LC_MESSAGES/django.mo


File diff suppressed because it is too large
+ 275 - 295
netbox/translations/es/LC_MESSAGES/django.po


BIN
netbox/translations/fr/LC_MESSAGES/django.mo


File diff suppressed because it is too large
+ 281 - 300
netbox/translations/fr/LC_MESSAGES/django.po


BIN
netbox/translations/it/LC_MESSAGES/django.mo


File diff suppressed because it is too large
+ 275 - 295
netbox/translations/it/LC_MESSAGES/django.po


BIN
netbox/translations/ja/LC_MESSAGES/django.mo


File diff suppressed because it is too large
+ 273 - 295
netbox/translations/ja/LC_MESSAGES/django.po


BIN
netbox/translations/lv/LC_MESSAGES/django.mo


File diff suppressed because it is too large
+ 275 - 295
netbox/translations/lv/LC_MESSAGES/django.po


BIN
netbox/translations/nl/LC_MESSAGES/django.mo


File diff suppressed because it is too large
+ 275 - 295
netbox/translations/nl/LC_MESSAGES/django.po


Some files were not shown because too many files changed in this diff