
Merge branch 'main' into 17824-hotkeys

Elliott Balsley 5 months ago
Parent
Commit
499ebb8ab4
86 changed files with 6007 additions and 4659 deletions
  1. 1 1
      .github/ISSUE_TEMPLATE/01-feature_request.yaml
  2. 9 9
      .github/ISSUE_TEMPLATE/02-bug_report.yaml
  3. 0 3
      .github/ISSUE_TEMPLATE/config.yml
  4. 0 1
      README.md
  5. 1 1
      SECURITY.md
  6. 1 2
      base_requirements.txt
  7. 75 2
      contrib/generated_schema.json
  8. 167 86
      contrib/openapi.json
  9. 1 1
      docs/administration/authentication/microsoft-entra-id.md
  10. 1 1
      docs/administration/netbox-shell.md
  11. 1 1
      docs/configuration/data-validation.md
  12. 40 0
      docs/configuration/system.md
  13. 1 1
      docs/getting-started/planning.md
  14. 2 2
      docs/plugins/development/index.md
  15. 5 0
      docs/plugins/development/tables.md
  16. 1 1
      docs/release-notes/version-3.0.md
  17. 42 0
      docs/release-notes/version-4.4.md
  18. 4 0
      netbox/core/tables/data.py
  19. 3 16
      netbox/core/tables/plugins.py
  20. 26 0
      netbox/core/tables/template_code.py
  21. 9 3
      netbox/core/views.py
  22. 300 36
      netbox/dcim/choices.py
  23. 2 0
      netbox/dcim/graphql/enums.py
  24. 24 0
      netbox/dcim/graphql/filters.py
  25. 116 49
      netbox/dcim/models/cables.py
  26. 3 3
      netbox/dcim/models/device_components.py
  27. 5 0
      netbox/dcim/tables/devices.py
  28. 48 19
      netbox/dcim/tests/test_views.py
  29. 3 3
      netbox/extras/graphql/filters.py
  30. 1 1
      netbox/extras/jobs.py
  31. 4 2
      netbox/extras/models/models.py
  32. 14 5
      netbox/extras/querysets.py
  33. 3 2
      netbox/extras/tables/tables.py
  34. 153 31
      netbox/extras/tests/test_models.py
  35. 142 1
      netbox/extras/tests/test_utils.py
  36. 42 12
      netbox/extras/utils.py
  37. 2 2
      netbox/extras/views.py
  38. 1 0
      netbox/ipam/filtersets.py
  39. 0 7
      netbox/ipam/forms/model_forms.py
  40. 1 1
      netbox/ipam/utils.py
  41. 0 0
      netbox/project-static/dist/netbox.js
  42. 0 0
      netbox/project-static/dist/netbox.js.map
  43. 1 0
      netbox/project-static/img/plugin-default.svg
  44. 3 3
      netbox/project-static/package.json
  45. 12 12
      netbox/project-static/yarn.lock
  46. 2 2
      netbox/release.yaml
  47. 10 0
      netbox/templates/extras/object_imageattachments.html
  48. BIN
      netbox/translations/cs/LC_MESSAGES/django.mo
  49. 302 293
      netbox/translations/cs/LC_MESSAGES/django.po
  50. BIN
      netbox/translations/da/LC_MESSAGES/django.mo
  51. 302 293
      netbox/translations/da/LC_MESSAGES/django.po
  52. BIN
      netbox/translations/de/LC_MESSAGES/django.mo
  53. 304 295
      netbox/translations/de/LC_MESSAGES/django.po
  54. 216 156
      netbox/translations/en/LC_MESSAGES/django.po
  55. BIN
      netbox/translations/es/LC_MESSAGES/django.mo
  56. 302 293
      netbox/translations/es/LC_MESSAGES/django.po
  57. BIN
      netbox/translations/fr/LC_MESSAGES/django.mo
  58. 304 295
      netbox/translations/fr/LC_MESSAGES/django.po
  59. BIN
      netbox/translations/it/LC_MESSAGES/django.mo
  60. 302 293
      netbox/translations/it/LC_MESSAGES/django.po
  61. BIN
      netbox/translations/ja/LC_MESSAGES/django.mo
  62. 302 293
      netbox/translations/ja/LC_MESSAGES/django.po
  63. BIN
      netbox/translations/nl/LC_MESSAGES/django.mo
  64. 302 293
      netbox/translations/nl/LC_MESSAGES/django.po
  65. BIN
      netbox/translations/pl/LC_MESSAGES/django.mo
  66. 302 293
      netbox/translations/pl/LC_MESSAGES/django.po
  67. BIN
      netbox/translations/pt/LC_MESSAGES/django.mo
  68. 302 293
      netbox/translations/pt/LC_MESSAGES/django.po
  69. BIN
      netbox/translations/ru/LC_MESSAGES/django.mo
  70. 304 295
      netbox/translations/ru/LC_MESSAGES/django.po
  71. BIN
      netbox/translations/tr/LC_MESSAGES/django.mo
  72. 302 293
      netbox/translations/tr/LC_MESSAGES/django.po
  73. BIN
      netbox/translations/uk/LC_MESSAGES/django.mo
  74. 302 293
      netbox/translations/uk/LC_MESSAGES/django.po
  75. BIN
      netbox/translations/zh/LC_MESSAGES/django.mo
  76. 302 293
      netbox/translations/zh/LC_MESSAGES/django.po
  77. 34 12
      netbox/users/forms/model_forms.py
  78. 22 0
      netbox/users/migrations/0012_drop_django_admin_log_table.py
  79. 6 1
      netbox/users/models/permissions.py
  80. 21 6
      netbox/utilities/serialization.py
  81. 4 4
      netbox/utilities/testing/api.py
  82. 16 0
      netbox/utilities/testing/base.py
  83. 111 38
      netbox/utilities/testing/views.py
  84. 49 0
      netbox/utilities/tests/test_serialization.py
  85. 1 1
      pyproject.toml
  86. 11 11
      requirements.txt

+ 1 - 1
.github/ISSUE_TEMPLATE/01-feature_request.yaml

@@ -15,7 +15,7 @@ body:
     attributes:
       label: NetBox version
       description: What version of NetBox are you currently running?
-      placeholder: v4.4.0
+      placeholder: v4.4.1
     validations:
       required: true
   - type: dropdown

+ 9 - 9
.github/ISSUE_TEMPLATE/02-bug_report.yaml

@@ -8,26 +8,26 @@ body:
     attributes:
       value: >
         **NOTE:** This form is only for reporting _reproducible bugs_ in a current NetBox
-        installation. If you're having trouble with installation or just looking for
-        assistance with using NetBox, please visit our
+        release. If you're having trouble with installation or just looking for assistance
+        using NetBox, please visit our
         [discussion forum](https://github.com/netbox-community/netbox/discussions) instead.
   - type: dropdown
     attributes:
-      label: Deployment Type
+      label: NetBox Edition
       description: >
-        How are you running NetBox? (For issues with the Docker image, please go to the
-        [netbox-docker](https://github.com/netbox-community/netbox-docker) repo.)
+        Users of [NetBox Cloud](https://netboxlabs.com/netbox-cloud/) or
+        [NetBox Enterprise](https://netboxlabs.com/netbox-enterprise/), please contact the
+        [NetBox Labs](https://netboxlabs.com/) support team for assistance to ensure your
+        request receives immediate attention.
       options:
-        - NetBox Cloud
-        - NetBox Enterprise
-        - Self-hosted
+        - NetBox Community
     validations:
       required: true
   - type: input
     attributes:
       label: NetBox Version
       description: What version of NetBox are you currently running?
-      placeholder: v4.4.0
+      placeholder: v4.4.1
     validations:
       required: true
   - type: dropdown

+ 0 - 3
.github/ISSUE_TEMPLATE/config.yml

@@ -13,9 +13,6 @@ contact_links:
   - name: 🌎 Correct a Translation
     url: https://explore.transifex.com/netbox-community/netbox/
     about: "Spot an incorrect translation? You can propose a fix on Transifex."
-  - name: 💡 Plugin Idea
-    url: https://plugin-ideas.netbox.dev
-    about: "Have an idea for a plugin? Head over to the ideas board!"
   - name: 💬 Community Slack
     url: https://netdev.chat
     about: "Join #netbox on the NetDev Community Slack for assistance with installation issues and other problems."

+ 0 - 1
README.md

@@ -91,7 +91,6 @@ NetBox automatically logs the creation, modification, and deletion of all manage
 * Join the conversation on [the discussion forum](https://github.com/netbox-community/netbox/discussions) and [Slack](https://netdev.chat/)!
 * Already a power user? You can [suggest a feature](https://github.com/netbox-community/netbox/issues/new?assignees=&labels=type%3A+feature&template=feature_request.yaml) or [report a bug](https://github.com/netbox-community/netbox/issues/new?assignees=&labels=type%3A+bug&template=bug_report.yaml) on GitHub.
 * Contributions from the community are encouraged and appreciated! Check out our [contributing guide](CONTRIBUTING.md) to get started.
-* [Share your idea](https://plugin-ideas.netbox.dev/) for a new plugin, or [learn how to build one](https://github.com/netbox-community/netbox-plugin-tutorial) yourself!
 
 ## Screenshots
 

+ 1 - 1
SECURITY.md

@@ -34,4 +34,4 @@ For any security concerns regarding the community-maintained Docker image for Ne
 
 ### Bug Bounties
 
-As NetBox is provided as free open source software, we do not offer any monetary compensation for vulnerability or bug reports, however your contributions are greatly appreciated.
+As NetBox is provided as free open source software, we do not offer any monetary compensation for vulnerability or bug reports; however, your contributions are greatly appreciated.

+ 1 - 2
base_requirements.txt

@@ -139,8 +139,7 @@ requests
 
 # rq
 # https://github.com/rq/rq/blob/master/CHANGES.md
-# RQ v2.5 drops support for Redis < 5.0
-rq==2.4.1
+rq
 
 # Django app for social-auth-core
 # https://github.com/python-social-auth/social-app-django/blob/master/CHANGELOG.md

+ 75 - 2
contrib/generated_schema.json

@@ -330,14 +330,87 @@
                         "100base-lfx",
                         "100base-tx",
                         "100base-t1",
-                        "1000base-t",
+                        "1000base-bx10-d",
+                        "1000base-bx10-u",
+                        "1000base-cx",
+                        "1000base-cwdm",
+                        "1000base-dwdm",
+                        "1000base-ex",
                         "1000base-sx",
+                        "1000base-lsx",
                         "1000base-lx",
+                        "1000base-lx10",
+                        "1000base-t",
                         "1000base-tx",
+                        "1000base-zx",
                         "2.5gbase-t",
                         "5gbase-t",
-                        "10gbase-t",
+                        "10gbase-br-d",
+                        "10gbase-br-u",
                         "10gbase-cx4",
+                        "10gbase-er",
+                        "10gbase-lr",
+                        "10gbase-lrm",
+                        "10gbase-lx4",
+                        "10gbase-sr",
+                        "10gbase-t",
+                        "10gbase-zr",
+                        "25gbase-cr",
+                        "25gbase-er",
+                        "25gbase-lr",
+                        "25gbase-sr",
+                        "25gbase-t",
+                        "40gbase-cr4",
+                        "40gbase-er4",
+                        "40gbase-fr4",
+                        "40gbase-lr4",
+                        "40gbase-sr4",
+                        "50gbase-cr",
+                        "50gbase-er",
+                        "50gbase-fr",
+                        "50gbase-lr",
+                        "50gbase-sr",
+                        "100gbase-cr1",
+                        "100gbase-cr2",
+                        "100gbase-cr4",
+                        "100gbase-cr10",
+                        "100gbase-dr",
+                        "100gbase-er4",
+                        "100gbase-fr1",
+                        "100gbase-lr1",
+                        "100gbase-lr4",
+                        "100gbase-sr1",
+                        "100gbase-sr1.2",
+                        "100gbase-sr2",
+                        "100gbase-sr4",
+                        "100gbase-sr10",
+                        "100gbase-zr",
+                        "200gbase-cr2",
+                        "200gbase-cr4",
+                        "200gbase-sr2",
+                        "200gbase-sr4",
+                        "200gbase-dr4",
+                        "200gbase-er4",
+                        "200gbase-fr4",
+                        "200gbase-lr4",
+                        "200gbase-vr2",
+                        "400gbase-cr4",
+                        "400gbase-dr4",
+                        "400gbase-er8",
+                        "400gbase-fr4",
+                        "400gbase-fr8",
+                        "400gbase-lr4",
+                        "400gbase-lr8",
+                        "400gbase-sr4",
+                        "400gbase-sr4_2",
+                        "400gbase-sr8",
+                        "400gbase-sr16",
+                        "400gbase-vr4",
+                        "400gbase-zr",
+                        "800gbase-cr8",
+                        "800gbase-dr8",
+                        "800gbase-sr8",
+                        "800gbase-vr8",
                         "100base-x-sfp",
                         "1000base-x-gbic",
                         "1000base-x-sfp",

File diff suppressed because it is too large
+ 167 - 86
contrib/openapi.json


+ 1 - 1
docs/administration/authentication/microsoft-entra-id.md

@@ -25,7 +25,7 @@ Once finished, make note of the application (client) ID; this will be used when
 ![Completed app registration](../../media/authentication/azure_ad_app_registration_created.png)
 
 !!! tip "Multitenant authentication"
-    NetBox also supports multitenant authentication via Azure AD, however it requires a different backend and an additional configuration parameter. Please see the [`python-social-auth` documentation](https://python-social-auth.readthedocs.io/en/latest/backends/azuread.html#tenant-support) for details concerning multitenant authentication.
+    NetBox also supports multitenant authentication via Azure AD; however, it requires a different backend and an additional configuration parameter. Please see the [`python-social-auth` documentation](https://python-social-auth.readthedocs.io/en/latest/backends/azuread.html#tenant-support) for details concerning multitenant authentication.
 
 ### 3. Create a secret
 

+ 1 - 1
docs/administration/netbox-shell.md

@@ -106,7 +106,7 @@ This approach can span multiple levels of relations. For example, the following
 ```
 
 !!! note
-    While the above query is functional, it's not very efficient. There are ways to optimize such requests, however they are out of scope for this document. For more information, see the [Django queryset method reference](https://docs.djangoproject.com/en/stable/ref/models/querysets/) documentation.
+    While the above query is functional, it's not very efficient. There are ways to optimize such requests; however, they are out of scope for this document. For more information, see the [Django queryset method reference](https://docs.djangoproject.com/en/stable/ref/models/querysets/) documentation.
 
 Reverse relationships can be traversed as well. For example, the following will find all devices with an interface named "em0":
 

+ 1 - 1
docs/configuration/data-validation.md

@@ -17,7 +17,7 @@ CUSTOM_VALIDATORS = {
         },
         "my_plugin.validators.Validator1"
     ],
-    "dim.device": [
+    "dcim.device": [
         "my_plugin.validators.Validator1"
     ]
 }

+ 40 - 0
docs/configuration/system.md

@@ -257,6 +257,46 @@ The specific configuration settings for each storage backend can be found in the
 !!! note
     Any keys defined in the `STORAGES` configuration parameter replace those in the default configuration. It is only necessary to define keys within the `STORAGES` for the specific backend(s) you wish to configure.
 
+### Environment Variables and Third-Party Libraries
+
+NetBox uses an explicit Python configuration approach rather than automatic environment variable detection. While this provides clear configuration management and version control capabilities, it affects how some third-party libraries like `django-storages` function within NetBox's context.
+
+Many Django libraries (including `django-storages`) expect to automatically detect environment variables like `AWS_STORAGE_BUCKET_NAME` or `AWS_S3_ACCESS_KEY_ID`. However, NetBox's configuration processing prevents this automatic detection from working as documented in some of these libraries.
+
+When using third-party libraries that rely on environment variable detection, you may need to explicitly read environment variables in your NetBox `configuration.py`:
+
+```python
+import os
+
+STORAGES = {
+    'default': {
+        'BACKEND': 'storages.backends.s3.S3Storage',
+        'OPTIONS': {
+            'bucket_name': os.environ.get('AWS_STORAGE_BUCKET_NAME'),
+            'access_key': os.environ.get('AWS_S3_ACCESS_KEY_ID'),
+            'secret_key': os.environ.get('AWS_S3_SECRET_ACCESS_KEY'),
+            'endpoint_url': os.environ.get('AWS_S3_ENDPOINT_URL'),
+            'location': 'media/',
+        }
+    },
+    'staticfiles': {
+        'BACKEND': 'storages.backends.s3.S3Storage',
+        'OPTIONS': {
+            'bucket_name': os.environ.get('AWS_STORAGE_BUCKET_NAME'),
+            'access_key': os.environ.get('AWS_S3_ACCESS_KEY_ID'),
+            'secret_key': os.environ.get('AWS_S3_SECRET_ACCESS_KEY'),
+            'endpoint_url': os.environ.get('AWS_S3_ENDPOINT_URL'),
+            'location': 'static/',
+        }
+    },
+}
+```
+
+This approach works because the environment variables are resolved during NetBox's configuration processing, before the third-party library attempts its own environment variable detection.
+
+!!! warning "Configuration Behavior"
+    Simply setting environment variables like `AWS_STORAGE_BUCKET_NAME` without explicitly reading them in your configuration will not work. The variables must be read using `os.environ.get()` within your `configuration.py` file.
+
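As a minimal illustrative sketch of the warning above (the `require_env` helper is hypothetical and not part of NetBox or this commit), a `configuration.py` can read the variables explicitly and fail fast when one is missing:

```python
import os


def require_env(name):
    """Return the named environment variable, or raise a clear error if it is unset."""
    value = os.environ.get(name)
    if value is None:
        raise RuntimeError(f"Required environment variable {name} is not set")
    return value


# Values are resolved while configuration.py is processed, before django-storages runs.
STORAGES = {
    'default': {
        'BACKEND': 'storages.backends.s3.S3Storage',
        'OPTIONS': {
            'bucket_name': require_env('AWS_STORAGE_BUCKET_NAME'),
            'access_key': require_env('AWS_S3_ACCESS_KEY_ID'),
            'secret_key': require_env('AWS_S3_SECRET_ACCESS_KEY'),
            'location': 'media/',
        },
    },
}
```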
 ---
 
 ## TIME_ZONE

+ 1 - 1
docs/getting-started/planning.md

@@ -17,7 +17,7 @@ Dedicate some time to take stock of your own sources of truth for your infrastru
 
 * **Multiple conflicting sources** for a given domain. For example, there may be multiple versions of a spreadsheet circulating, each of which asserts a conflicting set of data.
* **Sources with no domain defined.** You may find that different teams within your organization use different tools for the same purpose, with no formal definition of when either should be used.
-* **Inaccessible data formatting.** Some tools are better suited for programmatic usage than others. For example, spreadsheets are generally very easy to parse and export, however free-form notes on wiki or similar application are much more difficult to consume.
+* **Inaccessible data formatting.** Some tools are better suited for programmatic usage than others. For example, spreadsheets are generally very easy to parse and export; however, free-form notes on wiki or similar application are much more difficult to consume.
 * **There is no source of truth.** Sometimes you'll find that a source of truth simply doesn't exist for a domain. For example, when assigning IP addresses, operators may be just using any (presumed) available IP from a subnet without ever recording its usage.
 
 See if you can identify each domain of infrastructure data for your organization, and the source of truth for each. Once you have these compiled, you'll need to determine what belongs in NetBox.

+ 2 - 2
docs/plugins/development/index.md

@@ -66,7 +66,7 @@ The top level is the project root, which can have any name that you like. Immedi
 * `README.md` - A brief introduction to your plugin, how to install and configure it, where to find help, and any other pertinent information. It is recommended to write `README` files using a markup language such as Markdown to enable human-friendly display.
 * The plugin source directory. This must be a valid Python package name, typically comprising only lowercase letters, numbers, and underscores.
 
-The plugin source directory contains all the actual Python code and other resources used by your plugin. Its structure is left to the author's discretion, however it is recommended to follow best practices as outlined in the [Django documentation](https://docs.djangoproject.com/en/stable/intro/reusable-apps/). At a minimum, this directory **must** contain an `__init__.py` file containing an instance of NetBox's `PluginConfig` class, discussed below.
+The plugin source directory contains all the actual Python code and other resources used by your plugin. Its structure is left to the author's discretion; however, it is recommended to follow best practices as outlined in the [Django documentation](https://docs.djangoproject.com/en/stable/intro/reusable-apps/). At a minimum, this directory **must** contain an `__init__.py` file containing an instance of NetBox's `PluginConfig` class, discussed below.
 
 **Note:** The [Cookiecutter NetBox Plugin](https://github.com/netbox-community/cookiecutter-netbox-plugin) can be used to auto-generate all the needed directories and files for a new plugin.
 
@@ -186,7 +186,7 @@ Many of these are self-explanatory, but for more information, see the [pyproject
 
 ## Create a Virtual Environment
 
-It is strongly recommended to create a Python [virtual environment](https://docs.python.org/3/tutorial/venv.html) for the development of your plugin, as opposed to using system-wide packages. This will afford you complete control over the installed versions of all dependencies and avoid conflict with system packages. This environment can live wherever you'd like, however it should be excluded from revision control. (A popular convention is to keep all virtual environments in the user's home directory, e.g. `~/.virtualenvs/`.)
+It is strongly recommended to create a Python [virtual environment](https://docs.python.org/3/tutorial/venv.html) for the development of your plugin, as opposed to using system-wide packages. This will afford you complete control over the installed versions of all dependencies and avoid conflict with system packages. This environment can live wherever you'd like; however, it should be excluded from revision control. (A popular convention is to keep all virtual environments in the user's home directory, e.g. `~/.virtualenvs/`.)
 
 ```shell
 python3 -m venv ~/.virtualenvs/my_plugin

+ 5 - 0
docs/plugins/development/tables.md

@@ -47,6 +47,11 @@ table.configure(request)
 
 This will automatically apply any user-specific preferences for the table. (If using a generic view provided by NetBox, table configuration is handled automatically.)
 
+
+### Bulk Edit and Delete Actions
+
+Bulk edit and delete buttons are automatically added to the table, if there is an appropriate view registered to the `${modelname}_bulk_edit` or `${modelname}_bulk_delete` path name.
+
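A rough sketch of what such registration might look like in a plugin, assuming a hypothetical `Widget` model with matching table and bulk edit form (none of these names come from this commit):

```python
# Condensed sketch of a hypothetical plugin's views and URL configuration.
from django.urls import path
from netbox.views import generic

from . import forms, models, tables


class WidgetBulkEditView(generic.BulkEditView):
    queryset = models.Widget.objects.all()
    table = tables.WidgetTable
    form = forms.WidgetBulkEditForm


class WidgetBulkDeleteView(generic.BulkDeleteView):
    queryset = models.Widget.objects.all()
    table = tables.WidgetTable


# Registering these path names is what allows the bulk action buttons to appear on WidgetTable.
urlpatterns = [
    path('widgets/edit/', WidgetBulkEditView.as_view(), name='widget_bulk_edit'),
    path('widgets/delete/', WidgetBulkDeleteView.as_view(), name='widget_bulk_delete'),
]
```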
 ## Columns
 
 The table column classes listed below are supported for use in plugins. These classes can be imported from `netbox.tables.columns`.

+ 1 - 1
docs/release-notes/version-3.0.md

@@ -357,7 +357,7 @@ And the response:
 ...
 ```
 
-All GraphQL requests are made at the `/graphql` URL (which also serves the GraphiQL UI). The API is currently read-only, however users who wish to disable it until needed can do so by setting the `GRAPHQL_ENABLED` configuration parameter to False. For more detail on NetBox's GraphQL implementation, see [the GraphQL API documentation](../integrations/graphql-api.md).
+All GraphQL requests are made at the `/graphql` URL (which also serves the GraphiQL UI). The API is currently read-only; however, users who wish to disable it until needed can do so by setting the `GRAPHQL_ENABLED` configuration parameter to False. For more detail on NetBox's GraphQL implementation, see [the GraphQL API documentation](../integrations/graphql-api.md).
 
 #### IP Ranges ([#834](https://github.com/netbox-community/netbox/issues/834))
 

+ 42 - 0
docs/release-notes/version-4.4.md

@@ -1,5 +1,47 @@
 # NetBox v4.4
 
+## v4.4.1 (2025-09-16)
+
+### Enhancements
+
+* [#15492](https://github.com/netbox-community/netbox/issues/15492) - Enable cloning of permissions
+* [#16381](https://github.com/netbox-community/netbox/issues/16381) - Display script result timestamps in system timezone
+* [#19262](https://github.com/netbox-community/netbox/issues/19262) - No longer restrict FHRP group assignment by assigned IP address
+* [#19408](https://github.com/netbox-community/netbox/issues/19408) - Support export templates for circuit terminations and virtual circuit terminations
+* [#19428](https://github.com/netbox-community/netbox/issues/19428) - Add an optional U height field to the devices table
+* [#19547](https://github.com/netbox-community/netbox/issues/19547) - Add individual "sync" buttons in data sources table
+* [#19865](https://github.com/netbox-community/netbox/issues/19865) - Reorganize cable type groupings
+* [#20222](https://github.com/netbox-community/netbox/issues/20222) - Enable the `HttpOnly` flag for CSRF cookie
+* [#20237](https://github.com/netbox-community/netbox/issues/20237) - Include VPN tunnel groups in global search results
+* [#20241](https://github.com/netbox-community/netbox/issues/20241) - Record A & B terminations in cable changelog data
+* [#20277](https://github.com/netbox-community/netbox/issues/20277) - Add support for attribute assignment to `deserialize_object()` utility
+* [#20321](https://github.com/netbox-community/netbox/issues/20321) - Add physical media types for transceiver interfaces
+* [#20347](https://github.com/netbox-community/netbox/issues/20347) - Add Wi-Fi Alliance aliases to 802.11 interface types
+
+### Bug Fixes
+
+* [#19729](https://github.com/netbox-community/netbox/issues/19729) - Restore `kind` filter for interfaces in GraphQL API
+* [#19744](https://github.com/netbox-community/netbox/issues/19744) - Plugins list should be orderable by "active" column
+* [#19851](https://github.com/netbox-community/netbox/issues/19851) - Fix `ValueError` complaining of missing `scope` when bulk importing wireless LANs
+* [#19896](https://github.com/netbox-community/netbox/issues/19896) - Min/max values for decimal custom fields should accept decimal values
+* [#20197](https://github.com/netbox-community/netbox/issues/20197) - Correct validation for virtual chassis parent interface
+* [#20215](https://github.com/netbox-community/netbox/issues/20215) - All GraphQL filters for config contexts should be optional
+* [#20217](https://github.com/netbox-community/netbox/issues/20217) - Remove "0 VLANs available" row at end of VLAN range table
+* [#20221](https://github.com/netbox-community/netbox/issues/20221) - JSON fields should not coerce empty dictionaries to null
+* [#20227](https://github.com/netbox-community/netbox/issues/20227) - Ensure consistent padding of Markdown content
+* [#20234](https://github.com/netbox-community/netbox/issues/20234) - Fix "add" button link for prerequisite object warning in UI
+* [#20236](https://github.com/netbox-community/netbox/issues/20236) - Strip invalid characters from uploaded image file names
+* [#20238](https://github.com/netbox-community/netbox/issues/20238) - Fix support for outside IP assignment during bulk import of tunnel terminations
+* [#20242](https://github.com/netbox-community/netbox/issues/20242) - Avoid `AttributeError` exception on background jobs with no request ID
+* [#20252](https://github.com/netbox-community/netbox/issues/20252) - Remove generic AddObject from ObjectChildrenView to prevent duplicate "add" buttons
+* [#20264](https://github.com/netbox-community/netbox/issues/20264) - Fix rendering of default icon in plugins list
+* [#20272](https://github.com/netbox-community/netbox/issues/20272) - ConfigContexts assigned to ancestor locations should apply to device/VM
+* [#20282](https://github.com/netbox-community/netbox/issues/20282) - Fix styling of prerequisite objects warning
+* [#20298](https://github.com/netbox-community/netbox/issues/20298) - Display a placeholder when an image thumbnail fails to load
+* [#20327](https://github.com/netbox-community/netbox/issues/20327) - Avoid calling `distinct()` on device/VM queryset when fetching config context data
+
+---
+
 ## v4.4.0 (2025-09-02)
 
 ### New Features

+ 4 - 0
netbox/core/tables/data.py

@@ -4,6 +4,7 @@ import django_tables2 as tables
 from core.models import *
 from netbox.tables import NetBoxTable, columns
 from .columns import BackendTypeColumn
+from .template_code import DATA_SOURCE_SYNC_BUTTON
 
 __all__ = (
     'DataFileTable',
@@ -37,6 +38,9 @@ class DataSourceTable(NetBoxTable):
     tags = columns.TagColumn(
         url_name='core:datasource_list',
     )
+    actions = columns.ActionsColumn(
+        extra_buttons=DATA_SOURCE_SYNC_BUTTON,
+    )
 
     class Meta(NetBoxTable.Meta):
         model = DataSource

+ 3 - 16
netbox/core/tables/plugins.py

@@ -1,10 +1,8 @@
 import django_tables2 as tables
-from django.urls import reverse
-from django.utils.safestring import mark_safe
 from django.utils.translation import gettext_lazy as _
 
 from netbox.tables import BaseTable, columns
-from .template_code import PLUGIN_IS_INSTALLED
+from .template_code import PLUGIN_IS_INSTALLED, PLUGIN_NAME_TEMPLATE
 
 __all__ = (
     'CatalogPluginTable',
@@ -12,12 +10,6 @@ __all__ = (
 )
 
 
-PLUGIN_NAME_TEMPLATE = """
-<img class="plugin-icon" src="{{ record.icon_url }}">
-<a href="{% url 'core:plugin' record.config_name %}">{{ record.title_long }}</a>
-"""
-
-
 class PluginVersionTable(BaseTable):
     version = tables.Column(
         verbose_name=_('Version')
@@ -61,6 +53,7 @@ class CatalogPluginTable(BaseTable):
         verbose_name=_('Local')
     )
     is_installed = columns.TemplateColumn(
+        accessor=tables.A('is_loaded'),
         verbose_name=_('Active'),
         template_code=PLUGIN_IS_INSTALLED
     )
@@ -93,10 +86,4 @@ class CatalogPluginTable(BaseTable):
         )
         # List installed plugins first, then certified plugins, then
         # everything else (with each tranche ordered alphabetically)
-        order_by = ('-is_installed', '-is_certified', 'name')
-
-    def render_title_long(self, value, record):
-        if record.static:
-            return value
-        url = reverse('core:plugin', args=[record.config_name])
-        return mark_safe(f"<a href='{url}'>{value}</a>")
+        order_by = ('-is_installed', '-is_certified', 'title_long')

+ 26 - 0
netbox/core/tables/template_code.py

@@ -26,3 +26,29 @@ PLUGIN_IS_INSTALLED = """
     <span class="text-muted">&mdash;</span>
 {% endif %}
 """
+
+PLUGIN_NAME_TEMPLATE = """
+{% load static %}
+{% if record.icon_url %}
+    <img class="plugin-icon" src="{{ record.icon_url }}">
+{% else %}
+    <img class="plugin-icon" src="{% static 'plugin-default.svg' %}">
+{% endif %}
+<a href="{% url 'core:plugin' record.config_name %}">{{ record.title_long }}</a>
+"""
+
+DATA_SOURCE_SYNC_BUTTON = """
+{% load helpers %}
+{% load i18n %}
+{% if perms.core.sync_datasource %}
+    {% if record.ready_for_sync %}
+        <button class="btn btn-primary btn-sm" type="submit" formaction="{% url 'core:datasource_sync' pk=record.pk %}?return_url={{ request.get_full_path|urlencode }}" formmethod="post">
+            <i class="mdi mdi-sync" aria-hidden="true"></i> {% trans "Sync" %}
+        </button>
+    {% else %}
+        <button class="btn btn-primary btn-sm" disabled>
+            <i class="mdi mdi-sync" aria-hidden="true"></i> {% trans "Sync" %}
+        </button>
+    {% endif %}
+{% endif %}
+"""

+ 9 - 3
netbox/core/views.py

@@ -33,7 +33,13 @@ from utilities.forms import ConfirmationForm
 from utilities.htmx import htmx_partial
 from utilities.json import ConfigJSONEncoder
 from utilities.query import count_related
-from utilities.views import ContentTypePermissionRequiredMixin, GetRelatedModelsMixin, ViewTab, register_model_view
+from utilities.views import (
+    ContentTypePermissionRequiredMixin,
+    GetRelatedModelsMixin,
+    GetReturnURLMixin,
+    ViewTab,
+    register_model_view,
+)
 from . import filtersets, forms, tables
 from .jobs import SyncDataSourceJob
 from .models import *
@@ -66,7 +72,7 @@ class DataSourceView(GetRelatedModelsMixin, generic.ObjectView):
 
 
 @register_model_view(DataSource, 'sync')
-class DataSourceSyncView(BaseObjectView):
+class DataSourceSyncView(GetReturnURLMixin, BaseObjectView):
     queryset = DataSource.objects.all()
 
     def get_required_permission(self):
@@ -85,7 +91,7 @@ class DataSourceSyncView(BaseObjectView):
             request,
             _("Queued job #{id} to sync {datasource}").format(id=job.pk, datasource=datasource)
         )
-        return redirect(datasource.get_absolute_url())
+        return redirect(self.get_return_url(request, datasource))
 
 
 @register_model_view(DataSource, 'add', detail=False)

+ 300 - 36
netbox/dcim/choices.py

@@ -889,22 +889,118 @@ class InterfaceTypeChoices(ChoiceSet):
     TYPE_BRIDGE = 'bridge'
     TYPE_LAG = 'lag'
 
-    # Ethernet
+    # FastEthernet
     TYPE_100ME_FX = '100base-fx'
     TYPE_100ME_LFX = '100base-lfx'
-    TYPE_100ME_FIXED = '100base-tx'
+    TYPE_100ME_FIXED = '100base-tx'  # TODO: Rename to _TX
     TYPE_100ME_T1 = '100base-t1'
+
+    # GigabitEthernet
+    TYPE_1GE_BX10_D = '1000base-bx10-d'
+    TYPE_1GE_BX10_U = '1000base-bx10-u'
+    TYPE_1GE_CWDM = '1000base-cwdm'
+    TYPE_1GE_CX = '1000base-cx'
+    TYPE_1GE_DWDM = '1000base-dwdm'
+    TYPE_1GE_EX = '1000base-ex'
+    TYPE_1GE_SX_FIXED = '1000base-sx'  # TODO: Drop _FIXED suffix
+    TYPE_1GE_LSX = '1000base-lsx'
+    TYPE_1GE_LX_FIXED = '1000base-lx'  # TODO: Drop _FIXED suffix
+    TYPE_1GE_LX10 = '1000base-lx10'
+    TYPE_1GE_FIXED = '1000base-t'  # TODO: Rename to _T
+    TYPE_1GE_TX_FIXED = '1000base-tx'  # TODO: Drop _FIXED suffix
+    TYPE_1GE_ZX = '1000base-zx'
+
+    # 2.5/5 Gbps Ethernet
+    TYPE_2GE_FIXED = '2.5gbase-t'  # TODO: Rename to _T
+    TYPE_5GE_FIXED = '5gbase-t'  # TODO: Rename to _T
+
+    # 10 Gbps Ethernet
+    TYPE_10GE_BR_D = '10gbase-br-d'
+    TYPE_10GE_BR_U = '10gbase-br-u'
+    TYPE_10GE_CX4 = '10gbase-cx4'
+    TYPE_10GE_ER = '10gbase-er'
+    TYPE_10GE_LR = '10gbase-lr'
+    TYPE_10GE_LRM = '10gbase-lrm'
+    TYPE_10GE_LX4 = '10gbase-lx4'
+    TYPE_10GE_SR = '10gbase-sr'
+    TYPE_10GE_FIXED = '10gbase-t'
+    TYPE_10GE_ZR = '10gbase-zr'
+
+    # 25 Gbps Ethernet
+    TYPE_25GE_CR = '25gbase-cr'
+    TYPE_25GE_ER = '25gbase-er'
+    TYPE_25GE_LR = '25gbase-lr'
+    TYPE_25GE_SR = '25gbase-sr'
+    TYPE_25GE_T = '25gbase-t'
+
+    # 40 Gbps Ethernet
+    TYPE_40GE_CR4 = '40gbase-cr4'
+    TYPE_40GE_ER4 = '40gbase-er4'
+    TYPE_40GE_FR4 = '40gbase-fr4'
+    TYPE_40GE_LR4 = '40gbase-lr4'
+    TYPE_40GE_SR4 = '40gbase-sr4'
+
+    # 50 Gbps Ethernet
+    TYPE_50GE_CR = '50gbase-cr'
+    TYPE_50GE_ER = '50gbase-er'
+    TYPE_50GE_FR = '50gbase-fr'
+    TYPE_50GE_LR = '50gbase-lr'
+    TYPE_50GE_SR = '50gbase-sr'
+
+    # 100 Gbps Ethernet
+    TYPE_100GE_CR1 = '100gbase-cr1'
+    TYPE_100GE_CR2 = '100gbase-cr2'
+    TYPE_100GE_CR4 = '100gbase-cr4'
+    TYPE_100GE_CR10 = '100gbase-cr10'
+    TYPE_100GE_CWDM4 = '100gbase-cwdm4'
+    TYPE_100GE_DR = '100gbase-dr'
+    TYPE_100GE_FR1 = '100gbase-fr1'
+    TYPE_100GE_ER4 = '100gbase-er4'
+    TYPE_100GE_LR1 = '100gbase-lr1'
+    TYPE_100GE_LR4 = '100gbase-lr4'
+    TYPE_100GE_SR1 = '100gbase-sr1'
+    TYPE_100GE_SR1_2 = '100gbase-sr1.2'
+    TYPE_100GE_SR2 = '100gbase-sr2'
+    TYPE_100GE_SR4 = '100gbase-sr4'
+    TYPE_100GE_SR10 = '100gbase-sr10'
+    TYPE_100GE_ZR = '100gbase-zr'
+
+    # 200 Gbps Ethernet
+    TYPE_200GE_CR2 = '200gbase-cr2'
+    TYPE_200GE_CR4 = '200gbase-cr4'
+    TYPE_200GE_SR2 = '200gbase-sr2'
+    TYPE_200GE_SR4 = '200gbase-sr4'
+    TYPE_200GE_DR4 = '200gbase-dr4'
+    TYPE_200GE_FR4 = '200gbase-fr4'
+    TYPE_200GE_LR4 = '200gbase-lr4'
+    TYPE_200GE_ER4 = '200gbase-er4'
+    TYPE_200GE_VR2 = '200gbase-vr2'
+
+    # 400 Gbps Ethernet
+    TYPE_400GE_CR4 = '400gbase-cr4'
+    TYPE_400GE_DR4 = '400gbase-dr4'
+    TYPE_400GE_ER8 = '400gbase-er8'
+    TYPE_400GE_FR4 = '400gbase-fr4'
+    TYPE_400GE_FR8 = '400gbase-fr8'
+    TYPE_400GE_LR4 = '400gbase-lr4'
+    TYPE_400GE_LR8 = '400gbase-lr8'
+    TYPE_400GE_SR4 = '400gbase-sr4'
+    TYPE_400GE_SR4_2 = '400gbase-sr4_2'
+    TYPE_400GE_SR8 = '400gbase-sr8'
+    TYPE_400GE_SR16 = '400gbase-sr16'
+    TYPE_400GE_VR4 = '400gbase-vr4'
+    TYPE_400GE_ZR = '400gbase-zr'
+
+    # 800 Gbps Ethernet
+    TYPE_800GE_CR8 = '800gbase-cr8'
+    TYPE_800GE_DR8 = '800gbase-dr8'
+    TYPE_800GE_SR8 = '800gbase-sr8'
+    TYPE_800GE_VR8 = '800gbase-vr8'
+
+    # Ethernet (modular)
     TYPE_100ME_SFP = '100base-x-sfp'
-    TYPE_1GE_FIXED = '1000base-t'
-    TYPE_1GE_SX_FIXED = '1000base-sx'
-    TYPE_1GE_LX_FIXED = '1000base-lx'
-    TYPE_1GE_TX_FIXED = '1000base-tx'
     TYPE_1GE_GBIC = '1000base-x-gbic'
     TYPE_1GE_SFP = '1000base-x-sfp'
-    TYPE_2GE_FIXED = '2.5gbase-t'
-    TYPE_5GE_FIXED = '5gbase-t'
-    TYPE_10GE_FIXED = '10gbase-t'
-    TYPE_10GE_CX4 = '10gbase-cx4'
     TYPE_10GE_SFP_PLUS = '10gbase-x-sfpp'
     TYPE_10GE_XFP = '10gbase-x-xfp'
     TYPE_10GE_XENPAK = '10gbase-x-xenpak'
@@ -935,7 +1031,7 @@ class InterfaceTypeChoices(ChoiceSet):
     TYPE_800GE_QSFP_DD = '800gbase-x-qsfpdd'
     TYPE_800GE_OSFP = '800gbase-x-osfp'
 
-    # Ethernet Backplane
+    # Backplane Ethernet
     TYPE_1GE_KX = '1000base-kx'
     TYPE_2GE_KX = '2.5gbase-kx'
     TYPE_5GE_KR = '5gbase-kr'
@@ -1054,24 +1150,147 @@ class InterfaceTypeChoices(ChoiceSet):
             ),
         ),
         (
-            _('Ethernet (fixed)'),
+            _('FastEthernet (100 Mbps)'),
             (
-                (TYPE_100ME_FX, '100BASE-FX (10/100ME FIBER)'),
-                (TYPE_100ME_LFX, '100BASE-LFX (10/100ME FIBER)'),
+                (TYPE_100ME_FX, '100BASE-FX (10/100ME)'),
+                (TYPE_100ME_LFX, '100BASE-LFX (10/100ME)'),
                 (TYPE_100ME_FIXED, '100BASE-TX (10/100ME)'),
-                (TYPE_100ME_T1, '100BASE-T1 (10/100ME Single Pair)'),
-                (TYPE_1GE_FIXED, '1000BASE-T (1GE)'),
+                (TYPE_100ME_T1, '100BASE-T1 (10/100ME)'),
+            ),
+        ),
+        (
+            _('GigabitEthernet (1 Gbps)'),
+            (
+                (TYPE_1GE_BX10_D, '1000BASE-BX10-D (1GE BiDi Down)'),
+                (TYPE_1GE_BX10_U, '1000BASE-BX10-U (1GE BiDi Up)'),
+                (TYPE_1GE_CX, '1000BASE-CX (1GE DAC)'),
+                (TYPE_1GE_CWDM, '1000BASE-CWDM (1GE)'),
+                (TYPE_1GE_DWDM, '1000BASE-DWDM (1GE)'),
+                (TYPE_1GE_EX, '1000BASE-EX (1GE)'),
                 (TYPE_1GE_SX_FIXED, '1000BASE-SX (1GE)'),
+                (TYPE_1GE_LSX, '1000BASE-LSX (1GE)'),
                 (TYPE_1GE_LX_FIXED, '1000BASE-LX (1GE)'),
+                (TYPE_1GE_LX10, '1000BASE-LX10/LH (1GE)'),
+                (TYPE_1GE_FIXED, '1000BASE-T (1GE)'),
                 (TYPE_1GE_TX_FIXED, '1000BASE-TX (1GE)'),
+                (TYPE_1GE_ZX, '1000BASE-ZX (1GE)'),
+            ),
+        ),
+        (
+            _('2.5/5 Gbps Ethernet'),
+            (
                 (TYPE_2GE_FIXED, '2.5GBASE-T (2.5GE)'),
                 (TYPE_5GE_FIXED, '5GBASE-T (5GE)'),
+            ),
+        ),
+        (
+            _('10 Gbps Ethernet'),
+            (
+                (TYPE_10GE_BR_D, '10GBASE-DR-D (10GE BiDi Down)'),
+                (TYPE_10GE_BR_U, '10GBASE-DR-U (10GE BiDi Up)'),
+                (TYPE_10GE_CX4, '10GBASE-CX4 (10GE DAC)'),
+                (TYPE_10GE_ER, '10GBASE-ER (10GE)'),
+                (TYPE_10GE_LR, '10GBASE-LR (10GE)'),
+                (TYPE_10GE_LRM, '10GBASE-LRM (10GE)'),
+                (TYPE_10GE_LX4, '10GBASE-LX4 (10GE)'),
+                (TYPE_10GE_SR, '10GBASE-SR (10GE)'),
                 (TYPE_10GE_FIXED, '10GBASE-T (10GE)'),
-                (TYPE_10GE_CX4, '10GBASE-CX4 (10GE)'),
+                (TYPE_10GE_ZR, '10GBASE-ZR (10GE)'),
+            )
+        ),
+        (
+            _('25 Gbps Ethernet'),
+            (
+                (TYPE_25GE_CR, '25GBASE-CR (25GE DAC)'),
+                (TYPE_25GE_ER, '25GBASE-ER (25GE)'),
+                (TYPE_25GE_LR, '25GBASE-LR (25GE)'),
+                (TYPE_25GE_SR, '25GBASE-SR (25GE)'),
+                (TYPE_25GE_T, '25GBASE-T (25GE)'),
+            )
+        ),
+        (
+            _('40 Gbps Ethernet'),
+            (
+                (TYPE_40GE_CR4, '40GBASE-CR4 (40GE DAC)'),
+                (TYPE_40GE_ER4, '40GBASE-ER4 (40GE)'),
+                (TYPE_40GE_FR4, '40GBASE-FR4 (40GE)'),
+                (TYPE_40GE_LR4, '40GBASE-LR4 (40GE)'),
+                (TYPE_40GE_SR4, '40GBASE-SR4 (40GE)'),
+            )
+        ),
+        (
+            _('50 Gbps Ethernet'),
+            (
+                (TYPE_50GE_CR, '50GBASE-CR (50GE DAC)'),
+                (TYPE_50GE_ER, '50GBASE-ER (50GE)'),
+                (TYPE_50GE_FR, '50GBASE-FR (50GE)'),
+                (TYPE_50GE_LR, '50GBASE-LR (50GE)'),
+                (TYPE_50GE_SR, '50GBASE-SR (50GE)'),
+            )
+        ),
+        (
+            _('100 Gbps Ethernet'),
+            (
+                (TYPE_100GE_CR1, '100GBASE-CR1 (100GE DAC)'),
+                (TYPE_100GE_CR2, '100GBASE-CR2 (100GE DAC)'),
+                (TYPE_100GE_CR4, '100GBASE-CR4 (100GE DAC)'),
+                (TYPE_100GE_CR10, '100GBASE-CR10 (100GE DAC)'),
+                (TYPE_100GE_DR, '100GBASE-DR (100GE)'),
+                (TYPE_100GE_ER4, '100GBASE-ER4 (100GE)'),
+                (TYPE_100GE_FR1, '100GBASE-FR1 (100GE)'),
+                (TYPE_100GE_LR1, '100GBASE-LR1 (100GE)'),
+                (TYPE_100GE_LR4, '100GBASE-LR4 (100GE)'),
+                (TYPE_100GE_SR1, '100GBASE-SR1 (100GE)'),
+                (TYPE_100GE_SR1_2, '100GBASE-SR1.2 (100GE BiDi)'),
+                (TYPE_100GE_SR2, '100GBASE-SR2 (100GE)'),
+                (TYPE_100GE_SR4, '100GBASE-SR4 (100GE)'),
+                (TYPE_100GE_SR10, '100GBASE-SR10 (100GE)'),
+                (TYPE_100GE_ZR, '100GBASE-ZR (100GE)'),
+            )
+        ),
+        (
+            _('200 Gbps Ethernet'),
+            (
+                (TYPE_200GE_CR2, '200GBASE-CR2 (200GE)'),
+                (TYPE_200GE_CR4, '200GBASE-CR4 (200GE)'),
+                (TYPE_200GE_SR2, '200GBASE-SR2 (200GE)'),
+                (TYPE_200GE_SR4, '200GBASE-SR4 (200GE)'),
+                (TYPE_200GE_DR4, '200GBASE-DR4 (200GE)'),
+                (TYPE_200GE_ER4, '200GBASE-ER4 (200GE)'),
+                (TYPE_200GE_FR4, '200GBASE-FR4 (200GE)'),
+                (TYPE_200GE_LR4, '200GBASE-LR4 (200GE)'),
+                (TYPE_200GE_VR2, '200GBASE-VR2 (200GE)'),
+            )
+        ),
+        (
+            _('400 Gbps Ethernet'),
+            (
+                (TYPE_400GE_CR4, '400GBASE-CR4 (400GE)'),
+                (TYPE_400GE_DR4, '400GBASE-DR4 (400GE)'),
+                (TYPE_400GE_ER8, '400GBASE-ER8 (400GE)'),
+                (TYPE_400GE_FR4, '400GBASE-FR4 (400GE)'),
+                (TYPE_400GE_FR8, '400GBASE-FR8 (400GE)'),
+                (TYPE_400GE_LR4, '400GBASE-LR4 (400GE)'),
+                (TYPE_400GE_LR8, '400GBASE-LR8 (400GE)'),
+                (TYPE_400GE_SR4, '400GBASE-SR4 (400GE)'),
+                (TYPE_400GE_SR4_2, '400GBASE-SR4.2 (400GE BiDi)'),
+                (TYPE_400GE_SR8, '400GBASE-SR8 (400GE)'),
+                (TYPE_400GE_SR16, '400GBASE-SR16 (400GE)'),
+                (TYPE_400GE_VR4, '400GBASE-VR4 (400GE)'),
+                (TYPE_400GE_ZR, '400GBASE-ZR (400GE)'),
             )
         ),
         (
-            _('Ethernet (modular)'),
+            _('800 Gbps Ethernet'),
+            (
+                (TYPE_800GE_CR8, '800GBASE-CR8 (800GE)'),
+                (TYPE_800GE_DR8, '800GBASE-DR8 (800GE)'),
+                (TYPE_800GE_SR8, '800GBASE-SR8 (800GE)'),
+                (TYPE_800GE_VR8, '800GBASE-VR8 (800GE)'),
+            )
+        ),
+        (
+            _('Pluggable transceivers'),
             (
                 (TYPE_100ME_SFP, 'SFP (100ME)'),
                 (TYPE_1GE_GBIC, 'GBIC (1GE)'),
@@ -1108,7 +1327,7 @@ class InterfaceTypeChoices(ChoiceSet):
             )
         ),
         (
-            _('Ethernet (backplane)'),
+            _('Backplane Ethernet'),
             (
                 (TYPE_1GE_KX, '1000BASE-KX (1GE)'),
                 (TYPE_2GE_KX, '2.5GBASE-KX (2.5GE)'),
@@ -1128,12 +1347,12 @@ class InterfaceTypeChoices(ChoiceSet):
             (
                 (TYPE_80211A, 'IEEE 802.11a'),
                 (TYPE_80211G, 'IEEE 802.11b/g'),
-                (TYPE_80211N, 'IEEE 802.11n'),
-                (TYPE_80211AC, 'IEEE 802.11ac'),
-                (TYPE_80211AD, 'IEEE 802.11ad'),
-                (TYPE_80211AX, 'IEEE 802.11ax'),
-                (TYPE_80211AY, 'IEEE 802.11ay'),
-                (TYPE_80211BE, 'IEEE 802.11be'),
+                (TYPE_80211N, 'IEEE 802.11n (Wi-Fi 4)'),
+                (TYPE_80211AC, 'IEEE 802.11ac (Wi-Fi 5)'),
+                (TYPE_80211AD, 'IEEE 802.11ad (WiGig)'),
+                (TYPE_80211AX, 'IEEE 802.11ax (Wi-Fi 6)'),
+                (TYPE_80211AY, 'IEEE 802.11ay (WiGig)'),
+                (TYPE_80211BE, 'IEEE 802.11be (Wi-Fi 7)'),
                 (TYPE_802151, 'IEEE 802.15.1 (Bluetooth)'),
                 (TYPE_802154, 'IEEE 802.15.4 (LR-WPAN)'),
                 (TYPE_OTHER_WIRELESS, 'Other (Wireless)'),
@@ -1497,8 +1716,9 @@ class PortTypeChoices(ChoiceSet):
 # Cables/links
 #
 
-class CableTypeChoices(ChoiceSet):
 
+class CableTypeChoices(ChoiceSet):
+    # Copper - Twisted Pair (UTP/STP)
     TYPE_CAT3 = 'cat3'
     TYPE_CAT5 = 'cat5'
     TYPE_CAT5E = 'cat5e'
@@ -1507,26 +1727,41 @@ class CableTypeChoices(ChoiceSet):
     TYPE_CAT7 = 'cat7'
     TYPE_CAT7A = 'cat7a'
     TYPE_CAT8 = 'cat8'
+    TYPE_MRJ21_TRUNK = 'mrj21-trunk'
+
+    # Copper - Twinax (DAC)
     TYPE_DAC_ACTIVE = 'dac-active'
     TYPE_DAC_PASSIVE = 'dac-passive'
-    TYPE_MRJ21_TRUNK = 'mrj21-trunk'
+
+    # Copper - Coaxial
     TYPE_COAXIAL = 'coaxial'
+
+    # Fiber Optic - Multimode
     TYPE_MMF = 'mmf'
     TYPE_MMF_OM1 = 'mmf-om1'
     TYPE_MMF_OM2 = 'mmf-om2'
     TYPE_MMF_OM3 = 'mmf-om3'
     TYPE_MMF_OM4 = 'mmf-om4'
     TYPE_MMF_OM5 = 'mmf-om5'
+
+    # Fiber Optic - Single-mode
     TYPE_SMF = 'smf'
     TYPE_SMF_OS1 = 'smf-os1'
     TYPE_SMF_OS2 = 'smf-os2'
+
+    # Fiber Optic - Other
     TYPE_AOC = 'aoc'
+
+    # Power
     TYPE_POWER = 'power'
+
+    # USB
     TYPE_USB = 'usb'
 
     CHOICES = (
         (
-            _('Copper'), (
+            _('Copper - Twisted Pair (UTP/STP)'),
+            (
                 (TYPE_CAT3, 'CAT3'),
                 (TYPE_CAT5, 'CAT5'),
                 (TYPE_CAT5E, 'CAT5e'),
@@ -1535,28 +1770,57 @@ class CableTypeChoices(ChoiceSet):
                 (TYPE_CAT7, 'CAT7'),
                 (TYPE_CAT7A, 'CAT7a'),
                 (TYPE_CAT8, 'CAT8'),
+                (TYPE_MRJ21_TRUNK, 'MRJ21 Trunk'),
+            ),
+        ),
+        (
+            _('Copper - Twinax (DAC)'),
+            (
                 (TYPE_DAC_ACTIVE, 'Direct Attach Copper (Active)'),
                 (TYPE_DAC_PASSIVE, 'Direct Attach Copper (Passive)'),
-                (TYPE_MRJ21_TRUNK, 'MRJ21 Trunk'),
+            ),
+        ),
+        (
+            _('Copper - Coaxial'),
+            (
                 (TYPE_COAXIAL, 'Coaxial'),
             ),
         ),
         (
-            _('Fiber'), (
+            _('Fiber - Multimode'),
+            (
                 (TYPE_MMF, 'Multimode Fiber'),
                 (TYPE_MMF_OM1, 'Multimode Fiber (OM1)'),
                 (TYPE_MMF_OM2, 'Multimode Fiber (OM2)'),
                 (TYPE_MMF_OM3, 'Multimode Fiber (OM3)'),
                 (TYPE_MMF_OM4, 'Multimode Fiber (OM4)'),
                 (TYPE_MMF_OM5, 'Multimode Fiber (OM5)'),
-                (TYPE_SMF, 'Singlemode Fiber'),
-                (TYPE_SMF_OS1, 'Singlemode Fiber (OS1)'),
-                (TYPE_SMF_OS2, 'Singlemode Fiber (OS2)'),
-                (TYPE_AOC, 'Active Optical Cabling (AOC)'),
             ),
         ),
-        (TYPE_USB, _('USB')),
-        (TYPE_POWER, _('Power')),
+        (
+            _('Fiber - Single-mode'),
+            (
+                (TYPE_SMF, 'Single-mode Fiber'),
+                (TYPE_SMF_OS1, 'Single-mode Fiber (OS1)'),
+                (TYPE_SMF_OS2, 'Single-mode Fiber (OS2)'),
+            ),
+        ),
+        (
+            _('Fiber - Other'),
+            ((TYPE_AOC, 'Active Optical Cabling (AOC)'),),
+        ),
+        (
+            _('Power'),
+            (
+                (TYPE_POWER, 'Power'),
+            ),
+        ),
+        (
+            _('USB'),
+            (
+                (TYPE_USB, 'USB'),
+            ),
+        ),
     )
 
 

+ 2 - 0
netbox/dcim/graphql/enums.py

@@ -12,6 +12,7 @@ __all__ = (
     'DeviceFaceEnum',
     'DeviceStatusEnum',
     'InterfaceDuplexEnum',
+    'InterfaceKindEnum',
     'InterfaceModeEnum',
     'InterfacePoEModeEnum',
     'InterfacePoETypeEnum',
@@ -48,6 +49,7 @@ DeviceAirflowEnum = strawberry.enum(DeviceAirflowChoices.as_enum(prefix='airflow
 DeviceFaceEnum = strawberry.enum(DeviceFaceChoices.as_enum(prefix='face'))
 DeviceStatusEnum = strawberry.enum(DeviceStatusChoices.as_enum(prefix='status'))
 InterfaceDuplexEnum = strawberry.enum(InterfaceDuplexChoices.as_enum(prefix='duplex'))
+InterfaceKindEnum = strawberry.enum(InterfaceKindChoices.as_enum(prefix='kind'))
 InterfaceModeEnum = strawberry.enum(InterfaceModeChoices.as_enum(prefix='mode'))
 InterfacePoEModeEnum = strawberry.enum(InterfacePoEModeChoices.as_enum(prefix='mode'))
 InterfacePoETypeEnum = strawberry.enum(InterfacePoETypeChoices.as_enum())

+ 24 - 0
netbox/dcim/graphql/filters.py

@@ -1,5 +1,6 @@
 from typing import Annotated, TYPE_CHECKING
 
+from django.db.models import Q
 import strawberry
 import strawberry_django
 from strawberry.scalars import ID
@@ -7,6 +8,8 @@ from strawberry_django import FilterLookup
 
 from core.graphql.filter_mixins import ChangeLogFilterMixin
 from dcim import models
+from dcim.constants import *
+from dcim.graphql.enums import InterfaceKindEnum
 from extras.graphql.filter_mixins import ConfigContextFilterMixin
 from netbox.graphql.filter_mixins import (
     PrimaryModelFilterMixin,
@@ -485,6 +488,27 @@ class InterfaceFilter(ModularComponentModelFilterMixin, InterfaceBaseFilterMixin
         strawberry_django.filter_field()
     )
 
+    @strawberry_django.filter_field
+    def connected(self, queryset, value: bool, prefix: str):
+        if value is True:
+            return queryset, Q(**{f"{prefix}_path__is_active": True})
+        else:
+            return queryset, Q(**{f"{prefix}_path__isnull": True}) | Q(**{f"{prefix}_path__is_active": False})
+
+    @strawberry_django.filter_field
+    def kind(
+        self,
+        queryset,
+        value: Annotated['InterfaceKindEnum', strawberry.lazy('dcim.graphql.enums')],
+        prefix: str
+    ):
+        if value == InterfaceKindEnum.KIND_PHYSICAL:
+            return queryset, ~Q(**{f"{prefix}type__in": NONCONNECTABLE_IFACE_TYPES})
+        elif value == InterfaceKindEnum.KIND_VIRTUAL:
+            return queryset, Q(**{f"{prefix}type__in": VIRTUAL_IFACE_TYPES})
+        elif value == InterfaceKindEnum.KIND_WIRELESS:
+            return queryset, Q(**{f"{prefix}type__in": WIRELESS_IFACE_TYPES})
+
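As a rough ORM-level sketch of what the `connected` and `kind` filters above evaluate to (outside of GraphQL), using the same constants imported from `dcim.constants`:

```python
from django.db.models import Q

from dcim.constants import NONCONNECTABLE_IFACE_TYPES, VIRTUAL_IFACE_TYPES, WIRELESS_IFACE_TYPES
from dcim.models import Interface

# kind=PHYSICAL: exclude interface types that can never be cabled
physical = Interface.objects.exclude(type__in=NONCONNECTABLE_IFACE_TYPES)

# kind=VIRTUAL / kind=WIRELESS: match the corresponding type sets
virtual = Interface.objects.filter(type__in=VIRTUAL_IFACE_TYPES)
wireless = Interface.objects.filter(type__in=WIRELESS_IFACE_TYPES)

# connected=True: an active cable path exists; connected=False: no path, or an inactive one
connected = Interface.objects.filter(_path__is_active=True)
unconnected = Interface.objects.filter(Q(_path__isnull=True) | Q(_path__is_active=False))
```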
 
 @strawberry_django.filter_type(models.InterfaceTemplate, lookups=True)
 class InterfaceTemplateFilter(ModularComponentTemplateFilterMixin):

+ 116 - 49
netbox/dcim/models/cables.py

@@ -18,6 +18,7 @@ from utilities.conversion import to_meters
 from utilities.exceptions import AbortRequest
 from utilities.fields import ColorField, GenericArrayForeignKey
 from utilities.querysets import RestrictedQuerySet
+from utilities.serialization import deserialize_object, serialize_object
 from wireless.models import WirelessLink
 from .device_components import FrontPort, RearPort, PathEndpoint
 
@@ -119,43 +120,61 @@ class Cable(PrimaryModel):
         pk = self.pk or self._pk
         return self.label or f'#{pk}'
 
-    @property
-    def a_terminations(self):
-        if hasattr(self, '_a_terminations'):
-            return self._a_terminations
+    def get_status_color(self):
+        return LinkStatusChoices.colors.get(self.status)
+
+    def _get_x_terminations(self, side):
+        """
+        Return the terminating objects for the given cable end (A or B).
+        """
+        if side not in (CableEndChoices.SIDE_A, CableEndChoices.SIDE_B):
+            raise ValueError(f"Unknown cable side: {side}")
+        attr = f'_{side.lower()}_terminations'
 
+        if hasattr(self, attr):
+            return getattr(self, attr)
         if not self.pk:
             return []
-
-        # Query self.terminations.all() to leverage cached results
         return [
-            ct.termination for ct in self.terminations.all() if ct.cable_end == CableEndChoices.SIDE_A
+            # Query self.terminations.all() to leverage cached results
+            ct.termination for ct in self.terminations.all() if ct.cable_end == side
         ]
 
-    @a_terminations.setter
-    def a_terminations(self, value):
-        if not self.pk or self.a_terminations != list(value):
+    def _set_x_terminations(self, side, value):
+        """
+        Set the terminating objects for the given cable end (A or B).
+        """
+        if side not in (CableEndChoices.SIDE_A, CableEndChoices.SIDE_B):
+            raise ValueError(f"Unknown cable side: {side}")
+        _attr = f'_{side.lower()}_terminations'
+
+        # If the provided value is a list of CableTermination IDs, resolve them
+        # to their corresponding termination objects.
+        if all(isinstance(item, int) for item in value):
+            value = [
+                ct.termination for ct in CableTermination.objects.filter(pk__in=value).prefetch_related('termination')
+            ]
+
+        if not self.pk or getattr(self, _attr, []) != list(value):
             self._terminations_modified = True
-        self._a_terminations = value
+
+        setattr(self, _attr, value)
 
     @property
-    def b_terminations(self):
-        if hasattr(self, '_b_terminations'):
-            return self._b_terminations
+    def a_terminations(self):
+        return self._get_x_terminations(CableEndChoices.SIDE_A)
 
-        if not self.pk:
-            return []
+    @a_terminations.setter
+    def a_terminations(self, value):
+        self._set_x_terminations(CableEndChoices.SIDE_A, value)
 
-        # Query self.terminations.all() to leverage cached results
-        return [
-            ct.termination for ct in self.terminations.all() if ct.cable_end == CableEndChoices.SIDE_B
-        ]
+    @property
+    def b_terminations(self):
+        return self._get_x_terminations(CableEndChoices.SIDE_B)
 
     @b_terminations.setter
     def b_terminations(self, value):
-        if not self.pk or self.b_terminations != list(value):
-            self._terminations_modified = True
-        self._b_terminations = value
+        self._set_x_terminations(CableEndChoices.SIDE_B, value)
 
     @property
     def color_name(self):
@@ -208,7 +227,7 @@ class Cable(PrimaryModel):
             for termination in self.b_terminations:
                 CableTermination(cable=self, cable_end='B', termination=termination).clean()
 
-    def save(self, *args, **kwargs):
+    def save(self, *args, force_insert=False, force_update=False, using=None, update_fields=None):
         _created = self.pk is None
 
         # Store the given length (if any) in meters for use in database ordering
@@ -221,39 +240,87 @@ class Cable(PrimaryModel):
         if self.length is None:
             self.length_unit = None
 
-        super().save(*args, **kwargs)
+        # If this is a new Cable, save it before attempting to create its CableTerminations
+        if self._state.adding:
+            super().save(*args, force_insert=True, using=using, update_fields=update_fields)
+            # Update the private PK used in __str__()
+            self._pk = self.pk
 
-        # Update the private pk used in __str__ in case this is a new object (i.e. just got its pk)
-        self._pk = self.pk
+        if self._terminations_modified:
+            self.update_terminations()
 
-        # Retrieve existing A/B terminations for the Cable
-        a_terminations = {ct.termination: ct for ct in self.terminations.filter(cable_end='A')}
-        b_terminations = {ct.termination: ct for ct in self.terminations.filter(cable_end='B')}
+        super().save(*args, force_update=True, using=using, update_fields=update_fields)
 
-        # Delete stale CableTerminations
-        if self._terminations_modified:
-            for termination, ct in a_terminations.items():
-                if termination.pk and termination not in self.a_terminations:
-                    ct.delete()
-            for termination, ct in b_terminations.items():
-                if termination.pk and termination not in self.b_terminations:
-                    ct.delete()
-
-        # Save new CableTerminations (if any)
-        if self._terminations_modified:
-            for termination in self.a_terminations:
-                if not termination.pk or termination not in a_terminations:
-                    CableTermination(cable=self, cable_end='A', termination=termination).save()
-            for termination in self.b_terminations:
-                if not termination.pk or termination not in b_terminations:
-                    CableTermination(cable=self, cable_end='B', termination=termination).save()
         try:
             trace_paths.send(Cable, instance=self, created=_created)
         except UnsupportedCablePath as e:
             raise AbortRequest(e)
 
-    def get_status_color(self):
-        return LinkStatusChoices.colors.get(self.status)
+    def serialize_object(self, exclude=None):
+        data = serialize_object(self, exclude=exclude or [])
+
+        # Add A & B terminations to the serialized data
+        a_terminations, b_terminations = self.get_terminations()
+        data['a_terminations'] = sorted([ct.pk for ct in a_terminations.values()])
+        data['b_terminations'] = sorted([ct.pk for ct in b_terminations.values()])
+
+        return data
+
+    @classmethod
+    def deserialize_object(cls, data, pk=None):
+        a_terminations = data.pop('a_terminations', [])
+        b_terminations = data.pop('b_terminations', [])
+
+        instance = deserialize_object(cls, data, pk=pk)
+
+        # Assign A & B termination objects to the Cable instance
+        queryset = CableTermination.objects.prefetch_related('termination')
+        instance.a_terminations = [
+            ct.termination for ct in queryset.filter(pk__in=a_terminations)
+        ]
+        instance.b_terminations = [
+            ct.termination for ct in queryset.filter(pk__in=b_terminations)
+        ]
+
+        return instance
+
+    def get_terminations(self):
+        """
+        Return two dictionaries mapping A & B side terminating objects to their corresponding CableTerminations
+        for this Cable.
+        """
+        a_terminations = {}
+        b_terminations = {}
+
+        for ct in CableTermination.objects.filter(cable=self).prefetch_related('termination'):
+            if ct.cable_end == CableEndChoices.SIDE_A:
+                a_terminations[ct.termination] = ct
+            else:
+                b_terminations[ct.termination] = ct
+
+        return a_terminations, b_terminations
+
+    def update_terminations(self):
+        """
+        Create/delete CableTerminations for this Cable to reflect its current state.
+        """
+        a_terminations, b_terminations = self.get_terminations()
+
+        # Delete any stale CableTerminations
+        for termination, ct in a_terminations.items():
+            if termination.pk and termination not in self.a_terminations:
+                ct.delete()
+        for termination, ct in b_terminations.items():
+            if termination.pk and termination not in self.b_terminations:
+                ct.delete()
+
+        # Save any new CableTerminations
+        for termination in self.a_terminations:
+            if not termination.pk or termination not in a_terminations:
+                CableTermination(cable=self, cable_end='A', termination=termination).save()
+        for termination in self.b_terminations:
+            if not termination.pk or termination not in b_terminations:
+                CableTermination(cable=self, cable_end='B', termination=termination).save()
 
 
 class CableTermination(ChangeLoggedModel):
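A minimal usage sketch of the refactored termination handling above (illustrative only; the Interface objects and their names are assumptions, not part of this diff):

    # interface_a and interface_b are pre-existing Interface instances
    cable = Cable()
    cable.a_terminations = [interface_a]   # setter compares against stored terminations and flags _terminations_modified
    cable.b_terminations = [interface_b]
    cable.save()                           # new Cable: INSERT first, then update_terminations() syncs the CableTermination rows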

+ 3 - 3
netbox/dcim/models/device_components.py

@@ -872,14 +872,14 @@ class Interface(ModularComponentModel, BaseInterface, CabledObjectModel, PathEnd
                         "The selected parent interface ({interface}) belongs to a different device ({device})"
                     ).format(interface=self.parent, device=self.parent.device)
                 })
-            elif self.parent.device.virtual_chassis != self.parent.virtual_chassis:
+            elif self.parent.device.virtual_chassis != self.device.virtual_chassis:
                 raise ValidationError({
                     'parent': _(
                         "The selected parent interface ({interface}) belongs to {device}, which is not part of "
                         "virtual chassis {virtual_chassis}."
                     ).format(
                         interface=self.parent,
-                        device=self.parent_device,
+                        device=self.parent.device,
                         virtual_chassis=self.device.virtual_chassis
                     )
                 })
@@ -890,7 +890,7 @@ class Interface(ModularComponentModel, BaseInterface, CabledObjectModel, PathEnd
         if self.pk and self.bridge_id == self.pk:
             raise ValidationError({'bridge': _("An interface cannot be bridged to itself.")})
 
-        # A bridged interface belong to the same device or virtual chassis
+        # A bridged interface belongs to the same device or virtual chassis
         if self.bridge and self.bridge.device != self.device:
             if self.device.virtual_chassis is None:
                 raise ValidationError({

+ 5 - 0
netbox/dcim/tables/devices.py

@@ -195,6 +195,11 @@ class DeviceTable(TenancyColumnsMixin, ContactsColumnMixin, NetBoxTable):
         linkify=True,
         verbose_name=_('Type')
     )
+    u_height = columns.TemplateColumn(
+        accessor=tables.A('device_type.u_height'),
+        verbose_name=_('U Height'),
+        template_code='{{ value|floatformat }}'
+    )
     platform = tables.Column(
         linkify=True,
         verbose_name=_('Platform')

+ 48 - 19
netbox/dcim/tests/test_views.py

@@ -1078,14 +1078,14 @@ class ModuleTypeTestCase(ViewTestCases.PrimaryObjectViewTestCase):
             'dcim.add_modulebaytemplate',
         )
 
-        # run base test
-        super().test_bulk_import_objects_with_permission()
-
-        # TODO: remove extra regression asserts once parent test supports testing all import fields
-        fan_module_type = ModuleType.objects.get(part_number='generic-fan')
-        fan_module_type_profile = ModuleTypeProfile.objects.get(name='Fan')
+        def verify_module_type_profile(scenario_name):
+            # TODO: remove extra regression asserts once parent test supports testing all import fields
+            fan_module_type = ModuleType.objects.get(part_number='generic-fan')
+            fan_module_type_profile = ModuleTypeProfile.objects.get(name='Fan')
+            assert fan_module_type.profile == fan_module_type_profile
 
-        assert fan_module_type.profile == fan_module_type_profile
+        # run base test
+        super().test_bulk_import_objects_with_permission(post_import_callback=verify_module_type_profile)
 
     @override_settings(EXEMPT_VIEW_PERMISSIONS=['*'], EXEMPT_EXCLUDE_MODELS=[])
     def test_bulk_import_objects_with_constrained_permission(self):
@@ -3290,8 +3290,10 @@ class CableTestCase(
             Device(name='Device 1', site=sites[0], device_type=devicetype, role=role),
             Device(name='Device 2', site=sites[0], device_type=devicetype, role=role),
             Device(name='Device 3', site=sites[0], device_type=devicetype, role=role),
+            Device(name='Device 4', site=sites[0], device_type=devicetype, role=role),
             # Create 'Device 1' assigned to 'Site 2' (allowed since the site is different)
             Device(name='Device 1', site=sites[1], device_type=devicetype, role=role),
+            Device(name='Device 5', site=sites[1], device_type=devicetype, role=role),
         )
         Device.objects.bulk_create(devices)
 
@@ -3300,22 +3302,36 @@ class CableTestCase(
         vc.save()
 
         interfaces = (
+            # Device 1, Site 1
             Interface(device=devices[0], name='Interface 1', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
             Interface(device=devices[0], name='Interface 2', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
             Interface(device=devices[0], name='Interface 3', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
+            # Device 2, Site 1
             Interface(device=devices[1], name='Interface 1', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
             Interface(device=devices[1], name='Interface 2', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
             Interface(device=devices[1], name='Interface 3', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
+            # Device 3, Site 1
             Interface(device=devices[2], name='Interface 1', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
             Interface(device=devices[2], name='Interface 2', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
             Interface(device=devices[2], name='Interface 3', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
+            # Device 4, Site 1
             Interface(device=devices[3], name='Interface 1', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
             Interface(device=devices[3], name='Interface 2', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
             Interface(device=devices[3], name='Interface 3', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
+            # Device 1, Site 2
+            Interface(device=devices[4], name='Interface 1', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
+            Interface(device=devices[4], name='Interface 2', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
+            Interface(device=devices[4], name='Interface 3', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
+
+            # Device 5, Site 2
+            Interface(device=devices[5], name='Interface 1', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
+            Interface(device=devices[5], name='Interface 2', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
+            Interface(device=devices[5], name='Interface 3', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
+
             Interface(device=devices[1], name='Device 2 Interface', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
             Interface(device=devices[2], name='Device 3 Interface', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
-            Interface(device=devices[3], name='Interface 4', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
-            Interface(device=devices[3], name='Interface 5', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
+            Interface(device=devices[4], name='Interface 4', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
+            Interface(device=devices[4], name='Interface 5', type=InterfaceTypeChoices.TYPE_1GE_FIXED),
         )
         Interface.objects.bulk_create(interfaces)
 
@@ -3342,16 +3358,29 @@ class CableTestCase(
             'tags': [t.pk for t in tags],
         }
 
-        # Ensure that CSV bulk import supports assigning terminations from parent devices that share
-        # the same device name, provided those devices belong to different sites.
-        cls.csv_data = (
-            "side_a_site,side_a_device,side_a_type,side_a_name,side_b_site,side_b_device,side_b_type,side_b_name",
-            "Site 1,Device 3,dcim.interface,Interface 1,Site 2,Device 1,dcim.interface,Interface 1",
-            "Site 1,Device 3,dcim.interface,Interface 2,Site 2,Device 1,dcim.interface,Interface 2",
-            "Site 1,Device 3,dcim.interface,Interface 3,Site 2,Device 1,dcim.interface,Interface 3",
-            "Site 1,Device 1,dcim.interface,Device 2 Interface,Site 2,Device 1,dcim.interface,Interface 4",
-            "Site 1,Device 1,dcim.interface,Device 3 Interface,Site 2,Device 1,dcim.interface,Interface 5",
-        )
+        cls.csv_data = {
+            'default': (
+                "side_a_device,side_a_type,side_a_name,side_b_device,side_b_type,side_b_name",
+                "Device 4,dcim.interface,Interface 1,Device 5,dcim.interface,Interface 1",
+                "Device 3,dcim.interface,Interface 2,Device 4,dcim.interface,Interface 2",
+                "Device 3,dcim.interface,Interface 3,Device 4,dcim.interface,Interface 3",
+
+                # The following is no longer possible in this scenario, because there are multiple
+                # devices named "Device 1" across multiple sites. See the "site-filtering" scenario
+                # below for how to specify a site for non-unique device names.
+                # "Device 1,dcim.interface,Device 3 Interface,Device 4,dcim.interface,Interface 5",
+            ),
+            'site-filtering': (
+                # Ensure that CSV bulk import supports assigning terminations from parent devices
+                # that share the same device name, provided those devices belong to different sites.
+                "side_a_site,side_a_device,side_a_type,side_a_name,side_b_site,side_b_device,side_b_type,side_b_name",
+                "Site 1,Device 3,dcim.interface,Interface 1,Site 2,Device 1,dcim.interface,Interface 1",
+                "Site 1,Device 3,dcim.interface,Interface 2,Site 2,Device 1,dcim.interface,Interface 2",
+                "Site 1,Device 3,dcim.interface,Interface 3,Site 2,Device 1,dcim.interface,Interface 3",
+                "Site 1,Device 1,dcim.interface,Device 2 Interface,Site 2,Device 1,dcim.interface,Interface 4",
+                "Site 1,Device 1,dcim.interface,Device 3 Interface,Site 2,Device 1,dcim.interface,Interface 5",
+            )
+        }
 
         cls.csv_update_data = (
             "id,label,color",

+ 3 - 3
netbox/extras/graphql/filters.py

@@ -43,12 +43,12 @@ __all__ = (
 
 @strawberry_django.filter_type(models.ConfigContext, lookups=True)
 class ConfigContextFilter(BaseObjectTypeFilterMixin, SyncedDataFilterMixin, ChangeLogFilterMixin):
-    name: FilterLookup[str] = strawberry_django.filter_field()
+    name: FilterLookup[str] | None = strawberry_django.filter_field()
     weight: Annotated['IntegerLookup', strawberry.lazy('netbox.graphql.filter_lookups')] | None = (
         strawberry_django.filter_field()
     )
-    description: FilterLookup[str] = strawberry_django.filter_field()
-    is_active: FilterLookup[bool] = strawberry_django.filter_field()
+    description: FilterLookup[str] | None = strawberry_django.filter_field()
+    is_active: FilterLookup[bool] | None = strawberry_django.filter_field()
     regions: Annotated['RegionFilter', strawberry.lazy('dcim.graphql.filters')] | None = (
         strawberry_django.filter_field()
     )

+ 1 - 1
netbox/extras/jobs.py

@@ -106,7 +106,7 @@ class ScriptJob(JobRunner):
 
         # Add the current request as a property of the script
         script.request = request
-        self.logger.debug(f"Request ID: {request.id}")
+        self.logger.debug(f"Request ID: {request.id if request else None}")
 
         # Execute the script. If commit is True, wrap it with the event_tracking context manager to ensure we process
         # change logging, event rules, etc.

+ 4 - 2
netbox/extras/models/models.py

@@ -1,6 +1,6 @@
 import json
-import os
 import urllib.parse
+from pathlib import Path
 
 from django.conf import settings
 from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
@@ -728,7 +728,9 @@ class ImageAttachment(ChangeLoggedModel):
 
     @property
     def filename(self):
-        return os.path.basename(self.image.name).split('_', 2)[2]
+        base_name = Path(self.image.name).name
+        prefix = f"{self.object_type.model}_{self.object_id}_"
+        return base_name.removeprefix(prefix)
 
     @property
     def html_tag(self):
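Illustrative walk-through of the new prefix stripping (values mirror the ImageAttachmentTests added later in this commit):

    # image name "image-attachments/rack_12_My_File.png", object_type "rack", object_id 12
    # Path(...).name               -> "rack_12_My_File.png"
    # .removeprefix("rack_12_")    -> "My_File.png"
    # a mismatched prefix such as "rack_13_other.png" is returned unchanged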

+ 14 - 5
netbox/extras/querysets.py

@@ -22,9 +22,10 @@ class ConfigContextQuerySet(RestrictedQuerySet):
           aggregate_data: If True, use the JSONBAgg aggregate function to return only the list of JSON data objects
         """
 
-        # Device type and location assignment is relevant only for Devices
+        # Device type and location assignment are relevant only for Devices
         device_type = getattr(obj, 'device_type', None)
         location = getattr(obj, 'location', None)
+        locations = location.get_ancestors(include_self=True) if location else []
 
         # Get assigned cluster, group, and type (if any)
         cluster = getattr(obj, 'cluster', None)
@@ -49,7 +50,7 @@ class ConfigContextQuerySet(RestrictedQuerySet):
             Q(regions__in=regions) | Q(regions=None),
             Q(site_groups__in=sitegroups) | Q(site_groups=None),
             Q(sites=obj.site) | Q(sites=None),
-            Q(locations=location) | Q(locations=None),
+            Q(locations__in=locations) | Q(locations=None),
             Q(device_types=device_type) | Q(device_types=None),
             Q(roles__in=device_roles) | Q(roles=None),
             Q(platforms=obj.platform) | Q(platforms=None),
@@ -92,7 +93,7 @@ class ConfigContextModelQuerySet(RestrictedQuerySet):
                     _data=EmptyGroupByJSONBAgg('data', ordering=['weight', 'name'])
                 ).values("_data").order_by()
             )
-        ).distinct()
+        )
 
     def _get_config_context_filters(self):
         # Construct the set of Q objects for the specific object types
@@ -116,7 +117,7 @@ class ConfigContextModelQuerySet(RestrictedQuerySet):
                     ).values_list(
                         'tag_id',
                         flat=True
-                    )
+                    ).distinct()
                 )
             ) | Q(tags=None),
             is_active=True,
@@ -124,7 +125,15 @@ class ConfigContextModelQuerySet(RestrictedQuerySet):
 
         # Apply Location & DeviceType filters only for VirtualMachines
         if self.model._meta.model_name == 'device':
-            base_query.add((Q(locations=OuterRef('location')) | Q(locations=None)), Q.AND)
+            base_query.add(
+                (Q(
+                    locations__tree_id=OuterRef('location__tree_id'),
+                    locations__level__lte=OuterRef('location__level'),
+                    locations__lft__lte=OuterRef('location__lft'),
+                    locations__rght__gte=OuterRef('location__rght'),
+                ) | Q(locations=None)),
+                Q.AND
+            )
             base_query.add((Q(device_types=OuterRef('device_type')) | Q(device_types=None)), Q.AND)
         elif self.model._meta.model_name == 'virtualmachine':
             base_query.add(Q(locations=None), Q.AND)
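A hedged note on the tree-field comparison above: with django-mptt (used by NetBox Locations), an ancestor A of location L in the same tree satisfies A.lft <= L.lft and A.rght >= L.rght, so the OuterRef() comparisons let a ConfigContext assigned to a parent Location also match devices placed in its descendant Locations.

    # Rough ORM equivalent for a single device, assuming device.location is set:
    # ConfigContext.objects.filter(
    #     locations__in=device.location.get_ancestors(include_self=True)
    # )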

+ 3 - 2
netbox/extras/tables/tables.py

@@ -725,8 +725,9 @@ class ScriptResultsTable(BaseTable):
     index = tables.Column(
         verbose_name=_('Line')
     )
-    time = tables.Column(
-        verbose_name=_('Time')
+    time = columns.DateTimeColumn(
+        verbose_name=_('Time'),
+        timespec='seconds'
     )
     status = tables.TemplateColumn(
         template_code="""{% load log_levels %}{% log_level record.status %}""",

+ 153 - 31
netbox/extras/tests/test_models.py

@@ -1,17 +1,95 @@
 import tempfile
 from pathlib import Path
 
+from django.contrib.contenttypes.models import ContentType
+from django.core.files.uploadedfile import SimpleUploadedFile
 from django.forms import ValidationError
 from django.test import tag, TestCase
 
 from core.models import DataSource, ObjectType
 from dcim.models import Device, DeviceRole, DeviceType, Location, Manufacturer, Platform, Region, Site, SiteGroup
-from extras.models import ConfigContext, ConfigContextProfile, ConfigTemplate, Tag
+from extras.models import ConfigContext, ConfigContextProfile, ConfigTemplate, ImageAttachment, Tag, TaggedItem
 from tenancy.models import Tenant, TenantGroup
 from utilities.exceptions import AbortRequest
 from virtualization.models import Cluster, ClusterGroup, ClusterType, VirtualMachine
 
 
+class ImageAttachmentTests(TestCase):
+    @classmethod
+    def setUpTestData(cls):
+        cls.ct_rack = ContentType.objects.get(app_label='dcim', model='rack')
+        cls.image_content = b''
+
+    def _stub_image_attachment(self, object_id, image_filename, name=None):
+        """
+        Create an ImageAttachment instance with the provided object_id and image filename.
+
+        The stub is initialized with the given filename and predefined image content, for use
+        in tests that require an ImageAttachment object.
+        """
+        ia = ImageAttachment(
+            object_type=self.ct_rack,
+            object_id=object_id,
+            name=name,
+            image=SimpleUploadedFile(
+                name=image_filename,
+                content=self.image_content,
+                content_type='image/jpeg',
+            ),
+        )
+        return ia
+
+    def test_filename_strips_expected_prefix(self):
+        """
+        Tests that the filename of the image attachment is stripped of the expected
+        prefix.
+        """
+        ia = self._stub_image_attachment(12, 'image-attachments/rack_12_My_File.png')
+        self.assertEqual(ia.filename, 'My_File.png')
+
+    def test_filename_legacy_nested_path_returns_basename(self):
+        """
+        Tests if the filename of a legacy-nested path correctly returns only the basename.
+        """
+        # e.g. "image-attachments/rack_12_5/31/23.jpg" -> "23.jpg"
+        ia = self._stub_image_attachment(12, 'image-attachments/rack_12_5/31/23.jpg')
+        self.assertEqual(ia.filename, '23.jpg')
+
+    def test_filename_no_prefix_returns_basename(self):
+        """
+        Tests that the filename property correctly returns the basename for an image
+        attachment that has no leading prefix in its path.
+        """
+        ia = self._stub_image_attachment(42, 'image-attachments/just_name.webp')
+        self.assertEqual(ia.filename, 'just_name.webp')
+
+    def test_mismatched_prefix_is_not_stripped(self):
+        """
+        Tests that a mismatched prefix in the filename is not stripped.
+        """
+        # Prefix does not match object_id -> leave as-is (basename only)
+        ia = self._stub_image_attachment(12, 'image-attachments/rack_13_other.png')
+        self.assertEqual('rack_13_other.png', ia.filename)
+
+    def test_str_uses_name_when_present(self):
+        """
+        Tests that the `str` representation of the object uses the
+        `name` attribute when provided.
+        """
+        ia = self._stub_image_attachment(12, 'image-attachments/rack_12_file.png', name='Human title')
+        self.assertEqual('Human title', str(ia))
+
+    def test_str_falls_back_to_filename(self):
+        """
+        Tests that the `str` representation of the object falls back to
+        the filename if the name attribute is not set.
+        """
+        ia = self._stub_image_attachment(12, 'image-attachments/rack_12_file.png', name='')
+        self.assertEqual('file.png', str(ia))
+
+
 class TagTest(TestCase):
 
     def test_default_ordering_weight_then_name_is_set(self):
@@ -445,7 +523,7 @@ class ConfigContextTest(TestCase):
         vm1 = VirtualMachine.objects.create(name="VM 1", site=site, role=vm_role)
         vm2 = VirtualMachine.objects.create(name="VM 2", cluster=cluster, role=vm_role)
 
-        # Check that their individually-rendered config contexts are identical
+        # Check that their individually rendered config contexts are identical
         self.assertEqual(
             vm1.get_config_context(),
             vm2.get_config_context()
@@ -458,11 +536,39 @@ class ConfigContextTest(TestCase):
             vms[1].get_config_context()
         )
 
+    def test_valid_local_context_data(self):
+        device = Device.objects.first()
+        device.local_context_data = None
+        device.clean()
+
+        device.local_context_data = {"foo": "bar"}
+        device.clean()
+
+    def test_invalid_local_context_data(self):
+        device = Device.objects.first()
+
+        device.local_context_data = ""
+        with self.assertRaises(ValidationError):
+            device.clean()
+
+        device.local_context_data = 0
+        with self.assertRaises(ValidationError):
+            device.clean()
+
+        device.local_context_data = False
+        with self.assertRaises(ValidationError):
+            device.clean()
+
+        device.local_context_data = 'foo'
+        with self.assertRaises(ValidationError):
+            device.clean()
+
+    @tag('regression')
     def test_multiple_tags_return_distinct_objects(self):
         """
         Tagged items use a generic relationship, which results in duplicate rows being returned when queried.
         This is combated by appending distinct() to the config context querysets. This test creates a config
-        context assigned to two tags and ensures objects related by those same two tags result in only a single
+        context assigned to two tags and ensures objects related to those same two tags result in only a single
         config context record being returned.
 
         See https://github.com/netbox-community/netbox/issues/5314
@@ -495,14 +601,15 @@ class ConfigContextTest(TestCase):
         self.assertEqual(ConfigContext.objects.get_for_object(device).count(), 1)
         self.assertEqual(device.get_config_context(), annotated_queryset[0].get_config_context())
 
-    def test_multiple_tags_return_distinct_objects_with_seperate_config_contexts(self):
+    @tag('regression')
+    def test_multiple_tags_return_distinct_objects_with_separate_config_contexts(self):
         """
         Tagged items use a generic relationship, which results in duplicate rows being returned when queried.
-        This is combatted by by appending distinct() to the config context querysets. This test creates a config
-        context assigned to two tags and ensures objects related by those same two tags result in only a single
+        This is combated by appending distinct() to the config context querysets. This test creates a config
+        context assigned to two tags and ensures objects related to those same two tags result in only a single
         config context record being returned.
 
-        This test case is seperate from the above in that it deals with multiple config context objects in play.
+        This test case is separate from the above in that it deals with multiple config context objects in play.
 
         See https://github.com/netbox-community/netbox/issues/5387
         """
@@ -543,32 +650,47 @@ class ConfigContextTest(TestCase):
         self.assertEqual(ConfigContext.objects.get_for_object(device).count(), 2)
         self.assertEqual(device.get_config_context(), annotated_queryset[0].get_config_context())
 
-    def test_valid_local_context_data(self):
-        device = Device.objects.first()
-        device.local_context_data = None
-        device.clean()
-
-        device.local_context_data = {"foo": "bar"}
-        device.clean()
+    @tag('performance', 'regression')
+    def test_config_context_annotation_query_optimization(self):
+        """
+        Regression test for issue #20327: Ensure config context annotation
+        doesn't use expensive DISTINCT on main query.
 
-    def test_invalid_local_context_data(self):
+        Verifies that DISTINCT is only used in tag subquery where needed,
+        not on the main device query which is expensive for large datasets.
+        """
         device = Device.objects.first()
-
-        device.local_context_data = ""
-        with self.assertRaises(ValidationError):
-            device.clean()
-
-        device.local_context_data = 0
-        with self.assertRaises(ValidationError):
-            device.clean()
-
-        device.local_context_data = False
-        with self.assertRaises(ValidationError):
-            device.clean()
-
-        device.local_context_data = 'foo'
-        with self.assertRaises(ValidationError):
-            device.clean()
+        queryset = Device.objects.filter(pk=device.pk).annotate_config_context_data()
+
+        # Main device query should NOT use DISTINCT
+        self.assertFalse(queryset.query.distinct)
+
+        # Check that tag subqueries DO use DISTINCT by inspecting the annotation
+        config_annotation = queryset.query.annotations.get('config_context_data')
+        self.assertIsNotNone(config_annotation)
+
+        def find_tag_subqueries(where_node):
+            """Find subqueries in WHERE clause that relate to tag filtering"""
+            subqueries = []
+
+            def traverse(node):
+                if hasattr(node, 'children'):
+                    for child in node.children:
+                        try:
+                            if child.rhs.query.model is TaggedItem:
+                                subqueries.append(child.rhs.query)
+                        except AttributeError:
+                            traverse(child)
+            traverse(where_node)
+            return subqueries
+
+        # Find subqueries in the WHERE clause that should have DISTINCT
+        tag_subqueries = find_tag_subqueries(config_annotation.query.where)
+        distinct_subqueries = [sq for sq in tag_subqueries if sq.distinct]
+
+        # Verify we found at least one DISTINCT subquery for tags
+        self.assertEqual(len(distinct_subqueries), 1)
+        self.assertTrue(distinct_subqueries[0].distinct)
 
 
 class ConfigTemplateTest(TestCase):

+ 142 - 1
netbox/extras/tests/test_utils.py

@@ -1,7 +1,10 @@
+from types import SimpleNamespace
+
+from django.contrib.contenttypes.models import ContentType
 from django.test import TestCase
 
 from extras.models import ExportTemplate
-from extras.utils import filename_from_model
+from extras.utils import filename_from_model, image_upload
 from tenancy.models import ContactGroup, TenantGroup
 from wireless.models import WirelessLANGroup
 
@@ -17,3 +20,141 @@ class FilenameFromModelTests(TestCase):
 
         for model, expected in cases:
             self.assertEqual(filename_from_model(model), expected)
+
+
+class ImageUploadTests(TestCase):
+    @classmethod
+    def setUpTestData(cls):
+        # We only need a ContentType with model="rack" for the prefix;
+        # this doesn't require creating a Rack object.
+        cls.ct_rack = ContentType.objects.get(app_label='dcim', model='rack')
+
+    def _stub_instance(self, object_id=12, name=None):
+        """
+        Creates a minimal stub for use with the `image_upload()` function.
+
+        This method generates an instance of `SimpleNamespace` containing a set
+        of attributes required to simulate the expected input for the
+        `image_upload()` method.
+        It is designed to simplify testing or processing by providing a
+        lightweight representation of an object.
+        """
+        return SimpleNamespace(object_type=self.ct_rack, object_id=object_id, name=name)
+
+    def _second_segment(self, path: str):
+        """
+        Extracts and returns the portion of the input string after the
+        first '/' character.
+        """
+        return path.split('/', 1)[1]
+
+    def test_windows_fake_path_and_extension_lowercased(self):
+        """
+        Tests handling of a Windows file path with a fake directory and extension.
+        """
+        inst = self._stub_instance(name=None)
+        path = image_upload(inst, r'C:\fake_path\MyPhoto.JPG')
+        # Base directory and single-level path
+        seg2 = self._second_segment(path)
+        self.assertTrue(path.startswith('image-attachments/rack_12_'))
+        self.assertNotIn('/', seg2, 'should not create nested directories')
+        # Extension from the uploaded file, lowercased
+        self.assertTrue(seg2.endswith('.jpg'))
+
+    def test_name_with_slashes_is_flattened_no_subdirectories(self):
+        """
+        Tests that a name with slashes is flattened and does not
+        create subdirectories.
+        """
+        inst = self._stub_instance(name='5/31/23')
+        path = image_upload(inst, 'image.png')
+        seg2 = self._second_segment(path)
+        self.assertTrue(seg2.startswith('rack_12_'))
+        self.assertNotIn('/', seg2)
+        self.assertNotIn('\\', seg2)
+        self.assertTrue(seg2.endswith('.png'))
+
+    def test_name_with_backslashes_is_flattened_no_subdirectories(self):
+        """
+        Tests that a name including backslashes is correctly flattened
+        into a single directory name without creating subdirectories.
+        """
+        inst = self._stub_instance(name=r'5\31\23')
+        path = image_upload(inst, 'image_name.png')
+
+        seg2 = self._second_segment(path)
+        self.assertTrue(seg2.startswith('rack_12_'))
+        self.assertNotIn('/', seg2)
+        self.assertNotIn('\\', seg2)
+        self.assertTrue(seg2.endswith('.png'))
+
+    def test_prefix_format_is_as_expected(self):
+        """
+        Tests the output path format generated by the `image_upload` function.
+        """
+        inst = self._stub_instance(object_id=99, name='label')
+        path = image_upload(inst, 'a.webp')
+        # The second segment must begin with "rack_99_"
+        seg2 = self._second_segment(path)
+        self.assertTrue(seg2.startswith('rack_99_'))
+        self.assertTrue(seg2.endswith('.webp'))
+
+    def test_unsupported_file_extension(self):
+        """
+        Test that when the file extension is not allowed, the extension
+        is omitted.
+        """
+        inst = self._stub_instance(name='test')
+        path = image_upload(inst, 'document.txt')
+
+        seg2 = self._second_segment(path)
+        self.assertTrue(seg2.startswith('rack_12_test'))
+        self.assertFalse(seg2.endswith('.txt'))
+        # When not allowed, no extension should be appended
+        self.assertNotRegex(seg2, r'\.txt$')
+
+    def test_instance_name_with_whitespace_and_special_chars(self):
+        """
+        Test that an instance name with leading/trailing whitespace and
+        special characters is sanitized properly.
+        """
+        # Suppose the instance name has surrounding whitespace and
+        # extra slashes.
+        inst = self._stub_instance(name='  my/complex\\name  ')
+        path = image_upload(inst, 'irrelevant.png')
+
+        # The output should be flattened and sanitized.
+        # We expect the name to be transformed into a valid filename without
+        # path separators.
+        seg2 = self._second_segment(path)
+        self.assertNotIn(' ', seg2)
+        self.assertNotIn('/', seg2)
+        self.assertNotIn('\\', seg2)
+        self.assertTrue(seg2.endswith('.png'))
+
+    def test_separator_variants_with_subTest(self):
+        """
+        Tests that both forward slash and backslash in file paths are
+        handled consistently by the `image_upload` function and
+        processed into a sanitized uniform format.
+        """
+        for name in ['2025/09/12', r'2025\09\12']:
+            with self.subTest(name=name):
+                inst = self._stub_instance(name=name)
+                path = image_upload(inst, 'x.jpeg')
+                seg2 = self._second_segment(path)
+                self.assertTrue(seg2.startswith('rack_12_'))
+                self.assertNotIn('/', seg2)
+                self.assertNotIn('\\', seg2)
+                self.assertTrue(seg2.endswith('.jpeg') or seg2.endswith('.jpg'))
+
+    def test_fallback_on_suspicious_file_operation(self):
+        """
+        Test that when default_storage.get_valid_name() raises a
+        SuspiciousFileOperation, the fallback default is used.
+        """
+        inst = self._stub_instance(name=' ')
+        path = image_upload(inst, 'sample.png')
+        # Expect the fallback name 'unnamed' to be used.
+        self.assertIn('unnamed', path)
+        self.assertTrue(path.startswith('image-attachments/rack_12_'))

+ 42 - 12
netbox/extras/utils.py

@@ -1,15 +1,20 @@
 import importlib
+from pathlib import Path
 
-from django.core.exceptions import ImproperlyConfigured
+from django.core.exceptions import ImproperlyConfigured, SuspiciousFileOperation
+from django.core.files.storage import default_storage
+from django.core.files.utils import validate_file_name
 from django.db import models
 from django.db.models import Q
 from taggit.managers import _TaggableManager
 
 from netbox.context import current_request
+
 from .validators import CustomValidator
 
 __all__ = (
     'SharedObjectViewMixin',
+    'filename_from_model',
     'image_upload',
     'is_report',
     'is_script',
@@ -35,13 +40,13 @@ class SharedObjectViewMixin:
 
 
 def filename_from_model(model: models.Model) -> str:
-    """Standardises how we generate filenames from model class for exports"""
+    """Standardizes how we generate filenames from model class for exports"""
     base = model._meta.verbose_name_plural.lower().replace(' ', '_')
     return f'netbox_{base}'
 
 
 def filename_from_object(context: dict) -> str:
-    """Standardises how we generate filenames from model class for exports"""
+    """Standardizes how we generate filenames from model class for exports"""
     if 'device' in context:
         base = f"{context['device'].name or 'config'}"
     elif 'virtualmachine' in context:
@@ -64,17 +69,42 @@ def is_taggable(obj):
 def image_upload(instance, filename):
     """
     Return a path for uploading image attachments.
+
+    - Normalizes browser paths (e.g., C:\\fake_path\\photo.jpg)
+    - Uses the instance.name if provided (sanitized to a *basename*, no ext)
+    - Prefixes with a machine-friendly identifier
+
+    Note: Relies on Django's default_storage utility.
     """
-    path = 'image-attachments/'
+    upload_dir = 'image-attachments'
+    default_filename = 'unnamed'
+    allowed_img_extensions = ('bmp', 'gif', 'jpeg', 'jpg', 'png', 'webp')
+
+    # Normalize Windows paths and create a Path object.
+    normalized_filename = str(filename).replace('\\', '/')
+    file_path = Path(normalized_filename)
+
+    # Extract the extension from the uploaded file.
+    ext = file_path.suffix.lower().lstrip('.')
+
+    # Use the instance-provided name if available; otherwise use the file stem.
+    # Rely on the storage backend's get_valid_name() to perform sanitization.
+    stem = (instance.name or file_path.stem).strip()
+    try:
+        safe_stem = default_storage.get_valid_name(stem)
+    except SuspiciousFileOperation:
+        safe_stem = default_filename
+
+    # Append the uploaded extension only if it's an allowed image type
+    final_name = f"{safe_stem}.{ext}" if ext in allowed_img_extensions else safe_stem
 
-    # Rename the file to the provided name, if any. Attempt to preserve the file extension.
-    extension = filename.rsplit('.')[-1].lower()
-    if instance.name and extension in ['bmp', 'gif', 'jpeg', 'jpg', 'png', 'webp']:
-        filename = '.'.join([instance.name, extension])
-    elif instance.name:
-        filename = instance.name
+    # Create a machine-friendly prefix from the instance
+    prefix = f"{instance.object_type.model}_{instance.object_id}"
+    name_with_path = f"{upload_dir}/{prefix}_{final_name}"
 
-    return '{}{}_{}_{}'.format(path, instance.object_type.name, instance.object_id, filename)
+    # Validate the generated relative path (blocks absolute/traversal)
+    validate_file_name(name_with_path, allow_relative_path=True)
+    return name_with_path
 
 
 def is_script(obj):
@@ -107,7 +137,7 @@ def run_validators(instance, validators):
     request = current_request.get()
     for validator in validators:
 
-        # Loading a validator class by dotted path
+        # Loading a validator class by a dotted path
         if type(validator) is str:
             module, cls = validator.rsplit('.', 1)
             validator = getattr(importlib.import_module(module), cls)()
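Illustrative input/output pairs for the rewritten image_upload() (mirroring the ImageUploadTests added in this commit; the exact sanitized stems depend on the storage backend's get_valid_name()):

    # object_type "rack", object_id 12, name=None,      upload r'C:\fake_path\MyPhoto.JPG' -> 'image-attachments/rack_12_<stem>.jpg'
    # object_type "rack", object_id 12, name='5/31/23', upload 'image.png'                 -> flat 'image-attachments/rack_12_<stem>.png' (no subdirectories)
    # object_type "rack", object_id 12, name='test',    upload 'document.txt'              -> 'image-attachments/rack_12_test' (disallowed extension dropped)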

+ 2 - 2
netbox/extras/views.py

@@ -1,3 +1,4 @@
+from datetime import datetime
 from django.contrib import messages
 from django.contrib.auth.mixins import LoginRequiredMixin
 from django.contrib.contenttypes.models import ContentType
@@ -1547,7 +1548,6 @@ class ScriptResultView(TableMixin, generic.ObjectView):
         except KeyError:
             log_threshold = LOG_LEVEL_RANK[LogLevelChoices.LOG_INFO]
         if job.data:
-
             if 'log' in job.data:
                 if 'tests' in job.data:
                     tests = job.data['tests']
@@ -1558,7 +1558,7 @@ class ScriptResultView(TableMixin, generic.ObjectView):
                         index += 1
                         result = {
                             'index': index,
-                            'time': log.get('time'),
+                            'time': datetime.fromisoformat(log.get('time')),
                             'status': log.get('status'),
                             'message': log.get('message'),
                             'object': log.get('obj'),

+ 1 - 0
netbox/ipam/filtersets.py

@@ -804,6 +804,7 @@ class FHRPGroupFilterSet(NetBoxModelFilterSet):
             return queryset
         return queryset.filter(
             Q(description__icontains=value) |
+            Q(group_id__contains=value) |
             Q(name__icontains=value)
         )
 

+ 0 - 7
netbox/ipam/forms/model_forms.py

@@ -580,13 +580,6 @@ class FHRPGroupAssignmentForm(forms.ModelForm):
         model = FHRPGroupAssignment
         fields = ('group', 'priority')
 
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-
-        ipaddresses = self.instance.interface.ip_addresses.all()
-        for ipaddress in ipaddresses:
-            self.fields['group'].widget.add_query_param('related_ip', ipaddress.pk)
-
     def clean_group(self):
         group = self.cleaned_data['group']
 

+ 1 - 1
netbox/ipam/utils.py

@@ -164,7 +164,7 @@ def available_vlans_from_range(vlans, vlan_group, vid_range):
         prev_vid = vlan.vid
 
     # Annotate any remaining available VLANs
-    if prev_vid < max_vid:
+    if prev_vid < max_vid - 1:
         new_vlans.append({
             'vid': prev_vid + 1,
             'vlan_group': vlan_group,

File diff not shown due to its large size
+ 0 - 0
netbox/project-static/dist/netbox.js


File diff not shown due to its large size
+ 0 - 0
netbox/project-static/dist/netbox.js.map


+ 1 - 0
netbox/project-static/img/plugin-default.svg

@@ -0,0 +1 @@
+<svg  xmlns="http://www.w3.org/2000/svg"  width="24"  height="24"  viewBox="0 0 24 24"  fill="none"  stroke="currentColor"  stroke-width="2"  stroke-linecap="round"  stroke-linejoin="round"  class="icon icon-tabler icons-tabler-outline icon-tabler-box"><path stroke="none" d="M0 0h24v24H0z" fill="none"/><path d="M12 3l8 4.5l0 9l-8 4.5l-8 -4.5l0 -9l8 -4.5" /><path d="M12 12l8 -4.5" /><path d="M12 12l0 9" /><path d="M12 12l-8 -4.5" /></svg>

+ 3 - 3
netbox/project-static/package.json

@@ -28,9 +28,9 @@
     "clipboard": "2.0.11",
     "flatpickr": "4.6.13",
     "gridstack": "12.3.3",
-    "htmx.org": "2.0.6",
-    "query-string": "9.2.2",
-    "sass": "1.91.0",
+    "htmx.org": "2.0.7",
+    "query-string": "9.3.0",
+    "sass": "1.92.1",
     "tom-select": "2.4.3",
     "typeface-inter": "3.18.1",
     "typeface-roboto-mono": "1.1.13"

+ 12 - 12
netbox/project-static/yarn.lock

@@ -2241,10 +2241,10 @@ hey-listen@^1.0.8:
   resolved "https://registry.yarnpkg.com/hey-listen/-/hey-listen-1.0.8.tgz#8e59561ff724908de1aa924ed6ecc84a56a9aa68"
   integrity sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q==
 
-htmx.org@2.0.6:
-  version "2.0.6"
-  resolved "https://registry.yarnpkg.com/htmx.org/-/htmx.org-2.0.6.tgz#42573483c72112e7e332dfe93043cd0eb32cda01"
-  integrity sha512-7ythjYneGSk3yCHgtCnQeaoF+D+o7U2LF37WU3O0JYv3gTZSicdEFiI/Ai/NJyC5ZpYJWMpUb11OC5Lr6AfAqA==
+htmx.org@2.0.7:
+  version "2.0.7"
+  resolved "https://registry.yarnpkg.com/htmx.org/-/htmx.org-2.0.7.tgz#991571e009a2ea4cb60e7af8bb4c1c8c0de32ecd"
+  integrity sha512-YiJqF3U5KyO28VC5mPfehKJPF+n1Gni+cupK+D69TF0nm7wY6AXn3a4mPWIikfAXtl1u1F1+ZhSCS7KT8pVmqA==
 
 ignore@^5.2.0:
   version "5.3.2"
@@ -2990,10 +2990,10 @@ punycode@^2.1.0:
   resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5"
   integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==
 
-query-string@9.2.2:
-  version "9.2.2"
-  resolved "https://registry.yarnpkg.com/query-string/-/query-string-9.2.2.tgz#a0104824edfdd2c1db2f18af71cef7abf6a3b20f"
-  integrity sha512-pDSIZJ9sFuOp6VnD+5IkakSVf+rICAuuU88Hcsr6AKL0QtxSIfVuKiVP2oahFI7tk3CRSexwV+Ya6MOoTxzg9g==
+query-string@9.3.0:
+  version "9.3.0"
+  resolved "https://registry.yarnpkg.com/query-string/-/query-string-9.3.0.tgz#f2d60d6b4442cb445f374b5ff749b937b2cccd03"
+  integrity sha512-IQHOQ9aauHAApwAaUYifpEyLHv6fpVGVkMOnwPzcDScLjbLj8tLsILn6unSW79NafOw1llh8oK7Gd0VwmXBFmA==
   dependencies:
     decode-uri-component "^0.4.1"
     filter-obj "^5.1.0"
@@ -3190,10 +3190,10 @@ safe-regex-test@^1.1.0:
     es-errors "^1.3.0"
     is-regex "^1.2.1"
 
-sass@1.91.0:
-  version "1.91.0"
-  resolved "https://registry.yarnpkg.com/sass/-/sass-1.91.0.tgz#7d4f7f624b35d43f78da1c339cab24426e28d7fa"
-  integrity sha512-aFOZHGf+ur+bp1bCHZ+u8otKGh77ZtmFyXDo4tlYvT7PWql41Kwd8wdkPqhhT+h2879IVblcHFglIMofsFd1EA==
+sass@1.92.1:
+  version "1.92.1"
+  resolved "https://registry.yarnpkg.com/sass/-/sass-1.92.1.tgz#07fb1fec5647d7b712685d1090628bf52456fe86"
+  integrity sha512-ffmsdbwqb3XeyR8jJR6KelIXARM9bFQe8A6Q3W4Klmwy5Ckd5gz7jgUNHo4UOqutU5Sk1DtKLbpDP0nLCg1xqQ==
   dependencies:
     chokidar "^4.0.0"
     immutable "^5.0.2"

+ 2 - 2
netbox/release.yaml

@@ -1,3 +1,3 @@
-version: "4.4.0"
+version: "4.4.1"
 edition: "Community"
-published: "2025-09-02"
+published: "2025-09-16"

+ 10 - 0
netbox/templates/extras/object_imageattachments.html

@@ -27,6 +27,16 @@
                 alt="{{ object.description|default:object.name }}"
               />
             </a>
+          {% empty %}
+            <a href="{{ object.get_absolute_url }}" class="d-block text-decoration-none" title="{{ object.name }}">
+              <div class="d-flex align-items-center justify-content-center rounded bg-light text-secondary border" style="width: 200px; height: 200px;">
+                <div class="text-center">
+                  <i class="mdi mdi-image-broken-variant display-4"></i>
+                  <div class="small mt-2 text-dark">{% trans "Thumbnail cannot be generated" %}</div>
+                  <div class="small fw-bold text-dark">{% trans "Click to view original" %}</div>
+                </div>
+              </div>
+            </a>
           {% endthumbnail %}
           <div class="text-center text-secondary text-truncate fs-5">
             {{ object }}

BIN
netbox/translations/cs/LC_MESSAGES/django.mo


File diff not shown due to its large size
+ 302 - 293
netbox/translations/cs/LC_MESSAGES/django.po


BIN
netbox/translations/da/LC_MESSAGES/django.mo


File diff not shown due to its large size
+ 302 - 293
netbox/translations/da/LC_MESSAGES/django.po


BIN
netbox/translations/de/LC_MESSAGES/django.mo


File diff not shown due to its large size
+ 304 - 295
netbox/translations/de/LC_MESSAGES/django.po


File diff not shown due to its large size
+ 216 - 156
netbox/translations/en/LC_MESSAGES/django.po


BIN
netbox/translations/es/LC_MESSAGES/django.mo


File diff not shown due to its large size
+ 302 - 293
netbox/translations/es/LC_MESSAGES/django.po


BIN
netbox/translations/fr/LC_MESSAGES/django.mo


File diff not shown due to its large size
+ 304 - 295
netbox/translations/fr/LC_MESSAGES/django.po


BIN
netbox/translations/it/LC_MESSAGES/django.mo


File diff not shown due to its large size
+ 302 - 293
netbox/translations/it/LC_MESSAGES/django.po


BIN
netbox/translations/ja/LC_MESSAGES/django.mo


File diff not shown due to its large size
+ 302 - 293
netbox/translations/ja/LC_MESSAGES/django.po


BIN
netbox/translations/nl/LC_MESSAGES/django.mo


File diff not shown due to its large size
+ 302 - 293
netbox/translations/nl/LC_MESSAGES/django.po


BIN
netbox/translations/pl/LC_MESSAGES/django.mo


File diff not shown due to its large size
+ 302 - 293
netbox/translations/pl/LC_MESSAGES/django.po


BIN
netbox/translations/pt/LC_MESSAGES/django.mo


File diff not shown due to its large size
+ 302 - 293
netbox/translations/pt/LC_MESSAGES/django.po


BIN
netbox/translations/ru/LC_MESSAGES/django.mo


File diff not shown due to its large size
+ 304 - 295
netbox/translations/ru/LC_MESSAGES/django.po


BIN
netbox/translations/tr/LC_MESSAGES/django.mo


File diff not shown due to its large size
+ 302 - 293
netbox/translations/tr/LC_MESSAGES/django.po


BIN
netbox/translations/uk/LC_MESSAGES/django.mo


File diff not shown due to its large size
+ 302 - 293
netbox/translations/uk/LC_MESSAGES/django.po


BIN
netbox/translations/zh/LC_MESSAGES/django.mo


File diff not shown due to its large size
+ 302 - 293
netbox/translations/zh/LC_MESSAGES/django.po


+ 34 - 12
netbox/users/forms/model_forms.py

@@ -1,3 +1,5 @@
+import json
+
 from django import forms
 from django.conf import settings
 from django.contrib.auth import password_validation
@@ -13,7 +15,11 @@ from netbox.preferences import PREFERENCES
 from users.constants import *
 from users.models import *
 from utilities.data import flatten_dict
-from utilities.forms.fields import ContentTypeMultipleChoiceField, DynamicModelMultipleChoiceField
+from utilities.forms.fields import (
+    ContentTypeMultipleChoiceField,
+    DynamicModelMultipleChoiceField,
+    JSONField,
+)
 from utilities.forms.rendering import FieldSet
 from utilities.forms.widgets import DateTimePicker, SplitMultiSelectWidget
 from utilities.permissions import qs_filter_from_constraints
@@ -316,13 +322,22 @@ class ObjectPermissionForm(forms.ModelForm):
         required=False,
         queryset=Group.objects.all()
     )
+    constraints = JSONField(
+        required=False,
+        label=_('Constraints'),
+        help_text=_(
+            'JSON expression of a queryset filter that will return only permitted objects. Leave null '
+            'to match all objects of this type. A list of multiple objects will result in a logical OR '
+            'operation.'
+        ),
+    )
 
     fieldsets = (
         FieldSet('name', 'description', 'enabled'),
         FieldSet('can_view', 'can_add', 'can_change', 'can_delete', 'actions', name=_('Actions')),
         FieldSet('object_types', name=_('Objects')),
         FieldSet('groups', 'users', name=_('Assignment')),
-        FieldSet('constraints', name=_('Constraints'))
+        FieldSet('constraints', name=_('Constraints')),
     )
 
     class Meta:
@@ -330,13 +345,6 @@ class ObjectPermissionForm(forms.ModelForm):
         fields = [
             'name', 'description', 'enabled', 'object_types', 'users', 'groups', 'constraints', 'actions',
         ]
-        help_texts = {
-            'constraints': _(
-                'JSON expression of a queryset filter that will return only permitted objects. Leave null '
-                'to match all objects of this type. A list of multiple objects will result in a logical OR '
-                'operation.'
-            )
-        }
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
@@ -344,18 +352,32 @@ class ObjectPermissionForm(forms.ModelForm):
         # Make the actions field optional since the form uses it only for non-CRUD actions
         self.fields['actions'].required = False
 
-        # Populate assigned users and groups
+        # Prepare the appropriate fields when editing an existing ObjectPermission
         if self.instance.pk:
+            # Populate assigned users and groups
             self.fields['groups'].initial = self.instance.groups.values_list('id', flat=True)
             self.fields['users'].initial = self.instance.users.values_list('id', flat=True)
 
-        # Check the appropriate checkboxes when editing an existing ObjectPermission
-        if self.instance.pk:
+            # Check the appropriate checkboxes when editing an existing ObjectPermission
             for action in ['view', 'add', 'change', 'delete']:
                 if action in self.instance.actions:
                     self.fields[f'can_{action}'].initial = True
                     self.instance.actions.remove(action)
 
+        # Populate initial data for a new ObjectPermission
+        elif self.initial:
+            # Handle cloned objects - actions come from initial data (URL parameters)
+            if 'actions' in self.initial:
+                if cloned_actions := self.initial['actions']:
+                    for action in ['view', 'add', 'change', 'delete']:
+                        if action in cloned_actions:
+                            self.fields[f'can_{action}'].initial = True
+                            self.initial['actions'].remove(action)
+            # Deserialize constraints supplied via initial data as a JSON string
+            if 'constraints' in self.initial:
+                if type(self.initial['constraints']) is str:
+                    self.initial['constraints'] = json.loads(self.initial['constraints'])
+
     def clean(self):
         super().clean()
 

+ 22 - 0
netbox/users/migrations/0012_drop_django_admin_log_table.py

@@ -0,0 +1,22 @@
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('users', '0011_concrete_objecttype'),
+    ]
+
+    operations = [
+        # Django admin UI was removed in NetBox v4.0
+        # Older installations may still have the old `django_admin_log` table in place
+        # Drop the obsolete table if it exists. This is a no-op on fresh or already-clean DBs.
+        migrations.RunSQL(
+            sql='DROP TABLE IF EXISTS "django_admin_log";',
+            reverse_sql=migrations.RunSQL.noop,
+        ),
+        # Clean up a potential leftover sequence in older DBs
+        migrations.RunSQL(
+            sql='DROP SEQUENCE IF EXISTS "django_admin_log_id_seq";',
+            reverse_sql=migrations.RunSQL.noop,
+        ),
+    ]

+ 6 - 1
netbox/users/models/permissions.py

@@ -3,6 +3,7 @@ from django.db import models
 from django.urls import reverse
 from django.utils.translation import gettext_lazy as _
 
+from netbox.models.features import CloningMixin
 from utilities.querysets import RestrictedQuerySet
 
 __all__ = (
@@ -10,7 +11,7 @@ __all__ = (
 )
 
 
-class ObjectPermission(models.Model):
+class ObjectPermission(CloningMixin, models.Model):
     """
     A mapping of view, add, change, and/or delete permission for users and/or groups to an arbitrary set of objects
     identified by ORM query parameters.
@@ -43,6 +44,10 @@ class ObjectPermission(models.Model):
         help_text=_("Queryset filter matching the applicable objects of the selected type(s)")
     )
 
+    clone_fields = (
+        'description', 'enabled', 'object_types', 'actions', 'constraints',
+    )
+
     objects = RestrictedQuerySet.as_manager()
 
     class Meta:

+ 21 - 6
netbox/utilities/serialization.py

@@ -51,30 +51,45 @@ def serialize_object(obj, resolve_tags=True, extra=None, exclude=None):
     return data
 
 
-def deserialize_object(model, fields, pk=None):
+def deserialize_object(model, data, pk=None):
     """
     Instantiate an object from the given model and field data. Functions as
     the complement to serialize_object().
     """
     content_type = ContentType.objects.get_for_model(model)
+    data = data.copy()
     m2m_data = {}
 
     # Account for custom field data
-    if 'custom_fields' in fields:
-        fields['custom_field_data'] = fields.pop('custom_fields')
+    if 'custom_fields' in data:
+        data['custom_field_data'] = data.pop('custom_fields')
 
     # Pop any assigned tags to handle the M2M relationships manually
-    if is_taggable(model) and fields.get('tags'):
+    if is_taggable(model) and data.get('tags'):
         Tag = apps.get_model('extras', 'Tag')
-        m2m_data['tags'] = Tag.objects.filter(name__in=fields.pop('tags'))
+        m2m_data['tags'] = Tag.objects.filter(name__in=data.pop('tags'))
+
+    # Separate any non-field attributes for assignment after deserialization of the object
+    model_fields = [
+        field.name for field in model._meta.get_fields()
+    ]
+    attrs = {
+        name: data.pop(name) for name in list(data.keys())
+        if name not in model_fields
+    }
 
+    # Employ Django's native Python deserializer to produce the instance
     data = {
         'model': '.'.join(content_type.natural_key()),
         'pk': pk,
-        'fields': fields,
+        'fields': data,
     }
     instance = list(serializers.deserialize('python', [data]))[0]
 
+    # Assign non-field attributes
+    for name, value in attrs.items():
+        setattr(instance.object, name, value)
+
     # Apply any additional M2M assignments
     instance.m2m_data.update(**m2m_data)
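Sketch of the round trip this change enables for Cable (see dcim/models/cables.py above); attribute names follow that diff, while the cable variable itself is hypothetical:

    # data = cable.serialize_object()       # adds 'a_terminations'/'b_terminations' as sorted CableTermination pks
    # Cable.deserialize_object(data, pk=cable.pk)
    #                                       # pops those keys, rebuilds the termination lists from the referenced
    #                                       # CableTermination objects, and assigns them onto the deserialized instance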
 

+ 4 - 4
netbox/utilities/testing/api.py

@@ -247,9 +247,9 @@ class APIViewTestCases:
             if issubclass(self.model, ChangeLoggingMixin):
                 objectchange = ObjectChange.objects.get(
                     changed_object_type=ContentType.objects.get_for_model(instance),
-                    changed_object_id=instance.pk
+                    changed_object_id=instance.pk,
+                    action=ObjectChangeActionChoices.ACTION_CREATE,
                 )
-                self.assertEqual(objectchange.action, ObjectChangeActionChoices.ACTION_CREATE)
                 self.assertEqual(objectchange.message, data['changelog_message'])
 
         def test_bulk_create_objects(self):
@@ -298,11 +298,11 @@ class APIViewTestCases:
                 ]
                 objectchanges = ObjectChange.objects.filter(
                     changed_object_type=ContentType.objects.get_for_model(self.model),
-                    changed_object_id__in=id_list
+                    changed_object_id__in=id_list,
+                    action=ObjectChangeActionChoices.ACTION_CREATE,
                 )
                 self.assertEqual(len(objectchanges), len(self.create_data))
                 for oc in objectchanges:
-                    self.assertEqual(oc.action, ObjectChangeActionChoices.ACTION_CREATE)
                     self.assertEqual(oc.message, changelog_message)
 
     class UpdateObjectViewTestCase(APITestCase):

+ 16 - 0
netbox/utilities/testing/base.py

@@ -1,9 +1,11 @@
 import json
+from contextlib import contextmanager
 
 from django.contrib.contenttypes.fields import GenericForeignKey
 from django.contrib.contenttypes.models import ContentType
 from django.contrib.postgres.fields import ArrayField, RangeField
 from django.core.exceptions import FieldDoesNotExist
+from django.db import transaction
 from django.db.models import ManyToManyField, ManyToManyRel, JSONField
 from django.forms.models import model_to_dict
 from django.test import Client, TestCase as _TestCase
@@ -36,6 +38,20 @@ class TestCase(_TestCase):
         self.client = Client()
         self.client.force_login(self.user)
 
+    @contextmanager
+    def cleanupSubTest(self, **params):
+        """
+        Context manager that wraps subTest with automatic cleanup.
+        All database changes within the context will be rolled back.
+        """
+        sid = transaction.savepoint()
+
+        try:
+            with self.subTest(**params):
+                yield
+        finally:
+            transaction.savepoint_rollback(sid)
+
     #
     # Permissions management
     #
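
Illustrative usage only (both helper calls below stand in for test-specific code): each scenario runs as its own subTest, and the savepoint rollback discards whatever the scenario created, so later scenarios start from the same database state.

    def test_import_scenarios(self):
        for scenario in ('default', 'with_optional_fields'):
            with self.cleanupSubTest(scenario=scenario):
                self._import_objects(scenario)                  # hypothetical helper
                self.assertTrue(self._get_queryset().exists())  # hypothetical helper
            # Rows created inside the block have been rolled back at this point.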

+ 111 - 38
netbox/utilities/testing/views.py

@@ -152,7 +152,6 @@ class ViewTestCases:
 
         @override_settings(EXEMPT_VIEW_PERMISSIONS=['*'], EXEMPT_EXCLUDE_MODELS=[])
         def test_create_object_with_permission(self):
-
             # Assign unconstrained permission
             obj_perm = ObjectPermission(
                 name='Test permission',
@@ -586,19 +585,59 @@ class ViewTestCases:
             response = self.client.post(**request)
             self.assertHttpStatus(response, 302)
             self.assertEqual(initial_count + self.bulk_create_count, self._get_queryset().count())
-            for instance in self._get_queryset().order_by('-pk')[:self.bulk_create_count]:
+            for instance in self._get_queryset().order_by('-pk')[: self.bulk_create_count]:
                 self.assertInstanceEqual(instance, self.bulk_create_data, exclude=self.validation_excluded_fields)
 
     class BulkImportObjectsViewTestCase(ModelViewTestCase):
         """
         Create multiple instances from imported data.
 
-        :csv_data: A list of CSV-formatted lines (starting with the headers) to be used for bulk object import.
+        :csv_data: CSV data for bulk import testing. Supports two formats:
+
+            1. Tuple/list format (backwards compatible):
+                csv_data = (
+                    "name,slug,description",
+                    "Object 1,object-1,First object",
+                    "Object 2,object-2,Second object",
+                )
+
+            2. Dictionary format for multiple scenarios:
+                csv_data = {
+                    'default': (
+                        "name,slug,description",
+                        "Object 1,object-1,First object",
+                    ),
+                    'with_optional_fields': (
+                        "name,slug,description,comments",
+                        "Object 2,object-2,Second object,With comments",
+                    )
+                }
+
+            When using dictionary format, test_bulk_import_objects_with_permission()
+            runs each scenario as a separate subtest with clear output:
+
+                test_bulk_import_objects_with_permission (scenario=default) ... ok
+                test_bulk_import_objects_with_permission (scenario=with_optional_fields) ... ok
         """
+
         csv_data = ()
 
-        def _get_csv_data(self):
-            return '\n'.join(self.csv_data)
+        def get_scenarios(self):
+            return self.csv_data.keys() if isinstance(self.csv_data, dict) else ['default']
+
+        def _get_csv_data(self, scenario_name='default'):
+            """
+            Get CSV data for testing. Supports both tuple/list and dictionary formats.
+            """
+            if isinstance(self.csv_data, dict):
+                if scenario_name not in self.csv_data:
+                    available = ', '.join(self.csv_data.keys())
+                    raise ValueError(f"Scenario '{scenario_name}' not found in csv_data. Available: {available}")
+                return '\n'.join(self.csv_data[scenario_name])
+            elif isinstance(self.csv_data, (tuple, list)):
+                return '\n'.join(self.csv_data)
+            else:
+                raise TypeError(f'csv_data must be a tuple, list, or dictionary, got {type(self.csv_data)}')
 
         def _get_update_csv_data(self):
             return self.csv_update_data, '\n'.join(self.csv_update_data)
@@ -620,10 +659,36 @@ class ViewTestCases:
                 self.assertHttpStatus(response, 403)
 
         @override_settings(EXEMPT_VIEW_PERMISSIONS=['*'], EXEMPT_EXCLUDE_MODELS=[])
-        def test_bulk_import_objects_with_permission(self):
+        def test_bulk_import_objects_with_permission(self, post_import_callback=None):
+            # Assign model-level permission once for all scenarios
+            obj_perm = ObjectPermission(name='Test permission', actions=['add'])
+            obj_perm.save()
+            obj_perm.users.add(self.user)
+            obj_perm.object_types.add(ObjectType.objects.get_for_model(self.model))
+
+            # Try GET with model-level permission (only once)
+            self.assertHttpStatus(self.client.get(self._get_url('bulk_import')), 200)
+
+            # Test each scenario
+            for scenario_name in self.get_scenarios():
+                with self.cleanupSubTest(scenario=scenario_name):
+                    self._test_bulk_import_with_permission_scenario(scenario_name)
+
+                    if post_import_callback:
+                        post_import_callback(scenario_name)
+
+        def _test_bulk_import_with_permission_scenario(self, scenario_name):
+            """
+            Helper method to test a single bulk import scenario.
+            """
             initial_count = self._get_queryset().count()
+
+            # Get CSV data for this scenario
+            scenario_data = self._get_csv_data(scenario_name)
+            expected_new_objects = len(scenario_data.splitlines()) - 1
+
             data = {
-                'data': self._get_csv_data(),
+                'data': scenario_data,
                 'format': ImportFormatChoices.CSV,
                 'csv_delimiter': CSVDelimiterChoices.AUTO,
             }
@@ -632,34 +697,25 @@ class ViewTestCases:
             if issubclass(self.model, ChangeLoggingMixin):
                 data['changelog_message'] = get_random_string(10)
 
-            # Assign model-level permission
-            obj_perm = ObjectPermission(
-                name='Test permission',
-                actions=['add']
-            )
-            obj_perm.save()
-            obj_perm.users.add(self.user)
-            obj_perm.object_types.add(ObjectType.objects.get_for_model(self.model))
-
-            # Try GET with model-level permission
-            self.assertHttpStatus(self.client.get(self._get_url('bulk_import')), 200)
-
             # Test POST with permission
             response = self.client.post(self._get_url('bulk_import'), data)
             self.assertHttpStatus(response, 302)
-            self.assertEqual(self._get_queryset().count(), initial_count + len(self.csv_data) - 1)
+
+            # Verify object count increase
+            self.assertEqual(self._get_queryset().count(), initial_count + expected_new_objects)
 
             # Verify ObjectChange creation
             if issubclass(self.model, ChangeLoggingMixin):
                 request_id = response.headers.get('X-Request-ID')
-                self.assertIsNotNone(request_id, "Unable to determine request ID from response")
+                self.assertIsNotNone(request_id, 'Unable to determine request ID from response')
                 objectchanges = ObjectChange.objects.filter(
                     changed_object_type=ContentType.objects.get_for_model(self.model),
-                    request_id=request_id
+                    request_id=request_id,
+                    action=ObjectChangeActionChoices.ACTION_CREATE,
                 )
-                self.assertEqual(len(objectchanges), len(self.csv_data) - 1)
+                self.assertEqual(len(objectchanges), expected_new_objects)
+
                 for oc in objectchanges:
-                    self.assertEqual(oc.action, ObjectChangeActionChoices.ACTION_CREATE)
                     self.assertEqual(oc.message, data['changelog_message'])
 
         @override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
@@ -701,35 +757,52 @@ class ViewTestCases:
                             self.assertEqual(value, value)
 
         @override_settings(EXEMPT_VIEW_PERMISSIONS=['*'], EXEMPT_EXCLUDE_MODELS=[])
-        def test_bulk_import_objects_with_constrained_permission(self):
-            initial_count = self._get_queryset().count()
-            data = {
-                'data': self._get_csv_data(),
-                'format': ImportFormatChoices.CSV,
-                'csv_delimiter': CSVDelimiterChoices.AUTO,
-            }
-
-            # Assign constrained permission
+        def test_bulk_import_objects_with_constrained_permission(self, post_import_callback=None):
+            # Assign constrained permission (deny all initially)
             obj_perm = ObjectPermission(
                 name='Test permission',
                 constraints={'pk': 0},  # Dummy permission to deny all
-                actions=['add']
+                actions=['add'],
             )
             obj_perm.save()
             obj_perm.users.add(self.user)
             obj_perm.object_types.add(ObjectType.objects.get_for_model(self.model))
 
-            # Attempt to import non-permitted objects
+            # Test each scenario with constrained permissions
+            for scenario_name in self.get_scenarios():
+                with self.cleanupSubTest(scenario=scenario_name):
+                    self._test_bulk_import_constrained_scenario(scenario_name, obj_perm)
+
+                    if post_import_callback:
+                        post_import_callback(scenario_name)
+
+        def _test_bulk_import_constrained_scenario(self, scenario_name, obj_perm):
+            """
+            Helper method to test a single bulk import scenario with constrained permissions.
+            """
+            initial_count = self._get_queryset().count()
+
+            # Get CSV data for this scenario
+            scenario_data = self._get_csv_data(scenario_name)
+            expected_new_objects = len(scenario_data.splitlines()) - 1
+
+            data = {
+                'data': scenario_data,
+                'format': ImportFormatChoices.CSV,
+                'csv_delimiter': CSVDelimiterChoices.AUTO,
+            }
+
+            # Attempt to import non-permitted objects (should fail)
             self.assertHttpStatus(self.client.post(self._get_url('bulk_import'), data), 200)
             self.assertEqual(self._get_queryset().count(), initial_count)
 
-            # Update permission constraints
+            # Update permission constraints to allow all
             obj_perm.constraints = {'pk__gt': 0}  # Dummy permission to allow all
             obj_perm.save()
 
-            # Import permitted objects
+            # Import permitted objects (should succeed)
             self.assertHttpStatus(self.client.post(self._get_url('bulk_import'), data), 302)
-            self.assertEqual(self._get_queryset().count(), initial_count + len(self.csv_data) - 1)
+            self.assertEqual(self._get_queryset().count(), initial_count + expected_new_objects)
 
     class BulkEditObjectsViewTestCase(ModelViewTestCase):
         """

+ 49 - 0
netbox/utilities/tests/test_serialization.py

@@ -0,0 +1,49 @@
+from django.test import TestCase
+
+from dcim.choices import SiteStatusChoices
+from dcim.models import Site
+from extras.models import Tag
+from utilities.serialization import deserialize_object, serialize_object
+
+
+class SerializationTestCase(TestCase):
+
+    @classmethod
+    def setUpTestData(cls):
+        tags = (
+            Tag(name='Tag 1', slug='tag-1'),
+            Tag(name='Tag 2', slug='tag-2'),
+            Tag(name='Tag 3', slug='tag-3'),
+        )
+        Tag.objects.bulk_create(tags)
+
+    def test_serialize_object(self):
+        site = Site.objects.create(
+            name='Site 1',
+            slug='site-1',
+            description='Ignore me',
+        )
+        site.tags.set(Tag.objects.all())
+
+        data = serialize_object(site, extra={'foo': 123}, exclude=['description'])
+        self.assertEqual(data['name'], site.name)
+        self.assertEqual(data['slug'], site.slug)
+        self.assertEqual(data['tags'], [tag.name for tag in Tag.objects.all()])
+        self.assertEqual(data['foo'], 123)
+        self.assertNotIn('description', data)
+
+    def test_deserialize_object(self):
+        data = {
+            'name': 'Site 1',
+            'slug': 'site-1',
+            'tags': ['Tag 1', 'Tag 2', 'Tag 3'],
+            'foo': 123,
+        }
+
+        instance = deserialize_object(Site, data, pk=123)
+        self.assertEqual(instance.object.pk, 123)
+        self.assertEqual(instance.object.name, data['name'])
+        self.assertEqual(instance.object.slug, data['slug'])
+        self.assertEqual(instance.object.status, SiteStatusChoices.STATUS_ACTIVE)  # Default field value
+        self.assertEqual(instance.object.foo, data['foo'])  # Non-field attribute
+        self.assertEqual(list(instance.m2m_data['tags']), list(Tag.objects.all()))

+ 1 - 1
pyproject.toml

@@ -3,7 +3,7 @@
 
 [project]
 name = "netbox"
-version = "4.4.0"
+version = "4.4.1"
 requires-python = ">=3.10"
 description = "The premier source of truth powering network automation."
 readme = "README.md"

+ 11 - 11
requirements.txt

@@ -1,15 +1,15 @@
 colorama==0.4.6
-Django==5.2.5
-django-cors-headers==4.7.0
+Django==5.2.6
+django-cors-headers==4.8.0
 django-debug-toolbar==5.2.0
 django-filter==25.1
 django-graphiql-debug-toolbar==0.2.0
-django-htmx==1.23.2
+django-htmx==1.24.1
 django-mptt==0.17.0
 django-pglocks==1.0.4
 django-prometheus==2.4.1
 django-redis==6.0.0
-django-rich==2.0.0
+django-rich==2.1.0
 django-rq==3.1
 django-storages==1.14.6
 django-tables2==2.7.5
@@ -17,26 +17,26 @@ django-taggit==6.1.0
 django-timezone-field==7.1
 djangorestframework==3.16.1
 drf-spectacular==0.28.0
-drf-spectacular-sidecar==2025.8.1
-feedparser==6.0.11
+drf-spectacular-sidecar==2025.9.1
+feedparser==6.0.12
 gunicorn==23.0.0
 Jinja2==3.1.6
 jsonschema==4.25.1
-Markdown==3.8.2
-mkdocs-material==9.6.18
+Markdown==3.9
+mkdocs-material==9.6.20
 mkdocstrings==0.30.0
 mkdocstrings-python==1.18.2
 netaddr==1.3.0
 nh3==0.3.0
 Pillow==11.3.0
-psycopg[c,pool]==3.2.9
+psycopg[c,pool]==3.2.10
 PyYAML==6.0.2
 requests==2.32.5
-rq==2.5.0
+rq==2.6.0
 social-auth-app-django==5.5.1
 social-auth-core==4.7.0
 sorl-thumbnail==12.11.0
-strawberry-graphql==0.281.0
+strawberry-graphql==0.282.0
 strawberry-graphql-django==0.65.1
 svgwrite==1.4.3
 tablib==3.8.0

Some files were not shown because too many files changed in this diff