diff --git a/.cherry_picker.toml b/.cherry_picker.toml
index 677644159c78a..484fcf386c2fc 100644
--- a/.cherry_picker.toml
+++ b/.cherry_picker.toml
@@ -21,3 +21,4 @@ repo = "airflow"
 fix_commit_msg = false
 default_branch = "main"
 require_version_in_branch_name=false
+draft_pr = true
diff --git a/.github/actions/install-pre-commit/action.yml b/.github/actions/install-pre-commit/action.yml
index 8ac0440ceae7f..30a3367710a92 100644
--- a/.github/actions/install-pre-commit/action.yml
+++ b/.github/actions/install-pre-commit/action.yml
@@ -24,7 +24,7 @@ inputs:
     default: "3.9"
   uv-version:
     description: 'uv version to use'
-    default: "0.5.14"  # Keep this comment to allow automatic replacement of uv version
+    default: "0.5.20"  # Keep this comment to allow automatic replacement of uv version
   pre-commit-version:
     description: 'pre-commit version to use'
     default: "4.0.1"  # Keep this comment to allow automatic replacement of pre-commit version
diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml
index 075147746ef80..2f14b5e79773a 100644
--- a/.github/boring-cyborg.yml
+++ b/.github/boring-cyborg.yml
@@ -225,9 +225,7 @@ labelPRBasedOnFilePath:
     - providers/tests/system/docker/**/*
 
   provider:edge:
-    - providers/src/airflow/providers/edge/**/*
-    - docs/apache-airflow-providers-edge/**/*
-    - providers/tests/edge/**/*
+    - providers/edge/**
 
   provider:elasticsearch:
     - providers/src/airflow/providers/elasticsearch/**/*
diff --git a/.github/workflows/backport-cli.yml b/.github/workflows/backport-cli.yml
index 53243006137a6..673607027496d 100644
--- a/.github/workflows/backport-cli.yml
+++ b/.github/workflows/backport-cli.yml
@@ -61,7 +61,7 @@ jobs:
       - name: Install Python dependencies
         run: |
           python -m pip install --upgrade pip
-          python -m pip install cherry-picker==2.4.0 requests==2.32.3
+          python -m pip install cherry-picker==2.5.0 requests==2.32.3
 
       - name: Run backport script
         id: execute-backport
diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml
index 764eb6f714aa4..3ae6e3af42df6 100644
--- a/.github/workflows/basic-tests.yml
+++ b/.github/workflows/basic-tests.yml
@@ -100,7 +100,7 @@ jobs:
       - name: "Cleanup docker"
         run: ./scripts/ci/cleanup_docker.sh
       - name: Setup pnpm
-        uses: pnpm/action-setup@v4.0.0
+        uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2  # v4.0.0
         with:
           version: 9
           run_install: false
diff --git a/Dockerfile b/Dockerfile
index fb82c882048c0..9b7e8a4391f3e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -55,7 +55,7 @@ ARG PYTHON_BASE_IMAGE="python:3.9-slim-bookworm"
 # Also use `force pip` label on your PR to swap all places we use `uv` to `pip`
 ARG AIRFLOW_PIP_VERSION=24.3.1
 # ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main"
-ARG AIRFLOW_UV_VERSION=0.5.14
+ARG AIRFLOW_UV_VERSION=0.5.20
 ARG AIRFLOW_USE_UV="false"
 ARG UV_HTTP_TIMEOUT="300"
 ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow"
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 5996ebe1ccb21..4e80ff1050abd 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -1268,7 +1268,7 @@ COPY --from=scripts common.sh install_packaging_tools.sh install_additional_depe
 # Also use `force pip` label on your PR to swap all places we use `uv` to `pip`
 ARG AIRFLOW_PIP_VERSION=24.3.1
 # ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main"
-ARG AIRFLOW_UV_VERSION=0.5.14
+ARG AIRFLOW_UV_VERSION=0.5.20
 # TODO(potiuk): automate with upgrade check (possibly)
 ARG AIRFLOW_PRE_COMMIT_VERSION="4.0.1"
 ARG AIRFLOW_PRE_COMMIT_UV_VERSION="4.1.4"
diff --git a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml
index 9c0d70a265ad2..ed0ce173bc55b 100644
--- a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml
+++ b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml
@@ -5989,68 +5989,6 @@ paths:
             application/json:
               schema:
                 $ref: '#/components/schemas/HTTPValidationError'
-  /public/variables/import:
-    post:
-      tags:
-      - Variable
-      summary: Import Variables
-      description: Import variables from a JSON file.
-      operationId: import_variables
-      parameters:
-      - name: action_if_exists
-        in: query
-        required: false
-        schema:
-          enum:
-          - overwrite
-          - fail
-          - skip
-          type: string
-          default: fail
-          title: Action If Exists
-      requestBody:
-        required: true
-        content:
-          multipart/form-data:
-            schema:
-              $ref: '#/components/schemas/Body_import_variables'
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/VariablesImportResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '400':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Bad Request
-        '409':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Conflict
-        '422':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unprocessable Entity
   /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/logs/{try_number}:
     get:
       tags:
@@ -6629,16 +6567,6 @@ components:
       - status
       title: BaseInfoResponse
       description: Base info serializer for responses.
-    Body_import_variables:
-      properties:
-        file:
-          type: string
-          format: binary
-          title: File
-      type: object
-      required:
-      - file
-      title: Body_import_variables
     ClearTaskInstancesBody:
       properties:
         dry_run:
@@ -10066,26 +9994,6 @@ components:
       - is_encrypted
       title: VariableResponse
       description: Variable serializer for responses.
-    VariablesImportResponse:
-      properties:
-        created_variable_keys:
-          items:
-            type: string
-          type: array
-          title: Created Variable Keys
-        import_count:
-          type: integer
-          title: Import Count
-        created_count:
-          type: integer
-          title: Created Count
-      type: object
-      required:
-      - created_variable_keys
-      - import_count
-      - created_count
-      title: VariablesImportResponse
-      description: Import Variables serializer for responses.
     VersionInfo:
       properties:
         version:
diff --git a/airflow/api_fastapi/core_api/routes/public/variables.py b/airflow/api_fastapi/core_api/routes/public/variables.py
index 19d1b24d7eba8..6bd850d76e609 100644
--- a/airflow/api_fastapi/core_api/routes/public/variables.py
+++ b/airflow/api_fastapi/core_api/routes/public/variables.py
@@ -16,10 +16,9 @@
 # under the License.
 from __future__ import annotations
 
-import json
-from typing import Annotated, Literal
+from typing import Annotated
 
-from fastapi import Depends, HTTPException, Query, UploadFile, status
+from fastapi import Depends, HTTPException, Query, status
 from fastapi.exceptions import RequestValidationError
 from pydantic import ValidationError
 from sqlalchemy import select
@@ -39,7 +38,6 @@
     VariableBulkResponse,
     VariableCollectionResponse,
     VariableResponse,
-    VariablesImportResponse,
 )
 from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc
 from airflow.api_fastapi.core_api.services.public.variables import (
@@ -192,59 +190,6 @@ def post_variable(
     return variable
 
 
-@variables_router.post(
-    "/import",
-    status_code=status.HTTP_200_OK,
-    responses=create_openapi_http_exception_doc(
-        [status.HTTP_400_BAD_REQUEST, status.HTTP_409_CONFLICT, status.HTTP_422_UNPROCESSABLE_ENTITY]
-    ),
-)
-def import_variables(
-    file: UploadFile,
-    session: SessionDep,
-    action_if_exists: Literal["overwrite", "fail", "skip"] = "fail",
-) -> VariablesImportResponse:
-    """Import variables from a JSON file."""
-    try:
-        file_content = file.file.read().decode("utf-8")
-        variables = json.loads(file_content)
-
-        if not isinstance(variables, dict):
-            raise ValueError("Uploaded JSON must contain key-value pairs.")
-    except (json.JSONDecodeError, ValueError) as e:
-        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"Invalid JSON format: {e}")
-
-    if not variables:
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail="No variables found in the provided JSON.",
-        )
-
-    existing_keys = {variable for variable in session.execute(select(Variable.key)).scalars()}
-    import_keys = set(variables.keys())
-
-    matched_keys = existing_keys & import_keys
-
-    if action_if_exists == "fail" and matched_keys:
-        raise HTTPException(
-            status_code=status.HTTP_409_CONFLICT,
-            detail=f"The variables with these keys: {matched_keys} already exists.",
-        )
-    elif action_if_exists == "skip":
-        create_keys = import_keys - matched_keys
-    else:
-        create_keys = import_keys
-
-    for key in create_keys:
-        Variable.set(key=key, value=variables[key], session=session)
-
-    return VariablesImportResponse(
-        created_count=len(create_keys),
-        import_count=len(import_keys),
-        created_variable_keys=list(create_keys),
-    )
-
-
 @variables_router.patch("")
 def bulk_variables(
     request: VariableBulkBody,
diff --git a/airflow/api_fastapi/execution_api/datamodels/asset.py b/airflow/api_fastapi/execution_api/datamodels/asset.py
new file mode 100644
index 0000000000000..6d3a53c3e4ca8
--- /dev/null
+++ b/airflow/api_fastapi/execution_api/datamodels/asset.py
@@ -0,0 +1,36 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from airflow.api_fastapi.core_api.base import BaseModel
+
+
+class AssetResponse(BaseModel):
+    """Asset schema for responses with fields that are needed for Runtime."""
+
+    name: str
+    uri: str
+    group: str
+    extra: dict | None = None
+
+
+class AssetAliasResponse(BaseModel):
+    """Asset alias schema with fields that are needed for Runtime."""
+
+    name: str
+    group: str
diff --git a/airflow/api_fastapi/execution_api/routes/__init__.py b/airflow/api_fastapi/execution_api/routes/__init__.py
index 0383503f18b87..793cd8fe08494 100644
--- a/airflow/api_fastapi/execution_api/routes/__init__.py
+++ b/airflow/api_fastapi/execution_api/routes/__init__.py
@@ -17,9 +17,17 @@
 from __future__ import annotations
 
 from airflow.api_fastapi.common.router import AirflowRouter
-from airflow.api_fastapi.execution_api.routes import connections, health, task_instances, variables, xcoms
+from airflow.api_fastapi.execution_api.routes import (
+    assets,
+    connections,
+    health,
+    task_instances,
+    variables,
+    xcoms,
+)
 
 execution_api_router = AirflowRouter()
+execution_api_router.include_router(assets.router, prefix="/assets", tags=["Assets"])
 execution_api_router.include_router(connections.router, prefix="/connections", tags=["Connections"])
 execution_api_router.include_router(health.router, tags=["Health"])
 execution_api_router.include_router(task_instances.router, prefix="/task-instances", tags=["Task Instances"])
diff --git a/airflow/api_fastapi/execution_api/routes/assets.py b/airflow/api_fastapi/execution_api/routes/assets.py
new file mode 100644
index 0000000000000..213c599befb3e
--- /dev/null
+++ b/airflow/api_fastapi/execution_api/routes/assets.py
@@ -0,0 +1,71 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from typing import Annotated
+
+from fastapi import HTTPException, Query, status
+from sqlalchemy import select
+
+from airflow.api_fastapi.common.db.common import SessionDep
+from airflow.api_fastapi.common.router import AirflowRouter
+from airflow.api_fastapi.execution_api.datamodels.asset import AssetResponse
+from airflow.models.asset import AssetModel
+
+# TODO: Add dependency on JWT token
+router = AirflowRouter(
+    responses={
+        status.HTTP_404_NOT_FOUND: {"description": "Asset not found"},
+        status.HTTP_401_UNAUTHORIZED: {"description": "Unauthorized"},
+    },
+)
+
+
+@router.get("/by-name")
+def get_asset_by_name(
+    name: Annotated[str, Query(description="The name of the Asset")],
+    session: SessionDep,
+) -> AssetResponse:
+    """Get an Airflow Asset by `name`."""
+    asset = session.scalar(select(AssetModel).where(AssetModel.name == name, AssetModel.active.has()))
+    _raise_if_not_found(asset, f"Asset with name {name} not found")
+
+    return AssetResponse.model_validate(asset)
+
+
+@router.get("/by-uri")
+def get_asset_by_uri(
+    uri: Annotated[str, Query(description="The URI of the Asset")],
+    session: SessionDep,
+) -> AssetResponse:
+    """Get an Airflow Asset by `uri`."""
+    asset = session.scalar(select(AssetModel).where(AssetModel.uri == uri, AssetModel.active.has()))
+    _raise_if_not_found(asset, f"Asset with URI {uri} not found")
+
+    return AssetResponse.model_validate(asset)
+
+
+def _raise_if_not_found(asset, msg):
+    if asset is None:
+        raise HTTPException(
+            status.HTTP_404_NOT_FOUND,
+            detail={
+                "reason": "not_found",
+                "message": msg,
+            },
+        )
diff --git a/airflow/cli/commands/remote_commands/config_command.py b/airflow/cli/commands/remote_commands/config_command.py
index 1e0bcd17c9b78..5fc0148a2b790 100644
--- a/airflow/cli/commands/remote_commands/config_command.py
+++ b/airflow/cli/commands/remote_commands/config_command.py
@@ -327,6 +327,10 @@ def message(self) -> str:
         config=ConfigParameter("scheduler", "statsd_custom_client_path"),
         renamed_to=ConfigParameter("metrics", "statsd_custom_client_path"),
     ),
+    ConfigChange(
+        config=ConfigParameter("scheduler", "dag_dir_list_interval"),
+        renamed_to=ConfigParameter("dag_bundles", "refresh_interval"),
+    ),
     # celery
     ConfigChange(
         config=ConfigParameter("celery", "stalled_task_timeout"),
diff --git a/airflow/config_templates/config.yml b/airflow/config_templates/config.yml
index ba6af6ca11e13..5b99c94a4f33d 100644
--- a/airflow/config_templates/config.yml
+++ b/airflow/config_templates/config.yml
@@ -28,14 +28,6 @@ core:
       type: string
       example: ~
       default: "{AIRFLOW_HOME}/dags"
-    dag_bundle_storage_path:
-      description: |
-        The folder where Airflow bundles can store files locally (if required).
-        By default, this is ``tempfile.gettempdir()/airflow``. This path must be absolute.
-      version_added: 3.0.0
-      type: string
-      example: "`tempfile.gettempdir()/dag_bundles"
-      default: ~
     hostname_callable:
       description: |
         Hostname by providing a path to a callable, which will resolve the hostname.
@@ -2670,7 +2662,17 @@ dag_bundles:
     Configuration for the DAG bundles. This allows Airflow to load DAGs from different sources.
 
   options:
-    backends:
+    dag_bundle_storage_path:
+      description: |
+        String path to folder where Airflow bundles can store files locally. Not templated.
+        If no path is provided, Airflow will use ``Path(tempfile.gettempdir()) / "airflow"``.
+        This path must be absolute.
+      version_added: 3.0.0
+      type: string
+      example: "/tmp/some-place"
+      default: ~
+
+    config_list:
       description: |
         List of backend configs.  Must supply name, classpath, and kwargs for each backend.
 
diff --git a/airflow/dag_processing/bundles/base.py b/airflow/dag_processing/bundles/base.py
index da60f77cf4a96..9b55c0d4f0ecf 100644
--- a/airflow/dag_processing/bundles/base.py
+++ b/airflow/dag_processing/bundles/base.py
@@ -74,7 +74,7 @@ def _dag_bundle_root_storage_path(self) -> Path:
 
         This is the root path, shared by various bundles. Each bundle should have its own subdirectory.
         """
-        if configured_location := conf.get("core", "dag_bundle_storage_path", fallback=None):
+        if configured_location := conf.get("dag_bundles", "dag_bundle_storage_path", fallback=None):
             return Path(configured_location)
         return Path(tempfile.gettempdir(), "airflow", "dag_bundles")
 
diff --git a/airflow/dag_processing/bundles/manager.py b/airflow/dag_processing/bundles/manager.py
index c5a2115b24f75..2aa8cf2303ddd 100644
--- a/airflow/dag_processing/bundles/manager.py
+++ b/airflow/dag_processing/bundles/manager.py
@@ -54,7 +54,7 @@ def parse_config(self) -> None:
         if self._bundle_config:
             return
 
-        backends = conf.getjson("dag_bundles", "backends")
+        backends = conf.getjson("dag_bundles", "config_list")
 
         if not backends:
             return
diff --git a/airflow/exceptions.py b/airflow/exceptions.py
index 163a9ca626619..fd4fbf6758f07 100644
--- a/airflow/exceptions.py
+++ b/airflow/exceptions.py
@@ -461,7 +461,7 @@ class TaskDeferralTimeout(AirflowException):
 # 2) if you have new provider, both provider and pod generator will throw the
 #    "airflow.providers.cncf.kubernetes" as it will be imported here from the provider.
 try:
-    from airflow.providers.cncf.kubernetes.pod_generator import PodMutationHookException
+    from airflow.providers.cncf.kubernetes.exceptions import PodMutationHookException
 except ImportError:
 
     class PodMutationHookException(AirflowException):  # type: ignore[no-redef]
@@ -469,7 +469,7 @@ class PodMutationHookException(AirflowException):  # type: ignore[no-redef]
 
 
 try:
-    from airflow.providers.cncf.kubernetes.pod_generator import PodReconciliationError
+    from airflow.providers.cncf.kubernetes.exceptions import PodReconciliationError
 except ImportError:
 
     class PodReconciliationError(AirflowException):  # type: ignore[no-redef]
diff --git a/airflow/executors/workloads.py b/airflow/executors/workloads.py
index 9a5e425ef887d..4c3eebe6811b9 100644
--- a/airflow/executors/workloads.py
+++ b/airflow/executors/workloads.py
@@ -97,7 +97,7 @@ def make(cls, ti: TIModel, dag_rel_path: Path | None = None) -> ExecuteTask:
         from airflow.utils.helpers import log_filename_template_renderer
 
         ser_ti = TaskInstance.model_validate(ti, from_attributes=True)
-        bundle_info = BundleInfo.model_construct(
+        bundle_info = BundleInfo(
             name=ti.dag_model.bundle_name,
             version=ti.dag_run.bundle_version,
         )
diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py
index 11c293b531fa6..d828a9a5b6b24 100644
--- a/airflow/serialization/serialized_objects.py
+++ b/airflow/serialization/serialized_objects.py
@@ -64,6 +64,7 @@
     BaseAsset,
 )
 from airflow.sdk.definitions.baseoperator import BaseOperator as TaskSDKBaseOperator
+from airflow.sdk.execution_time.context import AssetAliasEvent, OutletEventAccessor
 from airflow.serialization.dag_dependency import DagDependency
 from airflow.serialization.enums import DagAttributeTypes as DAT, Encoding
 from airflow.serialization.helpers import serialize_template_field
@@ -77,10 +78,8 @@
 from airflow.triggers.base import BaseTrigger, StartTriggerArgs
 from airflow.utils.code_utils import get_python_source
 from airflow.utils.context import (
-    AssetAliasEvent,
     ConnectionAccessor,
     Context,
-    OutletEventAccessor,
     OutletEventAccessors,
     VariableAccessor,
 )
diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts
index a34f57020cd83..b6cf77099005b 100644
--- a/airflow/ui/openapi-gen/queries/common.ts
+++ b/airflow/ui/openapi-gen/queries/common.ts
@@ -1625,9 +1625,6 @@ export type PoolServicePostPoolMutationResult = Awaited<ReturnType<typeof PoolSe
 export type VariableServicePostVariableMutationResult = Awaited<
   ReturnType<typeof VariableService.postVariable>
 >;
-export type VariableServiceImportVariablesMutationResult = Awaited<
-  ReturnType<typeof VariableService.importVariables>
->;
 export type BackfillServicePauseBackfillMutationResult = Awaited<
   ReturnType<typeof BackfillService.pauseBackfill>
 >;
diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts
index a87e218adce4c..a43172d73c7ca 100644
--- a/airflow/ui/openapi-gen/queries/queries.ts
+++ b/airflow/ui/openapi-gen/queries/queries.ts
@@ -32,7 +32,6 @@ import {
 } from "../requests/services.gen";
 import {
   BackfillPostBody,
-  Body_import_variables,
   ClearTaskInstancesBody,
   ConnectionBody,
   ConnectionBulkBody,
@@ -3124,46 +3123,6 @@ export const useVariableServicePostVariable = <
       VariableService.postVariable({ requestBody }) as unknown as Promise<TData>,
     ...options,
   });
-/**
- * Import Variables
- * Import variables from a JSON file.
- * @param data The data for the request.
- * @param data.formData
- * @param data.actionIfExists
- * @returns VariablesImportResponse Successful Response
- * @throws ApiError
- */
-export const useVariableServiceImportVariables = <
-  TData = Common.VariableServiceImportVariablesMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        actionIfExists?: "overwrite" | "fail" | "skip";
-        formData: Body_import_variables;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      actionIfExists?: "overwrite" | "fail" | "skip";
-      formData: Body_import_variables;
-    },
-    TContext
-  >({
-    mutationFn: ({ actionIfExists, formData }) =>
-      VariableService.importVariables({ actionIfExists, formData }) as unknown as Promise<TData>,
-    ...options,
-  });
 /**
  * Pause Backfill
  * @param data The data for the request.
diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts
index 99341b970cac1..598c487ccbd02 100644
--- a/airflow/ui/openapi-gen/requests/schemas.gen.ts
+++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts
@@ -482,19 +482,6 @@ export const $BaseInfoResponse = {
   description: "Base info serializer for responses.",
 } as const;
 
-export const $Body_import_variables = {
-  properties: {
-    file: {
-      type: "string",
-      format: "binary",
-      title: "File",
-    },
-  },
-  type: "object",
-  required: ["file"],
-  title: "Body_import_variables",
-} as const;
-
 export const $ClearTaskInstancesBody = {
   properties: {
     dry_run: {
@@ -5749,30 +5736,6 @@ export const $VariableResponse = {
   description: "Variable serializer for responses.",
 } as const;
 
-export const $VariablesImportResponse = {
-  properties: {
-    created_variable_keys: {
-      items: {
-        type: "string",
-      },
-      type: "array",
-      title: "Created Variable Keys",
-    },
-    import_count: {
-      type: "integer",
-      title: "Import Count",
-    },
-    created_count: {
-      type: "integer",
-      title: "Created Count",
-    },
-  },
-  type: "object",
-  required: ["created_variable_keys", "import_count", "created_count"],
-  title: "VariablesImportResponse",
-  description: "Import Variables serializer for responses.",
-} as const;
-
 export const $VersionInfo = {
   properties: {
     version: {
diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts
index 0ce36911eca13..7f888aca34da8 100644
--- a/airflow/ui/openapi-gen/requests/services.gen.ts
+++ b/airflow/ui/openapi-gen/requests/services.gen.ts
@@ -187,8 +187,6 @@ import type {
   PostVariableResponse,
   BulkVariablesData,
   BulkVariablesResponse,
-  ImportVariablesData,
-  ImportVariablesResponse,
   ReparseDagFileData,
   ReparseDagFileResponse,
   GetHealthResponse,
@@ -3095,34 +3093,6 @@ export class VariableService {
       },
     });
   }
-
-  /**
-   * Import Variables
-   * Import variables from a JSON file.
-   * @param data The data for the request.
-   * @param data.formData
-   * @param data.actionIfExists
-   * @returns VariablesImportResponse Successful Response
-   * @throws ApiError
-   */
-  public static importVariables(data: ImportVariablesData): CancelablePromise<ImportVariablesResponse> {
-    return __request(OpenAPI, {
-      method: "POST",
-      url: "/public/variables/import",
-      query: {
-        action_if_exists: data.actionIfExists,
-      },
-      formData: data.formData,
-      mediaType: "multipart/form-data",
-      errors: {
-        400: "Bad Request",
-        401: "Unauthorized",
-        403: "Forbidden",
-        409: "Conflict",
-        422: "Unprocessable Entity",
-      },
-    });
-  }
 }
 
 export class DagParsingService {
diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts
index 09911de7c9745..81925913fba42 100644
--- a/airflow/ui/openapi-gen/requests/types.gen.ts
+++ b/airflow/ui/openapi-gen/requests/types.gen.ts
@@ -138,10 +138,6 @@ export type BaseInfoResponse = {
   status: string | null;
 };
 
-export type Body_import_variables = {
-  file: Blob | File;
-};
-
 /**
  * Request body for Clear Task Instances endpoint.
  */
@@ -1389,15 +1385,6 @@ export type VariableResponse = {
   is_encrypted: boolean;
 };
 
-/**
- * Import Variables serializer for responses.
- */
-export type VariablesImportResponse = {
-  created_variable_keys: Array<string>;
-  import_count: number;
-  created_count: number;
-};
-
 /**
  * Version information serializer for responses.
  */
@@ -2259,13 +2246,6 @@ export type BulkVariablesData = {
 
 export type BulkVariablesResponse = VariableBulkResponse;
 
-export type ImportVariablesData = {
-  actionIfExists?: "overwrite" | "fail" | "skip";
-  formData: Body_import_variables;
-};
-
-export type ImportVariablesResponse = VariablesImportResponse;
-
 export type ReparseDagFileData = {
   fileToken: string;
 };
@@ -4766,37 +4746,6 @@ export type $OpenApiTs = {
       };
     };
   };
-  "/public/variables/import": {
-    post: {
-      req: ImportVariablesData;
-      res: {
-        /**
-         * Successful Response
-         */
-        200: VariablesImportResponse;
-        /**
-         * Bad Request
-         */
-        400: HTTPExceptionResponse;
-        /**
-         * Unauthorized
-         */
-        401: HTTPExceptionResponse;
-        /**
-         * Forbidden
-         */
-        403: HTTPExceptionResponse;
-        /**
-         * Conflict
-         */
-        409: HTTPExceptionResponse;
-        /**
-         * Unprocessable Entity
-         */
-        422: HTTPExceptionResponse;
-      };
-    };
-  };
   "/public/parseDagFile/{file_token}": {
     put: {
       req: ReparseDagFileData;
diff --git a/airflow/ui/src/components/ui/ActionButton.tsx b/airflow/ui/src/components/ui/ActionButton.tsx
index 9820775223d74..0f921826989b5 100644
--- a/airflow/ui/src/components/ui/ActionButton.tsx
+++ b/airflow/ui/src/components/ui/ActionButton.tsx
@@ -34,6 +34,7 @@ type Props = {
 const ActionButton = ({
   actionName,
   colorPalette,
+  disabled = false,
   icon,
   onClick,
   text,
@@ -47,6 +48,7 @@ const ActionButton = ({
       <ButtonComponent
         aria-label={actionName}
         colorPalette={withText ? colorPalette : "blue"}
+        disabled={disabled}
         onClick={onClick}
         size={withText ? "md" : "sm"}
         variant={withText ? variant : "ghost"}
diff --git a/airflow/ui/src/pages/Variables/DeleteVariablesButton.tsx b/airflow/ui/src/pages/Variables/DeleteVariablesButton.tsx
new file mode 100644
index 0000000000000..6cdb9bf0fbc25
--- /dev/null
+++ b/airflow/ui/src/pages/Variables/DeleteVariablesButton.tsx
@@ -0,0 +1,101 @@
+/*!
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import { Flex, useDisclosure, Text, VStack, Heading, Code } from "@chakra-ui/react";
+import { FiTrash, FiTrash2 } from "react-icons/fi";
+
+import { ErrorAlert } from "src/components/ErrorAlert";
+import { Button, Dialog } from "src/components/ui";
+import { useBulkDeleteVariables } from "src/queries/useBulkDeleteVariables";
+
+type Props = {
+  readonly clearSelections: VoidFunction;
+  readonly deleteKeys: Array<string>;
+};
+
+const DeleteVariablesButton = ({ clearSelections, deleteKeys: variableKeys }: Props) => {
+  const { onClose, onOpen, open } = useDisclosure();
+  const { error, isPending, mutate } = useBulkDeleteVariables({ clearSelections, onSuccessConfirm: onClose });
+
+  return (
+    <>
+      <Button
+        onClick={() => {
+          onOpen();
+        }}
+        size="sm"
+        variant="outline"
+      >
+        <FiTrash2 />
+        Delete
+      </Button>
+
+      <Dialog.Root onOpenChange={onClose} open={open} size="xl">
+        <Dialog.Content backdrop>
+          <Dialog.Header>
+            <VStack align="start" gap={4}>
+              <Heading size="xl">Delete Variable{variableKeys.length > 1 ? "s" : ""}</Heading>
+            </VStack>
+          </Dialog.Header>
+
+          <Dialog.CloseTrigger />
+
+          <Dialog.Body width="full">
+            <Text color="gray.solid" fontSize="md" fontWeight="semibold" mb={4}>
+              You are about to delete{" "}
+              <strong>
+                {variableKeys.length} variable{variableKeys.length > 1 ? "s" : ""}.
+              </strong>
+              <br />
+              <Code mb={2} mt={2} p={4}>
+                {variableKeys.join(", ")}
+              </Code>
+              <br />
+              This action is permanent and cannot be undone.{" "}
+              <strong>Are you sure you want to proceed?</strong>
+            </Text>
+            <ErrorAlert error={error} />
+            <Flex justifyContent="end" mt={3}>
+              <Button
+                colorPalette="red"
+                loading={isPending}
+                onClick={() => {
+                  mutate({
+                    requestBody: {
+                      actions: [
+                        {
+                          action: "delete" as const,
+                          action_if_not_exists: "fail",
+                          keys: variableKeys,
+                        },
+                      ],
+                    },
+                  });
+                }}
+              >
+                <FiTrash /> <Text fontWeight="bold">Yes, Delete</Text>
+              </Button>
+            </Flex>
+          </Dialog.Body>
+        </Dialog.Content>
+      </Dialog.Root>
+    </>
+  );
+};
+
+export default DeleteVariablesButton;
diff --git a/airflow/ui/src/pages/Variables/ImportVariablesButton.tsx b/airflow/ui/src/pages/Variables/ImportVariablesButton.tsx
index a662ffb1ea478..4f23d32d77298 100644
--- a/airflow/ui/src/pages/Variables/ImportVariablesButton.tsx
+++ b/airflow/ui/src/pages/Variables/ImportVariablesButton.tsx
@@ -23,12 +23,16 @@ import { Button, Dialog } from "src/components/ui";
 
 import ImportVariablesForm from "./ImportVariablesForm";
 
-const ImportVariablesButton = () => {
+type Props = {
+  readonly disabled: boolean;
+};
+
+const ImportVariablesButton = ({ disabled }: Props) => {
   const { onClose, onOpen, open } = useDisclosure();
 
   return (
     <>
-      <Button colorPalette="blue" onClick={onOpen}>
+      <Button colorPalette="blue" disabled={disabled} onClick={onOpen}>
         <FiUploadCloud /> Import Variables
       </Button>
 
diff --git a/airflow/ui/src/pages/Variables/ImportVariablesForm.tsx b/airflow/ui/src/pages/Variables/ImportVariablesForm.tsx
index 89029a1fd0aef..5645c91d4baf3 100644
--- a/airflow/ui/src/pages/Variables/ImportVariablesForm.tsx
+++ b/airflow/ui/src/pages/Variables/ImportVariablesForm.tsx
@@ -55,21 +55,65 @@ const ImportVariablesForm = ({ onClose }: ImportVariablesFormProps) => {
     onSuccessConfirm: onClose,
   });
 
-  const [selectedFile, setSelectedFile] = useState<Blob | File | undefined>(undefined);
-  const [actionIfExists, setActionIfExists] = useState<"fail" | "overwrite" | "skip" | undefined>("fail");
+  const [actionIfExists, setActionIfExists] = useState<"fail" | "overwrite" | "skip">("fail");
+  const [isParsing, setIsParsing] = useState(false);
+  const [fileContent, setFileContent] = useState<Record<string, string> | undefined>(undefined);
+
+  const onFileChange = (file: File) => {
+    setIsParsing(true);
+    const reader = new FileReader();
+
+    reader.addEventListener("load", (event) => {
+      try {
+        const text = event.target?.result as string;
+        const parsedContent = JSON.parse(text) as unknown;
+
+        if (
+          typeof parsedContent === "object" &&
+          parsedContent !== null &&
+          Object.entries(parsedContent).every(
+            ([key, value]) => typeof key === "string" && typeof value === "string",
+          )
+        ) {
+          const typedContent = parsedContent as Record<string, string>;
+
+          setFileContent(typedContent);
+        } else {
+          throw new Error("Invalid JSON format");
+        }
+      } catch {
+        setError({
+          body: {
+            detail:
+              'Error Parsing JSON File: Upload a JSON file containing variables (e.g., {"key": "value", ...}).',
+          },
+        });
+        setFileContent(undefined);
+      } finally {
+        setIsParsing(false);
+      }
+    });
+
+    reader.readAsText(file);
+  };
 
   const onSubmit = () => {
     setError(undefined);
-    if (selectedFile) {
-      const formData = new FormData();
+    if (fileContent) {
+      const formattedPayload = {
+        actions: [
+          {
+            action: "create" as const,
+            action_if_exists: actionIfExists,
+            variables: Object.entries(fileContent).map(([key, value]) => ({
+              key,
+              value,
+            })),
+          },
+        ],
+      };
 
-      formData.append("file", selectedFile);
-      mutate({
-        actionIfExists,
-        formData: {
-          file: selectedFile,
-        },
-      });
+      mutate({ requestBody: formattedPayload });
     }
   };
 
@@ -82,7 +126,11 @@ const ImportVariablesForm = ({ onClose }: ImportVariablesFormProps) => {
         mb={6}
         onFileChange={(files) => {
           if (files.acceptedFiles.length > 0) {
-            setSelectedFile(files.acceptedFiles[0]);
+            setError(undefined);
+            setFileContent(undefined);
+            if (files.acceptedFiles[0]) {
+              onFileChange(files.acceptedFiles[0]);
+            }
           }
         }}
         required
@@ -99,7 +147,8 @@ const ImportVariablesForm = ({ onClose }: ImportVariablesFormProps) => {
                 focusVisibleRing="inside"
                 me="-1"
                 onClick={() => {
-                  setSelectedFile(undefined);
+                  setError(undefined);
+                  setFileContent(undefined);
                 }}
                 pointerEvents="auto"
                 size="xs"
@@ -112,6 +161,11 @@ const ImportVariablesForm = ({ onClose }: ImportVariablesFormProps) => {
         >
           <FileInput placeholder='Upload a JSON file containing variables (e.g., {"key": "value", ...})' />
         </InputGroup>
+        {isParsing ? (
+          <Center mt={2}>
+            <Spinner color="blue.solid" marginRight={2} size="sm" /> Parsing file...
+          </Center>
+        ) : undefined}
       </FileUpload.Root>
       <RadioCardRoot
         defaultValue="fail"
@@ -139,13 +193,13 @@ const ImportVariablesForm = ({ onClose }: ImportVariablesFormProps) => {
       <ErrorAlert error={error} />
       <Box as="footer" display="flex" justifyContent="flex-end" mt={4}>
         {isPending ? (
-          <Box bg="bg/80" inset="0" pos="absolute">
+          <Box bg="bg.muted" inset="0" pos="absolute">
             <Center h="full">
-              <Spinner borderWidth="4px" color="blue.500" size="xl" />
+              <Spinner borderWidth="4px" color="blue.solid" size="xl" />
             </Center>
           </Box>
         ) : undefined}
-        <Button colorPalette="blue" disabled={!selectedFile || isPending} onClick={onSubmit}>
+        <Button colorPalette="blue" disabled={!Boolean(fileContent) || isPending} onClick={onSubmit}>
           <FiUploadCloud /> Import
         </Button>
       </Box>
diff --git a/airflow/ui/src/pages/Variables/ManageVariable/AddVariableButton.tsx b/airflow/ui/src/pages/Variables/ManageVariable/AddVariableButton.tsx
index 2771ebd2dd485..c3b3c3a551900 100644
--- a/airflow/ui/src/pages/Variables/ManageVariable/AddVariableButton.tsx
+++ b/airflow/ui/src/pages/Variables/ManageVariable/AddVariableButton.tsx
@@ -24,7 +24,11 @@ import { useAddVariable } from "src/queries/useAddVariable";
 
 import VariableForm, { type VariableBody } from "./VariableForm";
 
-const AddVariableButton = () => {
+type Props = {
+  readonly disabled: boolean;
+};
+
+const AddVariableButton = ({ disabled }: Props) => {
   const { onClose, onOpen, open } = useDisclosure();
   const { addVariable, error, isPending, setError } = useAddVariable({
     onSuccessConfirm: onClose,
@@ -44,7 +48,7 @@ const AddVariableButton = () => {
   return (
     <>
       <Toaster />
-      <Button colorPalette="blue" onClick={onOpen}>
+      <Button colorPalette="blue" disabled={disabled} onClick={onOpen}>
         <FiPlusCircle /> Add Variable
       </Button>
 
diff --git a/airflow/ui/src/pages/Variables/ManageVariable/DeleteVariableButton.tsx b/airflow/ui/src/pages/Variables/ManageVariable/DeleteVariableButton.tsx
index d8d2efbecae70..fab939d73cc28 100644
--- a/airflow/ui/src/pages/Variables/ManageVariable/DeleteVariableButton.tsx
+++ b/airflow/ui/src/pages/Variables/ManageVariable/DeleteVariableButton.tsx
@@ -25,32 +25,20 @@ import { useDeleteVariable } from "src/queries/useDeleteVariable";
 
 type Props = {
   readonly deleteKey: string;
+  readonly disabled: boolean;
 };
 
-const DeleteVariableButton = ({ deleteKey: variableKey }: Props) => {
+const DeleteVariableButton = ({ deleteKey: variableKey, disabled }: Props) => {
   const { onClose, onOpen, open } = useDisclosure();
   const { isPending, mutate } = useDeleteVariable({
     onSuccessConfirm: onClose,
   });
 
-  const renderDeleteButton = () => (
-    <Button
-      colorPalette="red"
-      loading={isPending}
-      onClick={() => {
-        mutate({
-          variableKey,
-        });
-      }}
-    >
-      <FiTrash /> Delete
-    </Button>
-  );
-
   return (
     <>
       <ActionButton
         actionName="Delete Variable"
+        disabled={disabled}
         icon={<FiTrash />}
         onClick={() => {
           onOpen();
@@ -70,9 +58,24 @@ const DeleteVariableButton = ({ deleteKey: variableKey }: Props) => {
           <Dialog.CloseTrigger />
 
           <Dialog.Body width="full">
-            <Text>Are you sure you want to delete the variable key: `{variableKey}`?</Text>
+            <Text color="gray.solid" fontSize="md" fontWeight="semibold" mb={4}>
+              You are about to delete the variable with key <strong>{variableKey}</strong>.
+              <br />
+              This action is permanent and cannot be undone.{" "}
+              <strong>Are you sure you want to proceed?</strong>
+            </Text>
             <Flex justifyContent="end" mt={3}>
-              {renderDeleteButton()}
+              <Button
+                colorPalette="red"
+                loading={isPending}
+                onClick={() => {
+                  mutate({
+                    variableKey,
+                  });
+                }}
+              >
+                <FiTrash /> <Text fontWeight="bold">Yes, Delete</Text>
+              </Button>
             </Flex>
           </Dialog.Body>
         </Dialog.Content>
diff --git a/airflow/ui/src/pages/Variables/ManageVariable/EditVariableButton.tsx b/airflow/ui/src/pages/Variables/ManageVariable/EditVariableButton.tsx
index 3fee33d461e2f..5138cefb6fe0a 100644
--- a/airflow/ui/src/pages/Variables/ManageVariable/EditVariableButton.tsx
+++ b/airflow/ui/src/pages/Variables/ManageVariable/EditVariableButton.tsx
@@ -28,10 +28,11 @@ import type { VariableBody } from "./VariableForm";
 import VariableForm from "./VariableForm";
 
 type Props = {
+  readonly disabled: boolean;
   readonly variable: VariableResponse;
 };
 
-const EditVariableButton = ({ variable }: Props) => {
+const EditVariableButton = ({ disabled, variable }: Props) => {
   const { onClose, onOpen, open } = useDisclosure();
   const initialVariableValue: VariableBody = {
     description: variable.description ?? "",
@@ -51,6 +52,7 @@ const EditVariableButton = ({ variable }: Props) => {
     <>
       <ActionButton
         actionName="Edit Variable"
+        disabled={disabled}
         icon={<FiEdit />}
         onClick={() => {
           onOpen();
diff --git a/airflow/ui/src/pages/Variables/Variables.tsx b/airflow/ui/src/pages/Variables/Variables.tsx
index 94ae08f760dc2..51a0c8c35cc27 100644
--- a/airflow/ui/src/pages/Variables/Variables.tsx
+++ b/airflow/ui/src/pages/Variables/Variables.tsx
@@ -19,7 +19,7 @@
 import { Box, Flex, HStack, Spacer, VStack } from "@chakra-ui/react";
 import type { ColumnDef } from "@tanstack/react-table";
 import { useMemo, useState } from "react";
-import { FiShare, FiTrash2 } from "react-icons/fi";
+import { FiShare } from "react-icons/fi";
 import { useSearchParams } from "react-router-dom";
 
 import { useVariableServiceGetVariables } from "openapi/queries";
@@ -35,6 +35,7 @@ import { Checkbox } from "src/components/ui/Checkbox";
 import { SearchParamsKeys, type SearchParamsKeysType } from "src/constants/searchParams";
 import { TrimText } from "src/utils/TrimText";
 
+import DeleteVariablesButton from "./DeleteVariablesButton";
 import ImportVariablesButton from "./ImportVariablesButton";
 import AddVariableButton from "./ManageVariable/AddVariableButton";
 import DeleteVariableButton from "./ManageVariable/DeleteVariableButton";
@@ -51,12 +52,17 @@ const getColumns = ({
     cell: ({ row }) => (
       <Checkbox
         checked={selectedRows.get(row.original.key)}
+        colorPalette="blue"
         onCheckedChange={(event) => onRowSelect(row.original.key, Boolean(event.checked))}
       />
     ),
     enableSorting: false,
     header: () => (
-      <Checkbox checked={allRowsSelected} onCheckedChange={(event) => onSelectAll(Boolean(event.checked))} />
+      <Checkbox
+        checked={allRowsSelected}
+        colorPalette="blue"
+        onCheckedChange={(event) => onSelectAll(Boolean(event.checked))}
+      />
     ),
     meta: {
       skeletonWidth: 10,
@@ -85,8 +91,8 @@ const getColumns = ({
     accessorKey: "actions",
     cell: ({ row: { original } }) => (
       <Flex justifyContent="end">
-        <EditVariableButton variable={original} />
-        <DeleteVariableButton deleteKey={original.key} />
+        <EditVariableButton disabled={selectedRows.size > 0} variable={original} />
+        <DeleteVariableButton deleteKey={original.key} disabled={selectedRows.size > 0} />
       </Flex>
     ),
     enableSorting: false,
@@ -98,7 +104,9 @@ const getColumns = ({
 ];
 
 export const Variables = () => {
-  const { setTableURLState, tableURLState } = useTableURLState();
+  const { setTableURLState, tableURLState } = useTableURLState({
+    pagination: { pageIndex: 0, pageSize: 30 },
+  }); // Larger default page size keeps multi-selection smooth
   const [searchParams, setSearchParams] = useSearchParams();
   const { NAME_PATTERN: NAME_PATTERN_PARAM }: SearchParamsKeysType = SearchParamsKeys;
   const [variableKeyPattern, setVariableKeyPattern] = useState(
@@ -156,9 +164,9 @@ export const Variables = () => {
           placeHolder="Search Keys"
         />
         <HStack gap={4} mt={2}>
-          <ImportVariablesButton />
+          <ImportVariablesButton disabled={selectedRows.size > 0} />
           <Spacer />
-          <AddVariableButton />
+          <AddVariableButton disabled={selectedRows.size > 0} />
         </HStack>
       </VStack>
       <Box overflow="auto">
@@ -178,12 +186,9 @@ export const Variables = () => {
         <ActionBar.Content>
           <ActionBar.SelectionTrigger>{selectedRows.size} selected</ActionBar.SelectionTrigger>
           <ActionBar.Separator />
-          {/* TODO: Implement the delete and export selected */}
-          <Tooltip content="Delete selected variable coming soon..">
-            <Button disabled size="sm" variant="outline">
-              <FiTrash2 />
-              Delete
-            </Button>
+          {/* TODO: Implement the export selected */}
+          <Tooltip content="Delete selected variables">
+            <DeleteVariablesButton clearSelections={clearSelections} deleteKeys={[...selectedRows.keys()]} />
           </Tooltip>
           <Tooltip content="Export selected variable coming soon..">
             <Button disabled size="sm" variant="outline">
diff --git a/airflow/ui/src/queries/useBulkDeleteVariables.ts b/airflow/ui/src/queries/useBulkDeleteVariables.ts
new file mode 100644
index 0000000000000..7fa1447436d0b
--- /dev/null
+++ b/airflow/ui/src/queries/useBulkDeleteVariables.ts
@@ -0,0 +1,70 @@
+/*!
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import { useQueryClient } from "@tanstack/react-query";
+import { useState } from "react";
+
+import { useVariableServiceBulkVariables, useVariableServiceGetVariablesKey } from "openapi/queries";
+import { toaster } from "src/components/ui";
+
+type Props = {
+  readonly clearSelections: VoidFunction;
+  readonly onSuccessConfirm: VoidFunction;
+};
+
+export const useBulkDeleteVariables = ({ clearSelections, onSuccessConfirm }: Props) => {
+  const queryClient = useQueryClient();
+  const [error, setError] = useState<unknown>(undefined);
+
+  const onSuccess = async (responseData: { delete?: { errors: Array<unknown>; success: Array<string> } }) => {
+    await queryClient.invalidateQueries({
+      queryKey: [useVariableServiceGetVariablesKey],
+    });
+
+    if (responseData.delete) {
+      const { errors, success } = responseData.delete;
+
+      if (Array.isArray(errors) && errors.length > 0) {
+        const apiError = errors[0] as { error: string };
+
+        setError({
+          body: { detail: apiError.error },
+        });
+      } else if (Array.isArray(success) && success.length > 0) {
+        toaster.create({
+          description: `${success.length} variables deleted successfully. Keys: ${success.join(", ")}`,
+          title: "Delete Variables Request Successful",
+          type: "success",
+        });
+        clearSelections();
+        onSuccessConfirm();
+      }
+    }
+  };
+
+  const onError = (_error: unknown) => {
+    setError(_error);
+  };
+
+  const { isPending, mutate } = useVariableServiceBulkVariables({
+    onError,
+    onSuccess,
+  });
+
+  return { error, isPending, mutate };
+};
diff --git a/airflow/ui/src/queries/useImportVariables.ts b/airflow/ui/src/queries/useImportVariables.ts
index b4692e37c535f..212d9c83e1993 100644
--- a/airflow/ui/src/queries/useImportVariables.ts
+++ b/airflow/ui/src/queries/useImportVariables.ts
@@ -19,36 +19,43 @@
 import { useQueryClient } from "@tanstack/react-query";
 import { useState } from "react";
 
-import { useVariableServiceGetVariablesKey, useVariableServiceImportVariables } from "openapi/queries";
+import { useVariableServiceBulkVariables, useVariableServiceGetVariablesKey } from "openapi/queries";
 import { toaster } from "src/components/ui";
 
 export const useImportVariables = ({ onSuccessConfirm }: { onSuccessConfirm: () => void }) => {
   const queryClient = useQueryClient();
   const [error, setError] = useState<unknown>(undefined);
 
-  const onSuccess = async (responseData: {
-    created_count: number;
-    created_variable_keys: Array<string>;
-    import_count: number;
-  }) => {
+  const onSuccess = async (responseData: { create?: { errors: Array<unknown>; success: Array<string> } }) => {
     await queryClient.invalidateQueries({
       queryKey: [useVariableServiceGetVariablesKey],
     });
 
-    toaster.create({
-      description: `${responseData.created_count} of ${responseData.import_count} variables imported successfully. Keys imported are ${responseData.created_variable_keys.join(", ")}`,
-      title: "Import Variables Request Successful",
-      type: "success",
-    });
+    if (responseData.create) {
+      const { errors, success } = responseData.create;
+
+      if (Array.isArray(errors) && errors.length > 0) {
+        const apiError = errors[0] as { error: string };
 
-    onSuccessConfirm();
+        setError({
+          body: { detail: apiError.error },
+        });
+      } else if (Array.isArray(success) && success.length > 0) {
+        toaster.create({
+          description: `${success.length} variables created successfully. Keys: ${success.join(", ")}`,
+          title: "Import Variables Request Successful",
+          type: "success",
+        });
+        onSuccessConfirm();
+      }
+    }
   };
 
   const onError = (_error: unknown) => {
     setError(_error);
   };
 
-  const { isPending, mutate } = useVariableServiceImportVariables({
+  const { isPending, mutate } = useVariableServiceBulkVariables({
     onError,
     onSuccess,
   });
diff --git a/airflow/utils/context.py b/airflow/utils/context.py
index 1f453457e4323..168243290fabc 100644
--- a/airflow/utils/context.py
+++ b/airflow/utils/context.py
@@ -19,7 +19,6 @@
 
 from __future__ import annotations
 
-import contextlib
 from collections.abc import (
     Container,
     Iterator,
@@ -51,9 +50,9 @@
     AssetRef,
     AssetUniqueKey,
     AssetUriRef,
-    BaseAssetUniqueKey,
 )
 from airflow.sdk.definitions.context import Context
+from airflow.sdk.execution_time.context import OutletEventAccessors as OutletEventAccessorsSDK
 from airflow.utils.db import LazySelectSequence
 from airflow.utils.session import create_session
 from airflow.utils.types import NOTSET
@@ -156,104 +155,29 @@ def get(self, key: str, default_conn: Any = None) -> Any:
             return default_conn
 
 
-@attrs.define()
-class AssetAliasEvent:
-    """
-    Represeation of asset event to be triggered by an asset alias.
-
-    :meta private:
-    """
-
-    source_alias_name: str
-    dest_asset_key: AssetUniqueKey
-    extra: dict[str, Any]
-
-
-@attrs.define()
-class OutletEventAccessor:
-    """
-    Wrapper to access an outlet asset event in template.
-
-    :meta private:
-    """
-
-    key: BaseAssetUniqueKey
-    extra: dict[str, Any] = attrs.Factory(dict)
-    asset_alias_events: list[AssetAliasEvent] = attrs.field(factory=list)
-
-    def add(self, asset: Asset, extra: dict[str, Any] | None = None) -> None:
-        """Add an AssetEvent to an existing Asset."""
-        if not isinstance(self.key, AssetAliasUniqueKey):
-            return
-
-        asset_alias_name = self.key.name
-        event = AssetAliasEvent(
-            source_alias_name=asset_alias_name,
-            dest_asset_key=AssetUniqueKey.from_asset(asset),
-            extra=extra or {},
-        )
-        self.asset_alias_events.append(event)
-
-
-class OutletEventAccessors(Mapping[Union[Asset, AssetAlias], OutletEventAccessor]):
+class OutletEventAccessors(OutletEventAccessorsSDK):
     """
     Lazy mapping of outlet asset event accessors.
 
     :meta private:
     """
 
-    _asset_ref_cache: dict[AssetRef, AssetUniqueKey] = {}
-
-    def __init__(self) -> None:
-        self._dict: dict[BaseAssetUniqueKey, OutletEventAccessor] = {}
-
-    def __str__(self) -> str:
-        return f"OutletEventAccessors(_dict={self._dict})"
-
-    def __iter__(self) -> Iterator[Asset | AssetAlias]:
-        return (
-            key.to_asset() if isinstance(key, AssetUniqueKey) else key.to_asset_alias() for key in self._dict
-        )
-
-    def __len__(self) -> int:
-        return len(self._dict)
-
-    def __getitem__(self, key: Asset | AssetAlias) -> OutletEventAccessor:
-        hashable_key: BaseAssetUniqueKey
-        if isinstance(key, Asset):
-            hashable_key = AssetUniqueKey.from_asset(key)
-        elif isinstance(key, AssetAlias):
-            hashable_key = AssetAliasUniqueKey.from_asset_alias(key)
-        elif isinstance(key, AssetRef):
-            hashable_key = self._resolve_asset_ref(key)
-        else:
-            raise TypeError(f"Key should be either an asset or an asset alias, not {type(key)}")
-
-        if hashable_key not in self._dict:
-            self._dict[hashable_key] = OutletEventAccessor(extra={}, key=hashable_key)
-        return self._dict[hashable_key]
-
-    def _resolve_asset_ref(self, ref: AssetRef) -> AssetUniqueKey:
-        with contextlib.suppress(KeyError):
-            return self._asset_ref_cache[ref]
-
-        refs_to_cache: list[AssetRef]
-        with create_session() as session:
-            if isinstance(ref, AssetNameRef):
+    @staticmethod
+    def _get_asset_from_db(name: str | None = None, uri: str | None = None) -> Asset:
+        if name:
+            with create_session() as session:
                 asset = session.scalar(
-                    select(AssetModel).where(AssetModel.name == ref.name, AssetModel.active.has())
+                    select(AssetModel).where(AssetModel.name == name, AssetModel.active.has())
                 )
-                refs_to_cache = [ref, AssetUriRef(asset.uri)]
-            elif isinstance(ref, AssetUriRef):
+        elif uri:
+            with create_session() as session:
                 asset = session.scalar(
-                    select(AssetModel).where(AssetModel.uri == ref.uri, AssetModel.active.has())
+                    select(AssetModel).where(AssetModel.uri == uri, AssetModel.active.has())
                 )
-                refs_to_cache = [ref, AssetNameRef(asset.name)]
-            else:
-                raise TypeError(f"Unimplemented asset ref: {type(ref)}")
-            for ref in refs_to_cache:
-                self._asset_ref_cache[ref] = unique_key = AssetUniqueKey.from_asset(asset)
-        return unique_key
+        else:
+            raise ValueError("Either name or uri must be provided")
+
+        return asset.to_public()
 
 
 class LazyAssetEventSelectSequence(LazySelectSequence[AssetEvent]):
diff --git a/airflow/www/views.py b/airflow/www/views.py
index e021c441f5d22..ded98f1e1d860 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -4768,6 +4768,8 @@ class DagRunModelView(AirflowModelView):
         permissions.ACTION_CAN_ACCESS_MENU,
     ]
 
+    add_exclude_columns = ["conf"]
+
     list_columns = [
         "state",
         "dag_id",
@@ -4803,7 +4805,6 @@ class DagRunModelView(AirflowModelView):
         "start_date",
         "end_date",
         "run_id",
-        "conf",
         "note",
     ]
 
diff --git a/dev/breeze/doc/ci/02_images.md b/dev/breeze/doc/ci/02_images.md
index 3d1d7d8b53eb7..84f71f34c3f1d 100644
--- a/dev/breeze/doc/ci/02_images.md
+++ b/dev/breeze/doc/ci/02_images.md
@@ -443,7 +443,7 @@ can be used for CI images:
 | `ADDITIONAL_DEV_APT_DEPS`       |                            | Additional apt dev dependencies installed in the first part of the image                                          |
 | `ADDITIONAL_DEV_APT_ENV`        |                            | Additional env variables defined when installing dev deps                                                         |
 | `AIRFLOW_PIP_VERSION`           | `24.3.1`                   | `pip` version used.                                                                                               |
-| `AIRFLOW_UV_VERSION`            | `0.5.14`                   | `uv` version used.                                                                                                |
+| `AIRFLOW_UV_VERSION`            | `0.5.20`                   | `uv` version used.                                                                                                |
 | `AIRFLOW_PRE_COMMIT_VERSION`    | `4.0.1`                    | `pre-commit` version used.                                                                                        |
 | `AIRFLOW_PRE_COMMIT_UV_VERSION` | `4.1.4`                    | `pre-commit-uv` version used.                                                                                     |
 | `AIRFLOW_USE_UV`                | `true`                     | Whether to use UV for installation.                                                                               |
diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
index 1dad40f7e5b9f..99e619240702b 100644
--- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
@@ -234,7 +234,7 @@ class VersionedFile(NamedTuple):
 
 
 AIRFLOW_PIP_VERSION = "24.3.1"
-AIRFLOW_UV_VERSION = "0.5.14"
+AIRFLOW_UV_VERSION = "0.5.20"
 AIRFLOW_USE_UV = False
 # TODO: automate these as well
 WHEEL_VERSION = "0.44.0"
diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands.py b/dev/breeze/src/airflow_breeze/commands/testing_commands.py
index 52a3bc8fbfaa8..39d2eb024c8db 100644
--- a/dev/breeze/src/airflow_breeze/commands/testing_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/testing_commands.py
@@ -1217,8 +1217,16 @@ def _run_test_command(
     perform_environment_checks()
     if skip_providers:
         ignored_path_list = [
-            f"--ignore=providers/tests/{provider_id.replace('.','/')}"
-            for provider_id in skip_providers.split(" ")
+            # TODO(potiuk): remove the old ways once we migrate all providers to the new structure
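+            # Old structure: tests live under providers/tests/<provider path>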
+            *[
+                f"--ignore=providers/tests/{provider_id.replace('.','/')}"
+                for provider_id in skip_providers.split(" ")
+            ],
+            # New structure
+            *[
+                f"--ignore=providers/{provider_id.replace('.','/')}/tests"
+                for provider_id in skip_providers.split(" ")
+            ],
         ]
         extra_pytest_args = (*extra_pytest_args, *ignored_path_list)
     if run_in_parallel:
diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py
index b18f8c4da3227..287611732567b 100644
--- a/dev/breeze/src/airflow_breeze/global_constants.py
+++ b/dev/breeze/src/airflow_breeze/global_constants.py
@@ -189,7 +189,7 @@
 ALLOWED_INSTALL_MYSQL_CLIENT_TYPES = ["mariadb", "mysql"]
 
 PIP_VERSION = "24.3.1"
-UV_VERSION = "0.5.14"
+UV_VERSION = "0.5.20"
 
 DEFAULT_UV_HTTP_TIMEOUT = 300
 DEFAULT_WSL2_HTTP_TIMEOUT = 900
diff --git a/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py b/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py
index 5e024ca42658f..7082999c1176d 100644
--- a/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py
+++ b/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py
@@ -556,7 +556,11 @@ def _update_source_date_epoch_in_provider_yaml(
 
 def _verify_changelog_exists(package: str) -> Path:
     provider_details = get_provider_details(package)
-    changelog_path = Path(provider_details.root_provider_path) / "CHANGELOG.rst"
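+    # Providers migrated to the new layout keep their changelog under docs/changelog.rst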
+    changelog_path = (
+        Path(provider_details.root_provider_path) / "docs" / "changelog.rst"
+        if provider_details.is_new_structure
+        else Path(provider_details.root_provider_path) / "CHANGELOG.rst"
+    )
     if not os.path.isfile(changelog_path):
         get_console().print(f"\n[error]ERROR: Missing {changelog_path}[/]\n")
         get_console().print("[info]Please add the file with initial content:")
diff --git a/dev/breeze/tests/test_pytest_args_for_test_types.py b/dev/breeze/tests/test_pytest_args_for_test_types.py
index 1afadcfc064a2..13d117ab67d5e 100644
--- a/dev/breeze/tests/test_pytest_args_for_test_types.py
+++ b/dev/breeze/tests/test_pytest_args_for_test_types.py
@@ -66,7 +66,7 @@
         (
             GroupOfTests.PROVIDERS,
             "Providers",
-            ["providers/airbyte/tests", "providers/tests"],
+            ["providers/airbyte/tests", "providers/edge/tests", "providers/tests"],
         ),
         (
             GroupOfTests.PROVIDERS,
@@ -88,6 +88,7 @@
             "Providers[-amazon,google,microsoft.azure]",
             [
                 "providers/airbyte/tests",
+                "providers/edge/tests",
                 "providers/tests",
                 "--ignore=providers/tests/amazon",
                 "--ignore=providers/amazon",
@@ -105,7 +106,14 @@
         (
             GroupOfTests.PROVIDERS,
             "All-Quarantined",
-            ["providers/airbyte/tests", "providers/tests", "-m", "quarantined", "--include-quarantined"],
+            [
+                "providers/airbyte/tests",
+                "providers/edge/tests",
+                "providers/tests",
+                "-m",
+                "quarantined",
+                "--include-quarantined",
+            ],
         ),
         (
             GroupOfTests.CORE,
@@ -204,6 +212,7 @@ def test_pytest_args_for_missing_provider():
             "Providers",
             [
                 "providers/airbyte/tests",
+                "providers/edge/tests",
                 "providers/tests",
             ],
         ),
@@ -227,6 +236,7 @@ def test_pytest_args_for_missing_provider():
             "Providers[-amazon,google]",
             [
                 "providers/airbyte/tests",
+                "providers/edge/tests",
                 "providers/tests",
             ],
         ),
@@ -235,6 +245,7 @@ def test_pytest_args_for_missing_provider():
             "Providers[-amazon,google] Providers[amazon] Providers[google]",
             [
                 "providers/airbyte/tests",
+                "providers/edge/tests",
                 "providers/tests",
             ],
         ),
diff --git a/docs/.gitignore b/docs/.gitignore
index db54b7da0d2ca..8d41d702989cf 100644
--- a/docs/.gitignore
+++ b/docs/.gitignore
@@ -1,3 +1,4 @@
 # TODO(potiuk): change it to apache-airflow-providers-* after all providers are migrated to the new structure
 # Eventually when we switch to individually building docs for each provider, we should remove this altogether
 apache-airflow-providers-airbyte
+apache-airflow-providers-edge
diff --git a/docs/apache-airflow-providers-edge/changelog.rst b/docs/apache-airflow-providers-edge/changelog.rst
deleted file mode 100644
index 46bd53ed4ccc6..0000000000000
--- a/docs/apache-airflow-providers-edge/changelog.rst
+++ /dev/null
@@ -1,25 +0,0 @@
-
- .. Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
- ..   http://www.apache.org/licenses/LICENSE-2.0
-
- .. Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
- ..  NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
-     OVERWRITTEN WHEN PREPARING PACKAGES.
-
- ..  IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
-     `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
-
-.. include:: ../../providers/src/airflow/providers/edge/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-google/operators/cloud/dataplex.rst b/docs/apache-airflow-providers-google/operators/cloud/dataplex.rst
index cbeb5eafcd037..1846f1ad41b5e 100644
--- a/docs/apache-airflow-providers-google/operators/cloud/dataplex.rst
+++ b/docs/apache-airflow-providers-google/operators/cloud/dataplex.rst
@@ -417,3 +417,97 @@ To get a Data Profile scan job you can use:
     :dedent: 4
     :start-after: [START howto_dataplex_get_data_profile_job_operator]
     :end-before: [END howto_dataplex_get_data_profile_job_operator]
+
+
+Google Dataplex Catalog Operators
+=================================
+
+Dataplex Catalog provides a unified inventory of Google Cloud resources, such as BigQuery, as well as other
+resources, such as on-premises resources. Dataplex Catalog automatically retrieves metadata for Google Cloud
+resources, and you bring metadata for third-party resources into Dataplex Catalog.
+
+For more information about Dataplex Catalog, visit the `Dataplex Catalog product documentation <https://cloud.google.com/dataplex/docs/catalog-overview>`__.
+
+.. _howto/operator:DataplexCatalogCreateEntryGroupOperator:
+
+Create an EntryGroup
+--------------------
+
+To create an Entry Group in a specific location in Dataplex Catalog, you can
+use :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateEntryGroupOperator`.
+For more information about the available fields to pass when creating an Entry Group, visit the `Entry Group resource configuration <https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.entryGroups#EntryGroup>`__.
+
+A simple Entry Group configuration can look as follows:
+
+.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_catalog.py
+    :language: python
+    :dedent: 0
+    :start-after: [START howto_dataplex_entry_group_configuration]
+    :end-before: [END howto_dataplex_entry_group_configuration]
+
+With this configuration, you can create an Entry Group resource:
+
+:class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateEntryGroupOperator`
+
+.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_catalog.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_dataplex_catalog_create_entry_group]
+    :end-before: [END howto_operator_dataplex_catalog_create_entry_group]
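+
+As an illustrative sketch only (not taken from the provider sources), an inline definition of the operator
+could look roughly like the snippet below. The parameter names used here (``project_id``, ``location``,
+``entry_group_id``, ``entry_group_configuration``) are assumptions derived from the Entry Group resource
+fields; see the example DAG included above for the authoritative usage.
+
+.. code-block:: python
+
+    # Hypothetical sketch - parameter names are assumptions, refer to the example DAG above.
+    from airflow.providers.google.cloud.operators.dataplex import (
+        DataplexCatalogCreateEntryGroupOperator,
+    )
+
+    create_entry_group = DataplexCatalogCreateEntryGroupOperator(
+        task_id="create_entry_group",
+        project_id="my-project",  # hypothetical project
+        location="us-central1",  # hypothetical location
+        entry_group_id="my-entry-group",
+        entry_group_configuration={"display_name": "My Entry Group"},
+    )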
+
+.. _howto/operator:DataplexCatalogDeleteEntryGroupOperator:
+
+Delete an EntryGroup
+--------------------
+
+To delete an Entry Group in a specific location in Dataplex Catalog, you can
+use :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogDeleteEntryGroupOperator`.
+
+.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_catalog.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_dataplex_catalog_delete_entry_group]
+    :end-before: [END howto_operator_dataplex_catalog_delete_entry_group]
+
+.. _howto/operator:DataplexCatalogListEntryGroupsOperator:
+
+List EntryGroups
+----------------
+
+To list all Entry Groups in a specific location in Dataplex Catalog, you can
+use :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogListEntryGroupsOperator`.
+This operator also supports filtering and ordering the results of the operation.
+
+.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_catalog.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_dataplex_catalog_list_entry_groups]
+    :end-before: [END howto_operator_dataplex_catalog_list_entry_groups]
+
+.. _howto/operator:DataplexCatalogGetEntryGroupOperator:
+
+Get an EntryGroup
+-----------------
+
+To retrieve an Entry Group in a specific location in Dataplex Catalog, you can
+use :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogGetEntryGroupOperator`.
+
+.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_catalog.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_dataplex_catalog_get_entry_group]
+    :end-before: [END howto_operator_dataplex_catalog_get_entry_group]
+
+.. _howto/operator:DataplexCatalogUpdateEntryGroupOperator:
+
+Update an EntryGroup
+--------------------
+
+To update an Entry Group in a specific location in Dataplex Catalog, you can
+use :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateEntryGroupOperator`.
+
+.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_catalog.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_dataplex_catalog_update_entry_group]
+    :end-before: [END howto_operator_dataplex_catalog_update_entry_group]
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index 9377ac6acf3be..246f90286253f 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -574,6 +574,8 @@ encodable
 encryptor
 enqueue
 enqueued
+EntryGroup
+EntryGroups
 entrypoint
 entrypoints
 Enum
diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json
index 2724c6a73d4e9..c0a2e488d0f86 100644
--- a/generated/provider_dependencies.json
+++ b/generated/provider_dependencies.json
@@ -535,9 +535,7 @@
         "plugin-class": "airflow.providers.edge.plugins.edge_executor_plugin.EdgeExecutorPlugin"
       }
     ],
-    "cross-providers-deps": [
-      "common.compat"
-    ],
+    "cross-providers-deps": [],
     "excluded-python-versions": [],
     "state": "not-ready"
   },
@@ -656,7 +654,7 @@
       "google-cloud-datacatalog>=3.23.0",
       "google-cloud-dataflow-client>=0.8.6",
       "google-cloud-dataform>=0.5.0",
-      "google-cloud-dataplex>=1.10.0",
+      "google-cloud-dataplex>=2.6.0",
       "google-cloud-dataproc-metastore>=1.12.0",
       "google-cloud-dataproc>=5.12.0",
       "google-cloud-dlp>=3.12.0",
diff --git a/newsfragments/45722.significant.rst b/newsfragments/45722.significant.rst
new file mode 100644
index 0000000000000..3e9068a1ac13d
--- /dev/null
+++ b/newsfragments/45722.significant.rst
@@ -0,0 +1,18 @@
+Move airflow config ``scheduler.dag_dir_list_interval`` to ``dag_bundles.refresh_interval``
+
+* Types of change
+
+  * [ ] DAG changes
+  * [x] Config changes
+  * [ ] API changes
+  * [ ] CLI changes
+  * [ ] Behaviour changes
+  * [ ] Plugin changes
+  * [ ] Dependency change
+  * [ ] Code interface change
+
+* Migration rules needed
+
+  * ``airflow config lint``
+
+    * [x] ``scheduler.dag_dir_list_interval`` → ``dag_bundles.refresh_interval``
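+
+As a hedged illustration (the option names come from the rule above; the surrounding code is an assumption),
+reading the renamed option could look like this:
+
+.. code-block:: python
+
+    # Hypothetical sketch: the option was previously read from the "scheduler" section.
+    from airflow.configuration import conf
+
+    # Before: conf.getint("scheduler", "dag_dir_list_interval")
+    refresh_interval = conf.getint("dag_bundles", "refresh_interval")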
diff --git a/providers/edge/README.rst b/providers/edge/README.rst
new file mode 100644
index 0000000000000..d088fd11708f1
--- /dev/null
+++ b/providers/edge/README.rst
@@ -0,0 +1,63 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+ .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
+
+ .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+    `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+
+
+Package ``apache-airflow-providers-edge``
+
+Release: ``0.10.1pre0``
+
+
+Handle edge workers on remote sites via HTTP(s) connection and orchestrates work over distributed sites
+
+
+Provider package
+----------------
+
+This is a provider package for ``edge`` provider. All classes for this provider package
+are in ``airflow.providers.edge`` python package.
+
+You can find package information and changelog for the provider
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge/0.10.1pre0/>`_.
+
+Installation
+------------
+
+You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+for the minimum Airflow version supported) via
+``pip install apache-airflow-providers-edge``
+
+The package supports the following python versions: 3.9,3.10,3.11,3.12
+
+Requirements
+------------
+
+==================  ==================
+PIP package         Version required
+==================  ==================
+``apache-airflow``  ``>=2.10.0``
+``pydantic``        ``>=2.10.2``
+``retryhttp``       ``>=1.2.0``
+==================  ==================
+
+The changelog for the provider package can be found in the
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge/0.10.1pre0/changelog.html>`_.
diff --git a/providers/src/airflow/providers/edge/CHANGELOG.rst b/providers/edge/docs/changelog.rst
similarity index 98%
rename from providers/src/airflow/providers/edge/CHANGELOG.rst
rename to providers/edge/docs/changelog.rst
index fe403abcfb4e4..8ef261b4f85c6 100644
--- a/providers/src/airflow/providers/edge/CHANGELOG.rst
+++ b/providers/edge/docs/changelog.rst
@@ -146,7 +146,7 @@ Misc
 Misc
 ~~~~
 
-* ``Edge worker supports concurrency slots feature so that jobs which need more concurrency blocking other jobs beeing executed on the same worker in parallel.``
+* ``Edge worker supports concurrency slots feature so that jobs which need more concurrency blocking other jobs being executed on the same worker in parallel.``
 
 0.6.2pre0
 .........
diff --git a/docs/apache-airflow-providers-edge/cli-ref.rst b/providers/edge/docs/cli-ref.rst
similarity index 100%
rename from docs/apache-airflow-providers-edge/cli-ref.rst
rename to providers/edge/docs/cli-ref.rst
diff --git a/docs/apache-airflow-providers-edge/commits.rst b/providers/edge/docs/commits.rst
similarity index 100%
rename from docs/apache-airflow-providers-edge/commits.rst
rename to providers/edge/docs/commits.rst
diff --git a/docs/apache-airflow-providers-edge/configurations-ref.rst b/providers/edge/docs/configurations-ref.rst
similarity index 100%
rename from docs/apache-airflow-providers-edge/configurations-ref.rst
rename to providers/edge/docs/configurations-ref.rst
diff --git a/docs/apache-airflow-providers-edge/edge_executor.rst b/providers/edge/docs/edge_executor.rst
similarity index 100%
rename from docs/apache-airflow-providers-edge/edge_executor.rst
rename to providers/edge/docs/edge_executor.rst
diff --git a/docs/apache-airflow-providers-edge/index.rst b/providers/edge/docs/index.rst
similarity index 100%
rename from docs/apache-airflow-providers-edge/index.rst
rename to providers/edge/docs/index.rst
diff --git a/docs/apache-airflow-providers-edge/install_on_windows.rst b/providers/edge/docs/install_on_windows.rst
similarity index 100%
rename from docs/apache-airflow-providers-edge/install_on_windows.rst
rename to providers/edge/docs/install_on_windows.rst
diff --git a/docs/apache-airflow-providers-edge/installing-providers-from-sources.rst b/providers/edge/docs/installing-providers-from-sources.rst
similarity index 100%
rename from docs/apache-airflow-providers-edge/installing-providers-from-sources.rst
rename to providers/edge/docs/installing-providers-from-sources.rst
diff --git a/docs/apache-airflow-providers-edge/security.rst b/providers/edge/docs/security.rst
similarity index 100%
rename from docs/apache-airflow-providers-edge/security.rst
rename to providers/edge/docs/security.rst
diff --git a/providers/src/airflow/providers/edge/provider.yaml b/providers/edge/provider.yaml
similarity index 98%
rename from providers/src/airflow/providers/edge/provider.yaml
rename to providers/edge/provider.yaml
index b161f835348ec..4b36732e0392e 100644
--- a/providers/src/airflow/providers/edge/provider.yaml
+++ b/providers/edge/provider.yaml
@@ -27,11 +27,6 @@ source-date-epoch: 1729683247
 versions:
   - 0.10.1pre0
 
-dependencies:
-  - apache-airflow>=2.10.0
-  - pydantic>=2.10.2
-  - retryhttp>=1.2.0
-
 plugins:
   - name: edge_executor
     plugin-class: airflow.providers.edge.plugins.edge_executor_plugin.EdgeExecutorPlugin
diff --git a/providers/edge/pyproject.toml b/providers/edge/pyproject.toml
new file mode 100644
index 0000000000000..da11c7e78fb34
--- /dev/null
+++ b/providers/edge/pyproject.toml
@@ -0,0 +1,76 @@
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
+
+# IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+# `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+[build-system]
+requires = ["flit_core==3.10.1"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "apache-airflow-providers-edge"
+version = "0.10.1pre0"
+description = "Provider package apache-airflow-providers-edge for Apache Airflow"
+readme = "README.rst"
+authors = [
+    {name="Apache Software Foundation", email="dev@airflow.apache.org"},
+]
+maintainers = [
+    {name="Apache Software Foundation", email="dev@airflow.apache.org"},
+]
+keywords = [ "airflow-provider", "edge", "airflow", "integration" ]
+classifiers = [
+    "Development Status :: 5 - Production/Stable",
+    "Environment :: Console",
+    "Environment :: Web Environment",
+    "Intended Audience :: Developers",
+    "Intended Audience :: System Administrators",
+    "Framework :: Apache Airflow",
+    "Framework :: Apache Airflow :: Provider",
+    "License :: OSI Approved :: Apache Software License",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Topic :: System :: Monitoring",
+]
+requires-python = "~=3.9"
+dependencies = [
+    "apache-airflow>=2.10.0",
+    "pydantic>=2.10.2",
+    "retryhttp>=1.2.0",
+]
+
+[project.urls]
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-edge/0.10.1pre0"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-edge/0.10.1pre0/changelog.html"
+"Bug Tracker" = "https://github.com/apache/airflow/issues"
+"Source Code" = "https://github.com/apache/airflow"
+"Slack Chat" = "https://s.apache.org/airflow-slack"
+"Twitter" = "https://x.com/ApacheAirflow"
+"YouTube" = "https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/"
+
+[project.entry-points."apache_airflow_provider"]
+provider_info = "airflow.providers.edge.get_provider_info:get_provider_info"
+[project.entry-points."airflow.plugins"]
+edge_executor = "airflow.providers.edge.plugins.edge_executor_plugin:EdgeExecutorPlugin"
+
+[tool.flit.module]
+name = "airflow.providers.edge"
diff --git a/providers/edge/src/airflow/providers/edge/LICENSE b/providers/edge/src/airflow/providers/edge/LICENSE
new file mode 100644
index 0000000000000..405dcfe69d7c5
--- /dev/null
+++ b/providers/edge/src/airflow/providers/edge/LICENSE
@@ -0,0 +1,253 @@
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+   To apply the Apache License to your work, attach the following
+   boilerplate notice, with the fields enclosed by brackets "[]"
+   replaced with your own identifying information. (Don't include
+   the brackets!)  The text should be enclosed in the appropriate
+   comment syntax for the file format. We also recommend that a
+   file or class name and description of purpose be included on the
+   same "printed page" as the copyright notice for easier
+   identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+============================================================================
+   APACHE AIRFLOW SUBCOMPONENTS:
+
+   The Apache Airflow project contains subcomponents with separate copyright
+   notices and license terms. Your use of the source code for the these
+   subcomponents is subject to the terms and conditions of the following
+   licenses.
+
+
+========================================================================
+Third party Apache 2.0 licenses
+========================================================================
+
+The following components are provided under the Apache 2.0 License.
+See project link for details. The text of each license is also included
+at 3rd-party-licenses/LICENSE-[project].txt.
+
+    (ALv2 License) hue v4.3.0 (https://github.com/cloudera/hue/)
+    (ALv2 License) jqclock v2.3.0 (https://github.com/JohnRDOrazio/jQuery-Clock-Plugin)
+    (ALv2 License) bootstrap3-typeahead v4.0.2 (https://github.com/bassjobsen/Bootstrap-3-Typeahead)
+    (ALv2 License) connexion v2.7.0 (https://github.com/zalando/connexion)
+
+========================================================================
+MIT licenses
+========================================================================
+
+The following components are provided under the MIT License. See project link for details.
+The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
+
+    (MIT License) jquery v3.5.1 (https://jquery.org/license/)
+    (MIT License) dagre-d3 v0.6.4 (https://github.com/cpettitt/dagre-d3)
+    (MIT License) bootstrap v3.4.1 (https://github.com/twbs/bootstrap/)
+    (MIT License) d3-tip v0.9.1 (https://github.com/Caged/d3-tip)
+    (MIT License) dataTables v1.10.25 (https://datatables.net)
+    (MIT License) normalize.css v3.0.2 (http://necolas.github.io/normalize.css/)
+    (MIT License) ElasticMock v1.3.2 (https://github.com/vrcmarcos/elasticmock)
+    (MIT License) MomentJS v2.24.0 (http://momentjs.com/)
+    (MIT License) eonasdan-bootstrap-datetimepicker v4.17.49 (https://github.com/eonasdan/bootstrap-datetimepicker/)
+
+========================================================================
+BSD 3-Clause licenses
+========================================================================
+The following components are provided under the BSD 3-Clause license. See project links for details.
+The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
+
+    (BSD 3 License) d3 v5.16.0 (https://d3js.org)
+    (BSD 3 License) d3-shape v2.1.0 (https://github.com/d3/d3-shape)
+    (BSD 3 License) cgroupspy 0.2.1 (https://github.com/cloudsigma/cgroupspy)
+
+========================================================================
+See 3rd-party-licenses/LICENSES-ui.txt for packages used in `/airflow/www`
diff --git a/providers/src/airflow/providers/edge/__init__.py b/providers/edge/src/airflow/providers/edge/__init__.py
similarity index 100%
rename from providers/src/airflow/providers/edge/__init__.py
rename to providers/edge/src/airflow/providers/edge/__init__.py
diff --git a/providers/src/airflow/providers/edge/cli/__init__.py b/providers/edge/src/airflow/providers/edge/cli/__init__.py
similarity index 100%
rename from providers/src/airflow/providers/edge/cli/__init__.py
rename to providers/edge/src/airflow/providers/edge/cli/__init__.py
diff --git a/providers/src/airflow/providers/edge/cli/api_client.py b/providers/edge/src/airflow/providers/edge/cli/api_client.py
similarity index 100%
rename from providers/src/airflow/providers/edge/cli/api_client.py
rename to providers/edge/src/airflow/providers/edge/cli/api_client.py
diff --git a/providers/src/airflow/providers/edge/cli/edge_command.py b/providers/edge/src/airflow/providers/edge/cli/edge_command.py
similarity index 100%
rename from providers/src/airflow/providers/edge/cli/edge_command.py
rename to providers/edge/src/airflow/providers/edge/cli/edge_command.py
diff --git a/providers/src/airflow/providers/edge/example_dags/__init__.py b/providers/edge/src/airflow/providers/edge/example_dags/__init__.py
similarity index 100%
rename from providers/src/airflow/providers/edge/example_dags/__init__.py
rename to providers/edge/src/airflow/providers/edge/example_dags/__init__.py
diff --git a/providers/src/airflow/providers/edge/example_dags/integration_test.py b/providers/edge/src/airflow/providers/edge/example_dags/integration_test.py
similarity index 98%
rename from providers/src/airflow/providers/edge/example_dags/integration_test.py
rename to providers/edge/src/airflow/providers/edge/example_dags/integration_test.py
index ba42665ddf736..418164832576d 100644
--- a/providers/src/airflow/providers/edge/example_dags/integration_test.py
+++ b/providers/edge/src/airflow/providers/edge/example_dags/integration_test.py
@@ -99,7 +99,7 @@ def virtualenv():
     @task
     def variable():
         Variable.set("integration_test_key", "value")
-        assert Variable.get("integration_test_key") == "value"  # noqa: S101
+        assert Variable.get("integration_test_key") == "value"
         Variable.delete("integration_test_key")
 
     @task
diff --git a/providers/src/airflow/providers/edge/example_dags/win_notepad.py b/providers/edge/src/airflow/providers/edge/example_dags/win_notepad.py
similarity index 100%
rename from providers/src/airflow/providers/edge/example_dags/win_notepad.py
rename to providers/edge/src/airflow/providers/edge/example_dags/win_notepad.py
diff --git a/providers/src/airflow/providers/edge/example_dags/win_test.py b/providers/edge/src/airflow/providers/edge/example_dags/win_test.py
similarity index 100%
rename from providers/src/airflow/providers/edge/example_dags/win_test.py
rename to providers/edge/src/airflow/providers/edge/example_dags/win_test.py
diff --git a/providers/src/airflow/providers/edge/executors/__init__.py b/providers/edge/src/airflow/providers/edge/executors/__init__.py
similarity index 100%
rename from providers/src/airflow/providers/edge/executors/__init__.py
rename to providers/edge/src/airflow/providers/edge/executors/__init__.py
diff --git a/providers/src/airflow/providers/edge/executors/edge_executor.py b/providers/edge/src/airflow/providers/edge/executors/edge_executor.py
similarity index 100%
rename from providers/src/airflow/providers/edge/executors/edge_executor.py
rename to providers/edge/src/airflow/providers/edge/executors/edge_executor.py
diff --git a/providers/edge/src/airflow/providers/edge/get_provider_info.py b/providers/edge/src/airflow/providers/edge/get_provider_info.py
new file mode 100644
index 0000000000000..2b17305d4f713
--- /dev/null
+++ b/providers/edge/src/airflow/providers/edge/get_provider_info.py
@@ -0,0 +1,103 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
+#
+# IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+# `get_provider_info_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+
+
+def get_provider_info():
+    return {
+        "package-name": "apache-airflow-providers-edge",
+        "name": "Edge Executor",
+        "description": "Handle edge workers on remote sites via HTTP(s) connection and orchestrates work over distributed sites\n",
+        "state": "not-ready",
+        "source-date-epoch": 1729683247,
+        "versions": ["0.10.1pre0"],
+        "plugins": [
+            {
+                "name": "edge_executor",
+                "plugin-class": "airflow.providers.edge.plugins.edge_executor_plugin.EdgeExecutorPlugin",
+            }
+        ],
+        "executors": ["airflow.providers.edge.executors.EdgeExecutor"],
+        "config": {
+            "edge": {
+                "description": "This section only applies if you are using the EdgeExecutor in\n``[core]`` section above\n",
+                "options": {
+                    "api_enabled": {
+                        "description": "Flag if the plugin endpoint is enabled to serve Edge Workers.\n",
+                        "version_added": None,
+                        "type": "boolean",
+                        "example": "True",
+                        "default": "False",
+                    },
+                    "api_url": {
+                        "description": "URL endpoint on which the Airflow code edge API is accessible from edge worker.\n",
+                        "version_added": None,
+                        "type": "string",
+                        "example": "https://airflow.hosting.org/edge_worker/v1/rpcapi",
+                        "default": None,
+                    },
+                    "job_poll_interval": {
+                        "description": "Edge Worker currently polls for new jobs via HTTP. This parameter defines the number\nof seconds it should sleep between polls for new jobs.\nJob polling only happens if the Edge Worker seeks for new work. Not if busy.\n",
+                        "version_added": None,
+                        "type": "integer",
+                        "example": "5",
+                        "default": "5",
+                    },
+                    "heartbeat_interval": {
+                        "description": "Edge Worker continuously reports status to the central site. This parameter defines\nhow often a status with heartbeat should be sent.\nDuring heartbeat status is reported as well as it is checked if a running task is to be terminated.\n",
+                        "version_added": None,
+                        "type": "integer",
+                        "example": "10",
+                        "default": "30",
+                    },
+                    "worker_concurrency": {
+                        "description": "The concurrency defines the default max parallel running task instances and can also be set during\nstart of worker with the ``airflow edge worker`` command parameter. The size of the workers\nand the resources must support the nature of your tasks. The parameter\nworks together with the concurrency_slots parameter of a task.\n",
+                        "version_added": None,
+                        "type": "integer",
+                        "example": None,
+                        "default": "8",
+                    },
+                    "job_success_purge": {
+                        "description": "Minutes after which successful jobs for EdgeExecutor are purged from database\n",
+                        "version_added": None,
+                        "type": "integer",
+                        "example": None,
+                        "default": "5",
+                    },
+                    "job_fail_purge": {
+                        "description": "Minutes after which failed jobs for EdgeExecutor are purged from database\n",
+                        "version_added": None,
+                        "type": "integer",
+                        "example": None,
+                        "default": "60",
+                    },
+                    "push_log_chunk_size": {
+                        "description": "Edge Worker uploads log files in chunks. If the log file part which is uploaded\nexceeds the chunk size it creates a new request. The application gateway can\nlimit the max body size see:\nhttps://nginx.org/en/docs/http/ngx_http_core_module.html#client_max_body_size\nA HTTP 413 issue can point to this value to fix the issue.\nThis value must be defined in Bytes.\n",
+                        "version_added": None,
+                        "type": "integer",
+                        "example": None,
+                        "default": "524288",
+                    },
+                },
+            }
+        },
+        "dependencies": ["apache-airflow>=2.10.0", "pydantic>=2.10.2", "retryhttp>=1.2.0"],
+    }
diff --git a/providers/src/airflow/providers/edge/models/__init__.py b/providers/edge/src/airflow/providers/edge/models/__init__.py
similarity index 100%
rename from providers/src/airflow/providers/edge/models/__init__.py
rename to providers/edge/src/airflow/providers/edge/models/__init__.py
diff --git a/providers/src/airflow/providers/edge/models/edge_job.py b/providers/edge/src/airflow/providers/edge/models/edge_job.py
similarity index 100%
rename from providers/src/airflow/providers/edge/models/edge_job.py
rename to providers/edge/src/airflow/providers/edge/models/edge_job.py
diff --git a/providers/src/airflow/providers/edge/models/edge_logs.py b/providers/edge/src/airflow/providers/edge/models/edge_logs.py
similarity index 100%
rename from providers/src/airflow/providers/edge/models/edge_logs.py
rename to providers/edge/src/airflow/providers/edge/models/edge_logs.py
diff --git a/providers/src/airflow/providers/edge/models/edge_worker.py b/providers/edge/src/airflow/providers/edge/models/edge_worker.py
similarity index 100%
rename from providers/src/airflow/providers/edge/models/edge_worker.py
rename to providers/edge/src/airflow/providers/edge/models/edge_worker.py
diff --git a/providers/src/airflow/providers/edge/openapi/__init__.py b/providers/edge/src/airflow/providers/edge/openapi/__init__.py
similarity index 100%
rename from providers/src/airflow/providers/edge/openapi/__init__.py
rename to providers/edge/src/airflow/providers/edge/openapi/__init__.py
diff --git a/providers/src/airflow/providers/edge/openapi/edge_worker_api_v1.yaml b/providers/edge/src/airflow/providers/edge/openapi/edge_worker_api_v1.yaml
similarity index 100%
rename from providers/src/airflow/providers/edge/openapi/edge_worker_api_v1.yaml
rename to providers/edge/src/airflow/providers/edge/openapi/edge_worker_api_v1.yaml
diff --git a/providers/src/airflow/providers/edge/plugins/__init__.py b/providers/edge/src/airflow/providers/edge/plugins/__init__.py
similarity index 100%
rename from providers/src/airflow/providers/edge/plugins/__init__.py
rename to providers/edge/src/airflow/providers/edge/plugins/__init__.py
diff --git a/providers/src/airflow/providers/edge/plugins/edge_executor_plugin.py b/providers/edge/src/airflow/providers/edge/plugins/edge_executor_plugin.py
similarity index 100%
rename from providers/src/airflow/providers/edge/plugins/edge_executor_plugin.py
rename to providers/edge/src/airflow/providers/edge/plugins/edge_executor_plugin.py
diff --git a/providers/src/airflow/providers/edge/plugins/templates/edge_worker_hosts.html b/providers/edge/src/airflow/providers/edge/plugins/templates/edge_worker_hosts.html
similarity index 100%
rename from providers/src/airflow/providers/edge/plugins/templates/edge_worker_hosts.html
rename to providers/edge/src/airflow/providers/edge/plugins/templates/edge_worker_hosts.html
diff --git a/providers/src/airflow/providers/edge/plugins/templates/edge_worker_jobs.html b/providers/edge/src/airflow/providers/edge/plugins/templates/edge_worker_jobs.html
similarity index 100%
rename from providers/src/airflow/providers/edge/plugins/templates/edge_worker_jobs.html
rename to providers/edge/src/airflow/providers/edge/plugins/templates/edge_worker_jobs.html
diff --git a/providers/src/airflow/providers/edge/version_compat.py b/providers/edge/src/airflow/providers/edge/version_compat.py
similarity index 100%
rename from providers/src/airflow/providers/edge/version_compat.py
rename to providers/edge/src/airflow/providers/edge/version_compat.py
diff --git a/providers/src/airflow/providers/edge/worker_api/__init__.py b/providers/edge/src/airflow/providers/edge/worker_api/__init__.py
similarity index 100%
rename from providers/src/airflow/providers/edge/worker_api/__init__.py
rename to providers/edge/src/airflow/providers/edge/worker_api/__init__.py
diff --git a/providers/src/airflow/providers/edge/worker_api/app.py b/providers/edge/src/airflow/providers/edge/worker_api/app.py
similarity index 100%
rename from providers/src/airflow/providers/edge/worker_api/app.py
rename to providers/edge/src/airflow/providers/edge/worker_api/app.py
diff --git a/providers/src/airflow/providers/edge/worker_api/auth.py b/providers/edge/src/airflow/providers/edge/worker_api/auth.py
similarity index 100%
rename from providers/src/airflow/providers/edge/worker_api/auth.py
rename to providers/edge/src/airflow/providers/edge/worker_api/auth.py
diff --git a/providers/src/airflow/providers/edge/worker_api/datamodels.py b/providers/edge/src/airflow/providers/edge/worker_api/datamodels.py
similarity index 100%
rename from providers/src/airflow/providers/edge/worker_api/datamodels.py
rename to providers/edge/src/airflow/providers/edge/worker_api/datamodels.py
diff --git a/providers/src/airflow/providers/edge/worker_api/routes/__init__.py b/providers/edge/src/airflow/providers/edge/worker_api/routes/__init__.py
similarity index 100%
rename from providers/src/airflow/providers/edge/worker_api/routes/__init__.py
rename to providers/edge/src/airflow/providers/edge/worker_api/routes/__init__.py
diff --git a/providers/src/airflow/providers/edge/worker_api/routes/_v2_compat.py b/providers/edge/src/airflow/providers/edge/worker_api/routes/_v2_compat.py
similarity index 100%
rename from providers/src/airflow/providers/edge/worker_api/routes/_v2_compat.py
rename to providers/edge/src/airflow/providers/edge/worker_api/routes/_v2_compat.py
diff --git a/providers/src/airflow/providers/edge/worker_api/routes/_v2_routes.py b/providers/edge/src/airflow/providers/edge/worker_api/routes/_v2_routes.py
similarity index 100%
rename from providers/src/airflow/providers/edge/worker_api/routes/_v2_routes.py
rename to providers/edge/src/airflow/providers/edge/worker_api/routes/_v2_routes.py
diff --git a/providers/src/airflow/providers/edge/worker_api/routes/health.py b/providers/edge/src/airflow/providers/edge/worker_api/routes/health.py
similarity index 100%
rename from providers/src/airflow/providers/edge/worker_api/routes/health.py
rename to providers/edge/src/airflow/providers/edge/worker_api/routes/health.py
diff --git a/providers/src/airflow/providers/edge/worker_api/routes/jobs.py b/providers/edge/src/airflow/providers/edge/worker_api/routes/jobs.py
similarity index 100%
rename from providers/src/airflow/providers/edge/worker_api/routes/jobs.py
rename to providers/edge/src/airflow/providers/edge/worker_api/routes/jobs.py
diff --git a/providers/src/airflow/providers/edge/worker_api/routes/logs.py b/providers/edge/src/airflow/providers/edge/worker_api/routes/logs.py
similarity index 100%
rename from providers/src/airflow/providers/edge/worker_api/routes/logs.py
rename to providers/edge/src/airflow/providers/edge/worker_api/routes/logs.py
diff --git a/providers/src/airflow/providers/edge/worker_api/routes/worker.py b/providers/edge/src/airflow/providers/edge/worker_api/routes/worker.py
similarity index 100%
rename from providers/src/airflow/providers/edge/worker_api/routes/worker.py
rename to providers/edge/src/airflow/providers/edge/worker_api/routes/worker.py
diff --git a/providers/edge/tests/conftest.py b/providers/edge/tests/conftest.py
new file mode 100644
index 0000000000000..068fe6bbf5ae9
--- /dev/null
+++ b/providers/edge/tests/conftest.py
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import pathlib
+
+import pytest
+
+pytest_plugins = "tests_common.pytest_plugin"
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_configure(config: pytest.Config) -> None:
+    deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml")
+    dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else []
+    config.inicfg["airflow_deprecations_ignore"] = (
+        config.inicfg.get("airflow_deprecations_ignore", []) + dep_path  # type: ignore[assignment,operator]
+    )
diff --git a/providers/tests/edge/__init__.py b/providers/edge/tests/providers/edge/__init__.py
similarity index 100%
rename from providers/tests/edge/__init__.py
rename to providers/edge/tests/providers/edge/__init__.py
diff --git a/providers/tests/edge/cli/__init__.py b/providers/edge/tests/providers/edge/cli/__init__.py
similarity index 100%
rename from providers/tests/edge/cli/__init__.py
rename to providers/edge/tests/providers/edge/cli/__init__.py
diff --git a/providers/tests/edge/cli/test_api_client.py b/providers/edge/tests/providers/edge/cli/test_api_client.py
similarity index 100%
rename from providers/tests/edge/cli/test_api_client.py
rename to providers/edge/tests/providers/edge/cli/test_api_client.py
diff --git a/providers/tests/edge/cli/test_edge_command.py b/providers/edge/tests/providers/edge/cli/test_edge_command.py
similarity index 96%
rename from providers/tests/edge/cli/test_edge_command.py
rename to providers/edge/tests/providers/edge/cli/test_edge_command.py
index b1b719444baf0..42e6d7ec3e8fc 100644
--- a/providers/tests/edge/cli/test_edge_command.py
+++ b/providers/edge/tests/providers/edge/cli/test_edge_command.py
@@ -41,19 +41,19 @@
 
 MOCK_COMMAND = (
     {
-        "token": "dummy",
+        "token": "mock",
         "ti": {
             "id": "4d828a62-a417-4936-a7a6-2b3fabacecab",
-            "task_id": "dummy",
-            "dag_id": "dummy",
-            "run_id": "dummy",
+            "task_id": "mock",
+            "dag_id": "mock",
+            "run_id": "mock",
             "try_number": 1,
             "pool_slots": 1,
             "queue": "default",
             "priority_weight": 1,
         },
-        "dag_rel_path": "dummy.py",
-        "log_path": "dummy.log",
+        "dag_rel_path": "mock.py",
+        "log_path": "mock.log",
         "bundle_info": {"name": "hello", "version": "abc"},
     }
     if AIRFLOW_V_3_0_PLUS
@@ -112,7 +112,7 @@ def returncode(self):
 
 class TestEdgeWorkerCli:
     @pytest.fixture
-    def dummy_joblist(self, tmp_path: Path) -> list[_Job]:
+    def mock_joblist(self, tmp_path: Path) -> list[_Job]:
         logfile = tmp_path / "file.log"
         logfile.touch()
 
@@ -134,9 +134,9 @@ def dummy_joblist(self, tmp_path: Path) -> list[_Job]:
         ]
 
     @pytest.fixture
-    def worker_with_job(self, tmp_path: Path, dummy_joblist: list[_Job]) -> _EdgeWorkerCli:
-        test_worker = _EdgeWorkerCli(str(tmp_path / "dummy.pid"), "dummy", None, 8, 5, 5)
-        test_worker.jobs = dummy_joblist
+    def worker_with_job(self, tmp_path: Path, mock_joblist: list[_Job]) -> _EdgeWorkerCli:
+        test_worker = _EdgeWorkerCli(str(tmp_path / "mock.pid"), "mock", None, 8, 5, 5)
+        test_worker.jobs = mock_joblist
         return test_worker
 
     @patch("airflow.providers.edge.cli.edge_command.Process")
@@ -198,7 +198,7 @@ def test_fetch_job(
     ):
         logfile_path_call_count, set_state_call_count = expected_calls
         mock_reserve_task.side_effect = [reserve_result]
-        mock_popen.side_effect = ["dummy"]
+        mock_popen.side_effect = ["mock"]
         with conf_vars({("edge", "api_url"): "https://invalid-api-test-endpoint"}):
             got_job = worker_with_job.fetch_job()
         mock_reserve_task.assert_called_once()
diff --git a/providers/tests/edge/executors/__init__.py b/providers/edge/tests/providers/edge/executors/__init__.py
similarity index 100%
rename from providers/tests/edge/executors/__init__.py
rename to providers/edge/tests/providers/edge/executors/__init__.py
diff --git a/providers/tests/edge/executors/test_edge_executor.py b/providers/edge/tests/providers/edge/executors/test_edge_executor.py
similarity index 97%
rename from providers/tests/edge/executors/test_edge_executor.py
rename to providers/edge/tests/providers/edge/executors/test_edge_executor.py
index 3a5e6b18d69a3..6b34f3a3650cb 100644
--- a/providers/tests/edge/executors/test_edge_executor.py
+++ b/providers/edge/tests/providers/edge/executors/test_edge_executor.py
@@ -107,7 +107,7 @@ def test_sync_orphaned_tasks(self):
                         try_number=1,
                         state=state,
                         queue="default",
-                        command="dummy",
+                        command="mock",
                         concurrency_slots=1,
                         last_update=last_update,
                     )
@@ -153,7 +153,7 @@ def remove_from_running(key: TaskInstanceKey):
                         state=state,
                         queue="default",
                         concurrency_slots=1,
-                        command="dummy",
+                        command="mock",
                         last_update=last_update,
                     )
                 )
@@ -195,7 +195,7 @@ def remove_from_running(key: TaskInstanceKey):
                     state=state,
                     concurrency_slots=1,
                     queue="default",
-                    command="dummy",
+                    command="mock",
                     last_update=timezone.utcnow(),
                 )
             )
@@ -290,19 +290,19 @@ def test_queue_workload(self):
             executor.queue_workload(command=["airflow", "tasks", "run", "hello", "world"])
 
         workload = ExecuteTask(
-            token="dummy",
+            token="mock",
             ti=TaskInstance(
                 id="4d828a62-a417-4936-a7a6-2b3fabacecab",
-                task_id="dummy",
-                dag_id="dummy",
-                run_id="dummy",
+                task_id="mock",
+                dag_id="mock",
+                run_id="mock",
                 try_number=1,
                 pool_slots=1,
                 queue="default",
                 priority_weight=1,
             ),
-            dag_rel_path="dummy.py",
-            log_path="dummy.log",
+            dag_rel_path="mock.py",
+            log_path="mock.log",
             bundle_info={"name": "n/a", "version": "no matter"},
         )
         executor.queue_workload(workload=workload)
diff --git a/providers/tests/edge/models/__init__.py b/providers/edge/tests/providers/edge/models/__init__.py
similarity index 100%
rename from providers/tests/edge/models/__init__.py
rename to providers/edge/tests/providers/edge/models/__init__.py
diff --git a/providers/tests/edge/plugins/__init__.py b/providers/edge/tests/providers/edge/plugins/__init__.py
similarity index 100%
rename from providers/tests/edge/plugins/__init__.py
rename to providers/edge/tests/providers/edge/plugins/__init__.py
diff --git a/providers/tests/edge/plugins/test_edge_executor_plugin.py b/providers/edge/tests/providers/edge/plugins/test_edge_executor_plugin.py
similarity index 100%
rename from providers/tests/edge/plugins/test_edge_executor_plugin.py
rename to providers/edge/tests/providers/edge/plugins/test_edge_executor_plugin.py
diff --git a/providers/tests/edge/worker_api/__init__.py b/providers/edge/tests/providers/edge/worker_api/__init__.py
similarity index 100%
rename from providers/tests/edge/worker_api/__init__.py
rename to providers/edge/tests/providers/edge/worker_api/__init__.py
diff --git a/providers/tests/edge/worker_api/routes/__init__.py b/providers/edge/tests/providers/edge/worker_api/routes/__init__.py
similarity index 100%
rename from providers/tests/edge/worker_api/routes/__init__.py
rename to providers/edge/tests/providers/edge/worker_api/routes/__init__.py
diff --git a/providers/tests/edge/worker_api/routes/test_health.py b/providers/edge/tests/providers/edge/worker_api/routes/test_health.py
similarity index 100%
rename from providers/tests/edge/worker_api/routes/test_health.py
rename to providers/edge/tests/providers/edge/worker_api/routes/test_health.py
diff --git a/providers/tests/edge/worker_api/routes/test_logs.py b/providers/edge/tests/providers/edge/worker_api/routes/test_logs.py
similarity index 100%
rename from providers/tests/edge/worker_api/routes/test_logs.py
rename to providers/edge/tests/providers/edge/worker_api/routes/test_logs.py
diff --git a/providers/tests/edge/worker_api/routes/test_worker.py b/providers/edge/tests/providers/edge/worker_api/routes/test_worker.py
similarity index 98%
rename from providers/tests/edge/worker_api/routes/test_worker.py
rename to providers/edge/tests/providers/edge/worker_api/routes/test_worker.py
index e05a94c5f8719..e9442c4c0381c 100644
--- a/providers/tests/edge/worker_api/routes/test_worker.py
+++ b/providers/edge/tests/providers/edge/worker_api/routes/test_worker.py
@@ -42,7 +42,7 @@
 class TestWorkerApiRoutes:
     @pytest.fixture
     def cli_worker(self, tmp_path: Path) -> _EdgeWorkerCli:
-        test_worker = _EdgeWorkerCli(str(tmp_path / "dummy.pid"), "dummy", None, 8, 5, 5)
+        test_worker = _EdgeWorkerCli(str(tmp_path / "mock.pid"), "mock", None, 8, 5, 5)
         return test_worker
 
     @pytest.fixture(autouse=True)
diff --git a/providers/src/airflow/providers/amazon/aws/operators/batch.py b/providers/src/airflow/providers/amazon/aws/operators/batch.py
index 3df00fb04c37f..e69508d89319f 100644
--- a/providers/src/airflow/providers/amazon/aws/operators/batch.py
+++ b/providers/src/airflow/providers/amazon/aws/operators/batch.py
@@ -95,6 +95,7 @@ class BatchOperator(BaseOperator):
         If it is an array job, only the logs of the first task will be printed.
     :param awslogs_fetch_interval: The interval with which cloudwatch logs are to be fetched, 30 sec.
     :param poll_interval: (Deferrable mode only) Time in seconds to wait between polling.
+    :param submit_job_timeout: Execution timeout in seconds for the submitted Batch job.
 
     .. note::
         Any custom waiters must return a waiter for these calls:
@@ -184,6 +185,7 @@ def __init__(
         poll_interval: int = 30,
         awslogs_enabled: bool = False,
         awslogs_fetch_interval: timedelta = timedelta(seconds=30),
+        submit_job_timeout: int | None = None,
         **kwargs,
     ) -> None:
         BaseOperator.__init__(self, **kwargs)
@@ -208,6 +210,7 @@ def __init__(
         self.poll_interval = poll_interval
         self.awslogs_enabled = awslogs_enabled
         self.awslogs_fetch_interval = awslogs_fetch_interval
+        self.submit_job_timeout = submit_job_timeout
 
         # params for hook
         self.max_retries = max_retries
@@ -315,6 +318,9 @@ def submit_job(self, context: Context):
             "schedulingPriorityOverride": self.scheduling_priority_override,
         }
 
+        if self.submit_job_timeout:
+            args["timeout"] = {"attemptDurationSeconds": self.submit_job_timeout}
+
         try:
             response = self.hook.client.submit_job(**trim_none_values(args))
         except Exception as e:
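The new submit_job_timeout parameter above is forwarded to AWS Batch as the job-level timeout (attemptDurationSeconds). A minimal, hypothetical usage sketch — the job name, queue and definition below are placeholders, not part of this change:

    from airflow.providers.amazon.aws.operators.batch import BatchOperator

    submit_batch_job = BatchOperator(
        task_id="submit_batch_job",
        job_name="example-job",
        job_queue="example-queue",
        job_definition="example-definition:1",
        # Forwarded as {"attemptDurationSeconds": 3600} in the submit_job request.
        submit_job_timeout=3600,
    )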
diff --git a/providers/src/airflow/providers/cncf/kubernetes/exceptions.py b/providers/src/airflow/providers/cncf/kubernetes/exceptions.py
new file mode 100644
index 0000000000000..c0b6ad83a3fdc
--- /dev/null
+++ b/providers/src/airflow/providers/cncf/kubernetes/exceptions.py
@@ -0,0 +1,29 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from airflow.exceptions import (
+    AirflowException,
+)
+
+
+class PodMutationHookException(AirflowException):
+    """Raised when exception happens during Pod Mutation Hook execution."""
+
+
+class PodReconciliationError(AirflowException):
+    """Raised when an error is encountered while trying to merge pod configs."""
diff --git a/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py b/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
index cfd31cda894a2..482f99725b58f 100644
--- a/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
+++ b/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
@@ -61,13 +61,13 @@
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.executors.base_executor import BaseExecutor
 from airflow.executors.executor_constants import KUBERNETES_EXECUTOR
+from airflow.providers.cncf.kubernetes.exceptions import PodMutationHookException, PodReconciliationError
 from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types import (
     ADOPTED,
     POD_EXECUTOR_DONE_KEY,
 )
 from airflow.providers.cncf.kubernetes.kube_config import KubeConfig
 from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import annotations_to_key
-from airflow.providers.cncf.kubernetes.pod_generator import PodMutationHookException, PodReconciliationError
 from airflow.stats import Stats
 from airflow.utils.event_scheduler import EventScheduler
 from airflow.utils.log.logging_mixin import remove_escape_codes
diff --git a/providers/src/airflow/providers/cncf/kubernetes/pod_generator.py b/providers/src/airflow/providers/cncf/kubernetes/pod_generator.py
index ad4123eacc0c0..b90fa715333bf 100644
--- a/providers/src/airflow/providers/cncf/kubernetes/pod_generator.py
+++ b/providers/src/airflow/providers/cncf/kubernetes/pod_generator.py
@@ -39,9 +39,9 @@
 
 from airflow.exceptions import (
     AirflowConfigException,
-    AirflowException,
 )
 from airflow.providers.cncf.kubernetes.backcompat import get_logical_date_key
+from airflow.providers.cncf.kubernetes.exceptions import PodMutationHookException, PodReconciliationError
 from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
     POD_NAME_MAX_LENGTH,
     add_unique_suffix,
@@ -58,14 +58,6 @@
 MAX_LABEL_LEN = 63
 
 
-class PodMutationHookException(AirflowException):
-    """Raised when exception happens during Pod Mutation Hook execution."""
-
-
-class PodReconciliationError(AirflowException):
-    """Raised when an error is encountered while trying to merge pod configs."""
-
-
 def make_safe_label_value(string: str) -> str:
     """
     Normalize a provided label to be of valid length and characters.
diff --git a/providers/src/airflow/providers/exasol/hooks/exasol.py b/providers/src/airflow/providers/exasol/hooks/exasol.py
index ad3362e9115ee..69c51ab4c8eb2 100644
--- a/providers/src/airflow/providers/exasol/hooks/exasol.py
+++ b/providers/src/airflow/providers/exasol/hooks/exasol.py
@@ -246,7 +246,7 @@ def run(
                         else:
                             results.append(result)
                             self.descriptions.append(self.get_description(exa_statement))
-                    self.log.info("Rows affected: %s", exa_statement.rowcount)
+                    self.log.info("Rows affected: %s", exa_statement.rowcount())
 
             # If autocommit was set to False or db does not support autocommit, we do a manual commit.
             if not self.get_autocommit(conn):
diff --git a/providers/src/airflow/providers/google/cloud/hooks/dataplex.py b/providers/src/airflow/providers/google/cloud/hooks/dataplex.py
index 387dfb00a50c2..cb2c7e41a2067 100644
--- a/providers/src/airflow/providers/google/cloud/hooks/dataplex.py
+++ b/providers/src/airflow/providers/google/cloud/hooks/dataplex.py
@@ -20,15 +20,22 @@
 
 import time
 from collections.abc import Sequence
+from copy import deepcopy
 from typing import TYPE_CHECKING, Any
 
 from google.api_core.client_options import ClientOptions
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
-from google.cloud.dataplex_v1 import DataplexServiceClient, DataScanServiceAsyncClient, DataScanServiceClient
+from google.cloud.dataplex_v1 import (
+    DataplexServiceClient,
+    DataScanServiceAsyncClient,
+    DataScanServiceClient,
+)
+from google.cloud.dataplex_v1.services.catalog_service import CatalogServiceClient
 from google.cloud.dataplex_v1.types import (
     Asset,
     DataScan,
     DataScanJob,
+    EntryGroup,
     Lake,
     Task,
     Zone,
@@ -47,6 +54,7 @@
     from google.api_core.operation import Operation
     from google.api_core.retry import Retry
     from google.api_core.retry_async import AsyncRetry
+    from google.cloud.dataplex_v1.services.catalog_service.pagers import ListEntryGroupsPager
     from googleapiclient.discovery import Resource
 
 PATH_DATA_SCAN = "projects/{project_id}/locations/{region}/dataScans/{data_scan_id}"
@@ -110,6 +118,14 @@ def get_dataplex_data_scan_client(self) -> DataScanServiceClient:
             credentials=self.get_credentials(), client_info=CLIENT_INFO, client_options=client_options
         )
 
+    def get_dataplex_catalog_client(self) -> CatalogServiceClient:
+        """Return CatalogServiceClient."""
+        client_options = ClientOptions(api_endpoint="dataplex.googleapis.com:443")
+
+        return CatalogServiceClient(
+            credentials=self.get_credentials(), client_info=CLIENT_INFO, client_options=client_options
+        )
+
     def wait_for_operation(self, timeout: float | None, operation: Operation):
         """Wait for long-lasting operation to complete."""
         try:
@@ -118,6 +134,200 @@ def wait_for_operation(self, timeout: float | None, operation: Operation):
             error = operation.exception(timeout=timeout)
             raise AirflowException(error)
 
+    @GoogleBaseHook.fallback_to_default_project_id
+    def create_entry_group(
+        self,
+        location: str,
+        entry_group_id: str,
+        entry_group_configuration: EntryGroup | dict,
+        project_id: str = PROVIDE_PROJECT_ID,
+        validate_only: bool = False,
+        retry: Retry | _MethodDefault = DEFAULT,
+        timeout: float | None = None,
+        metadata: Sequence[tuple[str, str]] = (),
+    ) -> Operation:
+        """
+        Create an EntryGroup resource.
+
+        :param location: Required. The ID of the Google Cloud location that the task belongs to.
+        :param entry_group_id: Required. EntryGroup identifier.
+        :param entry_group_configuration: Required. EntryGroup configuration body.
+        :param project_id: Optional. The ID of the Google Cloud project that the task belongs to.
+        :param validate_only: Optional. If set, performs request validation, but does not actually execute
+            the create request.
+        :param retry: Optional. A retry object used to retry requests. If `None` is specified, requests
+            will not be retried.
+        :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
+            Note that if `retry` is specified, the timeout applies to each individual attempt.
+        :param metadata: Optional. Additional metadata that is provided to the method.
+        """
+        client = self.get_dataplex_catalog_client()
+        return client.create_entry_group(
+            request={
+                "parent": client.common_location_path(project_id, location),
+                "entry_group_id": entry_group_id,
+                "entry_group": entry_group_configuration,
+                "validate_only": validate_only,
+            },
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def get_entry_group(
+        self,
+        location: str,
+        entry_group_id: str,
+        project_id: str = PROVIDE_PROJECT_ID,
+        retry: Retry | _MethodDefault = DEFAULT,
+        timeout: float | None = None,
+        metadata: Sequence[tuple[str, str]] = (),
+    ) -> EntryGroup:
+        """
+        Get an EntryGroup resource.
+
+        :param location: Required. The ID of the Google Cloud location that the task belongs to.
+        :param entry_group_id: Required. EntryGroup identifier.
+        :param project_id: Optional. The ID of the Google Cloud project that the task belongs to.
+        :param retry: Optional. A retry object used to retry requests. If `None` is specified, requests
+            will not be retried.
+        :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
+            Note that if `retry` is specified, the timeout applies to each individual attempt.
+        :param metadata: Optional. Additional metadata that is provided to the method.
+        """
+        client = self.get_dataplex_catalog_client()
+        return client.get_entry_group(
+            request={
+                "name": client.entry_group_path(project_id, location, entry_group_id),
+            },
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def delete_entry_group(
+        self,
+        location: str,
+        entry_group_id: str,
+        project_id: str = PROVIDE_PROJECT_ID,
+        retry: Retry | _MethodDefault = DEFAULT,
+        timeout: float | None = None,
+        metadata: Sequence[tuple[str, str]] = (),
+    ) -> Operation:
+        """
+        Delete an EntryGroup resource.
+
+        :param location: Required. The ID of the Google Cloud location that the task belongs to.
+        :param entry_group_id: Required. EntryGroup identifier.
+        :param project_id: Optional. The ID of the Google Cloud project that the task belongs to.
+        :param retry: Optional. A retry object used to retry requests. If `None` is specified, requests
+            will not be retried.
+        :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
+            Note that if `retry` is specified, the timeout applies to each individual attempt.
+        :param metadata: Optional. Additional metadata that is provided to the method.
+        """
+        client = self.get_dataplex_catalog_client()
+        return client.delete_entry_group(
+            request={
+                "name": client.entry_group_path(project_id, location, entry_group_id),
+            },
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def list_entry_groups(
+        self,
+        location: str,
+        filter_by: str | None = None,
+        order_by: str | None = None,
+        page_size: int | None = None,
+        page_token: str | None = None,
+        project_id: str = PROVIDE_PROJECT_ID,
+        retry: Retry | _MethodDefault = DEFAULT,
+        timeout: float | None = None,
+        metadata: Sequence[tuple[str, str]] = (),
+    ) -> ListEntryGroupsPager:
+        """
+        List EntryGroup resources from a specific location.
+
+        :param location: Required. The ID of the Google Cloud location that the task belongs to.
+        :param filter_by: Optional. Filter to apply on the list results.
+        :param order_by: Optional. Fields to order the results by.
+        :param page_size: Optional. Maximum number of EntryGroups to return on one page.
+        :param page_token: Optional. Token to retrieve the next page of results.
+        :param project_id: Optional. The ID of the Google Cloud project that the task belongs to.
+        :param retry: Optional. A retry object used to retry requests. If `None` is specified, requests
+            will not be retried.
+        :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
+            Note that if `retry` is specified, the timeout applies to each individual attempt.
+        :param metadata: Optional. Additional metadata that is provided to the method.
+        """
+        client = self.get_dataplex_catalog_client()
+        return client.list_entry_groups(
+            request={
+                "parent": client.common_location_path(project_id, location),
+                "filter": filter_by,
+                "order_by": order_by,
+                "page_size": page_size,
+                "page_token": page_token,
+            },
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def update_entry_group(
+        self,
+        location: str,
+        entry_group_id: str,
+        entry_group_configuration: dict | EntryGroup,
+        project_id: str = PROVIDE_PROJECT_ID,
+        update_mask: list[str] | FieldMask | None = None,
+        validate_only: bool | None = False,
+        retry: Retry | _MethodDefault = DEFAULT,
+        timeout: float | None = None,
+        metadata: Sequence[tuple[str, str]] = (),
+    ) -> Operation:
+        """
+        Update an EntryGroup resource.
+
+        :param entry_group_id: Required. ID of the EntryGroup to update.
+        :param entry_group_configuration: Required. The updated configuration body of the EntryGroup.
+        :param location: Required. The ID of the Google Cloud location that the task belongs to.
+        :param update_mask: Optional. Names of fields whose values to overwrite on an entry group.
+            If this parameter is absent or empty, all modifiable fields are overwritten. If such
+            fields are non-required and omitted in the request body, their values are emptied.
+        :param project_id: Optional. The ID of the Google Cloud project that the task belongs to.
+        :param validate_only: Optional. The service validates the request without performing any mutations.
+        :param retry: Optional. A retry object used to retry requests. If `None` is specified, requests
+            will not be retried.
+        :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
+            Note that if `retry` is specified, the timeout applies to each individual attempt.
+        :param metadata: Optional. Additional metadata that is provided to the method.
+        """
+        client = self.get_dataplex_catalog_client()
+        _entry_group = (
+            deepcopy(entry_group_configuration)
+            if isinstance(entry_group_configuration, dict)
+            else EntryGroup.to_dict(entry_group_configuration)
+        )
+        _entry_group["name"] = client.entry_group_path(project_id, location, entry_group_id)
+        return client.update_entry_group(
+            request={
+                "entry_group": _entry_group,
+                "update_mask": FieldMask(paths=update_mask) if type(update_mask) is list else update_mask,
+                "validate_only": validate_only,
+            },
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
     @GoogleBaseHook.fallback_to_default_project_id
     def create_task(
         self,
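The new catalog methods on DataplexHook wrap CatalogServiceClient calls and return long-running operations for create, delete and update. A minimal sketch of calling them directly — the project, region and IDs are placeholders:

    from airflow.providers.google.cloud.hooks.dataplex import DataplexHook

    hook = DataplexHook(gcp_conn_id="google_cloud_default")
    operation = hook.create_entry_group(
        project_id="my-project",
        location="us-central1",
        entry_group_id="my-entry-group",
        entry_group_configuration={"description": "Example entry group"},
    )
    # wait_for_operation raises AirflowException if the operation fails.
    entry_group = hook.wait_for_operation(timeout=None, operation=operation)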
diff --git a/providers/src/airflow/providers/google/cloud/links/dataplex.py b/providers/src/airflow/providers/google/cloud/links/dataplex.py
index 80d4b2cb9c07d..e0bd1ae584479 100644
--- a/providers/src/airflow/providers/google/cloud/links/dataplex.py
+++ b/providers/src/airflow/providers/google/cloud/links/dataplex.py
@@ -30,8 +30,10 @@
 DATAPLEX_TASK_LINK = DATAPLEX_BASE_LINK + "/{lake_id}.{task_id};location={region}/jobs?project={project_id}"
 DATAPLEX_TASKS_LINK = DATAPLEX_BASE_LINK + "?project={project_id}&qLake={lake_id}.{region}"
 
-DATAPLEX_LAKE_LINK = (
-    "https://console.cloud.google.com/dataplex/lakes/{lake_id};location={region}?project={project_id}"
+DATAPLEX_LAKE_LINK = "/dataplex/lakes/{lake_id};location={region}?project={project_id}"
+DATAPLEX_CATALOG_ENTRY_GROUPS_LINK = "/dataplex/catalog/entry-groups?project={project_id}"
+DATAPLEX_CATALOG_ENTRY_GROUP_LINK = (
+    "/dataplex/projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}?project={project_id}"
 )
 
 
@@ -103,3 +105,48 @@ def persist(
                 "project_id": task_instance.project_id,
             },
         )
+
+
+class DataplexCatalogEntryGroupLink(BaseGoogleLink):
+    """Helper class for constructing Dataplex Catalog EntryGroup link."""
+
+    name = "Dataplex Catalog EntryGroup"
+    key = "dataplex_catalog_entry_group_key"
+    format_str = DATAPLEX_CATALOG_ENTRY_GROUP_LINK
+
+    @staticmethod
+    def persist(
+        context: Context,
+        task_instance,
+    ):
+        task_instance.xcom_push(
+            context=context,
+            key=DataplexCatalogEntryGroupLink.key,
+            value={
+                "entry_group_id": task_instance.entry_group_id,
+                "location": task_instance.location,
+                "project_id": task_instance.project_id,
+            },
+        )
+
+
+class DataplexCatalogEntryGroupsLink(BaseGoogleLink):
+    """Helper class for constructing Dataplex Catalog EntryGroups link."""
+
+    name = "Dataplex Catalog EntryGroups"
+    key = "dataplex_catalog_entry_groups_key"
+    format_str = DATAPLEX_CATALOG_ENTRY_GROUPS_LINK
+
+    @staticmethod
+    def persist(
+        context: Context,
+        task_instance,
+    ):
+        task_instance.xcom_push(
+            context=context,
+            key=DataplexCatalogEntryGroupsLink.key,
+            value={
+                "location": task_instance.location,
+                "project_id": task_instance.project_id,
+            },
+        )
diff --git a/providers/src/airflow/providers/google/cloud/operators/dataplex.py b/providers/src/airflow/providers/google/cloud/operators/dataplex.py
index 8f7a0d694b9f3..33874063955ff 100644
--- a/providers/src/airflow/providers/google/cloud/operators/dataplex.py
+++ b/providers/src/airflow/providers/google/cloud/operators/dataplex.py
@@ -20,8 +20,11 @@
 
 import time
 from collections.abc import Sequence
+from functools import cached_property
 from typing import TYPE_CHECKING, Any
 
+from google.protobuf.json_format import MessageToDict
+
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.triggers.dataplex import (
     DataplexDataProfileJobTrigger,
@@ -33,15 +36,26 @@
 
     from airflow.utils.context import Context
 
-from google.api_core.exceptions import AlreadyExists, GoogleAPICallError
+from google.api_core.exceptions import AlreadyExists, GoogleAPICallError, NotFound
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
 from google.api_core.retry import Retry, exponential_sleep_generator
-from google.cloud.dataplex_v1.types import Asset, DataScan, DataScanJob, Lake, Task, Zone
+from google.cloud.dataplex_v1.types import (
+    Asset,
+    DataScan,
+    DataScanJob,
+    EntryGroup,
+    Lake,
+    ListEntryGroupsResponse,
+    Task,
+    Zone,
+)
 from googleapiclient.errors import HttpError
 
 from airflow.configuration import conf
 from airflow.providers.google.cloud.hooks.dataplex import AirflowDataQualityScanException, DataplexHook
 from airflow.providers.google.cloud.links.dataplex import (
+    DataplexCatalogEntryGroupLink,
+    DataplexCatalogEntryGroupsLink,
     DataplexLakeLink,
     DataplexTaskLink,
     DataplexTasksLink,
@@ -2093,3 +2107,475 @@ def execute(self, context: Context):
         )
         hook.wait_for_operation(timeout=self.timeout, operation=operation)
         self.log.info("Dataplex asset %s deleted successfully!", self.asset_id)
+
+
+class DataplexCatalogBaseOperator(GoogleCloudBaseOperator):
+    """
+    Base class for all Dataplex Catalog operators.
+
+    :param project_id: Required. The ID of the Google Cloud project where the service is used.
+    :param location: Required. The ID of the Google Cloud region where the service is used.
+    :param gcp_conn_id: Optional. The connection ID to use to connect to Google Cloud.
+    :param retry: Optional. A retry object used to retry requests. If `None` is specified, requests will not
+        be retried.
+    :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
+        Note that if `retry` is specified, the timeout applies to each individual attempt.
+    :param metadata: Optional. Additional metadata that is provided to the method.
+    :param impersonation_chain: Optional. Service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = (
+        "project_id",
+        "location",
+        "gcp_conn_id",
+        "impersonation_chain",
+    )
+
+    def __init__(
+        self,
+        project_id: str,
+        location: str,
+        gcp_conn_id: str = "google_cloud_default",
+        retry: Retry | _MethodDefault = DEFAULT,
+        timeout: float | None = None,
+        metadata: Sequence[tuple[str, str]] = (),
+        impersonation_chain: str | Sequence[str] | None = None,
+        *args,
+        **kwargs,
+    ):
+        super().__init__(*args, **kwargs)
+        self.project_id = project_id
+        self.location = location
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+
+    @cached_property
+    def hook(self) -> DataplexHook:
+        return DataplexHook(
+            gcp_conn_id=self.gcp_conn_id,
+            impersonation_chain=self.impersonation_chain,
+        )
+
+
+class DataplexCatalogCreateEntryGroupOperator(DataplexCatalogBaseOperator):
+    """
+    Create an EntryGroup resource.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:DataplexCatalogCreateEntryGroupOperator`
+
+    :param entry_group_id: Required. EntryGroup identifier.
+    :param entry_group_configuration: Required. EntryGroup configuration.
+        For more details please see API documentation:
+        https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.entryGroups#EntryGroup
+    :param validate_request: Optional. If set, performs request validation, but does not actually
+        execute the request.
+    :param project_id: Required. The ID of the Google Cloud project where the service is used.
+    :param location: Required. The ID of the Google Cloud region where the service is used.
+    :param gcp_conn_id: Optional. The connection ID to use to connect to Google Cloud.
+    :param retry: Optional. A retry object used to retry requests. If `None` is specified, requests will not
+        be retried.
+    :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
+        Note that if `retry` is specified, the timeout applies to each individual attempt.
+    :param metadata: Optional. Additional metadata that is provided to the method.
+    :param impersonation_chain: Optional. Service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = tuple(
+        {"entry_group_id", "entry_group_configuration"} | set(DataplexCatalogBaseOperator.template_fields)
+    )
+    operator_extra_links = (DataplexCatalogEntryGroupLink(),)
+
+    def __init__(
+        self,
+        entry_group_id: str,
+        entry_group_configuration: EntryGroup | dict,
+        validate_request: bool = False,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.entry_group_id = entry_group_id
+        self.entry_group_configuration = entry_group_configuration
+        self.validate_request = validate_request
+
+    def execute(self, context: Context):
+        DataplexCatalogEntryGroupLink.persist(
+            context=context,
+            task_instance=self,
+        )
+
+        if self.validate_request:
+            self.log.info("Validating a Create Dataplex Catalog EntryGroup request.")
+        else:
+            self.log.info("Creating a Dataplex Catalog EntryGroup.")
+
+        try:
+            operation = self.hook.create_entry_group(
+                entry_group_id=self.entry_group_id,
+                entry_group_configuration=self.entry_group_configuration,
+                location=self.location,
+                project_id=self.project_id,
+                validate_only=self.validate_request,
+                retry=self.retry,
+                timeout=self.timeout,
+                metadata=self.metadata,
+            )
+            entry_group = self.hook.wait_for_operation(timeout=self.timeout, operation=operation)
+        except AlreadyExists:
+            entry_group = self.hook.get_entry_group(
+                entry_group_id=self.entry_group_id,
+                location=self.location,
+                project_id=self.project_id,
+            )
+            self.log.info(
+                "Dataplex Catalog EntryGroup %s already exists.",
+                self.entry_group_id,
+            )
+            result = EntryGroup.to_dict(entry_group)
+            return result
+        except Exception as ex:
+            raise AirflowException(ex)
+        else:
+            result = EntryGroup.to_dict(entry_group) if not self.validate_request else None
+
+        if not self.validate_request:
+            self.log.info("Dataplex Catalog EntryGroup %s was successfully created.", self.entry_group_id)
+        return result
+
+
+class DataplexCatalogGetEntryGroupOperator(DataplexCatalogBaseOperator):
+    """
+    Get an EntryGroup resource.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:DataplexCatalogGetEntryGroupOperator`
+
+    :param entry_group_id: Required. EntryGroup identifier.
+    :param project_id: Required. The ID of the Google Cloud project where the service is used.
+    :param location: Required. The ID of the Google Cloud region where the service is used.
+    :param gcp_conn_id: Optional. The connection ID to use to connect to Google Cloud.
+    :param retry: Optional. A retry object used to retry requests. If `None` is specified, requests will not
+        be retried.
+    :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
+        Note that if `retry` is specified, the timeout applies to each individual attempt.
+    :param metadata: Optional. Additional metadata that is provided to the method.
+    :param impersonation_chain: Optional. Service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = tuple(
+        {"entry_group_id"} | set(DataplexCatalogBaseOperator.template_fields)
+    )
+    operator_extra_links = (DataplexCatalogEntryGroupLink(),)
+
+    def __init__(
+        self,
+        entry_group_id: str,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.entry_group_id = entry_group_id
+
+    def execute(self, context: Context):
+        DataplexCatalogEntryGroupLink.persist(
+            context=context,
+            task_instance=self,
+        )
+        self.log.info(
+            "Retrieving Dataplex Catalog EntryGroup %s.",
+            self.entry_group_id,
+        )
+        try:
+            entry_group = self.hook.get_entry_group(
+                entry_group_id=self.entry_group_id,
+                location=self.location,
+                project_id=self.project_id,
+                retry=self.retry,
+                timeout=self.timeout,
+                metadata=self.metadata,
+            )
+        except NotFound as not_found_err:
+            self.log.info(
+                "Dataplex Catalog EntryGroup %s not found.",
+                self.entry_group_id,
+            )
+            raise AirflowException(not_found_err)
+        except Exception as ex:
+            raise AirflowException(ex)
+
+        return EntryGroup.to_dict(entry_group)
+
+
+class DataplexCatalogDeleteEntryGroupOperator(DataplexCatalogBaseOperator):
+    """
+    Delete an EntryGroup resource.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:DataplexCatalogDeleteEntryGroupOperator`
+
+    :param entry_group_id: Required. EntryGroup identifier.
+    :param project_id: Required. The ID of the Google Cloud project where the service is used.
+    :param location: Required. The ID of the Google Cloud region where the service is used.
+    :param gcp_conn_id: Optional. The connection ID to use to connect to Google Cloud.
+    :param retry: Optional. A retry object used to retry requests. If `None` is specified, requests will not
+        be retried.
+    :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
+        Note that if `retry` is specified, the timeout applies to each individual attempt.
+    :param metadata: Optional. Additional metadata that is provided to the method.
+    :param impersonation_chain: Optional. Service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = tuple(
+        {"entry_group_id"} | set(DataplexCatalogBaseOperator.template_fields)
+    )
+
+    def __init__(
+        self,
+        entry_group_id: str,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.entry_group_id = entry_group_id
+
+    def execute(self, context: Context):
+        self.log.info(
+            "Deleting Dataplex Catalog EntryGroup %s.",
+            self.entry_group_id,
+        )
+        try:
+            operation = self.hook.delete_entry_group(
+                entry_group_id=self.entry_group_id,
+                location=self.location,
+                project_id=self.project_id,
+                retry=self.retry,
+                timeout=self.timeout,
+                metadata=self.metadata,
+            )
+            self.hook.wait_for_operation(timeout=self.timeout, operation=operation)
+
+        except NotFound as not_found_err:
+            self.log.info(
+                "Dataplex Catalog EntryGroup %s not found.",
+                self.entry_group_id,
+            )
+            raise AirflowException(not_found_err)
+        except Exception as ex:
+            raise AirflowException(ex)
+        return None
+
+
+class DataplexCatalogListEntryGroupsOperator(DataplexCatalogBaseOperator):
+    """
+    List EntryGroup resources.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:DataplexCatalogListEntryGroupsOperator`
+
+    :param filter_by: Optional. Filter to apply on the list results.
+    :param order_by: Optional. Fields to order the results by.
+    :param page_size: Optional. Maximum number of EntryGroups to return on the page.
+    :param page_token: Optional. Token to retrieve the next page of results.
+    :param project_id: Required. The ID of the Google Cloud project where the service is used.
+    :param location: Required. The ID of the Google Cloud region where the service is used.
+    :param gcp_conn_id: Optional. The connection ID to use to connect to Google Cloud.
+    :param retry: Optional. A retry object used to retry requests. If `None` is specified, requests will not
+        be retried.
+    :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
+        Note that if `retry` is specified, the timeout applies to each individual attempt.
+    :param metadata: Optional. Additional metadata that is provided to the method.
+    :param impersonation_chain: Optional. Service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = tuple(DataplexCatalogBaseOperator.template_fields)
+    operator_extra_links = (DataplexCatalogEntryGroupsLink(),)
+
+    def __init__(
+        self,
+        page_size: int | None = None,
+        page_token: str | None = None,
+        filter_by: str | None = None,
+        order_by: str | None = None,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.page_size = page_size
+        self.page_token = page_token
+        self.filter_by = filter_by
+        self.order_by = order_by
+
+    def execute(self, context: Context):
+        DataplexCatalogEntryGroupsLink.persist(
+            context=context,
+            task_instance=self,
+        )
+        self.log.info(
+            "Listing Dataplex Catalog EntryGroup from location %s.",
+            self.location,
+        )
+        try:
+            entry_group_on_page = self.hook.list_entry_groups(
+                location=self.location,
+                project_id=self.project_id,
+                page_size=self.page_size,
+                page_token=self.page_token,
+                filter_by=self.filter_by,
+                order_by=self.order_by,
+                retry=self.retry,
+                timeout=self.timeout,
+                metadata=self.metadata,
+            )
+            self.log.info("EntryGroup on page: %s", entry_group_on_page)
+            self.xcom_push(
+                context=context,
+                key="entry_group_page",
+                value=ListEntryGroupsResponse.to_dict(entry_group_on_page._response),
+            )
+        except Exception as ex:
+            raise AirflowException(ex)
+
+        # Construct a list of EntryGroups in a readable dict format to return
+        entry_groups_list = [
+            MessageToDict(entry_group._pb, preserving_proto_field_name=True)
+            for entry_group in next(iter(entry_group_on_page.pages)).entry_groups
+        ]
+        return entry_groups_list
+
+
+class DataplexCatalogUpdateEntryGroupOperator(DataplexCatalogBaseOperator):
+    """
+    Update an EntryGroup resource.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:DataplexCatalogUpdateEntryGroupOperator`
+
+    :param project_id: Required. The ID of the Google Cloud project that the task belongs to.
+    :param location: Required. The ID of the Google Cloud region that the task belongs to.
+    :param update_mask: Optional. Names of fields whose values to overwrite on an entry group.
+        If this parameter is absent or empty, all modifiable fields are overwritten. If such
+        fields are non-required and omitted in the request body, their values are emptied.
+    :param entry_group_id: Required. ID of the EntryGroup to update.
+    :param entry_group_configuration: Required. The updated configuration body of the EntryGroup.
+        For more details please see API documentation:
+        https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.entryGroups#EntryGroup
+    :param validate_request: Optional. The service validates the request without performing any mutations.
+    :param retry: Optional. A retry object used to retry requests. If `None` is specified, requests
+        will not be retried.
+    :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
+        Note that if `retry` is specified, the timeout applies to each individual attempt.
+    :param metadata: Optional. Additional metadata that is provided to the method.
+    :param gcp_conn_id: Optional. The connection ID to use when fetching connection info.
+    :param impersonation_chain: Optional. Service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = tuple(
+        {"entry_group_id", "entry_group_configuration", "update_mask"}
+        | set(DataplexCatalogBaseOperator.template_fields)
+    )
+    operator_extra_links = (DataplexCatalogEntryGroupLink(),)
+
+    def __init__(
+        self,
+        entry_group_id: str,
+        entry_group_configuration: dict | EntryGroup,
+        update_mask: list[str] | FieldMask | None = None,
+        validate_request: bool | None = False,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.entry_group_id = entry_group_id
+        self.entry_group_configuration = entry_group_configuration
+        self.update_mask = update_mask
+        self.validate_request = validate_request
+
+    def execute(self, context: Context):
+        DataplexCatalogEntryGroupLink.persist(
+            context=context,
+            task_instance=self,
+        )
+
+        if self.validate_request:
+            self.log.info("Validating an Update Dataplex Catalog EntryGroup request.")
+        else:
+            self.log.info(
+                "Updating Dataplex Catalog EntryGroup %s.",
+                self.entry_group_id,
+            )
+        try:
+            operation = self.hook.update_entry_group(
+                location=self.location,
+                project_id=self.project_id,
+                entry_group_id=self.entry_group_id,
+                entry_group_configuration=self.entry_group_configuration,
+                update_mask=self.update_mask,
+                validate_only=self.validate_request,
+                retry=self.retry,
+                timeout=self.timeout,
+                metadata=self.metadata,
+            )
+            entry_group = self.hook.wait_for_operation(timeout=self.timeout, operation=operation)
+
+        except NotFound as ex:
+            self.log.info("Specified EntryGroup was not found.")
+            raise AirflowException(ex)
+        except Exception as exc:
+            raise AirflowException(exc)
+        else:
+            result = EntryGroup.to_dict(entry_group) if not self.validate_request else None
+
+        if not self.validate_request:
+            self.log.info("EntryGroup %s was successfully updated.", self.entry_group_id)
+        return result
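A hypothetical DAG-level sketch of the new catalog operators — the project, region and entry group ID are placeholders:

    from airflow.providers.google.cloud.operators.dataplex import (
        DataplexCatalogCreateEntryGroupOperator,
        DataplexCatalogDeleteEntryGroupOperator,
    )

    create_entry_group = DataplexCatalogCreateEntryGroupOperator(
        task_id="create_entry_group",
        project_id="my-project",
        location="us-central1",
        entry_group_id="my-entry-group",
        entry_group_configuration={"description": "Example entry group"},
    )

    delete_entry_group = DataplexCatalogDeleteEntryGroupOperator(
        task_id="delete_entry_group",
        project_id="my-project",
        location="us-central1",
        entry_group_id="my-entry-group",
    )

    create_entry_group >> delete_entry_group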
diff --git a/providers/src/airflow/providers/google/provider.yaml b/providers/src/airflow/providers/google/provider.yaml
index 772c8babdeeff..97277806b855c 100644
--- a/providers/src/airflow/providers/google/provider.yaml
+++ b/providers/src/airflow/providers/google/provider.yaml
@@ -131,7 +131,7 @@ dependencies:
   - google-cloud-datacatalog>=3.23.0
   - google-cloud-dataflow-client>=0.8.6
   - google-cloud-dataform>=0.5.0
-  - google-cloud-dataplex>=1.10.0
+  - google-cloud-dataplex>=2.6.0
   - google-cloud-dataproc>=5.12.0
   - google-cloud-dataproc-metastore>=1.12.0
   - google-cloud-dlp>=3.12.0
@@ -1203,6 +1203,8 @@ extra-links:
   - airflow.providers.google.cloud.links.dataplex.DataplexTaskLink
   - airflow.providers.google.cloud.links.dataplex.DataplexTasksLink
   - airflow.providers.google.cloud.links.dataplex.DataplexLakeLink
+  - airflow.providers.google.cloud.links.dataplex.DataplexCatalogEntryGroupLink
+  - airflow.providers.google.cloud.links.dataplex.DataplexCatalogEntryGroupsLink
   - airflow.providers.google.cloud.links.bigquery.BigQueryDatasetLink
   - airflow.providers.google.cloud.links.bigquery.BigQueryTableLink
   - airflow.providers.google.cloud.links.bigquery.BigQueryJobDetailLink
diff --git a/providers/src/airflow/providers/microsoft/azure/hooks/msgraph.py b/providers/src/airflow/providers/microsoft/azure/hooks/msgraph.py
index 0e12696d9f32b..f01fa1c585837 100644
--- a/providers/src/airflow/providers/microsoft/azure/hooks/msgraph.py
+++ b/providers/src/airflow/providers/microsoft/azure/hooks/msgraph.py
@@ -27,7 +27,7 @@
 
 import httpx
 from azure.identity import ClientSecretCredential
-from httpx import Timeout
+from httpx import AsyncHTTPTransport, Timeout
 from kiota_abstractions.api_error import APIError
 from kiota_abstractions.method import Method
 from kiota_abstractions.request_information import RequestInformation
@@ -208,9 +208,9 @@ def format_no_proxy_url(url: str) -> str:
     def to_httpx_proxies(cls, proxies: dict) -> dict:
         proxies = proxies.copy()
         if proxies.get("http"):
-            proxies["http://"] = proxies.pop("http")
+            proxies["http://"] = AsyncHTTPTransport(proxy=proxies.pop("http"))
         if proxies.get("https"):
-            proxies["https://"] = proxies.pop("https")
+            proxies["https://"] = AsyncHTTPTransport(proxy=proxies.pop("https"))
         if proxies.get("no"):
             for url in proxies.pop("no", "").split(","):
                 proxies[cls.format_no_proxy_url(url.strip())] = None
@@ -288,7 +288,7 @@ def get_conn(self) -> RequestAdapter:
             http_client = GraphClientFactory.create_with_default_middleware(
                 api_version=api_version,  # type: ignore
                 client=httpx.AsyncClient(
-                    proxy=httpx_proxies,  # type: ignore
+                    mounts=httpx_proxies,
                     timeout=Timeout(timeout=self.timeout),
                     verify=verify,
                     trust_env=trust_env,
diff --git a/providers/src/airflow/providers/microsoft/azure/sensors/msgraph.py b/providers/src/airflow/providers/microsoft/azure/sensors/msgraph.py
index 42c5852900567..6b5622e2d7ae2 100644
--- a/providers/src/airflow/providers/microsoft/azure/sensors/msgraph.py
+++ b/providers/src/airflow/providers/microsoft/azure/sensors/msgraph.py
@@ -129,6 +129,7 @@ def execute(self, context: Context):
     def retry_execute(
         self,
         context: Context,
+        **kwargs,
     ) -> Any:
         self.execute(context=context)
 
diff --git a/providers/tests/amazon/aws/operators/test_batch.py b/providers/tests/amazon/aws/operators/test_batch.py
index 0c14c256edba9..c1b1d847b7d91 100644
--- a/providers/tests/amazon/aws/operators/test_batch.py
+++ b/providers/tests/amazon/aws/operators/test_batch.py
@@ -70,6 +70,7 @@ def setup_method(self, _, get_client_type_mock):
             aws_conn_id="airflow_test",
             region_name="eu-west-1",
             tags={},
+            submit_job_timeout=3600,
         )
         self.client_mock = self.get_client_type_mock.return_value
         # We're mocking all actual AWS calls and don't need a connection. This
@@ -109,6 +110,7 @@ def test_init(self):
         assert self.batch.hook.client == self.client_mock
         assert self.batch.tags == {}
         assert self.batch.wait_for_completion is True
+        assert self.batch.submit_job_timeout == 3600
 
         self.get_client_type_mock.assert_called_once_with(region_name="eu-west-1")
 
@@ -141,6 +143,7 @@ def test_init_defaults(self):
         assert issubclass(type(batch_job.hook.client), botocore.client.BaseClient)
         assert batch_job.tags == {}
         assert batch_job.wait_for_completion is True
+        assert batch_job.submit_job_timeout is None
 
     def test_template_fields_overrides(self):
         assert self.batch.template_fields == (
@@ -181,6 +184,7 @@ def test_execute_without_failures(self, check_mock, wait_mock, job_description_m
             parameters={},
             retryStrategy={"attempts": 1},
             tags={},
+            timeout={"attemptDurationSeconds": 3600},
         )
 
         assert self.batch.job_id == JOB_ID
@@ -205,6 +209,7 @@ def test_execute_with_failures(self):
             parameters={},
             retryStrategy={"attempts": 1},
             tags={},
+            timeout={"attemptDurationSeconds": 3600},
         )
 
     @mock.patch.object(BatchClientHook, "get_job_description")
@@ -261,6 +266,7 @@ def test_execute_with_ecs_overrides(self, check_mock, wait_mock, job_description
             parameters={},
             retryStrategy={"attempts": 1},
             tags={},
+            timeout={"attemptDurationSeconds": 3600},
         )
 
     @mock.patch.object(BatchClientHook, "get_job_description")
@@ -359,6 +365,7 @@ def test_execute_with_eks_overrides(self, check_mock, wait_mock, job_description
             parameters={},
             retryStrategy={"attempts": 1},
             tags={},
+            timeout={"attemptDurationSeconds": 3600},
         )
 
     @mock.patch.object(BatchClientHook, "check_job_success")
diff --git a/providers/tests/exasol/hooks/test_exasol.py b/providers/tests/exasol/hooks/test_exasol.py
index 1c58431ea693f..9882622aef5a4 100644
--- a/providers/tests/exasol/hooks/test_exasol.py
+++ b/providers/tests/exasol/hooks/test_exasol.py
@@ -65,7 +65,7 @@ def test_get_conn_extra_args(self, mock_pyexasol):
 
 class TestExasolHook:
     def setup_method(self):
-        self.cur = mock.MagicMock(rowcount=0)
+        self.cur = mock.MagicMock(rowcount=lambda: 0)
         self.conn = mock.MagicMock()
         self.conn.execute.return_value = self.cur
         conn = self.conn
diff --git a/providers/tests/exasol/hooks/test_sql.py b/providers/tests/exasol/hooks/test_sql.py
index 4864c83790297..4e57ebe4b6168 100644
--- a/providers/tests/exasol/hooks/test_sql.py
+++ b/providers/tests/exasol/hooks/test_sql.py
@@ -260,7 +260,7 @@ def test_query(
         cursors = []
         for index in range(len(cursor_descriptions)):
             cur = mock.MagicMock(
-                rowcount=len(cursor_results[index]),
+                rowcount=lambda: len(cursor_results[index]),
             )
             cur.columns.return_value = get_columns(cursor_descriptions[index])
             cur.fetchall.return_value = cursor_results[index]
@@ -287,7 +287,7 @@ def test_query(
 )
 def test_no_query(empty_statement):
     dbapi_hook = ExasolHookForTests()
-    dbapi_hook.get_conn.return_value.cursor.rowcount = 0
+    dbapi_hook.get_conn.return_value.cursor.rowcount = lambda: 0
     with pytest.raises(ValueError) as err:
         dbapi_hook.run(sql=empty_statement)
     assert err.value.args[0] == "List of SQL statements is empty"
diff --git a/providers/tests/fab/auth_manager/conftest.py b/providers/tests/fab/auth_manager/conftest.py
index 9c61f7ab2dccc..4cb4b84a24cde 100644
--- a/providers/tests/fab/auth_manager/conftest.py
+++ b/providers/tests/fab/auth_manager/conftest.py
@@ -95,7 +95,7 @@ def _config_bundle(path_to_parse: Path | str):
                 "kwargs": {"path": str(path_to_parse), "refresh_interval": 0},
             }
         ]
-        with conf_vars({("dag_bundles", "backends"): json.dumps(bundle_config)}):
+        with conf_vars({("dag_bundles", "config_list"): json.dumps(bundle_config)}):
             yield
 
     return _config_bundle
diff --git a/providers/tests/google/cloud/hooks/test_dataplex.py b/providers/tests/google/cloud/hooks/test_dataplex.py
index 8f1f5d9866619..4a4f550eca67b 100644
--- a/providers/tests/google/cloud/hooks/test_dataplex.py
+++ b/providers/tests/google/cloud/hooks/test_dataplex.py
@@ -19,6 +19,7 @@
 from unittest import mock
 
 from google.api_core.gapic_v1.method import DEFAULT
+from google.protobuf.field_mask_pb2 import FieldMask
 
 from airflow.providers.google.cloud.operators.dataplex import DataplexHook
 
@@ -30,6 +31,9 @@
 DATAPLEX_HOOK_DS_CLIENT = (
     "airflow.providers.google.cloud.hooks.dataplex.DataplexHook.get_dataplex_data_scan_client"
 )
+DATAPLEX_CATALOG_HOOK_CLIENT = (
+    "airflow.providers.google.cloud.hooks.dataplex.DataplexHook.get_dataplex_catalog_client"
+)
 
 PROJECT_ID = "project-id"
 REGION = "region"
@@ -44,12 +48,21 @@
 ASSET_ID = "test_asset_id"
 ZONE_ID = "test_zone_id"
 JOB_ID = "job_id"
+
+LOCATION = "us-central1"
+ENTRY_GROUP_ID = "entry-group-id"
+ENTRY_GROUP_BODY = {"description": "Some descr"}
+ENTRY_GROUP_UPDATED_BODY = {"description": "Some new descr"}
+UPDATE_MASK = ["description"]
+
+COMMON_PARENT = f"projects/{PROJECT_ID}/locations/{LOCATION}"
 DATA_SCAN_NAME = f"projects/{PROJECT_ID}/locations/{REGION}/dataScans/{DATA_SCAN_ID}"
 DATA_SCAN_JOB_NAME = f"projects/{PROJECT_ID}/locations/{REGION}/dataScans/{DATA_SCAN_ID}/jobs/{JOB_ID}"
 ZONE_NAME = f"projects/{PROJECT_ID}/locations/{REGION}/lakes/{LAKE_ID}"
 ZONE_PARENT = f"projects/{PROJECT_ID}/locations/{REGION}/lakes/{LAKE_ID}/zones/{ZONE_ID}"
 ASSET_PARENT = f"projects/{PROJECT_ID}/locations/{REGION}/lakes/{LAKE_ID}/zones/{ZONE_ID}/assets/{ASSET_ID}"
 DATASCAN_PARENT = f"projects/{PROJECT_ID}/locations/{REGION}"
+ENTRY_GROUP_PARENT = f"projects/{PROJECT_ID}/locations/{LOCATION}/entryGroup/{ENTRY_GROUP_ID}"
 
 
 class TestDataplexHook:
@@ -311,3 +324,104 @@ def test_get_data_scan(self, mock_client):
             timeout=None,
             metadata=(),
         )
+
+    @mock.patch(DATAPLEX_CATALOG_HOOK_CLIENT)
+    def test_create_entry_group(self, mock_client):
+        mock_common_location_path = mock_client.return_value.common_location_path
+        mock_common_location_path.return_value = COMMON_PARENT
+        self.hook.create_entry_group(
+            project_id=PROJECT_ID,
+            location=LOCATION,
+            entry_group_id=ENTRY_GROUP_ID,
+            entry_group_configuration=ENTRY_GROUP_BODY,
+            validate_only=False,
+        )
+        mock_client.return_value.create_entry_group.assert_called_once_with(
+            request=dict(
+                parent=COMMON_PARENT,
+                entry_group_id=ENTRY_GROUP_ID,
+                entry_group=ENTRY_GROUP_BODY,
+                validate_only=False,
+            ),
+            retry=DEFAULT,
+            timeout=None,
+            metadata=(),
+        )
+
+    @mock.patch(DATAPLEX_CATALOG_HOOK_CLIENT)
+    def test_delete_entry_group(self, mock_client):
+        mock_common_location_path = mock_client.return_value.entry_group_path
+        mock_common_location_path.return_value = ENTRY_GROUP_PARENT
+        self.hook.delete_entry_group(project_id=PROJECT_ID, location=LOCATION, entry_group_id=ENTRY_GROUP_ID)
+
+        mock_client.return_value.delete_entry_group.assert_called_once_with(
+            request=dict(
+                name=ENTRY_GROUP_PARENT,
+            ),
+            retry=DEFAULT,
+            timeout=None,
+            metadata=(),
+        )
+
+    @mock.patch(DATAPLEX_CATALOG_HOOK_CLIENT)
+    def test_list_entry_groups(self, mock_client):
+        mock_common_location_path = mock_client.return_value.common_location_path
+        mock_common_location_path.return_value = COMMON_PARENT
+        self.hook.list_entry_groups(
+            project_id=PROJECT_ID,
+            location=LOCATION,
+            order_by="name",
+            page_size=2,
+            filter_by="'description' = 'Some descr'",
+        )
+        mock_client.return_value.list_entry_groups.assert_called_once_with(
+            request=dict(
+                parent=COMMON_PARENT,
+                page_size=2,
+                page_token=None,
+                filter="'description' = 'Some descr'",
+                order_by="name",
+            ),
+            retry=DEFAULT,
+            timeout=None,
+            metadata=(),
+        )
+
+    @mock.patch(DATAPLEX_CATALOG_HOOK_CLIENT)
+    def test_get_entry_group(self, mock_client):
+        mock_common_location_path = mock_client.return_value.entry_group_path
+        mock_common_location_path.return_value = ENTRY_GROUP_PARENT
+        self.hook.get_entry_group(project_id=PROJECT_ID, location=LOCATION, entry_group_id=ENTRY_GROUP_ID)
+
+        mock_client.return_value.get_entry_group.assert_called_once_with(
+            request=dict(
+                name=ENTRY_GROUP_PARENT,
+            ),
+            retry=DEFAULT,
+            timeout=None,
+            metadata=(),
+        )
+
+    @mock.patch(DATAPLEX_CATALOG_HOOK_CLIENT)
+    def test_update_entry_group(self, mock_client):
+        mock_common_location_path = mock_client.return_value.entry_group_path
+        mock_common_location_path.return_value = ENTRY_GROUP_PARENT
+        self.hook.update_entry_group(
+            project_id=PROJECT_ID,
+            location=LOCATION,
+            entry_group_id=ENTRY_GROUP_ID,
+            entry_group_configuration=ENTRY_GROUP_UPDATED_BODY,
+            update_mask=UPDATE_MASK,
+            validate_only=False,
+        )
+
+        mock_client.return_value.update_entry_group.assert_called_once_with(
+            request=dict(
+                entry_group={**ENTRY_GROUP_UPDATED_BODY, "name": ENTRY_GROUP_PARENT},
+                update_mask=FieldMask(paths=UPDATE_MASK),
+                validate_only=False,
+            ),
+            retry=DEFAULT,
+            timeout=None,
+            metadata=(),
+        )
diff --git a/providers/tests/google/cloud/links/test_dataplex.py b/providers/tests/google/cloud/links/test_dataplex.py
new file mode 100644
index 0000000000000..05661c84bd3ee
--- /dev/null
+++ b/providers/tests/google/cloud/links/test_dataplex.py
@@ -0,0 +1,168 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import pytest
+
+from airflow.providers.google.cloud.links.dataplex import (
+    DataplexCatalogEntryGroupLink,
+    DataplexCatalogEntryGroupsLink,
+    DataplexLakeLink,
+    DataplexTaskLink,
+    DataplexTasksLink,
+)
+from airflow.providers.google.cloud.operators.dataplex import (
+    DataplexCatalogCreateEntryGroupOperator,
+    DataplexCatalogGetEntryGroupOperator,
+    DataplexCreateLakeOperator,
+    DataplexCreateTaskOperator,
+    DataplexListTasksOperator,
+)
+
+TEST_LOCATION = "test-location"
+TEST_PROJECT_ID = "test-project-id"
+TEST_ENTRY_GROUP_ID = "test-entry-group-id"
+TEST_ENTRY_GROUP_ID_BODY = {"description": "some description"}
+TEST_ENTRY_GROUPS_ID = "test-entry-groups-id"
+TEST_TASK_ID = "test-task-id"
+TEST_TASKS_ID = "test-tasks-id"
+TEST_LAKE_ID = "test-lake-id"
+TEST_LAKE_BODY = {"name": "some_name"}
+
+DATAPLEX_BASE_LINK = "https://console.cloud.google.com/dataplex/"
+EXPECTED_DATAPLEX_CATALOG_ENTRY_GROUP_LINK = (
+    DATAPLEX_BASE_LINK
+    + f"projects/{TEST_PROJECT_ID}/locations/{TEST_LOCATION}/entryGroups/{TEST_ENTRY_GROUP_ID}?project={TEST_PROJECT_ID}"
+)
+EXPECTED_DATAPLEX_CATALOG_ENTRY_GROUPS_LINK = (
+    DATAPLEX_BASE_LINK + f"catalog/entry-groups?project={TEST_PROJECT_ID}"
+)
+DATAPLEX_LAKE_LINK = (
+    DATAPLEX_BASE_LINK + f"lakes/{TEST_LAKE_ID};location={TEST_LOCATION}?project={TEST_PROJECT_ID}"
+)
+EXPECTED_DATAPLEX_TASK_LINK = (
+    DATAPLEX_BASE_LINK
+    + f"process/tasks/{TEST_LAKE_ID}.{TEST_TASK_ID};location={TEST_LOCATION}/jobs?project={TEST_PROJECT_ID}"
+)
+EXPECTED_DATAPLEX_TASKS_LINK = (
+    DATAPLEX_BASE_LINK + f"process/tasks?project={TEST_PROJECT_ID}&qLake={TEST_LAKE_ID}.{TEST_LOCATION}"
+)
+
+
+class TestDataplexTaskLink:
+    @pytest.mark.db_test
+    def test_get_link(self, create_task_instance_of_operator, session):
+        expected_url = EXPECTED_DATAPLEX_TASK_LINK
+        link = DataplexTaskLink()
+        ti = create_task_instance_of_operator(
+            DataplexCreateTaskOperator,
+            dag_id="test_link_dag",
+            task_id="test_link_task",
+            region=TEST_LOCATION,
+            lake_id=TEST_LAKE_ID,
+            project_id=TEST_PROJECT_ID,
+            body=TEST_LAKE_BODY,
+            dataplex_task_id=TEST_TASK_ID,
+        )
+        session.add(ti)
+        session.commit()
+        link.persist(context={"ti": ti}, task_instance=ti.task)
+        actual_url = link.get_link(operator=ti.task, ti_key=ti.key)
+        assert actual_url == expected_url
+
+
+class TestDataplexTasksLink:
+    @pytest.mark.db_test
+    def test_get_link(self, create_task_instance_of_operator, session):
+        expected_url = EXPECTED_DATAPLEX_TASKS_LINK
+        link = DataplexTasksLink()
+        ti = create_task_instance_of_operator(
+            DataplexListTasksOperator,
+            dag_id="test_link_dag",
+            task_id="test_link_task",
+            region=TEST_LOCATION,
+            lake_id=TEST_LAKE_ID,
+            project_id=TEST_PROJECT_ID,
+        )
+        session.add(ti)
+        session.commit()
+        link.persist(context={"ti": ti}, task_instance=ti.task)
+        actual_url = link.get_link(operator=ti.task, ti_key=ti.key)
+        assert actual_url == expected_url
+
+
+class TestDataplexLakeLink:
+    @pytest.mark.db_test
+    def test_get_link(self, create_task_instance_of_operator, session):
+        expected_url = DATAPLEX_LAKE_LINK
+        link = DataplexLakeLink()
+        ti = create_task_instance_of_operator(
+            DataplexCreateLakeOperator,
+            dag_id="test_link_dag",
+            task_id="test_link_task",
+            region=TEST_LOCATION,
+            lake_id=TEST_LAKE_ID,
+            project_id=TEST_PROJECT_ID,
+            body={},
+        )
+        session.add(ti)
+        session.commit()
+        link.persist(context={"ti": ti}, task_instance=ti.task)
+        actual_url = link.get_link(operator=ti.task, ti_key=ti.key)
+        assert actual_url == expected_url
+
+
+class TestDataplexCatalogEntryGroupLink:
+    @pytest.mark.db_test
+    def test_get_link(self, create_task_instance_of_operator, session):
+        expected_url = EXPECTED_DATAPLEX_CATALOG_ENTRY_GROUP_LINK
+        link = DataplexCatalogEntryGroupLink()
+        ti = create_task_instance_of_operator(
+            DataplexCatalogGetEntryGroupOperator,
+            dag_id="test_link_dag",
+            task_id="test_link_task",
+            location=TEST_LOCATION,
+            entry_group_id=TEST_ENTRY_GROUP_ID,
+            project_id=TEST_PROJECT_ID,
+        )
+        session.add(ti)
+        session.commit()
+        link.persist(context={"ti": ti}, task_instance=ti.task)
+        actual_url = link.get_link(operator=ti.task, ti_key=ti.key)
+        assert actual_url == expected_url
+
+
+class TestDataplexCatalogEntryGroupsLink:
+    @pytest.mark.db_test
+    def test_get_link(self, create_task_instance_of_operator, session):
+        expected_url = EXPECTED_DATAPLEX_CATALOG_ENTRY_GROUPS_LINK
+        link = DataplexCatalogEntryGroupsLink()
+        ti = create_task_instance_of_operator(
+            DataplexCatalogCreateEntryGroupOperator,
+            dag_id="test_link_dag",
+            task_id="test_link_task",
+            location=TEST_LOCATION,
+            entry_group_id=TEST_ENTRY_GROUP_ID,
+            entry_group_configuration=TEST_ENTRY_GROUP_ID_BODY,
+            project_id=TEST_PROJECT_ID,
+        )
+        session.add(ti)
+        session.commit()
+        link.persist(context={"ti": ti}, task_instance=ti.task)
+        actual_url = link.get_link(operator=ti.task, ti_key=ti.key)
+        assert actual_url == expected_url
diff --git a/providers/tests/google/cloud/operators/test_dataplex.py b/providers/tests/google/cloud/operators/test_dataplex.py
index 1eec9008e2c10..2aff961623bb3 100644
--- a/providers/tests/google/cloud/operators/test_dataplex.py
+++ b/providers/tests/google/cloud/operators/test_dataplex.py
@@ -20,9 +20,15 @@
 
 import pytest
 from google.api_core.gapic_v1.method import DEFAULT
+from google.cloud.dataplex_v1.services.catalog_service.pagers import ListEntryGroupsPager
+from google.cloud.dataplex_v1.types import ListEntryGroupsRequest, ListEntryGroupsResponse
 
 from airflow.exceptions import TaskDeferred
 from airflow.providers.google.cloud.operators.dataplex import (
+    DataplexCatalogCreateEntryGroupOperator,
+    DataplexCatalogDeleteEntryGroupOperator,
+    DataplexCatalogGetEntryGroupOperator,
+    DataplexCatalogListEntryGroupsOperator,
     DataplexCreateAssetOperator,
     DataplexCreateLakeOperator,
     DataplexCreateOrUpdateDataProfileScanOperator,
@@ -51,6 +57,7 @@
 DATASCANJOB_STR = "airflow.providers.google.cloud.operators.dataplex.DataScanJob"
 ZONE_STR = "airflow.providers.google.cloud.operators.dataplex.Zone"
 ASSET_STR = "airflow.providers.google.cloud.operators.dataplex.Asset"
+ENTRY_GROUP_STR = "airflow.providers.google.cloud.operators.dataplex.EntryGroup"
 
 PROJECT_ID = "project-id"
 REGION = "region"
@@ -72,6 +79,7 @@
 ASSET_ID = "test_asset_id"
 ZONE_ID = "test_zone_id"
 JOB_ID = "test_job_id"
+ENTRY_GROUP_NAME = "test_entry_group"
 
 
 class TestDataplexCreateTaskOperator:
@@ -734,3 +742,138 @@ def test_execute(self, hook_mock):
             timeout=None,
             metadata=(),
         )
+
+
+class TestDataplexCatalogCreateEntryGroupOperator:
+    @mock.patch(ENTRY_GROUP_STR)
+    @mock.patch(HOOK_STR)
+    def test_execute(self, hook_mock, entry_group_mock):
+        op = DataplexCatalogCreateEntryGroupOperator(
+            task_id="create_task",
+            project_id=PROJECT_ID,
+            location=REGION,
+            entry_group_id=ENTRY_GROUP_NAME,
+            entry_group_configuration=BODY,
+            validate_request=None,
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        entry_group_mock.return_value.to_dict.return_value = None
+        hook_mock.return_value.wait_for_operation.return_value = None
+        op.execute(context=mock.MagicMock())
+        hook_mock.assert_called_once_with(
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        hook_mock.return_value.create_entry_group.assert_called_once_with(
+            entry_group_id=ENTRY_GROUP_NAME,
+            entry_group_configuration=BODY,
+            location=REGION,
+            project_id=PROJECT_ID,
+            validate_only=None,
+            retry=DEFAULT,
+            timeout=None,
+            metadata=(),
+        )
+
+
+class TestDataplexCatalogGetEntryGroupOperator:
+    @mock.patch(ENTRY_GROUP_STR)
+    @mock.patch(HOOK_STR)
+    def test_execute(self, hook_mock, entry_group_mock):
+        op = DataplexCatalogGetEntryGroupOperator(
+            project_id=PROJECT_ID,
+            location=REGION,
+            entry_group_id=ENTRY_GROUP_NAME,
+            task_id="get_task",
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        entry_group_mock.return_value.to_dict.return_value = None
+        op.execute(context=mock.MagicMock())
+        hook_mock.assert_called_once_with(
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        hook_mock.return_value.get_entry_group.assert_called_once_with(
+            project_id=PROJECT_ID,
+            location=REGION,
+            entry_group_id=ENTRY_GROUP_NAME,
+            retry=DEFAULT,
+            timeout=None,
+            metadata=(),
+        )
+
+
+class TestDataplexCatalogDeleteEntryGroupOperator:
+    @mock.patch(HOOK_STR)
+    def test_execute(self, hook_mock):
+        op = DataplexCatalogDeleteEntryGroupOperator(
+            project_id=PROJECT_ID,
+            location=REGION,
+            entry_group_id=ENTRY_GROUP_NAME,
+            task_id="delete_task",
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        hook_mock.return_value.wait_for_operation.return_value = None
+        op.execute(context=mock.MagicMock())
+        hook_mock.assert_called_once_with(
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        hook_mock.return_value.delete_entry_group.assert_called_once_with(
+            project_id=PROJECT_ID,
+            location=REGION,
+            entry_group_id=ENTRY_GROUP_NAME,
+            retry=DEFAULT,
+            timeout=None,
+            metadata=(),
+        )
+
+
+class TestDataplexCatalogListEntryGroupsOperator:
+    @mock.patch(ENTRY_GROUP_STR)
+    @mock.patch(HOOK_STR)
+    def test_execute(self, hook_mock, entry_group_mock):
+        op = DataplexCatalogListEntryGroupsOperator(
+            project_id=PROJECT_ID,
+            location=REGION,
+            task_id="list_task",
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        hook_mock.return_value.list_entry_groups.return_value = ListEntryGroupsPager(
+            response=(
+                ListEntryGroupsResponse(
+                    entry_groups=[
+                        {
+                            "name": "aaa",
+                            "description": "Test Entry Group 1",
+                            "display_name": "Entry Group One",
+                        }
+                    ]
+                )
+            ),
+            method=mock.MagicMock(),
+            request=ListEntryGroupsRequest(parent=""),
+        )
+
+        entry_group_mock.return_value.to_dict.return_value = None
+        op.execute(context=mock.MagicMock())
+        hook_mock.assert_called_once_with(
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+
+        hook_mock.return_value.list_entry_groups.assert_called_once_with(
+            project_id=PROJECT_ID,
+            location=REGION,
+            page_size=None,
+            page_token=None,
+            filter_by=None,
+            order_by=None,
+            retry=DEFAULT,
+            timeout=None,
+            metadata=(),
+        )
diff --git a/providers/tests/microsoft/azure/resources/status.json b/providers/tests/microsoft/azure/resources/status.json
index 6bff9e29afb41..bfece9ed6a94b 100644
--- a/providers/tests/microsoft/azure/resources/status.json
+++ b/providers/tests/microsoft/azure/resources/status.json
@@ -1 +1 @@
-{"id": "0a1b1bf3-37de-48f7-9863-ed4cda97a9ef", "createdDateTime": "2024-04-10T15:05:17.357", "status": "Succeeded"}
+[{"id": "0a1b1bf3-37de-48f7-9863-ed4cda97a9ef", "createdDateTime": "2024-04-10T15:05:17.357", "status": "InProgress"},{"id": "0a1b1bf3-37de-48f7-9863-ed4cda97a9ef", "createdDateTime": "2024-04-10T15:05:17.357", "status": "Succeeded"}]
diff --git a/providers/tests/microsoft/azure/sensors/test_msgraph.py b/providers/tests/microsoft/azure/sensors/test_msgraph.py
index 9ad03ccf17020..4240a88e4039b 100644
--- a/providers/tests/microsoft/azure/sensors/test_msgraph.py
+++ b/providers/tests/microsoft/azure/sensors/test_msgraph.py
@@ -17,6 +17,7 @@
 from __future__ import annotations
 
 import json
+from datetime import datetime
 
 import pytest
 
@@ -31,7 +32,7 @@
 class TestMSGraphSensor(Base):
     def test_execute(self):
         status = load_json("resources", "status.json")
-        response = mock_json_response(200, status)
+        response = mock_json_response(200, *status)
 
         with self.patch_hook_and_request_adapter(response):
             sensor = MSGraphSensor(
@@ -40,6 +41,7 @@ def test_execute(self):
                 url="myorg/admin/workspaces/scanStatus/{scanId}",
                 path_parameters={"scanId": "0a1b1bf3-37de-48f7-9863-ed4cda97a9ef"},
                 result_processor=lambda context, result: result["id"],
+                retry_delay=5,
                 timeout=350.0,
             )
 
@@ -48,16 +50,22 @@ def test_execute(self):
             assert sensor.path_parameters == {"scanId": "0a1b1bf3-37de-48f7-9863-ed4cda97a9ef"}
             assert isinstance(results, str)
             assert results == "0a1b1bf3-37de-48f7-9863-ed4cda97a9ef"
-            assert len(events) == 1
+            assert len(events) == 3
             assert isinstance(events[0], TriggerEvent)
             assert events[0].payload["status"] == "success"
             assert events[0].payload["type"] == "builtins.dict"
-            assert events[0].payload["response"] == json.dumps(status)
+            assert events[0].payload["response"] == json.dumps(status[0])
+            assert isinstance(events[1], TriggerEvent)
+            assert isinstance(events[1].payload, datetime)
+            assert isinstance(events[2], TriggerEvent)
+            assert events[2].payload["status"] == "success"
+            assert events[2].payload["type"] == "builtins.dict"
+            assert events[2].payload["response"] == json.dumps(status[1])
 
     @pytest.mark.skipif(not AIRFLOW_V_2_10_PLUS, reason="Lambda parameters works in Airflow >= 2.10.0")
     def test_execute_with_lambda_parameter(self):
         status = load_json("resources", "status.json")
-        response = mock_json_response(200, status)
+        response = mock_json_response(200, *status)
 
         with self.patch_hook_and_request_adapter(response):
             sensor = MSGraphSensor(
@@ -66,6 +74,7 @@ def test_execute_with_lambda_parameter(self):
                 url="myorg/admin/workspaces/scanStatus/{scanId}",
                 path_parameters=lambda context, jinja_env: {"scanId": "0a1b1bf3-37de-48f7-9863-ed4cda97a9ef"},
                 result_processor=lambda context, result: result["id"],
+                retry_delay=5,
                 timeout=350.0,
             )
 
@@ -74,11 +83,17 @@ def test_execute_with_lambda_parameter(self):
             assert sensor.path_parameters == {"scanId": "0a1b1bf3-37de-48f7-9863-ed4cda97a9ef"}
             assert isinstance(results, str)
             assert results == "0a1b1bf3-37de-48f7-9863-ed4cda97a9ef"
-            assert len(events) == 1
+            assert len(events) == 3
             assert isinstance(events[0], TriggerEvent)
             assert events[0].payload["status"] == "success"
             assert events[0].payload["type"] == "builtins.dict"
-            assert events[0].payload["response"] == json.dumps(status)
+            assert events[0].payload["response"] == json.dumps(status[0])
+            assert isinstance(events[1], TriggerEvent)
+            assert isinstance(events[1].payload, datetime)
+            assert isinstance(events[2], TriggerEvent)
+            assert events[2].payload["status"] == "success"
+            assert events[2].payload["type"] == "builtins.dict"
+            assert events[2].payload["response"] == json.dumps(status[1])
 
     def test_template_fields(self):
         sensor = MSGraphSensor(
diff --git a/providers/tests/microsoft/conftest.py b/providers/tests/microsoft/conftest.py
index d875096402b8b..d4a65075f5061 100644
--- a/providers/tests/microsoft/conftest.py
+++ b/providers/tests/microsoft/conftest.py
@@ -149,8 +149,10 @@ def xcom_pull(
             run_id: str | None = None,
         ) -> Any:
             if map_indexes:
-                return values.get(f"{task_ids or self.task_id}_{dag_id or self.dag_id}_{key}_{map_indexes}")
-            return values.get(f"{task_ids or self.task_id}_{dag_id or self.dag_id}_{key}")
+                return values.get(
+                    f"{task_ids or self.task_id}_{dag_id or self.dag_id}_{key}_{map_indexes}", default
+                )
+            return values.get(f"{task_ids or self.task_id}_{dag_id or self.dag_id}_{key}", default)
 
         def xcom_push(self, key: str, value: Any, session: Session = NEW_SESSION, **kwargs) -> None:
             values[f"{self.task_id}_{self.dag_id}_{key}_{self.map_index}"] = value
diff --git a/providers/tests/system/google/cloud/dataplex/example_dataplex_catalog.py b/providers/tests/system/google/cloud/dataplex/example_dataplex_catalog.py
new file mode 100644
index 0000000000000..8eec8a317d640
--- /dev/null
+++ b/providers/tests/system/google/cloud/dataplex/example_dataplex_catalog.py
@@ -0,0 +1,118 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example Airflow DAG that shows how to use Dataplex Catalog.
+"""
+
+from __future__ import annotations
+
+import datetime
+import os
+
+from airflow.models.dag import DAG
+from airflow.providers.google.cloud.operators.dataplex import (
+    DataplexCatalogCreateEntryGroupOperator,
+    DataplexCatalogDeleteEntryGroupOperator,
+    DataplexCatalogGetEntryGroupOperator,
+    DataplexCatalogListEntryGroupsOperator,
+    DataplexCatalogUpdateEntryGroupOperator,
+)
+from airflow.utils.trigger_rule import TriggerRule
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+DAG_ID = "dataplex_catalog"
+GCP_LOCATION = "us-central1"
+
+ENTRY_GROUP_NAME = f"{DAG_ID}_entry_group_{ENV_ID}".replace("_", "-")
+# [START howto_dataplex_entry_group_configuration]
+ENTRY_GROUP_BODY = {"display_name": "Display Name", "description": "Some description"}
+# [END howto_dataplex_entry_group_configuration]
+
+with DAG(
+    DAG_ID,
+    start_date=datetime.datetime(2021, 1, 1),
+    schedule="@once",
+    tags=["example", "dataplex_catalog"],
+) as dag:
+    # [START howto_operator_dataplex_catalog_create_entry_group]
+    create_entry_group = DataplexCatalogCreateEntryGroupOperator(
+        task_id="create_entry_group",
+        project_id=PROJECT_ID,
+        location=GCP_LOCATION,
+        entry_group_id=ENTRY_GROUP_NAME,
+        entry_group_configuration=ENTRY_GROUP_BODY,
+        validate_request=False,
+    )
+    # [END howto_operator_dataplex_catalog_create_entry_group]
+
+    # [START howto_operator_dataplex_catalog_get_entry_group]
+    get_entry_group = DataplexCatalogGetEntryGroupOperator(
+        task_id="get_entry_group",
+        project_id=PROJECT_ID,
+        location=GCP_LOCATION,
+        entry_group_id=ENTRY_GROUP_NAME,
+    )
+    # [END howto_operator_dataplex_catalog_get_entry_group]
+
+    # [START howto_operator_dataplex_catalog_list_entry_groups]
+    list_entry_group = DataplexCatalogListEntryGroupsOperator(
+        task_id="list_entry_group",
+        project_id=PROJECT_ID,
+        location=GCP_LOCATION,
+        order_by="name",
+        filter_by='display_name = "Display Name"',
+    )
+    # [END howto_operator_dataplex_catalog_list_entry_groups]
+
+    # [START howto_operator_dataplex_catalog_update_entry_group]
+    update_entry_group = DataplexCatalogUpdateEntryGroupOperator(
+        task_id="update_entry_group",
+        project_id=PROJECT_ID,
+        location=GCP_LOCATION,
+        entry_group_id=ENTRY_GROUP_NAME,
+        entry_group_configuration={"display_name": "Updated Display Name"},
+        update_mask=["display_name"],
+    )
+    # [END howto_operator_dataplex_catalog_update_entry_group]
+
+    # [START howto_operator_dataplex_catalog_delete_entry_group]
+    delete_entry_group = DataplexCatalogDeleteEntryGroupOperator(
+        task_id="delete_entry_group",
+        project_id=PROJECT_ID,
+        location=GCP_LOCATION,
+        entry_group_id=ENTRY_GROUP_NAME,
+        trigger_rule=TriggerRule.ALL_DONE,
+    )
+    # [END howto_operator_dataplex_catalog_delete_entry_group]
+
+    create_entry_group >> get_entry_group >> list_entry_group >> update_entry_group >> delete_entry_group
+
+    from tests_common.test_utils.watcher import watcher
+
+    # This test needs watcher in order to properly mark success/failure
+    # when "tearDown" task with trigger rule is part of the DAG
+    list(dag.tasks) >> watcher()
+
+
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
diff --git a/pyproject.toml b/pyproject.toml
index c67587ec9ab37..b0cb0793afed5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -566,6 +566,7 @@ mypy_path = [
   "$MYPY_CONFIG_FILE_DIR/providers/src",
   "$MYPY_CONFIG_FILE_DIR/task_sdk/src",
   "$MYPY_CONFIG_FILE_DIR/providers/airbyte/src",
+  "$MYPY_CONFIG_FILE_DIR/providers/edge/src",
 ]
 
 [[tool.mypy.overrides]]
@@ -605,6 +606,7 @@ dev = [
   "local-providers",
   "apache-airflow-task-sdk",
   "apache-airflow-providers-airbyte",
+  "apache-airflow-providers-edge",
 ]
 
 [tool.uv.sources]
@@ -613,6 +615,7 @@ dev = [
 local-providers = { workspace = true }
 apache-airflow-task-sdk = { workspace = true }
 apache-airflow-providers-airbyte = {workspace = true}
+apache-airflow-providers-edge = {workspace = true}
 
 [tool.uv.workspace]
-members = ["providers", "task_sdk", "providers/airbyte" ]
+members = ["providers", "task_sdk", "providers/airbyte", "providers/edge" ]
diff --git a/scripts/ci/install_breeze.sh b/scripts/ci/install_breeze.sh
index a9a830ca31914..98c29d44d07c2 100755
--- a/scripts/ci/install_breeze.sh
+++ b/scripts/ci/install_breeze.sh
@@ -22,7 +22,7 @@ cd "$( dirname "${BASH_SOURCE[0]}" )/../../"
 PYTHON_ARG=""
 
 PIP_VERSION="24.3.1"
-UV_VERSION="0.5.14"
+UV_VERSION="0.5.20"
 if [[ ${PYTHON_VERSION=} != "" ]]; then
     PYTHON_ARG="--python=$(which python"${PYTHON_VERSION}") "
 fi
diff --git a/scripts/ci/pre_commit/update_providers_dependencies.py b/scripts/ci/pre_commit/update_providers_dependencies.py
index debe615521254..f5c9c018abb60 100755
--- a/scripts/ci/pre_commit/update_providers_dependencies.py
+++ b/scripts/ci/pre_commit/update_providers_dependencies.py
@@ -206,11 +206,15 @@ def check_if_different_provider_used(file_path: Path) -> None:
             warnings.append(f"The provider {imported_provider} from {file_path} cannot be found.")
             continue
 
-        if imported_provider == "standard":
-            # Standard -- i.e. BashOperator is used in a lot of example dags, but we don't want to mark this
+        if "/example_dags/" in file_path.as_posix():
+            # If a provider is used in example dags, we don't want to mark this
             # as a provider cross dependency
-            if file_path.name == "celery_executor_utils.py" or "/example_dags/" in file_path.as_posix():
-                continue
+            continue
+        if imported_provider == "standard" and file_path.name == "celery_executor_utils.py":
+            # Some common standard operators are pre-imported in celery when it starts in order to speed
+            # up task startup time, but that does not mean the standard provider is a cross-provider
+            # dependency of the celery executor
+            continue
         if imported_provider:
             if file_provider != imported_provider:
                 ALL_DEPENDENCIES[file_provider]["cross-providers-deps"].append(imported_provider)
diff --git a/scripts/tools/setup_breeze b/scripts/tools/setup_breeze
index 8b3932c982008..272d56c89d64b 100755
--- a/scripts/tools/setup_breeze
+++ b/scripts/tools/setup_breeze
@@ -27,7 +27,7 @@ COLOR_YELLOW=$'\e[33m'
 COLOR_BLUE=$'\e[34m'
 COLOR_RESET=$'\e[0m'
 
-UV_VERSION="0.5.14"
+UV_VERSION="0.5.20"
 
 function manual_instructions() {
     echo
diff --git a/task_sdk/src/airflow/sdk/api/client.py b/task_sdk/src/airflow/sdk/api/client.py
index 5ee270591481e..e73e5aebea64b 100644
--- a/task_sdk/src/airflow/sdk/api/client.py
+++ b/task_sdk/src/airflow/sdk/api/client.py
@@ -34,6 +34,7 @@
 
 from airflow.sdk import __version__
 from airflow.sdk.api.datamodels._generated import (
+    AssetResponse,
     ConnectionResponse,
     DagRunType,
     TerminalTIState,
@@ -267,6 +268,24 @@ def set(
         return {"ok": True}
 
 
+class AssetOperations:
+    __slots__ = ("client",)
+
+    def __init__(self, client: Client):
+        self.client = client
+
+    def get(self, name: str | None = None, uri: str | None = None) -> AssetResponse:
+        """Get Asset value from the API server."""
+        if name:
+            resp = self.client.get("assets/by-name", params={"name": name})
+        elif uri:
+            resp = self.client.get("assets/by-uri", params={"uri": uri})
+        else:
+            raise ValueError("Either `name` or `uri` must be provided")
+
+        return AssetResponse.model_validate_json(resp.read())
+
+
 class BearerAuth(httpx.Auth):
     def __init__(self, token: str):
         self.token: str = token
@@ -374,6 +393,12 @@ def xcoms(self) -> XComOperations:
         """Operations related to XComs."""
         return XComOperations(self)
 
+    @lru_cache()  # type: ignore[misc]
+    @property
+    def assets(self) -> AssetOperations:
+        """Operations related to XComs."""
+        return AssetOperations(self)
+
 
 # This is only used for parsing. ServerResponseError is raised instead
 class _ErrorBody(BaseModel):
diff --git a/task_sdk/src/airflow/sdk/api/datamodels/_generated.py b/task_sdk/src/airflow/sdk/api/datamodels/_generated.py
index a8b478d07f029..f0a04da21c894 100644
--- a/task_sdk/src/airflow/sdk/api/datamodels/_generated.py
+++ b/task_sdk/src/airflow/sdk/api/datamodels/_generated.py
@@ -29,6 +29,15 @@
 from pydantic import BaseModel, ConfigDict, Field
 
 
+class AssetAliasResponse(BaseModel):
+    """
+    Asset alias schema with fields that are needed for Runtime.
+    """
+
+    name: Annotated[str, Field(title="Name")]
+    group: Annotated[str, Field(title="Group")]
+
+
 class ConnectionResponse(BaseModel):
     """
     Connection schema for responses with fields that are needed for Runtime.
@@ -187,6 +196,17 @@ class TaskInstance(BaseModel):
     hostname: Annotated[str | None, Field(title="Hostname")] = None
 
 
+class AssetResponse(BaseModel):
+    """
+    Asset schema for responses with fields that are needed for Runtime.
+    """
+
+    name: Annotated[str, Field(title="Name")]
+    uri: Annotated[str, Field(title="Uri")]
+    group: Annotated[str, Field(title="Group")]
+    extra: Annotated[dict[str, Any] | None, Field(title="Extra")] = None
+
+
 class DagRun(BaseModel):
     """
     Schema for DagRun model with minimal required fields needed for Runtime.
diff --git a/task_sdk/src/airflow/sdk/definitions/asset/__init__.py b/task_sdk/src/airflow/sdk/definitions/asset/__init__.py
index 5b0cbb4a784d9..ea89f1b681701 100644
--- a/task_sdk/src/airflow/sdk/definitions/asset/__init__.py
+++ b/task_sdk/src/airflow/sdk/definitions/asset/__init__.py
@@ -488,14 +488,14 @@ def iter_dag_dependencies(self, *, source: str = "", target: str = "") -> Iterat
             )
 
 
-@attrs.define()
+@attrs.define(hash=True)
 class AssetNameRef(AssetRef):
     """Name reference to an asset."""
 
     name: str
 
 
-@attrs.define()
+@attrs.define(hash=True)
 class AssetUriRef(AssetRef):
     """URI reference to an asset."""
 
diff --git a/task_sdk/src/airflow/sdk/execution_time/comms.py b/task_sdk/src/airflow/sdk/execution_time/comms.py
index b6874d47f090c..f8aaab65af4f1 100644
--- a/task_sdk/src/airflow/sdk/execution_time/comms.py
+++ b/task_sdk/src/airflow/sdk/execution_time/comms.py
@@ -50,6 +50,7 @@
 from pydantic import BaseModel, ConfigDict, Field, JsonValue
 
 from airflow.sdk.api.datamodels._generated import (
+    AssetResponse,
     BundleInfo,
     ConnectionResponse,
     TaskInstance,
@@ -79,6 +80,25 @@ class StartupDetails(BaseModel):
     type: Literal["StartupDetails"] = "StartupDetails"
 
 
+class AssetResult(AssetResponse):
+    """Response to ReadXCom request."""
+
+    type: Literal["AssetResult"] = "AssetResult"
+
+    @classmethod
+    def from_asset_response(cls, asset_response: AssetResponse) -> AssetResult:
+        """
+        Get AssetResult from AssetResponse.
+
+        AssetResponse is autogenerated from the API schema, so we need to convert it to AssetResult
+        for communication between the Supervisor and the task process.
+        """
+        # Exclude defaults to avoid sending unnecessary data
+        # Pass the type as AssetResult explicitly so we can then call model_dump_json with exclude_unset=True
+        # to avoid sending unset fields (which are defaults in our case).
+        return cls(**asset_response.model_dump(exclude_defaults=True), type="AssetResult")
+
+
 class XComResult(XComResponse):
     """Response to ReadXCom request."""
 
@@ -133,7 +153,7 @@ class ErrorResponse(BaseModel):
 
 
 ToTask = Annotated[
-    Union[StartupDetails, XComResult, ConnectionResult, VariableResult, ErrorResponse],
+    Union[StartupDetails, XComResult, ConnectionResult, VariableResult, ErrorResponse, AssetResult],
     Field(discriminator="type"),
 ]
 
@@ -231,12 +251,24 @@ class SetRenderedFields(BaseModel):
     type: Literal["SetRenderedFields"] = "SetRenderedFields"
 
 
+class GetAssetByName(BaseModel):
+    name: str
+    type: Literal["GetAssetByName"] = "GetAssetByName"
+
+
+class GetAssetByUri(BaseModel):
+    uri: str
+    type: Literal["GetAssetByUri"] = "GetAssetByUri"
+
+
 ToSupervisor = Annotated[
     Union[
         TaskState,
         GetXCom,
         GetConnection,
         GetVariable,
+        GetAssetByName,
+        GetAssetByUri,
         DeferTask,
         PutVariable,
         SetXCom,
diff --git a/task_sdk/src/airflow/sdk/execution_time/context.py b/task_sdk/src/airflow/sdk/execution_time/context.py
index cdb3880bb36b3..918526c3004c2 100644
--- a/task_sdk/src/airflow/sdk/execution_time/context.py
+++ b/task_sdk/src/airflow/sdk/execution_time/context.py
@@ -17,20 +17,31 @@
 from __future__ import annotations
 
 import contextlib
-from collections.abc import Generator
-from typing import TYPE_CHECKING, Any
+from collections.abc import Generator, Iterator, Mapping
+from typing import TYPE_CHECKING, Any, Union
 
+import attrs
 import structlog
 
 from airflow.sdk.definitions._internal.contextmanager import _CURRENT_CONTEXT
 from airflow.sdk.definitions._internal.types import NOTSET
+from airflow.sdk.definitions.asset import (
+    Asset,
+    AssetAlias,
+    AssetAliasUniqueKey,
+    AssetNameRef,
+    AssetRef,
+    AssetUniqueKey,
+    AssetUriRef,
+    BaseAssetUniqueKey,
+)
 from airflow.sdk.exceptions import AirflowRuntimeError, ErrorType
 
 if TYPE_CHECKING:
     from airflow.sdk.definitions.connection import Connection
     from airflow.sdk.definitions.context import Context
     from airflow.sdk.definitions.variable import Variable
-    from airflow.sdk.execution_time.comms import ConnectionResult, VariableResult
+    from airflow.sdk.execution_time.comms import AssetResult, ConnectionResult, VariableResult
 
 log = structlog.get_logger(logger_name="task")
 
@@ -163,6 +174,112 @@ def __eq__(self, other: object) -> bool:
         return True
 
 
+@attrs.define
+class AssetAliasEvent:
+    """Representation of asset event to be triggered by an asset alias."""
+
+    source_alias_name: str
+    dest_asset_key: AssetUniqueKey
+    extra: dict[str, Any]
+
+
+@attrs.define
+class OutletEventAccessor:
+    """Wrapper to access an outlet asset event in template."""
+
+    key: BaseAssetUniqueKey
+    extra: dict[str, Any] = attrs.Factory(dict)
+    asset_alias_events: list[AssetAliasEvent] = attrs.field(factory=list)
+
+    def add(self, asset: Asset, extra: dict[str, Any] | None = None) -> None:
+        """Add an AssetEvent to an existing Asset."""
+        if not isinstance(self.key, AssetAliasUniqueKey):
+            return
+
+        asset_alias_name = self.key.name
+        event = AssetAliasEvent(
+            source_alias_name=asset_alias_name,
+            dest_asset_key=AssetUniqueKey.from_asset(asset),
+            extra=extra or {},
+        )
+        self.asset_alias_events.append(event)
+
+
+class OutletEventAccessors(Mapping[Union[Asset, AssetAlias], OutletEventAccessor]):
+    """Lazy mapping of outlet asset event accessors."""
+
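+    # Class-level cache shared by all instances: maps an already-resolved AssetRef to its AssetUniqueKey.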
+    _asset_ref_cache: dict[AssetRef, AssetUniqueKey] = {}
+
+    def __init__(self) -> None:
+        self._dict: dict[BaseAssetUniqueKey, OutletEventAccessor] = {}
+
+    def __str__(self) -> str:
+        return f"OutletEventAccessors(_dict={self._dict})"
+
+    def __iter__(self) -> Iterator[Asset | AssetAlias]:
+        return (
+            key.to_asset() if isinstance(key, AssetUniqueKey) else key.to_asset_alias() for key in self._dict
+        )
+
+    def __len__(self) -> int:
+        return len(self._dict)
+
+    def __getitem__(self, key: Asset | AssetAlias) -> OutletEventAccessor:
+        hashable_key: BaseAssetUniqueKey
+        if isinstance(key, Asset):
+            hashable_key = AssetUniqueKey.from_asset(key)
+        elif isinstance(key, AssetAlias):
+            hashable_key = AssetAliasUniqueKey.from_asset_alias(key)
+        elif isinstance(key, AssetRef):
+            hashable_key = self._resolve_asset_ref(key)
+        else:
+            raise TypeError(f"Key should be either an asset or an asset alias, not {type(key)}")
+
+        if hashable_key not in self._dict:
+            self._dict[hashable_key] = OutletEventAccessor(extra={}, key=hashable_key)
+        return self._dict[hashable_key]
+
+    def _resolve_asset_ref(self, ref: AssetRef) -> AssetUniqueKey:
+        with contextlib.suppress(KeyError):
+            return self._asset_ref_cache[ref]
+
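+        # Cache miss: resolve the asset via the API server and cache it under both its name ref and its URI ref.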
+        refs_to_cache: list[AssetRef]
+        if isinstance(ref, AssetNameRef):
+            asset = self._get_asset_from_db(name=ref.name)
+            refs_to_cache = [ref, AssetUriRef(asset.uri)]
+        elif isinstance(ref, AssetUriRef):
+            asset = self._get_asset_from_db(uri=ref.uri)
+            refs_to_cache = [ref, AssetNameRef(asset.name)]
+        else:
+            raise TypeError(f"Unimplemented asset ref: {type(ref)}")
+        unique_key = AssetUniqueKey.from_asset(asset)
+        for ref in refs_to_cache:
+            self._asset_ref_cache[ref] = unique_key
+        return unique_key
+
+    # TODO: This is temporary to avoid code duplication between here & airflow/models/taskinstance.py
+    @staticmethod
+    def _get_asset_from_db(name: str | None = None, uri: str | None = None) -> Asset:
+        from airflow.sdk.definitions.asset import Asset
+        from airflow.sdk.execution_time.comms import ErrorResponse, GetAssetByName, GetAssetByUri
+        from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS
+
+        if name:
+            SUPERVISOR_COMMS.send_request(log=log, msg=GetAssetByName(name=name))
+        elif uri:
+            SUPERVISOR_COMMS.send_request(log=log, msg=GetAssetByUri(uri=uri))
+        else:
+            raise ValueError("Either name or uri must be provided")
+
+        msg = SUPERVISOR_COMMS.get_message()
+        if isinstance(msg, ErrorResponse):
+            raise AirflowRuntimeError(msg)
+
+        if TYPE_CHECKING:
+            assert isinstance(msg, AssetResult)
+        return Asset(**msg.model_dump(exclude={"type"}))
+
+
 @contextlib.contextmanager
 def set_current_context(context: Context) -> Generator[Context, None, None]:
     """
diff --git a/task_sdk/src/airflow/sdk/execution_time/supervisor.py b/task_sdk/src/airflow/sdk/execution_time/supervisor.py
index 32895d36524d8..bd50ee5126b94 100644
--- a/task_sdk/src/airflow/sdk/execution_time/supervisor.py
+++ b/task_sdk/src/airflow/sdk/execution_time/supervisor.py
@@ -61,8 +61,11 @@
     VariableResponse,
 )
 from airflow.sdk.execution_time.comms import (
+    AssetResult,
     ConnectionResult,
     DeferTask,
+    GetAssetByName,
+    GetAssetByUri,
     GetConnection,
     GetVariable,
     GetXCom,
@@ -787,6 +790,14 @@ def _handle_request(self, msg: ToSupervisor, log: FilteringBoundLogger):
             self.client.variables.set(msg.key, msg.value, msg.description)
         elif isinstance(msg, SetRenderedFields):
             self.client.task_instances.set_rtif(self.id, msg.rendered_fields)
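+        # Asset lookups: fetch the asset from the API server and send the serialized result back to the task process.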
+        elif isinstance(msg, GetAssetByName):
+            asset_resp = self.client.assets.get(name=msg.name)
+            asset_result = AssetResult.from_asset_response(asset_resp)
+            resp = asset_result.model_dump_json(exclude_unset=True).encode()
+        elif isinstance(msg, GetAssetByUri):
+            asset_resp = self.client.assets.get(uri=msg.uri)
+            asset_result = AssetResult.from_asset_response(asset_resp)
+            resp = asset_result.model_dump_json(exclude_unset=True).encode()
         else:
             log.error("Unhandled request", msg=msg)
             return
diff --git a/task_sdk/src/airflow/sdk/execution_time/task_runner.py b/task_sdk/src/airflow/sdk/execution_time/task_runner.py
index 186faac878a0a..d252c24be180c 100644
--- a/task_sdk/src/airflow/sdk/execution_time/task_runner.py
+++ b/task_sdk/src/airflow/sdk/execution_time/task_runner.py
@@ -50,6 +50,7 @@
 from airflow.sdk.execution_time.context import (
     ConnectionAccessor,
     MacrosAccessor,
+    OutletEventAccessors,
     VariableAccessor,
     set_current_context,
 )
@@ -92,12 +93,13 @@ def get_template_context(self) -> Context:
             # TODO: Ensure that ti.log_url and such are available to use in context
             #   especially after removal of `conf` from Context.
             "ti": self,
-            # "outlet_events": OutletEventAccessors(),
+            "outlet_events": OutletEventAccessors(),
             # "expanded_ti_count": expanded_ti_count,
             "expanded_ti_count": None,  # TODO: Implement this
             # "inlet_events": InletEventsAccessors(task.inlets, session=session),
             "macros": MacrosAccessor(),
             # "params": validated_params,
+            # TODO: Make this go through Public API longer term.
             # "prev_data_interval_start_success": get_prev_data_interval_start_success(),
             # "prev_data_interval_end_success": get_prev_data_interval_end_success(),
             # "prev_start_date_success": get_prev_start_date_success(),
diff --git a/task_sdk/tests/execution_time/conftest.py b/task_sdk/tests/execution_time/conftest.py
index 641f14817d899..d2a961a5307da 100644
--- a/task_sdk/tests/execution_time/conftest.py
+++ b/task_sdk/tests/execution_time/conftest.py
@@ -149,7 +149,7 @@ def _create_task_instance(
                 id=ti_id, task_id=task.task_id, dag_id=dag_id, run_id=run_id, try_number=try_number
             ),
             dag_rel_path="",
-            bundle_info=BundleInfo.model_construct(name="anything", version="any"),
+            bundle_info=BundleInfo(name="anything", version="any"),
             requests_fd=0,
             ti_context=ti_context,
         )
diff --git a/task_sdk/tests/execution_time/test_context.py b/task_sdk/tests/execution_time/test_context.py
index 6527d517e375f..e3ef15dc934cf 100644
--- a/task_sdk/tests/execution_time/test_context.py
+++ b/task_sdk/tests/execution_time/test_context.py
@@ -22,12 +22,16 @@
 import pytest
 
 from airflow.sdk import get_current_context
+from airflow.sdk.definitions.asset import Asset, AssetAlias, AssetAliasUniqueKey, AssetUniqueKey
 from airflow.sdk.definitions.connection import Connection
 from airflow.sdk.definitions.variable import Variable
 from airflow.sdk.exceptions import ErrorType
-from airflow.sdk.execution_time.comms import ConnectionResult, ErrorResponse, VariableResult
+from airflow.sdk.execution_time.comms import AssetResult, ConnectionResult, ErrorResponse, VariableResult
 from airflow.sdk.execution_time.context import (
+    AssetAliasEvent,
     ConnectionAccessor,
+    OutletEventAccessor,
+    OutletEventAccessors,
     VariableAccessor,
     _convert_connection_result_conn,
     _convert_variable_result_to_variable,
@@ -248,3 +252,100 @@ def test_nested_context(self):
             assert ctx["ContextId"] == i
             # End of with statement
             ctx_list[i].__exit__(None, None, None)
+
+
+class TestOutletEventAccessor:
+    @pytest.mark.parametrize(
+        "key, asset_alias_events",
+        (
+            (AssetUniqueKey.from_asset(Asset("test_uri")), []),
+            (
+                AssetAliasUniqueKey.from_asset_alias(AssetAlias("test_alias")),
+                [
+                    AssetAliasEvent(
+                        source_alias_name="test_alias",
+                        dest_asset_key=AssetUniqueKey(uri="test_uri", name="test_uri"),
+                        extra={},
+                    )
+                ],
+            ),
+        ),
+    )
+    def test_add(self, key, asset_alias_events, mock_supervisor_comms):
+        asset = Asset("test_uri")
+        mock_supervisor_comms.get_message.return_value = asset
+
+        outlet_event_accessor = OutletEventAccessor(key=key, extra={})
+        outlet_event_accessor.add(asset)
+        assert outlet_event_accessor.asset_alias_events == asset_alias_events
+
+    @pytest.mark.parametrize(
+        "key, asset_alias_events",
+        (
+            (AssetUniqueKey.from_asset(Asset("test_uri")), []),
+            (
+                AssetAliasUniqueKey.from_asset_alias(AssetAlias("test_alias")),
+                [
+                    AssetAliasEvent(
+                        source_alias_name="test_alias",
+                        dest_asset_key=AssetUniqueKey(name="test-asset", uri="test://asset-uri/"),
+                        extra={},
+                    )
+                ],
+            ),
+        ),
+    )
+    def test_add_with_db(self, key, asset_alias_events, mock_supervisor_comms):
+        asset = Asset(uri="test://asset-uri", name="test-asset")
+        mock_supervisor_comms.get_message.return_value = asset
+
+        outlet_event_accessor = OutletEventAccessor(key=key, extra={"not": ""})
+        outlet_event_accessor.add(asset, extra={})
+        assert outlet_event_accessor.asset_alias_events == asset_alias_events
+
+
+class TestOutletEventAccessors:
+    @pytest.mark.parametrize(
+        "access_key, internal_key",
+        (
+            (Asset("test"), AssetUniqueKey.from_asset(Asset("test"))),
+            (
+                Asset(name="test", uri="test://asset"),
+                AssetUniqueKey.from_asset(Asset(name="test", uri="test://asset")),
+            ),
+            (AssetAlias("test_alias"), AssetAliasUniqueKey.from_asset_alias(AssetAlias("test_alias"))),
+        ),
+    )
+    def test__get_item__dict_key_not_exists(self, access_key, internal_key):
+        outlet_event_accessors = OutletEventAccessors()
+        assert len(outlet_event_accessors) == 0
+        outlet_event_accessor = outlet_event_accessors[access_key]
+        assert len(outlet_event_accessors) == 1
+        assert outlet_event_accessor.key == internal_key
+        assert outlet_event_accessor.extra == {}
+
+    @pytest.mark.parametrize(
+        ["access_key", "asset"],
+        (
+            (Asset.ref(name="test"), Asset(name="test")),
+            (Asset.ref(name="test1"), Asset(name="test1", uri="test://asset-uri")),
+            (Asset.ref(uri="test://asset-uri"), Asset(uri="test://asset-uri")),
+        ),
+    )
+    def test__get_item__asset_ref(self, access_key, asset, mock_supervisor_comms):
+        """Test accessing OutletEventAccessors with AssetRef resolves to correct Asset."""
+        internal_key = AssetUniqueKey.from_asset(asset)
+        outlet_event_accessors = OutletEventAccessors()
+        assert len(outlet_event_accessors) == 0
+
+        # Asset from the API Server via the supervisor
+        mock_supervisor_comms.get_message.return_value = AssetResult(
+            name=asset.name,
+            uri=asset.uri,
+            group=asset.group,
+        )
+
+        outlet_event_accessor = outlet_event_accessors[access_key]
+        assert len(outlet_event_accessors) == 1
+        assert outlet_event_accessor.key == internal_key
+        assert outlet_event_accessor.extra == {}
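
The tests above treat OutletEventAccessors as a lazily populating mapping: the first lookup of an unseen Asset or AssetAlias creates a fresh accessor keyed by its unique key. A rough illustration of that dict-like behaviour (a sketch only, not the real implementation, which also converts keys to AssetUniqueKey/AssetAliasUniqueKey and resolves AssetRefs through the supervisor):

    from dataclasses import dataclass, field
    from typing import Any

    @dataclass
    class SketchAccessor:  # illustration only
        key: Any
        extra: dict = field(default_factory=dict)
        asset_alias_events: list = field(default_factory=list)

    class SketchAccessors:  # illustration only
        def __init__(self) -> None:
            self._accessors = {}

        def __len__(self) -> int:
            return len(self._accessors)

        def __getitem__(self, key: Any) -> SketchAccessor:
            # The first access for a key creates an empty accessor, so len() goes 0 -> 1.
            if key not in self._accessors:
                self._accessors[key] = SketchAccessor(key=key)
            return self._accessors[key]
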
diff --git a/task_sdk/tests/execution_time/test_supervisor.py b/task_sdk/tests/execution_time/test_supervisor.py
index fb84713216625..cae34a90adfeb 100644
--- a/task_sdk/tests/execution_time/test_supervisor.py
+++ b/task_sdk/tests/execution_time/test_supervisor.py
@@ -41,8 +41,11 @@
 from airflow.sdk.api.client import ServerResponseError
 from airflow.sdk.api.datamodels._generated import TaskInstance, TerminalTIState
 from airflow.sdk.execution_time.comms import (
+    AssetResult,
     ConnectionResult,
     DeferTask,
+    GetAssetByName,
+    GetAssetByUri,
     GetConnection,
     GetVariable,
     GetXCom,
@@ -74,7 +77,7 @@ def lineno():
 
 def local_dag_bundle_cfg(path, name="my-bundle"):
     return {
-        "AIRFLOW__DAG_BUNDLES__BACKENDS": json.dumps(
+        "AIRFLOW__DAG_BUNDLES__CONFIG_LIST": json.dumps(
             [
                 {
                     "name": name,
@@ -277,7 +280,7 @@ def test_run_simple_dag(self, test_dags_dir, captured_logs, time_machine):
             run_id="c",
             try_number=1,
         )
-        bundle_info = BundleInfo.model_construct(name="my-bundle", version=None)
+        bundle_info = BundleInfo(name="my-bundle", version=None)
         with patch.dict(os.environ, local_dag_bundle_cfg(test_dags_dir, bundle_info.name)):
             exit_code = supervise(
                 ti=ti,
@@ -320,7 +323,7 @@ def test_supervise_handles_deferred_task(
         instant = tz.datetime(2024, 11, 7, 12, 34, 56, 0)
         time_machine.move_to(instant, tick=False)
 
-        bundle_info = BundleInfo.model_construct(name="my-bundle", version=None)
+        bundle_info = BundleInfo(name="my-bundle", version=None)
         with patch.dict(os.environ, local_dag_bundle_cfg(test_dags_dir, bundle_info.name)):
             exit_code = supervise(
                 ti=ti,
@@ -805,13 +808,14 @@ def watched_subprocess(self, mocker):
         )
 
     @pytest.mark.parametrize(
-        ["message", "expected_buffer", "client_attr_path", "method_arg", "mock_response"],
+        ["message", "expected_buffer", "client_attr_path", "method_arg", "method_kwarg", "mock_response"],
         [
             pytest.param(
                 GetConnection(conn_id="test_conn"),
                 b'{"conn_id":"test_conn","conn_type":"mysql","type":"ConnectionResult"}\n',
                 "connections.get",
                 ("test_conn",),
+                {},
                 ConnectionResult(conn_id="test_conn", conn_type="mysql"),
                 id="get_connection",
             ),
@@ -820,6 +824,7 @@ def watched_subprocess(self, mocker):
                 b'{"key":"test_key","value":"test_value","type":"VariableResult"}\n',
                 "variables.get",
                 ("test_key",),
+                {},
                 VariableResult(key="test_key", value="test_value"),
                 id="get_variable",
             ),
@@ -828,6 +833,7 @@ def watched_subprocess(self, mocker):
                 b"",
                 "variables.set",
                 ("test_key", "test_value", "test_description"),
+                {},
                 {"ok": True},
                 id="set_variable",
             ),
@@ -836,6 +842,7 @@ def watched_subprocess(self, mocker):
                 b"",
                 "task_instances.defer",
                 (TI_ID, DeferTask(next_method="execute_callback", classpath="my-classpath")),
+                {},
                 "",
                 id="patch_task_instance_to_deferred",
             ),
@@ -853,6 +860,7 @@ def watched_subprocess(self, mocker):
                         end_date=timezone.parse("2024-10-31T12:00:00Z"),
                     ),
                 ),
+                {},
                 "",
                 id="patch_task_instance_to_up_for_reschedule",
             ),
@@ -861,6 +869,7 @@ def watched_subprocess(self, mocker):
                 b'{"key":"test_key","value":"test_value","type":"XComResult"}\n',
                 "xcoms.get",
                 ("test_dag", "test_run", "test_task", "test_key", None),
+                {},
                 XComResult(key="test_key", value="test_value"),
                 id="get_xcom",
             ),
@@ -871,6 +880,7 @@ def watched_subprocess(self, mocker):
                 b'{"key":"test_key","value":"test_value","type":"XComResult"}\n',
                 "xcoms.get",
                 ("test_dag", "test_run", "test_task", "test_key", 2),
+                {},
                 XComResult(key="test_key", value="test_value"),
                 id="get_xcom_map_index",
             ),
@@ -879,6 +889,7 @@ def watched_subprocess(self, mocker):
                 b'{"key":"test_key","value":null,"type":"XComResult"}\n',
                 "xcoms.get",
                 ("test_dag", "test_run", "test_task", "test_key", None),
+                {},
                 XComResult(key="test_key", value=None, type="XComResult"),
                 id="get_xcom_not_found",
             ),
@@ -900,6 +911,7 @@ def watched_subprocess(self, mocker):
                     '{"key": "test_key", "value": {"key2": "value2"}}',
                     None,
                 ),
+                {},
                 {"ok": True},
                 id="set_xcom",
             ),
@@ -922,6 +934,7 @@ def watched_subprocess(self, mocker):
                     '{"key": "test_key", "value": {"key2": "value2"}}',
                     2,
                 ),
+                {},
                 {"ok": True},
                 id="set_xcom_with_map_index",
             ),
@@ -932,6 +945,7 @@ def watched_subprocess(self, mocker):
                 b"",
                 "",
                 (),
+                {},
                 "",
                 id="patch_task_instance_to_skipped",
             ),
@@ -940,9 +954,28 @@ def watched_subprocess(self, mocker):
                 b"",
                 "task_instances.set_rtif",
                 (TI_ID, {"field1": "rendered_value1", "field2": "rendered_value2"}),
+                {},
                 {"ok": True},
                 id="set_rtif",
             ),
+            pytest.param(
+                GetAssetByName(name="asset"),
+                b'{"name":"asset","uri":"s3://bucket/obj","group":"asset","type":"AssetResult"}\n',
+                "assets.get",
+                (),
+                {"name": "asset"},
+                AssetResult(name="asset", uri="s3://bucket/obj", group="asset"),
+                id="get_asset_by_name",
+            ),
+            pytest.param(
+                GetAssetByUri(uri="s3://bucket/obj"),
+                b'{"name":"asset","uri":"s3://bucket/obj","group":"asset","type":"AssetResult"}\n',
+                "assets.get",
+                (),
+                {"uri": "s3://bucket/obj"},
+                AssetResult(name="asset", uri="s3://bucket/obj", group="asset"),
+                id="get_asset_by_uri",
+            ),
         ],
     )
     def test_handle_requests(
@@ -953,8 +986,8 @@ def test_handle_requests(
         expected_buffer,
         client_attr_path,
         method_arg,
+        method_kwarg,
         mock_response,
-        time_machine,
     ):
         """
         Test handling of different messages to the subprocess. For any new message type, add a
@@ -980,7 +1013,7 @@ def test_handle_requests(
 
         # Verify the correct client method was called
         if client_attr_path:
-            mock_client_method.assert_called_once_with(*method_arg)
+            mock_client_method.assert_called_once_with(*method_arg, **method_kwarg)
 
         # Verify the response was added to the buffer
         val = watched_subprocess.stdin.getvalue()
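
The new method_kwarg column exists because the asset lookups call the client with keyword arguments (name= or uri=) rather than positional ones, as the two new cases above show. A hypothetical sketch of the corresponding dispatch (message and client attribute names are taken from the test parameters; the real handler may look different):

    from airflow.sdk.execution_time.comms import GetAssetByName, GetAssetByUri

    def handle_asset_request(msg, client):
        # Hypothetical dispatch branch; `client.assets.get` is assumed to accept
        # keyword arguments `name=` / `uri=`, matching how the test mocks it above.
        if isinstance(msg, GetAssetByName):
            return client.assets.get(name=msg.name)
        if isinstance(msg, GetAssetByUri):
            return client.assets.get(uri=msg.uri)
        raise TypeError(f"unexpected message type: {type(msg).__name__}")
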
diff --git a/task_sdk/tests/execution_time/test_task_runner.py b/task_sdk/tests/execution_time/test_task_runner.py
index 0e3698050b88e..1a09c05908d15 100644
--- a/task_sdk/tests/execution_time/test_task_runner.py
+++ b/task_sdk/tests/execution_time/test_task_runner.py
@@ -52,7 +52,12 @@
     VariableResult,
     XComResult,
 )
-from airflow.sdk.execution_time.context import ConnectionAccessor, MacrosAccessor, VariableAccessor
+from airflow.sdk.execution_time.context import (
+    ConnectionAccessor,
+    MacrosAccessor,
+    OutletEventAccessors,
+    VariableAccessor,
+)
 from airflow.sdk.execution_time.task_runner import (
     CommsDecoder,
     RuntimeTaskInstance,
@@ -63,7 +68,7 @@
 )
 from airflow.utils import timezone
 
-FAKE_BUNDLE = BundleInfo.model_construct(name="anything", version="any")
+FAKE_BUNDLE = BundleInfo(name="anything", version="any")
 
 
 def get_inline_dag(dag_id: str, task: BaseOperator) -> DAG:
@@ -109,7 +114,7 @@ def test_recv_StartupDetails(self):
         assert msg.ti.task_id == "a"
         assert msg.ti.dag_id == "c"
         assert msg.dag_rel_path == "/dev/null"
-        assert msg.bundle_info == BundleInfo.model_construct(name="any-name", version="any-version")
+        assert msg.bundle_info == BundleInfo(name="any-name", version="any-version")
 
         # Since this was a StartupDetails message, the decoder should open the other socket
         assert decoder.request_socket is not None
@@ -122,7 +127,7 @@ def test_parse(test_dags_dir: Path, make_ti_context):
     what = StartupDetails(
         ti=TaskInstance(id=uuid7(), task_id="a", dag_id="super_basic", run_id="c", try_number=1),
         dag_rel_path="super_basic.py",
-        bundle_info=BundleInfo.model_construct(name="my-bundle", version=None),
+        bundle_info=BundleInfo(name="my-bundle", version=None),
         requests_fd=0,
         ti_context=make_ti_context(),
     )
@@ -130,7 +135,7 @@ def test_parse(test_dags_dir: Path, make_ti_context):
     with patch.dict(
         os.environ,
         {
-            "AIRFLOW__DAG_BUNDLES__BACKENDS": json.dumps(
+            "AIRFLOW__DAG_BUNDLES__CONFIG_LIST": json.dumps(
                 [
                     {
                         "name": "my-bundle",
@@ -574,7 +579,7 @@ def test_dag_parsing_context(make_ti_context, mock_supervisor_comms, monkeypatch
         ]
     )
 
-    monkeypatch.setenv("AIRFLOW__DAG_BUNDLES__BACKENDS", dag_bundle_val)
+    monkeypatch.setenv("AIRFLOW__DAG_BUNDLES__CONFIG_LIST", dag_bundle_val)
     ti, _ = startup()
 
     # Presence of `conditional_task` below means DAG ID is properly set in the parsing context!
@@ -613,6 +618,7 @@ def test_get_context_without_ti_context_from_server(self, mocked_parse, make_ti_
             "inlets": task.inlets,
             "macros": MacrosAccessor(),
             "map_index_template": task.map_index_template,
+            "outlet_events": OutletEventAccessors(),
             "outlets": task.outlets,
             "run_id": "test_run",
             "task": task,
@@ -645,6 +651,7 @@ def test_get_context_with_ti_context_from_server(self, create_runtime_ti):
             "inlets": task.inlets,
             "macros": MacrosAccessor(),
             "map_index_template": task.map_index_template,
+            "outlet_events": OutletEventAccessors(),
             "outlets": task.outlets,
             "run_id": "test_run",
             "task": task,
diff --git a/tests/always/test_project_structure.py b/tests/always/test_project_structure.py
index f12b3ad6a6684..b894acdb3a86b 100644
--- a/tests/always/test_project_structure.py
+++ b/tests/always/test_project_structure.py
@@ -116,7 +116,6 @@ def test_providers_modules_should_have_tests(self):
             "providers/tests/google/cloud/links/test_dataflow.py",
             "providers/tests/google/cloud/links/test_dataform.py",
             "providers/tests/google/cloud/links/test_datafusion.py",
-            "providers/tests/google/cloud/links/test_dataplex.py",
             "providers/tests/google/cloud/links/test_dataprep.py",
             "providers/tests/google/cloud/links/test_dataproc.py",
             "providers/tests/google/cloud/links/test_datastore.py",
@@ -396,6 +395,7 @@ class TestGoogleProviderProjectStructure(ExampleCoverageTest, AssetsCoverageTest
         "airflow.providers.google.cloud.operators.cloud_sql.CloudSQLBaseOperator",
         "airflow.providers.google.cloud.operators.dataproc.DataprocJobBaseOperator",
         "airflow.providers.google.cloud.operators.dataproc._DataprocStartStopClusterBaseOperator",
+        "airflow.providers.google.cloud.operators.dataplex.DataplexCatalogBaseOperator",
         "airflow.providers.google.cloud.operators.vertex_ai.custom_job.CustomTrainingJobBaseOperator",
         "airflow.providers.google.cloud.operators.cloud_base.GoogleCloudBaseOperator",
         "airflow.providers.google.marketing_platform.operators.search_ads._GoogleSearchAdsBaseOperator",
diff --git a/tests/api_fastapi/core_api/routes/public/test_variables.py b/tests/api_fastapi/core_api/routes/public/test_variables.py
index 3cbab24878ac7..fac8b27472449 100644
--- a/tests/api_fastapi/core_api/routes/public/test_variables.py
+++ b/tests/api_fastapi/core_api/routes/public/test_variables.py
@@ -433,113 +433,6 @@ def test_post_should_respond_422_when_value_is_null(self, test_client):
         }
 
 
-class TestImportVariables(TestVariableEndpoint):
-    @pytest.mark.enable_redact
-    @pytest.mark.parametrize(
-        "variables_data, behavior, expected_status_code, expected_created_count, expected_created_keys, expected_conflict_keys",
-        [
-            (
-                {"new_key1": "new_value1", "new_key2": "new_value2"},
-                "overwrite",
-                200,
-                2,
-                {"new_key1", "new_key2"},
-                set(),
-            ),
-            (
-                {"new_key1": "new_value1", "new_key2": "new_value2"},
-                "skip",
-                200,
-                2,
-                {"new_key1", "new_key2"},
-                set(),
-            ),
-            (
-                {"test_variable_key": "new_value", "new_key": "new_value"},
-                "fail",
-                409,
-                0,
-                set(),
-                {"test_variable_key"},
-            ),
-            (
-                {"test_variable_key": "new_value", "new_key": "new_value"},
-                "skip",
-                200,
-                1,
-                {"new_key"},
-                {"test_variable_key"},
-            ),
-            (
-                {"test_variable_key": "new_value", "new_key": "new_value"},
-                "overwrite",
-                200,
-                2,
-                {"test_variable_key", "new_key"},
-                set(),
-            ),
-        ],
-    )
-    def test_import_variables(
-        self,
-        test_client,
-        variables_data,
-        behavior,
-        expected_status_code,
-        expected_created_count,
-        expected_created_keys,
-        expected_conflict_keys,
-        session,
-    ):
-        """Test variable import with different behaviors (overwrite, fail, skip)."""
-
-        self.create_variables()
-
-        file = create_file_upload(variables_data)
-        response = test_client.post(
-            "/public/variables/import",
-            files={"file": ("variables.json", file, "application/json")},
-            params={"action_if_exists": behavior},
-        )
-
-        assert response.status_code == expected_status_code
-
-        if expected_status_code == 200:
-            body = response.json()
-            assert body["created_count"] == expected_created_count
-            assert set(body["created_variable_keys"]) == expected_created_keys
-
-        elif expected_status_code == 409:
-            body = response.json()
-            assert (
-                f"The variables with these keys: {expected_conflict_keys} already exists." == body["detail"]
-            )
-
-    def test_import_invalid_json(self, test_client):
-        """Test invalid JSON import."""
-        file = BytesIO(b"import variable test")
-        response = test_client.post(
-            "/public/variables/import",
-            files={"file": ("variables.json", file, "application/json")},
-            params={"action_if_exists": "overwrite"},
-        )
-
-        assert response.status_code == 400
-        assert "Invalid JSON format" in response.json()["detail"]
-
-    def test_import_empty_file(self, test_client):
-        """Test empty file import."""
-        file = create_file_upload({})
-        response = test_client.post(
-            "/public/variables/import",
-            files={"file": ("empty_variables.json", file, "application/json")},
-            params={"action_if_exists": "overwrite"},
-        )
-
-        assert response.status_code == 422
-        assert response.json()["detail"] == "No variables found in the provided JSON."
-
-
 class TestBulkVariables(TestVariableEndpoint):
     @pytest.mark.enable_redact
     @pytest.mark.parametrize(
diff --git a/tests/api_fastapi/execution_api/routes/test_assets.py b/tests/api_fastapi/execution_api/routes/test_assets.py
new file mode 100644
index 0000000000000..2cf34f8dd7bc7
--- /dev/null
+++ b/tests/api_fastapi/execution_api/routes/test_assets.py
@@ -0,0 +1,110 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import pytest
+
+from airflow.models.asset import AssetActive, AssetModel
+from airflow.utils import timezone
+
+DEFAULT_DATE = timezone.parse("2021-01-01T00:00:00")
+
+pytestmark = pytest.mark.db_test
+
+
+class TestGetAssetByName:
+    def test_get_asset_by_name(self, client, session):
+        asset = AssetModel(
+            id=1,
+            name="test_get_asset_by_name",
+            uri="s3://bucket/key",
+            group="asset",
+            extra={"foo": "bar"},
+            created_at=DEFAULT_DATE,
+            updated_at=DEFAULT_DATE,
+        )
+
+        asset_active = AssetActive.for_asset(asset)
+
+        session.add_all([asset, asset_active])
+        session.commit()
+
+        response = client.get("/execution/assets/by-name", params={"name": "test_get_asset_by_name"})
+
+        assert response.status_code == 200
+        assert response.json() == {
+            "name": "test_get_asset_by_name",
+            "uri": "s3://bucket/key",
+            "group": "asset",
+            "extra": {"foo": "bar"},
+        }
+
+        session.delete(asset)
+        session.delete(asset_active)
+        session.commit()
+
+    def test_asset_name_not_found(self, client):
+        response = client.get("/execution/assets/by-name", params={"name": "non_existent"})
+
+        assert response.status_code == 404
+        assert response.json() == {
+            "detail": {
+                "message": "Asset with name non_existent not found",
+                "reason": "not_found",
+            }
+        }
+
+
+class TestGetAssetByUri:
+    def test_get_asset_by_uri(self, client, session):
+        asset = AssetModel(
+            name="test_get_asset_by_uri",
+            uri="s3://bucket/key",
+            group="asset",
+            extra={"foo": "bar"},
+        )
+
+        asset_active = AssetActive.for_asset(asset)
+
+        session.add_all([asset, asset_active])
+        session.commit()
+
+        response = client.get("/execution/assets/by-uri", params={"uri": "s3://bucket/key"})
+
+        assert response.status_code == 200
+        assert response.json() == {
+            "name": "test_get_asset_by_uri",
+            "uri": "s3://bucket/key",
+            "group": "asset",
+            "extra": {"foo": "bar"},
+        }
+
+        session.delete(asset)
+        session.delete(asset_active)
+        session.commit()
+
+    def test_asset_uri_not_found(self, client):
+        response = client.get("/execution/assets/by-uri", params={"uri": "non_existent"})
+
+        assert response.status_code == 404
+        assert response.json() == {
+            "detail": {
+                "message": "Asset with URI non_existent not found",
+                "reason": "not_found",
+            }
+        }
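
Outside the test client, the same endpoints can be hit with any HTTP client. A hedged example (the base URL is an assumption; the paths and query parameters mirror the tests above):

    import httpx

    with httpx.Client(base_url="http://localhost:8080/execution") as http:
        by_name = http.get("/assets/by-name", params={"name": "test_get_asset_by_name"})
        by_uri = http.get("/assets/by-uri", params={"uri": "s3://bucket/key"})
        print(by_name.status_code, by_uri.status_code)  # 200 if the asset exists, 404 otherwise
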
diff --git a/tests/conftest.py b/tests/conftest.py
index fca82aee34b87..8082238808dd4 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -111,7 +111,7 @@ def _config_bundle(path_to_parse: Path | str):
                 "kwargs": {"path": str(path_to_parse), "refresh_interval": 0},
             }
         ]
-        with conf_vars({("dag_bundles", "backends"): json.dumps(bundle_config)}):
+        with conf_vars({("dag_bundles", "config_list"): json.dumps(bundle_config)}):
             yield
 
     return _config_bundle
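
The renamed option can also be set through the environment, mirroring what the fixture above writes with conf_vars. A hedged example (the classpath is an assumption about where LocalDagBundle is defined; the kwargs mirror the fixture):

    import json
    import os

    os.environ["AIRFLOW__DAG_BUNDLES__CONFIG_LIST"] = json.dumps(
        [
            {
                "name": "my-bundle",
                "classpath": "airflow.dag_processing.bundles.local.LocalDagBundle",
                "kwargs": {"path": "/path/to/dags", "refresh_interval": 0},
            }
        ]
    )
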
diff --git a/tests/core/test_exceptions.py b/tests/core/test_exceptions.py
new file mode 100644
index 0000000000000..b0ebce05e90d6
--- /dev/null
+++ b/tests/core/test_exceptions.py
@@ -0,0 +1,62 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import sys
+
+
+class TestExceptions:
+    def setup_method(self):
+        self.old_modules = dict(sys.modules)
+
+    def teardown_method(self):
+        # Remove any new modules imported during the test run. This lets us
+        # import the same source files for more than one test.
+        for mod in [m for m in sys.modules if m not in self.old_modules]:
+            del sys.modules[mod]
+
+    def test_pod_mutation_hook_exceptions_compatibility(
+        self,
+    ):
+        from airflow.exceptions import (
+            PodMutationHookException as CoreMutationHookException,
+        )
+        from airflow.providers.cncf.kubernetes.exceptions import (
+            PodMutationHookException as ProviderMutationHookException,
+        )
+        from airflow.providers.cncf.kubernetes.pod_generator import (
+            PodMutationHookException as ProviderGeneratorMutationHookException,
+        )
+
+        assert ProviderMutationHookException == CoreMutationHookException
+        assert ProviderMutationHookException == ProviderGeneratorMutationHookException
+
+    def test_pod_reconciliation_error_exceptions_compatibility(
+        self,
+    ):
+        from airflow.exceptions import (
+            PodReconciliationError as CoreReconciliationError,
+        )
+        from airflow.providers.cncf.kubernetes.exceptions import (
+            PodReconciliationError as ProviderReconciliationError,
+        )
+        from airflow.providers.cncf.kubernetes.pod_generator import (
+            PodReconciliationError as ProviderGeneratorReconciliationError,
+        )
+
+        assert ProviderReconciliationError == CoreReconciliationError
+        assert ProviderReconciliationError == ProviderGeneratorReconciliationError
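
The compatibility asserted here is typically achieved by the provider re-exporting the core exception classes, so both import paths refer to a single class object. A minimal sketch of that pattern (illustrative only; the provider modules may be organised differently):

    # Hypothetical provider-side shim: re-export the core exceptions so that
    # the provider and core names compare equal (they are the same class).
    from airflow.exceptions import PodMutationHookException, PodReconciliationError

    __all__ = ["PodMutationHookException", "PodReconciliationError"]
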
diff --git a/tests/dag_processing/bundles/test_dag_bundle_manager.py b/tests/dag_processing/bundles/test_dag_bundle_manager.py
index b0baa21c6f84c..26f8b045837eb 100644
--- a/tests/dag_processing/bundles/test_dag_bundle_manager.py
+++ b/tests/dag_processing/bundles/test_dag_bundle_manager.py
@@ -70,7 +70,7 @@ def test_parse_bundle_config(value, expected):
     """Test that bundle_configs are read from configuration."""
     envs = {"AIRFLOW__CORE__LOAD_EXAMPLES": "False"}
     if value:
-        envs["AIRFLOW__DAG_BUNDLES__BACKENDS"] = value
+        envs["AIRFLOW__DAG_BUNDLES__CONFIG_LIST"] = value
     cm = nullcontext()
     exp_fail = False
     if isinstance(expected, str):
@@ -108,7 +108,7 @@ def path(self):
 def test_get_bundle():
     """Test that get_bundle builds and returns a bundle."""
 
-    with patch.dict(os.environ, {"AIRFLOW__DAG_BUNDLES__BACKENDS": json.dumps(BASIC_BUNDLE_CONFIG)}):
+    with patch.dict(os.environ, {"AIRFLOW__DAG_BUNDLES__CONFIG_LIST": json.dumps(BASIC_BUNDLE_CONFIG)}):
         bundle_manager = DagBundlesManager()
 
         with pytest.raises(ValueError, match="'bundle-that-doesn't-exist' is not configured"):
@@ -120,7 +120,7 @@ def test_get_bundle():
     assert bundle.refresh_interval == 1
 
     # And none for version also works!
-    with patch.dict(os.environ, {"AIRFLOW__DAG_BUNDLES__BACKENDS": json.dumps(BASIC_BUNDLE_CONFIG)}):
+    with patch.dict(os.environ, {"AIRFLOW__DAG_BUNDLES__CONFIG_LIST": json.dumps(BASIC_BUNDLE_CONFIG)}):
         bundle = bundle_manager.get_bundle(name="my-test-bundle")
     assert isinstance(bundle, BasicBundle)
     assert bundle.name == "my-test-bundle"
@@ -144,7 +144,7 @@ def _get_bundle_names_and_active():
             )
 
     # Initial add
-    with patch.dict(os.environ, {"AIRFLOW__DAG_BUNDLES__BACKENDS": json.dumps(BASIC_BUNDLE_CONFIG)}):
+    with patch.dict(os.environ, {"AIRFLOW__DAG_BUNDLES__CONFIG_LIST": json.dumps(BASIC_BUNDLE_CONFIG)}):
         manager = DagBundlesManager()
         manager.sync_bundles_to_db()
     assert _get_bundle_names_and_active() == [("my-test-bundle", True)]
@@ -156,13 +156,13 @@ def _get_bundle_names_and_active():
     assert _get_bundle_names_and_active() == [("dags-folder", True), ("my-test-bundle", False)]
 
     # Re-enable one that reappears in config
-    with patch.dict(os.environ, {"AIRFLOW__DAG_BUNDLES__BACKENDS": json.dumps(BASIC_BUNDLE_CONFIG)}):
+    with patch.dict(os.environ, {"AIRFLOW__DAG_BUNDLES__CONFIG_LIST": json.dumps(BASIC_BUNDLE_CONFIG)}):
         manager = DagBundlesManager()
         manager.sync_bundles_to_db()
     assert _get_bundle_names_and_active() == [("dags-folder", False), ("my-test-bundle", True)]
 
 
-@conf_vars({("dag_bundles", "backends"): json.dumps(BASIC_BUNDLE_CONFIG)})
+@conf_vars({("dag_bundles", "config_list"): json.dumps(BASIC_BUNDLE_CONFIG)})
 @pytest.mark.parametrize("version", [None, "hello"])
 def test_view_url(version):
     """Test that view_url calls the bundle's view_url method."""
@@ -185,6 +185,6 @@ def test_example_dags_bundle_added():
 
 def test_example_dags_name_is_reserved():
     reserved_name_config = [{"name": "example_dags"}]
-    with conf_vars({("dag_bundles", "backends"): json.dumps(reserved_name_config)}):
+    with conf_vars({("dag_bundles", "config_list"): json.dumps(reserved_name_config)}):
         with pytest.raises(AirflowConfigException, match="Bundle name 'example_dags' is a reserved name."):
             DagBundlesManager().parse_config()
diff --git a/tests/dag_processing/test_dag_bundles.py b/tests/dag_processing/test_dag_bundles.py
index 32b2277b68c54..6f6fb2c80f044 100644
--- a/tests/dag_processing/test_dag_bundles.py
+++ b/tests/dag_processing/test_dag_bundles.py
@@ -39,12 +39,12 @@
 
 @pytest.fixture(autouse=True)
 def bundle_temp_dir(tmp_path):
-    with conf_vars({("core", "dag_bundle_storage_path"): str(tmp_path)}):
+    with conf_vars({("dag_bundles", "dag_bundle_storage_path"): str(tmp_path)}):
         yield tmp_path
 
 
 def test_default_dag_storage_path():
-    with conf_vars({("core", "dag_bundle_storage_path"): ""}):
+    with conf_vars({("dag_bundles", "dag_bundle_storage_path"): ""}):
         bundle = LocalDagBundle(name="test", path="/hello")
         assert bundle._dag_bundle_root_storage_path == Path(tempfile.gettempdir(), "airflow", "dag_bundles")
 
@@ -60,7 +60,7 @@ def get_current_version(self):
         def path(self):
             pass
 
-    with conf_vars({("core", "dag_bundle_storage_path"): None}):
+    with conf_vars({("dag_bundles", "dag_bundle_storage_path"): None}):
         bundle = BasicBundle(name="test")
         assert bundle._dag_bundle_root_storage_path == Path(tempfile.gettempdir(), "airflow", "dag_bundles")
 
diff --git a/tests/dag_processing/test_manager.py b/tests/dag_processing/test_manager.py
index 4ab55c24eefc0..68740c4601ba0 100644
--- a/tests/dag_processing/test_manager.py
+++ b/tests/dag_processing/test_manager.py
@@ -857,7 +857,7 @@ def test_bundles_are_refreshed(self):
         bundletwo.refresh_interval = 300
         bundletwo.get_current_version.return_value = None
 
-        with conf_vars({("dag_bundles", "backends"): json.dumps(config)}):
+        with conf_vars({("dag_bundles", "config_list"): json.dumps(config)}):
             DagBundlesManager().sync_bundles_to_db()
             with mock.patch(
                 "airflow.dag_processing.bundles.manager.DagBundlesManager"
@@ -910,7 +910,7 @@ def test_bundles_versions_are_stored(self):
         mybundle.supports_versioning = True
         mybundle.get_current_version.return_value = "123"
 
-        with conf_vars({("dag_bundles", "backends"): json.dumps(config)}):
+        with conf_vars({("dag_bundles", "config_list"): json.dumps(config)}):
             DagBundlesManager().sync_bundles_to_db()
             with mock.patch(
                 "airflow.dag_processing.bundles.manager.DagBundlesManager"
diff --git a/tests/dag_processing/test_processor.py b/tests/dag_processing/test_processor.py
index 7f94a30f08974..9dc0c975335ad 100644
--- a/tests/dag_processing/test_processor.py
+++ b/tests/dag_processing/test_processor.py
@@ -262,7 +262,7 @@ def fake_collect_dags(self, *args, **kwargs):
     spy_agency.spy_on(DagBag.collect_dags, call_fake=fake_collect_dags, owner=DagBag)
 
     requests = [
-        TaskCallbackRequest.model_construct(
+        TaskCallbackRequest(
             full_filepath="A",
             msg="Message",
             ti=None,
diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py
index a67864fb1ca6b..2b5d4cce4c7bb 100644
--- a/tests/serialization/test_dag_serialization.py
+++ b/tests/serialization/test_dag_serialization.py
@@ -415,6 +415,13 @@ def setup_test_cases(self):
                 )
             )
 
+    # Skip this test when the latest botocore is used: it reads all example dags, and when botocore
+    # is upgraded to its latest version, aiobotocore usually cannot be installed, so some of the
+    # system tests fail with import errors.
+    @pytest.mark.skipif(
+        os.environ.get("UPGRADE_BOTO", "") == "true",
+        reason="This test is skipped when the latest botocore is installed",
+    )
     @pytest.mark.db_test
     def test_serialization(self):
         """Serialization and deserialization should work for every DAG and Operator."""
diff --git a/tests/serialization/test_serialized_objects.py b/tests/serialization/test_serialized_objects.py
index 0faeed038e648..707595b92ffa2 100644
--- a/tests/serialization/test_serialized_objects.py
+++ b/tests/serialization/test_serialized_objects.py
@@ -43,11 +43,12 @@
 from airflow.operators.empty import EmptyOperator
 from airflow.providers.standard.operators.python import PythonOperator
 from airflow.sdk.definitions.asset import Asset, AssetAlias, AssetUniqueKey
+from airflow.sdk.execution_time.context import AssetAliasEvent, OutletEventAccessor
 from airflow.serialization.enums import DagAttributeTypes as DAT, Encoding
 from airflow.serialization.serialized_objects import BaseSerialization
 from airflow.triggers.base import BaseTrigger
 from airflow.utils import timezone
-from airflow.utils.context import AssetAliasEvent, OutletEventAccessor, OutletEventAccessors
+from airflow.utils.context import OutletEventAccessors
 from airflow.utils.db import LazySelectSequence
 from airflow.utils.operator_resources import Resources
 from airflow.utils.state import DagRunState, State
diff --git a/tests/utils/test_context.py b/tests/utils/test_context.py
deleted file mode 100644
index 0046ca33cc4da..0000000000000
--- a/tests/utils/test_context.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from __future__ import annotations
-
-import pytest
-
-from airflow.models.asset import AssetActive, AssetAliasModel, AssetModel
-from airflow.sdk.definitions.asset import Asset, AssetAlias, AssetAliasUniqueKey, AssetUniqueKey
-from airflow.utils.context import AssetAliasEvent, OutletEventAccessor, OutletEventAccessors
-
-
-class TestOutletEventAccessor:
-    @pytest.mark.parametrize(
-        "key, asset_alias_events",
-        (
-            (AssetUniqueKey.from_asset(Asset("test_uri")), []),
-            (
-                AssetAliasUniqueKey.from_asset_alias(AssetAlias("test_alias")),
-                [
-                    AssetAliasEvent(
-                        source_alias_name="test_alias",
-                        dest_asset_key=AssetUniqueKey(uri="test_uri", name="test_uri"),
-                        extra={},
-                    )
-                ],
-            ),
-        ),
-    )
-    @pytest.mark.db_test
-    def test_add(self, key, asset_alias_events, session):
-        asset = Asset("test_uri")
-        session.add_all([AssetModel.from_public(asset), AssetActive.for_asset(asset)])
-        session.flush()
-
-        outlet_event_accessor = OutletEventAccessor(key=key, extra={})
-        outlet_event_accessor.add(asset)
-        assert outlet_event_accessor.asset_alias_events == asset_alias_events
-
-    @pytest.mark.db_test
-    @pytest.mark.parametrize(
-        "key, asset_alias_events",
-        (
-            (AssetUniqueKey.from_asset(Asset("test_uri")), []),
-            (
-                AssetAliasUniqueKey.from_asset_alias(AssetAlias("test_alias")),
-                [
-                    AssetAliasEvent(
-                        source_alias_name="test_alias",
-                        dest_asset_key=AssetUniqueKey(name="test-asset", uri="test://asset-uri/"),
-                        extra={},
-                    )
-                ],
-            ),
-        ),
-    )
-    def test_add_with_db(self, key, asset_alias_events, session):
-        asset = Asset(uri="test://asset-uri", name="test-asset")
-        asm = AssetModel.from_public(asset)
-        aam = AssetAliasModel(name="test_alias")
-        session.add_all([asm, aam, AssetActive.for_asset(asset)])
-        session.flush()
-
-        outlet_event_accessor = OutletEventAccessor(key=key, extra={"not": ""})
-        outlet_event_accessor.add(asset, extra={})
-        assert outlet_event_accessor.asset_alias_events == asset_alias_events
-
-
-class TestOutletEventAccessors:
-    @pytest.mark.parametrize(
-        "access_key, internal_key",
-        (
-            (Asset("test"), AssetUniqueKey.from_asset(Asset("test"))),
-            (
-                Asset(name="test", uri="test://asset"),
-                AssetUniqueKey.from_asset(Asset(name="test", uri="test://asset")),
-            ),
-            (AssetAlias("test_alias"), AssetAliasUniqueKey.from_asset_alias(AssetAlias("test_alias"))),
-        ),
-    )
-    def test___get_item__dict_key_not_exists(self, access_key, internal_key):
-        outlet_event_accessors = OutletEventAccessors()
-        assert len(outlet_event_accessors) == 0
-        outlet_event_accessor = outlet_event_accessors[access_key]
-        assert len(outlet_event_accessors) == 1
-        assert outlet_event_accessor.key == internal_key
-        assert outlet_event_accessor.extra == {}