This is an automated email from the ASF dual-hosted git repository.
pierrejeambrun pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new b3f91071843 Add the "is backfillable" property for Dags (#64644)
b3f91071843 is described below
commit b3f9107184329aaf87459002a621f2d6a5bc6861
Author: Dev-iL <[email protected]>
AuthorDate: Tue Apr 14 17:16:31 2026 +0300
Add the "is backfillable" property for Dags (#64644)
---
airflow-core/docs/migrations-ref.rst | 4 +-
.../api_fastapi/core_api/datamodels/dags.py | 12 +++
.../api_fastapi/core_api/openapi/_private_ui.yaml | 10 +++
.../core_api/openapi/v2-rest-api-generated.yaml | 20 +++++
.../core_api/routes/public/backfills.py | 11 ++-
.../src/airflow/cli/commands/dag_command.py | 4 +
.../src/airflow/dag_processing/collection.py | 1 +
.../0111_3_3_0_add_timetable_periodic_to_dag.py | 51 +++++++++++
airflow-core/src/airflow/models/backfill.py | 35 +++++---
airflow-core/src/airflow/models/dag.py | 2 +
.../airflow/ui/openapi-gen/requests/schemas.gen.ts | 36 +++++++-
.../airflow/ui/openapi-gen/requests/types.gen.ts | 15 ++++
.../ui/public/i18n/locales/ar/components.json | 1 -
.../ui/public/i18n/locales/ca/components.json | 1 -
.../ui/public/i18n/locales/de/components.json | 1 -
.../ui/public/i18n/locales/el/components.json | 1 -
.../ui/public/i18n/locales/en/components.json | 2 +-
.../ui/public/i18n/locales/es/components.json | 1 -
.../ui/public/i18n/locales/fr/components.json | 1 -
.../ui/public/i18n/locales/he/components.json | 1 -
.../ui/public/i18n/locales/hi/components.json | 1 -
.../ui/public/i18n/locales/hu/components.json | 1 -
.../ui/public/i18n/locales/it/components.json | 1 -
.../ui/public/i18n/locales/ja/components.json | 1 -
.../ui/public/i18n/locales/ko/components.json | 1 -
.../ui/public/i18n/locales/nl/components.json | 1 -
.../ui/public/i18n/locales/pl/components.json | 1 -
.../ui/public/i18n/locales/pt/components.json | 1 -
.../ui/public/i18n/locales/ru/components.json | 1 -
.../ui/public/i18n/locales/th/components.json | 1 -
.../ui/public/i18n/locales/tr/components.json | 1 -
.../ui/public/i18n/locales/zh-CN/components.json | 1 -
.../ui/public/i18n/locales/zh-TW/components.json | 1 -
.../src/components/TriggerDag/TriggerDAGModal.tsx | 11 ++-
.../airflow/ui/src/pages/DagsList/DagCard.test.tsx | 2 +
airflow-core/src/airflow/utils/db.py | 2 +-
.../api_fastapi/core_api/datamodels/__init__.py | 16 ++++
.../api_fastapi/core_api/datamodels/test_dags.py | 99 ++++++++++++++++++++++
.../core_api/routes/public/test_backfills.py | 4 +-
.../core_api/routes/public/test_dags.py | 6 ++
.../tests/unit/cli/commands/test_dag_command.py | 35 +++++++-
airflow-core/tests/unit/models/test_backfill.py | 60 +++++++++++++
.../src/airflowctl/api/datamodels/generated.py | 8 ++
.../tests/airflow_ctl/api/test_operations.py | 4 +
.../airflow_ctl/ctl/commands/test_dag_command.py | 4 +
45 files changed, 427 insertions(+), 47 deletions(-)
diff --git a/airflow-core/docs/migrations-ref.rst
b/airflow-core/docs/migrations-ref.rst
index eecc8056b14..e54009b856e 100644
--- a/airflow-core/docs/migrations-ref.rst
+++ b/airflow-core/docs/migrations-ref.rst
@@ -39,7 +39,9 @@ Here's the list of all the Database Migrations that are
executed via when you run
+-------------------------+------------------+-------------------+--------------------------------------------------------------+
| Revision ID | Revises ID | Airflow Version | Description
|
+=========================+==================+===================+==============================================================+
-| ``a4c2d171ae18`` (head) | ``1d6611b6ab7c`` | ``3.3.0`` | Add
dag_result to XComModel. |
+| ``9fabad868fdb`` (head) | ``a4c2d171ae18`` | ``3.3.0`` | Add
timetable_periodic to DagModel. |
++-------------------------+------------------+-------------------+--------------------------------------------------------------+
+| ``a4c2d171ae18`` | ``1d6611b6ab7c`` | ``3.3.0`` | Add
dag_result to XComModel. |
+-------------------------+------------------+-------------------+--------------------------------------------------------------+
| ``1d6611b6ab7c`` | ``888b59e02a5b`` | ``3.2.0`` | Add
bundle_name to callback table. |
+-------------------------+------------------+-------------------+--------------------------------------------------------------+
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dags.py
b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dags.py
index 333349ad5e1..0c4bc1329eb 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dags.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dags.py
@@ -89,6 +89,7 @@ class DAGResponse(BaseModel):
timetable_summary: str | None
timetable_description: str | None
timetable_partitioned: bool
+ timetable_periodic: bool
tags: list[DagTagResponse]
max_active_tasks: int
max_active_runs: int | None
@@ -128,6 +129,17 @@ class DAGResponse(BaseModel):
return None
return str(tts)
+ # Mypy issue https://github.com/python/mypy/issues/1362
+ @computed_field # type: ignore[prop-decorator]
+ @property
+ def is_backfillable(self) -> bool:
+ """Whether this DAG's schedule supports backfilling."""
+ if not self.timetable_periodic:
+ return False
+ if self.allowed_run_types is not None and DagRunType.BACKFILL_JOB not
in self.allowed_run_types:
+ return False
+ return True
+
# Mypy issue https://github.com/python/mypy/issues/1362
@computed_field # type: ignore[prop-decorator]
@property
diff --git
a/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml
b/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml
index e3bce4f8f15..6af9088fda3 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml
+++ b/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml
@@ -2051,6 +2051,9 @@ components:
timetable_partitioned:
type: boolean
title: Timetable Partitioned
+ timetable_periodic:
+ type: boolean
+ title: Timetable Periodic
tags:
items:
$ref: '#/components/schemas/DagTagResponse'
@@ -2128,6 +2131,11 @@ components:
is_favorite:
type: boolean
title: Is Favorite
+ is_backfillable:
+ type: boolean
+ title: Is Backfillable
+ description: Whether this DAG's schedule supports backfilling.
+ readOnly: true
file_token:
type: string
title: File Token
@@ -2150,6 +2158,7 @@ components:
- timetable_summary
- timetable_description
- timetable_partitioned
+ - timetable_periodic
- tags
- max_active_tasks
- max_active_runs
@@ -2166,6 +2175,7 @@ components:
- latest_dag_runs
- pending_actions
- is_favorite
+ - is_backfillable
- file_token
title: DAGWithLatestDagRunsResponse
description: DAG with latest dag runs response serializer.
diff --git
a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml
b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml
index bd8e6aa62ba..0f98b70d39b 100644
---
a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml
+++
b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml
@@ -10244,6 +10244,9 @@ components:
timetable_partitioned:
type: boolean
title: Timetable Partitioned
+ timetable_periodic:
+ type: boolean
+ title: Timetable Periodic
tags:
items:
$ref: '#/components/schemas/DagTagResponse'
@@ -10387,6 +10390,11 @@ components:
type: integer
title: Active Runs Count
default: 0
+ is_backfillable:
+ type: boolean
+ title: Is Backfillable
+ description: Whether this DAG's schedule supports backfilling.
+ readOnly: true
file_token:
type: string
title: File Token
@@ -10424,6 +10432,7 @@ components:
- timetable_summary
- timetable_description
- timetable_partitioned
+ - timetable_periodic
- tags
- max_active_tasks
- max_active_runs
@@ -10449,6 +10458,7 @@ components:
- timezone
- last_parsed
- default_args
+ - is_backfillable
- file_token
- concurrency
- latest_dag_version
@@ -10532,6 +10542,9 @@ components:
timetable_partitioned:
type: boolean
title: Timetable Partitioned
+ timetable_periodic:
+ type: boolean
+ title: Timetable Periodic
tags:
items:
$ref: '#/components/schemas/DagTagResponse'
@@ -10590,6 +10603,11 @@ components:
type: string
type: array
title: Owners
+ is_backfillable:
+ type: boolean
+ title: Is Backfillable
+ description: Whether this DAG's schedule supports backfilling.
+ readOnly: true
file_token:
type: string
title: File Token
@@ -10612,6 +10630,7 @@ components:
- timetable_summary
- timetable_description
- timetable_partitioned
+ - timetable_periodic
- tags
- max_active_tasks
- max_active_runs
@@ -10624,6 +10643,7 @@ components:
- next_dagrun_run_after
- allowed_run_types
- owners
+ - is_backfillable
- file_token
title: DAGResponse
description: DAG serializer for responses.
diff --git
a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/backfills.py
b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/backfills.py
index c1e90ab0fa6..a520d243e18 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/backfills.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/backfills.py
@@ -49,7 +49,7 @@ from airflow.models.backfill import (
AlreadyRunningBackfill,
Backfill,
BackfillDagRun,
- DagNoScheduleException,
+ DagNonPeriodicScheduleException,
InvalidBackfillDate,
InvalidBackfillDirection,
InvalidReprocessBehavior,
@@ -262,7 +262,7 @@ def create_backfill(
except (
InvalidReprocessBehavior,
InvalidBackfillDirection,
- DagNoScheduleException,
+ DagNonPeriodicScheduleException,
InvalidBackfillDate,
) as e:
raise RequestValidationError(str(e))
@@ -305,11 +305,16 @@ def create_backfill_dry_run(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Could not find dag {body.dag_id}",
)
+ except DagRunTypeNotAllowed as e:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail=str(e),
+ )
except (
InvalidReprocessBehavior,
InvalidBackfillDirection,
- DagNoScheduleException,
+ DagNonPeriodicScheduleException,
InvalidBackfillDate,
) as e:
raise RequestValidationError(str(e))
diff --git a/airflow-core/src/airflow/cli/commands/dag_command.py
b/airflow-core/src/airflow/cli/commands/dag_command.py
index 6572f467ff3..d0162a49fc5 100644
--- a/airflow-core/src/airflow/cli/commands/dag_command.py
+++ b/airflow-core/src/airflow/cli/commands/dag_command.py
@@ -53,6 +53,7 @@ from airflow.utils.platform import getuser
from airflow.utils.providers_configuration_loader import
providers_configuration_loaded
from airflow.utils.session import NEW_SESSION, create_session, provide_session
from airflow.utils.state import DagRunState
+from airflow.utils.types import DagRunType
if TYPE_CHECKING:
from collections.abc import Iterable, Iterator
@@ -262,6 +263,7 @@ def _get_dagbag_dag_details(dag: DAG) -> dict:
"timetable_summary": core_timetable.summary,
"timetable_description": core_timetable.description,
"timetable_partitioned": core_timetable.partitioned,
+ "timetable_periodic": core_timetable.periodic,
"tags": dag.tags,
"max_active_tasks": dag.max_active_tasks,
"max_active_runs": dag.max_active_runs,
@@ -275,6 +277,8 @@ def _get_dagbag_dag_details(dag: DAG) -> dict:
"next_dagrun_logical_date": None,
"next_dagrun_run_after": None,
"allowed_run_types": dag.allowed_run_types,
+ "is_backfillable": core_timetable.periodic
+ and (dag.allowed_run_types is None or DagRunType.BACKFILL_JOB in
dag.allowed_run_types),
}
diff --git a/airflow-core/src/airflow/dag_processing/collection.py
b/airflow-core/src/airflow/dag_processing/collection.py
index 06e4900d816..4df0b589c1f 100644
--- a/airflow-core/src/airflow/dag_processing/collection.py
+++ b/airflow-core/src/airflow/dag_processing/collection.py
@@ -621,6 +621,7 @@ class DagModelOperation(NamedTuple):
dm.timetable_summary = dag.timetable.summary
dm.timetable_description = dag.timetable.description
dm.timetable_partitioned = dag.timetable.partitioned
+ dm.timetable_periodic = dag.timetable.periodic
dm.fail_fast = dag.fail_fast if dag.fail_fast is not None else
False
allowed_types = dag.allowed_run_types
diff --git
a/airflow-core/src/airflow/migrations/versions/0111_3_3_0_add_timetable_periodic_to_dag.py
b/airflow-core/src/airflow/migrations/versions/0111_3_3_0_add_timetable_periodic_to_dag.py
new file mode 100644
index 00000000000..a01a9e80460
--- /dev/null
+++
b/airflow-core/src/airflow/migrations/versions/0111_3_3_0_add_timetable_periodic_to_dag.py
@@ -0,0 +1,51 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+Add timetable_periodic to DagModel.
+
+Revision ID: 9fabad868fdb
+Revises: a4c2d171ae18
+Create Date: 2026-04-09 10:00:00.000000
+
+"""
+
+from __future__ import annotations
+
+import sqlalchemy as sa
+from alembic import op
+
+revision = "9fabad868fdb"
+down_revision = "a4c2d171ae18"
+branch_labels = None
+depends_on = None
+airflow_version = "3.3.0"
+
+
+def upgrade():
+ """Add timetable_periodic column to DagModel."""
+ with op.batch_alter_table("dag", schema=None) as batch_op:
+ batch_op.add_column(
+ sa.Column("timetable_periodic", sa.Boolean, nullable=False,
server_default="0"),
+ )
+
+
+def downgrade():
+ """Remove timetable_periodic column from DagModel."""
+ with op.batch_alter_table("dag", schema=None) as batch_op:
+ batch_op.drop_column("timetable_periodic")
diff --git a/airflow-core/src/airflow/models/backfill.py
b/airflow-core/src/airflow/models/backfill.py
index bb54faec615..86dbe1713f7 100644
--- a/airflow-core/src/airflow/models/backfill.py
+++ b/airflow-core/src/airflow/models/backfill.py
@@ -68,9 +68,16 @@ class AlreadyRunningBackfill(AirflowException):
"""
-class DagNoScheduleException(AirflowException):
+class DagNonPeriodicScheduleException(AirflowException):
"""
- Raised when attempting to create backfill for a Dag with no schedule.
+ Raised when attempting to backfill a Dag whose schedule is fundamentally
incompatible with backfills.
+
+ This covers the following timetable types:
+ - NullTimetable
+ - OnceTimetable
+ - ContinuousTimetable
+ - AssetTriggeredTimetable
+ - PartitionedAssetTimetable
:meta private:
"""
@@ -275,20 +282,21 @@ def _do_dry_run(
reprocess_behavior: ReprocessBehavior,
session: Session,
) -> Iterable[DagRunInfo]:
- from airflow.models import DagModel
from airflow.models.serialized_dag import SerializedDagModel
serdag =
session.scalar(SerializedDagModel.latest_item_select_object(dag_id))
if not serdag:
raise DagNotFound(f"Could not find Dag {dag_id}")
dag = serdag.dag
- _validate_backfill_params(dag, reverse, from_date, to_date,
reprocess_behavior)
- no_schedule = session.scalar(
- select(func.count()).where(DagModel.timetable_summary == "None",
DagModel.dag_id == dag_id)
- )
- if no_schedule:
- raise DagNoScheduleException(f"{dag_id} has no schedule")
+ if not dag.timetable.periodic:
+ raise DagNonPeriodicScheduleException(
+ f"{dag_id} has a non-periodic schedule that does not support
backfills"
+ )
+ if dag.allowed_run_types is not None and DagRunType.BACKFILL_JOB not in
dag.allowed_run_types:
+ raise DagRunTypeNotAllowed(f"Dag with dag_id: '{dag_id}' does not
allow backfill runs")
+
+ _validate_backfill_params(dag, reverse, from_date, to_date,
reprocess_behavior)
dagrun_info_list = _get_info_list(
dag=dag,
@@ -571,8 +579,12 @@ def _create_backfill(
and DagRunType.BACKFILL_JOB not in dag_model.allowed_run_types
):
raise DagRunTypeNotAllowed(f"Dag with dag_id: '{dag_id}' does
not allow backfill runs")
- if dag_model.timetable_summary == "None":
- raise DagNoScheduleException(f"{dag_id} has no schedule")
+
+ dag = serdag.dag
+ if not dag.timetable.periodic:
+ raise DagNonPeriodicScheduleException(
+ f"{dag_id} has a non-periodic schedule that does not support
backfills"
+ )
num_active = session.scalar(
select(func.count()).where(
@@ -588,7 +600,6 @@ def _create_backfill(
f"There can be only one running backfill per Dag."
)
- dag = serdag.dag
_validate_backfill_params(dag, reverse, from_date, to_date,
reprocess_behavior)
br = Backfill(
diff --git a/airflow-core/src/airflow/models/dag.py
b/airflow-core/src/airflow/models/dag.py
index eff187c633f..ca84b7047b4 100644
--- a/airflow-core/src/airflow/models/dag.py
+++ b/airflow-core/src/airflow/models/dag.py
@@ -386,6 +386,8 @@ class DagModel(Base):
timetable_description: Mapped[str | None] = mapped_column(String(1000),
nullable=True)
# Whether the timetable do partitioning.
timetable_partitioned: Mapped[bool] = mapped_column(Boolean,
nullable=False, server_default="0")
+ # Whether the timetable is periodic (supports backfilling).
+ timetable_periodic: Mapped[bool] = mapped_column(Boolean, nullable=False,
server_default="0")
# Asset expression based on asset triggers
asset_expression: Mapped[dict[str, Any] | None] = mapped_column(sa.JSON(),
nullable=True)
# DAG deadline information
diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts
b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts
index 2af71aa482e..3f861d00621 100644
--- a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts
+++ b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts
@@ -1904,6 +1904,10 @@ export const $DAGDetailsResponse = {
type: 'boolean',
title: 'Timetable Partitioned'
},
+ timetable_periodic: {
+ type: 'boolean',
+ title: 'Timetable Periodic'
+ },
tags: {
items: {
'$ref': '#/components/schemas/DagTagResponse'
@@ -2170,6 +2174,12 @@ export const $DAGDetailsResponse = {
title: 'Active Runs Count',
default: 0
},
+ is_backfillable: {
+ type: 'boolean',
+ title: 'Is Backfillable',
+ description: "Whether this DAG's schedule supports backfilling.",
+ readOnly: true
+ },
file_token: {
type: 'string',
title: 'File Token',
@@ -2199,7 +2209,7 @@ Deprecated: Use max_active_tasks instead.`,
}
},
type: 'object',
- required: ['dag_id', 'dag_display_name', 'is_paused', 'is_stale',
'last_parsed_time', 'last_parse_duration', 'last_expired', 'bundle_name',
'bundle_version', 'relative_fileloc', 'fileloc', 'description',
'timetable_summary', 'timetable_description', 'timetable_partitioned', 'tags',
'max_active_tasks', 'max_active_runs', 'max_consecutive_failed_dag_runs',
'has_task_concurrency_limits', 'has_import_errors', 'next_dagrun_logical_date',
'next_dagrun_data_interval_start', 'next_dagrun_dat [...]
+ required: ['dag_id', 'dag_display_name', 'is_paused', 'is_stale',
'last_parsed_time', 'last_parse_duration', 'last_expired', 'bundle_name',
'bundle_version', 'relative_fileloc', 'fileloc', 'description',
'timetable_summary', 'timetable_description', 'timetable_partitioned',
'timetable_periodic', 'tags', 'max_active_tasks', 'max_active_runs',
'max_consecutive_failed_dag_runs', 'has_task_concurrency_limits',
'has_import_errors', 'next_dagrun_logical_date', 'next_dagrun_data_interval_st
[...]
title: 'DAGDetailsResponse',
description: 'Specific serializer for DAG Details responses.'
} as const;
@@ -2345,6 +2355,10 @@ export const $DAGResponse = {
type: 'boolean',
title: 'Timetable Partitioned'
},
+ timetable_periodic: {
+ type: 'boolean',
+ title: 'Timetable Periodic'
+ },
tags: {
items: {
'$ref': '#/components/schemas/DagTagResponse'
@@ -2448,6 +2462,12 @@ export const $DAGResponse = {
type: 'array',
title: 'Owners'
},
+ is_backfillable: {
+ type: 'boolean',
+ title: 'Is Backfillable',
+ description: "Whether this DAG's schedule supports backfilling.",
+ readOnly: true
+ },
file_token: {
type: 'string',
title: 'File Token',
@@ -2456,7 +2476,7 @@ export const $DAGResponse = {
}
},
type: 'object',
- required: ['dag_id', 'dag_display_name', 'is_paused', 'is_stale',
'last_parsed_time', 'last_parse_duration', 'last_expired', 'bundle_name',
'bundle_version', 'relative_fileloc', 'fileloc', 'description',
'timetable_summary', 'timetable_description', 'timetable_partitioned', 'tags',
'max_active_tasks', 'max_active_runs', 'max_consecutive_failed_dag_runs',
'has_task_concurrency_limits', 'has_import_errors', 'next_dagrun_logical_date',
'next_dagrun_data_interval_start', 'next_dagrun_dat [...]
+ required: ['dag_id', 'dag_display_name', 'is_paused', 'is_stale',
'last_parsed_time', 'last_parse_duration', 'last_expired', 'bundle_name',
'bundle_version', 'relative_fileloc', 'fileloc', 'description',
'timetable_summary', 'timetable_description', 'timetable_partitioned',
'timetable_periodic', 'tags', 'max_active_tasks', 'max_active_runs',
'max_consecutive_failed_dag_runs', 'has_task_concurrency_limits',
'has_import_errors', 'next_dagrun_logical_date', 'next_dagrun_data_interval_st
[...]
title: 'DAGResponse',
description: 'DAG serializer for responses.'
} as const;
@@ -7787,6 +7807,10 @@ export const $DAGWithLatestDagRunsResponse = {
type: 'boolean',
title: 'Timetable Partitioned'
},
+ timetable_periodic: {
+ type: 'boolean',
+ title: 'Timetable Periodic'
+ },
tags: {
items: {
'$ref': '#/components/schemas/DagTagResponse'
@@ -7920,6 +7944,12 @@ export const $DAGWithLatestDagRunsResponse = {
type: 'boolean',
title: 'Is Favorite'
},
+ is_backfillable: {
+ type: 'boolean',
+ title: 'Is Backfillable',
+ description: "Whether this DAG's schedule supports backfilling.",
+ readOnly: true
+ },
file_token: {
type: 'string',
title: 'File Token',
@@ -7928,7 +7958,7 @@ export const $DAGWithLatestDagRunsResponse = {
}
},
type: 'object',
- required: ['dag_id', 'dag_display_name', 'is_paused', 'is_stale',
'last_parsed_time', 'last_parse_duration', 'last_expired', 'bundle_name',
'bundle_version', 'relative_fileloc', 'fileloc', 'description',
'timetable_summary', 'timetable_description', 'timetable_partitioned', 'tags',
'max_active_tasks', 'max_active_runs', 'max_consecutive_failed_dag_runs',
'has_task_concurrency_limits', 'has_import_errors', 'next_dagrun_logical_date',
'next_dagrun_data_interval_start', 'next_dagrun_dat [...]
+ required: ['dag_id', 'dag_display_name', 'is_paused', 'is_stale',
'last_parsed_time', 'last_parse_duration', 'last_expired', 'bundle_name',
'bundle_version', 'relative_fileloc', 'fileloc', 'description',
'timetable_summary', 'timetable_description', 'timetable_partitioned',
'timetable_periodic', 'tags', 'max_active_tasks', 'max_active_runs',
'max_consecutive_failed_dag_runs', 'has_task_concurrency_limits',
'has_import_errors', 'next_dagrun_logical_date', 'next_dagrun_data_interval_st
[...]
title: 'DAGWithLatestDagRunsResponse',
description: 'DAG with latest dag runs response serializer.'
} as const;
diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts
b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts
index 5420e3e7505..adf7d7a0cc8 100644
--- a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts
+++ b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts
@@ -542,6 +542,7 @@ export type DAGDetailsResponse = {
timetable_summary: string | null;
timetable_description: string | null;
timetable_partitioned: boolean;
+ timetable_periodic: boolean;
tags: Array<DagTagResponse>;
max_active_tasks: number;
max_active_runs: number | null;
@@ -578,6 +579,10 @@ export type DAGDetailsResponse = {
} | null;
is_favorite?: boolean;
active_runs_count?: number;
+ /**
+ * Whether this DAG's schedule supports backfilling.
+ */
+ readonly is_backfillable: boolean;
/**
* Return file token.
*/
@@ -621,6 +626,7 @@ export type DAGResponse = {
timetable_summary: string | null;
timetable_description: string | null;
timetable_partitioned: boolean;
+ timetable_periodic: boolean;
tags: Array<DagTagResponse>;
max_active_tasks: number;
max_active_runs: number | null;
@@ -633,6 +639,10 @@ export type DAGResponse = {
next_dagrun_run_after: string | null;
allowed_run_types: Array<DagRunType> | null;
owners: Array<(string)>;
+ /**
+ * Whether this DAG's schedule supports backfilling.
+ */
+ readonly is_backfillable: boolean;
/**
* Return file token.
*/
@@ -1903,6 +1913,7 @@ export type DAGWithLatestDagRunsResponse = {
timetable_summary: string | null;
timetable_description: string | null;
timetable_partitioned: boolean;
+ timetable_periodic: boolean;
tags: Array<DagTagResponse>;
max_active_tasks: number;
max_active_runs: number | null;
@@ -1921,6 +1932,10 @@ export type DAGWithLatestDagRunsResponse = {
latest_dag_runs: Array<DAGRunLightResponse>;
pending_actions: Array<HITLDetail>;
is_favorite: boolean;
+ /**
+ * Whether this DAG's schedule supports backfilling.
+ */
+ readonly is_backfillable: boolean;
/**
* Return file token.
*/
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ar/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/ar/components.json
index 11aa3dbd783..c6ceca45e87 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/ar/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/ar/components.json
@@ -26,7 +26,6 @@
"title": "تم إنشاء التعبئة الرجعية"
}
},
- "tooltip": "التعبئة الرجعية تتطلب جدول زمني",
"unpause": "إلغاء إيقاف {{dag_display_name}} عند التشغيل",
"validation": {
"datesRequired": "يجب توفير بيانات كل من تاريخ بدء فترة وتاريخ
الانتهاء.",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ca/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/ca/components.json
index dfae7c37a3b..b6c885bff72 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/ca/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/ca/components.json
@@ -22,7 +22,6 @@
"title": "Reompliment generat"
}
},
- "tooltip": "El reompliment necessita una programació",
"unpause": "Reactivar {{dag_display_name}} a l'executar",
"validation": {
"datesRequired": "S'ha de proporcionar tant la data d'inici com la data
de finalització.",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/de/components.json
index df6199b19e5..2faeef0602e 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/de/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/components.json
@@ -22,7 +22,6 @@
"title": "Auffüllung gestartet"
}
},
- "tooltip": "Auffüllung benötigt eine Zeitplanung",
"unpause": "Dag {{dag_display_name}} beim Start der Auffüllung aktiv
schalten",
"validation": {
"datesRequired": "Sowohl Start- als auch Enddatum müssen angegeben
werden.",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/el/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/el/components.json
index 36399b813fd..aae983deabb 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/el/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/el/components.json
@@ -21,7 +21,6 @@
"title": "Το Backfill δημιουργήθηκε"
}
},
- "tooltip": "Το Backfill απαιτεί πρόγραμμα",
"unpause": "Αναίρεση παύσης του {{dag_display_name}} κατά την
ενεργοποίηση",
"validation": {
"datesRequired": "Πρέπει να δοθούν και η ημερομηνία έναρξης και λήξης
του διαστήματος δεδομένων.",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/en/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/en/components.json
index 67e348ca11e..af84e92d174 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/en/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/en/components.json
@@ -13,6 +13,7 @@
"permissionDenied": "Dry Run Failed: User does not have permission to
create backfills.",
"reprocessBehavior": "Reprocess Behavior",
"run": "Run Backfill",
+ "scheduleNotBackfillable": "This Dag's schedule does not support
backfills",
"selectDescription": "Run this Dag for a range of dates",
"selectLabel": "Backfill",
"title": "Run Backfill",
@@ -22,7 +23,6 @@
"title": "Backfill generated"
}
},
- "tooltip": "Backfill requires a schedule",
"unpause": "Unpause {{dag_display_name}} on trigger",
"validation": {
"datesRequired": "Both Data Interval Start Date and End Date must be
provided.",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/es/components.json
index 89d241bc447..de515551a73 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/es/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/components.json
@@ -22,7 +22,6 @@
"title": "Backfill generado"
}
},
- "tooltip": "Backfill requiere una programación",
"unpause": "Reanudar {{dag_display_name}} al activarse",
"validation": {
"datesRequired": "Ambos intervalos de Fecha Inicial y Fecha Final deben
ser proporcionados.",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/fr/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/fr/components.json
index 93645eb7d71..1f9d2e6e8b0 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/fr/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/fr/components.json
@@ -22,7 +22,6 @@
"title": "Rattrapage généré"
}
},
- "tooltip": "Le rattrapage nécessite une planification",
"unpause": "Réactiver {{dag_display_name}} lors du déclenchement",
"validation": {
"datesRequired": "Les dates de début et de fin de l'intervalle de
données doivent être renseignées.",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/he/components.json
index 23f23faf2aa..d7619be736b 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/he/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/components.json
@@ -23,7 +23,6 @@
"title": "Backfill נוצר"
}
},
- "tooltip": "Backfill דורש לוח זמנים",
"unpause": "הפעל את {{dag_display_name}} בעת הריצה",
"validation": {
"datesRequired": "יש להזין גם תאריך התחלה וגם תאריך סיום של מרווח
הנתונים.",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/hi/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/hi/components.json
index 2cd47777d8e..e50acde366a 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/hi/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/hi/components.json
@@ -22,7 +22,6 @@
"title": "बैकफ़िल जेनरेट किया गया"
}
},
- "tooltip": "बैकफ़िल के लिए एक शेड्यूल आवश्यक है",
"unpause": "ट्रिगर पर {{dag_display_name}} को अनपॉज़ करें",
"validation": {
"datesRequired": "डेटा अंतराल प्रारंभ तिथि और समाप्ति तिथि दोनों प्रदान
की जानी चाहिए।",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/hu/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/hu/components.json
index c116bbfb889..92a7ea6dac9 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/hu/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/hu/components.json
@@ -22,7 +22,6 @@
"title": "Backfill létrehozva"
}
},
- "tooltip": "A backfill futtatáshoz ütemezés szükséges",
"unpause": "{{dag_display_name}} folytatása indításkor",
"validation": {
"datesRequired": "A kezdő és a befejező dátumot is meg kell adni.",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/it/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/it/components.json
index 69db472513a..6490aa57f92 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/it/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/it/components.json
@@ -23,7 +23,6 @@
"title": "Backfill generato"
}
},
- "tooltip": "Backfill richiede un programma",
"unpause": "Sospendi {{dag_display_name}} al trigger",
"validation": {
"datesRequired": "Devi fornire sia la Data di Inizio che la Data di
Fine.",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ja/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/ja/components.json
index fc202eda899..899f71a489a 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/ja/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/ja/components.json
@@ -21,7 +21,6 @@
"title": "過去分再実行処理を生成しました"
}
},
- "tooltip": "過去分再実行処理には Dag にスケジューリングが必要です",
"unpause": "{{dag_display_name}} がトリガーされると一時停止を解除します",
"validation": {
"datesRequired": "データ範囲の開始日と終了日は必須です",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ko/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/ko/components.json
index f39fdb6a93d..d228b382b4e 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/ko/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/ko/components.json
@@ -22,7 +22,6 @@
"title": "백필 생성됨"
}
},
- "tooltip": "백필에는 일정이 필요합니다.",
"unpause": "트리거 시 {{dag_display_name}} 일시 중지 해제",
"validation": {
"datesRequired": "데이터 구간 시작일과 종료일이 모두 제공되어야 합니다.",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/nl/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/nl/components.json
index cff84c9d82b..19f10e5c14c 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/nl/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/nl/components.json
@@ -22,7 +22,6 @@
"title": "Backfill gegenereerd"
}
},
- "tooltip": "Backfill benodigd een planning",
"unpause": "Hervat {{dag_display_name}} na een trigger",
"validation": {
"datesRequired": "Data interval startdatum en einddatum moeten beide
gegeven zijn.",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/pl/components.json
index c5cd6f930ad..7c47c6fecf6 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/pl/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/components.json
@@ -24,7 +24,6 @@
"title": "Ponowne przetwarzanie uruchomione"
}
},
- "tooltip": "Ponowne przetworzenie wymaga harmonogramu",
"unpause": "Wznów {{dag_display_name}} przy wykonaniu",
"validation": {
"datesRequired": "Należy podać zarówno datę początkową, jak i końcową
interwału danych.",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pt/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/pt/components.json
index 8c40eb51349..c5826842374 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/pt/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/pt/components.json
@@ -24,7 +24,6 @@
"title": "Backfill gerado"
}
},
- "tooltip": "Backfill requer um agendamento",
"unpause": "Despausar {{dag_display_name}} ao acionar",
"validation": {
"datesRequired": "Ambas as datas de início e fim do intervalo devem ser
fornecidas.",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ru/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/ru/components.json
index 5790128ef6c..8178275fb2c 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/ru/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/ru/components.json
@@ -23,7 +23,6 @@
"title": "Заполнение сгенерировано"
}
},
- "tooltip": "Заполнение требует расписания",
"unpause": "Возобновить {{dag_display_name}} при запуске",
"validation": {
"datesRequired": "Необходимо указать как дату начала, так и дату
окончания.",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/th/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/th/components.json
index e696d624939..6d56cc625e4 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/th/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/th/components.json
@@ -21,7 +21,6 @@
"title": "สร้าง Backfill แล้ว"
}
},
- "tooltip": "Backfill ต้องการการกำหนดเวลาทำงาน (schedule)",
"unpause": "ยกเลิกการหยุดพัก {{dag_display_name}} เมื่อทริกเกอร์",
"validation": {
"datesRequired": "จำเป็นต้องระบุวันเริ่มต้นและวันสิ้นสุดทั้งสองวัน",
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/tr/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/tr/components.json
index 55f78719123..57eeb22473e 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/tr/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/tr/components.json
@@ -22,7 +22,6 @@
"title": "Geriye dönük çalıştırma oluşturuldu"
}
},
- "tooltip": "Geriye dönük çalıştırma için bir zamanlama gereklidir",
"unpause": "Tetiklendiğinde {{dag_display_name}} duraklatmasını kaldır",
"validation": {
"datesRequired": "Hem Veri Aralığı Başlangıç Tarihi hem de Bitiş Tarihi
sağlanmalıdır.",
diff --git
a/airflow-core/src/airflow/ui/public/i18n/locales/zh-CN/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/zh-CN/components.json
index a4a06d2ea00..ceb20f95a98 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/zh-CN/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-CN/components.json
@@ -22,7 +22,6 @@
"title": "已触发 Dag 执行"
}
},
- "tooltip": "回填功能需要 Dag 具有调度",
"unpause": "触发时取消暂停 {{dag_display_name}}",
"validation": {
"datesRequired": "必须提供数据区间的开始与结束日期。",
diff --git
a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/components.json
b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/components.json
index 7e9e22e3814..0f84213efb8 100644
--- a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/components.json
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/components.json
@@ -22,7 +22,6 @@
"title": "已觸發 Dag 執行"
}
},
- "tooltip": "回填功能需要 Dag 具有排程",
"unpause": "觸發時取消暫停 {{dag_display_name}}",
"validation": {
"datesRequired": "必須提供資料區間的開始與結束日期。",
diff --git
a/airflow-core/src/airflow/ui/src/components/TriggerDag/TriggerDAGModal.tsx
b/airflow-core/src/airflow/ui/src/components/TriggerDag/TriggerDAGModal.tsx
index 07080ac0d33..242d0f153a0 100644
--- a/airflow-core/src/airflow/ui/src/components/TriggerDag/TriggerDAGModal.tsx
+++ b/airflow-core/src/airflow/ui/src/components/TriggerDag/TriggerDAGModal.tsx
@@ -72,6 +72,7 @@ const TriggerDAGModal: React.FC<TriggerDAGModalProps> = ({
},
);
+ const isBackfillable = dag?.is_backfillable ?? false;
const hasSchedule = dag?.timetable_summary !== null;
const isPartitioned = dag ? dag.timetable_partitioned : false;
const { error, isPending, triggerDagRun } = useTrigger({ dagId,
onSuccessConfirm: onClose });
@@ -120,10 +121,14 @@ const TriggerDAGModal: React.FC<TriggerDAGModalProps> = ({
label={translate("triggerDag.selectLabel")}
value={RunMode.SINGLE}
/>
- <Tooltip content={translate("backfill.tooltip")}
disabled={hasSchedule}>
+ <Tooltip
+ content={translate("backfill.scheduleNotBackfillable")}
+ disabled={isBackfillable}
+ portalled
+ >
<RadioCardItem
description={translate("backfill.selectDescription")}
- disabled={!hasSchedule}
+ disabled={!isBackfillable}
label={translate("backfill.selectLabel")}
value={RunMode.BACKFILL}
/>
@@ -146,7 +151,7 @@ const TriggerDAGModal: React.FC<TriggerDAGModalProps> = ({
prefillConfig={prefillConfig}
/>
) : (
- hasSchedule && dag && <RunBackfillForm dag={dag}
onClose={onClose} />
+ isBackfillable && dag && <RunBackfillForm dag={dag}
onClose={onClose} />
)}
</>
)}
diff --git a/airflow-core/src/airflow/ui/src/pages/DagsList/DagCard.test.tsx
b/airflow-core/src/airflow/ui/src/pages/DagsList/DagCard.test.tsx
index ad0e34b6607..fb47e19d8e3 100644
--- a/airflow-core/src/airflow/ui/src/pages/DagsList/DagCard.test.tsx
+++ b/airflow-core/src/airflow/ui/src/pages/DagsList/DagCard.test.tsx
@@ -67,6 +67,7 @@ const mockDag = {
fileloc: "/files/dags/nested_task_groups.py",
has_import_errors: false,
has_task_concurrency_limits: false,
+ is_backfillable: true,
is_favorite: false,
is_paused: false,
is_stale: false,
@@ -110,6 +111,7 @@ const mockDag = {
tags: [],
timetable_description: "Every minute",
timetable_partitioned: false,
+ timetable_periodic: true,
timetable_summary: "* * * * *",
} satisfies DAGWithLatestDagRunsResponse;
diff --git a/airflow-core/src/airflow/utils/db.py
b/airflow-core/src/airflow/utils/db.py
index 13bbd440327..c16a797b1a2 100644
--- a/airflow-core/src/airflow/utils/db.py
+++ b/airflow-core/src/airflow/utils/db.py
@@ -116,7 +116,7 @@ _REVISION_HEADS_MAP: dict[str, str] = {
"3.1.0": "cc92b33c6709",
"3.1.8": "509b94a1042d",
"3.2.0": "1d6611b6ab7c",
- "3.3.0": "a4c2d171ae18",
+ "3.3.0": "9fabad868fdb",
}
# Prefix used to identify tables holding data moved during migration.
diff --git
a/airflow-core/tests/unit/api_fastapi/core_api/datamodels/__init__.py
b/airflow-core/tests/unit/api_fastapi/core_api/datamodels/__init__.py
new file mode 100644
index 00000000000..13a83393a91
--- /dev/null
+++ b/airflow-core/tests/unit/api_fastapi/core_api/datamodels/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git
a/airflow-core/tests/unit/api_fastapi/core_api/datamodels/test_dags.py
b/airflow-core/tests/unit/api_fastapi/core_api/datamodels/test_dags.py
new file mode 100644
index 00000000000..f325124403f
--- /dev/null
+++ b/airflow-core/tests/unit/api_fastapi/core_api/datamodels/test_dags.py
@@ -0,0 +1,99 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import pytest
+
+from airflow.api_fastapi.core_api.datamodels.dags import DAGResponse
+from airflow.utils.types import DagRunType
+
+
+def _make_dag_response(**overrides) -> DAGResponse:
+ """Create a minimal DAGResponse with sensible defaults."""
+ defaults = {
+ "dag_id": "test_dag",
+ "dag_display_name": "Test DAG",
+ "is_paused": False,
+ "is_stale": False,
+ "last_parsed_time": None,
+ "last_parse_duration": None,
+ "last_expired": None,
+ "bundle_name": "dags-folder",
+ "bundle_version": None,
+ "relative_fileloc": "test.py",
+ "fileloc": "/opt/airflow/dags/test.py",
+ "description": None,
+ "timetable_summary": "0 * * * *",
+ "timetable_description": "At the start of every hour",
+ "timetable_partitioned": False,
+ "timetable_periodic": True,
+ "tags": [],
+ "max_active_tasks": 16,
+ "max_active_runs": 25,
+ "max_consecutive_failed_dag_runs": 0,
+ "has_task_concurrency_limits": False,
+ "has_import_errors": False,
+ "next_dagrun_logical_date": None,
+ "next_dagrun_data_interval_start": None,
+ "next_dagrun_data_interval_end": None,
+ "next_dagrun_run_after": None,
+ "allowed_run_types": None,
+ "owners": "airflow",
+ }
+ defaults.update(overrides)
+ return DAGResponse.model_validate(defaults)
+
+
+class TestIsBackfillable:
+ @pytest.mark.parametrize(
+ "timetable_periodic",
+ [
+ pytest.param(False, id="non-periodic"),
+ ],
+ )
+ def test_non_periodic_not_backfillable(self, timetable_periodic):
+ dag = _make_dag_response(timetable_periodic=timetable_periodic)
+ assert dag.is_backfillable is False
+
+ def test_periodic_backfillable(self):
+ dag = _make_dag_response(timetable_periodic=True)
+ assert dag.is_backfillable is True
+
+ def test_periodic_with_allowed_run_types_none_is_backfillable(self):
+ dag = _make_dag_response(timetable_periodic=True,
allowed_run_types=None)
+ assert dag.is_backfillable is True
+
+ def test_periodic_with_backfill_in_allowed_run_types(self):
+ dag = _make_dag_response(
+ timetable_periodic=True,
+ allowed_run_types=[DagRunType.BACKFILL_JOB, DagRunType.MANUAL],
+ )
+ assert dag.is_backfillable is True
+
+ def test_periodic_with_backfill_excluded_from_allowed_run_types(self):
+ dag = _make_dag_response(
+ timetable_periodic=True,
+ allowed_run_types=[DagRunType.MANUAL],
+ )
+ assert dag.is_backfillable is False
+
+ def test_non_periodic_with_backfill_in_allowed_run_types(self):
+ dag = _make_dag_response(
+ timetable_periodic=False,
+ allowed_run_types=[DagRunType.BACKFILL_JOB, DagRunType.MANUAL],
+ )
+ assert dag.is_backfillable is False
diff --git
a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_backfills.py
b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_backfills.py
index 10bfd0f7817..80495fc9a8d 100644
---
a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_backfills.py
+++
b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_backfills.py
@@ -280,7 +280,7 @@ class TestCreateBackfill(TestBackfillEndpoint):
assert response.json().get("detail") == "Could not find dag
DAG_NOT_EXIST"
def test_no_schedule_dag(self, session, dag_maker, test_client):
- with dag_maker(session=session, dag_id="TEST_DAG_1", schedule="None")
as dag:
+ with dag_maker(session=session, dag_id="TEST_DAG_1", schedule=None) as
dag:
EmptyOperator(task_id="mytask")
session.scalars(select(DagModel)).all()
session.commit()
@@ -303,7 +303,7 @@ class TestCreateBackfill(TestBackfillEndpoint):
json=data,
)
assert response.status_code == 422
- assert response.json().get("detail") == f"{dag.dag_id} has no schedule"
+ assert "has a non-periodic schedule that does not support backfills"
in response.json().get("detail")
@pytest.mark.parametrize(
("repro_act", "repro_exp", "run_backwards", "status_code"),
diff --git
a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_dags.py
b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_dags.py
index 434cd9e07f3..f875392f003 100644
--- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_dags.py
+++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_dags.py
@@ -1016,6 +1016,7 @@ class TestDagDetails(TestDagEndpoint):
"file_token": file_token,
"has_import_errors": False,
"has_task_concurrency_limits": True,
+ "is_backfillable": False,
"is_favorite": False,
"is_paused": False,
"is_paused_upon_creation": None,
@@ -1058,6 +1059,7 @@ class TestDagDetails(TestDagEndpoint):
"template_search_path": None,
"timetable_description": "Never, external triggers only",
"timetable_partitioned": False,
+ "timetable_periodic": False,
"timetable_summary": None,
"timezone": UTC_JSON_REPR,
}
@@ -1115,6 +1117,7 @@ class TestDagDetails(TestDagEndpoint):
"file_token": file_token,
"has_import_errors": False,
"has_task_concurrency_limits": True,
+ "is_backfillable": False,
"is_favorite": False,
"is_stale": False,
"is_paused": False,
@@ -1158,6 +1161,7 @@ class TestDagDetails(TestDagEndpoint):
"timetable_summary": None,
"timetable_description": "Never, external triggers only",
"timetable_partitioned": False,
+ "timetable_periodic": False,
"timezone": UTC_JSON_REPR,
}
assert res_json == expected
@@ -1295,6 +1299,7 @@ class TestGetDag(TestDagEndpoint):
"file_token": file_token,
"has_import_errors": False,
"has_task_concurrency_limits": True,
+ "is_backfillable": False,
"is_paused": False,
"is_stale": False,
"last_expired": None,
@@ -1312,6 +1317,7 @@ class TestGetDag(TestDagEndpoint):
"tags": tags,
"timetable_description": "Never, external triggers only",
"timetable_partitioned": False,
+ "timetable_periodic": False,
"timetable_summary": None,
}
assert res_json == expected
diff --git a/airflow-core/tests/unit/cli/commands/test_dag_command.py
b/airflow-core/tests/unit/cli/commands/test_dag_command.py
index 208fd9bb074..57ae5959810 100644
--- a/airflow-core/tests/unit/cli/commands/test_dag_command.py
+++ b/airflow-core/tests/unit/cli/commands/test_dag_command.py
@@ -40,7 +40,7 @@ from airflow.models import DagModel, DagRun
from airflow.models.dagbag import DBDagBag
from airflow.models.serialized_dag import SerializedDagModel
from airflow.providers.standard.triggers.temporal import DateTimeTrigger,
TimeDeltaTrigger
-from airflow.sdk import BaseOperator, task
+from airflow.sdk import DAG, BaseOperator, task
from airflow.sdk.definitions.dag import _run_inline_trigger
from airflow.triggers.base import TriggerEvent
from airflow.utils.session import create_session
@@ -1070,3 +1070,36 @@ class TestCliDagsReserialize:
serialized_dag_ids =
set(session.execute(select(SerializedDagModel.dag_id)).scalars())
assert serialized_dag_ids == {"test_example_bash_operator",
"test_sensor"}
+
+
+class TestDagDetailsIsBackfillable:
+ """Tests for the is_backfillable computation in _get_dagbag_dag_details."""
+
+ @pytest.mark.parametrize(
+ ("schedule", "allowed_run_types", "expected"),
+ [
+ pytest.param("@daily", None, True, id="periodic-allowed-none"),
+ pytest.param(
+ "@daily",
+ [DagRunType.SCHEDULED, DagRunType.MANUAL,
DagRunType.BACKFILL_JOB],
+ True,
+ id="periodic-backfill-included",
+ ),
+ pytest.param(
+ "@daily",
+ [DagRunType.SCHEDULED, DagRunType.MANUAL],
+ False,
+ id="periodic-backfill-excluded",
+ ),
+ pytest.param(None, None, False, id="non-periodic-null-schedule"),
+ pytest.param("@once", None, False,
id="non-periodic-once-schedule"),
+ ],
+ )
+ def test_is_backfillable(self, schedule, allowed_run_types, expected):
+ dag = DAG(
+ dag_id="test_is_backfillable",
+ schedule=schedule,
+ allowed_run_types=allowed_run_types,
+ )
+ dag_details = dag_command._get_dagbag_dag_details(dag)
+ assert dag_details["is_backfillable"] is expected
diff --git a/airflow-core/tests/unit/models/test_backfill.py
b/airflow-core/tests/unit/models/test_backfill.py
index ad309e50383..d3323aeab35 100644
--- a/airflow-core/tests/unit/models/test_backfill.py
+++ b/airflow-core/tests/unit/models/test_backfill.py
@@ -32,13 +32,16 @@ from airflow.models.backfill import (
Backfill,
BackfillDagRun,
BackfillDagRunExceptionReason,
+ DagNonPeriodicScheduleException,
InvalidBackfillDirection,
InvalidReprocessBehavior,
ReprocessBehavior,
_create_backfill,
+ _do_dry_run,
_get_latest_dag_run_row_query,
)
from airflow.providers.standard.operators.python import PythonOperator
+from airflow.sdk import Asset
from airflow.ti_deps.dep_context import DepContext
from airflow.timetables.base import DagRunInfo
from airflow.utils.state import DagRunState, TaskInstanceState
@@ -593,3 +596,60 @@ def test_get_latest_dag_run_row_partitioned(session:
Session):
dr = session.scalar(stmt)
assert dr is not None
assert dr.start_date == timezone.parse("2026-02-23")
+
+
[email protected](
+ ("schedule", "dag_kwargs"),
+ [
+ pytest.param(None, {}, id="no-schedule"),
+ pytest.param("@once", {}, id="once"),
+ pytest.param("@continuous", {"max_active_runs": 1}, id="continuous"),
+ pytest.param([Asset(uri="test://asset", name="test-asset")], {},
id="asset-scheduled"),
+ ],
+)
+def test_create_backfill_non_periodic_schedule_rejected(schedule, dag_kwargs,
dag_maker, session):
+ with dag_maker(schedule=schedule, **dag_kwargs) as dag:
+ PythonOperator(task_id="hi", python_callable=print)
+ session.commit()
+ with pytest.raises(
+ DagNonPeriodicScheduleException,
+ match="has a non-periodic schedule that does not support backfills",
+ ):
+ _create_backfill(
+ dag_id=dag.dag_id,
+ from_date=pendulum.parse("2021-01-01"),
+ to_date=pendulum.parse("2021-01-05"),
+ max_active_runs=2,
+ reverse=False,
+ triggering_user_name="pytest",
+ dag_run_conf={},
+ )
+
+
[email protected](
+ ("schedule", "dag_kwargs"),
+ [
+ pytest.param(None, {}, id="no-schedule"),
+ pytest.param("@once", {}, id="once"),
+ pytest.param("@continuous", {"max_active_runs": 1}, id="continuous"),
+ pytest.param([Asset(uri="test://asset", name="test-asset")], {},
id="asset-scheduled"),
+ ],
+)
+def test_do_dry_run_non_periodic_schedule_rejected(schedule, dag_kwargs,
dag_maker, session):
+ with dag_maker(schedule=schedule, **dag_kwargs) as dag:
+ PythonOperator(task_id="hi", python_callable=print)
+ session.commit()
+ with pytest.raises(
+ DagNonPeriodicScheduleException,
+ match="has a non-periodic schedule that does not support backfills",
+ ):
+ list(
+ _do_dry_run(
+ dag_id=dag.dag_id,
+ from_date=pendulum.parse("2021-01-01"),
+ to_date=pendulum.parse("2021-01-05"),
+ reverse=False,
+ reprocess_behavior=ReprocessBehavior.NONE,
+ session=session,
+ )
+ )
diff --git a/airflow-ctl/src/airflowctl/api/datamodels/generated.py
b/airflow-ctl/src/airflowctl/api/datamodels/generated.py
index a0b32b7fbdd..6a182fdb206 100644
--- a/airflow-ctl/src/airflowctl/api/datamodels/generated.py
+++ b/airflow-ctl/src/airflowctl/api/datamodels/generated.py
@@ -1378,6 +1378,7 @@ class DAGDetailsResponse(BaseModel):
timetable_summary: Annotated[str | None, Field(title="Timetable Summary")]
= None
timetable_description: Annotated[str | None, Field(title="Timetable
Description")] = None
timetable_partitioned: Annotated[bool, Field(title="Timetable
Partitioned")]
+ timetable_periodic: Annotated[bool, Field(title="Timetable Periodic")]
tags: Annotated[list[DagTagResponse], Field(title="Tags")]
max_active_tasks: Annotated[int, Field(title="Max Active Tasks")]
max_active_runs: Annotated[int | None, Field(title="Max Active Runs")] =
None
@@ -1410,6 +1411,9 @@ class DAGDetailsResponse(BaseModel):
owner_links: Annotated[dict[str, str] | None, Field(title="Owner Links")]
= None
is_favorite: Annotated[bool | None, Field(title="Is Favorite")] = False
active_runs_count: Annotated[int | None, Field(title="Active Runs Count")]
= 0
+ is_backfillable: Annotated[
+ bool, Field(description="Whether this DAG's schedule supports
backfilling.", title="Is Backfillable")
+ ]
file_token: Annotated[str, Field(description="Return file token.",
title="File Token")]
concurrency: Annotated[
int,
@@ -1443,6 +1447,7 @@ class DAGResponse(BaseModel):
timetable_summary: Annotated[str | None, Field(title="Timetable Summary")]
= None
timetable_description: Annotated[str | None, Field(title="Timetable
Description")] = None
timetable_partitioned: Annotated[bool, Field(title="Timetable
Partitioned")]
+ timetable_periodic: Annotated[bool, Field(title="Timetable Periodic")]
tags: Annotated[list[DagTagResponse], Field(title="Tags")]
max_active_tasks: Annotated[int, Field(title="Max Active Tasks")]
max_active_runs: Annotated[int | None, Field(title="Max Active Runs")] =
None
@@ -1459,6 +1464,9 @@ class DAGResponse(BaseModel):
next_dagrun_run_after: Annotated[datetime | None, Field(title="Next Dagrun
Run After")] = None
allowed_run_types: Annotated[list[DagRunType] | None, Field(title="Allowed
Run Types")] = None
owners: Annotated[list[str], Field(title="Owners")]
+ is_backfillable: Annotated[
+ bool, Field(description="Whether this DAG's schedule supports
backfilling.", title="Is Backfillable")
+ ]
file_token: Annotated[str, Field(description="Return file token.",
title="File Token")]
diff --git a/airflow-ctl/tests/airflow_ctl/api/test_operations.py
b/airflow-ctl/tests/airflow_ctl/api/test_operations.py
index aa559f17421..48e44558ef4 100644
--- a/airflow-ctl/tests/airflow_ctl/api/test_operations.py
+++ b/airflow-ctl/tests/airflow_ctl/api/test_operations.py
@@ -731,6 +731,7 @@ class TestDagOperations:
timetable_summary="timetable_summary",
timetable_description="timetable_description",
timetable_partitioned=False,
+ timetable_periodic=True,
tags=[],
max_active_tasks=1,
max_active_runs=1,
@@ -742,6 +743,7 @@ class TestDagOperations:
next_dagrun_data_interval_end=datetime.datetime(2025, 1, 1, 0, 0, 0),
next_dagrun_run_after=datetime.datetime(2025, 1, 1, 0, 0, 0),
owners=["apache-airflow"],
+ is_backfillable=True,
file_token="file_token",
bundle_name="bundle_name",
is_stale=False,
@@ -759,6 +761,7 @@ class TestDagOperations:
timetable_summary="timetable_summary",
timetable_description="timetable_description",
timetable_partitioned=False,
+ timetable_periodic=True,
tags=[],
max_active_tasks=1,
max_active_runs=1,
@@ -770,6 +773,7 @@ class TestDagOperations:
next_dagrun_data_interval_end=datetime.datetime(2025, 1, 1, 0, 0, 0),
next_dagrun_run_after=datetime.datetime(2025, 1, 1, 0, 0, 0),
owners=["apache-airflow"],
+ is_backfillable=True,
catchup=False,
dag_run_timeout=datetime.timedelta(days=1),
asset_expression=None,
diff --git a/airflow-ctl/tests/airflow_ctl/ctl/commands/test_dag_command.py
b/airflow-ctl/tests/airflow_ctl/ctl/commands/test_dag_command.py
index 600a553109b..8eb65d46686 100644
--- a/airflow-ctl/tests/airflow_ctl/ctl/commands/test_dag_command.py
+++ b/airflow-ctl/tests/airflow_ctl/ctl/commands/test_dag_command.py
@@ -42,6 +42,7 @@ class TestDagCommands:
timetable_summary="timetable_summary",
timetable_description="timetable_description",
timetable_partitioned=False,
+ timetable_periodic=True,
tags=[],
max_active_tasks=1,
max_active_runs=1,
@@ -53,6 +54,7 @@ class TestDagCommands:
next_dagrun_data_interval_end=datetime.datetime(2025, 1, 1, 0, 0, 0),
next_dagrun_run_after=datetime.datetime(2025, 1, 1, 0, 0, 0),
owners=["apache-airflow"],
+ is_backfillable=True,
file_token="file_token",
bundle_name="bundle_name",
is_stale=False,
@@ -70,6 +72,7 @@ class TestDagCommands:
timetable_summary="timetable_summary",
timetable_description="timetable_description",
timetable_partitioned=False,
+ timetable_periodic=True,
tags=[],
max_active_tasks=1,
max_active_runs=1,
@@ -81,6 +84,7 @@ class TestDagCommands:
next_dagrun_data_interval_end=datetime.datetime(2025, 1, 1, 0, 0, 0),
next_dagrun_run_after=datetime.datetime(2025, 1, 1, 0, 0, 0),
owners=["apache-airflow"],
+ is_backfillable=True,
file_token="file_token",
bundle_name="bundle_name",
is_stale=False,