Remove Airflow 2.1 compat code in Google provider (apache#43952)
We dropped the old version a long time ago. This is unreachable code
at this point.
uranusjr authored Nov 13, 2024
1 parent 6519657 commit be870f6
Showing 2 changed files with 3 additions and 39 deletions.
17 changes: 3 additions & 14 deletions providers/src/airflow/providers/google/cloud/sensors/gcs.py
@@ -178,21 +178,10 @@ def ts_function(context):
     """
     Act as a default callback for the GoogleCloudStorageObjectUpdatedSensor.
 
-    The default behaviour is check for the object being updated after the data interval's end,
-    or execution_date + interval on Airflow versions prior to 2.2 (before AIP-39 implementation).
+    The default behaviour is check for the object being updated after the data
+    interval's end.
     """
-    try:
-        return context["data_interval_end"]
-    except KeyError:
-        from airflow.utils import timezone
-
-        data_interval = context["dag"].infer_automated_data_interval(
-            timezone.coerce_datetime(context["execution_date"])
-        )
-        next_info = context["dag"].next_dagrun_info(data_interval, restricted=False)
-        if next_info is None:
-            return None
-        return next_info.data_interval.start
+    return context["data_interval_end"]
 
 
 class GCSObjectUpdateSensor(BaseSensorOperator):
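For reference, a minimal usage sketch of the simplified behaviour. The DAG id, schedule, bucket, and object below are hypothetical and not part of this commit; GCSObjectUpdateSensor defaults its ts_func to ts_function, which after this change simply returns context["data_interval_end"].

# Illustrative only; names and values are hypothetical.
from datetime import datetime

from airflow import DAG
from airflow.providers.google.cloud.sensors.gcs import GCSObjectUpdateSensor

with DAG(dag_id="example_gcs_update", start_date=datetime(2024, 1, 1), schedule="@daily") as dag:
    GCSObjectUpdateSensor(
        task_id="wait_for_object_update",
        bucket="my-bucket",
        object="data/report.csv",
        # ts_func defaults to ts_function, which now just returns
        # context["data_interval_end"] for the running DAG run.
    )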
25 changes: 0 additions & 25 deletions providers/tests/google/cloud/sensors/test_gcs.py
@@ -20,7 +20,6 @@
 from datetime import datetime, timedelta
 from unittest import mock
 
-import pendulum
 import pytest
 from google.cloud.storage.retry import DEFAULT_RETRY
 
@@ -35,15 +34,13 @@
     GCSObjectsWithPrefixExistenceSensor,
     GCSObjectUpdateSensor,
     GCSUploadSessionCompleteSensor,
-    ts_function,
 )
 from airflow.providers.google.cloud.triggers.gcs import (
     GCSBlobTrigger,
     GCSCheckBlobUpdateTimeTrigger,
     GCSPrefixBlobTrigger,
     GCSUploadSessionTrigger,
 )
-from airflow.utils import timezone
 
 TEST_BUCKET = "TEST_BUCKET"
 
@@ -253,28 +250,6 @@ def test_gcs_object_existence_async_sensor_execute_complete(self):
         mock_log_info.assert_called_with("File %s was found in bucket %s.", TEST_OBJECT, TEST_BUCKET)
 
 
-class TestTsFunction:
-    def test_should_support_datetime(self):
-        context = {
-            "dag": DAG(
-                dag_id=TEST_DAG_ID, schedule=timedelta(days=5), start_date=datetime(2019, 2, 14, 0, 0)
-            ),
-            "execution_date": datetime(2019, 2, 14, 0, 0),
-        }
-        result = ts_function(context)
-        assert datetime(2019, 2, 19, 0, 0, tzinfo=timezone.utc) == result
-
-    def test_should_support_cron(self):
-        dag = DAG(dag_id=TEST_DAG_ID, start_date=datetime(2019, 2, 19, 0, 0), schedule="@weekly")
-
-        context = {
-            "dag": dag,
-            "execution_date": datetime(2019, 2, 19),
-        }
-        result = ts_function(context)
-        assert pendulum.instance(datetime(2019, 2, 24)).isoformat() == result.isoformat()
-
-
 class TestGoogleCloudStorageObjectUpdatedSensor:
     @mock.patch("airflow.providers.google.cloud.sensors.gcs.GCSHook")
     def test_should_pass_argument_to_hook(self, mock_hook):
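A minimal spot-check of the simplified callback, for illustration only (not part of this commit; the datetime value is arbitrary):

# Hypothetical check of the new behaviour of ts_function.
from datetime import datetime, timezone

from airflow.providers.google.cloud.sensors.gcs import ts_function

context = {"data_interval_end": datetime(2024, 1, 2, tzinfo=timezone.utc)}
assert ts_function(context) == datetime(2024, 1, 2, tzinfo=timezone.utc)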

