diff --git a/docs/apache-airflow-providers-microsoft-azure/index.rst b/docs/apache-airflow-providers-microsoft-azure/index.rst
index fdda56c910a5d..876bf3a30a3d0 100644
--- a/docs/apache-airflow-providers-microsoft-azure/index.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/index.rst
@@ -41,7 +41,7 @@ Content
     :maxdepth: 1
     :caption: Resources
 
-    Example DAGs <https://github.com/apache/airflow/tree/main/airflow/providers/microsoft/azure/example_dags>
+    Example DAGs <https://github.com/apache/airflow/tree/main/tests/system/providers/microsoft/azure>
     PyPI Repository <https://pypi.org/project/apache-airflow-providers-microsoft-azure/>
     Installing from sources <installing-providers-from-sources>
 
diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/adf_run_pipeline.rst b/docs/apache-airflow-providers-microsoft-azure/operators/adf_run_pipeline.rst
index c4faa1dde494f..7d3b484ba7cbe 100644
--- a/docs/apache-airflow-providers-microsoft-azure/operators/adf_run_pipeline.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/operators/adf_run_pipeline.rst
@@ -31,7 +31,7 @@ This functionality can be disabled for an asynchronous wait -- typically with th
 
 Below is an example of using this operator to execute an Azure Data Factory pipeline.
 
-  .. exampleinclude:: /../../airflow/providers/microsoft/azure/example_dags/example_adf_run_pipeline.py
+  .. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adf_run_pipeline.py
       :language: python
       :dedent: 0
       :start-after: [START howto_operator_adf_run_pipeline]
@@ -39,7 +39,7 @@ Below is an example of using this operator to execute an Azure Data Factory pipe
 Here is a different example of using this operator to execute a pipeline but coupled with the
 :class:`~airflow.providers.microsoft.azure.sensors.data_factory.AzureDataFactoryPipelineRunSensor` to perform an asynchronous wait.
 
-  .. exampleinclude:: /../../airflow/providers/microsoft/azure/example_dags/example_adf_run_pipeline.py
+  .. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adf_run_pipeline.py
       :language: python
       :dedent: 0
       :start-after: [START howto_operator_adf_run_pipeline_async]
diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/adls.rst b/docs/apache-airflow-providers-microsoft-azure/operators/adls.rst
index 2cd55a5360892..b7e29446c82b8 100644
--- a/docs/apache-airflow-providers-microsoft-azure/operators/adls.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/operators/adls.rst
@@ -35,7 +35,7 @@ file(s) from Azure DataLake Storage
 
 Below is an example of using this operator to delete a file from ADL.
 
-.. exampleinclude:: /../../airflow/providers/microsoft/azure/example_dags/example_adls_delete.py
+.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adls_delete.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_adls_delete]
diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/azure_blob_to_gcs.rst b/docs/apache-airflow-providers-microsoft-azure/operators/azure_blob_to_gcs.rst
index 13cf897e9179c..1c1e084f51853 100644
--- a/docs/apache-airflow-providers-microsoft-azure/operators/azure_blob_to_gcs.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/operators/azure_blob_to_gcs.rst
@@ -54,7 +54,7 @@ To get information about jobs within a Azure Blob Storage use:
 
 Example usage:
 
-.. exampleinclude:: /../../airflow/providers/microsoft/azure/example_dags/example_azure_blob_to_gcs.py
+.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_blob_to_gcs.py
     :language: python
     :start-after: [START how_to_azure_blob_to_gcs]
     :end-before: [END how_to_azure_blob_to_gcs]
diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/local_to_adls.rst b/docs/apache-airflow-providers-microsoft-azure/operators/local_to_adls.rst
index f82e93c610027..0de321e63e191 100644
--- a/docs/apache-airflow-providers-microsoft-azure/operators/local_to_adls.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/operators/local_to_adls.rst
@@ -38,7 +38,7 @@ upload data from local filesystem to ADL.
 
 Below is an example of using this operator to upload a file to ADL.
 
-.. exampleinclude:: /../../airflow/providers/microsoft/azure/example_dags/example_local_to_adls.py
+.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_local_to_adls.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_local_to_adls]
diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/sftp_to_wasb.rst b/docs/apache-airflow-providers-microsoft-azure/operators/sftp_to_wasb.rst
index b628681f4ee89..a16e3fb12aaba 100644
--- a/docs/apache-airflow-providers-microsoft-azure/operators/sftp_to_wasb.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/operators/sftp_to_wasb.rst
@@ -54,7 +54,7 @@ To get information about jobs within a Azure Blob Storage use:
 :class:`~airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator`
 
 Example usage:
-.. exampleinclude:: /../../airflow/providers/microsoft/azure/example_dags/example_sftp_to_wasb.py
+.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_sftp_to_wasb.py
     :language: python
     :dedent: 4
     :start-after: [START how_to_sftp_to_wasb]
diff --git a/tests/providers/microsoft/azure/operators/test_adls_delete_system.py b/tests/providers/microsoft/azure/operators/test_adls_delete_system.py
index 7fe437a5bbf9d..9a5f387d37f83 100644
--- a/tests/providers/microsoft/azure/operators/test_adls_delete_system.py
+++ b/tests/providers/microsoft/azure/operators/test_adls_delete_system.py
@@ -19,7 +19,7 @@
 
 import pytest
 
-from airflow.providers.microsoft.azure.example_dags.example_local_to_adls import LOCAL_FILE_PATH
+from tests.system.providers.microsoft.azure.example_local_to_adls import LOCAL_FILE_PATH
 from tests.test_utils.azure_system_helpers import (
     AZURE_DAG_FOLDER,
     AzureSystemTest,
diff --git a/tests/providers/microsoft/azure/transfers/test_local_to_adls_system.py b/tests/providers/microsoft/azure/transfers/test_local_to_adls_system.py
index cd0363f2bfad8..99ed741067123 100644
--- a/tests/providers/microsoft/azure/transfers/test_local_to_adls_system.py
+++ b/tests/providers/microsoft/azure/transfers/test_local_to_adls_system.py
@@ -19,7 +19,7 @@
 
 import pytest
 
-from airflow.providers.microsoft.azure.example_dags.example_local_to_adls import LOCAL_FILE_PATH
+from tests.system.providers.microsoft.azure.example_local_to_adls import LOCAL_FILE_PATH
 from tests.test_utils.azure_system_helpers import (
     AZURE_DAG_FOLDER,
     AzureSystemTest,
diff --git a/tests/providers/microsoft/azure/transfers/test_local_to_wasb_system.py b/tests/providers/microsoft/azure/transfers/test_local_to_wasb_system.py
index dea89362e8d78..f57ac8ca97290 100644
--- a/tests/providers/microsoft/azure/transfers/test_local_to_wasb_system.py
+++ b/tests/providers/microsoft/azure/transfers/test_local_to_wasb_system.py
@@ -19,9 +19,7 @@
 
 import pytest
 
-from airflow.providers.microsoft.azure.example_dags.example_local_to_wasb import (  # type: ignore
-    PATH_TO_UPLOAD_FILE,
-)
+from tests.system.providers.microsoft.azure.example_local_to_wasb import PATH_TO_UPLOAD_FILE  # type: ignore
 from tests.test_utils.azure_system_helpers import (
     AZURE_DAG_FOLDER,
     AzureSystemTest,
diff --git a/tests/providers/microsoft/azure/transfers/test_sftp_to_wasb_system.py b/tests/providers/microsoft/azure/transfers/test_sftp_to_wasb_system.py
index 600f84fc33585..91256539b28bc 100644
--- a/tests/providers/microsoft/azure/transfers/test_sftp_to_wasb_system.py
+++ b/tests/providers/microsoft/azure/transfers/test_sftp_to_wasb_system.py
@@ -20,7 +20,7 @@
 
 import pytest
 
-from airflow.providers.microsoft.azure.example_dags.example_sftp_to_wasb import (
+from tests.system.providers.microsoft.azure.example_sftp_to_wasb import (
     FILE_COMPLETE_PATH,
     LOCAL_FILE_PATH,
     SAMPLE_FILENAME,
diff --git a/airflow/providers/microsoft/azure/example_dags/__init__.py b/tests/system/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/example_dags/__init__.py
rename to tests/system/__init__.py
diff --git a/tests/system/providers/__init__.py b/tests/system/providers/__init__.py
new file mode 100644
index 0000000000000..217e5db960782
--- /dev/null
+++ b/tests/system/providers/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/microsoft/__init__.py b/tests/system/providers/microsoft/__init__.py
new file mode 100644
index 0000000000000..217e5db960782
--- /dev/null
+++ b/tests/system/providers/microsoft/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/microsoft/azure/__init__.py b/tests/system/providers/microsoft/azure/__init__.py
new file mode 100644
index 0000000000000..217e5db960782
--- /dev/null
+++ b/tests/system/providers/microsoft/azure/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/microsoft/azure/example_dags/example_adf_run_pipeline.py b/tests/system/providers/microsoft/azure/example_adf_run_pipeline.py
similarity index 78%
rename from airflow/providers/microsoft/azure/example_dags/example_adf_run_pipeline.py
rename to tests/system/providers/microsoft/azure/example_adf_run_pipeline.py
index 9833dc3190205..03a1a71d63313 100644
--- a/airflow/providers/microsoft/azure/example_dags/example_adf_run_pipeline.py
+++ b/tests/system/providers/microsoft/azure/example_adf_run_pipeline.py
@@ -14,21 +14,25 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
+import os
 from datetime import datetime, timedelta
 
-from airflow.models import DAG, BaseOperator
+from airflow.models import DAG
 
 try:
     from airflow.operators.empty import EmptyOperator
 except ModuleNotFoundError:
     from airflow.operators.dummy import DummyOperator as EmptyOperator  # type: ignore
+
 from airflow.providers.microsoft.azure.operators.data_factory import AzureDataFactoryRunPipelineOperator
 from airflow.providers.microsoft.azure.sensors.data_factory import AzureDataFactoryPipelineRunStatusSensor
 from airflow.utils.edgemodifier import Label
 
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_adf_run_pipeline"
+
 with DAG(
-    dag_id="example_adf_run_pipeline",
+    dag_id=DAG_ID,
     start_date=datetime(2021, 8, 13),
     schedule_interval="@daily",
     catchup=False,
@@ -45,7 +49,7 @@
     end = EmptyOperator(task_id="end")
 
     # [START howto_operator_adf_run_pipeline]
-    run_pipeline1: BaseOperator = AzureDataFactoryRunPipelineOperator(
+    run_pipeline1 = AzureDataFactoryRunPipelineOperator(
         task_id="run_pipeline1",
         pipeline_name="pipeline1",
         parameters={"myParam": "value"},
@@ -53,13 +57,13 @@
     # [END howto_operator_adf_run_pipeline]
 
     # [START howto_operator_adf_run_pipeline_async]
-    run_pipeline2: BaseOperator = AzureDataFactoryRunPipelineOperator(
+    run_pipeline2 = AzureDataFactoryRunPipelineOperator(
         task_id="run_pipeline2",
         pipeline_name="pipeline2",
         wait_for_termination=False,
     )
 
-    pipeline_run_sensor: BaseOperator = AzureDataFactoryPipelineRunStatusSensor(
+    pipeline_run_sensor = AzureDataFactoryPipelineRunStatusSensor(
         task_id="pipeline_run_sensor",
         run_id=run_pipeline2.output["run_id"],
     )
@@ -71,3 +75,14 @@
 
     # Task dependency created via `XComArgs`:
     # run_pipeline2 >> pipeline_run_sensor
+
+    from tests.system.utils.watcher import watcher
+
+    # This test needs watcher in order to properly mark success/failure
+    # when "tearDown" task with trigger rule is part of the DAG
+    list(dag.tasks) >> watcher()
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
diff --git a/airflow/providers/microsoft/azure/example_dags/example_adls_delete.py b/tests/system/providers/microsoft/azure/example_adls_delete.py
similarity index 77%
rename from airflow/providers/microsoft/azure/example_dags/example_adls_delete.py
rename to tests/system/providers/microsoft/azure/example_adls_delete.py
index e007846f53292..e592160c1d03d 100644
--- a/airflow/providers/microsoft/azure/example_dags/example_adls_delete.py
+++ b/tests/system/providers/microsoft/azure/example_adls_delete.py
@@ -25,9 +25,11 @@
 
 LOCAL_FILE_PATH = os.environ.get("LOCAL_FILE_PATH", 'localfile.txt')
 REMOTE_FILE_PATH = os.environ.get("REMOTE_LOCAL_PATH", 'remote.txt')
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_adls_delete"
 
 with models.DAG(
-    "example_adls_delete",
+    DAG_ID,
     start_date=datetime(2021, 1, 1),
     schedule_interval=None,
     tags=['example'],
@@ -43,3 +45,14 @@
     # [END howto_operator_adls_delete]
 
     upload_file >> remove_file
+
+    from tests.system.utils.watcher import watcher
+
+    # This test needs watcher in order to properly mark success/failure
+    # when "tearDown" task with trigger rule is part of the DAG
+    list(dag.tasks) >> watcher()
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
diff --git a/airflow/providers/microsoft/azure/example_dags/example_azure_blob_to_gcs.py b/tests/system/providers/microsoft/azure/example_azure_blob_to_gcs.py
similarity index 81%
rename from airflow/providers/microsoft/azure/example_dags/example_azure_blob_to_gcs.py
rename to tests/system/providers/microsoft/azure/example_azure_blob_to_gcs.py
index 668597cc97789..00e021432c7f8 100644
--- a/airflow/providers/microsoft/azure/example_dags/example_azure_blob_to_gcs.py
+++ b/tests/system/providers/microsoft/azure/example_azure_blob_to_gcs.py
@@ -31,10 +31,12 @@
 GCP_BUCKET_FILE_PATH = os.environ.get("GCP_BUCKET_FILE_PATH", "file.txt")
 GCP_BUCKET_NAME = os.environ.get("GCP_BUCKET_NAME", "INVALID BUCKET NAME")
 GCP_OBJECT_NAME = os.environ.get("GCP_OBJECT_NAME", "file.txt")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_azure_blob_to_gcs"
 
 # [START how_to_azure_blob_to_gcs]
 with DAG(
-    "example_azure_blob_to_gcs",
+    DAG_ID,
     schedule_interval=None,
     start_date=datetime(2021, 1, 1),  # Override to match your needs
     default_args={"container_name": AZURE_CONTAINER_NAME, "blob_name": BLOB_NAME},
@@ -57,3 +59,14 @@
     # [END how_to_azure_blob_to_gcs]
 
     wait_for_blob >> transfer_files_to_gcs
+
+    from tests.system.utils.watcher import watcher
+
+    # This test needs watcher in order to properly mark success/failure
+    # when "tearDown" task with trigger rule is part of the DAG
+    list(dag.tasks) >> watcher()
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
diff --git a/airflow/providers/microsoft/azure/example_dags/example_azure_container_instances.py b/tests/system/providers/microsoft/azure/example_azure_container_instances.py
similarity index 85%
rename from airflow/providers/microsoft/azure/example_dags/example_azure_container_instances.py
rename to tests/system/providers/microsoft/azure/example_azure_container_instances.py
index 42f258a54a415..0f281844762e5 100644
--- a/airflow/providers/microsoft/azure/example_dags/example_azure_container_instances.py
+++ b/tests/system/providers/microsoft/azure/example_azure_container_instances.py
@@ -18,13 +18,17 @@
@@ """ This is an example dag for using the AzureContainerInstancesOperator. """ +import os from datetime import datetime, timedelta from airflow import DAG from airflow.providers.microsoft.azure.operators.container_instances import AzureContainerInstancesOperator +ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID") +DAG_ID = "aci_example" + with DAG( - dag_id='aci_example', + dag_id=DAG_ID, default_args={'retries': 1}, schedule_interval=timedelta(days=1), start_date=datetime(2018, 11, 1), @@ -45,3 +49,9 @@ cpu=1.0, task_id='start_container', ) + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/microsoft/azure/example_dags/example_azure_cosmosdb.py b/tests/system/providers/microsoft/azure/example_azure_cosmosdb.py similarity index 79% rename from airflow/providers/microsoft/azure/example_dags/example_azure_cosmosdb.py rename to tests/system/providers/microsoft/azure/example_azure_cosmosdb.py index 626fc54644436..fe2977d2cf49a 100644 --- a/airflow/providers/microsoft/azure/example_dags/example_azure_cosmosdb.py +++ b/tests/system/providers/microsoft/azure/example_azure_cosmosdb.py @@ -28,15 +28,18 @@ *Note: Make sure that connection `azure_cosmos_default` is properly set before running this example.* """ - +import os from datetime import datetime from airflow import DAG from airflow.providers.microsoft.azure.operators.cosmos import AzureCosmosInsertDocumentOperator from airflow.providers.microsoft.azure.sensors.cosmos import AzureCosmosDocumentSensor +ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID") +DAG_ID = "example_azure_cosmosdb_sensor" + with DAG( - dag_id='example_azure_cosmosdb_sensor', + dag_id=DAG_ID, default_args={'database_name': 'airflow_example_db'}, start_date=datetime(2021, 1, 1), catchup=False, @@ -57,3 +60,14 @@ ) t1 >> t2 + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/microsoft/azure/example_dags/example_fileshare.py b/tests/system/providers/microsoft/azure/example_fileshare.py similarity index 76% rename from airflow/providers/microsoft/azure/example_dags/example_fileshare.py rename to tests/system/providers/microsoft/azure/example_fileshare.py index d50db3cb04027..f336b699a5cb7 100644 --- a/airflow/providers/microsoft/azure/example_dags/example_fileshare.py +++ b/tests/system/providers/microsoft/azure/example_fileshare.py @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
- +import os from datetime import datetime from airflow.decorators import task @@ -23,6 +23,8 @@ NAME = 'myfileshare' DIRECTORY = "mydirectory" +ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID") +DAG_ID = "example_fileshare" @task @@ -44,9 +46,20 @@ def delete_fileshare(): with DAG( - "example_fileshare", + DAG_ID, schedule_interval="@once", start_date=datetime(2021, 1, 1), catchup=False, ) as dag: create_fileshare() >> delete_fileshare() + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/microsoft/azure/example_dags/example_local_to_adls.py b/tests/system/providers/microsoft/azure/example_local_to_adls.py similarity index 77% rename from airflow/providers/microsoft/azure/example_dags/example_local_to_adls.py rename to tests/system/providers/microsoft/azure/example_local_to_adls.py index 8dd0a682eb595..9ff15a45850b5 100644 --- a/airflow/providers/microsoft/azure/example_dags/example_local_to_adls.py +++ b/tests/system/providers/microsoft/azure/example_local_to_adls.py @@ -24,9 +24,11 @@ LOCAL_FILE_PATH = os.environ.get("LOCAL_FILE_PATH", 'localfile.txt') REMOTE_FILE_PATH = os.environ.get("REMOTE_LOCAL_PATH", 'remote.txt') +ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID") +DAG_ID = "example_local_to_adls" with models.DAG( - "example_local_to_adls", + DAG_ID, start_date=datetime(2021, 1, 1), catchup=False, schedule_interval=None, @@ -43,3 +45,14 @@ delete_file = ADLSDeleteOperator(task_id="remove_task", path=REMOTE_FILE_PATH, recursive=True) upload_file >> delete_file + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/microsoft/azure/example_dags/example_local_to_wasb.py b/tests/system/providers/microsoft/azure/example_local_to_wasb.py similarity index 76% rename from airflow/providers/microsoft/azure/example_dags/example_local_to_wasb.py rename to tests/system/providers/microsoft/azure/example_local_to_wasb.py index 27372eee1fbbc..df7ca9b4dd302 100644 --- a/airflow/providers/microsoft/azure/example_dags/example_local_to_wasb.py +++ b/tests/system/providers/microsoft/azure/example_local_to_wasb.py @@ -26,9 +26,11 @@ from airflow.providers.microsoft.azure.transfers.local_to_wasb import LocalFilesystemToWasbOperator PATH_TO_UPLOAD_FILE = os.environ.get('AZURE_PATH_TO_UPLOAD_FILE', 'example-text.txt') +ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID") +DAG_ID = "example_local_to_wasb" with DAG( - "example_local_to_wasb", + DAG_ID, schedule_interval="@once", start_date=datetime(2021, 1, 1), catchup=False, @@ -38,3 +40,14 @@ delete = WasbDeleteBlobOperator(task_id="delete_file") upload >> delete + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + +from tests.system.utils import get_test_run # noqa: 
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
diff --git a/airflow/providers/microsoft/azure/example_dags/example_sftp_to_wasb.py b/tests/system/providers/microsoft/azure/example_sftp_to_wasb.py
similarity index 84%
rename from airflow/providers/microsoft/azure/example_dags/example_sftp_to_wasb.py
rename to tests/system/providers/microsoft/azure/example_sftp_to_wasb.py
index d70ca31f2875b..a8073242aeb5c 100644
--- a/airflow/providers/microsoft/azure/example_dags/example_sftp_to_wasb.py
+++ b/tests/system/providers/microsoft/azure/example_sftp_to_wasb.py
@@ -32,6 +32,8 @@
 SAMPLE_FILENAME = os.environ.get("SFTP_SAMPLE_FILENAME", "sftp_to_wasb_test.txt")
 FILE_COMPLETE_PATH = os.path.join(LOCAL_FILE_PATH, SAMPLE_FILENAME)
 SFTP_FILE_COMPLETE_PATH = os.path.join(SFTP_SRC_PATH, SAMPLE_FILENAME)
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_sftp_to_wasb"
 
 
 @task
@@ -41,7 +43,7 @@ def delete_sftp_file():
 
 
 with DAG(
-    "example_sftp_to_wasb",
+    DAG_ID,
     schedule_interval=None,
     catchup=False,
     start_date=datetime(2021, 1, 1),  # Override to match your needs
@@ -70,3 +72,14 @@ def delete_sftp_file():
     )
 
     transfer_files_to_sftp_step >> transfer_files_to_azure >> delete_blob_file_step >> delete_sftp_file()
+
+    from tests.system.utils.watcher import watcher
+
+    # This test needs watcher in order to properly mark success/failure
+    # when "tearDown" task with trigger rule is part of the DAG
+    list(dag.tasks) >> watcher()
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
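
A note on the epilogue this patch appends to each migrated example DAG: `list(dag.tasks) >> watcher()` places every task in the DAG upstream of a watcher task, so a failing "tearDown"-style task (which runs under a permissive trigger rule and would otherwise be ignored) still marks the whole DAG run as failed, while `test_run = get_test_run(dag)` exposes the DAG so pytest can execute it directly (see tests/system/README.md#run_via_pytest). As a simplified sketch only -- assuming the helper in tests/system/utils/watcher.py follows the usual TaskFlow shape, since this patch confirms only its call sites -- the watcher task could look like:

    from airflow.decorators import task
    from airflow.exceptions import AirflowException
    from airflow.utils.trigger_rule import TriggerRule

    @task(trigger_rule=TriggerRule.ONE_FAILED, retries=0)
    def watcher():
        # ONE_FAILED schedules this task only when at least one upstream task
        # has failed; raising here fails the watcher itself and, with it, the
        # whole DAG run.
        raise AirflowException("Failing task because one or more upstream tasks failed.")

Because `list(dag.tasks) >> watcher()` makes every task upstream of this watcher, a single failure anywhere in the example triggers it, even when a trailing cleanup task would otherwise let the run end green.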