
Commit

Fix import error: TaskInstance not defined (#1495)
pankajastro authored Mar 21, 2024
1 parent a6f8f8d commit f453c39
Showing 3 changed files with 6 additions and 15 deletions.
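
Background for this fix (added context, not part of the commit message): each of these example DAGs imported TaskInstance only inside an if TYPE_CHECKING: block. That block is evaluated by type checkers but skipped at runtime, so any runtime reference to the name, for example an unquoted annotation or an isinstance() check inside a PythonOperator callable, fails with NameError: name 'TaskInstance' is not defined. The sketch below illustrates the failing pattern and the module-level import this commit switches to; failed_task_ids is a hypothetical helper, not code from the repository.

# Failing pattern (hypothetical helper; the real DAGs differ in detail):
from typing import TYPE_CHECKING, List

if TYPE_CHECKING:
    # Seen by mypy/pyright only; never executed when the DAG file is parsed or run.
    from airflow.models.taskinstance import TaskInstance


def failed_task_ids(task_instances: List["TaskInstance"]) -> List[str]:
    # The quoted annotation above is harmless, but isinstance() needs the real
    # class object at call time, so calling this function raises
    # NameError: name 'TaskInstance' is not defined.
    return [ti.task_id for ti in task_instances if isinstance(ti, TaskInstance)]


# Fixed pattern (what this commit applies in all three files): drop the
# TYPE_CHECKING guard and import the name unconditionally at module level, e.g.
#   from airflow.models import Connection, TaskInstance, Variable
# so the name exists at runtime as well as for the type checker.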
astronomer/providers/apache/hive/example_dags/example_hive.py (2 additions, 5 deletions)
@@ -4,11 +4,11 @@
 import os
 import time
 from datetime import datetime, timedelta
-from typing import TYPE_CHECKING, Any, List
+from typing import Any, List
 
 from airflow import DAG, settings
 from airflow.exceptions import AirflowException
-from airflow.models import Connection, Variable
+from airflow.models import Connection, TaskInstance, Variable
 from airflow.operators.python import PythonOperator
 from airflow.providers.amazon.aws.operators.emr import (
     EmrCreateJobFlowOperator,
@@ -26,9 +26,6 @@
     NamedHivePartitionSensorAsync,
 )
 
-if TYPE_CHECKING:
-    from airflow.models.taskinstance import TaskInstance
-
 HIVE_CLUSTER = os.getenv("HIVE_CLUSTER", "example_hive_sensor_cluster")
 AWS_S3_CREDS = {
     "aws_access_key_id": os.getenv("AWS_ACCESS_KEY_ID", "aws_access_key"),
astronomer/providers/apache/livy/example_dags/example_livy.py (2 additions, 5 deletions)
@@ -8,11 +8,11 @@
 import os
 import time
 from datetime import datetime, timedelta
-from typing import TYPE_CHECKING, Any, List
+from typing import Any, List
 
 from airflow import DAG, settings
 from airflow.exceptions import AirflowException
-from airflow.models import Connection, Variable
+from airflow.models import Connection, TaskInstance, Variable
 from airflow.operators.python import PythonOperator
 from airflow.providers.amazon.aws.operators.emr import (
     EmrCreateJobFlowOperator,
@@ -25,9 +25,6 @@
 
 from astronomer.providers.apache.livy.operators.livy import LivyOperatorAsync
 
-if TYPE_CHECKING:
-    from airflow.models.taskinstance import TaskInstance
-
 LIVY_CLUSTER = os.getenv("LIVY_CLUSTER", "example_livy_operator_cluster")
 BOTO_DUPLICATE_PERMISSION_ERROR = "InvalidPermission.Duplicate"
 LIVY_JAVA_FILE = os.getenv("LIVY_JAVA_FILE", "/spark-examples.jar")
astronomer/providers/sftp/example_dags/example_sftp.py (2 additions, 5 deletions)
@@ -3,10 +3,10 @@
 import os
 import time
 from datetime import timedelta
-from typing import TYPE_CHECKING, Any, List
+from typing import Any, List
 
 from airflow import DAG, AirflowException, settings
-from airflow.models import Connection, Variable
+from airflow.models import Connection, TaskInstance, Variable
 from airflow.operators.python import PythonOperator, get_current_context
 from airflow.utils.state import State
 from airflow.utils.timezone import datetime
@@ -15,9 +15,6 @@
 
 from astronomer.providers.sftp.sensors.sftp import SFTPSensorAsync
 
-if TYPE_CHECKING:
-    from airflow.models import TaskInstance
-
 SFTP_CONN_ID = os.getenv("ASTRO_SFTP_CONN_ID", "sftp_default")
 EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", 6))
 AWS_S3_CREDS = {
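
Optional follow-up check (a sketch, assuming an environment with astronomer-providers and the relevant Amazon, Hive, Livy, and SFTP extras installed): importing the three touched example-DAG modules should now succeed. Before this fix, a module-level runtime reference to TaskInstance would have raised NameError during import, while a reference inside a task callable would only fail when the task ran.

import importlib

for module_path in (
    "astronomer.providers.apache.hive.example_dags.example_hive",
    "astronomer.providers.apache.livy.example_dags.example_livy",
    "astronomer.providers.sftp.example_dags.example_sftp",
):
    # Import each example DAG module; a clean import means no NameError at parse time.
    importlib.import_module(module_path)
    print(f"imported {module_path} OK")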
