-
Notifications
You must be signed in to change notification settings - Fork 5
Expand file tree
/
Copy pathtest_aws_athena_operator.py
More file actions
37 lines (28 loc) · 1.02 KB
/
test_aws_athena_operator.py
File metadata and controls
37 lines (28 loc) · 1.02 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
from airflow.contrib.operators.aws_athena_operator import AWSAthenaOperator
from datetime import datetime, timedelta
# Default arguments inherited by every task in the DAG below.
default_args = dict(
    owner="airflow",
    depends_on_past=False,
    start_date=datetime(2020, 9, 7),
    email=["mikaela.pisani@rootstrap.com"],
    email_on_failure=False,
    email_on_retry=False,
    retries=1,
    retry_delay=timedelta(minutes=5),
)
# Query target. The original embedded '{DATABASE}'/'{TABLE}' in a plain
# (non-f) string, so the placeholders were never substituted and the literal
# braces would have been sent to Athena.
DATABASE = "mr-csv"
TABLE = "my_table"  # TODO: replace with the real Athena table name

# One-shot DAG: echo a start marker, then run an Athena query whose results
# are written to output_location in S3.
with DAG("query_s3", default_args=default_args, schedule_interval="@once") as dag:
    # Marker task so the run is visible in the logs before the query starts.
    t1 = BashOperator(
        task_id="bash_test",
        bash_command='echo "Starting AWSAthenaOperator TEST"',
    )

    run_query = AWSAthenaOperator(
        task_id="run_query",
        database=DATABASE,
        query=f'select text FROM "{DATABASE}"."{TABLE}"',
        # BUG FIX: original was 's3://s3://XXX/YYY/ZZZ' — a doubled scheme,
        # which is not a valid S3 URI for Athena result output.
        # TODO: replace XXX/YYY/ZZZ with the real bucket/prefix.
        output_location="s3://XXX/YYY/ZZZ",
        aws_conn_id="s3_connection",
    )

    # BUG FIX: the original t1.set_upstream(run_query) made the query run
    # BEFORE the "Starting ..." marker; the marker must come first.
    t1.set_downstream(run_query)