-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathMYDAG.py
68 lines (45 loc) · 1.55 KB
/
MYDAG.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.python_operator import PythonOperator
from datetime import datetime, timedelta
from airflow.contrib.operators.gcs_operator import GoogleCloudStorageCreateBucketOperator
from airflow.operators.email import EmailOperator
import papermill as pm
# Default task arguments applied to every task in this DAG.
# FIX: the original used a *dynamic* start_date
# (datetime.today() - timedelta(days=5)), which Airflow's docs explicitly
# warn against — the value changes on every DAG-file parse, so the scheduler
# can never settle on a stable first interval. Pin a static date instead.
default_args = {
    'owner': 'leander',
    'depends_on_past': False,       # each run is independent of prior runs
    'start_date': datetime(2021, 1, 1),  # static (was: datetime.today() - timedelta(days=5))
    'email': ['[email protected]'],
    'email_on_failure': False,      # failure alerts handled by send_email task instead
    'email_on_retry': False,
    'retries': 1,                   # retry each failed task once
    'retry_delay': timedelta(seconds=10),  # short delay before the retry
}
# DAG definition: one run per day at 11:00 (cron '0 11 * * *').
# FIX: added catchup=False — without it, Airflow back-fills one run for every
# schedule interval between start_date and "now" the moment the DAG is
# enabled, which is almost never wanted for a daily fetch-and-email pipeline.
dag = DAG('MYDAG',
          default_args=default_args,
          schedule_interval='0 11 * * *',
          catchup=False,
          )
def DataFetch_Transform_Clean():
    """Execute the stock notebook via papermill.

    Runs stock.ipynb with the parameters below and writes the executed
    copy (with outputs) to stock_out.ipynb.
    """
    input_nb = '/c/users/leand/AirflowHome/dags/stock.ipynb'
    output_nb = '/c/users/leand/AirflowHome/dags/stock_out.ipynb'
    # Parameters injected into the notebook's "parameters" cell.
    nb_params = dict(alpha=0.6, ratio=0.1)
    pm.execute_notebook(input_nb, output_nb, parameters=nb_params)
# Wrap the notebook-running function in a PythonOperator.
# NOTE(review): this assignment rebinds the name DataFetch_Transform_Clean
# from the function to the operator. It works only because python_callable=
# captures the function before the rebinding, but the shadowing is confusing —
# consider distinct names for the function and the task.
DataFetch_Transform_Clean = PythonOperator(
    dag=dag,
    task_id="run_notebook",
    python_callable=DataFetch_Transform_Clean,
)
# Append the last 7 lines of stocks.csv to a CSV file.
# BUG(review): source and destination are the SAME file — `tail -7 stocks.csv
# >> stocks.csv` re-appends the file's own last 7 rows, so the file grows and
# duplicates data on every daily run. Presumably a different destination file
# was intended (the task is named 'move-file') — confirm and fix the path.
move_file = BashOperator(
    task_id='move-file',
    bash_command='tail -7 /c/users/leand/AirflowHome/dags/stocks.csv>> /c/users/leand/AirflowHome/dags/stocks.csv',
    dag=dag
)
# Send a success-notification email once the upstream tasks complete.
send_email = EmailOperator(
    task_id='send_email',
    to='[email protected]',
    subject='Airflow Alert',
    # FIX: corrected user-facing misspelling "SUCESSFULL" -> "SUCCESSFUL".
    html_content=""" <h3>HELLO,</h3><br><h3> PIPELINE SUCCESSFUL</h3> """,
    dag=dag
)
# Task ordering: run the notebook, then append CSV rows, then send the email.
DataFetch_Transform_Clean >> move_file >> send_email