Apache Airflow DAGs with a backend configuration bundle.

from datetime import datetime, timedelta
from airflow import DAG
# In Airflow 2, PythonOperator moved out of airflow.operators.python_operator
from airflow.operators.python import PythonOperator
# Provider package import; replaces the pre-2.0 airflow.hooks.S3_hook.S3Hook
from airflow.providers.amazon.aws.hooks.s3 import S3Hook

DEFAULT_ARGS = {
    "owner": "Airflow",
    "depends_on_past": False,
    "start_date": datetime(2020, 1, 13),
    "email": ["airflow@example.com"],
    "email_on_failure": False,
    "email_on_retry": False,
    "retries": 1,
    "retry_delay": timedelta(minutes=5),
}

dag = DAG("create_date_dimension", default_args=DEFAULT_ARGS, schedule_interval="@once")


def write_text_file(ds, **kwargs):
    """Generate a local text file, then upload it to MinIO over the S3 API."""
    with open("/data/data/temp.txt", "w") as fp:
        # File generation/processing step; here, write the execution date
        fp.write(ds)

    # Upload the generated file to MinIO via the "minio" Airflow connection
    s3 = S3Hook(aws_conn_id="minio")
    s3.load_file(
        filename="/data/data/temp.txt",
        key="my-test-file.txt",
        bucket_name="airflow",
        replace=True,  # overwrite the object if the task is retried
    )


# Create a task that calls the processing function; in Airflow 2 the context
# (ds, etc.) is passed automatically, so provide_context is no longer needed
t1 = PythonOperator(
    task_id="generate_and_upload_to_s3",
    python_callable=write_text_file,
    dag=dag,
)
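
The DAG assumes an Airflow connection with conn id "minio". As a minimal sketch of one way to register it (the access key, secret key, and endpoint below are placeholders, not values from this repo), you can use Airflow's AIRFLOW_CONN_<CONN_ID> environment-variable convention, where query parameters become connection extras:

import os

# Hypothetical MinIO credentials and endpoint. Recent versions of the
# Amazon provider read "endpoint_url" from the connection extras;
# older versions used "host" in the extras instead.
os.environ["AIRFLOW_CONN_MINIO"] = (
    "aws://minio-access-key:minio-secret-key@/"
    "?endpoint_url=http%3A%2F%2Flocalhost%3A9000"
)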
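
For a quick local check before handing the file to the scheduler, Airflow 2.5+ can run a DAG in-process. Appending something like the following to the DAG file (a sketch, assuming the "minio" connection is configured and /data/data exists) lets a plain python invocation of the file execute one run:

if __name__ == "__main__":
    # Runs a single in-process DagRun without a scheduler (Airflow 2.5+)
    dag.test()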