#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This is an example DAG for the use of the SqliteOperator.
In this example, we create two tasks that execute in sequence.
The first task calls an sql command, defined in the SQLite operator,
which when triggered, is performed on the connected sqlite database.
The second task is similar but instead calls the SQL command from an external file.
"""
from airflow import DAG
from airflow.operators.bash import BashOperator
from airflow.operators.dummy import DummyOperator
from airflow.utils.dates import days_ago

# Imports needed only if the commented-out tasks below are re-enabled:
# import apprise
# from airflow.operators.python import PythonOperator
# from airflow.providers.docker.operators.docker import DockerOperator

default_args = {"owner": "airflow"}

dag = DAG(
    dag_id="pull_selenoid_notify",
    default_args=default_args,
    schedule_interval="@hourly",
    start_date=days_ago(2),
    tags=["example"],
)
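
# Note: with schedule_interval="@hourly" and start_date=days_ago(2), Airflow's
# default catchup behaviour backfills the missed hourly runs (roughly 48) the
# first time the DAG is enabled; pass catchup=False to DAG(...) to skip them.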
with dag:
    # Alternative: build the notification in Python with the Apprise library
    # (requires ``pip install apprise``) instead of shelling out to the CLI.
    # def notify():
    #     # Create an Apprise instance
    #     apobj = apprise.Apprise()
    #     # Add notification services by their server URL.
    #     # A sample email notification:
    #     # apobj.add('mailto://myuserid:mypass@gmail.com')
    #     # A sample Pushover notification:
    #     apobj.add('pover://aejghiy6af1bshe8mbdksmkzeon3ip@umjiu36dxwwaj8pnfx3n6y2xbm3ssx')
    #     # Notify every service loaded into the Apprise object.
    #     apobj.notify(
    #         body='what a great notification service!',
    #         title='my notification title',
    #     )
    #     return apobj
    #
    # apprise_notify = PythonOperator(
    #     task_id="apprise",
    #     python_callable=notify,
    # )
    # Alternative: run the container via the DockerOperator (requires the
    # apache-airflow-providers-docker package and access to the Docker socket).
    # t2 = DockerOperator(
    #     task_id='docker_command',
    #     image='selenoid/chrome:latest',
    #     api_version='auto',
    #     auto_remove=False,
    #     command="/bin/sleep 30",
    #     docker_url="unix://var/run/docker.sock",
    #     network_mode="bridge",
    # )
    start = DummyOperator(task_id="start")

    # Pull the latest selenoid/chrome image on the worker host.
    docker = BashOperator(
        task_id="pull_selenoid_2",
        bash_command="sudo docker pull selenoid/chrome:latest",
        # retries=3,
    )
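    # Note: ``sudo docker pull`` assumes the worker user has passwordless sudo;
    # adding that user to the ``docker`` group (or using the DockerOperator)
    # removes the need for sudo here.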

    # Send the notification via the apprise CLI.
    pre2 = BashOperator(
        task_id="apprise",
        bash_command=(
            "apprise -vv -t 'my title' -b 'my notification body' "
            "pover://umjiu36dxwwaj8pnfx3n6y2xbm3ssx@aejghiy6af1bshe8mbdksmkzeon3ip"
        ),
        # retries=3,
    )
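    # The pover:// URL above is an Apprise Pushover target; Apprise documents
    # the form as pover://<user_key>@<app_token>, so verify the two credential
    # segments here (the commented Python example above orders them the other
    # way around).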
    # t3 = DockerOperator(
    #     api_version="1.19",
    #     docker_url="tcp://localhost:2375",  # Set your docker URL
    #     command="/bin/sleep 30",
    #     image="centos:latest",
    #     network_mode="bridge",
    #     task_id="docker_op_tester",
    # )
    end = DummyOperator(task_id="end")

    start >> docker >> pre2 >> end
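
# A minimal sketch for exercising individual tasks locally, assuming the
# Airflow 2.x CLI is available and this file sits on the configured DAGs path:
#
#   airflow dags list
#   airflow tasks test pull_selenoid_notify pull_selenoid_2 2021-01-01
#   airflow tasks test pull_selenoid_notify apprise 2021-01-01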