"""Apache Airflow DAG with Docker-backend configuration: Selenoid setup bundle."""
from datetime import timedelta
from airflow import DAG
from airflow.operators.bash import BashOperator
from airflow.operators.dummy_operator import DummyOperator
from airflow.providers.docker.operators.docker import DockerOperator
from airflow.utils.dates import days_ago
# Task-level defaults inherited by every operator in this DAG.
default_args = dict(
    owner="airflow",
    depends_on_past=False,          # each run is independent of the previous one
    email=["airflow@example.com"],
    email_on_failure=False,         # alerting disabled for both failure...
    email_on_retry=False,           # ...and retry events
    retries=1,                      # one automatic retry per failed task
    retry_delay=timedelta(minutes=5),  # wait 5 minutes between attempts
)
# DAG definition: runs once a day, with a start date two days in the past.
dag = DAG(
    dag_id="selenoid_setup",
    default_args=default_args,
    # NOTE(review): `schedule_interval` and `days_ago` are deprecated in
    # Airflow 2.4+ (use `schedule=` and a fixed pendulum/datetime start_date);
    # kept as-is to preserve behavior on the installed Airflow version.
    schedule_interval="@daily",
    start_date=days_ago(2),
)
with dag:
    # Entry-point marker task. NOTE(review): DummyOperator is deprecated in
    # Airflow 2.x in favor of EmptyOperator; kept to match the file's import.
    kick_off_dag = DummyOperator(task_id="kick_off_dag")

    # Pre-pull both images on the host so the DockerOperator tasks below
    # do not pay the pull cost (or fail on a missing image) at run time.
    scrape2 = BashOperator(
        task_id="pull_selenoid_chrome",
        bash_command="docker pull selenoid/chrome:latest",
    )
    scrape = BashOperator(
        task_id="pull_selenoid_video-recorder",
        bash_command="docker pull selenoid/video-recorder:latest-release",
    )

    # Run each Selenoid image briefly inside a container on the default bridge
    # network. docker_url defaults to the local Docker socket.
    t1 = DockerOperator(
        task_id="selenoid1",
        image="selenoid/chrome:latest",
        command="/bin/sleep 30",
        network_mode="bridge",
    )
    t2 = DockerOperator(
        task_id="selenoid2",
        image="selenoid/video-recorder:latest-release",
        command="/bin/sleep 30",
        network_mode="bridge",
    )

    # FIX: the original graph was only `scrape >> t1 >> t2`, which left
    # kick_off_dag and scrape2 dangling (never scheduled relative to anything)
    # and put the video-recorder pull upstream of the *chrome* container.
    # Wire every task into the graph and pull each image before the container
    # that actually uses it. The original t1 >> t2 ordering is preserved.
    kick_off_dag >> [scrape, scrape2]
    scrape2 >> t1 >> t2
    scrape >> t2