This repo has been archived. Please use the official Google Cloud Run operators shipped with Airflow instead. Thanks, everyone, for the more than 4.5k downloads.
Airflow plugin for orchestrating Google Cloud Run jobs.
Features:
- Easier-to-use alternative to `KubernetesPodOperator`
- Securely use sensitive data stored in Google Cloud Secrets Manager
- Create tasks with isolated dependencies
- Enables polyglot workflows
Operators:
- CloudRunJobOperator
- CloudRunCreateJobOperator
- CloudRunGetJobOperator
- CloudRunUpdateJobOperator 🔜 (coming soon)
- CloudRunDeleteJobOperator 🔜 (coming soon)
- CloudRunListJobsOperator 🔜 (coming soon)

Hooks:
- CloudRunJobHook

Sensors:
- CloudRunJobExecutionSensor 🔜 (coming soon)
from airflow import DAG
from airflow_google_cloud_run_plugin.operators.cloud_run import CloudRunJobOperator

# Minimal example: a single Cloud Run job task. The operator creates the job
# if it does not exist and deletes it once the task finishes.
with DAG(dag_id="example_dag") as dag:
    job = CloudRunJobOperator(
        task_id="example-job",
        name="example-job",
        location="us-central1",
        project_id="example-project",
        image="gcr.io/gcp-runtimes/ubuntu_18_0_4",
        command=["echo"],
        cpu="1000m",      # CPU request, Kubernetes-style quantity string
        memory="512Mi",   # memory request, Kubernetes-style quantity string
        create_if_not_exists=True,  # create the Cloud Run job on first run
        delete_on_exit=True,        # clean up the job after the task completes
    )
from airflow import DAG
from airflow_google_cloud_run_plugin.operators.cloud_run import (
    CloudRunJobOperator,
    CloudRunCreateJobOperator,
    CloudRunDeleteJobOperator,
)

# Example with explicit lifecycle management: the job is created, executed,
# and deleted as three separate tasks chained with >>.
with DAG(dag_id="example_dag") as dag:
    create_job = CloudRunCreateJobOperator(
        task_id="create",
        name="example-job",
        location="us-central1",
        project_id="example-project",
        image="gcr.io/gcp-runtimes/ubuntu_18_0_4",
        command=["echo"],
        cpu="1000m",
        memory="512Mi",
    )

    run_job = CloudRunJobOperator(
        task_id="run",
        name="example-job",
        location="us-central1",
        project_id="example-project",
    )

    delete_job = CloudRunDeleteJobOperator(
        task_id="delete",
        name="example-job",
        location="us-central1",
        project_id="example-project",
    )

    # create -> run -> delete
    create_job >> run_job >> delete_job
from airflow import DAG
from airflow_google_cloud_run_plugin.operators.cloud_run import CloudRunJobOperator

# Simple environment variable passed by value.
FOO = {
    "name": "FOO",
    "value": "not_so_secret_value_123",
}

# Environment variable resolved from Google Cloud Secret Manager at runtime.
BAR = {
    "name": "BAR",
    "valueFrom": {
        "secretKeyRef": {
            "name": "super_secret_password",
            "key": "1",  # or "latest" for latest secret version
        }
    },
}

with DAG(dag_id="example_dag") as dag:
    job = CloudRunJobOperator(
        task_id="example-job",
        name="example-job",
        location="us-central1",
        project_id="example-project",
        image="gcr.io/gcp-runtimes/ubuntu_18_0_4",
        command=["echo"],
        args=["$FOO", "$BAR"],   # shell-style references to the env vars below
        env_vars=[FOO, BAR],
        cpu="1000m",
        memory="512Mi",
        create_if_not_exists=True,
        delete_on_exit=True,
    )