GCP Cloud Composer

Cloud Composer uses Cloud Storage to store Apache Airflow DAGs, also known as workflows. Each environment has an associated Cloud Storage bucket. Cloud Composer schedules only the DAGs in the Cloud Storage bucket.
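
For example, you can look up an environment's DAG bucket and upload a DAG into it. A minimal sketch, reusing the environment name and location from the commands below; my_dag.py is a placeholder for your own DAG file:

# Print the gs:// prefix of the environment's dags/ folder
gcloud composer environments describe composer-advanced-lab \
    --location europe-west3 --format="get(config.dagGcsPrefix)"

# Upload a local DAG file into that folder
gcloud composer environments storage dags import \
    --environment composer-advanced-lab \
    --location europe-west3 \
    --source my_dag.py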

# Create the destination (EU) and source (US) buckets with a 120-minute retention policy
export PROJECT_ID=$DEVSHELL_PROJECT_ID
export DEST_BUCKET_NAME="$PROJECT_ID-destination"
export SRC_BUCKET_NAME="$PROJECT_ID-source"
echo "Making bucket: gs://$DEST_BUCKET_NAME"
gsutil mb --retention 120m -c standard -l europe-west3 -p $DEVSHELL_PROJECT_ID gs://$DEST_BUCKET_NAME
echo "Making bucket: gs://$SRC_BUCKET_NAME"
gsutil mb --retention 120m -c standard -l us-central1 -p $DEVSHELL_PROJECT_ID gs://$SRC_BUCKET_NAME
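
To confirm both buckets were created with the expected location and retention policy, you can inspect their metadata (standard gsutil flags):

# -L prints full metadata, -b restricts the listing to the buckets themselves
gsutil ls -L -b gs://$SRC_BUCKET_NAME gs://$DEST_BUCKET_NAME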


# Read the current value of an Airflow variable
gcloud composer environments run composer-advanced-lab \
    --location europe-west3 variables -- \
    --get gcs_source_bucket


# Tell the DAG where to find the list of tables to copy
gcloud composer environments run composer-advanced-lab \
    --location europe-west3 variables -- \
    --set table_list_file_path /home/airflow/gcs/dags/bq_copy_eu_to_us_sample.csv
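
On the Airflow workers, /home/airflow/gcs/dags/ is mapped to the dags/ folder of the environment's bucket, so the table list file has to be uploaded there for the path above to resolve. A sketch, assuming the CSV sits in the current directory:

# Copy the table list CSV into the environment's dags/ folder
gcloud composer environments storage dags import \
    --environment composer-advanced-lab \
    --location europe-west3 \
    --source bq_copy_eu_to_us_sample.csv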

# Set the source and destination buckets for the copy
gcloud composer environments run composer-advanced-lab \
    --location europe-west3 variables -- \
    --set gcs_source_bucket $SRC_BUCKET_NAME

gcloud composer environments run composer-advanced-lab \
    --location europe-west3 variables -- \
    --set gcs_dest_bucket $DEST_BUCKET_NAME

# Verify that the variable was set
gcloud composer environments run composer-advanced-lab \
    --location europe-west3 variables -- \
    --get gcs_source_bucket
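
With the variables in place, the copy DAG can be triggered through the same wrapper (Airflow 1 CLI syntax; bq_copy_across_locations is a hypothetical DAG ID, substitute the ID of the DAG you uploaded):

gcloud composer environments run composer-advanced-lab \
    --location europe-west3 trigger_dag -- bq_copy_across_locations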