Update variables usage in bigquery_github_trends DAG
tuanavu committed Feb 16, 2019
1 parent: 8441236 · commit: 796fee4
Showing 2 changed files with 15 additions and 5 deletions.
@@ -2,9 +2,17 @@
 from datetime import timedelta, datetime
 
 from airflow import DAG
+from airflow.models import Variable
 from airflow.contrib.operators.bigquery_operator import BigQueryOperator
 from airflow.contrib.operators.bigquery_check_operator import BigQueryCheckOperator
 
+
+# Config variables
+dag_config = Variable.get("bigquery_github_trends_variables", deserialize_json=True)
+BQ_CONN_ID = dag_config["bq_conn_id"]
+BQ_PROJECT = dag_config["bq_project"]
+BQ_DATASET = dag_config["bq_dataset"]
+
 default_args = {
     'owner': 'airflow',
     'depends_on_past': True,
@@ -28,11 +36,6 @@
     schedule_interval=schedule_interval
 )
 
-# Config variables
-BQ_CONN_ID = "my_gcp_conn"
-BQ_PROJECT = "my-bq-project"
-BQ_DATASET = "my-bq-dataset"
-
 ## Task 1: check that the github archive data has a dated table created for that date
 # To test this task, run this command:
 # docker-compose -f docker-compose-gcloud.yml run --rm webserver airflow test bigquery_github_trends bq_check_githubarchive_day 2018-12-01
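The rest of the DAG file is truncated in this diff; for orientation, below is a minimal sketch of how the new Variable-driven BQ_CONN_ID constant would typically be wired into the check task named in the test command above. The SQL and operator arguments are illustrative assumptions, not the file's actual contents.

# Sketch only (not the file's actual contents): passing the Variable-driven
# connection id into the task exercised by the `airflow test` command above.
t1 = BigQueryCheckOperator(
    task_id='bq_check_githubarchive_day',
    sql='''
    SELECT table_id
    FROM `githubarchive.day.__TABLES__`
    WHERE table_id = '{{ yesterday_ds_nodash }}'
    ''',
    use_legacy_sql=False,
    bigquery_conn_id=BQ_CONN_ID,
    dag=dag
)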
@@ -0,0 +1,7 @@
+{
+    "bigquery_github_trends_variables": {
+        "bq_conn_id": "my_gcp_conn",
+        "bq_project": "my_bq_project",
+        "bq_dataset": "my_bq_dataset"
+    }
+}
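For the DAG's Variable.get call to resolve, this JSON must be loaded into Airflow as a Variable named bigquery_github_trends_variables, for example by importing the file under Admin -> Variables in the UI. A rough programmatic equivalent is sketched below, assuming a configured Airflow metadata database; the key and values mirror the file above.

from airflow.models import Variable

# Store the config dict as a JSON string so the DAG can read it back
# with Variable.get(..., deserialize_json=True).
Variable.set(
    "bigquery_github_trends_variables",
    {
        "bq_conn_id": "my_gcp_conn",
        "bq_project": "my_bq_project",
        "bq_dataset": "my_bq_dataset",
    },
    serialize_json=True,
)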
