forked from databrickslabs/dbx
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[FEATURE][databrickslabs#123] add support for jinja2 deployment files (…
…databrickslabs#124) [FEATURE][databrickslabs#123] add support for jinja2 deployment files (databrickslabs#124)
- Loading branch information
1 parent
af9fe07
commit 36a6cd9
Showing
20 changed files
with
628 additions
and
11 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,21 @@ | ||
Jinja2 Support: Environment variables, logic and loops | ||
============================================================= | ||
|
||
Since version 0.4.0 :code:`dbx` supports `Jinja2 <https://jinja.palletsprojects.com/en/3.0.x/api/>`_ rendering for JSON and YAML based configurations. | ||
This allows you to use environment variables in the deployment, add variable-based conditions, `Jinja filters <https://jinja.palletsprojects.com/en/3.0.x/templates/#filters>`_ and for loops to make your deployment more flexible for CI pipelines. | ||
|
||
To add Jinja2 support to your deployment file, please add the postfix :code:`.j2` to the name of your deployment file, for example :code:`deployment.yml.j2`. | ||
|
||
Please find examples on how to use Jinja2 templates below: | ||
|
||
.. tabs:: | ||
|
||
.. tab:: deployment.json.j2 | ||
|
||
.. literalinclude:: ../../tests/deployment-configs/jinja-example.json.j2 | ||
:language: jinja | ||
|
||
.. tab:: deployment.yml.j2 | ||
|
||
.. literalinclude:: ../../tests/deployment-configs/jinja-example.yaml.j2 | ||
:language: yaml+jinja |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
File renamed without changes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,15 @@ | ||
environments: | ||
default: | ||
jobs: | ||
- name: "your-job-name" | ||
new_cluster: | ||
spark_version: "7.3.x-cpu-ml-scala2.12" | ||
node_type_id: "some-node-type" | ||
aws_attributes: | ||
first_on_demand: 0 | ||
availability: "SPOT" | ||
num_workers: 2 | ||
libraries: [] | ||
max_retries: 0 | ||
spark_python_task: | ||
python_file: "tests/deployment-configs/placeholder_1.py" |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,23 @@ | ||
{ | ||
"default": { | ||
"jobs": [ | ||
{ | ||
"name": "your-job-name", | ||
"new_cluster": { | ||
"spark_version": "7.3.x-cpu-ml-scala2.12", | ||
"node_type_id": "some-node-type", | ||
"aws_attributes": { | ||
"first_on_demand": 0, | ||
"availability": "SPOT" | ||
}, | ||
"num_workers": 2 | ||
}, | ||
"libraries": [], | ||
"max_retries": 0, | ||
"spark_python_task": { | ||
"python_file": "tests/deployment-configs/placeholder_1.py" | ||
} | ||
} | ||
] | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,89 @@ | ||
{ | ||
"default": { | ||
"jobs": [ | ||
{ | ||
"name": "your-job-name", | ||
"new_cluster": { | ||
"spark_version": "7.3.x-cpu-ml-scala2.12", | ||
"node_type_id": "some-node-type", | ||
"aws_attributes": { | ||
"first_on_demand": 0, | ||
"availability": "SPOT" | ||
}, | ||
"num_workers": 2 | ||
}, | ||
"libraries": [], | ||
"max_retries": 0, | ||
"spark_python_task": { | ||
"python_file": "tests/deployment-configs/placeholder_1.py" | ||
} | ||
}, | ||
{ | ||
"name": "your-job-name-2", | ||
"new_cluster": { | ||
"spark_version": "7.3.x-cpu-ml-scala2.12", | ||
"node_type_id": "some-node-type", | ||
"aws_attributes": { | ||
"first_on_demand": 0, | ||
"availability": "SPOT" | ||
}, | ||
"num_workers": 2 | ||
}, | ||
"libraries": [], | ||
"max_retries": 0, | ||
"spark_python_task": { | ||
"python_file": "tests/deployment-configs/placeholder_2.py" | ||
} | ||
}, | ||
{ | ||
"name": "your-job-name-3", | ||
"new_cluster": { | ||
"spark_version": "7.3.x-cpu-ml-scala2.12", | ||
"node_type_id": "some-node-type", | ||
"aws_attributes": { | ||
"first_on_demand": 0, | ||
"availability": "SPOT" | ||
}, | ||
"num_workers": 2 | ||
}, | ||
"libraries": [ | ||
{ | ||
"pypi": { | ||
"package": "pydash" | ||
} | ||
} | ||
], | ||
"max_retries": 5, | ||
"spark_python_task": { | ||
"python_file": "tests/deployment-configs/placeholder_2.py" | ||
} | ||
}, | ||
{ | ||
"name": "your-job-name-4", | ||
"new_cluster": { | ||
"spark_version": "7.3.x-cpu-ml-scala2.12", | ||
"node_type_id": "some-node-type", | ||
"aws_attributes": { | ||
"first_on_demand": 0, | ||
"availability": "SPOT" | ||
}, | ||
"autoscale": { | ||
"min_workers": 2, | ||
"max_workers": 5 | ||
} | ||
}, | ||
"libraries": [ | ||
{ | ||
"pypi": { | ||
"package": "pydash" | ||
} | ||
} | ||
], | ||
"max_retries": 5, | ||
"spark_python_task": { | ||
"python_file": "tests/deployment-configs/placeholder_2.py" | ||
} | ||
} | ||
] | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,64 @@ | ||
# http://yaml.org/spec/1.2/spec.html | ||
# https://learnxinyminutes.com/docs/yaml/ | ||
|
||
custom: | ||
basic-cluster-props: &basic-cluster-props | ||
spark_version: "7.3.x-cpu-ml-scala2.12" | ||
node_type_id: "some-node-type" | ||
aws_attributes: | ||
first_on_demand: 0 | ||
availability: "SPOT" | ||
basic-auto-scale-props: &basic-auto-scale-props | ||
autoscale: | ||
min_workers: 2 | ||
max_workers: 5 | ||
|
||
basic-static-cluster: &basic-static-cluster | ||
new_cluster: | ||
<<: *basic-cluster-props | ||
num_workers: 2 | ||
|
||
basic-autoscale-cluster: &basic-autoscale-cluster | ||
new_cluster: | ||
<<: # merge these two maps and place them here. | ||
- *basic-cluster-props | ||
- *basic-auto-scale-props | ||
|
||
basic-cluster-libraries: &basic-cluster-libraries | ||
libraries: | ||
- pypi: | ||
package: "pydash" | ||
|
||
|
||
environments: | ||
default: | ||
jobs: | ||
- name: "your-job-name" | ||
<<: *basic-static-cluster | ||
libraries: [] | ||
max_retries: 0 | ||
spark_python_task: | ||
python_file: "tests/deployment-configs/placeholder_1.py" | ||
|
||
- name: "your-job-name-2" | ||
<<: *basic-static-cluster | ||
libraries: [] | ||
max_retries: 0 | ||
spark_python_task: | ||
python_file: "tests/deployment-configs/placeholder_2.py" | ||
|
||
- name: "your-job-name-3" | ||
<<: | ||
- *basic-static-cluster | ||
- *basic-cluster-libraries | ||
max_retries: 5 | ||
spark_python_task: | ||
python_file: "tests/deployment-configs/placeholder_2.py" | ||
|
||
- name: "your-job-name-4" | ||
<<: | ||
- *basic-autoscale-cluster | ||
- *basic-cluster-libraries | ||
max_retries: 5 | ||
spark_python_task: | ||
python_file: "tests/deployment-configs/placeholder_2.py" |
File renamed without changes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,42 @@ | ||
{ | ||
"default": { | ||
"jobs": [ | ||
{ | ||
"name": "multitask-job-name", | ||
"tasks": [ | ||
{ | ||
"task_key": "first-task", | ||
"description": "some description", | ||
"new_cluster": { | ||
"spark_version": "7.3.x-cpu-ml-scala2.12", | ||
"node_type_id": "some-node-type", | ||
"num_workers": 2 | ||
}, | ||
"max_retries": 0, | ||
"spark_python_task": { | ||
"python_file": "placeholder_1.py" | ||
} | ||
}, | ||
{ | ||
"task_key": "second", | ||
"description": "some description", | ||
"new_cluster": { | ||
"spark_version": "7.3.x-cpu-ml-scala2.12", | ||
"node_type_id": "some-node-type", | ||
"num_workers": 2 | ||
}, | ||
"max_retries": 0, | ||
"spark_python_task": { | ||
"python_file": "placeholder_1.py" | ||
}, | ||
"depends_on": [ | ||
{ | ||
"task_key": "first-task" | ||
} | ||
] | ||
} | ||
] | ||
} | ||
] | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,28 @@ | ||
# http://yaml.org/spec/1.2/spec.html | ||
# https://learnxinyminutes.com/docs/yaml/ | ||
|
||
custom: | ||
basic-cluster-props: &basic-cluster-props | ||
spark_version: "7.3.x-cpu-ml-scala2.12" | ||
node_type_id: "some-node-type" | ||
|
||
basic-static-cluster: &basic-static-cluster | ||
new_cluster: | ||
<<: *basic-cluster-props | ||
num_workers: 2 | ||
|
||
environments: | ||
default: | ||
jobs: | ||
- name: "your-job-name" | ||
tasks: | ||
- task_key: "first-task" | ||
<<: *basic-static-cluster | ||
spark_python_task: | ||
python_file: "./placeholder_1.py" | ||
- task_key: "second-task" | ||
<<: *basic-static-cluster | ||
spark_python_task: | ||
python_file: "./placeholder_2.py" | ||
depends_on: | ||
- task_key: "first-task" |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,29 @@ | ||
{ | ||
"default": { | ||
"jobs": [ | ||
{ | ||
"name": "your-job-name", | ||
"timeout_seconds": "{{ TIMEOUT }}", | ||
"email_notifications": { | ||
"on_failure": [ | ||
"{{ ALERT_EMAIL | lower }}", | ||
"[email protected]" | ||
] | ||
}, | ||
"new_cluster": { | ||
"spark_version": "7.3.x-cpu-ml-scala2.12", | ||
"node_type_id": "some-node-type", | ||
"aws_attributes": { | ||
"first_on_demand": 0, | ||
"availability": "{{ AVAILABILITY | default('SPOT') }}" | ||
}, | ||
"num_workers": 2 | ||
}, | ||
"libraries": [], | ||
"max_retries": "{{ MAX_RETRY | default(3) }}", | ||
"spark_python_task": { | ||
"python_file": "tests/deployment-configs/placeholder_1.py" | ||
} | ||
}] | ||
} | ||
} |
Oops, something went wrong.