# Mark `docs` as phony so it is never mistaken for the docs/ directory.
.PHONY: docs

# Local installation targets.
install-dev:
	pip3 install -e ".[dev,web,slack]"

# Include the optional dependencies for every supported engine adapter.
install-engine-integration:
	pip3 install -e ".[dev,web,slack,mysql,postgres,databricks,redshift,bigquery,snowflake]"

# Register the pre-commit git hooks for this checkout.
install-pre-commit:
	pre-commit install
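
# Example first-time setup (a sketch, assuming a fresh virtual environment
# with Python 3 and pip3 on PATH):
#   make install-dev install-pre-commit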

# Linting and formatting via pre-commit.
style:
	pre-commit run --all-files

# Python-only checks: skip the JavaScript hooks.
py-style:
	SKIP=prettier,eslint pre-commit run --all-files

# UI-only checks: skip the Python hooks.
ui-style:
	SKIP=autoflake,isort,black,mypy pre-commit run --all-files
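
# Example: for a change that only touches Python code, running the Python
# hooks alone is usually enough before opening a PR:
#   make py-style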

# Run the unit test suite, excluding anything marked as an integration test.
unit-test:
	pytest -m "not integration"

# Run doctests embedded in the core and utils modules.
doc-test:
	PYTEST_PLUGINS=tests.common_fixtures pytest --doctest-modules sqlmesh/core sqlmesh/utils
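
# Example: narrowing a doctest run to a single module (a sketch; the module
# path is illustrative):
#   PYTEST_PLUGINS=tests.common_fixtures pytest --doctest-modules sqlmesh/utils/date.py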

# Integration tests, split out by pytest marker.
core-it-test:
	pytest -m "core_integration"

core_engine_it_test:
	pytest -m "engine_integration"

# Bring up the dockerized engines that the engine integration tests run against.
core_engine_it_test_docker:
	docker-compose -f ./tests/core/engine_adapter/docker-compose.yaml up -d

engine_it_test: core_engine_it_test_docker core_engine_it_test

it-test: core-it-test airflow-it-test-with-env

it-test-docker: core-it-test airflow-it-test-docker-with-env

# The full suite: unit tests, doctests, then integration tests.
test: unit-test doc-test it-test
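
# Example: running the engine integration tests end to end (a sketch,
# assuming Docker and docker-compose are available locally):
#   make engine_it_test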

# Build source and wheel distributions.
package:
	pip3 install wheel && python3 setup.py sdist bdist_wheel

# Upload the built distributions to PyPI.
publish: package
	pip3 install twine && python3 -m twine upload dist/*

# Install the package in development mode.
develop:
	python3 setup.py develop
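
# Example: twine reads credentials from ~/.pypirc or from the TWINE_USERNAME /
# TWINE_PASSWORD environment variables, so a non-interactive publish might
# look like (a sketch; the token is a placeholder):
#   TWINE_USERNAME=__token__ TWINE_PASSWORD=pypi-... make publish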

# Airflow example project helpers; these delegate to examples/airflow/Makefile.
airflow-init:
	export AIRFLOW_ENGINE_OPERATOR=spark && make -C ./examples/airflow init

airflow-run:
	make -C ./examples/airflow run

airflow-stop:
	make -C ./examples/airflow stop

airflow-clean:
	make -C ./examples/airflow clean

airflow-psql:
	make -C ./examples/airflow psql

airflow-spark-sql:
	make -C ./examples/airflow spark-sql

# Point the tests at the local Airflow metadata database, then run the
# Airflow integration suite.
airflow-it-test:
	export AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@localhost/airflow && \
	pytest -m "airflow_integration"

airflow-it-test-docker:
	make -C ./examples/airflow it-test-docker

# Full lifecycle: set up a clean Airflow environment, run the tests, tear down.
airflow-it-test-with-env: develop airflow-clean airflow-init airflow-run airflow-it-test airflow-stop

airflow-it-test-docker-with-env: develop airflow-clean airflow-init airflow-run airflow-it-test-docker airflow-stop
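
# Example: the self-contained Airflow test cycle is a single target (a sketch,
# assuming the environment under examples/airflow can start locally):
#   make airflow-it-test-with-env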

# Serve the mkdocs documentation locally.
docs-serve:
	mkdocs serve

# Generate the pdoc API reference into the Read the Docs output directory.
api-docs:
	python pdoc/cli.py -o docs/_readthedocs/html/

api-docs-serve:
	python pdoc/cli.py
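
# Example: mkdocs serves on http://127.0.0.1:8000 by default, so a local
# docs preview is just:
#   make docs-serve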

# Build and start the UI, opening a browser when the `open` command exists.
ui-up:
	docker-compose up --build -d && $(if $(shell which open), open http://localhost:8001, echo "Open http://localhost:8001 in your browser.")

ui-down:
	docker-compose down

# Build the UI assets via the compose build override.
ui-build:
	docker-compose -f docker-compose.yml -f docker-compose.build.yml run app
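
# Example: a quick UI smoke test (a sketch, assuming Docker is running):
#   make ui-up
#   (exercise http://localhost:8001 in a browser)
#   make ui-down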

# Remove build artifacts from previous packaging runs.
clean-build:
	rm -rf build/ && rm -rf dist/ && rm -rf *.egg-info

# Release flow: clean old artifacts, rebuild the UI, then package and publish.
dev-publish: clean-build ui-build publish
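
# Example: a release from a clean checkout (a sketch; assumes Docker for the
# UI build and PyPI credentials for twine):
#   make dev-publish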