# .azure-pipelines/unix-build.yml — Azure Pipelines steps template (114 lines)
# Azure Pipelines steps template: build dxtbx on a Unix agent and run the
# regression test suite.
#
# Variables (must be supplied by the including pipeline):
#   CACHE_VERSION: unique cache identifier
#   CURRENT_WEEK: weekly changing cache identifier
#   PYTHON_VERSION: string in the form of "3.x"
#   TODAY_ISO: today's date in ISO format, eg. "20200531"

steps:

# Obtain a shallow clone of the DXTBX repository.
# DXTBX will not be able to report proper version numbers
- checkout: self
  path: ./modules/dxtbx
  fetchDepth: 1
  displayName: Checkout $(Build.SourceBranch)

# Download other source repositories
- bash: |
    set -eux
    python3 modules/dxtbx/.azure-pipelines/bootstrap.py update
  displayName: Repository checkout
  workingDirectory: $(Pipeline.Workspace)

# Download additional source repositories required by cctbx-base (but not dxtbx)
- bash: |
    set -eux
    git clone https://github.com/dials/annlib.git modules/annlib
    git clone https://github.com/dials/annlib_adaptbx.git modules/annlib_adaptbx
  displayName: Repository checkout (additional cctbx)
  workingDirectory: $(Pipeline.Workspace)

# Create a new conda environment using the bootstrap script
# Extract the dials-data version so we can correctly cache regression data.
# The ##vso logging commands export the grep output as pipeline variables
# for use by the Cache@2 step below.
- script: |
    set -eux
    python3 modules/dxtbx/.azure-pipelines/bootstrap.py base --clean --python $(PYTHON_VERSION)
    set +ux
    . conda_base/bin/activate
    set -ux
    dials.data info -v
    echo "##vso[task.setvariable variable=DIALS_DATA_VERSION_FULL]$(dials.data info -v | grep version.full)"
    echo "##vso[task.setvariable variable=DIALS_DATA_VERSION]$(dials.data info -v | grep version.major)"
    # this is a bug in dials-data ^^^^^
    mkdir -p data
  displayName: Create python $(PYTHON_VERSION) environment
  workingDirectory: $(Pipeline.Workspace)

# Build dxtbx
- bash: |
    set -e
    . conda_base/bin/activate
    set -ux
    mkdir build
    cd build
    libtbx.configure dxtbx cbflib_adaptbx
    make
  displayName: Build dxtbx
  workingDirectory: $(Pipeline.Workspace)

# Retrieve the regression data from cache if possible
# The cache allows day-to-day incremental updates, which is relevant only if
# tests are added that refer to datasets in dials-data that were not previously
# referred to.
# New versions of dials-data also lead to cache updates, kick-started from the
# previous cache version.
# The cache is shared across operating systems and python versions, and flushed
# once a week and for dials-data major and minor releases (eg. 2.0->2.1).
- task: Cache@2
  inputs:
    key: '"data" | "$(CACHE_VERSION)-$(CURRENT_WEEK)" | "$(DIALS_DATA_VERSION)" | "$(TODAY_ISO)" | "$(DIALS_DATA_VERSION_FULL)"'
    restoreKeys: |
      "data" | "$(CACHE_VERSION)-$(CURRENT_WEEK)" | "$(DIALS_DATA_VERSION)" | "$(TODAY_ISO)"
      "data" | "$(CACHE_VERSION)-$(CURRENT_WEEK)" | "$(DIALS_DATA_VERSION)"
    path: $(Pipeline.Workspace)/data
    cacheHitVar: DATA_CACHED
  displayName: Restore regression data cache

# Finally, run the full regression test suite
# A test failure does not abort the step; instead the ##vso logging command
# marks the job as failed so the coverage/cleanup steps below still run.
- bash: |
    set -e
    . conda_base/bin/activate
    set -x
    . build/setpaths.sh
    set -u
    export DIALS_DATA=${PWD}/data
    cd modules/dxtbx
    export PYTHONDEVMODE=1
    pytest -v -ra -n auto --basetemp="$(Pipeline.Workspace)/tests" --durations=10 \
        --cov=$(pwd) --cov-report=html --cov-report=xml --cov-branch \
        --timeout=5400 --regression || echo "##vso[task.complete result=Failed;]Some tests failed"
  displayName: Run tests
  workingDirectory: $(Pipeline.Workspace)

# Upload coverage results; best-effort only (continueOnError + short timeout),
# so a codecov outage cannot fail or stall the build.
- script: |
    bash <(curl -s https://codecov.io/bash) -v -n "Python $(PYTHON_VERSION) $(Agent.OS)"
  displayName: Publish coverage stats
  continueOnError: True
  timeoutInMinutes: 2
  workingDirectory: $(Pipeline.Workspace)/modules/dxtbx

# Recover disk space after testing
# This is only relevant if we had cache misses, as free disk space is required to create cache archives
- bash: |
    echo Disk space usage:
    df -h
    du -sh *
    echo
    echo Test artefacts:
    du -h tests
    rm -rf tests
  displayName: Recover disk space
  workingDirectory: $(Pipeline.Workspace)
  condition: ne(variables.DATA_CACHED, 'true')