# Adapted from https://github.com/numba/numba/blob/master/azure-pipelines.yml
jobs:
# Mac and Linux use the same template
- template: ci/azure/posix.yml
  parameters:
    name: macOS
    vmImage: xcode9-macos10.13

- template: ci/azure/posix.yml
  parameters:
    name: Linux
    vmImage: ubuntu-16.04

- template: ci/azure/windows.yml
  parameters:
    name: Windows
    vmImage: vs2017-win2016

- job: 'Checks'
  pool:
    vmImage: ubuntu-16.04
  timeoutInMinutes: 90
  steps:
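  # The `##vso[...]` echoes below are Azure Pipelines logging commands:
  # `task.prependpath` prepends a directory to PATH and `task.setvariable`
  # defines a pipeline variable, both taking effect in the steps that follow.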
  - script: |
      echo '##vso[task.prependpath]$(HOME)/miniconda3/bin'
      echo '##vso[task.setvariable variable=ENV_FILE]environment.yml'
      echo '##vso[task.setvariable variable=AZURE]true'
    displayName: 'Setting environment variables'

  # Do not require a conda environment
  - script: ci/code_checks.sh patterns
    displayName: 'Looking for unwanted patterns'
    condition: true

  - script: |
      sudo apt-get update
      sudo apt-get install -y libc6-dev-i386
      ci/setup_env.sh
    displayName: 'Setup environment and build pandas'
    condition: true

  # Do not require pandas
  - script: |
      source activate pandas-dev
      ci/code_checks.sh lint
    displayName: 'Linting'
    condition: true

  - script: |
      source activate pandas-dev
      ci/code_checks.sh dependencies
    displayName: 'Dependencies consistency'
    condition: true

  # Require pandas
  - script: |
      source activate pandas-dev
      ci/code_checks.sh code
    displayName: 'Checks on imported code'
    condition: true

  - script: |
      source activate pandas-dev
      ci/code_checks.sh doctests
    displayName: 'Running doctests'
    condition: true

  - script: |
      source activate pandas-dev
      ci/code_checks.sh docstrings
    displayName: 'Docstring validation'
    condition: true

  - script: |
      source activate pandas-dev
      ci/code_checks.sh typing
    displayName: 'Typing validation'
    condition: true

  - script: |
      source activate pandas-dev
      pytest --capture=no --strict scripts
    displayName: 'Testing docstring validation script'
    condition: true
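  # Benchmarks only run when files under asv_bench/ differ from upstream/master;
  # if `asv dev` reports any failure, the step logs an error and fails the build,
  # otherwise it just reports success.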
  - script: |
      source activate pandas-dev
      cd asv_bench
      asv check -E existing
      git remote add upstream https://github.com/pandas-dev/pandas.git
      git fetch upstream
      if git diff upstream/master --name-only | grep -q "^asv_bench/"; then
          asv machine --yes
          ASV_OUTPUT="$(asv dev)"
          if [[ $(echo "$ASV_OUTPUT" | grep "failed") ]]; then
              echo "##vso[task.logissue type=error]Benchmarks run with errors"
              echo "$ASV_OUTPUT"
              exit 1
          else
              echo "Benchmarks run without errors"
          fi
      else
          echo "Benchmarks did not run, no changes detected"
      fi
    displayName: 'Running benchmarks'
    condition: true

- job: 'Web_and_Docs'
  pool:
    vmImage: ubuntu-16.04
  timeoutInMinutes: 90
  steps:
  - script: |
      echo '##vso[task.setvariable variable=ENV_FILE]environment.yml'
      echo '##vso[task.prependpath]$(HOME)/miniconda3/bin'
    displayName: 'Setting environment variables'

  - script: |
      sudo apt-get install -y libc6-dev-i386
      ci/setup_env.sh
    displayName: 'Setup environment and build pandas'

  - script: |
      source activate pandas-dev
      python web/pandas_web.py web/pandas --target-path=web/build
    displayName: 'Build website'
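  # In the step below, the pipe through `tee` means doc/make.py's exit status has
  # to be recovered from PIPESTATUS; IPY_RET is set to 1 when the ipython directive
  # error marker is found in sphinx.log, and the step fails if either is non-zero.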
  - script: |
      source activate pandas-dev
      # Ideally this would simply be `doc/make.py --warnings-are-errors`; the extra piping and grep are needed
      # because the ipython directive does not fail the build on errors (https://github.com/ipython/ipython/issues/11547)
      doc/make.py --warnings-are-errors | tee sphinx.log ; SPHINX_RET=${PIPESTATUS[0]}
      grep -B1 "^<<<-------------------------------------------------------------------------$" sphinx.log ; IPY_RET=$(( $? != 1 ))
      exit $(( $SPHINX_RET + $IPY_RET ))
    displayName: 'Build documentation'
  - script: |
      mkdir -p to_deploy/docs
      cp -r web/build/* to_deploy/
      cp -r doc/build/html/* to_deploy/docs/
    displayName: 'Merge website and docs'
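  # The remaining steps commit the merged build and push it to GitHub Pages;
  # their `condition` restricts them to builds of master that are not pull requests.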
  - script: |
      cd to_deploy
      git init
      touch .nojekyll
      echo "dev.pandas.io" > CNAME
      printf "User-agent: *\nDisallow: /" > robots.txt
      git add --all .
      git config user.email "pandas-dev@python.org"
      git config user.name "pandas-bot"
      git commit -m "pandas web and documentation in master"
    displayName: 'Create git repo for docs build'
    condition: |
      and(not(eq(variables['Build.Reason'], 'PullRequest')),
          eq(variables['Build.SourceBranch'], 'refs/heads/master'))
  # For `InstallSSHKey@0` to work, the following steps are required:
  # 1. Generate a private/public key pair (e.g. `ssh-keygen -t rsa -b 4096 -C "your_email@example.com"`)
  # 2. Go to "Library > Secure files" in the Azure Pipelines dashboard: https://dev.azure.com/pandas-dev/pandas/_library?itemType=SecureFiles
  # 3. Click on "+ Secure file"
  # 4. Upload the private key (the file name must match the one specified in the "sshKeySecureFile" input below, "pandas_docs_key")
  # 5. Click on the file name after it is created, tick the box "Authorize for use in all pipelines" and save
  # 6. The public key specified in "sshPublicKey" is the public counterpart of the uploaded private key, and needs to be set as a deploy key (with write access) of the repo where the docs will be pushed: https://github.com/pandas-dev/pandas-dev.github.io/settings/keys
  - task: InstallSSHKey@0
    inputs:
      hostName: 'github.com,192.30.252.128 ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ=='
      sshPublicKey: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDHmz3l/EdqrgNxEUKkwDUuUcLv91unig03pYFGO/DMIgCmPdMG96zAgfnESd837Rm0wSSqylwSzkRJt5MV/TpFlcVifDLDQmUhqCeO8Z6dLl/oe35UKmyYICVwcvQTAaHNnYRpKC5IUlTh0JEtw9fGlnp1Ta7U1ENBLbKdpywczElhZu+hOQ892zqOj3CwA+U2329/d6cd7YnqIKoFN9DWT3kS5K6JE4IoBfQEVekIOs23bKjNLvPoOmi6CroAhu/K8j+NCWQjge5eJf2x/yTnIIP1PlEcXoHIr8io517posIx3TBup+CN8bNS1PpDW3jyD3ttl1uoBudjOQrobNnJeR6Rn67DRkG6IhSwr3BWj8alwUG5mTdZzwV5Pa9KZFdIiqX7NoDGg+itsR39QCn0thK8lGRNSR8KrWC1PSjecwelKBO7uQ7rnk/rkrZdBWR4oEA8YgNH8tirUw5WfOr5a0AIaJicKxGKNdMxZt+zmC+bS7F4YCOGIm9KHa43RrKhoGRhRf9fHHHKUPwFGqtWG4ykcUgoamDOURJyepesBAO3FiRE9rLU6ILbB3yEqqoekborHmAJD5vf7PWItW3Q/YQKuk3kkqRcKnexPyzyyq5lUgTi8CxxZdaASIOu294wjBhhdyHlXEkVTNJ9JKkj/obF+XiIIp0cBDsOXY9hDQ== pandas-dev@python.org'
      sshKeySecureFile: 'pandas_docs_key'
    displayName: 'Install GitHub ssh deployment key'
    condition: |
      and(not(eq(variables['Build.Reason'], 'PullRequest')),
          eq(variables['Build.SourceBranch'], 'refs/heads/master'))
  - script: |
      cd to_deploy
      git remote add origin git@github.com:pandas-dev/pandas-dev.github.io.git
      git push -f origin master
    displayName: 'Publish web and docs to GitHub pages'
    condition: |
      and(not(eq(variables['Build.Reason'], 'PullRequest')),
          eq(variables['Build.SourceBranch'], 'refs/heads/master'))