@@ -34,7 +34,6 @@ jobs:
   test-dev:
     name: "Test upstream dev (${{ matrix.os }}, python: ${{ matrix.python }})"
     runs-on: ${{ matrix.os }}
-    if: github.repository == 'dask-contrib/dask-sql'
     env:
       CONDA_FILE: continuous_integration/environment-${{ matrix.python }}-dev.yaml
     defaults:
@@ -76,9 +75,22 @@ jobs:
         if: env.which_upstream == 'Dask'
         run: |
           mamba install --no-channel-priority dask/label/dev::dask
+      - name: Install pytest-reportlog
+        run: |
+          # TODO: add pytest-reportlog to testing environments if we move over to JSONL output
+          mamba install pytest-reportlog
       - name: Test with pytest
+        id: run_tests
         run: |
-          pytest --junitxml=junit/test-results.xml --cov-report=xml -n auto tests --dist loadfile
+          pytest --report-log test-${{ matrix.os }}-py${{ matrix.python }}-results.jsonl --cov-report=xml -n auto tests --dist loadfile
+      - name: Upload pytest results for failure
+        if: |
+          always()
+          && steps.run_tests.outcome != 'skipped'
+        uses: actions/upload-artifact@v3
+        with:
+          name: test-${{ matrix.os }}-py${{ matrix.python }}-results
+          path: test-${{ matrix.os }}-py${{ matrix.python }}-results.jsonl

   cluster-dev:
     name: "Test upstream dev in a dask cluster"
@@ -104,7 +116,8 @@ jobs:
           python setup.py build install
       - name: Install cluster dependencies
         run: |
-          mamba install python-blosc lz4 -c conda-forge
+          # TODO: add pytest-reportlog to testing environments if we move over to JSONL output
+          mamba install pytest-reportlog python-blosc lz4 -c conda-forge

           which python
           pip list
@@ -127,8 +140,17 @@ jobs:
           docker logs dask-scheduler
           docker logs dask-worker
       - name: Test with pytest while running an independent dask cluster
+        id: run_tests
         run: |
-          DASK_SQL_TEST_SCHEDULER="tcp://127.0.0.1:8786" pytest --junitxml=junit/test-cluster-results.xml --cov-report=xml -n auto tests --dist loadfile
+          DASK_SQL_TEST_SCHEDULER="tcp://127.0.0.1:8786" pytest --report-log test-cluster-results.jsonl --cov-report=xml -n auto tests --dist loadfile
+      - name: Upload pytest results for failure
+        if: |
+          always()
+          && steps.run_tests.outcome != 'skipped'
+        uses: actions/upload-artifact@v3
+        with:
+          name: test-cluster-results
+          path: test-cluster-results.jsonl

   import-dev:
     name: "Test importing with bare requirements and upstream dev"
@@ -168,59 +190,28 @@ jobs:

   report-failures:
     name: Open issue for upstream dev failures
-    needs: [test-dev, cluster-dev]
+    needs: [test-dev, cluster-dev, import-dev]
     if: |
       always()
       && (
-        needs.test-dev.result == 'failure' || needs.cluster-dev.result == 'failure'
+        needs.test-dev.result == 'failure'
+        || needs.cluster-dev.result == 'failure'
+        || needs.import-dev.result == 'failure'
       )
+      && github.repository == 'dask-contrib/dask-sql'
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - name: Report failures
-        uses: actions/github-script@v6
+      - uses: actions/download-artifact@v3
+        with:
+          name: test-ubuntu-latest-py3.10-results
+      - name: Prepare issue label
+        run: |
+          # convert which_upstream to lowercase
+          echo "which_upstream_lower=${which_upstream,,}" >> $GITHUB_ENV
+      - name: Open or update issue on failure
+        uses: xarray-contrib/[email protected]
         with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          script: |
-            const title = "⚠️ Upstream CI ${{ env.which_upstream }} failed ⚠️"
-            const workflow_url = `https://github.com/${process.env.GITHUB_REPOSITORY}/actions/runs/${process.env.GITHUB_RUN_ID}`
-            const issue_body = `[Workflow Run URL](${workflow_url})`
-            // Run GraphQL query against GitHub API to find the most recent open issue used for reporting failures
-            const query = `query($owner:String!, $name:String!, $creator:String!, $label:String!){
-              repository(owner: $owner, name: $name) {
-                issues(first: 1, states: OPEN, filterBy: {createdBy: $creator, labels: [$label]}, orderBy: {field: CREATED_AT, direction: DESC}) {
-                  edges {
-                    node {
-                      body
-                      id
-                      number
-                    }
-                  }
-                }
-              }
-            }`;
-            const variables = {
-              owner: context.repo.owner,
-              name: context.repo.repo,
-              label: 'upstream',
-              creator: "github-actions[bot]"
-            }
-            const result = await github.graphql(query, variables)
-            // If no issue is open, create a new issue,
-            // else update the body of the existing issue.
-            if (result.repository.issues.edges.length === 0) {
-              github.issues.create({
-                owner: variables.owner,
-                repo: variables.name,
-                body: issue_body,
-                title: title,
-                labels: [variables.label]
-              })
-            } else {
-              github.issues.update({
-                owner: variables.owner,
-                repo: variables.name,
-                issue_number: result.repository.issues.edges[0].node.number,
-                body: issue_body
-              })
-            }
+          log-path: test-ubuntu-latest-py3.10-results.jsonl
+          issue-title: ⚠️ Upstream CI ${{ env.which_upstream }} failed ⚠️
+          issue-label: upstream-${{ env.which_upstream_lower }}
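For context on the new artifacts: pytest-reportlog writes one JSON object per line, and the download-artifact step above hands that file to issue-from-pytest-log. A minimal sketch (not part of this diff) of inspecting such a file locally, assuming the artifact file name used above and pytest-reportlog's usual `$report_type`/`when`/`outcome`/`nodeid` fields:

```python
import json

# Hypothetical local check of a pytest-reportlog JSONL file; the path matches
# the artifact name used in the workflow above, and the field names assume
# pytest-reportlog's standard per-test records.
log_path = "test-ubuntu-latest-py3.10-results.jsonl"

with open(log_path) as fh:
    for line in fh:
        record = json.loads(line)
        # Keep only per-test reports for the "call" phase.
        if record.get("$report_type") != "TestReport" or record.get("when") != "call":
            continue
        if record.get("outcome") == "failed":
            print(record["nodeid"])
```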