Skip to content

Commit cbaf10e

Browse files
committed
Merge remote-tracking branch 'upstream/main' into experimental-distributed-delete
2 parents eb95e5e + e6266f7 commit cbaf10e

File tree

80 files changed

+5886
-2270
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

80 files changed

+5886
-2270
lines changed

.github/workflows/ci.yml

+6-30
Original file line numberDiff line numberDiff line change
@@ -198,39 +198,15 @@ jobs:
198198

199199
##############################################################################
200200

201-
# Depends on all the other jobs to provide a single anchor that indicates the
202-
# final status. Status reporting will become more sophisticated in the future
203-
# and we can hopefully avoid the need to explicitly list every single job...
204-
summary:
205-
# Even if you have an explicit if condition, you still need to override
206-
# GitHub's default behavior of not running if any dependencies failed.
201+
# Aggregate job status and alerting on failures.
202+
ci_summary:
207203
if: always()
208-
runs-on: ubuntu-20.04
209204
needs:
210205
- setup
211206
- runtime
212207
- runtime_small
213208
- runtime_tracing
214-
steps:
215-
- name: "Checking out repository"
216-
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
217-
- name: Getting failed jobs
218-
id: failed_jobs
219-
run: |
220-
echo '${{ toJson(needs) }}'
221-
FAILED_JOBS="$(echo '${{ toJson(needs) }}' \
222-
| jq --raw-output \
223-
'map_values(select(.result!="success" and .result!="skipped")) | keys | join(",")' \
224-
)"
225-
echo "failed-jobs=${FAILED_JOBS}" >> $GITHUB_OUTPUT
226-
if [[ "${FAILED_JOBS}" != "" ]]; then
227-
echo "The following jobs failed: ${FAILED_JOBS}"
228-
exit 1
229-
fi
230-
- name: Posting to Discord
231-
uses: sarisia/actions-status-discord@ce8cc68e4e626000136b3c702d049a154243e490 # v1.14.7
232-
if: failure() && github.ref_name == 'main'
233-
with:
234-
webhook: ${{ secrets.DISCORD_WEBHOOK }}
235-
description: "The following jobs failed: ${{ steps.failed_jobs.outputs.failed-jobs }}"
236-
url: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/attempts/${{ github.run_attempt }}"
209+
uses: ./.github/workflows/workflow_summary.yml
210+
secrets: inherit
211+
with:
212+
jobs-json: ${{ toJson(needs) }}

.github/workflows/pkgci.yml

+27-1
Original file line numberDiff line numberDiff line change
@@ -104,10 +104,11 @@ jobs:
104104
if: contains(fromJson(needs.setup.outputs.enabled-jobs), 'test_onnx')
105105
uses: ./.github/workflows/pkgci_test_onnx.yml
106106

107+
# TODO(https://github.com/iree-org/iree-test-suites/issues/56): re-enable when git LFS quota is available
107108
test_sharktank:
108109
name: Test Sharktank
109110
needs: [setup, build_packages]
110-
if: contains(fromJson(needs.setup.outputs.enabled-jobs), 'test_sharktank')
111+
if: false && contains(fromJson(needs.setup.outputs.enabled-jobs), 'test_sharktank')
111112
uses: ./.github/workflows/pkgci_test_sharktank.yml
112113

113114
test_tensorflow:
@@ -121,3 +122,28 @@ jobs:
121122
needs: [setup, build_packages]
122123
if: contains(fromJson(needs.setup.outputs.enabled-jobs), 'test_pjrt')
123124
uses: ./.github/workflows/pkgci_test_pjrt.yml
125+
126+
##############################################################################
127+
128+
# Aggregate job status and alerting on failures.
129+
pkgci_summary:
130+
if: always()
131+
needs:
132+
- setup
133+
- build_packages
134+
- unit_test
135+
- regression_test
136+
- test_amd_mi250
137+
- test_amd_mi300
138+
- test_amd_w7900
139+
# - test_nvidia_t4
140+
- test_android
141+
- test_riscv64
142+
- test_onnx
143+
- test_sharktank
144+
- test_tensorflow
145+
- test_pjrt
146+
uses: ./.github/workflows/workflow_summary.yml
147+
secrets: inherit
148+
with:
149+
jobs-json: ${{ toJson(needs) }}
.github/workflows/workflow_summary.yml

+63
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,63 @@
1+
# Copyright 2024 The IREE Authors
2+
#
3+
# Licensed under the Apache License v2.0 with LLVM Exceptions.
4+
# See https://llvm.org/LICENSE.txt for license information.
5+
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6+
7+
# Checks the result status of each job provided by 'jobs-json' and sends an
8+
# alert if at least one job failed.
9+
#
10+
# Usage:
11+
# ```yml
12+
# jobs:
13+
# job_1:
14+
# ...
15+
# job_2:
16+
# ...
17+
# my_summary:
18+
# if: always()
19+
# needs:
20+
# - job_1
21+
# - job_2
22+
# uses: ./.github/workflows/workflow_summary.yml
23+
# secrets: inherit
24+
# with:
25+
# jobs-json: ${{ toJson(needs) }}
26+
# ```
27+
28+
name: Workflow Summary
29+
30+
on:
31+
workflow_call:
32+
inputs:
33+
jobs-json:
34+
type: string
35+
description: The output of `toJson(needs)`
36+
37+
permissions:
38+
contents: read
39+
40+
jobs:
41+
summary:
42+
runs-on: ubuntu-20.04
43+
steps:
44+
- name: Getting failed jobs
45+
id: failed_jobs
46+
run: |
47+
echo '${{ inputs.jobs-json }}'
48+
FAILED_JOBS="$(echo '${{ inputs.jobs-json }}' \
49+
| jq --raw-output \
50+
'map_values(select(.result!="success" and .result!="skipped")) | keys | join(",")' \
51+
)"
52+
echo "failed-jobs=${FAILED_JOBS}" >> $GITHUB_OUTPUT
53+
if [[ "${FAILED_JOBS}" != "" ]]; then
54+
echo "The following jobs failed: ${FAILED_JOBS}"
55+
exit 1
56+
fi
57+
- name: Posting to Discord
58+
uses: sarisia/actions-status-discord@ce8cc68e4e626000136b3c702d049a154243e490 # v1.14.7
59+
if: failure() && github.ref_name == 'main' && github.repository_owner == 'iree-org'
60+
with:
61+
webhook: ${{ secrets.DISCORD_WEBHOOK }}
62+
description: "The following jobs failed: ${{ steps.failed_jobs.outputs.failed-jobs }}"
63+
url: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/attempts/${{ github.run_attempt }}"

build_tools/github_actions/configure_ci.py

+9-3
Original file line numberDiff line numberDiff line change
@@ -39,8 +39,8 @@
3939
import fnmatch
4040
import json
4141
import os
42-
import re
4342
import pathlib
43+
import re
4444
import string
4545
import subprocess
4646
import sys
@@ -111,7 +111,12 @@ def contains(cls, val):
111111
RUNNER_ENV_DEFAULT = "prod"
112112
RUNNER_ENV_OPTIONS = [RUNNER_ENV_DEFAULT, "testing"]
113113

114-
CONTROL_JOBS = frozenset(["setup", "summary"])
114+
CONTROL_JOB_REGEXES = frozenset(
115+
[
116+
re.compile("setup"),
117+
re.compile(".*summary"),
118+
]
119+
)
115120

116121
# Jobs to run only on postsubmit by default.
117122
# They may also run on presubmit only under certain conditions.
@@ -380,7 +385,8 @@ def parse_jobs_from_workflow_file(workflow_file: pathlib.Path) -> Set[str]:
380385

381386
workflow = yaml.load(workflow_file.read_text(), Loader=yaml.SafeLoader)
382387
all_jobs = set(workflow["jobs"].keys())
383-
all_jobs -= CONTROL_JOBS
388+
for regex in CONTROL_JOB_REGEXES:
389+
all_jobs = {j for j in all_jobs if not regex.match(j)}
384390

385391
if ALL_KEY in all_jobs:
386392
raise ValueError(f"Workflow has job with reserved name '{ALL_KEY}'")

compiler/src/iree/compiler/Codegen/Common/EncodingUtils.cpp

+4-7
Original file line numberDiff line numberDiff line change
@@ -100,16 +100,13 @@ MaterializeEncodingTypeConverter::MaterializeEncodingTypeConverter(
100100
// itself.
101101
RankedTensorType tensorType =
102102
transposeNarrowN ? transposeIfNarrowNResult(type) : type;
103-
FailureOr<MaterializeEncodingInfo> maybeEncodingInfo =
104-
getEncodingInfo(tensorType);
105-
if (failed(maybeEncodingInfo) ||
106-
IREE::Codegen::isIdentityLayout(maybeEncodingInfo.value())) {
103+
MaterializeEncodingInfo encodingInfo = getEncodingInfo(tensorType);
104+
if (IREE::Codegen::isIdentityLayout(encodingInfo)) {
107105
return dropEncoding(type);
108106
}
109-
auto encodingInfo = *maybeEncodingInfo;
110107
auto packedType = cast<RankedTensorType>(tensor::PackOp::inferPackedType(
111-
tensorType, maybeEncodingInfo->innerTileSizes,
112-
maybeEncodingInfo->innerDimsPos, maybeEncodingInfo->outerDimsPerm));
108+
tensorType, encodingInfo.innerTileSizes, encodingInfo.innerDimsPos,
109+
encodingInfo.outerDimsPerm));
113110

114111
// There is no swizzle, we are already done. Typically the case on CPU.
115112
if (!encodingInfo.swizzle) {

compiler/src/iree/compiler/Codegen/Common/EncodingUtils.h

+1-1
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ class MaterializeEncodingTypeConverter : public TypeConverter {
4242
return layoutAttr;
4343
}
4444

45-
FailureOr<IREE::Codegen::MaterializeEncodingInfo>
45+
IREE::Codegen::MaterializeEncodingInfo
4646
getEncodingInfo(RankedTensorType type) const {
4747
return layoutAttr.getEncodingInfo(type);
4848
}

compiler/src/iree/compiler/Codegen/Common/GPU/GPUMaterializeEncoding.cpp

+14-19
Original file line numberDiff line numberDiff line change
@@ -108,13 +108,9 @@ struct GPUSetEncodingOpLoweringConversion
108108
return success();
109109
}
110110

111-
FailureOr<MaterializeEncodingInfo> maybeEncodingInfo =
111+
MaterializeEncodingInfo encodingInfo =
112112
converter->getEncodingInfo(encodingOp.getResultType());
113-
if (failed(maybeEncodingInfo)) {
114-
return rewriter.notifyMatchFailure(encodingOp,
115-
"unhandled result encoding");
116-
}
117-
if (!maybeEncodingInfo->swizzle) {
113+
if (!encodingInfo.swizzle) {
118114
rewriter.replaceOp(encodingOp, packedValue.value());
119115
return success();
120116
}
@@ -128,18 +124,18 @@ struct GPUSetEncodingOpLoweringConversion
128124
.getShape()
129125
.take_front(origRank));
130126
expandShapeShape.append(
131-
getExpandedTileShape(maybeEncodingInfo->swizzle->expandShape));
127+
getExpandedTileShape(encodingInfo.swizzle->expandShape));
132128
RankedTensorType expandShapeType =
133129
encodingOp.getSourceType().clone(expandShapeShape);
134130

135-
SmallVector<ReassociationIndices> reassociation = getReassociationIndices(
136-
origRank, maybeEncodingInfo->swizzle->expandShape);
131+
SmallVector<ReassociationIndices> reassociation =
132+
getReassociationIndices(origRank, encodingInfo.swizzle->expandShape);
137133
auto expandShapeOp = rewriter.create<tensor::ExpandShapeOp>(
138134
loc, expandShapeType, packedValue.value(), reassociation);
139135

140136
SmallVector<int64_t> transposePerm =
141137
llvm::to_vector(llvm::seq<int64_t>(0, origRank));
142-
for (auto perm : maybeEncodingInfo->swizzle->permutation) {
138+
for (auto perm : encodingInfo.swizzle->permutation) {
143139
transposePerm.push_back(origRank + perm);
144140
}
145141
SmallVector<OpFoldResult> transposeResultDims =
@@ -168,9 +164,9 @@ struct GPUUnsetEncodingOpLoweringConversion
168164
auto converter = static_cast<const MaterializeEncodingTypeConverter *>(
169165
getTypeConverter());
170166

171-
FailureOr<MaterializeEncodingInfo> maybeEncodingInfo =
167+
MaterializeEncodingInfo encodingInfo =
172168
converter->getEncodingInfo(unsetEncodingOp.getSource().getType());
173-
if (failed(maybeEncodingInfo)) {
169+
if (IREE::Codegen::isIdentityLayout(encodingInfo)) {
174170
Type targetType =
175171
getTypeConverter()->convertType(unsetEncodingOp.getSourceType());
176172
Value result = rewriter.createOrFold<tensor::CastOp>(
@@ -181,35 +177,34 @@ struct GPUUnsetEncodingOpLoweringConversion
181177

182178
Location loc = unsetEncodingOp.getLoc();
183179
Value unpackSrc = adaptor.getSource();
184-
if (maybeEncodingInfo->swizzle) {
180+
if (encodingInfo.swizzle) {
185181
int targetRank = unsetEncodingOp.getResultType().getRank();
186182
auto srcConvertedType =
187183
cast<RankedTensorType>(adaptor.getSource().getType());
188184
SmallVector<OpFoldResult> emptyShape =
189185
tensor::getMixedSizes(rewriter, loc, adaptor.getSource());
190186
emptyShape.resize(targetRank);
191-
for (auto i :
192-
getExpandedTileShape(maybeEncodingInfo->swizzle->expandShape)) {
187+
for (auto i : getExpandedTileShape(encodingInfo.swizzle->expandShape)) {
193188
emptyShape.push_back(rewriter.getIndexAttr(i));
194189
}
195190
auto emptyTensor = rewriter.create<tensor::EmptyOp>(
196191
loc, emptyShape, unsetEncodingOp.getSourceType().getElementType());
197192

198193
SmallVector<int64_t> transposePerm =
199194
llvm::to_vector(llvm::seq<int64_t>(0, targetRank));
200-
for (auto perm : maybeEncodingInfo->swizzle->permutation) {
195+
for (auto perm : encodingInfo.swizzle->permutation) {
201196
transposePerm.push_back(targetRank + perm);
202197
}
203198
auto invertedTransposePerm = invertPermutationVector(transposePerm);
204199
auto transposeOp = rewriter.create<linalg::TransposeOp>(
205200
loc, adaptor.getSource(), emptyTensor, invertedTransposePerm);
206201

207202
SmallVector<ReassociationIndices> reassociation = getReassociationIndices(
208-
targetRank, maybeEncodingInfo->swizzle->expandShape);
203+
targetRank, encodingInfo.swizzle->expandShape);
209204
SmallVector<int64_t> unpackSrcShape(
210205
srcConvertedType.getShape().take_front(targetRank));
211-
unpackSrcShape.append(maybeEncodingInfo->innerTileSizes.begin(),
212-
maybeEncodingInfo->innerTileSizes.end());
206+
unpackSrcShape.append(encodingInfo.innerTileSizes.begin(),
207+
encodingInfo.innerTileSizes.end());
213208
RankedTensorType unpackSrcType =
214209
unsetEncodingOp.getResultType().clone(unpackSrcShape);
215210
unpackSrc = rewriter.create<tensor::CollapseShapeOp>(

0 commit comments

Comments
 (0)