
Commit bab309c

Merge branch 'main' into bmm_conv_docstrings
2 parents 7b3bd8f + 7e228ee commit bab309c

File tree

20 files changed: +139 −47 lines

.ci/scripts/setup-samsung-linux-deps.sh

Lines changed: 1 addition & 1 deletion
@@ -11,7 +11,7 @@ set -ex
 
 download_ai_lite_core() {
   API_BASE="https://soc-developer.semiconductor.samsung.com/api/v1/resource/ai-litecore/download"
-  API_KEY="kn10SoSY3hkC-9Qny5TqD2mnqVrlupv3krnjLeBt5cY"
+  API_KEY=$SAMSUNG_AI_LITECORE_KEY
 
   VERSION="0.5"
   OS_NAME="Ubuntu 22.04"

.github/scripts/propose_ghstack_orig_pr.py

Lines changed: 13 additions & 0 deletions
@@ -86,6 +86,17 @@ def get_pr_stack_from_number(ref: str, repo: Repository) -> List[int]:
     return pr_stack
 
 
+def get_differential_revision(pr, repo: Repository) -> str:
+    body = repo.get_pull(pr.number).body
+    matches = re.findall(r"Differential Revision: .*", body)
+    count = len(matches)
+    if count == 1:
+        # If there's more than one Differential Revision, let's just return empty
+        # so that we can disambiguate manually.
+        return matches[0]
+    return ""
+
+
 def create_prs_for_orig_branch(pr_stack: List[int], repo: Repository):
     # For the first PR, we want to merge to `main` branch, and we will update
     # as we go through the stack
@@ -100,13 +111,15 @@ def create_prs_for_orig_branch(pr_stack: List[int], repo: Repository):
         # The PR we want to create is then "branch_to_merge" <- gh/user/x/orig
         # gh/user/x/orig is the clean diff between gh/user/x/base <- gh/user/x/head
         orig_branch_merge_head = pr.base.ref.replace("base", "orig")
+        differential_revision_text = get_differential_revision(pr, repo)
         bot_metadata = f"""This PR was created by the merge bot to help merge the original PR into the main branch.
 ghstack PR number: https://github.com/pytorch/executorch/pull/{pr.number} by @{pr.user.login}
 ^ Please use this as the source of truth for the PR details, comments, and reviews
 ghstack PR base: https://github.com/pytorch/executorch/tree/{pr.base.ref}
 ghstack PR head: https://github.com/pytorch/executorch/tree/{pr.head.ref}
 Merge bot PR base: https://github.com/pytorch/executorch/tree/{orig_branch_merge_base}
 Merge bot PR head: https://github.com/pytorch/executorch/tree/{orig_branch_merge_head}
+{differential_revision_text}
 @diff-train-skip-merge"""
 
         existing_orig_pr = repo.get_pulls(
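Taken on its own, the `get_differential_revision` helper added above is just a regex scan of the PR body. A minimal standalone sketch of the same matching rule, using a hypothetical `body` string rather than a GitHub API call, could look like this:

```python
import re


def extract_differential_revision(body: str) -> str:
    # Find every "Differential Revision: ..." line in the PR body.
    matches = re.findall(r"Differential Revision: .*", body or "")
    # Only trust the result when exactly one line matched; with zero or several
    # matches, return "" so the revision can be disambiguated manually.
    if len(matches) == 1:
        return matches[0]
    return ""


# Hypothetical PR body, for illustration only.
body = "Stacked change.\n\nDifferential Revision: D12345678\n"
print(extract_differential_revision(body))  # -> Differential Revision: D12345678
```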

.github/workflows/pull.yml

Lines changed: 3 additions & 0 deletions
@@ -900,12 +900,14 @@ jobs:
     permissions:
       id-token: write
       contents: read
+    secrets: inherit
     with:
       runner: linux.2xlarge
       docker-image: ci-image:executorch-ubuntu-22.04-clang12-android
       submodules: 'recursive'
       ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
       timeout: 90
+      secrets-env: SAMSUNG_AI_LITECORE_KEY
       script: |
         set -ex
 
@@ -917,6 +919,7 @@ jobs:
         PYTHON_EXECUTABLE=python bash .ci/scripts/setup-linux.sh --build-tool "cmake"
 
         # Setup Samsung SDK (AI Lite Core) and install enn backend
+        export SAMSUNG_AI_LITECORE_KEY=$SECRET_SAMSUNG_AI_LITECORE_KEY
         source .ci/scripts/setup-samsung-linux-deps.sh
 
         # Test models serially

.github/workflows/test-cuda-builds.yml

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        cuda-version: ["12.6", "12.8", "12.9"]
+        cuda-version: ["12.6", "12.8", "13.0"]
 
     name: test-executorch-cuda-build-${{ matrix.cuda-version }}
     uses: pytorch/test-infra/.github/workflows/linux_job_v2.yml@main

CONTRIBUTING.md

Lines changed: 1 addition & 2 deletions
@@ -199,8 +199,7 @@ We use [`lintrunner`](https://pypi.org/project/lintrunner/) to help make sure the
 code follows our standards. Set it up with:
 
 ```
-pip install lintrunner==0.12.7
-pip install lintrunner-adapters==0.12.4
+./install_requirements.sh # (automatically run by install_executorch.sh)
 lintrunner init
 ```

backends/xnnpack/runtime/XNNPACKBackend.h

Lines changed: 0 additions & 2 deletions
@@ -1,7 +1,5 @@
 #pragma once
 
-#include <executorch/runtime/platform/compiler.h>
-
 namespace executorch::backends::xnnpack {
 /// The key for the backend. This is used to register the backend, check
 /// availability, and get/set options.

backends/xnnpack/targets.bzl

Lines changed: 10 additions & 0 deletions
@@ -73,3 +73,13 @@ def define_common_targets():
         # @lint-ignore BUCKLINT: Avoid `link_whole=True` (https://fburl.com/avoid-link-whole)
         link_whole = True,
     )
+
+    runtime.cxx_library(
+        name = "xnnpack_interface",
+        visibility = [
+            "@EXECUTORCH_CLIENTS",
+        ],
+        exported_headers = [
+            "runtime/XNNPACKBackend.h",
+        ],
+    )

exir/passes/memory_planning_pass.py

Lines changed: 1 addition & 1 deletion
@@ -287,7 +287,7 @@ def run(
         return PassResult(graph_module, True)
 
     def run_multimethod(self):
-        "Resolve any memory planning done across entry points"
+        """Resolve any memory planning done across entry points, called after run is called on all entry points."""
        if self.share_mutable_buffers:
             arena: int = 0
 
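The new docstring describes a two-phase flow: `run` is invoked once per entry point, and `run_multimethod` is called once afterwards to reconcile planning across entry points (for example, shared mutable buffers). A rough sketch of that calling order, with illustrative names (`memory_planning_pass`, `entry_point_modules`) that are not the actual ExecuTorch driver code:

```python
# Illustrative only: the call order implied by the docstring above.
for graph_module in entry_point_modules:
    # Plan memory for each entry point individually.
    memory_planning_pass.run(graph_module)

# After every entry point has been planned, resolve decisions that span
# entry points, such as shared mutable buffers.
memory_planning_pass.run_multimethod()
```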

extension/android/executorch_android/src/main/java/org/pytorch/executorch/ExecutorchRuntimeException.java

Lines changed: 10 additions & 8 deletions
@@ -75,6 +75,7 @@ public class ExecutorchRuntimeException extends RuntimeException {
   }
 
   static class ErrorHelper {
+    private static final boolean ENABLE_READ_LOG_BUFFER = false;
     // Reusable StringBuilder instance
     private static final StringBuilder sb = new StringBuilder();
 
@@ -92,14 +93,15 @@ static String formatMessage(int errorCode, String details) {
           .append("] ")
           .append(baseMessage)
           .append(": ")
-          .append(details)
-          .append("\nDetailed Logs:\n");
-
-      try {
-        String[] logEntries = readLogBuffer(); // JNI call
-        formatLogEntries(sb, logEntries);
-      } catch (Exception e) {
-        sb.append("Failed to retrieve detailed logs: ").append(e.getMessage());
+          .append(details);
+      if (ENABLE_READ_LOG_BUFFER) {
+        try {
+          sb.append("\nDetailed Logs:\n");
+          String[] logEntries = readLogBuffer(); // JNI call
+          formatLogEntries(sb, logEntries);
+        } catch (Exception e) {
+          sb.append("Failed to retrieve detailed logs: ").append(e.getMessage());
+        }
       }
 
       return sb.toString();
extension/android/executorch_android/src/main/java/org/pytorch/executorch/Module.java

Lines changed: 1 addition & 2 deletions
@@ -204,10 +204,9 @@ public MethodMetadata getMethodMetadata(String name) {
       throw new RuntimeException("method " + name + "does not exist for this module");
     }
 
-    MethodMetadata methodMetadata =mMethodMetadata.get(name);
+    MethodMetadata methodMetadata = mMethodMetadata.get(name);
     if (methodMetadata != null) {
       methodMetadata.setBackends(getUsedBackends(name));
-
     }
     return methodMetadata;
   }
