Optionally add qnn backend to llama runner buck file (#6355)
Summary:
Pull Request resolved: #6355

Include the QNN backend as part of the llama runner's dependencies, controlled by a build flag that defaults to false.
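
To enable it, pass the config flag at build time, as noted in the comment in the diff below:

    buck build -c executorch.enable_qnn=true //executorch/examples/models/llama/runner:runner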

Reviewed By: kirklandsign

Differential Revision: D64334713

fbshipit-source-id: 8f8f3416bd042f340eed4d50ed4a192de148d91d
cccclai authored and facebook-github-bot committed Oct 18, 2024
1 parent 339bb74 commit 8209bc1
Showing 1 changed file with 12 additions and 3 deletions.
examples/models/llama/runner/targets.bzl
@@ -9,10 +9,20 @@ def _get_operator_lib(aten = False):
     else:
         return ["//executorch/configurations:optimized_native_cpu_ops", "//executorch/extension/llm/custom_ops:custom_ops"]
 
+def get_qnn_dependency():
+    # buck build -c executorch.enable_qnn=true //executorch/examples/models/llama/runner:runner
+    # Check if QNN is enabled before including the dependency
+    if native.read_config("executorch", "enable_qnn", "false") == "true":
+        # //executorch/backends/qualcomm:qnn_executorch_backend doesn't work,
+        # likely because it is an empty library that only carries dependencies
+        return [
+            "//executorch/backends/qualcomm/runtime:runtime",
+        ]
+    return []
 
 def define_common_targets():
     for aten in (True, False):
         aten_suffix = "_aten" if aten else ""
 
         runtime.cxx_library(
             name = "runner" + aten_suffix,
             srcs = [
@@ -27,7 +37,6 @@ def define_common_targets():
             visibility = [
                 "@EXECUTORCH_CLIENTS",
             ],
-            # qnn_executorch_backend can be added below //executorch/backends/qualcomm:qnn_executorch_backend
             exported_deps = [
                 "//executorch/backends/xnnpack:xnnpack_backend",
                 "//executorch/extension/llm/runner:stats",
@@ -46,7 +55,7 @@ def define_common_targets():
                 # Vulkan API currently cannot build on some platforms (e.g. Apple, FBCODE)
                 # Therefore enable it explicitly for now to avoid failing tests
                 "//executorch/backends/vulkan:vulkan_backend_lib",
-            ] if native.read_config("llama", "use_vulkan", "0") == "1" else []),
+            ] if native.read_config("llama", "use_vulkan", "0") == "1" else []) + get_qnn_dependency(),
             external_deps = [
                 "libtorch",
             ] if aten else [],
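
Since get_qnn_dependency() returns a plain list, it composes with ordinary list concatenation in any deps attribute, as the exported_deps change above shows. Below is a minimal sketch of reusing the helper from another .bzl file; the wrapper function and target are hypothetical, and the load path is an assumption based on this file's location:

    # Hypothetical consumer; load path and rule wrapper are assumptions,
    # not part of this commit.
    load("//executorch/examples/models/llama/runner:targets.bzl", "get_qnn_dependency")

    def define_wrapper_target():
        # Picks up the QNN runtime only when the build is invoked with
        # -c executorch.enable_qnn=true; otherwise the helper returns [].
        native.cxx_library(
            name = "runner_with_optional_qnn",  # hypothetical target
            exported_deps = [
                "//executorch/examples/models/llama/runner:runner",
            ] + get_qnn_dependency(),
        )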
