Commit 011cfd4

njriasan authored and facebook-github-bot committed
[Inductor][Triton] Support TMA before strict 3.4 cutoff
Summary: Inductor's Triton 3.4 release is the most commonly used variant of Triton, but anyone working with an alternative Triton build may not match that exact version. This change replaces the strict version check (Triton >= 3.4) with a check for any variant that supports the TMA APIs.

Test Plan: Relying on CI. Should be an NFC.

Rollback Plan:

Reviewed By: davidberard98

Differential Revision: D79378792
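
The new helper expresses the requirement as a capability check rather than a version check. As a rough illustration (a minimal sketch, not the actual implementation of torch.utils._triton.has_triton_stable_tma_api; the probed attribute triton.language.make_tensor_descriptor is an assumption), such a probe can look like this:

# Minimal sketch of a TMA capability probe (hypothetical, for illustration).
# NOTE: this is NOT the real torch.utils._triton.has_triton_stable_tma_api;
# the probed attribute (triton.language.make_tensor_descriptor) is assumed.
import importlib.util


def has_triton_stable_tma_api_sketch() -> bool:
    # No Triton installed at all -> no TMA support.
    if importlib.util.find_spec("triton") is None:
        return False
    try:
        import triton.language as tl
    except ImportError:
        return False
    # Any Triton variant that exposes the stable tensor-descriptor builder
    # qualifies, regardless of what its version string says.
    return hasattr(tl, "make_tensor_descriptor")
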
1 parent fc340d0 · commit 011cfd4

torch/_inductor/codegen/triton.py

Lines changed: 2 additions & 4 deletions
@@ -26,7 +26,7 @@
 from torch._prims_common import is_integer_dtype
 from torch.utils._ordered_set import OrderedSet
 from torch.utils._sympy.functions import CeilDiv, FloorDiv, ModularIndexing
-from torch.utils._triton import has_triton_package
+from torch.utils._triton import has_triton_package, has_triton_stable_tma_api
 
 from ...utils._sympy.symbol import free_symbol_is_type, prefix_str, symbol_is_type, SymT
 from ...utils._sympy.value_ranges import ValueRanges
@@ -1674,7 +1674,6 @@ def augment_key(self, cache_key: str) -> Union[str, tuple[str, str]]:
         else:
             return cache_key
 
-
 @dataclasses.dataclass
 class TMACompatibilityChecker:
     """
@@ -1692,14 +1691,13 @@ def __post_init__(self):
     def can_use_tma(
         self,
     ) -> bool:
-        import triton
 
         if not (
             V.graph.get_current_device_or_throw().type == "cuda"
             and torch.cuda.get_device_capability()[0] >= 9
             and config.triton.use_tensor_descriptor
             and config.assume_aligned_inputs
-            and triton.__version__ >= "3.4.0"
+            and has_triton_stable_tma_api()
             # For CUDA The base ptr needs to be aligned
         ):
             log.debug(
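
As background on why a capability probe is preferable to the removed triton.__version__ >= "3.4.0" guard (a general observation, not stated in the commit message): comparing version strings lexicographically can misorder releases and rejects variants whose version strings differ from upstream, whereas probing for the API itself is robust to both. A small illustration, assuming the packaging library is available for the correct comparison:

# Raw string comparison does not follow release ordering:
# "3.10.0" is a newer release than "3.4.0" but compares as smaller,
# because "1" < "4" character by character.
assert not ("3.10.0" >= "3.4.0")

# A semantically correct comparison needs a version parser, e.g. packaging
# (assumed to be installed here).
from packaging.version import Version
assert Version("3.10.0") >= Version("3.4.0")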
