Skip to content

Commit

Permalink
Remove cache from FA deterministic check, which caused unit-test failures
Browse files Browse the repository at this point in the history
Signed-off-by: Reese Wang <[email protected]>
  • Loading branch information
zlsh80826 committed Dec 11, 2024
1 parent 246fc88 commit 7c76d81
Showing 1 changed file with 1 addition and 2 deletions.
3 changes: 1 addition & 2 deletions transformer_engine/jax/cpp_extensions/attention.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
# See LICENSE for license information.
"""JAX/TE custom ops for attention"""
from dataclasses import dataclass
from functools import partial, reduce, cache
from functools import partial, reduce
import operator
import os
from typing import Optional, Tuple
Expand Down Expand Up @@ -133,7 +133,6 @@ def get_fused_attn_backend(self):
)

@staticmethod
@cache
def is_non_deterministic_allowed():
"""Check if non-deterministic kernels are allowed"""
return bool(int(os.getenv("NVTE_ALLOW_NONDETERMINISTIC_ALGO", "1")))
Expand Down

0 comments on commit 7c76d81

Please sign in to comment.