Skip to content

Commit

Permalink
Fix GPTQ tests
Browse files · Browse the repository at this point in the history
  • Loading branch information
MekkCyber committed Nov 22, 2024
1 parent bb1e2d3 commit f54c8cb
Showing 1 changed file with 1 addition and 12 deletions.
13 changes: 1 addition & 12 deletions src/transformers/utils/import_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -1002,18 +1002,7 @@ def is_compressed_tensors_available():


def is_auto_gptq_available():
    """Return whether GPTQ quantization via auto_gptq can be used.

    auto_gptq depends on eetq, so both packages must be present for the
    backend to be considered available. When auto_gptq itself is missing,
    the module-level availability flag is returned unchanged.
    """
    # Guard clause: nothing else to check when auto_gptq is absent.
    if not _auto_gptq_available:
        return _auto_gptq_available
    # auto_gptq imports from eetq at runtime, so require it as well.
    return is_eetq_available()

def is_eetq_available():
    """Return the eetq availability flag.

    ``_eetq_available`` is a module-level flag set elsewhere in this file
    (presumably at import time via a package-presence check — not visible
    in this chunk).
    """
    return _eetq_available
Expand Down

0 comments on commit f54c8cb

Please sign in to comment.