1 parent 134f4fd · commit bd11bf0
1 file changed
library/attention.py
@@ -17,10 +17,10 @@
 
 if flash_attn is None:
     try:
-        import flash_attn_turing as flash_attn
-        from flash_attn.flash_attention_interface import _flash_attn_forward
-        from flash_attn.flash_attention_interface import flash_attn_varlen_func
-        from flash_attn.flash_attention_interface import flash_attn_func
+        import flash_attn_turing
+        from flash_attn_turing.flash_attention_interface import _flash_attn_forward
+        from flash_attn_turing.flash_attention_interface import flash_attn_varlen_func
+        from flash_attn_turing.flash_attention_interface import flash_attn_func
     except ImportError as e:
         flash_attn = None
         flash_attn_varlen_func = None
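
For context, here is a minimal, self-contained sketch of the guarded-import fallback this hunk fixes. Only the flash_attn_turing imports come from the diff itself; the first try block and the upstream flash_attn names are illustrative assumptions about the surrounding code.

# Sketch of the fallback pattern in library/attention.py (assumptions noted inline).
try:
    # Prefer the upstream flash-attn package when it is installed
    # (assumed surrounding code; not part of this diff).
    import flash_attn
    from flash_attn import flash_attn_func, flash_attn_varlen_func
except ImportError:
    flash_attn = None

if flash_attn is None:
    try:
        # Fall back to the Turing fork. The interface imports must name
        # flash_attn_turing directly: `import flash_attn_turing as flash_attn`
        # only rebinds the local name, so a subsequent
        # `from flash_attn.flash_attention_interface import ...` is still
        # resolved against the real (absent) flash_attn package. That is
        # the bug this commit removes.
        import flash_attn_turing
        from flash_attn_turing.flash_attention_interface import (
            _flash_attn_forward,
            flash_attn_func,
            flash_attn_varlen_func,
        )
    except ImportError:
        # Neither backend is available; callers check these for None.
        flash_attn = None
        flash_attn_varlen_func = None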