Update README.md
#44
by
Jalea96
- opened
README.md
CHANGED
|
@@ -66,6 +66,7 @@ addict
|
|
| 66 |
easydict
|
| 67 |
pip install flash-attn==2.7.3 --no-build-isolation
|
| 68 |
```
|
|
|
|
| 69 |
|
| 70 |
```python
|
| 71 |
from transformers import AutoModel, AutoTokenizer
|
|
|
|
| 66 |
easydict
|
| 67 |
pip install flash-attn==2.7.3 --no-build-isolation
|
| 68 |
```
|
| 69 |
+
> **Note:** You need to have the NVIDIA CUDA Toolkit installed in order to build and use FlashAttention.
|
| 70 |
|
| 71 |
```python
|
| 72 |
from transformers import AutoModel, AutoTokenizer
|