aifeifei798 committed
Commit 4b214ab · verified · 1 Parent(s): ce8e85d

Upload feifeiflorence.py

Files changed (1)
  1. feifeilib/feifeiflorence.py +7 -0
feifeilib/feifeiflorence.py CHANGED
```diff
@@ -6,6 +6,13 @@ from transformers import (
     AutoModelForCausalLM,
 )
 import torch
+import subprocess
+
+subprocess.run(
+    "pip install flash-attn --no-build-isolation",
+    env={"FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"},
+    shell=True,
+)
 
 device = "cuda:0" if torch.cuda.is_available() else "cpu"
 torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
```
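For context: setting `FLASH_ATTENTION_SKIP_CUDA_BUILD=TRUE` makes the `flash-attn` pip install skip compiling its CUDA extensions from source, which is why this runtime-install pattern is often used on Hugging Face Spaces, where no build toolchain is guaranteed. Below is a minimal sketch of how the installed package would typically be put to use when loading a Florence-2 model; the checkpoint id, the `os.environ` merge, and the `check=True` flag are assumptions, since the commit itself only shows the bare install step. Note that `subprocess.run`'s `env=` argument replaces the whole environment rather than extending it, so merging with `os.environ` (to preserve `PATH` and friends) is the more defensive variant.

```python
import os
import subprocess

import torch

# Assumed, more defensive variant of the install in this commit: merging with
# os.environ keeps the inherited environment, since env= would otherwise
# replace it entirely.
subprocess.run(
    "pip install flash-attn --no-build-isolation",
    env={**os.environ, "FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"},
    shell=True,
    check=True,  # fail loudly if the wheel cannot be installed
)

from transformers import AutoModelForCausalLM, AutoProcessor  # noqa: E402

device = "cuda:0" if torch.cuda.is_available() else "cpu"
torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

# Hypothetical model load; the checkpoint id is an assumption and is not shown
# in this diff. trust_remote_code is required because Florence-2 ships custom
# modeling code, which can make use of flash_attn when it is available.
model = AutoModelForCausalLM.from_pretrained(
    "microsoft/Florence-2-large",
    torch_dtype=torch_dtype,
    trust_remote_code=True,
).to(device)
processor = AutoProcessor.from_pretrained(
    "microsoft/Florence-2-large", trust_remote_code=True
)
```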