Commit: init

Files changed:
- app.py (+12, -10)
- requirements.txt (+1, -1)
app.py CHANGED

@@ -139,15 +139,17 @@ def download_llama_7b(ckpt_dir, tokenizer_path):
     os.makedirs(ckpt_dir, exist_ok=True)
     ckpt_path = os.path.join(ckpt_dir, "consolidated.00.pth")
     param_path = os.path.join(ckpt_dir, "params.json")
+    # if not os.path.exists(ckpt_path):
+    #     os.system(
+    #         f"wget -O {ckpt_path} https://huggingface.co/nyanko7/LLaMA-7B/resolve/main/consolidated.00.pth")
+    # if not os.path.exists(param_path):
+    #     os.system(
+    #         f"wget -O {param_path} https://huggingface.co/nyanko7/LLaMA-7B/raw/main/params.json")
+    # if not os.path.exists(tokenizer_path):
+    #     os.system(
+    #         f"wget -O {tokenizer_path} https://huggingface.co/nyanko7/LLaMA-7B/resolve/main/tokenizer.model")
     if not os.path.exists(ckpt_path):
-        os.system(
-            f"wget -O {ckpt_path} https://huggingface.co/nyanko7/LLaMA-7B/resolve/main/consolidated.00.pth")
-    if not os.path.exists(param_path):
-        os.system(
-            f"wget -O {param_path} https://huggingface.co/nyanko7/LLaMA-7B/raw/main/params.json")
-    if not os.path.exists(tokenizer_path):
-        os.system(
-            f"wget -O {tokenizer_path} https://huggingface.co/nyanko7/LLaMA-7B/resolve/main/tokenizer.model")
+        os.system("git clone https://huggingface.co/nyanko7/LLaMA-7B")
     print("LLaMA-7B downloaded")
 
 def download_llama_adapter(instruct_adapter_path, caption_adapter_path):
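For orientation, here is a minimal sketch of how download_llama_7b reads after this hunk, assembled from the diff above; the commented-out wget lines are omitted for brevity, and the import line plus the git-lfs remark are assumptions rather than part of the commit.

import os

def download_llama_7b(ckpt_dir, tokenizer_path):
    # Prepare the target directory and the expected file locations.
    os.makedirs(ckpt_dir, exist_ok=True)
    ckpt_path = os.path.join(ckpt_dir, "consolidated.00.pth")
    param_path = os.path.join(ckpt_dir, "params.json")

    # The per-file wget downloads are commented out by this commit; instead
    # the whole model repo is cloned once when the checkpoint is missing.
    # Assumption: fetching the .pth weights this way relies on git-lfs being
    # available in the Space image.
    if not os.path.exists(ckpt_path):
        os.system("git clone https://huggingface.co/nyanko7/LLaMA-7B")
    print("LLaMA-7B downloaded")

The clone lands in ./LLaMA-7B relative to the working directory, which is why the second hunk below switches ckpt_dir and tokenizer_path to that folder.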
@@ -160,8 +162,8 @@ def download_llama_adapter(instruct_adapter_path, caption_adapter_path):
 
     # ckpt_dir = "/data1/llma/7B"
     # tokenizer_path = "/data1/llma/tokenizer.model"
-    ckpt_dir = "
-    tokenizer_path = "tokenizer.model"
+    ckpt_dir = "LLaMA-7B/"
+    tokenizer_path = "LLaMA-7B/tokenizer.model"
     instruct_adapter_path = "llama_adapter_len10_layer30_release.pth"
     caption_adapter_path = "llama_adapter_len10_layer30_caption_vit_l.pth"
     max_seq_len = 512
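Because git clone creates ./LLaMA-7B in the working directory, these relative paths line up with the joins inside download_llama_7b. A small illustrative check, not part of the commit:

import os

ckpt_dir = "LLaMA-7B/"
tokenizer_path = "LLaMA-7B/tokenizer.model"

# The joins performed in download_llama_7b now resolve inside the cloned repo.
print(os.path.join(ckpt_dir, "consolidated.00.pth"))  # LLaMA-7B/consolidated.00.pth
print(os.path.join(ckpt_dir, "params.json"))          # LLaMA-7B/params.json
print(tokenizer_path)                                 # LLaMA-7B/tokenizer.model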
requirements.txt CHANGED

@@ -1,4 +1,4 @@
-torch
+torch==1.12.0
 fairscale
 sentencepiece
 Pillow
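Pinning torch==1.12.0 replaces the previously unpinned dependency, so the Space gets a reproducible torch build instead of whatever pip resolves at build time. An illustrative startup check, assumed here rather than taken from the commit:

import torch

# torch.__version__ may carry a local suffix such as "+cu113", so only the
# release prefix pinned in requirements.txt is compared.
assert torch.__version__.startswith("1.12.0"), torch.__version__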