Spaces: Running on Zero

doubility123 committed · cafe00b · Parent: a3742d0

update

Files changed:
- app.py (+2, -0)
- deepseek_vl2/serve/inference.py (+0, -2)
app.py CHANGED

@@ -19,6 +19,7 @@
 import os.path
 # -*- coding:utf-8 -*-
 from argparse import ArgumentParser
+import spaces
 
 import io
 import sys
@@ -299,6 +300,7 @@ def transfer_input(input_text, input_images):
 
 
 @wrap_gen_fn
+@spaces.GPU(duration=120)  # Specify a duration to avoid timeout
 def predict(
     text,
     images,
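In app.py, the ZeroGPU decorator now sits directly on the Gradio handler. Below is a minimal sketch of that pattern, not this Space's actual code: the handler name and its echo logic are hypothetical stand-ins, and it assumes a ZeroGPU Space where the spaces package is available. Since predict is wrapped by wrap_gen_fn, the real handler streams output, so the sketch uses a generator the same way.

# Minimal sketch of the pattern this commit adopts in app.py (hypothetical names).
import spaces
import gradio as gr


@spaces.GPU(duration=120)  # GPU is allocated while this handler runs; 120 s per call
def stream_reply(prompt: str):
    # Stand-in for the real model call; yields partial results like a streaming handler.
    partial = ""
    for token in prompt.split():
        partial += token + " "
        yield partial


demo = gr.Interface(fn=stream_reply, inputs="text", outputs="text")

if __name__ == "__main__":
    demo.launch()

Placing the decorator on the outermost handler keeps the whole request, including streaming, inside a single GPU allocation, and the duration argument raises the allotment to avoid the timeout mentioned in the inline comment.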
deepseek_vl2/serve/inference.py CHANGED

@@ -19,7 +19,6 @@
 
 from threading import Thread
 from typing import List
-import spaces
 
 import torch
 import transformers
@@ -90,7 +89,6 @@ class StoppingCriteriaSub(StoppingCriteria):
 
 
 @torch.inference_mode()
-@spaces.GPU(duration=120)
 def deepseek_generate(
     conversations: list,
     vl_gpt: torch.nn.Module,