vilarin committed · verified
Commit 4b68e4e · 1 Parent(s): a478bcb

Update app.py

Files changed (1)
    app.py  +12 -2
app.py CHANGED
@@ -1,19 +1,29 @@
 import os
 import gradio as gr
 import torch
-from diffusers import StableDiffusionXLPipeline, AutoencoderKL, KDPM2AncestralDiscreteScheduler
+from diffusers import StableDiffusion3Pipeline, AutoencoderKL, KDPM2AncestralDiscreteScheduler
 from huggingface_hub import hf_hub_download
 import spaces
 from PIL import Image
 import requests
 from translatepy import Translator
 
+os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"
 translator = Translator()
 HF_TOKEN = os.environ.get("HF_TOKEN", None)
 # Constants
 model = "stabilityai/stable-diffusion-3-medium"
 vae_model = "madebyollin/sdxl-vae-fp16-fix"
 
+
+model_path = hf_hub_download(
+    repo_id="stabilityai/stable-diffusion-3-medium",
+    ignore_patterns=["*.md", "*..gitattributes"],
+    local_dir="model",
+    token=HF_TOKEN,
+)
+
+
 CSS = """
 .gradio-container {
     max-width: 690px !important;
@@ -39,7 +49,7 @@ vae = AutoencoderKL.from_pretrained(
 
 # Ensure model and scheduler are initialized in GPU-enabled function
 if torch.cuda.is_available():
-    pipe = StableDiffusionXLPipeline.from_pretrained(model, vae=vae, torch_dtype=torch.float16).to("cuda")
+    pipe = StableDiffusion3Pipeline.from_pretrained(model_path, vae=vae, torch_dtype=torch.float16).to("cuda")
 
 
 # Function
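
For context, below is a minimal sketch of what the updated download-and-load section amounts to. Two details are assumptions on my part, not part of the commit: the repo-level download with ignore_patterns corresponds to huggingface_hub's snapshot_download (hf_hub_download fetches a single named file and has no ignore_patterns argument), and the pattern "*..gitattributes" in the commit looks like it carries a stray dot, so the sketch uses ".gitattributes". The SDXL VAE override and the Gradio/spaces wiring are left out to keep the sketch self-contained.

import os
import torch
from diffusers import StableDiffusion3Pipeline
from huggingface_hub import snapshot_download

# Enable the accelerated hf_transfer download backend before any download
# starts (requires the hf_transfer package to be installed).
os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"

HF_TOKEN = os.environ.get("HF_TOKEN", None)

# Pre-download the gated SD3 repository into ./model, skipping docs and git
# metadata. snapshot_download is assumed here (not what the commit calls)
# because it accepts ignore_patterns; hf_hub_download does not.
model_path = snapshot_download(
    repo_id="stabilityai/stable-diffusion-3-medium",
    ignore_patterns=["*.md", ".gitattributes"],  # assumed fix for "*..gitattributes"
    local_dir="model",
    token=HF_TOKEN,
)

# Load the SD3 pipeline from the local snapshot in fp16 and move it to the GPU.
if torch.cuda.is_available():
    pipe = StableDiffusion3Pipeline.from_pretrained(
        model_path, torch_dtype=torch.float16
    ).to("cuda")

Under these assumptions the pipeline is then used in the usual diffusers way, e.g. image = pipe(prompt, num_inference_steps=28, guidance_scale=7.0).images[0].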