import torch
import spaces
from diffusers import (
    DiffusionPipeline,
    AutoencoderTiny,
)
from huggingface_hub import hf_hub_download


def feifeimodload():
    dtype = torch.bfloat16
    device = "cuda" if torch.cuda.is_available() else "cpu"

    # Load the base DarkIdol-flux-v1 pipeline in bfloat16 and move it to the GPU if available.
    pipe = DiffusionPipeline.from_pretrained(
        "aifeifei798/DarkIdol-flux-v1", torch_dtype=dtype
    ).to(device)

    # Download the feifei LoRA from the Hub and attach it as a named adapter.
    pipe.load_lora_weights(
        hf_hub_download("aifeifei798/feifei-flux-lora-v1.1", "feifei-v1.1.safetensors"),
        adapter_name="feifei",
    )

    # Activate the adapter at 0.8 strength, then fuse it into the base weights.
    pipe.set_adapters(
        ["feifei"],
        adapter_weights=[0.8],
    )
    pipe.fuse_lora(
        adapter_names=["feifei"],
        lora_scale=1.0,
    )

    # Reduce VAE memory usage when decoding large images.
    pipe.vae.enable_slicing()
    pipe.vae.enable_tiling()

    # The LoRA is already fused, so the standalone adapter weights can be released.
    pipe.unload_lora_weights()
    torch.cuda.empty_cache()
    return pipe
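

# --- Usage sketch (not part of the original module) ---
# A minimal example of how feifeimodload() might be called from a script.
# The prompt, resolution, step count, and guidance value below are illustrative
# assumptions for a Flux-style text-to-image pipeline, not values taken from
# this repository.
if __name__ == "__main__":
    pipe = feifeimodload()
    image = pipe(
        prompt="portrait photo of feifei, soft studio lighting",  # hypothetical prompt
        width=768,
        height=1024,
        num_inference_steps=20,
        guidance_scale=3.5,
        generator=torch.Generator(device=pipe.device).manual_seed(0),
    ).images[0]
    image.save("feifei.png")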