# feifeilib/feifeifluxapi.py
import os  # used only by the commented-out Inference API variant below
from gradio_client import Client

# Earlier implementation that called the model through the HF Inference API:
# from huggingface_hub import InferenceClient
# client = InferenceClient("black-forest-labs/FLUX.1-dev", token=os.getenv('HF_TOKEN'))
# client.headers["x-use-cache"] = "0"
def feifeifluxapi(prompt, height=1152, width=896, guidance_scale=3.5):
    """Generate an image for `prompt` via the aifeifei798/DarkIdol-flux Gradio
    Space and return the raw prediction result.

    `guidance_scale` is only used by the commented-out Inference API path below.
    """
    # Earlier Inference API call, kept for reference:
    # image = client.text_to_image(
    #     prompt=prompt,
    #     height=height,
    #     width=width,
    #     guidance_scale=guidance_scale
    # )
    # return image
    # Call the hosted Space; "/infer" is the endpoint exposed by its Gradio app.
    client = Client("aifeifei798/DarkIdol-flux")
    result = client.predict(
        prompt=prompt,
        seed=0,
        randomize_seed=True,  # let the Space pick a fresh seed on each call
        width=width,
        height=height,
        num_inference_steps=6,
        api_name="/infer",
    )
    return result
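
# Minimal usage sketch. The prompt is illustrative; the return value of
# client.predict() depends on the Space: gradio_client commonly hands back a
# local file path (or a tuple of outputs) for image endpoints, so inspect
# `result` before relying on its shape.
if __name__ == "__main__":
    result = feifeifluxapi("a studio portrait of an idol, soft lighting")
    print(result)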