Update feifeilib/feifeifluxapi.py
feifeilib/feifeifluxapi.py CHANGED (+6 -15)
@@ -1,27 +1,18 @@
 import os
-
-from gradio_client import Client
+from huggingface_hub import InferenceClient
 
-
-
-#client.headers["x-use-cache"] = "0"
+client = InferenceClient("aifeifei798/feifei-flux-lora-v1.1", token=os.getenv('HF_TOKEN'))
+client.headers["x-use-cache"] = "0"
 
 def feifeifluxapi(prompt, height=1152, width=896, guidance_scale=3.5):
-    #
-
-    # height=height,
-    # width=width,
-    # guidance_scale=guidance_scale
-    # )
-    # return image
-    client = Client("aifeifei798/DarkIdol-flux")
-    result = client.predict(
+    # output is a PIL.Image object
+    result = client.text_to_image(
         prompt=prompt,
         seed=0,
         randomize_seed=True,
         width=width,
         height=height,
         num_inference_steps=6,
-
+        guidance_scale=guidance_scale
    )
    return result
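The commit replaces the gradio_client.Client.predict call against the aifeifei798/DarkIdol-flux Space with a direct huggingface_hub InferenceClient.text_to_image call against the aifeifei798/feifei-flux-lora-v1.1 model, authenticated via the HF_TOKEN environment variable. Below is a minimal usage sketch of the updated helper, not part of the commit: it assumes HF_TOKEN is set before import (the client is created at module import time), that the installed huggingface_hub version forwards the seed/randomize_seed keywords the call passes, and the prompt and output path are illustrative.

# HF_TOKEN must be set in the environment before this import, because
# feifeifluxapi.py builds its InferenceClient at import time.
from feifeilib.feifeifluxapi import feifeifluxapi

# feifeifluxapi returns the PIL.Image object produced by text_to_image,
# so it can be saved or displayed directly.
image = feifeifluxapi(
    "portrait photo, soft studio light",  # illustrative prompt
    height=1152,
    width=896,
    guidance_scale=3.5,
)
image.save("feifei_test.png")  # illustrative output path

Unlike the previous client.predict route through the Space, text_to_image yields an image object directly rather than a Gradio result payload, which is why the new code can return the result unchanged.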