roxky committed
Commit 2f323d8 · 1 Parent(s): 3cdbc8b

Update README.md

Files changed (2)
  1. README.md +3 -3
  2. demo/BackendApi.py +32 -0
README.md CHANGED
@@ -1,10 +1,10 @@
  ---
- title: G4f Dev2
- emoji: 🌍
+ title: G4F DEMO
+ emoji: 🤔
  colorFrom: yellow
  colorTo: green
  sdk: docker
  pinned: false
  ---

- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+ Check out https://github.com/xtekky/gpt4free
demo/BackendApi.py ADDED
@@ -0,0 +1,32 @@
+ from __future__ import annotations
+
+ from g4f.typing import Messages, AsyncResult
+ import g4f.Provider
+ from g4f.Provider import BackendApi
+
+ from . import url, headers
+
+ BackendApi.working = True
+ BackendApi.ssl = False
+ BackendApi.url = url
+ BackendApi.headers = headers
+
+ class BackendApi(BackendApi):
+     working = True
+     ssl = False
+     url = url
+     headers = headers
+     image_models = ["flux", "flux-pro"]
+     models = ["deepseek-r1", *g4f.Provider.OpenaiAccount.get_models(), "flux", "flux-pro"]
+
+     @classmethod
+     async def create_async_generator(
+         cls,
+         model: str,
+         messages: Messages,
+         **kwargs
+     ) -> AsyncResult:
+         if model in g4f.Provider.OpenaiAccount.get_models():
+             kwargs["provider"] = "OpenaiAccount"
+         async for chunk in super().create_async_generator(model, messages, **kwargs):
+             yield chunk
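
For reference, a minimal usage sketch of the provider subclass added above. It assumes the demo package actually defines the url and headers values it imports via "from . import url, headers" and that the backend streams plain text chunks; the model name and message below are illustrative only.

import asyncio
from demo.BackendApi import BackendApi

async def main() -> None:
    # Hypothetical invocation; requires demo/__init__.py to provide url and headers.
    # Messages use g4f's usual role/content dictionaries.
    messages = [{"role": "user", "content": "Hello!"}]
    # create_async_generator is a classmethod that yields response chunks as they arrive.
    async for chunk in BackendApi.create_async_generator("deepseek-r1", messages):
        print(chunk, end="", flush=True)

asyncio.run(main())

Because create_async_generator forwards **kwargs to the parent BackendApi provider, models from g4f.Provider.OpenaiAccount are routed there by setting kwargs["provider"] = "OpenaiAccount" before delegating.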