
akhaliq committed
Commit 31e34e6 · 1 Parent(s): 9b618f0

update openai
Files changed (4):
  1. app.py +4 -2
  2. app_openai_coder.py +1 -1
  3. pyproject.toml +1 -1
  4. requirements.txt +1 -1
app.py CHANGED
@@ -28,10 +28,13 @@ from app_deepseek import demo as demo_deepseek
 from app_qwen import demo as demo_qwen
 from app_qwen_coder import demo as demo_qwen_coder
 from app_nvidia_coder import demo as demo_nvidia_coder
+from app_openai import demo as demo_openai
 from utils import get_app
 
 # Create mapping of providers to their demos
 PROVIDERS = {
+    "OpenAI Coder": demo_openai_coder,
+    "OpenAI": demo_openai,
     "NVIDIA Coder": demo_nvidia_coder,
     "Hyperbolic Coder": demo_hyperbolic_coder,
     "Hyperbolic": demo_hyperbolic,
@@ -45,7 +48,6 @@ PROVIDERS = {
     "Minimax": demo_minimax,
     "Mistral": demo_mistral,
     "Langchain Agent": demo_langchain,
-    "OpenAI Coder": demo_openai_coder,
     "Gemini": demo_gemini,
     "SmolAgents": demo_smolagents,
     "CrewAI": demo_crew,
@@ -66,7 +68,7 @@ PROVIDERS = {
 
 demo = get_app(
     models=list(PROVIDERS.keys()),
-    default_model="NVIDIA Coder",
+    default_model="OpenAI Coder",
     src=PROVIDERS,
     dropdown_label="Select Provider",
 )
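For readers unfamiliar with the pattern: utils.get_app itself is not part of this diff, so the snippet below is only a minimal sketch of how a provider-name-to-demo mapping like PROVIDERS can be combined into one Gradio app. The name get_app_sketch and the use of gr.TabbedInterface instead of the dropdown implied by dropdown_label are illustrative assumptions, not the project's actual implementation.

# Minimal sketch, assuming Gradio is installed; NOT the project's utils.get_app.
# It shows one way to merge several pre-built demos keyed by provider name.
import gradio as gr

def get_app_sketch(models, src, default_model=None, dropdown_label="Select Provider"):
    # `src` maps provider names (e.g. "OpenAI Coder") to gr.Blocks demos;
    # `models` is the ordered list of names to expose.
    names = [name for name in models if name in src]
    demos = [src[name] for name in names]
    # The real helper apparently drives selection via a dropdown (hence
    # dropdown_label and default_model); this sketch simply uses tabs.
    return gr.TabbedInterface(demos, tab_names=names, title=dropdown_label)

# Usage mirroring app.py after this commit:
# demo = get_app_sketch(models=list(PROVIDERS.keys()), src=PROVIDERS,
#                       default_model="OpenAI Coder")
# demo.launch()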
app_openai_coder.py CHANGED
@@ -11,7 +11,7 @@ OPENAI_MODELS_DISPLAY = [k.replace("openai:", "") for k in OPENAI_MODELS_FULL]
 # Create and launch the interface using get_app utility
 demo = get_app(
     models=OPENAI_MODELS_FULL,  # Use the full names with prefix
-    default_model=OPENAI_MODELS_FULL[5],
+    default_model=OPENAI_MODELS_FULL[-1],
     dropdown_label="Select OpenAI Model",
     choices=OPENAI_MODELS_DISPLAY,  # Display names without prefix
     fill_height=True,
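A quick illustration of the default-model change above: indexing with [-1] pins the default to the last entry of the model list, so it tracks whichever model is registered last rather than a fixed position. The list contents below are placeholders; the real OPENAI_MODELS_FULL is defined elsewhere in app_openai_coder.py and is not shown in this diff.

# Placeholder data purely to illustrate the [5] -> [-1] change; not the
# project's actual model list.
OPENAI_MODELS_FULL = [f"openai:model-{i}" for i in range(8)]
OPENAI_MODELS_DISPLAY = [k.replace("openai:", "") for k in OPENAI_MODELS_FULL]

old_default = OPENAI_MODELS_FULL[5]   # fixed position: shifts meaning if the list changes
new_default = OPENAI_MODELS_FULL[-1]  # always the most recently listed model
print(old_default, new_default)       # -> openai:model-5 openai:model-7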
pyproject.toml CHANGED
@@ -38,7 +38,7 @@ dependencies = [
     "langchain>=0.3.14",
     "chromadb>=0.5.23",
     "openai>=1.55.0",
-    "ai-gradio[crewai,deepseek,gemini,groq,hyperbolic,openai,smolagents,transformers, langchain, mistral,minimax,nvidia, qwen]>=0.2.34",
+    "ai-gradio[crewai,deepseek,gemini,groq,hyperbolic,openai,smolagents,transformers, langchain, mistral,minimax,nvidia, qwen]>=0.2.36",
 ]
 
 [tool.uv.sources]
requirements.txt CHANGED
@@ -2,7 +2,7 @@
 # uv pip compile pyproject.toml -o requirements.txt
 accelerate==1.2.1
     # via ai-gradio
-ai-gradio==0.2.34
+ai-gradio==0.2.36
     # via anychat (pyproject.toml)
 aiofiles==23.2.1
     # via gradio