
akhaliq committed
Commit b458f8a · 1 Parent(s): 31e34e6

update gemini

Files changed (4):
  1. app.py +1 -1
  2. app_gemini_coder.py +1 -1
  3. pyproject.toml +1 -1
  4. requirements.txt +1 -1
app.py CHANGED
@@ -33,6 +33,7 @@ from utils import get_app
 
 # Create mapping of providers to their demos
 PROVIDERS = {
+    "Gemini Coder": demo_gemini_coder,
     "OpenAI Coder": demo_openai_coder,
     "OpenAI": demo_openai,
     "NVIDIA Coder": demo_nvidia_coder,
@@ -41,7 +42,6 @@ PROVIDERS = {
     "Groq Coder": demo_groq_coder,
     "Qwen Coder": demo_qwen_coder,
     "Qwen": demo_qwen,
-    "Gemini Coder": demo_gemini_coder,
     "DeepSeek Coder": demo_deepseek,
     "Minimax Coder": demo_minimax_coder,
     "NVIDIA": demo_nvidia,
app_gemini_coder.py CHANGED
@@ -11,7 +11,7 @@ GEMINI_MODELS_DISPLAY = [k.replace("gemini:", "") for k in GEMINI_MODELS_FULL]
 # Create and launch the interface using get_app utility
 demo = get_app(
     models=GEMINI_MODELS_FULL,  # Use the full names with prefix
-    default_model=GEMINI_MODELS_FULL[-1],
+    default_model=GEMINI_MODELS_FULL[-2],
     dropdown_label="Select Gemini Model",
     choices=GEMINI_MODELS_DISPLAY,  # Display names without prefix
     src=ai_gradio.registry,
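
The only change here is the default model: index -1 (the last entry of GEMINI_MODELS_FULL) becomes -2 (the second-to-last). A standalone sketch with a hypothetical model list, purely to illustrate the indexing; the real GEMINI_MODELS_FULL is built from ai_gradio.registry keys prefixed with "gemini:":

# Hypothetical values for illustration only.
GEMINI_MODELS_FULL = ["gemini:model-a", "gemini:model-b", "gemini:model-c"]

print(GEMINI_MODELS_FULL[-1])  # gemini:model-c  (old default: last entry)
print(GEMINI_MODELS_FULL[-2])  # gemini:model-b  (new default: second-to-last entry)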
pyproject.toml CHANGED
@@ -38,7 +38,7 @@ dependencies = [
     "langchain>=0.3.14",
     "chromadb>=0.5.23",
     "openai>=1.55.0",
-    "ai-gradio[crewai,deepseek,gemini,groq,hyperbolic,openai,smolagents,transformers, langchain, mistral,minimax,nvidia, qwen]>=0.2.36",
+    "ai-gradio[crewai,deepseek,gemini,groq,hyperbolic,openai,smolagents,transformers, langchain, mistral,minimax,nvidia, qwen]>=0.2.38",
 ]
 
 [tool.uv.sources]
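
The dependency line only bumps the version floor from 0.2.36 to 0.2.38; the extras list is unchanged. For readers unfamiliar with the bracket syntax, a small sketch using the packaging library (an assumption here, not a dependency of this project) to split the specifier into name, extras, and version constraint:

from packaging.requirements import Requirement  # assumed to be installed; not part of this project

req = Requirement(
    "ai-gradio[crewai,deepseek,gemini,groq,hyperbolic,openai,smolagents,"
    "transformers, langchain, mistral,minimax,nvidia, qwen]>=0.2.38"
)
print(req.name)            # ai-gradio
print(sorted(req.extras))  # ['crewai', 'deepseek', 'gemini', 'groq', ...]
print(req.specifier)       # >=0.2.38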
requirements.txt CHANGED
@@ -2,7 +2,7 @@
 # uv pip compile pyproject.toml -o requirements.txt
 accelerate==1.2.1
     # via ai-gradio
-ai-gradio==0.2.36
+ai-gradio==0.2.38
     # via anychat (pyproject.toml)
 aiofiles==23.2.1
     # via gradio
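
requirements.txt is a compiled lockfile (the "# uv pip compile pyproject.toml -o requirements.txt" comment at the top records the command), so the ai-gradio pin simply follows the bump in pyproject.toml. A quick sketch, assuming the environment was installed from the regenerated file, of verifying the resolved version:

from importlib.metadata import version  # stdlib, Python 3.8+

# Assumes ai-gradio was installed from the updated requirements.txt.
installed = version("ai-gradio")
print(installed)  # expected: 0.2.38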