Steven10429 committed
Commit 12b2006 (verified) · Parent(s): 260542b

Update app.py

Files changed (1):
  1. app.py +14 -11
app.py CHANGED
@@ -8,7 +8,6 @@ from tqdm import tqdm
 from huggingface_hub import login, create_repo, HfApi
 import subprocess
 import math
-from dotenv import load_dotenv
 import gradio as gr
 import threading
 import queue
@@ -86,10 +85,10 @@ def check_system_resources(model_name):
     else:
         raise MemoryError(f"❌ 系统内存不足 (需要 {required_memory_gb:.1f}GB, 可用 {available_memory_gb:.1f}GB)")
 
-def setup_environment(model_name):
+def setup_environment(model_name, hf_token):
     """设置环境并返回设备信息"""
-    load_dotenv()
-    hf_token = os.getenv('HF_TOKEN')
+    if not hf_token:
+        hf_token = os.getenv('HF_TOKEN', None) # try to get from env
     if not hf_token:
         raise ValueError("请在环境变量中设置HF_TOKEN")
     login(hf_token)
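Together with the removal of `load_dotenv` in the first hunk, the token now comes from the UI field or the process environment rather than a `.env` file; the error string "请在环境变量中设置HF_TOKEN" means "please set HF_TOKEN in the environment variables". A minimal standalone sketch of the same resolve-then-login flow, using only the public `huggingface_hub` API; `authenticate` is a hypothetical helper name, not something defined in app.py:

```python
import os
from huggingface_hub import login

def authenticate(hf_token: str | None = None) -> None:
    # Prefer the token passed in from the Gradio form, fall back to the
    # HF_TOKEN environment variable, and fail early otherwise.
    if not hf_token:
        hf_token = os.getenv("HF_TOKEN")  # fallback for local runs / Space secrets
    if not hf_token:
        raise ValueError("Please set HF_TOKEN or pass a token explicitly")
    login(hf_token)  # validates the token and caches the credential for later hub calls
```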
@@ -98,10 +97,10 @@ def setup_environment(model_name):
     device, available_memory = check_system_resources(model_name)
     return device
 
-def create_hf_repo(repo_name, private=True):
+def create_hf_repo(repo_name, hf_token, private=True):
     """创建HuggingFace仓库"""
     try:
-        repo_url = create_repo(repo_name, private=private)
+        repo_url = create_repo(repo_name, private=private, token=hf_token)
         log(f"创建仓库成功: {repo_url}")
         return repo_url
     except Exception as e:
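Passing `token=hf_token` to `create_repo` means the repository is created with the UI-supplied credential rather than whatever token `login()` cached. A hedged standalone variant is sketched below; `ensure_repo` is a hypothetical name and `exist_ok=True` is an extra assumption for idempotency, not part of this commit:

```python
from huggingface_hub import create_repo

def ensure_repo(repo_name: str, hf_token: str, private: bool = True) -> str:
    # exist_ok=True (an assumption) keeps the call from failing if the
    # repository was already created in an earlier run.
    repo_url = create_repo(
        repo_name,
        private=private,
        token=hf_token,  # explicit token, as in the change above
        exist_ok=True,
    )
    return str(repo_url)  # create_repo returns a RepoUrl, a str subclass
```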
@@ -204,17 +203,17 @@ def quantize_and_push_model(model_path, repo_id, bits=8):
         log(f"量化或上传过程中出错: {str(e)}")
         raise
 
-def process_model(base_model, lora_model, repo_name, progress=gr.Progress()):
+def process_model(base_model, lora_model, repo_name, hf_token, progress=gr.Progress()):
     """处理模型的主函数,用于Gradio界面"""
     try:
         # 清空之前的日志
         current_logs.clear()
 
         # 设置环境和检查资源
-        device = setup_environment(base_model)
+        device = setup_environment(base_model, hf_token)
 
         # 创建HuggingFace仓库
-        repo_url = create_hf_repo(repo_name)
+        repo_url = create_hf_repo(repo_name, hf_token)
 
         # 设置输出目录
         output_dir = os.path.join(".", "output", repo_name)
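`process_model` is the Gradio click handler (its docstring reads "main function for processing the model, used by the Gradio UI"), so the token becomes the fourth positional input while `progress=gr.Progress()` stays last so Gradio can inject a live progress tracker. A self-contained sketch of a handler with the same signature shape follows; the name and body are illustrative, not the app's real merge/quantize pipeline:

```python
import time
import gradio as gr

def process_model_demo(base_model, lora_model, repo_name, hf_token,
                       progress=gr.Progress()):
    # Same argument order as the handler above; gr.Progress() must remain the
    # trailing default so Gradio can substitute a tracker when the event fires.
    steps = ["check resources", "create repo", "merge LoRA", "quantize", "upload"]
    for i, step in enumerate(steps):
        progress((i + 1) / len(steps), desc=step)  # drive the UI progress bar
        time.sleep(0.1)                            # stand-in for real work
    return f"{base_model} + {lora_model} -> {repo_name}"
```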
@@ -256,7 +255,7 @@ def create_ui():
         with gr.Column():
             base_model = gr.Textbox(
                 label="基础模型路径",
-                placeholder="例如: Qwen/Qwen2.5-7B-Instruct",
+                placeholder="例如: Qwen/Qwen2.5-14B-Instruct",
                 value="Qwen/Qwen2.5-7B-Instruct"
             )
             lora_model = gr.Textbox(
@@ -267,6 +266,10 @@ def create_ui():
                 label="HuggingFace仓库名称",
                 placeholder="输入要创建的仓库名称"
             )
+            hf_token = gr.Textbox(
+                label="HuggingFace Token",
+                placeholder="输入你的HuggingFace Token"
+            )
             convert_btn = gr.Button("开始转换", variant="primary")
 
         with gr.Column():
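The new textbox collects the token as plain text (placeholder: "enter your HuggingFace Token"). `gr.Textbox` also supports `type="password"`, which masks the value in the browser; a hedged variant of the component, which the commit itself does not use, might look like:

```python
import gradio as gr

# Assumption: a masked variant of the token field; the commit uses the default
# type="text". Everything else mirrors the component added above.
hf_token = gr.Textbox(
    label="HuggingFace Token",
    placeholder="Enter your HuggingFace token",
    type="password",  # hide the token as it is typed
)
```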
@@ -281,7 +284,7 @@ def create_ui():
         # 设置事件处理
         convert_btn.click(
             fn=process_model,
-            inputs=[base_model, lora_model, repo_name],
+            inputs=[base_model, lora_model, repo_name, hf_token],
             outputs=output
         )
 
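Gradio passes the `inputs` list to the handler positionally, so adding `hf_token` as the fourth entry matches the new fourth parameter of `process_model`. A self-contained sketch of the same wiring pattern, with a stub handler and illustrative English labels:

```python
import gradio as gr

def convert(base_model, lora_model, repo_name, hf_token):
    # Stub with the same arity as process_model (progress omitted for brevity).
    return f"would merge {lora_model} into {base_model} and push to {repo_name}"

with gr.Blocks() as demo:
    base_model = gr.Textbox(label="Base model path", value="Qwen/Qwen2.5-7B-Instruct")
    lora_model = gr.Textbox(label="LoRA model path")
    repo_name = gr.Textbox(label="HuggingFace repo name")
    hf_token = gr.Textbox(label="HuggingFace Token", type="password")
    convert_btn = gr.Button("Start conversion", variant="primary")
    output = gr.Textbox(label="Logs")

    # The order of `inputs` defines the positional argument order of the handler.
    convert_btn.click(
        fn=convert,
        inputs=[base_model, lora_model, repo_name, hf_token],
        outputs=output,
    )

if __name__ == "__main__":
    demo.launch()
```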
 
 