aladdin1995 committed on
Commit
6945334
·
verified ·
1 Parent(s): 6f21ce1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -5
app.py CHANGED
@@ -115,14 +115,14 @@ def gpu_predict(model_path, device_map, torch_dtype,
115
  # 兼容原逻辑:提取 think> 之后的内容
116
  try:
117
  assert output_res.count("think>") == 2
118
- new_prompt = output_res.split("think>")[-1]
119
- if new_prompt.startswith("\n"):
120
  new_prompt = new_prompt[1:]
121
  new_prompt = replace_single_quotes(new_prompt)
122
  except Exception:
123
  # 如果格式不符合预期,则直接回退为原始输入
124
- new_prompt = org_prompt_cot
125
- return new_prompt, ""
126
  except Exception as e:
127
  # 失败则返回原始提示词和错误信息
128
  return org_prompt_cot, f"推理失败:{e}"
@@ -198,7 +198,7 @@ with gr.Blocks(title="Prompt Enhancer_V2") as demo:
198
  )
199
  with gr.Row():
200
  temperature = gr.Slider(0, 1, value=0.1, step=0.05, label="Temperature")
201
- max_new_tokens = gr.Slider(16, 4096, value=2048, step=16, label="Max New Tokens")
202
  device = gr.Dropdown(choices=["cuda", "cpu"], value="cuda", label="推理device")
203
 
204
  state = gr.State(value=None)
 
115
  # 兼容原逻辑:提取 think> 之后的内容
116
  try:
117
  assert output_res.count("think>") == 2
118
+ new_prompt_ret = output_res.split("think>")[-1]
119
+ if new_prompt_ret.startswith("\n"):
120
  new_prompt = new_prompt[1:]
121
  new_prompt = replace_single_quotes(new_prompt)
122
  except Exception:
123
  # 如果格式不符合预期,则直接回退为原始输入
124
+ new_prompt_ret = org_prompt_cot
125
+ return new_prompt_ret, ""
126
  except Exception as e:
127
  # 失败则返回原始提示词和错误信息
128
  return org_prompt_cot, f"推理失败:{e}"
 
198
  )
199
  with gr.Row():
200
  temperature = gr.Slider(0, 1, value=0.1, step=0.05, label="Temperature")
201
+ max_new_tokens = gr.Slider(16, 16384, value=4096, step=16, label="Max New Tokens")
202
  device = gr.Dropdown(choices=["cuda", "cpu"], value="cuda", label="推理device")
203
 
204
  state = gr.State(value=None)