rahul7star committed on
Commit
4ed1f6e
·
verified ·
1 Parent(s): a02142f

Update app_low.py

Browse files
Files changed (1) hide show
  1. app_low.py +12 -2
app_low.py CHANGED
@@ -52,16 +52,26 @@ def enhance_prompt(user_prompt, temperature, max_tokens, chat_history):
52
  return chat_history
53
 
54
 
 
55
def enhance_prompt1(user_prompt, temperature, max_tokens, chat_history):
    """Ask the chat model to enrich *user_prompt* and return the raw generated text.

    NOTE(review): `temperature`, `max_tokens`, and `chat_history` are accepted
    but not used by this implementation; only the generated string is returned.
    """
    # System instruction plus the user's prompt, in chat-message form.
    chat_messages = [
        {"role": "system", "content": "Enhance and expand the following prompt with more details and context:"},
        {"role": "user", "content": user_prompt},
    ]

    # Render the messages through the model's chat template, then generate.
    templated = tokenizer.apply_chat_template(chat_messages, tokenize=False, add_generation_prompt=True)
    generations = pipe(templated, max_new_tokens=256)

    # Log and return the model's text exactly as produced.
    enhanced = generations[0]['generated_text']
    print(enhanced)
    return enhanced
 
 
 
 
 
65
 
66
  # ============================================================
67
  # 3️⃣ Gradio UI
 
52
  return chat_history
53
 
54
 
55
# ===================== Prompt Enhancer Function =====================
def enhance_prompt1(user_prompt, temperature, max_tokens, chat_history):
    """Enhance a user prompt with the LLM and append the exchange to the chat history.

    Parameters
    ----------
    user_prompt : str
        The raw prompt to enhance.
    temperature : float or None
        Sampling temperature; when falsy, greedy decoding is left as the
        pipeline's default.
    max_tokens : int or None
        Maximum number of new tokens to generate; falls back to 256 when falsy.
    chat_history : list[dict] or None
        Existing chat messages; a fresh list is used when falsy.

    Returns
    -------
    list[dict]
        ``chat_history`` with the user prompt and the assistant reply appended,
        each as a ``{"role": ..., "content": ...}`` dict.
    """
    # Build the messages for the model
    messages = [
        {"role": "system", "content": "Enhance and expand the following prompt with more details and context:"},
        {"role": "user", "content": user_prompt}
    ]

    # Generate model prompt
    prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)

    # Fix: temperature and max_tokens were accepted but ignored (max_new_tokens
    # was hard-coded to 256). Forward them to the pipeline; enable sampling only
    # when a positive temperature is supplied so the default behavior is kept.
    gen_kwargs = {"max_new_tokens": int(max_tokens) if max_tokens else 256}
    if temperature:
        gen_kwargs["temperature"] = float(temperature)
        gen_kwargs["do_sample"] = True
    output = pipe(prompt, **gen_kwargs)

    enhanced_text = output[0]['generated_text']
    print(enhanced_text)

    # Wrap in a chat message format
    chat_history = chat_history or []
    chat_history.append({"role": "user", "content": user_prompt})
    chat_history.append({"role": "assistant", "content": enhanced_text})

    return chat_history
75
 
76
  # ============================================================
77
  # 3️⃣ Gradio UI