botbottingbot committed
Commit bb444e1 · verified · 1 Parent(s): 508deab

Update App.py

Files changed (1)
  1. App.py +32 -36
App.py CHANGED
@@ -13,53 +13,49 @@ GEN_BY_ID = {m["id"]: m for m in GENERATORS}
 llm = pipeline("text-generation", model="gpt2", max_new_tokens=512)

 def call_llm(prompt):
-    out = llm(prompt, max_new_tokens=512, do_sample=False)[0]["generated_text"]
-    return out[len(prompt):].strip() if out.startswith(prompt) else out
+    o = llm(prompt, max_new_tokens=512, do_sample=False)[0]["generated_text"]
+    return o[len(prompt):].strip() if o.startswith(prompt) else o

-def generator_prompt(module_id, *inputs):
-    m = GEN_BY_ID[module_id]
+def generator_prompt(mid, *inputs):
+    m = GEN_BY_ID[mid]
     keys = list(m["input_placeholders"].keys())
-    inputs = {k: inputs[i] if i < len(inputs) else "" for i, k in enumerate(keys)}
-    sections = m["output_sections"]
+    vals = {k: inputs[i] if i < len(inputs) else "" for i, k in enumerate(keys)}
+    secs = m["output_sections"]

     p = []
-    p.append("You are a structured reasoning module.")
     p.append(f"MODULE: {m['label']}")
-    p.append("INPUTS:")
-    for k, v in inputs.items():
+    p.append("INPUT:")
+    for k, v in vals.items():
         p.append(f"{k.upper()}: {v}")
     p.append("")
-    p.append("Produce the following sections:")
-    for s in sections:
+    p.append("OUTPUT SECTIONS:")
+    for s in secs:
         p.append(f"- {s}")
     p.append("")
-    for s in sections:
+    for s in secs:
         p.append(f"{s}:")
         p.append("[content]")
     p.append("")
     return "\n".join(p)

-def checker_prompt(checker_id, *vals):
-    c = CHECKERS[checker_id]
+def checker_prompt(cid, *vals):
+    c = CHECKERS[cid]
     secs = c["output_sections"]
-    if len(vals) == 0:
-        orig, draft = "", ""
-    elif len(vals) == 1:
-        orig, draft = "", vals[0]
+    if len(vals) < 2:
+        orig, draft = "", vals[0] if vals else ""
     else:
-        *orig_parts, draft = vals
-        orig = "\n\n".join(orig_parts)
+        orig = "\n\n".join(vals[:-1])
+        draft = vals[-1]

     p = []
-    p.append("You are a strict evaluator.")
     p.append(f"CHECKER: {c['label']}")
     p.append("ORIGINAL TASK:")
     p.append(orig)
     p.append("")
-    p.append("DRAFT OUTPUT:")
+    p.append("DRAFT:")
     p.append(draft)
     p.append("")
-    p.append("Respond with sections:")
+    p.append("RESPOND WITH:")
     for s in secs:
         p.append(f"- {s}")
     p.append("")
@@ -77,32 +73,32 @@ def run_checker(cid, *inputs):

 def build_ui():
     with gr.Blocks(title="Modular Intelligence") as demo:
-        gr.Markdown("# Modular Intelligence\nSelect a module and generate output.")
+        gr.Markdown("# Modular Intelligence Demo")

         for m in GENERATORS:
             with gr.Tab(m["label"]):
                 gr.Markdown(m["description"])
-                input_boxes = []
-                for key, ph in m["input_placeholders"].items():
-                    tb = gr.Textbox(label=key, placeholder=ph, lines=4)
-                    input_boxes.append(tb)
+                inputs = []
+                for k, ph in m["input_placeholders"].items():
+                    t = gr.Textbox(label=k, placeholder=ph, lines=4)
+                    inputs.append(t)

-                output_box = gr.Textbox(label="Module output", lines=18)
-                gr.Button("Run module").click(
+                out = gr.Textbox(label="Output", lines=16)
+                gr.Button("Run").click(
                     fn=partial(run_generator, m["id"]),
-                    inputs=input_boxes,
-                    outputs=output_box
+                    inputs=inputs,
+                    outputs=out
                 )

                 if m.get("has_checker"):
                     cid = m.get("checker_id")
                     if cid in CHECKERS:
                         gr.Markdown("### Checker")
-                        checker_out = gr.Textbox(label="Checker output", lines=14)
-                        gr.Button("Run checker").click(
+                        chk = gr.Textbox(label="Checker Output", lines=14)
+                        gr.Button("Check").click(
                             fn=partial(run_checker, cid),
-                            inputs=input_boxes + [output_box],
-                            outputs=checker_out
+                            inputs=inputs + [out],
+                            outputs=chk
                         )
     return demo
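
A note on the refactored checker_prompt: the checker button wires every generator input box plus the generator output box into run_checker (inputs=inputs + [out]), so the prompt builder receives a variable-length tuple that it must split into the original task and the draft. Below is a minimal standalone sketch of that splitting rule as it reads after this commit; the helper name split_vals and the sample strings are made up for illustration.

# Hypothetical sketch of the argument handling in the new checker_prompt.
# The splitting logic mirrors the diff above; the function name and the
# sample values are illustrative only.
def split_vals(*vals):
    if len(vals) < 2:
        # zero args: everything empty; one arg: treat it as the draft
        orig, draft = "", vals[0] if vals else ""
    else:
        # several args: last one is the draft, the rest form the original task
        orig = "\n\n".join(vals[:-1])
        draft = vals[-1]
    return orig, draft

print(split_vals())                            # ('', '')
print(split_vals("draft only"))                # ('', 'draft only')
print(split_vals("goal", "context", "draft"))  # ('goal\n\ncontext', 'draft')

This collapses the old == 0 and == 1 branches into a single < 2 case while keeping the same behaviour for empty and single-value calls.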