Update App.py
Browse files
App.py
CHANGED
|
@@ -13,53 +13,49 @@ GEN_BY_ID = {m["id"]: m for m in GENERATORS}
|
|
| 13 |
llm = pipeline("text-generation", model="gpt2", max_new_tokens=512)
|
| 14 |
|
| 15 |
def call_llm(prompt):
|
| 16 |
-
|
| 17 |
-
return
|
| 18 |
|
| 19 |
-
def generator_prompt(
|
| 20 |
-
m = GEN_BY_ID[
|
| 21 |
keys = list(m["input_placeholders"].keys())
|
| 22 |
-
|
| 23 |
-
|
| 24 |
|
| 25 |
p = []
|
| 26 |
-
p.append("You are a structured reasoning module.")
|
| 27 |
p.append(f"MODULE: {m['label']}")
|
| 28 |
-
p.append("
|
| 29 |
-
for k, v in
|
| 30 |
p.append(f"{k.upper()}: {v}")
|
| 31 |
p.append("")
|
| 32 |
-
p.append("
|
| 33 |
-
for s in
|
| 34 |
p.append(f"- {s}")
|
| 35 |
p.append("")
|
| 36 |
-
for s in
|
| 37 |
p.append(f"{s}:")
|
| 38 |
p.append("[content]")
|
| 39 |
p.append("")
|
| 40 |
return "\n".join(p)
|
| 41 |
|
| 42 |
-
def checker_prompt(
|
| 43 |
-
c = CHECKERS[
|
| 44 |
secs = c["output_sections"]
|
| 45 |
-
if len(vals)
|
| 46 |
-
orig, draft = "", ""
|
| 47 |
-
elif len(vals) == 1:
|
| 48 |
-
orig, draft = "", vals[0]
|
| 49 |
else:
|
| 50 |
-
|
| 51 |
-
|
| 52 |
|
| 53 |
p = []
|
| 54 |
-
p.append("You are a strict evaluator.")
|
| 55 |
p.append(f"CHECKER: {c['label']}")
|
| 56 |
p.append("ORIGINAL TASK:")
|
| 57 |
p.append(orig)
|
| 58 |
p.append("")
|
| 59 |
-
p.append("DRAFT
|
| 60 |
p.append(draft)
|
| 61 |
p.append("")
|
| 62 |
-
p.append("
|
| 63 |
for s in secs:
|
| 64 |
p.append(f"- {s}")
|
| 65 |
p.append("")
|
|
@@ -77,32 +73,32 @@ def run_checker(cid, *inputs):
|
|
| 77 |
|
| 78 |
def build_ui():
|
| 79 |
with gr.Blocks(title="Modular Intelligence") as demo:
|
| 80 |
-
gr.Markdown("# Modular Intelligence
|
| 81 |
|
| 82 |
for m in GENERATORS:
|
| 83 |
with gr.Tab(m["label"]):
|
| 84 |
gr.Markdown(m["description"])
|
| 85 |
-
|
| 86 |
-
for
|
| 87 |
-
|
| 88 |
-
|
| 89 |
|
| 90 |
-
|
| 91 |
-
gr.Button("Run
|
| 92 |
fn=partial(run_generator, m["id"]),
|
| 93 |
-
inputs=
|
| 94 |
-
outputs=
|
| 95 |
)
|
| 96 |
|
| 97 |
if m.get("has_checker"):
|
| 98 |
cid = m.get("checker_id")
|
| 99 |
if cid in CHECKERS:
|
| 100 |
gr.Markdown("### Checker")
|
| 101 |
-
|
| 102 |
-
gr.Button("
|
| 103 |
fn=partial(run_checker, cid),
|
| 104 |
-
inputs=
|
| 105 |
-
outputs=
|
| 106 |
)
|
| 107 |
return demo
|
| 108 |
|
|
|
|
# Shared Hugging Face text-generation pipeline (GPT-2) used by call_llm.
# NOTE(review): max_new_tokens is set both here and per-call in call_llm;
# the per-call value wins — confirm the duplication is intentional.
llm = pipeline("text-generation", model="gpt2", max_new_tokens=512)
| 14 |
|
| 15 |
def call_llm(prompt):
    """Run the shared LLM pipeline on *prompt* and return the new text only.

    Hugging Face text-generation pipelines typically echo the prompt at the
    start of the generation; when that happens, the echoed prefix is removed
    and the remainder is stripped of surrounding whitespace.
    """
    generated = llm(prompt, max_new_tokens=512, do_sample=False)[0]["generated_text"]
    if generated.startswith(prompt):
        return generated[len(prompt):].strip()
    return generated
| 18 |
|
| 19 |
+
def generator_prompt(mid, *inputs):
    """Build the LLM prompt for generator module *mid*.

    *inputs* are the positional textbox values, matched in order against the
    module's declared input placeholder keys; missing trailing values become
    empty strings, extra values are ignored.
    """
    module = GEN_BY_ID[mid]
    placeholder_keys = list(module["input_placeholders"].keys())
    # Pair each declared key with its positional value ("" when not supplied).
    values = {}
    for idx, key in enumerate(placeholder_keys):
        values[key] = inputs[idx] if idx < len(inputs) else ""
    sections = module["output_sections"]

    lines = [f"MODULE: {module['label']}", "INPUT:"]
    for key, val in values.items():
        lines.append(f"{key.upper()}: {val}")
    lines.append("")
    lines.append("OUTPUT SECTIONS:")
    lines.extend(f"- {s}" for s in sections)
    lines.append("")
    # Skeleton the model is expected to fill in, one stanza per section.
    for s in sections:
        lines.extend((f"{s}:", "[content]", ""))
    return "\n".join(lines)
| 40 |
|
| 41 |
+
def checker_prompt(cid, *vals):
    """Build the evaluation prompt for checker *cid*.

    *vals* are the generator tab's textbox values followed by the generator's
    draft output: the last value is treated as the draft and everything before
    it is joined into the original-task text.
    """
    c = CHECKERS[cid]
    secs = c["output_sections"]
    # Fewer than two values: no separate task text; the single value (if any)
    # is the draft.
    if len(vals) < 2:
        orig, draft = "", vals[0] if vals else ""
    else:
        orig = "\n\n".join(vals[:-1])
        draft = vals[-1]

    p = []
    p.append(f"CHECKER: {c['label']}")
    p.append("ORIGINAL TASK:")
    p.append(orig)
    p.append("")
    p.append("DRAFT:")
    p.append(draft)
    p.append("")
    p.append("RESPOND WITH:")
    for s in secs:
        p.append(f"- {s}")
    p.append("")
    # NOTE(review): the remainder of this function falls outside the visible
    # diff hunk — presumably `return "\n".join(p)` as in generator_prompt;
    # confirm against the full file.
|
|
|
| 73 |
|
| 74 |
def build_ui():
    """Assemble the Gradio UI: one tab per generator, plus an optional checker.

    Returns the gr.Blocks demo object (caller is expected to launch it).
    """
    with gr.Blocks(title="Modular Intelligence") as demo:
        gr.Markdown("# Modular Intelligence Demo")

        for m in GENERATORS:
            with gr.Tab(m["label"]):
                gr.Markdown(m["description"])

                # One textbox per declared input placeholder, in declaration order.
                inputs = [
                    gr.Textbox(label=key, placeholder=ph, lines=4)
                    for key, ph in m["input_placeholders"].items()
                ]

                out = gr.Textbox(label="Output", lines=16)
                run_button = gr.Button("Run")
                run_button.click(
                    fn=partial(run_generator, m["id"]),
                    inputs=inputs,
                    outputs=out,
                )

                # Wire up the checker only when the module declares one and
                # its id resolves to a known checker.
                if m.get("has_checker"):
                    cid = m.get("checker_id")
                    if cid in CHECKERS:
                        gr.Markdown("### Checker")
                        chk = gr.Textbox(label="Checker Output", lines=14)
                        check_button = gr.Button("Check")
                        check_button.click(
                            fn=partial(run_checker, cid),
                            inputs=inputs + [out],
                            outputs=chk,
                        )
    return demo
| 104 |
|